diff --git a/lib/bs4/tests/__init__.py b/lib/bs4/tests/__init__.py
deleted file mode 100644
index 142c8cc3f15bc826e95840a409879d4b93bffa4c..0000000000000000000000000000000000000000
--- a/lib/bs4/tests/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"The beautifulsoup tests."
diff --git a/lib/bs4/tests/test_builder_registry.py b/lib/bs4/tests/test_builder_registry.py
deleted file mode 100644
index 90cad829334f677a497519bb5abdaa997ff3d7d1..0000000000000000000000000000000000000000
--- a/lib/bs4/tests/test_builder_registry.py
+++ /dev/null
@@ -1,147 +0,0 @@
-"""Tests of the builder registry."""
-
-import unittest
-import warnings
-
-from bs4 import BeautifulSoup
-from bs4.builder import (
-    builder_registry as registry,
-    HTMLParserTreeBuilder,
-    TreeBuilderRegistry,
-)
-
-try:
-    from bs4.builder import HTML5TreeBuilder
-    HTML5LIB_PRESENT = True
-except ImportError:
-    HTML5LIB_PRESENT = False
-
-try:
-    from bs4.builder import (
-        LXMLTreeBuilderForXML,
-        LXMLTreeBuilder,
-        )
-    LXML_PRESENT = True
-except ImportError:
-    LXML_PRESENT = False
-
-
-class BuiltInRegistryTest(unittest.TestCase):
-    """Test the built-in registry with the default builders registered."""
-
-    def test_combination(self):
-        if LXML_PRESENT:
-            self.assertEqual(registry.lookup('fast', 'html'),
-                             LXMLTreeBuilder)
-
-        if LXML_PRESENT:
-            self.assertEqual(registry.lookup('permissive', 'xml'),
-                             LXMLTreeBuilderForXML)
-        self.assertEqual(registry.lookup('strict', 'html'),
-                          HTMLParserTreeBuilder)
-        if HTML5LIB_PRESENT:
-            self.assertEqual(registry.lookup('html5lib', 'html'),
-                              HTML5TreeBuilder)
-
-    def test_lookup_by_markup_type(self):
-        if LXML_PRESENT:
-            self.assertEqual(registry.lookup('html'), LXMLTreeBuilder)
-            self.assertEqual(registry.lookup('xml'), LXMLTreeBuilderForXML)
-        else:
-            self.assertEqual(registry.lookup('xml'), None)
-            if HTML5LIB_PRESENT:
-                self.assertEqual(registry.lookup('html'), HTML5TreeBuilder)
-            else:
-                self.assertEqual(registry.lookup('html'), HTMLParserTreeBuilder)
-
-    def test_named_library(self):
-        if LXML_PRESENT:
-            self.assertEqual(registry.lookup('lxml', 'xml'),
-                             LXMLTreeBuilderForXML)
-            self.assertEqual(registry.lookup('lxml', 'html'),
-                             LXMLTreeBuilder)
-        if HTML5LIB_PRESENT:
-            self.assertEqual(registry.lookup('html5lib'),
-                              HTML5TreeBuilder)
-
-        self.assertEqual(registry.lookup('html.parser'),
-                          HTMLParserTreeBuilder)
-
-    def test_beautifulsoup_constructor_does_lookup(self):
-
-        with warnings.catch_warnings(record=True) as w:
-            # This will create a warning about not explicitly
-            # specifying a parser, but we'll ignore it.
-
-            # You can pass in a string.
-            BeautifulSoup("", features="html")
-            # Or a list of strings.
-            BeautifulSoup("", features=["html", "fast"])
-
-        # You'll get an exception if BS can't find an appropriate
-        # builder.
-        self.assertRaises(ValueError, BeautifulSoup,
-                          "", features="no-such-feature")
-
-class RegistryTest(unittest.TestCase):
-    """Test the TreeBuilderRegistry class in general."""
-
-    def setUp(self):
-        self.registry = TreeBuilderRegistry()
-
-    def builder_for_features(self, *feature_list):
-        cls = type('Builder_' + '_'.join(feature_list),
-                   (object,), {'features' : feature_list})
-
-        self.registry.register(cls)
-        return cls
-
-    def test_register_with_no_features(self):
-        builder = self.builder_for_features()
-
-        # Since the builder advertises no features, you can't find it
-        # by looking up features.
-        self.assertEqual(self.registry.lookup('foo'), None)
-
-        # But you can find it by doing a lookup with no features, if
-        # this happens to be the only registered builder.
-        self.assertEqual(self.registry.lookup(), builder)
-
-    def test_register_with_features_makes_lookup_succeed(self):
-        builder = self.builder_for_features('foo', 'bar')
-        self.assertEqual(self.registry.lookup('foo'), builder)
-        self.assertEqual(self.registry.lookup('bar'), builder)
-
-    def test_lookup_fails_when_no_builder_implements_feature(self):
-        builder = self.builder_for_features('foo', 'bar')
-        self.assertEqual(self.registry.lookup('baz'), None)
-
-    def test_lookup_gets_most_recent_registration_when_no_feature_specified(self):
-        builder1 = self.builder_for_features('foo')
-        builder2 = self.builder_for_features('bar')
-        self.assertEqual(self.registry.lookup(), builder2)
-
-    def test_lookup_fails_when_no_tree_builders_registered(self):
-        self.assertEqual(self.registry.lookup(), None)
-
-    def test_lookup_gets_most_recent_builder_supporting_all_features(self):
-        has_one = self.builder_for_features('foo')
-        has_the_other = self.builder_for_features('bar')
-        has_both_early = self.builder_for_features('foo', 'bar', 'baz')
-        has_both_late = self.builder_for_features('foo', 'bar', 'quux')
-        lacks_one = self.builder_for_features('bar')
-        has_the_other = self.builder_for_features('foo')
-
-        # There are two builders featuring 'foo' and 'bar', but
-        # the one that also features 'quux' was registered later.
-        self.assertEqual(self.registry.lookup('foo', 'bar'),
-                          has_both_late)
-
-        # There is only one builder featuring 'foo', 'bar', and 'baz'.
-        self.assertEqual(self.registry.lookup('foo', 'bar', 'baz'),
-                          has_both_early)
-
-    def test_lookup_fails_when_cannot_reconcile_requested_features(self):
-        builder1 = self.builder_for_features('foo', 'bar')
-        builder2 = self.builder_for_features('foo', 'baz')
-        self.assertEqual(self.registry.lookup('bar', 'baz'), None)
diff --git a/lib/bs4/tests/test_docs.py b/lib/bs4/tests/test_docs.py
deleted file mode 100644
index 5b9f677093aea701364bb4735688225b11581261..0000000000000000000000000000000000000000
--- a/lib/bs4/tests/test_docs.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"Test harness for doctests."
-
-# pylint: disable-msg=E0611,W0142
-
-__metaclass__ = type
-__all__ = [
-    'additional_tests',
-    ]
-
-import atexit
-import doctest
-import os
-#from pkg_resources import (
-#    resource_filename, resource_exists, resource_listdir, cleanup_resources)
-import unittest
-
-DOCTEST_FLAGS = (
-    doctest.ELLIPSIS |
-    doctest.NORMALIZE_WHITESPACE |
-    doctest.REPORT_NDIFF)
-
-
-# def additional_tests():
-#     "Run the doc tests (README.txt and docs/*, if any exist)"
-#     doctest_files = [
-#         os.path.abspath(resource_filename('bs4', 'README.txt'))]
-#     if resource_exists('bs4', 'docs'):
-#         for name in resource_listdir('bs4', 'docs'):
-#             if name.endswith('.txt'):
-#                 doctest_files.append(
-#                     os.path.abspath(
-#                         resource_filename('bs4', 'docs/%s' % name)))
-#     kwargs = dict(module_relative=False, optionflags=DOCTEST_FLAGS)
-#     atexit.register(cleanup_resources)
-#     return unittest.TestSuite((
-#         doctest.DocFileSuite(*doctest_files, **kwargs)))
diff --git a/lib/bs4/tests/test_html5lib.py b/lib/bs4/tests/test_html5lib.py
deleted file mode 100644
index 0f89d62445a3f608fa61052c912b46d3c621bea3..0000000000000000000000000000000000000000
--- a/lib/bs4/tests/test_html5lib.py
+++ /dev/null
@@ -1,130 +0,0 @@
-"""Tests to ensure that the html5lib tree builder generates good trees."""
-
-import warnings
-
-try:
-    from bs4.builder import HTML5TreeBuilder
-    HTML5LIB_PRESENT = True
-except ImportError, e:
-    HTML5LIB_PRESENT = False
-from bs4.element import SoupStrainer
-from bs4.testing import (
-    HTML5TreeBuilderSmokeTest,
-    SoupTest,
-    skipIf,
-)
-
-@skipIf(
-    not HTML5LIB_PRESENT,
-    "html5lib seems not to be present, not testing its tree builder.")
-class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
-    """See ``HTML5TreeBuilderSmokeTest``."""
-
-    @property
-    def default_builder(self):
-        return HTML5TreeBuilder()
-
-    def test_soupstrainer(self):
-        # The html5lib tree builder does not support SoupStrainers.
-        strainer = SoupStrainer("b")
-        markup = "<p>A <b>bold</b> statement.</p>"
-        with warnings.catch_warnings(record=True) as w:
-            soup = self.soup(markup, parse_only=strainer)
-        self.assertEqual(
-            soup.decode(), self.document_for(markup))
-
-        self.assertTrue(
-            "the html5lib tree builder doesn't support parse_only" in
-            str(w[0].message))
-
-    def test_correctly_nested_tables(self):
-        """html5lib inserts <tbody> tags where other parsers don't."""
-        markup = ('<table id="1">'
-                  '<tr>'
-                  "<td>Here's another table:"
-                  '<table id="2">'
-                  '<tr><td>foo</td></tr>'
-                  '</table></td>')
-
-        self.assertSoupEquals(
-            markup,
-            '<table id="1"><tbody><tr><td>Here\'s another table:'
-            '<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
-            '</td></tr></tbody></table>')
-
-        self.assertSoupEquals(
-            "<table><thead><tr><td>Foo</td></tr></thead>"
-            "<tbody><tr><td>Bar</td></tr></tbody>"
-            "<tfoot><tr><td>Baz</td></tr></tfoot></table>")
-
-    def test_xml_declaration_followed_by_doctype(self):
-        markup = '''<?xml version="1.0" encoding="utf-8"?>
-<!DOCTYPE html>
-<html>
-  <head>
-  </head>
-  <body>
-   <p>foo</p>
-  </body>
-</html>'''
-        soup = self.soup(markup)
-        # Verify that we can reach the <p> tag; this means the tree is connected.
-        self.assertEqual(b"<p>foo</p>", soup.p.encode())
-
-    def test_reparented_markup(self):
-        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
-        soup = self.soup(markup)
-        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
-        self.assertEqual(2, len(soup.find_all('p')))
-
-
-    def test_reparented_markup_ends_with_whitespace(self):
-        markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
-        soup = self.soup(markup)
-        self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
-        self.assertEqual(2, len(soup.find_all('p')))
-
-    def test_reparented_markup_containing_identical_whitespace_nodes(self):
-        """Verify that we keep the two whitespace nodes in this
-        document distinct when reparenting the adjacent <tbody> tags.
-        """
-        markup = '<table> <tbody><tbody><ims></tbody> </table>'
-        soup = self.soup(markup)
-        space1, space2 = soup.find_all(string=' ')
-        tbody1, tbody2 = soup.find_all('tbody')
-        assert space1.next_element is tbody1
-        assert tbody2.next_element is space2
-
-    def test_reparented_markup_containing_children(self):
-        markup = '<div><a>aftermath<p><noscript>target</noscript>aftermath</a></p></div>'
-        soup = self.soup(markup)
-        noscript = soup.noscript
-        self.assertEqual("target", noscript.next_element)
-        target = soup.find(string='target')
-
-        # The 'aftermath' string was duplicated; we want the second one.
-        final_aftermath = soup.find_all(string='aftermath')[-1]
-
-        # The <noscript> tag was moved beneath a copy of the <a> tag,
-        # but the 'target' string within is still connected to the
-        # (second) 'aftermath' string.
-        self.assertEqual(final_aftermath, target.next_element)
-        self.assertEqual(target, final_aftermath.previous_element)
-        
-    def test_processing_instruction(self):
-        """Processing instructions become comments."""
-        markup = b"""<?PITarget PIContent?>"""
-        soup = self.soup(markup)
-        assert str(soup).startswith("<!--?PITarget PIContent?-->")
-
-    def test_cloned_multivalue_node(self):
-        markup = b"""<a class="my_class"><p></a>"""
-        soup = self.soup(markup)
-        a1, a2 = soup.find_all('a')
-        self.assertEqual(a1, a2)
-        assert a1 is not a2
-
-    def test_foster_parenting(self):
-        markup = b"""<table><td></tbody>A"""
-        soup = self.soup(markup)
-        self.assertEqual(u"<body>A<table><tbody><tr><td></td></tr></tbody></table></body>", soup.body.decode())
diff --git a/lib/bs4/tests/test_htmlparser.py b/lib/bs4/tests/test_htmlparser.py
deleted file mode 100644
index b45e35f9998bcdee334e5515adb5a0dbe0c79ccf..0000000000000000000000000000000000000000
--- a/lib/bs4/tests/test_htmlparser.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""Tests to ensure that the html.parser tree builder generates good
-trees."""
-
-from pdb import set_trace
-import pickle
-from bs4.testing import SoupTest, HTMLTreeBuilderSmokeTest
-from bs4.builder import HTMLParserTreeBuilder
-
-class HTMLParserTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
-
-    @property
-    def default_builder(self):
-        return HTMLParserTreeBuilder()
-
-    def test_namespaced_system_doctype(self):
-        # html.parser can't handle namespaced doctypes, so skip this one.
-        pass
-
-    def test_namespaced_public_doctype(self):
-        # html.parser can't handle namespaced doctypes, so skip this one.
-        pass
-
-    def test_builder_is_pickled(self):
-        """Unlike most tree builders, HTMLParserTreeBuilder and will
-        be restored after pickling.
-        """
-        tree = self.soup("<a><b>foo</a>")
-        dumped = pickle.dumps(tree, 2)
-        loaded = pickle.loads(dumped)
-        self.assertTrue(isinstance(loaded.builder, type(tree.builder)))
-
-
diff --git a/lib/bs4/tests/test_lxml.py b/lib/bs4/tests/test_lxml.py
deleted file mode 100644
index a05870b912ef1d7fa33ecd8435666fa1e42f4704..0000000000000000000000000000000000000000
--- a/lib/bs4/tests/test_lxml.py
+++ /dev/null
@@ -1,76 +0,0 @@
-"""Tests to ensure that the lxml tree builder generates good trees."""
-
-import re
-import warnings
-
-try:
-    import lxml.etree
-    LXML_PRESENT = True
-    LXML_VERSION = lxml.etree.LXML_VERSION
-except ImportError, e:
-    LXML_PRESENT = False
-    LXML_VERSION = (0,)
-
-if LXML_PRESENT:
-    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
-
-from bs4 import (
-    BeautifulSoup,
-    BeautifulStoneSoup,
-    )
-from bs4.element import Comment, Doctype, SoupStrainer
-from bs4.testing import skipIf
-from bs4.tests import test_htmlparser
-from bs4.testing import (
-    HTMLTreeBuilderSmokeTest,
-    XMLTreeBuilderSmokeTest,
-    SoupTest,
-    skipIf,
-)
-
-@skipIf(
-    not LXML_PRESENT,
-    "lxml seems not to be present, not testing its tree builder.")
-class LXMLTreeBuilderSmokeTest(SoupTest, HTMLTreeBuilderSmokeTest):
-    """See ``HTMLTreeBuilderSmokeTest``."""
-
-    @property
-    def default_builder(self):
-        return LXMLTreeBuilder()
-
-    def test_out_of_range_entity(self):
-        self.assertSoupEquals(
-            "<p>foo&#10000000000000;bar</p>", "<p>foobar</p>")
-        self.assertSoupEquals(
-            "<p>foo&#x10000000000000;bar</p>", "<p>foobar</p>")
-        self.assertSoupEquals(
-            "<p>foo&#1000000000;bar</p>", "<p>foobar</p>")
-
-    # In lxml < 2.3.5, an empty doctype causes a segfault. Skip this
-    # test if an old version of lxml is installed.
-
-    @skipIf(
-        not LXML_PRESENT or LXML_VERSION < (2,3,5,0),
-        "Skipping doctype test for old version of lxml to avoid segfault.")
-    def test_empty_doctype(self):
-        soup = self.soup("<!DOCTYPE>")
-        doctype = soup.contents[0]
-        self.assertEqual("", doctype.strip())
-
-    def test_beautifulstonesoup_is_xml_parser(self):
-        # Make sure that the deprecated BSS class uses an xml builder
-        # if one is installed.
-        with warnings.catch_warnings(record=True) as w:
-            soup = BeautifulStoneSoup("<b />")
-        self.assertEqual(u"<b/>", unicode(soup.b))
-        self.assertTrue("BeautifulStoneSoup class is deprecated" in str(w[0].message))
-
-@skipIf(
-    not LXML_PRESENT,
-    "lxml seems not to be present, not testing its XML tree builder.")
-class LXMLXMLTreeBuilderSmokeTest(SoupTest, XMLTreeBuilderSmokeTest):
-    """See ``HTMLTreeBuilderSmokeTest``."""
-
-    @property
-    def default_builder(self):
-        return LXMLTreeBuilderForXML()
diff --git a/lib/bs4/tests/test_soup.py b/lib/bs4/tests/test_soup.py
deleted file mode 100644
index f3e69edf308d1a3468190dbf28f8d953341f9524..0000000000000000000000000000000000000000
--- a/lib/bs4/tests/test_soup.py
+++ /dev/null
@@ -1,501 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Tests of Beautiful Soup as a whole."""
-
-from pdb import set_trace
-import logging
-import unittest
-import sys
-import tempfile
-
-from bs4 import (
-    BeautifulSoup,
-    BeautifulStoneSoup,
-)
-from bs4.element import (
-    CharsetMetaAttributeValue,
-    ContentMetaAttributeValue,
-    SoupStrainer,
-    NamespacedAttribute,
-    )
-import bs4.dammit
-from bs4.dammit import (
-    EntitySubstitution,
-    UnicodeDammit,
-    EncodingDetector,
-)
-from bs4.testing import (
-    SoupTest,
-    skipIf,
-)
-import warnings
-
-try:
-    from bs4.builder import LXMLTreeBuilder, LXMLTreeBuilderForXML
-    LXML_PRESENT = True
-except ImportError, e:
-    LXML_PRESENT = False
-
-PYTHON_3_PRE_3_2 = (sys.version_info[0] == 3 and sys.version_info < (3,2))
-
-class TestConstructor(SoupTest):
-
-    def test_short_unicode_input(self):
-        data = u"<h1>éé</h1>"
-        soup = self.soup(data)
-        self.assertEqual(u"éé", soup.h1.string)
-
-    def test_embedded_null(self):
-        data = u"<h1>foo\0bar</h1>"
-        soup = self.soup(data)
-        self.assertEqual(u"foo\0bar", soup.h1.string)
-
-    def test_exclude_encodings(self):
-        utf8_data = u"Räksmörgås".encode("utf-8")
-        soup = self.soup(utf8_data, exclude_encodings=["utf-8"])
-        self.assertEqual("windows-1252", soup.original_encoding)
-
-
-class TestWarnings(SoupTest):
-
-    def _no_parser_specified(self, s, is_there=True):
-        v = s.startswith(BeautifulSoup.NO_PARSER_SPECIFIED_WARNING[:80])
-        self.assertTrue(v)
-
-    def test_warning_if_no_parser_specified(self):
-        with warnings.catch_warnings(record=True) as w:
-            soup = self.soup("<a><b></b></a>")
-        msg = str(w[0].message)
-        self._assert_no_parser_specified(msg)
-
-    def test_warning_if_parser_specified_too_vague(self):
-        with warnings.catch_warnings(record=True) as w:
-            soup = self.soup("<a><b></b></a>", "html")
-        msg = str(w[0].message)
-        self._assert_no_parser_specified(msg)
-
-    def test_no_warning_if_explicit_parser_specified(self):
-        with warnings.catch_warnings(record=True) as w:
-            soup = self.soup("<a><b></b></a>", "html.parser")
-        self.assertEqual([], w)
-
-    def test_parseOnlyThese_renamed_to_parse_only(self):
-        with warnings.catch_warnings(record=True) as w:
-            soup = self.soup("<a><b></b></a>", parseOnlyThese=SoupStrainer("b"))
-        msg = str(w[0].message)
-        self.assertTrue("parseOnlyThese" in msg)
-        self.assertTrue("parse_only" in msg)
-        self.assertEqual(b"<b></b>", soup.encode())
-
-    def test_fromEncoding_renamed_to_from_encoding(self):
-        with warnings.catch_warnings(record=True) as w:
-            utf8 = b"\xc3\xa9"
-            soup = self.soup(utf8, fromEncoding="utf8")
-        msg = str(w[0].message)
-        self.assertTrue("fromEncoding" in msg)
-        self.assertTrue("from_encoding" in msg)
-        self.assertEqual("utf8", soup.original_encoding)
-
-    def test_unrecognized_keyword_argument(self):
-        self.assertRaises(
-            TypeError, self.soup, "<a>", no_such_argument=True)
-
-    def test_disk_file_warning(self):
-        filehandle = tempfile.NamedTemporaryFile()
-        filename = filehandle.name
-        try:
-            with warnings.catch_warnings(record=True) as w:
-                soup = self.soup(filename)
-            msg = str(w[0].message)
-            self.assertTrue("looks like a filename" in msg)
-        finally:
-            filehandle.close()
-
-        # The file no longer exists, so Beautiful Soup will no longer issue the warning.
-        with warnings.catch_warnings(record=True) as w:
-            soup = self.soup(filename)
-        self.assertEqual(0, len(w))
-
-    def test_url_warning_with_bytes_url(self):
-        with warnings.catch_warnings(record=True) as warning_list:
-            soup = self.soup(b"http://www.crummybytes.com/")
-        # Be aware this isn't the only warning that can be raised during
-        # execution.
-        self.assertTrue(any("looks like a URL" in str(w.message) 
-            for w in warning_list))
-
-    def test_url_warning_with_unicode_url(self):
-        with warnings.catch_warnings(record=True) as warning_list:
-            # note - this url must differ from the bytes one otherwise
-            # python's warnings system swallows the second warning
-            soup = self.soup(u"http://www.crummyunicode.com/")
-        self.assertTrue(any("looks like a URL" in str(w.message) 
-            for w in warning_list))
-
-    def test_url_warning_with_bytes_and_space(self):
-        with warnings.catch_warnings(record=True) as warning_list:
-            soup = self.soup(b"http://www.crummybytes.com/ is great")
-        self.assertFalse(any("looks like a URL" in str(w.message) 
-            for w in warning_list))
-
-    def test_url_warning_with_unicode_and_space(self):
-        with warnings.catch_warnings(record=True) as warning_list:
-            soup = self.soup(u"http://www.crummyuncode.com/ is great")
-        self.assertFalse(any("looks like a URL" in str(w.message) 
-            for w in warning_list))
-
-
-class TestSelectiveParsing(SoupTest):
-
-    def test_parse_with_soupstrainer(self):
-        markup = "No<b>Yes</b><a>No<b>Yes <c>Yes</c></b>"
-        strainer = SoupStrainer("b")
-        soup = self.soup(markup, parse_only=strainer)
-        self.assertEqual(soup.encode(), b"<b>Yes</b><b>Yes <c>Yes</c></b>")
-
-
-class TestEntitySubstitution(unittest.TestCase):
-    """Standalone tests of the EntitySubstitution class."""
-    def setUp(self):
-        self.sub = EntitySubstitution
-
-    def test_simple_html_substitution(self):
-        # Unicode characters corresponding to named HTML entities
-        # are substituted, and no others.
-        s = u"foo\u2200\N{SNOWMAN}\u00f5bar"
-        self.assertEqual(self.sub.substitute_html(s),
-                          u"foo&forall;\N{SNOWMAN}&otilde;bar")
-
-    def test_smart_quote_substitution(self):
-        # MS smart quotes are a common source of frustration, so we
-        # give them a special test.
-        quotes = b"\x91\x92foo\x93\x94"
-        dammit = UnicodeDammit(quotes)
-        self.assertEqual(self.sub.substitute_html(dammit.markup),
-                          "&lsquo;&rsquo;foo&ldquo;&rdquo;")
-
-    def test_xml_conversion_includes_no_quotes_if_make_quoted_attribute_is_false(self):
-        s = 'Welcome to "my bar"'
-        self.assertEqual(self.sub.substitute_xml(s, False), s)
-
-    def test_xml_attribute_quoting_normally_uses_double_quotes(self):
-        self.assertEqual(self.sub.substitute_xml("Welcome", True),
-                          '"Welcome"')
-        self.assertEqual(self.sub.substitute_xml("Bob's Bar", True),
-                          '"Bob\'s Bar"')
-
-    def test_xml_attribute_quoting_uses_single_quotes_when_value_contains_double_quotes(self):
-        s = 'Welcome to "my bar"'
-        self.assertEqual(self.sub.substitute_xml(s, True),
-                          "'Welcome to \"my bar\"'")
-
-    def test_xml_attribute_quoting_escapes_single_quotes_when_value_contains_both_single_and_double_quotes(self):
-        s = 'Welcome to "Bob\'s Bar"'
-        self.assertEqual(
-            self.sub.substitute_xml(s, True),
-            '"Welcome to &quot;Bob\'s Bar&quot;"')
-
-    def test_xml_quotes_arent_escaped_when_value_is_not_being_quoted(self):
-        quoted = 'Welcome to "Bob\'s Bar"'
-        self.assertEqual(self.sub.substitute_xml(quoted), quoted)
-
-    def test_xml_quoting_handles_angle_brackets(self):
-        self.assertEqual(
-            self.sub.substitute_xml("foo<bar>"),
-            "foo&lt;bar&gt;")
-
-    def test_xml_quoting_handles_ampersands(self):
-        self.assertEqual(self.sub.substitute_xml("AT&T"), "AT&amp;T")
-
-    def test_xml_quoting_including_ampersands_when_they_are_part_of_an_entity(self):
-        self.assertEqual(
-            self.sub.substitute_xml("&Aacute;T&T"),
-            "&amp;Aacute;T&amp;T")
-
-    def test_xml_quoting_ignoring_ampersands_when_they_are_part_of_an_entity(self):
-        self.assertEqual(
-            self.sub.substitute_xml_containing_entities("&Aacute;T&T"),
-            "&Aacute;T&amp;T")
-
-    def test_quotes_not_html_substituted(self):
-        """There's no need to do this except inside attribute values."""
-        text = 'Bob\'s "bar"'
-        self.assertEqual(self.sub.substitute_html(text), text)
-
-
-class TestEncodingConversion(SoupTest):
-    # Test Beautiful Soup's ability to decode and encode from various
-    # encodings.
-
-    def setUp(self):
-        super(TestEncodingConversion, self).setUp()
-        self.unicode_data = u'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</foo></body></html>'
-        self.utf8_data = self.unicode_data.encode("utf-8")
-        # Just so you know what it looks like.
-        self.assertEqual(
-            self.utf8_data,
-            b'<html><head><meta charset="utf-8"/></head><body><foo>Sacr\xc3\xa9 bleu!</foo></body></html>')
-
-    def test_ascii_in_unicode_out(self):
-        # ASCII input is converted to Unicode. The original_encoding
-        # attribute is set to 'utf-8', a superset of ASCII.
-        chardet = bs4.dammit.chardet_dammit
-        logging.disable(logging.WARNING)
-        try:
-            def noop(str):
-                return None
-            # Disable chardet, which will realize that the ASCII is ASCII.
-            bs4.dammit.chardet_dammit = noop
-            ascii = b"<foo>a</foo>"
-            soup_from_ascii = self.soup(ascii)
-            unicode_output = soup_from_ascii.decode()
-            self.assertTrue(isinstance(unicode_output, unicode))
-            self.assertEqual(unicode_output, self.document_for(ascii.decode()))
-            self.assertEqual(soup_from_ascii.original_encoding.lower(), "utf-8")
-        finally:
-            logging.disable(logging.NOTSET)
-            bs4.dammit.chardet_dammit = chardet
-
-    def test_unicode_in_unicode_out(self):
-        # Unicode input is left alone. The original_encoding attribute
-        # is not set.
-        soup_from_unicode = self.soup(self.unicode_data)
-        self.assertEqual(soup_from_unicode.decode(), self.unicode_data)
-        self.assertEqual(soup_from_unicode.foo.string, u'Sacr\xe9 bleu!')
-        self.assertEqual(soup_from_unicode.original_encoding, None)
-
-    def test_utf8_in_unicode_out(self):
-        # UTF-8 input is converted to Unicode. The original_encoding
-        # attribute is set.
-        soup_from_utf8 = self.soup(self.utf8_data)
-        self.assertEqual(soup_from_utf8.decode(), self.unicode_data)
-        self.assertEqual(soup_from_utf8.foo.string, u'Sacr\xe9 bleu!')
-
-    def test_utf8_out(self):
-        # The internal data structures can be encoded as UTF-8.
-        soup_from_unicode = self.soup(self.unicode_data)
-        self.assertEqual(soup_from_unicode.encode('utf-8'), self.utf8_data)
-
-    @skipIf(
-        PYTHON_3_PRE_3_2,
-        "Bad HTMLParser detected; skipping test of non-ASCII characters in attribute name.")
-    def test_attribute_name_containing_unicode_characters(self):
-        markup = u'<div><a \N{SNOWMAN}="snowman"></a></div>'
-        self.assertEqual(self.soup(markup).div.encode("utf8"), markup.encode("utf8"))
-
-class TestUnicodeDammit(unittest.TestCase):
-    """Standalone tests of UnicodeDammit."""
-
-    def test_unicode_input(self):
-        markup = u"I'm already Unicode! \N{SNOWMAN}"
-        dammit = UnicodeDammit(markup)
-        self.assertEqual(dammit.unicode_markup, markup)
-
-    def test_smart_quotes_to_unicode(self):
-        markup = b"<foo>\x91\x92\x93\x94</foo>"
-        dammit = UnicodeDammit(markup)
-        self.assertEqual(
-            dammit.unicode_markup, u"<foo>\u2018\u2019\u201c\u201d</foo>")
-
-    def test_smart_quotes_to_xml_entities(self):
-        markup = b"<foo>\x91\x92\x93\x94</foo>"
-        dammit = UnicodeDammit(markup, smart_quotes_to="xml")
-        self.assertEqual(
-            dammit.unicode_markup, "<foo>&#x2018;&#x2019;&#x201C;&#x201D;</foo>")
-
-    def test_smart_quotes_to_html_entities(self):
-        markup = b"<foo>\x91\x92\x93\x94</foo>"
-        dammit = UnicodeDammit(markup, smart_quotes_to="html")
-        self.assertEqual(
-            dammit.unicode_markup, "<foo>&lsquo;&rsquo;&ldquo;&rdquo;</foo>")
-
-    def test_smart_quotes_to_ascii(self):
-        markup = b"<foo>\x91\x92\x93\x94</foo>"
-        dammit = UnicodeDammit(markup, smart_quotes_to="ascii")
-        self.assertEqual(
-            dammit.unicode_markup, """<foo>''""</foo>""")
-
-    def test_detect_utf8(self):
-        utf8 = b"Sacr\xc3\xa9 bleu! \xe2\x98\x83"
-        dammit = UnicodeDammit(utf8)
-        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
-        self.assertEqual(dammit.unicode_markup, u'Sacr\xe9 bleu! \N{SNOWMAN}')
-
-
-    def test_convert_hebrew(self):
-        hebrew = b"\xed\xe5\xec\xf9"
-        dammit = UnicodeDammit(hebrew, ["iso-8859-8"])
-        self.assertEqual(dammit.original_encoding.lower(), 'iso-8859-8')
-        self.assertEqual(dammit.unicode_markup, u'\u05dd\u05d5\u05dc\u05e9')
-
-    def test_dont_see_smart_quotes_where_there_are_none(self):
-        utf_8 = b"\343\202\261\343\203\274\343\202\277\343\202\244 Watch"
-        dammit = UnicodeDammit(utf_8)
-        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
-        self.assertEqual(dammit.unicode_markup.encode("utf-8"), utf_8)
-
-    def test_ignore_inappropriate_codecs(self):
-        utf8_data = u"Räksmörgås".encode("utf-8")
-        dammit = UnicodeDammit(utf8_data, ["iso-8859-8"])
-        self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
-
-    def test_ignore_invalid_codecs(self):
-        utf8_data = u"Räksmörgås".encode("utf-8")
-        for bad_encoding in ['.utf8', '...', 'utF---16.!']:
-            dammit = UnicodeDammit(utf8_data, [bad_encoding])
-            self.assertEqual(dammit.original_encoding.lower(), 'utf-8')
-
-    def test_exclude_encodings(self):
-        # This is UTF-8.
-        utf8_data = u"Räksmörgås".encode("utf-8")
-
-        # But if we exclude UTF-8 from consideration, the guess is
-        # Windows-1252.
-        dammit = UnicodeDammit(utf8_data, exclude_encodings=["utf-8"])
-        self.assertEqual(dammit.original_encoding.lower(), 'windows-1252')
-
-        # And if we exclude that, there is no valid guess at all.
-        dammit = UnicodeDammit(
-            utf8_data, exclude_encodings=["utf-8", "windows-1252"])
-        self.assertEqual(dammit.original_encoding, None)
-
-    def test_encoding_detector_replaces_junk_in_encoding_name_with_replacement_character(self):
-        detected = EncodingDetector(
-            b'<?xml version="1.0" encoding="UTF-\xdb" ?>')
-        encodings = list(detected.encodings)
-        assert u'utf-\N{REPLACEMENT CHARACTER}' in encodings
-
-    def test_detect_html5_style_meta_tag(self):
-
-        for data in (
-            b'<html><meta charset="euc-jp" /></html>',
-            b"<html><meta charset='euc-jp' /></html>",
-            b"<html><meta charset=euc-jp /></html>",
-            b"<html><meta charset=euc-jp/></html>"):
-            dammit = UnicodeDammit(data, is_html=True)
-            self.assertEqual(
-                "euc-jp", dammit.original_encoding)
-
-    def test_last_ditch_entity_replacement(self):
-        # This is a UTF-8 document that contains bytestrings
-        # completely incompatible with UTF-8 (ie. encoded with some other
-        # encoding).
-        #
-        # Since there is no consistent encoding for the document,
-        # Unicode, Dammit will eventually encode the document as UTF-8
-        # and encode the incompatible characters as REPLACEMENT
-        # CHARACTER.
-        #
-        # If chardet is installed, it will detect that the document
-        # can be converted into ISO-8859-1 without errors. This happens
-        # to be the wrong encoding, but it is a consistent encoding, so the
-        # code we're testing here won't run.
-        #
-        # So we temporarily disable chardet if it's present.
-        doc = b"""\357\273\277<?xml version="1.0" encoding="UTF-8"?>
-<html><b>\330\250\330\252\330\261</b>
-<i>\310\322\321\220\312\321\355\344</i></html>"""
-        chardet = bs4.dammit.chardet_dammit
-        logging.disable(logging.WARNING)
-        try:
-            def noop(str):
-                return None
-            bs4.dammit.chardet_dammit = noop
-            dammit = UnicodeDammit(doc)
-            self.assertEqual(True, dammit.contains_replacement_characters)
-            self.assertTrue(u"\ufffd" in dammit.unicode_markup)
-
-            soup = BeautifulSoup(doc, "html.parser")
-            self.assertTrue(soup.contains_replacement_characters)
-        finally:
-            logging.disable(logging.NOTSET)
-            bs4.dammit.chardet_dammit = chardet
-
-    def test_byte_order_mark_removed(self):
-        # A document written in UTF-16LE will have its byte order marker stripped.
-        data = b'\xff\xfe<\x00a\x00>\x00\xe1\x00\xe9\x00<\x00/\x00a\x00>\x00'
-        dammit = UnicodeDammit(data)
-        self.assertEqual(u"<a>áé</a>", dammit.unicode_markup)
-        self.assertEqual("utf-16le", dammit.original_encoding)
-
-    def test_detwingle(self):
-        # Here's a UTF8 document.
-        utf8 = (u"\N{SNOWMAN}" * 3).encode("utf8")
-
-        # Here's a Windows-1252 document.
-        windows_1252 = (
-            u"\N{LEFT DOUBLE QUOTATION MARK}Hi, I like Windows!"
-            u"\N{RIGHT DOUBLE QUOTATION MARK}").encode("windows_1252")
-
-        # Through some unholy alchemy, they've been stuck together.
-        doc = utf8 + windows_1252 + utf8
-
-        # The document can't be turned into UTF-8:
-        self.assertRaises(UnicodeDecodeError, doc.decode, "utf8")
-
-        # Unicode, Dammit thinks the whole document is Windows-1252,
-        # and decodes it into "☃☃☃“Hi, I like Windows!”☃☃☃"
-
-        # But if we run it through UnicodeDammit.detwingle, it's fixed:
-
-        fixed = UnicodeDammit.detwingle(doc)
-        self.assertEqual(
-            u"☃☃☃“Hi, I like Windows!”☃☃☃", fixed.decode("utf8"))
-
-    def test_detwingle_ignores_multibyte_characters(self):
-        # Each of these characters has a UTF-8 representation ending
-        # in \x93. \x93 is a smart quote if interpreted as
-        # Windows-1252. But our code knows to skip over multibyte
-        # UTF-8 characters, so they'll survive the process unscathed.
-        for tricky_unicode_char in (
-            u"\N{LATIN SMALL LIGATURE OE}", # 2-byte char '\xc5\x93'
-            u"\N{LATIN SUBSCRIPT SMALL LETTER X}", # 3-byte char '\xe2\x82\x93'
-            u"\xf0\x90\x90\x93", # This is a CJK character, not sure which one.
-            ):
-            input = tricky_unicode_char.encode("utf8")
-            self.assertTrue(input.endswith(b'\x93'))
-            output = UnicodeDammit.detwingle(input)
-            self.assertEqual(output, input)
-
-class TestNamespacedAttribute(SoupTest):
-
-    def test_name_may_be_none(self):
-        a = NamespacedAttribute("xmlns", None)
-        self.assertEqual(a, "xmlns")
-
-    def test_attribute_is_equivalent_to_colon_separated_string(self):
-        a = NamespacedAttribute("a", "b")
-        self.assertEqual("a:b", a)
-
-    def test_attributes_are_equivalent_if_prefix_and_name_identical(self):
-        a = NamespacedAttribute("a", "b", "c")
-        b = NamespacedAttribute("a", "b", "c")
-        self.assertEqual(a, b)
-
-        # The actual namespace is not considered.
-        c = NamespacedAttribute("a", "b", None)
-        self.assertEqual(a, c)
-
-        # But name and prefix are important.
-        d = NamespacedAttribute("a", "z", "c")
-        self.assertNotEqual(a, d)
-
-        e = NamespacedAttribute("z", "b", "c")
-        self.assertNotEqual(a, e)
-
-
-class TestAttributeValueWithCharsetSubstitution(unittest.TestCase):
-
-    def test_charset_meta_attribute_value(self):
-        value = CharsetMetaAttributeValue("euc-jp")
-        self.assertEqual("euc-jp", value)
-        self.assertEqual("euc-jp", value.original_value)
-        self.assertEqual("utf8", value.encode("utf8"))
-
-
-    def test_content_meta_attribute_value(self):
-        value = ContentMetaAttributeValue("text/html; charset=euc-jp")
-        self.assertEqual("text/html; charset=euc-jp", value)
-        self.assertEqual("text/html; charset=euc-jp", value.original_value)
-        self.assertEqual("text/html; charset=utf8", value.encode("utf8"))
diff --git a/lib/bs4/tests/test_tree.py b/lib/bs4/tests/test_tree.py
deleted file mode 100644
index a4fe0b1664d6f3e85f592392a404e501e6ad7054..0000000000000000000000000000000000000000
--- a/lib/bs4/tests/test_tree.py
+++ /dev/null
@@ -1,2044 +0,0 @@
-# -*- coding: utf-8 -*-
-"""Tests for Beautiful Soup's tree traversal methods.
-
-The tree traversal methods are the main advantage of using Beautiful
-Soup over just using a parser.
-
-Different parsers will build different Beautiful Soup trees given the
-same markup, but all Beautiful Soup trees can be traversed with the
-methods tested here.
-"""
-
-from pdb import set_trace
-import copy
-import pickle
-import re
-import warnings
-from bs4 import BeautifulSoup
-from bs4.builder import (
-    builder_registry,
-    HTMLParserTreeBuilder,
-)
-from bs4.element import (
-    PY3K,
-    CData,
-    Comment,
-    Declaration,
-    Doctype,
-    NavigableString,
-    SoupStrainer,
-    Tag,
-)
-from bs4.testing import (
-    SoupTest,
-    skipIf,
-)
-
-XML_BUILDER_PRESENT = (builder_registry.lookup("xml") is not None)
-LXML_PRESENT = (builder_registry.lookup("lxml") is not None)
-
-class TreeTest(SoupTest):
-
-    def assertSelects(self, tags, should_match):
-        """Make sure that the given tags have the correct text.
-
-        This is used in tests that define a bunch of tags, each
-        containing a single string, and then select certain strings by
-        some mechanism.
-        """
-        self.assertEqual([tag.string for tag in tags], should_match)
-
-    def assertSelectsIDs(self, tags, should_match):
-        """Make sure that the given tags have the correct IDs.
-
-        This is used in tests that define a bunch of tags, each
-        containing a single string, and then select certain strings by
-        some mechanism.
-        """
-        self.assertEqual([tag['id'] for tag in tags], should_match)
-
-
-class TestFind(TreeTest):
-    """Basic tests of the find() method.
-
-    find() just calls find_all() with limit=1, so it's not tested all
-    that thoroughly here.
-    """
-
-    def test_find_tag(self):
-        soup = self.soup("<a>1</a><b>2</b><a>3</a><b>4</b>")
-        self.assertEqual(soup.find("b").string, "2")
-
-    def test_unicode_text_find(self):
-        soup = self.soup(u'<h1>Räksmörgås</h1>')
-        self.assertEqual(soup.find(string=u'Räksmörgås'), u'Räksmörgås')
-
-    def test_unicode_attribute_find(self):
-        soup = self.soup(u'<h1 id="Räksmörgås">here it is</h1>')
-        str(soup)
-        self.assertEqual("here it is", soup.find(id=u'Räksmörgås').text)
-
-
-    def test_find_everything(self):
-        """Test an optimization that finds all tags."""
-        soup = self.soup("<a>foo</a><b>bar</b>")
-        self.assertEqual(2, len(soup.find_all()))
-
-    def test_find_everything_with_name(self):
-        """Test an optimization that finds all tags with a given name."""
-        soup = self.soup("<a>foo</a><b>bar</b><a>baz</a>")
-        self.assertEqual(2, len(soup.find_all('a')))
-
-class TestFindAll(TreeTest):
-    """Basic tests of the find_all() method."""
-
-    def test_find_all_text_nodes(self):
-        """You can search the tree for text nodes."""
-        soup = self.soup("<html>Foo<b>bar</b>\xbb</html>")
-        # Exact match.
-        self.assertEqual(soup.find_all(string="bar"), [u"bar"])
-        self.assertEqual(soup.find_all(text="bar"), [u"bar"])
-        # Match any of a number of strings.
-        self.assertEqual(
-            soup.find_all(text=["Foo", "bar"]), [u"Foo", u"bar"])
-        # Match a regular expression.
-        self.assertEqual(soup.find_all(text=re.compile('.*')),
-                         [u"Foo", u"bar", u'\xbb'])
-        # Match anything.
-        self.assertEqual(soup.find_all(text=True),
-                         [u"Foo", u"bar", u'\xbb'])
-
-    def test_find_all_limit(self):
-        """You can limit the number of items returned by find_all."""
-        soup = self.soup("<a>1</a><a>2</a><a>3</a><a>4</a><a>5</a>")
-        self.assertSelects(soup.find_all('a', limit=3), ["1", "2", "3"])
-        self.assertSelects(soup.find_all('a', limit=1), ["1"])
-        self.assertSelects(
-            soup.find_all('a', limit=10), ["1", "2", "3", "4", "5"])
-
-        # A limit of 0 means no limit.
-        self.assertSelects(
-            soup.find_all('a', limit=0), ["1", "2", "3", "4", "5"])
-
-    def test_calling_a_tag_is_calling_findall(self):
-        soup = self.soup("<a>1</a><b>2<a id='foo'>3</a></b>")
-        self.assertSelects(soup('a', limit=1), ["1"])
-        self.assertSelects(soup.b(id="foo"), ["3"])
-
-    def test_find_all_with_self_referential_data_structure_does_not_cause_infinite_recursion(self):
-        soup = self.soup("<a></a>")
-        # Create a self-referential list.
-        l = []
-        l.append(l)
-
-        # Without special code in _normalize_search_value, this would cause infinite
-        # recursion.
-        self.assertEqual([], soup.find_all(l))
-
-    def test_find_all_resultset(self):
-        """All find_all calls return a ResultSet"""
-        soup = self.soup("<a></a>")
-        result = soup.find_all("a")
-        self.assertTrue(hasattr(result, "source"))
-
-        result = soup.find_all(True)
-        self.assertTrue(hasattr(result, "source"))
-
-        result = soup.find_all(text="foo")
-        self.assertTrue(hasattr(result, "source"))
-
-
-class TestFindAllBasicNamespaces(TreeTest):
-
-    def test_find_by_namespaced_name(self):
-        soup = self.soup('<mathml:msqrt>4</mathml:msqrt><a svg:fill="red">')
-        self.assertEqual("4", soup.find("mathml:msqrt").string)
-        self.assertEqual("a", soup.find(attrs= { "svg:fill" : "red" }).name)
-
-
-class TestFindAllByName(TreeTest):
-    """Test ways of finding tags by tag name."""
-
-    def setUp(self):
-        super(TreeTest, self).setUp()
-        self.tree =  self.soup("""<a>First tag.</a>
-                                  <b>Second tag.</b>
-                                  <c>Third <a>Nested tag.</a> tag.</c>""")
-
-    def test_find_all_by_tag_name(self):
-        # Find all the <a> tags.
-        self.assertSelects(
-            self.tree.find_all('a'), ['First tag.', 'Nested tag.'])
-
-    def test_find_all_by_name_and_text(self):
-        self.assertSelects(
-            self.tree.find_all('a', text='First tag.'), ['First tag.'])
-
-        self.assertSelects(
-            self.tree.find_all('a', text=True), ['First tag.', 'Nested tag.'])
-
-        self.assertSelects(
-            self.tree.find_all('a', text=re.compile("tag")),
-            ['First tag.', 'Nested tag.'])
-
-
-    def test_find_all_on_non_root_element(self):
-        # You can call find_all on any node, not just the root.
-        self.assertSelects(self.tree.c.find_all('a'), ['Nested tag.'])
-
-    def test_calling_element_invokes_find_all(self):
-        self.assertSelects(self.tree('a'), ['First tag.', 'Nested tag.'])
-
-    def test_find_all_by_tag_strainer(self):
-        self.assertSelects(
-            self.tree.find_all(SoupStrainer('a')),
-            ['First tag.', 'Nested tag.'])
-
-    def test_find_all_by_tag_names(self):
-        self.assertSelects(
-            self.tree.find_all(['a', 'b']),
-            ['First tag.', 'Second tag.', 'Nested tag.'])
-
-    def test_find_all_by_tag_dict(self):
-        self.assertSelects(
-            self.tree.find_all({'a' : True, 'b' : True}),
-            ['First tag.', 'Second tag.', 'Nested tag.'])
-
-    def test_find_all_by_tag_re(self):
-        self.assertSelects(
-            self.tree.find_all(re.compile('^[ab]$')),
-            ['First tag.', 'Second tag.', 'Nested tag.'])
-
-    def test_find_all_with_tags_matching_method(self):
-        # You can define an oracle method that determines whether
-        # a tag matches the search.
-        def id_matches_name(tag):
-            return tag.name == tag.get('id')
-
-        tree = self.soup("""<a id="a">Match 1.</a>
-                            <a id="1">Does not match.</a>
-                            <b id="b">Match 2.</a>""")
-
-        self.assertSelects(
-            tree.find_all(id_matches_name), ["Match 1.", "Match 2."])
-
-    def test_find_with_multi_valued_attribute(self):
-        soup = self.soup(
-            "<div class='a b'>1</div><div class='a c'>2</div><div class='a d'>3</div>"
-        )
-        r1 = soup.find('div', 'a d')
-        r2 = soup.find('div', re.compile(r'a d'))
-        r3, r4 = soup.find_all('div', ['a b', 'a d'])
-        self.assertEqual('3', r1.string)
-        self.assertEqual('3', r2.string)
-        self.assertEqual('1', r3.string)
-        self.assertEqual('3', r4.string)
-
-class TestFindAllByAttribute(TreeTest):
-
-    def test_find_all_by_attribute_name(self):
-        # You can pass in keyword arguments to find_all to search by
-        # attribute.
-        tree = self.soup("""
-                         <a id="first">Matching a.</a>
-                         <a id="second">
-                          Non-matching <b id="first">Matching b.</b>a.
-                         </a>""")
-        self.assertSelects(tree.find_all(id='first'),
-                           ["Matching a.", "Matching b."])
-
-    def test_find_all_by_utf8_attribute_value(self):
-        peace = u"םולש".encode("utf8")
-        data = u'<a title="םולש"></a>'.encode("utf8")
-        soup = self.soup(data)
-        self.assertEqual([soup.a], soup.find_all(title=peace))
-        self.assertEqual([soup.a], soup.find_all(title=peace.decode("utf8")))
-        self.assertEqual([soup.a], soup.find_all(title=[peace, "something else"]))
-
-    def test_find_all_by_attribute_dict(self):
-        # You can pass in a dictionary as the argument 'attrs'. This
-        # lets you search for attributes like 'name' (a fixed argument
-        # to find_all) and 'class' (a reserved word in Python.)
-        tree = self.soup("""
-                         <a name="name1" class="class1">Name match.</a>
-                         <a name="name2" class="class2">Class match.</a>
-                         <a name="name3" class="class3">Non-match.</a>
-                         <name1>A tag called 'name1'.</name1>
-                         """)
-
-        # This doesn't do what you want.
-        self.assertSelects(tree.find_all(name='name1'),
-                           ["A tag called 'name1'."])
-        # This does what you want.
-        self.assertSelects(tree.find_all(attrs={'name' : 'name1'}),
-                           ["Name match."])
-
-        self.assertSelects(tree.find_all(attrs={'class' : 'class2'}),
-                           ["Class match."])
-
-    def test_find_all_by_class(self):
-        tree = self.soup("""
-                         <a class="1">Class 1.</a>
-                         <a class="2">Class 2.</a>
-                         <b class="1">Class 1.</b>
-                         <c class="3 4">Class 3 and 4.</c>
-                         """)
-
-        # Passing in the class_ keyword argument will search against
-        # the 'class' attribute.
-        self.assertSelects(tree.find_all('a', class_='1'), ['Class 1.'])
-        self.assertSelects(tree.find_all('c', class_='3'), ['Class 3 and 4.'])
-        self.assertSelects(tree.find_all('c', class_='4'), ['Class 3 and 4.'])
-
-        # Passing in a string to 'attrs' will also search the CSS class.
-        self.assertSelects(tree.find_all('a', '1'), ['Class 1.'])
-        self.assertSelects(tree.find_all(attrs='1'), ['Class 1.', 'Class 1.'])
-        self.assertSelects(tree.find_all('c', '3'), ['Class 3 and 4.'])
-        self.assertSelects(tree.find_all('c', '4'), ['Class 3 and 4.'])
-
-    def test_find_by_class_when_multiple_classes_present(self):
-        tree = self.soup("<gar class='foo bar'>Found it</gar>")
-
-        f = tree.find_all("gar", class_=re.compile("o"))
-        self.assertSelects(f, ["Found it"])
-
-        f = tree.find_all("gar", class_=re.compile("a"))
-        self.assertSelects(f, ["Found it"])
-
-        # If the search fails to match the individual strings "foo" and "bar",
-        # it will be tried against the combined string "foo bar".
-        f = tree.find_all("gar", class_=re.compile("o b"))
-        self.assertSelects(f, ["Found it"])
-
-    def test_find_all_with_non_dictionary_for_attrs_finds_by_class(self):
-        soup = self.soup("<a class='bar'>Found it</a>")
-
-        self.assertSelects(soup.find_all("a", re.compile("ba")), ["Found it"])
-
-        def big_attribute_value(value):
-            return len(value) > 3
-
-        self.assertSelects(soup.find_all("a", big_attribute_value), [])
-
-        def small_attribute_value(value):
-            return len(value) <= 3
-
-        self.assertSelects(
-            soup.find_all("a", small_attribute_value), ["Found it"])
-
-    def test_find_all_with_string_for_attrs_finds_multiple_classes(self):
-        soup = self.soup('<a class="foo bar"></a><a class="foo"></a>')
-        a, a2 = soup.find_all("a")
-        self.assertEqual([a, a2], soup.find_all("a", "foo"))
-        self.assertEqual([a], soup.find_all("a", "bar"))
-
-        # If you specify the class as a string that contains a
-        # space, only that specific value will be found.
-        self.assertEqual([a], soup.find_all("a", class_="foo bar"))
-        self.assertEqual([a], soup.find_all("a", "foo bar"))
-        self.assertEqual([], soup.find_all("a", "bar foo"))
-
-    def test_find_all_by_attribute_soupstrainer(self):
-        tree = self.soup("""
-                         <a id="first">Match.</a>
-                         <a id="second">Non-match.</a>""")
-
-        strainer = SoupStrainer(attrs={'id' : 'first'})
-        self.assertSelects(tree.find_all(strainer), ['Match.'])
-
-    def test_find_all_with_missing_attribute(self):
-        # You can pass in None as the value of an attribute to find_all.
-        # This will match tags that do not have that attribute set.
-        tree = self.soup("""<a id="1">ID present.</a>
-                            <a>No ID present.</a>
-                            <a id="">ID is empty.</a>""")
-        self.assertSelects(tree.find_all('a', id=None), ["No ID present."])
-
-    def test_find_all_with_defined_attribute(self):
-        # You can pass in None as the value of an attribute to find_all.
-        # This will match tags that have that attribute set to any value.
-        tree = self.soup("""<a id="1">ID present.</a>
-                            <a>No ID present.</a>
-                            <a id="">ID is empty.</a>""")
-        self.assertSelects(
-            tree.find_all(id=True), ["ID present.", "ID is empty."])
-
-    def test_find_all_with_numeric_attribute(self):
-        # If you search for a number, it's treated as a string.
-        tree = self.soup("""<a id=1>Unquoted attribute.</a>
-                            <a id="1">Quoted attribute.</a>""")
-
-        expected = ["Unquoted attribute.", "Quoted attribute."]
-        self.assertSelects(tree.find_all(id=1), expected)
-        self.assertSelects(tree.find_all(id="1"), expected)
-
-    def test_find_all_with_list_attribute_values(self):
-        # You can pass a list of attribute values instead of just one,
-        # and you'll get tags that match any of the values.
-        tree = self.soup("""<a id="1">1</a>
-                            <a id="2">2</a>
-                            <a id="3">3</a>
-                            <a>No ID.</a>""")
-        self.assertSelects(tree.find_all(id=["1", "3", "4"]),
-                           ["1", "3"])
-
-    def test_find_all_with_regular_expression_attribute_value(self):
-        # You can pass a regular expression as an attribute value, and
-        # you'll get tags whose values for that attribute match the
-        # regular expression.
-        tree = self.soup("""<a id="a">One a.</a>
-                            <a id="aa">Two as.</a>
-                            <a id="ab">Mixed as and bs.</a>
-                            <a id="b">One b.</a>
-                            <a>No ID.</a>""")
-
-        self.assertSelects(tree.find_all(id=re.compile("^a+$")),
-                           ["One a.", "Two as."])
-
-    def test_find_by_name_and_containing_string(self):
-        soup = self.soup("<b>foo</b><b>bar</b><a>foo</a>")
-        a = soup.a
-
-        self.assertEqual([a], soup.find_all("a", text="foo"))
-        self.assertEqual([], soup.find_all("a", text="bar"))
-        self.assertEqual([], soup.find_all("a", text="bar"))
-
-    def test_find_by_name_and_containing_string_when_string_is_buried(self):
-        soup = self.soup("<a>foo</a><a><b><c>foo</c></b></a>")
-        self.assertEqual(soup.find_all("a"), soup.find_all("a", text="foo"))
-
-    def test_find_by_attribute_and_containing_string(self):
-        soup = self.soup('<b id="1">foo</b><a id="2">foo</a>')
-        a = soup.a
-
-        self.assertEqual([a], soup.find_all(id=2, text="foo"))
-        self.assertEqual([], soup.find_all(id=1, text="bar"))
-
-
-
-
-class TestIndex(TreeTest):
-    """Test Tag.index"""
-    def test_index(self):
-        tree = self.soup("""<div>
-                            <a>Identical</a>
-                            <b>Not identical</b>
-                            <a>Identical</a>
-
-                            <c><d>Identical with child</d></c>
-                            <b>Also not identical</b>
-                            <c><d>Identical with child</d></c>
-                            </div>""")
-        div = tree.div
-        for i, element in enumerate(div.contents):
-            self.assertEqual(i, div.index(element))
-        self.assertRaises(ValueError, tree.index, 1)
-
-
-class TestParentOperations(TreeTest):
-    """Test navigation and searching through an element's parents."""
-
-    def setUp(self):
-        super(TestParentOperations, self).setUp()
-        self.tree = self.soup('''<ul id="empty"></ul>
-                                 <ul id="top">
-                                  <ul id="middle">
-                                   <ul id="bottom">
-                                    <b>Start here</b>
-                                   </ul>
-                                  </ul>''')
-        self.start = self.tree.b
-
-
-    def test_parent(self):
-        self.assertEqual(self.start.parent['id'], 'bottom')
-        self.assertEqual(self.start.parent.parent['id'], 'middle')
-        self.assertEqual(self.start.parent.parent.parent['id'], 'top')
-
-    def test_parent_of_top_tag_is_soup_object(self):
-        top_tag = self.tree.contents[0]
-        self.assertEqual(top_tag.parent, self.tree)
-
-    def test_soup_object_has_no_parent(self):
-        self.assertEqual(None, self.tree.parent)
-
-    def test_find_parents(self):
-        self.assertSelectsIDs(
-            self.start.find_parents('ul'), ['bottom', 'middle', 'top'])
-        self.assertSelectsIDs(
-            self.start.find_parents('ul', id="middle"), ['middle'])
-
-    def test_find_parent(self):
-        self.assertEqual(self.start.find_parent('ul')['id'], 'bottom')
-        self.assertEqual(self.start.find_parent('ul', id='top')['id'], 'top')
-
-    def test_parent_of_text_element(self):
-        text = self.tree.find(text="Start here")
-        self.assertEqual(text.parent.name, 'b')
-
-    def test_text_element_find_parent(self):
-        text = self.tree.find(text="Start here")
-        self.assertEqual(text.find_parent('ul')['id'], 'bottom')
-
-    def test_parent_generator(self):
-        parents = [parent['id'] for parent in self.start.parents
-                   if parent is not None and 'id' in parent.attrs]
-        self.assertEqual(parents, ['bottom', 'middle', 'top'])
-
-
-class ProximityTest(TreeTest):
-
-    def setUp(self):
-        super(ProximityTest, self).setUp()
-        self.tree = self.soup(
-            '<html id="start"><head></head><body><b id="1">One</b><b id="2">Two</b><b id="3">Three</b></body></html>')
-
-
-class TestNextOperations(ProximityTest):
-
-    def setUp(self):
-        super(TestNextOperations, self).setUp()
-        self.start = self.tree.b
-
-    def test_next(self):
-        self.assertEqual(self.start.next_element, "One")
-        self.assertEqual(self.start.next_element.next_element['id'], "2")
-
-    def test_next_of_last_item_is_none(self):
-        last = self.tree.find(text="Three")
-        self.assertEqual(last.next_element, None)
-
-    def test_next_of_root_is_none(self):
-        # The document root is outside the next/previous chain.
-        self.assertEqual(self.tree.next_element, None)
-
-    def test_find_all_next(self):
-        self.assertSelects(self.start.find_all_next('b'), ["Two", "Three"])
-        self.assertSelects(self.start.find_all_next(id=3), ["Three"])
-
-    def test_find_next(self):
-        self.assertEqual(self.start.find_next('b')['id'], '2')
-        self.assertEqual(self.start.find_next(text="Three"), "Three")
-
-    def test_find_next_for_text_element(self):
-        text = self.tree.find(text="One")
-        self.assertEqual(text.find_next("b").string, "Two")
-        self.assertSelects(text.find_all_next("b"), ["Two", "Three"])
-
-    def test_next_generator(self):
-        start = self.tree.find(text="Two")
-        successors = [node for node in start.next_elements]
-        # There are two successors: the final <b> tag and its text contents.
-        tag, contents = successors
-        self.assertEqual(tag['id'], '3')
-        self.assertEqual(contents, "Three")
-
-class TestPreviousOperations(ProximityTest):
-
-    def setUp(self):
-        super(TestPreviousOperations, self).setUp()
-        self.end = self.tree.find(text="Three")
-
-    def test_previous(self):
-        self.assertEqual(self.end.previous_element['id'], "3")
-        self.assertEqual(self.end.previous_element.previous_element, "Two")
-
-    def test_previous_of_first_item_is_none(self):
-        first = self.tree.find('html')
-        self.assertEqual(first.previous_element, None)
-
-    def test_previous_of_root_is_none(self):
-        # The document root is outside the next/previous chain.
-        # XXX This is broken!
-        #self.assertEqual(self.tree.previous_element, None)
-        pass
-
-    def test_find_all_previous(self):
-        # The <b> tag containing the "Three" node is the predecessor
-        # of the "Three" node itself, which is why "Three" shows up
-        # here.
-        self.assertSelects(
-            self.end.find_all_previous('b'), ["Three", "Two", "One"])
-        self.assertSelects(self.end.find_all_previous(id=1), ["One"])
-
-    def test_find_previous(self):
-        self.assertEqual(self.end.find_previous('b')['id'], '3')
-        self.assertEqual(self.end.find_previous(text="One"), "One")
-
-    def test_find_previous_for_text_element(self):
-        text = self.tree.find(text="Three")
-        self.assertEqual(text.find_previous("b").string, "Three")
-        self.assertSelects(
-            text.find_all_previous("b"), ["Three", "Two", "One"])
-
-    def test_previous_generator(self):
-        start = self.tree.find(text="One")
-        predecessors = [node for node in start.previous_elements]
-
-        # There are four predecessors: the <b> tag containing "One",
-        # the <body> tag, the <head> tag, and the <html> tag.
-        b, body, head, html = predecessors
-        self.assertEqual(b['id'], '1')
-        self.assertEqual(body.name, "body")
-        self.assertEqual(head.name, "head")
-        self.assertEqual(html.name, "html")
-
-
-class SiblingTest(TreeTest):
-
-    def setUp(self):
-        super(SiblingTest, self).setUp()
-        markup = '''<html>
-                    <span id="1">
-                     <span id="1.1"></span>
-                    </span>
-                    <span id="2">
-                     <span id="2.1"></span>
-                    </span>
-                    <span id="3">
-                     <span id="3.1"></span>
-                    </span>
-                    <span id="4"></span>
-                    </html>'''
-        # All that whitespace looks good but makes the tests more
-        # difficult. Get rid of it.
-        markup = re.compile(r"\n\s*").sub("", markup)
-        self.tree = self.soup(markup)
-
-
-class TestNextSibling(SiblingTest):
-
-    def setUp(self):
-        super(TestNextSibling, self).setUp()
-        self.start = self.tree.find(id="1")
-
-    def test_next_sibling_of_root_is_none(self):
-        self.assertEqual(self.tree.next_sibling, None)
-
-    def test_next_sibling(self):
-        self.assertEqual(self.start.next_sibling['id'], '2')
-        self.assertEqual(self.start.next_sibling.next_sibling['id'], '3')
-
-        # Note the difference between next_sibling and next_element.
-        self.assertEqual(self.start.next_element['id'], '1.1')
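-
-        # A further sketch of the same distinction, using the second span
-        # from the markup in SiblingTest.setUp(): next_element descends into
-        # a tag's children, while next_sibling skips over them.
-        second = self.tree.find(id="2")
-        self.assertEqual(second.next_element['id'], '2.1')
-        self.assertEqual(second.next_sibling['id'], '3')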
-
-    def test_next_sibling_may_not_exist(self):
-        self.assertEqual(self.tree.html.next_sibling, None)
-
-        nested_span = self.tree.find(id="1.1")
-        self.assertEqual(nested_span.next_sibling, None)
-
-        last_span = self.tree.find(id="4")
-        self.assertEqual(last_span.next_sibling, None)
-
-    def test_find_next_sibling(self):
-        self.assertEqual(self.start.find_next_sibling('span')['id'], '2')
-
-    def test_next_siblings(self):
-        self.assertSelectsIDs(self.start.find_next_siblings("span"),
-                              ['2', '3', '4'])
-
-        self.assertSelectsIDs(self.start.find_next_siblings(id='3'), ['3'])
-
-    def test_next_sibling_for_text_element(self):
-        soup = self.soup("Foo<b>bar</b>baz")
-        start = soup.find(text="Foo")
-        self.assertEqual(start.next_sibling.name, 'b')
-        self.assertEqual(start.next_sibling.next_sibling, 'baz')
-
-        self.assertSelects(start.find_next_siblings('b'), ['bar'])
-        self.assertEqual(start.find_next_sibling(text="baz"), "baz")
-        self.assertEqual(start.find_next_sibling(text="nonesuch"), None)
-
-
-class TestPreviousSibling(SiblingTest):
-
-    def setUp(self):
-        super(TestPreviousSibling, self).setUp()
-        self.end = self.tree.find(id="4")
-
-    def test_previous_sibling_of_root_is_none(self):
-        self.assertEqual(self.tree.previous_sibling, None)
-
-    def test_previous_sibling(self):
-        self.assertEqual(self.end.previous_sibling['id'], '3')
-        self.assertEqual(self.end.previous_sibling.previous_sibling['id'], '2')
-
-        # Note the difference between previous_sibling and previous_element.
-        self.assertEqual(self.end.previous_element['id'], '3.1')
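-
-        # The same distinction sketched from the other direction: for the
-        # second span, previous_sibling is the first span, while
-        # previous_element is that span's last descendant.
-        second = self.tree.find(id="2")
-        self.assertEqual(second.previous_sibling['id'], '1')
-        self.assertEqual(second.previous_element['id'], '1.1')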
-
-    def test_previous_sibling_may_not_exist(self):
-        self.assertEqual(self.tree.html.previous_sibling, None)
-
-        nested_span = self.tree.find(id="1.1")
-        self.assertEqual(nested_span.previous_sibling, None)
-
-        first_span = self.tree.find(id="1")
-        self.assertEqual(first_span.previous_sibling, None)
-
-    def test_find_previous_sibling(self):
-        self.assertEqual(self.end.find_previous_sibling('span')['id'], '3')
-
-    def test_previous_siblings(self):
-        self.assertSelectsIDs(self.end.find_previous_siblings("span"),
-                              ['3', '2', '1'])
-
-        self.assertSelectsIDs(self.end.find_previous_siblings(id='1'), ['1'])
-
-    def test_previous_sibling_for_text_element(self):
-        soup = self.soup("Foo<b>bar</b>baz")
-        start = soup.find(text="baz")
-        self.assertEqual(start.previous_sibling.name, 'b')
-        self.assertEqual(start.previous_sibling.previous_sibling, 'Foo')
-
-        self.assertSelects(start.find_previous_siblings('b'), ['bar'])
-        self.assertEqual(start.find_previous_sibling(text="Foo"), "Foo")
-        self.assertEqual(start.find_previous_sibling(text="nonesuch"), None)
-
-
-class TestTagCreation(SoupTest):
-    """Test the ability to create new tags."""
-    def test_new_tag(self):
-        soup = self.soup("")
-        new_tag = soup.new_tag("foo", bar="baz")
-        self.assertTrue(isinstance(new_tag, Tag))
-        self.assertEqual("foo", new_tag.name)
-        self.assertEqual(dict(bar="baz"), new_tag.attrs)
-        self.assertEqual(None, new_tag.parent)
-
-    def test_tag_inherits_self_closing_rules_from_builder(self):
-        if XML_BUILDER_PRESENT:
-            xml_soup = BeautifulSoup("", "lxml-xml")
-            xml_br = xml_soup.new_tag("br")
-            xml_p = xml_soup.new_tag("p")
-
-            # Both the <br> and <p> tags are empty-element tags, just
-            # because they have no contents.
-            self.assertEqual(b"<br/>", xml_br.encode())
-            self.assertEqual(b"<p/>", xml_p.encode())
-
-        html_soup = BeautifulSoup("", "html.parser")
-        html_br = html_soup.new_tag("br")
-        html_p = html_soup.new_tag("p")
-
-        # The HTML builder uses HTML's rules about which tags are
-        # empty-element tags, and the new tags reflect these rules.
-        self.assertEqual(b"<br/>", html_br.encode())
-        self.assertEqual(b"<p></p>", html_p.encode())
-
-    def test_new_string_creates_navigablestring(self):
-        soup = self.soup("")
-        s = soup.new_string("foo")
-        self.assertEqual("foo", s)
-        self.assertTrue(isinstance(s, NavigableString))
-
-    def test_new_string_can_create_navigablestring_subclass(self):
-        soup = self.soup("")
-        s = soup.new_string("foo", Comment)
-        self.assertEqual("foo", s)
-        self.assertTrue(isinstance(s, Comment))
-
-class TestTreeModification(SoupTest):
-
-    def test_attribute_modification(self):
-        soup = self.soup('<a id="1"></a>')
-        soup.a['id'] = 2
-        self.assertEqual(soup.decode(), self.document_for('<a id="2"></a>'))
-        del soup.a['id']
-        self.assertEqual(soup.decode(), self.document_for('<a></a>'))
-        soup.a['id2'] = 'foo'
-        self.assertEqual(soup.decode(), self.document_for('<a id2="foo"></a>'))
-
-    def test_new_tag_creation(self):
-        builder = builder_registry.lookup('html')()
-        soup = self.soup("<body></body>", builder=builder)
-        a = Tag(soup, builder, 'a')
-        ol = Tag(soup, builder, 'ol')
-        a['href'] = 'http://foo.com/'
-        soup.body.insert(0, a)
-        soup.body.insert(1, ol)
-        self.assertEqual(
-            soup.body.encode(),
-            b'<body><a href="http://foo.com/"></a><ol></ol></body>')
-
-    def test_append_to_contents_moves_tag(self):
-        doc = """<p id="1">Don't leave me <b>here</b>.</p>
-                <p id="2">Don\'t leave!</p>"""
-        soup = self.soup(doc)
-        second_para = soup.find(id='2')
-        bold = soup.b
-
-        # Move the <b> tag to the end of the second paragraph.
-        soup.find(id='2').append(soup.b)
-
-        # The <b> tag is now a child of the second paragraph.
-        self.assertEqual(bold.parent, second_para)
-
-        self.assertEqual(
-            soup.decode(), self.document_for(
-                '<p id="1">Don\'t leave me .</p>\n'
-                '<p id="2">Don\'t leave!<b>here</b></p>'))
-
-    def test_replace_with_returns_thing_that_was_replaced(self):
-        text = "<a></a><b><c></c></b>"
-        soup = self.soup(text)
-        a = soup.a
-        new_a = a.replace_with(soup.c)
-        self.assertEqual(a, new_a)
-
-    def test_unwrap_returns_thing_that_was_replaced(self):
-        text = "<a><b></b><c></c></a>"
-        soup = self.soup(text)
-        a = soup.a
-        new_a = a.unwrap()
-        self.assertEqual(a, new_a)
-
-    def test_replace_with_and_unwrap_give_useful_exception_when_tag_has_no_parent(self):
-        soup = self.soup("<a><b>Foo</b></a><c>Bar</c>")
-        a = soup.a
-        a.extract()
-        self.assertEqual(None, a.parent)
-        self.assertRaises(ValueError, a.unwrap)
-        self.assertRaises(ValueError, a.replace_with, soup.c)
-
-    def test_replace_tag_with_itself(self):
-        text = "<a><b></b><c>Foo<d></d></c></a><a><e></e></a>"
-        soup = self.soup(text)
-        c = soup.c
-        soup.c.replace_with(c)
-        self.assertEqual(soup.decode(), self.document_for(text))
-
-    def test_replace_tag_with_its_parent_raises_exception(self):
-        text = "<a><b></b></a>"
-        soup = self.soup(text)
-        self.assertRaises(ValueError, soup.b.replace_with, soup.a)
-
-    def test_insert_tag_into_itself_raises_exception(self):
-        text = "<a><b></b></a>"
-        soup = self.soup(text)
-        self.assertRaises(ValueError, soup.a.insert, 0, soup.a)
-
-    def test_replace_with_maintains_next_element_throughout(self):
-        soup = self.soup('<p><a>one</a><b>three</b></p>')
-        a = soup.a
-        b = a.contents[0]
-        # Make it so the <a> tag has two text children.
-        a.insert(1, "two")
-
-        # Now replace each one with the empty string.
-        left, right = a.contents
-        left.replaceWith('')
-        right.replaceWith('')
-
-        # The <b> tag is still connected to the tree.
-        self.assertEqual("three", soup.b.string)
-
-    def test_replace_final_node(self):
-        soup = self.soup("<b>Argh!</b>")
-        soup.find(text="Argh!").replace_with("Hooray!")
-        new_text = soup.find(text="Hooray!")
-        b = soup.b
-        self.assertEqual(new_text.previous_element, b)
-        self.assertEqual(new_text.parent, b)
-        self.assertEqual(new_text.previous_element.next_element, new_text)
-        self.assertEqual(new_text.next_element, None)
-
-    def test_consecutive_text_nodes(self):
-        # A builder should never create two consecutive text nodes,
-        # but if you insert one next to another, Beautiful Soup will
-        # handle it correctly.
-        soup = self.soup("<a><b>Argh!</b><c></c></a>")
-        soup.b.insert(1, "Hooray!")
-
-        self.assertEqual(
-            soup.decode(), self.document_for(
-                "<a><b>Argh!Hooray!</b><c></c></a>"))
-
-        new_text = soup.find(text="Hooray!")
-        self.assertEqual(new_text.previous_element, "Argh!")
-        self.assertEqual(new_text.previous_element.next_element, new_text)
-
-        self.assertEqual(new_text.previous_sibling, "Argh!")
-        self.assertEqual(new_text.previous_sibling.next_sibling, new_text)
-
-        self.assertEqual(new_text.next_sibling, None)
-        self.assertEqual(new_text.next_element, soup.c)
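-
-        # Although the two strings render as one run of text, they are still
-        # two separate children of the <b> tag (a sketch of the point above).
-        self.assertEqual(2, len(soup.b.contents))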
-
-    def test_insert_string(self):
-        soup = self.soup("<a></a>")
-        soup.a.insert(0, "bar")
-        soup.a.insert(0, "foo")
-        # The strings were added to the tag.
-        self.assertEqual(["foo", "bar"], soup.a.contents)
-        # And they were converted to NavigableStrings.
-        self.assertEqual(soup.a.contents[0].next_element, "bar")
-
-    def test_insert_tag(self):
-        builder = self.default_builder
-        soup = self.soup(
-            "<a><b>Find</b><c>lady!</c><d></d></a>", builder=builder)
-        magic_tag = Tag(soup, builder, 'magictag')
-        magic_tag.insert(0, "the")
-        soup.a.insert(1, magic_tag)
-
-        self.assertEqual(
-            soup.decode(), self.document_for(
-                "<a><b>Find</b><magictag>the</magictag><c>lady!</c><d></d></a>"))
-
-        # Make sure all the relationships are hooked up correctly.
-        b_tag = soup.b
-        self.assertEqual(b_tag.next_sibling, magic_tag)
-        self.assertEqual(magic_tag.previous_sibling, b_tag)
-
-        find = b_tag.find(text="Find")
-        self.assertEqual(find.next_element, magic_tag)
-        self.assertEqual(magic_tag.previous_element, find)
-
-        c_tag = soup.c
-        self.assertEqual(magic_tag.next_sibling, c_tag)
-        self.assertEqual(c_tag.previous_sibling, magic_tag)
-
-        the = magic_tag.find(text="the")
-        self.assertEqual(the.parent, magic_tag)
-        self.assertEqual(the.next_element, c_tag)
-        self.assertEqual(c_tag.previous_element, the)
-
-    def test_append_child_thats_already_at_the_end(self):
-        data = "<a><b></b></a>"
-        soup = self.soup(data)
-        soup.a.append(soup.b)
-        self.assertEqual(data, soup.decode())
-
-    def test_move_tag_to_beginning_of_parent(self):
-        data = "<a><b></b><c></c><d></d></a>"
-        soup = self.soup(data)
-        soup.a.insert(0, soup.d)
-        self.assertEqual("<a><d></d><b></b><c></c></a>", soup.decode())
-
-    def test_insert_works_on_empty_element_tag(self):
-        # This is a little strange, since most HTML parsers don't allow
-        # markup like this to come through. But in general, we don't
-        # know what the parser would or wouldn't have allowed, so
-        # I'm letting this succeed for now.
-        soup = self.soup("<br/>")
-        soup.br.insert(1, "Contents")
-        self.assertEqual(str(soup.br), "<br>Contents</br>")
-
-    def test_insert_before(self):
-        soup = self.soup("<a>foo</a><b>bar</b>")
-        soup.b.insert_before("BAZ")
-        soup.a.insert_before("QUUX")
-        self.assertEqual(
-            soup.decode(), self.document_for("QUUX<a>foo</a>BAZ<b>bar</b>"))
-
-        soup.a.insert_before(soup.b)
-        self.assertEqual(
-            soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))
-
-    def test_insert_after(self):
-        soup = self.soup("<a>foo</a><b>bar</b>")
-        soup.b.insert_after("BAZ")
-        soup.a.insert_after("QUUX")
-        self.assertEqual(
-            soup.decode(), self.document_for("<a>foo</a>QUUX<b>bar</b>BAZ"))
-        soup.b.insert_after(soup.a)
-        self.assertEqual(
-            soup.decode(), self.document_for("QUUX<b>bar</b><a>foo</a>BAZ"))
-
-    def test_insert_after_raises_exception_if_after_has_no_meaning(self):
-        soup = self.soup("")
-        tag = soup.new_tag("a")
-        string = soup.new_string("")
-        self.assertRaises(ValueError, string.insert_after, tag)
-        self.assertRaises(NotImplementedError, soup.insert_after, tag)
-        self.assertRaises(ValueError, tag.insert_after, tag)
-
-    def test_insert_before_raises_notimplementederror_if_before_has_no_meaning(self):
-        soup = self.soup("")
-        tag = soup.new_tag("a")
-        string = soup.new_string("")
-        self.assertRaises(ValueError, string.insert_before, tag)
-        self.assertRaises(NotImplementedError, soup.insert_before, tag)
-        self.assertRaises(ValueError, tag.insert_before, tag)
-
-    def test_replace_with(self):
-        soup = self.soup(
-                "<p>There's <b>no</b> business like <b>show</b> business</p>")
-        no, show = soup.find_all('b')
-        show.replace_with(no)
-        self.assertEqual(
-            soup.decode(),
-            self.document_for(
-                "<p>There's  business like <b>no</b> business</p>"))
-
-        self.assertEqual(show.parent, None)
-        self.assertEqual(no.parent, soup.p)
-        self.assertEqual(no.next_element, "no")
-        self.assertEqual(no.next_sibling, " business")
-
-    def test_replace_first_child(self):
-        data = "<a><b></b><c></c></a>"
-        soup = self.soup(data)
-        soup.b.replace_with(soup.c)
-        self.assertEqual("<a><c></c></a>", soup.decode())
-
-    def test_replace_last_child(self):
-        data = "<a><b></b><c></c></a>"
-        soup = self.soup(data)
-        soup.c.replace_with(soup.b)
-        self.assertEqual("<a><b></b></a>", soup.decode())
-
-    def test_nested_tag_replace_with(self):
-        soup = self.soup(
-            """<a>We<b>reserve<c>the</c><d>right</d></b></a><e>to<f>refuse</f><g>service</g></e>""")
-
-        # Replace the entire <b> tag and its contents ("reserve the
-        # right") with the <f> tag ("refuse").
-        remove_tag = soup.b
-        move_tag = soup.f
-        remove_tag.replace_with(move_tag)
-
-        self.assertEqual(
-            soup.decode(), self.document_for(
-                "<a>We<f>refuse</f></a><e>to<g>service</g></e>"))
-
-        # The <b> tag is now an orphan.
-        self.assertEqual(remove_tag.parent, None)
-        self.assertEqual(remove_tag.find(text="right").next_element, None)
-        self.assertEqual(remove_tag.previous_element, None)
-        self.assertEqual(remove_tag.next_sibling, None)
-        self.assertEqual(remove_tag.previous_sibling, None)
-
-        # The <f> tag is now connected to the <a> tag.
-        self.assertEqual(move_tag.parent, soup.a)
-        self.assertEqual(move_tag.previous_element, "We")
-        self.assertEqual(move_tag.next_element.next_element, soup.e)
-        self.assertEqual(move_tag.next_sibling, None)
-
-        # The gap where the <f> tag used to be has been mended, and
-        # the word "to" is now connected to the <g> tag.
-        to_text = soup.find(text="to")
-        g_tag = soup.g
-        self.assertEqual(to_text.next_element, g_tag)
-        self.assertEqual(to_text.next_sibling, g_tag)
-        self.assertEqual(g_tag.previous_element, to_text)
-        self.assertEqual(g_tag.previous_sibling, to_text)
-
-    def test_unwrap(self):
-        tree = self.soup("""
-            <p>Unneeded <em>formatting</em> is unneeded</p>
-            """)
-        tree.em.unwrap()
-        self.assertEqual(tree.em, None)
-        self.assertEqual(tree.p.text, "Unneeded formatting is unneeded")
-
-    def test_wrap(self):
-        soup = self.soup("I wish I was bold.")
-        value = soup.string.wrap(soup.new_tag("b"))
-        self.assertEqual(value.decode(), "<b>I wish I was bold.</b>")
-        self.assertEqual(
-            soup.decode(), self.document_for("<b>I wish I was bold.</b>"))
-
-    def test_wrap_extracts_tag_from_elsewhere(self):
-        soup = self.soup("<b></b>I wish I was bold.")
-        soup.b.next_sibling.wrap(soup.b)
-        self.assertEqual(
-            soup.decode(), self.document_for("<b>I wish I was bold.</b>"))
-
-    def test_wrap_puts_new_contents_at_the_end(self):
-        soup = self.soup("<b>I like being bold.</b>I wish I was bold.")
-        soup.b.next_sibling.wrap(soup.b)
-        self.assertEqual(2, len(soup.b.contents))
-        self.assertEqual(
-            soup.decode(), self.document_for(
-                "<b>I like being bold.I wish I was bold.</b>"))
-
-    def test_extract(self):
-        soup = self.soup(
-            '<html><body>Some content. <div id="nav">Nav crap</div> More content.</body></html>')
-
-        self.assertEqual(len(soup.body.contents), 3)
-        extracted = soup.find(id="nav").extract()
-
-        self.assertEqual(
-            soup.decode(), "<html><body>Some content.  More content.</body></html>")
-        self.assertEqual(extracted.decode(), '<div id="nav">Nav crap</div>')
-
-        # The extracted tag is now an orphan.
-        self.assertEqual(len(soup.body.contents), 2)
-        self.assertEqual(extracted.parent, None)
-        self.assertEqual(extracted.previous_element, None)
-        self.assertEqual(extracted.next_element.next_element, None)
-
-        # The gap where the extracted tag used to be has been mended.
-        content_1 = soup.find(text="Some content. ")
-        content_2 = soup.find(text=" More content.")
-        self.assertEqual(content_1.next_element, content_2)
-        self.assertEqual(content_1.next_sibling, content_2)
-        self.assertEqual(content_2.previous_element, content_1)
-        self.assertEqual(content_2.previous_sibling, content_1)
-
-    def test_extract_distinguishes_between_identical_strings(self):
-        soup = self.soup("<a>foo</a><b>bar</b>")
-        foo_1 = soup.a.string
-        bar_1 = soup.b.string
-        foo_2 = soup.new_string("foo")
-        bar_2 = soup.new_string("bar")
-        soup.a.append(foo_2)
-        soup.b.append(bar_2)
-
-        # Now there are two identical strings in the <a> tag, and two
-        # in the <b> tag. Let's remove the first "foo" and the second
-        # "bar".
-        foo_1.extract()
-        bar_2.extract()
-        self.assertEqual(foo_2, soup.a.string)
-        self.assertEqual(bar_2, soup.b.string)
-
-    def test_extract_multiples_of_same_tag(self):
-        soup = self.soup("""
-<html>
-<head>
-<script>foo</script>
-</head>
-<body>
- <script>bar</script>
- <a></a>
-</body>
-<script>baz</script>
-</html>""")
-        for script in soup.find_all("script"):
-            script.extract()
-        self.assertEqual("<body>\n\n<a></a>\n</body>", unicode(soup.body))
-
-
-    def test_extract_works_when_element_is_surrounded_by_identical_strings(self):
-        soup = self.soup(
- '<html>\n'
- '<body>hi</body>\n'
- '</html>')
-        soup.find('body').extract()
-        self.assertEqual(None, soup.find('body'))
-
-
-    def test_clear(self):
-        """Tag.clear()"""
-        soup = self.soup("<p><a>String <em>Italicized</em></a> and another</p>")
-        # clear using extract()
-        a = soup.a
-        soup.p.clear()
-        self.assertEqual(len(soup.p.contents), 0)
-        self.assertTrue(hasattr(a, "contents"))
-
-        # clear using decompose()
-        em = a.em
-        a.clear(decompose=True)
-        self.assertEqual(0, len(em.contents))
-
-    def test_string_set(self):
-        """Tag.string = 'string'"""
-        soup = self.soup("<a></a> <b><c></c></b>")
-        soup.a.string = "foo"
-        self.assertEqual(soup.a.contents, ["foo"])
-        soup.b.string = "bar"
-        self.assertEqual(soup.b.contents, ["bar"])
-
-    def test_string_set_does_not_affect_original_string(self):
-        soup = self.soup("<a><b>foo</b><c>bar</c>")
-        soup.b.string = soup.c.string
-        self.assertEqual(soup.a.encode(), b"<a><b>bar</b><c>bar</c></a>")
-
-    def test_set_string_preserves_class_of_string(self):
-        soup = self.soup("<a></a>")
-        cdata = CData("foo")
-        soup.a.string = cdata
-        self.assertTrue(isinstance(soup.a.string, CData))
-
-class TestElementObjects(SoupTest):
-    """Test various features of element objects."""
-
-    def test_len(self):
-        """The length of an element is its number of children."""
-        soup = self.soup("<top>1<b>2</b>3</top>")
-
-        # The BeautifulSoup object itself contains one element: the
-        # <top> tag.
-        self.assertEqual(len(soup.contents), 1)
-        self.assertEqual(len(soup), 1)
-
-        # The <top> tag contains three elements: the text node "1", the
-        # <b> tag, and the text node "3".
-        self.assertEqual(len(soup.top), 3)
-        self.assertEqual(len(soup.top.contents), 3)
-
-    def test_member_access_invokes_find(self):
-        """Accessing a Python member .foo invokes find('foo')"""
-        soup = self.soup('<b><i></i></b>')
-        self.assertEqual(soup.b, soup.find('b'))
-        self.assertEqual(soup.b.i, soup.find('b').find('i'))
-        self.assertEqual(soup.a, None)
-
-    def test_deprecated_member_access(self):
-        soup = self.soup('<b><i></i></b>')
-        with warnings.catch_warnings(record=True) as w:
-            tag = soup.bTag
-        self.assertEqual(soup.b, tag)
-        self.assertEqual(
-            '.bTag is deprecated, use .find("b") instead.',
-            str(w[0].message))
-
-    def test_has_attr(self):
-        """has_attr() checks for the presence of an attribute.
-
-        Please note: has_attr() is different from the `in` operator.
-        has_attr() checks the tag's attributes, while `in` checks the
-        tag's children.
-        """
-        soup = self.soup("<foo attr='bar'>")
-        self.assertTrue(soup.foo.has_attr('attr'))
-        self.assertFalse(soup.foo.has_attr('attr2'))
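-
-        # A sketch of the contrast described in the docstring: `in` looks at
-        # the tag's children rather than its attributes, and this <foo> tag
-        # has no children.
-        self.assertFalse('attr' in soup.foo)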
-
-
-    def test_attributes_come_out_in_alphabetical_order(self):
-        markup = '<b a="1" z="5" m="3" f="2" y="4"></b>'
-        self.assertSoupEquals(markup, '<b a="1" f="2" m="3" y="4" z="5"></b>')
-
-    def test_string(self):
-        # A tag that contains only a text node makes that node
-        # available as .string.
-        soup = self.soup("<b>foo</b>")
-        self.assertEqual(soup.b.string, 'foo')
-
-    def test_empty_tag_has_no_string(self):
-        # A tag with no children has no .string.
-        soup = self.soup("<b></b>")
-        self.assertEqual(soup.b.string, None)
-
-    def test_tag_with_multiple_children_has_no_string(self):
-        # A tag with multiple children has no .string.
-        soup = self.soup("<a>foo<b></b><b></b></b>")
-        self.assertEqual(soup.b.string, None)
-
-        soup = self.soup("<a>foo<b></b>bar</b>")
-        self.assertEqual(soup.b.string, None)
-
-        # Even if all the children end up being strings (as with the
-        # insert below), it won't work--but this would be a good
-        # optimization.
-        soup = self.soup("<a>foo</b>")
-        soup.a.insert(1, "bar")
-        self.assertEqual(soup.a.string, None)
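-
-        # When a tag has several string children, .get_text() (exercised in
-        # the tests below) joins them together -- a sketch:
-        self.assertEqual("foobar", soup.a.get_text())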
-
-    def test_tag_with_recursive_string_has_string(self):
-        # A tag with a single child which has a .string inherits that
-        # .string.
-        soup = self.soup("<a><b>foo</b></a>")
-        self.assertEqual(soup.a.string, "foo")
-        self.assertEqual(soup.string, "foo")
-
-    def test_lack_of_string(self):
-        """Only a tag containing a single text node has a .string."""
-        soup = self.soup("<b>f<i>e</i>o</b>")
-        self.assertFalse(soup.b.string)
-
-        soup = self.soup("<b></b>")
-        self.assertFalse(soup.b.string)
-
-    def test_all_text(self):
-        """Tag.text and Tag.get_text(sep=u"") -> all child text, concatenated"""
-        soup = self.soup("<a>a<b>r</b>   <r> t </r></a>")
-        self.assertEqual(soup.a.text, "ar  t ")
-        self.assertEqual(soup.a.get_text(strip=True), "art")
-        self.assertEqual(soup.a.get_text(","), "a,r, , t ")
-        self.assertEqual(soup.a.get_text(",", strip=True), "a,r,t")
-
-    def test_get_text_ignores_comments(self):
-        soup = self.soup("foo<!--IGNORE-->bar")
-        self.assertEqual(soup.get_text(), "foobar")
-
-        self.assertEqual(
-            soup.get_text(types=(NavigableString, Comment)), "fooIGNOREbar")
-        self.assertEqual(
-            soup.get_text(types=None), "fooIGNOREbar")
-
-    def test_all_strings_ignores_comments(self):
-        soup = self.soup("foo<!--IGNORE-->bar")
-        self.assertEqual(['foo', 'bar'], list(soup.strings))
-
-class TestCDataListAttributes(SoupTest):
-
-    """Testing cdata-list attributes like 'class'.
-    """
-    def test_single_value_becomes_list(self):
-        soup = self.soup("<a class='foo'>")
-        self.assertEqual(["foo"],soup.a['class'])
-
-    def test_multiple_values_becomes_list(self):
-        soup = self.soup("<a class='foo bar'>")
-        self.assertEqual(["foo", "bar"], soup.a['class'])
-
-    def test_multiple_values_separated_by_weird_whitespace(self):
-        soup = self.soup("<a class='foo\tbar\nbaz'>")
-        self.assertEqual(["foo", "bar", "baz"],soup.a['class'])
-
-    def test_attributes_joined_into_string_on_output(self):
-        soup = self.soup("<a class='foo\tbar'>")
-        self.assertEqual(b'<a class="foo bar"></a>', soup.a.encode())
-
-    def test_accept_charset(self):
-        soup = self.soup('<form accept-charset="ISO-8859-1 UTF-8">')
-        self.assertEqual(['ISO-8859-1', 'UTF-8'], soup.form['accept-charset'])
-
-    def test_cdata_attribute_applying_only_to_one_tag(self):
-        data = '<a accept-charset="ISO-8859-1 UTF-8"></a>'
-        soup = self.soup(data)
-        # We saw in another test that accept-charset is a cdata-list
-        # attribute for the <form> tag. But it's not a cdata-list
-        # attribute for any other tag.
-        self.assertEqual('ISO-8859-1 UTF-8', soup.a['accept-charset'])
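-
-        # By contrast, 'class' is treated as a cdata-list attribute on every
-        # tag, so it still comes back as a list (a sketch of the same rule).
-        classed = self.soup('<a class="foo bar"></a>')
-        self.assertEqual(['foo', 'bar'], classed.a['class'])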
-
-    def test_string_has_immutable_name_property(self):
-        string = self.soup("s").string
-        self.assertEqual(None, string.name)
-        def t():
-            string.name = 'foo'
-        self.assertRaises(AttributeError, t)
-
-class TestPersistence(SoupTest):
-    "Testing features like pickle and deepcopy."
-
-    def setUp(self):
-        super(TestPersistence, self).setUp()
-        self.page = """<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN"
-"http://www.w3.org/TR/REC-html40/transitional.dtd">
-<html>
-<head>
-<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
-<title>Beautiful Soup: We called him Tortoise because he taught us.</title>
-<link rev="made" href="mailto:leonardr@segfault.org">
-<meta name="Description" content="Beautiful Soup: an HTML parser optimized for screen-scraping.">
-<meta name="generator" content="Markov Approximation 1.4 (module: leonardr)">
-<meta name="author" content="Leonard Richardson">
-</head>
-<body>
-<a href="foo">foo</a>
-<a href="foo"><b>bar</b></a>
-</body>
-</html>"""
-        self.tree = self.soup(self.page)
-
-    def test_pickle_and_unpickle_identity(self):
-        # Pickling a tree, then unpickling it, yields a tree identical
-        # to the original.
-        dumped = pickle.dumps(self.tree, 2)
-        loaded = pickle.loads(dumped)
-        self.assertEqual(loaded.__class__, BeautifulSoup)
-        self.assertEqual(loaded.decode(), self.tree.decode())
-
-    def test_deepcopy_identity(self):
-        # Making a deepcopy of a tree yields an identical tree.
-        copied = copy.deepcopy(self.tree)
-        self.assertEqual(copied.decode(), self.tree.decode())
-
-    def test_copy_preserves_encoding(self):
-        soup = BeautifulSoup(b'<p>&nbsp;</p>', 'html.parser')
-        encoding = soup.original_encoding
-        copy = soup.__copy__()
-        self.assertEqual(u"<p> </p>", unicode(copy))
-        self.assertEqual(encoding, copy.original_encoding)
-
-    def test_unicode_pickle(self):
-        # A tree containing Unicode characters can be pickled.
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        dumped = pickle.dumps(soup, pickle.HIGHEST_PROTOCOL)
-        loaded = pickle.loads(dumped)
-        self.assertEqual(loaded.decode(), soup.decode())
-
-    def test_copy_navigablestring_is_not_attached_to_tree(self):
-        html = u"<b>Foo<a></a></b><b>Bar</b>"
-        soup = self.soup(html)
-        s1 = soup.find(string="Foo")
-        s2 = copy.copy(s1)
-        self.assertEqual(s1, s2)
-        self.assertEqual(None, s2.parent)
-        self.assertEqual(None, s2.next_element)
-        self.assertNotEqual(None, s1.next_sibling)
-        self.assertEqual(None, s2.next_sibling)
-        self.assertEqual(None, s2.previous_element)
-
-    def test_copy_navigablestring_subclass_has_same_type(self):
-        html = u"<b><!--Foo--></b>"
-        soup = self.soup(html)
-        s1 = soup.string
-        s2 = copy.copy(s1)
-        self.assertEqual(s1, s2)
-        self.assertTrue(isinstance(s2, Comment))
-
-    def test_copy_entire_soup(self):
-        html = u"<div><b>Foo<a></a></b><b>Bar</b></div>end"
-        soup = self.soup(html)
-        soup_copy = copy.copy(soup)
-        self.assertEqual(soup, soup_copy)
-
-    def test_copy_tag_copies_contents(self):
-        html = u"<div><b>Foo<a></a></b><b>Bar</b></div>end"
-        soup = self.soup(html)
-        div = soup.div
-        div_copy = copy.copy(div)
-
-        # The two tags look the same, and evaluate to equal.
-        self.assertEqual(unicode(div), unicode(div_copy))
-        self.assertEqual(div, div_copy)
-
-        # But they're not the same object.
-        self.assertFalse(div is div_copy)
-
-        # And they don't have the same relation to the parse tree. The
-        # copy is not associated with a parse tree at all.
-        self.assertEqual(None, div_copy.parent)
-        self.assertEqual(None, div_copy.previous_element)
-        self.assertEqual(None, div_copy.find(string='Bar').next_element)
-        self.assertNotEqual(None, div.find(string='Bar').next_element)
-
-class TestSubstitutions(SoupTest):
-
-    def test_default_formatter_is_minimal(self):
-        markup = u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
-        soup = self.soup(markup)
-        decoded = soup.decode(formatter="minimal")
-        # The < is converted back into &lt; but the e-with-acute is left alone.
-        self.assertEqual(
-            decoded,
-            self.document_for(
-                u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"))
-
-    def test_formatter_html(self):
-        markup = u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
-        soup = self.soup(markup)
-        decoded = soup.decode(formatter="html")
-        self.assertEqual(
-            decoded,
-            self.document_for("<b>&lt;&lt;Sacr&eacute; bleu!&gt;&gt;</b>"))
-
-    def test_formatter_minimal(self):
-        markup = u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
-        soup = self.soup(markup)
-        decoded = soup.decode(formatter="minimal")
-        # The < is converted back into &lt; but the e-with-acute is left alone.
-        self.assertEqual(
-            decoded,
-            self.document_for(
-                u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"))
-
-    def test_formatter_null(self):
-        markup = u"<b>&lt;&lt;Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!&gt;&gt;</b>"
-        soup = self.soup(markup)
-        decoded = soup.decode(formatter=None)
-        # Neither the angle brackets nor the e-with-acute are converted.
-        # This is not valid HTML, but it's what the user wanted.
-        self.assertEqual(decoded,
-                          self.document_for(u"<b><<Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!>></b>"))
-
-    def test_formatter_custom(self):
-        markup = u"<b>&lt;foo&gt;</b><b>bar</b>"
-        soup = self.soup(markup)
-        decoded = soup.decode(formatter=lambda x: x.upper())
-        # Instead of normal entity conversion code, the custom
-        # callable is called on every string.
-        self.assertEqual(
-            decoded,
-            self.document_for(u"<b><FOO></b><b>BAR</b>"))
-
-    def test_formatter_is_run_on_attribute_values(self):
-        markup = u'<a href="http://a.com?a=b&c=é">e</a>'
-        soup = self.soup(markup)
-        a = soup.a
-
-        expect_minimal = u'<a href="http://a.com?a=b&amp;c=é">e</a>'
-
-        self.assertEqual(expect_minimal, a.decode())
-        self.assertEqual(expect_minimal, a.decode(formatter="minimal"))
-
-        expect_html = u'<a href="http://a.com?a=b&amp;c=&eacute;">e</a>'
-        self.assertEqual(expect_html, a.decode(formatter="html"))
-
-        self.assertEqual(markup, a.decode(formatter=None))
-        expect_upper = u'<a href="HTTP://A.COM?A=B&C=É">E</a>'
-        self.assertEqual(expect_upper, a.decode(formatter=lambda x: x.upper()))
-
-    def test_formatter_skips_script_tag_for_html_documents(self):
-        doc = """
-  <script type="text/javascript">
-   console.log("< < hey > > ");
-  </script>
-"""
-        encoded = BeautifulSoup(doc, 'html.parser').encode()
-        self.assertTrue(b"< < hey > >" in encoded)
-
-    def test_formatter_skips_style_tag_for_html_documents(self):
-        doc = """
-  <style type="text/css">
-   console.log("< < hey > > ");
-  </style>
-"""
-        encoded = BeautifulSoup(doc, 'html.parser').encode()
-        self.assertTrue(b"< < hey > >" in encoded)
-
-    def test_prettify_leaves_preformatted_text_alone(self):
-        soup = self.soup("<div>  foo  <pre>  \tbar\n  \n  </pre>  baz  ")
-        # Everything outside the <pre> tag is reformatted, but everything
-        # inside is left alone.
-        self.assertEqual(
-            u'<div>\n foo\n <pre>  \tbar\n  \n  </pre>\n baz\n</div>',
-            soup.div.prettify())
-
-    def test_prettify_accepts_formatter(self):
-        soup = BeautifulSoup("<html><body>foo</body></html>", 'html.parser')
-        pretty = soup.prettify(formatter=lambda x: x.upper())
-        self.assertTrue("FOO" in pretty)
-
-    def test_prettify_outputs_unicode_by_default(self):
-        soup = self.soup("<a></a>")
-        self.assertEqual(unicode, type(soup.prettify()))
-
-    def test_prettify_can_encode_data(self):
-        soup = self.soup("<a></a>")
-        self.assertEqual(bytes, type(soup.prettify("utf-8")))
-
-    def test_html_entity_substitution_off_by_default(self):
-        markup = u"<b>Sacr\N{LATIN SMALL LETTER E WITH ACUTE} bleu!</b>"
-        soup = self.soup(markup)
-        encoded = soup.b.encode("utf-8")
-        self.assertEqual(encoded, markup.encode('utf-8'))
-
-    def test_encoding_substitution(self):
-        # Here's the <meta> tag saying that a document is
-        # encoded in Shift-JIS.
-        meta_tag = ('<meta content="text/html; charset=x-sjis" '
-                    'http-equiv="Content-type"/>')
-        soup = self.soup(meta_tag)
-
-        # Parse the document, and the charset appears unchanged.
-        self.assertEqual(soup.meta['content'], 'text/html; charset=x-sjis')
-
-        # Encode the document into some encoding, and the encoding is
-        # substituted into the meta tag.
-        utf_8 = soup.encode("utf-8")
-        self.assertTrue(b"charset=utf-8" in utf_8)
-
-        euc_jp = soup.encode("euc_jp")
-        self.assertTrue(b"charset=euc_jp" in euc_jp)
-
-        shift_jis = soup.encode("shift-jis")
-        self.assertTrue(b"charset=shift-jis" in shift_jis)
-
-        utf_16_u = soup.encode("utf-16").decode("utf-16")
-        self.assertTrue("charset=utf-16" in utf_16_u)
-
-    def test_encoding_substitution_doesnt_happen_if_tag_is_strained(self):
-        markup = ('<head><meta content="text/html; charset=x-sjis" '
-                    'http-equiv="Content-type"/></head><pre>foo</pre>')
-
-        # Beautiful Soup used to try to rewrite the meta tag even if the
-        # meta tag got filtered out by the strainer. This test makes
-        # sure that doesn't happen.
-        strainer = SoupStrainer('pre')
-        soup = self.soup(markup, parse_only=strainer)
-        self.assertEqual(soup.contents[0].name, 'pre')
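-
-        # The <meta> tag was filtered out entirely, so there is nothing left
-        # to rewrite -- a sketch:
-        self.assertEqual(None, soup.find('meta'))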
-
-class TestEncoding(SoupTest):
-    """Test the ability to encode objects into strings."""
-
-    def test_unicode_string_can_be_encoded(self):
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        self.assertEqual(soup.b.string.encode("utf-8"),
-                          u"\N{SNOWMAN}".encode("utf-8"))
-
-    def test_tag_containing_unicode_string_can_be_encoded(self):
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        self.assertEqual(
-            soup.b.encode("utf-8"), html.encode("utf-8"))
-
-    def test_encoding_substitutes_unrecognized_characters_by_default(self):
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        self.assertEqual(soup.b.encode("ascii"), b"<b>&#9731;</b>")
-
-    def test_encoding_can_be_made_strict(self):
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        self.assertRaises(
-            UnicodeEncodeError, soup.encode, "ascii", errors="strict")
-
-    def test_decode_contents(self):
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        self.assertEqual(u"\N{SNOWMAN}", soup.b.decode_contents())
-
-    def test_encode_contents(self):
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        self.assertEqual(
-            u"\N{SNOWMAN}".encode("utf8"), soup.b.encode_contents(
-                encoding="utf8"))
-
-    def test_deprecated_renderContents(self):
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        self.assertEqual(
-            u"\N{SNOWMAN}".encode("utf8"), soup.b.renderContents())
-
-    def test_repr(self):
-        html = u"<b>\N{SNOWMAN}</b>"
-        soup = self.soup(html)
-        if PY3K:
-            self.assertEqual(html, repr(soup))
-        else:
-            self.assertEqual(b'<b>\\u2603</b>', repr(soup))
-
-class TestNavigableStringSubclasses(SoupTest):
-
-    def test_cdata(self):
-        # None of the current builders turn CDATA sections into CData
-        # objects, but you can create them manually.
-        soup = self.soup("")
-        cdata = CData("foo")
-        soup.insert(1, cdata)
-        self.assertEqual(str(soup), "<![CDATA[foo]]>")
-        self.assertEqual(soup.find(text="foo"), "foo")
-        self.assertEqual(soup.contents[0], "foo")
-
-    def test_cdata_is_never_formatted(self):
-        """Text inside a CData object is passed into the formatter.
-
-        But the return value is ignored.
-        """
-
-        self.count = 0
-        def increment(*args):
-            self.count += 1
-            return "BITTER FAILURE"
-
-        soup = self.soup("")
-        cdata = CData("<><><>")
-        soup.insert(1, cdata)
-        self.assertEqual(
-            b"<![CDATA[<><><>]]>", soup.encode(formatter=increment))
-        self.assertEqual(1, self.count)
-
-    def test_doctype_ends_in_newline(self):
-        # Unlike other NavigableString subclasses, a DOCTYPE always ends
-        # in a newline.
-        doctype = Doctype("foo")
-        soup = self.soup("")
-        soup.insert(1, doctype)
-        self.assertEqual(soup.encode(), b"<!DOCTYPE foo>\n")
-
-    def test_declaration(self):
-        d = Declaration("foo")
-        self.assertEqual("<?foo?>", d.output_ready())
-
-class TestSoupSelector(TreeTest):
-
-    HTML = """
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
-"http://www.w3.org/TR/html4/strict.dtd">
-<html>
-<head>
-<title>The title</title>
-<link rel="stylesheet" href="blah.css" type="text/css" id="l1">
-</head>
-<body>
-<custom-dashed-tag class="dashed" id="dash1">Hello there.</custom-dashed-tag>
-<div id="main" class="fancy">
-<div id="inner">
-<h1 id="header1">An H1</h1>
-<p>Some text</p>
-<p class="onep" id="p1">Some more text</p>
-<h2 id="header2">An H2</h2>
-<p class="class1 class2 class3" id="pmulti">Another</p>
-<a href="http://bob.example.org/" rel="friend met" id="bob">Bob</a>
-<h2 id="header3">Another H2</h2>
-<a id="me" href="http://simonwillison.net/" rel="me">me</a>
-<span class="s1">
-<a href="#" id="s1a1">span1a1</a>
-<a href="#" id="s1a2">span1a2 <span id="s1a2s1">test</span></a>
-<span class="span2">
-<a href="#" id="s2a1">span2a1</a>
-</span>
-<span class="span3"></span>
-<custom-dashed-tag class="dashed" id="dash2"/>
-<div data-tag="dashedvalue" id="data1"/>
-</span>
-</div>
-<x id="xid">
-<z id="zida"/>
-<z id="zidab"/>
-<z id="zidac"/>
-</x>
-<y id="yid">
-<z id="zidb"/>
-</y>
-<p lang="en" id="lang-en">English</p>
-<p lang="en-gb" id="lang-en-gb">English UK</p>
-<p lang="en-us" id="lang-en-us">English US</p>
-<p lang="fr" id="lang-fr">French</p>
-</div>
-
-<div id="footer">
-</div>
-"""
-
-    def setUp(self):
-        self.soup = BeautifulSoup(self.HTML, 'html.parser')
-
-    def assertSelects(self, selector, expected_ids, **kwargs):
-        el_ids = [el['id'] for el in self.soup.select(selector, **kwargs)]
-        el_ids.sort()
-        expected_ids.sort()
-        self.assertEqual(expected_ids, el_ids,
-            "Selector %s, expected [%s], got [%s]" % (
-                selector, ', '.join(expected_ids), ', '.join(el_ids)
-            )
-        )
-
-    assertSelect = assertSelects
-
-    def assertSelectMultiple(self, *tests):
-        for selector, expected_ids in tests:
-            self.assertSelect(selector, expected_ids)
-
-    def test_one_tag_one(self):
-        els = self.soup.select('title')
-        self.assertEqual(len(els), 1)
-        self.assertEqual(els[0].name, 'title')
-        self.assertEqual(els[0].contents, [u'The title'])
-
-    def test_one_tag_many(self):
-        els = self.soup.select('div')
-        self.assertEqual(len(els), 4)
-        for div in els:
-            self.assertEqual(div.name, 'div')
-
-        el = self.soup.select_one('div')
-        self.assertEqual('main', el['id'])
-
-    def test_select_one_returns_none_if_no_match(self):
-        match = self.soup.select_one('nonexistenttag')
-        self.assertEqual(None, match)
-
-
-    def test_tag_in_tag_one(self):
-        els = self.soup.select('div div')
-        self.assertSelects('div div', ['inner', 'data1'])
-
-    def test_tag_in_tag_many(self):
-        for selector in ('html div', 'html body div', 'body div'):
-            self.assertSelects(selector, ['data1', 'main', 'inner', 'footer'])
-
-
-    def test_limit(self):
-        self.assertSelects('html div', ['main'], limit=1)
-        self.assertSelects('html body div', ['inner', 'main'], limit=2)
-        self.assertSelects('body div', ['data1', 'main', 'inner', 'footer'],
-                           limit=10)
-
-    def test_tag_no_match(self):
-        self.assertEqual(len(self.soup.select('del')), 0)
-
-    def test_invalid_tag(self):
-        self.assertRaises(ValueError, self.soup.select, 'tag%t')
-
-    def test_select_dashed_tag_ids(self):
-        self.assertSelects('custom-dashed-tag', ['dash1', 'dash2'])
-
-    def test_select_dashed_by_id(self):
-        dashed = self.soup.select('custom-dashed-tag[id=\"dash2\"]')
-        self.assertEqual(dashed[0].name, 'custom-dashed-tag')
-        self.assertEqual(dashed[0]['id'], 'dash2')
-
-    def test_dashed_tag_text(self):
-        self.assertEqual(self.soup.select('body > custom-dashed-tag')[0].text, u'Hello there.')
-
-    def test_select_dashed_matches_find_all(self):
-        self.assertEqual(self.soup.select('custom-dashed-tag'), self.soup.find_all('custom-dashed-tag'))
-
-    def test_header_tags(self):
-        self.assertSelectMultiple(
-            ('h1', ['header1']),
-            ('h2', ['header2', 'header3']),
-        )
-
-    def test_class_one(self):
-        for selector in ('.onep', 'p.onep', 'html p.onep'):
-            els = self.soup.select(selector)
-            self.assertEqual(len(els), 1)
-            self.assertEqual(els[0].name, 'p')
-            self.assertEqual(els[0]['class'], ['onep'])
-
-    def test_class_mismatched_tag(self):
-        els = self.soup.select('div.onep')
-        self.assertEqual(len(els), 0)
-
-    def test_one_id(self):
-        for selector in ('div#inner', '#inner', 'div div#inner'):
-            self.assertSelects(selector, ['inner'])
-
-    def test_bad_id(self):
-        els = self.soup.select('#doesnotexist')
-        self.assertEqual(len(els), 0)
-
-    def test_items_in_id(self):
-        els = self.soup.select('div#inner p')
-        self.assertEqual(len(els), 3)
-        for el in els:
-            self.assertEqual(el.name, 'p')
-        self.assertEqual(els[1]['class'], ['onep'])
-        self.assertFalse(els[0].has_attr('class'))
-
-    def test_a_bunch_of_emptys(self):
-        for selector in ('div#main del', 'div#main div.oops', 'div div#main'):
-            self.assertEqual(len(self.soup.select(selector)), 0)
-
-    def test_multi_class_support(self):
-        for selector in ('.class1', 'p.class1', '.class2', 'p.class2',
-            '.class3', 'p.class3', 'html p.class2', 'div#inner .class2'):
-            self.assertSelects(selector, ['pmulti'])
-
-    def test_multi_class_selection(self):
-        for selector in ('.class1.class3', '.class3.class2',
-                         '.class1.class2.class3'):
-            self.assertSelects(selector, ['pmulti'])
-
-    def test_child_selector(self):
-        self.assertSelects('.s1 > a', ['s1a1', 's1a2'])
-        self.assertSelects('.s1 > a span', ['s1a2s1'])
-
-    def test_child_selector_id(self):
-        self.assertSelects('.s1 > a#s1a2 span', ['s1a2s1'])
-
-    def test_attribute_equals(self):
-        self.assertSelectMultiple(
-            ('p[class="onep"]', ['p1']),
-            ('p[id="p1"]', ['p1']),
-            ('[class="onep"]', ['p1']),
-            ('[id="p1"]', ['p1']),
-            ('link[rel="stylesheet"]', ['l1']),
-            ('link[type="text/css"]', ['l1']),
-            ('link[href="blah.css"]', ['l1']),
-            ('link[href="no-blah.css"]', []),
-            ('[rel="stylesheet"]', ['l1']),
-            ('[type="text/css"]', ['l1']),
-            ('[href="blah.css"]', ['l1']),
-            ('[href="no-blah.css"]', []),
-            ('p[href="no-blah.css"]', []),
-            ('[href="no-blah.css"]', []),
-        )
-
-    def test_attribute_tilde(self):
-        self.assertSelectMultiple(
-            ('p[class~="class1"]', ['pmulti']),
-            ('p[class~="class2"]', ['pmulti']),
-            ('p[class~="class3"]', ['pmulti']),
-            ('[class~="class1"]', ['pmulti']),
-            ('[class~="class2"]', ['pmulti']),
-            ('[class~="class3"]', ['pmulti']),
-            ('a[rel~="friend"]', ['bob']),
-            ('a[rel~="met"]', ['bob']),
-            ('[rel~="friend"]', ['bob']),
-            ('[rel~="met"]', ['bob']),
-        )
-
-    def test_attribute_startswith(self):
-        self.assertSelectMultiple(
-            ('[rel^="style"]', ['l1']),
-            ('link[rel^="style"]', ['l1']),
-            ('notlink[rel^="notstyle"]', []),
-            ('[rel^="notstyle"]', []),
-            ('link[rel^="notstyle"]', []),
-            ('link[href^="bla"]', ['l1']),
-            ('a[href^="http://"]', ['bob', 'me']),
-            ('[href^="http://"]', ['bob', 'me']),
-            ('[id^="p"]', ['pmulti', 'p1']),
-            ('[id^="m"]', ['me', 'main']),
-            ('div[id^="m"]', ['main']),
-            ('a[id^="m"]', ['me']),
-            ('div[data-tag^="dashed"]', ['data1'])
-        )
-
-    def test_attribute_endswith(self):
-        self.assertSelectMultiple(
-            ('[href$=".css"]', ['l1']),
-            ('link[href$=".css"]', ['l1']),
-            ('link[id$="1"]', ['l1']),
-            ('[id$="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's2a1', 's1a2s1', 'dash1']),
-            ('div[id$="1"]', ['data1']),
-            ('[id$="noending"]', []),
-        )
-
-    def test_attribute_contains(self):
-        self.assertSelectMultiple(
-            # From test_attribute_startswith
-            ('[rel*="style"]', ['l1']),
-            ('link[rel*="style"]', ['l1']),
-            ('notlink[rel*="notstyle"]', []),
-            ('[rel*="notstyle"]', []),
-            ('link[rel*="notstyle"]', []),
-            ('link[href*="bla"]', ['l1']),
-            ('[href*="http://"]', ['bob', 'me']),
-            ('[id*="p"]', ['pmulti', 'p1']),
-            ('div[id*="m"]', ['main']),
-            ('a[id*="m"]', ['me']),
-            # From test_attribute_endswith
-            ('[href*=".css"]', ['l1']),
-            ('link[href*=".css"]', ['l1']),
-            ('link[id*="1"]', ['l1']),
-            ('[id*="1"]', ['data1', 'l1', 'p1', 'header1', 's1a1', 's1a2', 's2a1', 's1a2s1', 'dash1']),
-            ('div[id*="1"]', ['data1']),
-            ('[id*="noending"]', []),
-            # New for this test
-            ('[href*="."]', ['bob', 'me', 'l1']),
-            ('a[href*="."]', ['bob', 'me']),
-            ('link[href*="."]', ['l1']),
-            ('div[id*="n"]', ['main', 'inner']),
-            ('div[id*="nn"]', ['inner']),
-            ('div[data-tag*="edval"]', ['data1'])
-        )
-
-    def test_attribute_exact_or_hyphen(self):
-        self.assertSelectMultiple(
-            ('p[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
-            ('[lang|="en"]', ['lang-en', 'lang-en-gb', 'lang-en-us']),
-            ('p[lang|="fr"]', ['lang-fr']),
-            ('p[lang|="gb"]', []),
-        )
-
-    def test_attribute_exists(self):
-        self.assertSelectMultiple(
-            ('[rel]', ['l1', 'bob', 'me']),
-            ('link[rel]', ['l1']),
-            ('a[rel]', ['bob', 'me']),
-            ('[lang]', ['lang-en', 'lang-en-gb', 'lang-en-us', 'lang-fr']),
-            ('p[class]', ['p1', 'pmulti']),
-            ('[blah]', []),
-            ('p[blah]', []),
-            ('div[data-tag]', ['data1'])
-        )
-
-    def test_quoted_space_in_selector_name(self):
-        html = """<div style="display: wrong">nope</div>
-        <div style="display: right">yes</div>
-        """
-        soup = BeautifulSoup(html, 'html.parser')
-        [chosen] = soup.select('div[style="display: right"]')
-        self.assertEqual("yes", chosen.string)
-
-    def test_unsupported_pseudoclass(self):
-        self.assertRaises(
-            NotImplementedError, self.soup.select, "a:no-such-pseudoclass")
-
-        self.assertRaises(
-            NotImplementedError, self.soup.select, "a:nth-of-type(a)")
-
-
-    def test_nth_of_type(self):
-        # Try to select first paragraph
-        els = self.soup.select('div#inner p:nth-of-type(1)')
-        self.assertEqual(len(els), 1)
-        self.assertEqual(els[0].string, u'Some text')
-
-        # Try to select third paragraph
-        els = self.soup.select('div#inner p:nth-of-type(3)')
-        self.assertEqual(len(els), 1)
-        self.assertEqual(els[0].string, u'Another')
-
-        # Try to select (non-existent!) fourth paragraph
-        els = self.soup.select('div#inner p:nth-of-type(4)')
-        self.assertEqual(len(els), 0)
-
-        # Pass in an invalid value.
-        self.assertRaises(
-            ValueError, self.soup.select, 'div p:nth-of-type(0)')
-
-    def test_nth_of_type_direct_descendant(self):
-        els = self.soup.select('div#inner > p:nth-of-type(1)')
-        self.assertEqual(len(els), 1)
-        self.assertEqual(els[0].string, u'Some text')
-
-    def test_id_child_selector_nth_of_type(self):
-        self.assertSelects('#inner > p:nth-of-type(2)', ['p1'])
-
-    def test_select_on_element(self):
-        # Other tests operate on the tree; this operates on an element
-        # within the tree.
-        inner = self.soup.find("div", id="main")
-        selected = inner.select("div")
-        # The <div id="inner"> tag was selected. The <div id="footer">
-        # tag was not.
-        self.assertSelectsIDs(selected, ['inner', 'data1'])
-
-    def test_overspecified_child_id(self):
-        self.assertSelects(".fancy #inner", ['inner'])
-        self.assertSelects(".normal #inner", [])
-
-    def test_adjacent_sibling_selector(self):
-        self.assertSelects('#p1 + h2', ['header2'])
-        self.assertSelects('#p1 + h2 + p', ['pmulti'])
-        self.assertSelects('#p1 + #header2 + .class1', ['pmulti'])
-        self.assertEqual([], self.soup.select('#p1 + p'))
-
-    def test_general_sibling_selector(self):
-        self.assertSelects('#p1 ~ h2', ['header2', 'header3'])
-        self.assertSelects('#p1 ~ #header2', ['header2'])
-        self.assertSelects('#p1 ~ h2 + a', ['me'])
-        self.assertSelects('#p1 ~ h2 + [rel="me"]', ['me'])
-        self.assertEqual([], self.soup.select('#inner ~ h2'))
-
-    def test_dangling_combinator(self):
-        self.assertRaises(ValueError, self.soup.select, 'h1 >')
-
-    def test_sibling_combinator_wont_select_same_tag_twice(self):
-        self.assertSelects('p[lang] ~ p', ['lang-en-gb', 'lang-en-us', 'lang-fr'])
-
-    # Test the selector grouping operator (the comma)
-    def test_multiple_select(self):
-        self.assertSelects('x, y', ['xid', 'yid'])
-
-    def test_multiple_select_with_no_space(self):
-        self.assertSelects('x,y', ['xid', 'yid'])
-
-    def test_multiple_select_with_more_space(self):
-        self.assertSelects('x,    y', ['xid', 'yid'])
-
-    def test_multiple_select_duplicated(self):
-        self.assertSelects('x, x', ['xid'])
-
-    def test_multiple_select_sibling(self):
-        self.assertSelects('x, y ~ p[lang=fr]', ['xid', 'lang-fr'])
-
-    def test_multiple_select_tag_and_direct_descendant(self):
-        self.assertSelects('x, y > z', ['xid', 'zidb'])
-
-    def test_multiple_select_direct_descendant_and_tags(self):
-        self.assertSelects('div > x, y, z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac'])
-
-    def test_multiple_select_indirect_descendant(self):
-        self.assertSelects('div x,y,  z', ['xid', 'yid', 'zida', 'zidb', 'zidab', 'zidac'])
-
-    def test_invalid_multiple_select(self):
-        self.assertRaises(ValueError, self.soup.select, ',x, y')
-        self.assertRaises(ValueError, self.soup.select, 'x,,y')
-
-    def test_multiple_select_attrs(self):
-        self.assertSelects('p[lang=en], p[lang=en-gb]', ['lang-en', 'lang-en-gb'])
-
-    def test_multiple_select_ids(self):
-        self.assertSelects('x, y > z[id=zida], z[id=zidab], z[id=zidb]', ['xid', 'zidb', 'zidab'])
-
-    def test_multiple_select_nested(self):
-        self.assertSelects('body > div > x, y > z', ['xid', 'zidb'])
-
-
-
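For reference, the CSS-selector behaviour the deleted tests above exercise, as a minimal sketch against a made-up fragment (the markup and ids here are illustrative, not taken from the test fixture):

    from bs4 import BeautifulSoup

    html = '<div id="inner"><p>Some text</p><p id="p1">More</p><p>Another</p></div>'
    soup = BeautifulSoup(html, 'html.parser')

    # select() returns a list of matching Tag objects.
    soup.select('div#inner p:nth-of-type(3)')   # [<p>Another</p>]
    soup.select('#inner > p:nth-of-type(2)')    # [<p id="p1">More</p>]
    soup.select('p[id^="p"]')                   # attribute prefix match, as in the tests above
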
diff --git a/lib/enzyme/__init__.py b/lib/enzyme/__init__.py
index 4ed31f426823dd1cc09c62efb7fb815e8d2156f6..3bd89f33654d109ad911fcf366f26bc80658b6a5 100644
--- a/lib/enzyme/__init__.py
+++ b/lib/enzyme/__init__.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 __title__ = 'enzyme'
-__version__ = '0.4.2'
+__version__ = '0.4.1'
 __author__ = 'Antoine Bertin'
 __license__ = 'Apache 2.0'
 __copyright__ = 'Copyright 2013 Antoine Bertin'
@@ -9,8 +9,5 @@ import logging
 from .exceptions import *
 from .mkv import *
 
-class NullHandler(logging.Handler):
-    def emit(self, record):
-        pass
 
-logging.getLogger(__name__).addHandler(NullHandler())
+logging.getLogger(__name__).addHandler(logging.NullHandler())
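For context on the hunk above: the hand-rolled NullHandler class is dropped in favour of the stdlib logging.NullHandler, which has been available since Python 2.7. A minimal sketch of the pattern the new line relies on (the basicConfig call is only one illustrative way an application might opt in to the library's log output):

    import logging

    # Inside a library package's __init__, attaching a NullHandler to the
    # package logger means that merely importing the library never triggers
    # the "No handlers could be found" warning on Python 2.
    logging.getLogger(__name__).addHandler(logging.NullHandler())

    # An application that wants the library's output configures logging itself,
    # e.g. logging.basicConfig(level=logging.DEBUG)
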
diff --git a/lib/enzyme/tests/__init__.py b/lib/enzyme/tests/__init__.py
deleted file mode 100644
index 426d3598ffef7f2c1c6d2172569b78d378df35b0..0000000000000000000000000000000000000000
--- a/lib/enzyme/tests/__init__.py
+++ /dev/null
@@ -1,10 +0,0 @@
-# -*- coding: utf-8 -*-
-from . import test_mkv, test_parsers
-import unittest
-
-
-suite = unittest.TestSuite([test_mkv.suite(), test_parsers.suite()])
-
-
-if __name__ == '__main__':
-    unittest.TextTestRunner().run(suite)
diff --git a/lib/enzyme/tests/parsers/ebml/test1.mkv.yml b/lib/enzyme/tests/parsers/ebml/test1.mkv.yml
deleted file mode 100644
index 92642ec5d3cc7efcbd39a110b5b0e145be81b3d5..0000000000000000000000000000000000000000
--- a/lib/enzyme/tests/parsers/ebml/test1.mkv.yml
+++ /dev/null
@@ -1,2974 +0,0 @@
-- - 440786851
-  - 6
-  - EBML
-  - 0
-  - 5
-  - 19
-  - - [17026, 3, DocType, 1, 8, 8, matroska]
-    - [17031, 1, DocTypeVersion, 1, 19, 1, 2]
-    - [17029, 1, DocTypeReadVersion, 1, 23, 1, 2]
-- - 408125543
-  - 6
-  - Segment
-  - 0
-  - 32
-  - 23339305
-  - - - 290298740
-      - 6
-      - SeekHead
-      - 1
-      - 37
-      - 59
-      - - - 19899
-          - 6
-          - Seek
-          - 2
-          - 40
-          - 11
-          - - [21419, 7, SeekID, 3, 43, 4, null]
-            - [21420, 1, SeekPosition, 3, 50, 1, 64]
-        - - 19899
-          - 6
-          - Seek
-          - 2
-          - 54
-          - 12
-          - - [21419, 7, SeekID, 3, 57, 4, null]
-            - [21420, 1, SeekPosition, 3, 64, 2, 275]
-        - - 19899
-          - 6
-          - Seek
-          - 2
-          - 69
-          - 12
-          - - [21419, 7, SeekID, 3, 72, 4, null]
-            - [21420, 1, SeekPosition, 3, 79, 2, 440]
-        - - 19899
-          - 6
-          - Seek
-          - 2
-          - 84
-          - 12
-          - - [21419, 7, SeekID, 3, 87, 4, null]
-            - [21420, 1, SeekPosition, 3, 94, 2, 602]
-    - - 357149030
-      - 6
-      - Info
-      - 1
-      - 102
-      - 205
-      - - [17545, 2, Duration, 2, 105, 4, 87336.0]
-        - [19840, 4, MuxingApp, 2, 112, 39, libebml2 v0.10.0 + libmatroska2 v0.10.1]
-        - [22337, 4, WritingApp, 2, 154, 123, 'mkclean 0.5.5 ru from libebml v1.0.0
-            + libmatroska v1.0.0 + mkvmerge v4.1.1 (''Bouncin'' Back'') built on Jul  3
-            2010 22:54:08']
-        - [17505, 5, DateUTC, 2, 280, 8, !!timestamp '2010-08-21 07:23:03']
-        - [29604, 7, SegmentUID, 2, 291, 16, null]
-    - - 374648427
-      - 6
-      - Tracks
-      - 1
-      - 313
-      - 159
-      - - - 174
-          - 6
-          - TrackEntry
-          - 2
-          - 315
-          - 105
-          - - [215, 1, TrackNumber, 3, 317, 1, 1]
-            - [131, 1, TrackType, 3, 320, 1, 1]
-            - [134, 3, CodecID, 3, 323, 15, V_MS/VFW/FOURCC]
-            - [29637, 1, TrackUID, 3, 341, 4, 2422994868]
-            - [156, 1, FlagLacing, 3, 347, 1, 0]
-            - [28135, 1, MinCache, 3, 351, 1, 1]
-            - [25506, 7, CodecPrivate, 3, 355, 40, null]
-            - [2352003, 1, DefaultDuration, 3, 399, 4, 41666666]
-            - [2274716, 3, Language, 3, 407, 3, und]
-            - - 224
-              - 6
-              - Video
-              - 3
-              - 412
-              - 8
-              - - [176, 1, PixelWidth, 4, 414, 2, 854]
-                - [186, 1, PixelHeight, 4, 418, 2, 480]
-        - - 174
-          - 6
-          - TrackEntry
-          - 2
-          - 422
-          - 50
-          - - [215, 1, TrackNumber, 3, 424, 1, 2]
-            - [131, 1, TrackType, 3, 427, 1, 2]
-            - [134, 3, CodecID, 3, 430, 9, A_MPEG/L3]
-            - [29637, 1, TrackUID, 3, 442, 4, 3653291187]
-            - [2352003, 1, DefaultDuration, 3, 450, 4, 24000000]
-            - [2274716, 3, Language, 3, 458, 3, und]
-            - - 225
-              - 6
-              - Audio
-              - 3
-              - 463
-              - 9
-              - - [181, 2, SamplingFrequency, 4, 465, 4, 48000.0]
-                - [159, 1, Channels, 4, 471, 1, 2]
-    - - 307544935
-      - 6
-      - Tags
-      - 1
-      - 478
-      - 156
-      - - - 29555
-          - 6
-          - Tag
-          - 2
-          - 482
-          - 152
-          - - - 25536
-              - 6
-              - Targets
-              - 3
-              - 485
-              - 0
-              - []
-            - - 26568
-              - 6
-              - SimpleTag
-              - 3
-              - 488
-              - 34
-              - - [17827, 4, TagName, 4, 491, 5, TITLE]
-                - [17543, 4, TagString, 4, 499, 23, Big Buck Bunny - test 1]
-            - - 26568
-              - 6
-              - SimpleTag
-              - 3
-              - 525
-              - 23
-              - - [17827, 4, TagName, 4, 528, 13, DATE_RELEASED]
-                - [17543, 4, TagString, 4, 544, 4, '2010']
-            - - 26568
-              - 6
-              - SimpleTag
-              - 3
-              - 551
-              - 83
-              - - [17827, 4, TagName, 4, 554, 7, COMMENT]
-                - [17543, 4, TagString, 4, 564, 70, 'Matroska Validation File1, basic
-                    MPEG4.2 and MP3 with only SimpleBlock']
-    - - 475249515
-      - 6
-      - Cues
-      - 1
-      - 640
-      - 163
-      - - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 642
-          - 12
-          - - [179, 1, CueTime, 3, 644, 1, 0]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 647
-              - 7
-              - - [247, 1, CueTrack, 4, 649, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 652, 2, 771]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 656
-          - 14
-          - - [179, 1, CueTime, 3, 658, 2, 1042]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 662
-              - 8
-              - - [247, 1, CueTrack, 4, 664, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 667, 3, 145582]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 672
-          - 14
-          - - [179, 1, CueTime, 3, 674, 2, 11667]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 678
-              - 8
-              - - [247, 1, CueTrack, 4, 680, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 683, 3, 3131552]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 688
-          - 14
-          - - [179, 1, CueTime, 3, 690, 2, 22083]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 694
-              - 8
-              - - [247, 1, CueTrack, 4, 696, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 699, 3, 5654336]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 704
-          - 14
-          - - [179, 1, CueTime, 3, 706, 2, 32500]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 710
-              - 8
-              - - [247, 1, CueTrack, 4, 712, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 715, 3, 9696374]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 720
-          - 14
-          - - [179, 1, CueTime, 3, 722, 2, 42917]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 726
-              - 8
-              - - [247, 1, CueTrack, 4, 728, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 731, 3, 13440514]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 736
-          - 14
-          - - [179, 1, CueTime, 3, 738, 2, 53333]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 742
-              - 8
-              - - [247, 1, CueTrack, 4, 744, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 747, 3, 16690071]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 752
-          - 15
-          - - [179, 1, CueTime, 3, 754, 2, 56083]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 758
-              - 9
-              - - [247, 1, CueTrack, 4, 760, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 763, 4, 17468879]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 769
-          - 16
-          - - [179, 1, CueTime, 3, 771, 3, 66500]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 776
-              - 9
-              - - [247, 1, CueTrack, 4, 778, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 781, 4, 18628759]
-        - - 187
-          - 6
-          - CuePoint
-          - 2
-          - 787
-          - 16
-          - - [179, 1, CueTime, 3, 789, 3, 76917]
-            - - 183
-              - 6
-              - CueTrackPositions
-              - 3
-              - 794
-              - 9
-              - - [247, 1, CueTrack, 4, 796, 1, 1]
-                - [241, 1, CueClusterPosition, 4, 799, 4, 20732433]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 810
-      - 144804
-      - - [231, 1, Timecode, 2, 812, 1, 0]
-        - [163, 7, SimpleBlock, 2, 816, 5008, null]
-        - [163, 7, SimpleBlock, 2, 5827, 4464, null]
-        - [163, 7, SimpleBlock, 2, 10294, 303, null]
-        - [163, 7, SimpleBlock, 2, 10600, 303, null]
-        - [163, 7, SimpleBlock, 2, 10906, 208, null]
-        - [163, 7, SimpleBlock, 2, 11117, 676, null]
-        - [163, 7, SimpleBlock, 2, 11796, 2465, null]
-        - [163, 7, SimpleBlock, 2, 14264, 2794, null]
-        - [163, 7, SimpleBlock, 2, 17061, 4486, null]
-        - [163, 7, SimpleBlock, 2, 21550, 4966, null]
-        - [163, 7, SimpleBlock, 2, 26519, 580, null]
-        - [163, 7, SimpleBlock, 2, 27102, 4476, null]
-        - [163, 7, SimpleBlock, 2, 31581, 3077, null]
-        - [163, 7, SimpleBlock, 2, 34661, 4485, null]
-        - [163, 7, SimpleBlock, 2, 39149, 5117, null]
-        - [163, 7, SimpleBlock, 2, 44269, 1639, null]
-        - [163, 7, SimpleBlock, 2, 45911, 4521, null]
-        - [163, 7, SimpleBlock, 2, 50435, 772, null]
-        - [163, 7, SimpleBlock, 2, 51210, 4543, null]
-        - [163, 7, SimpleBlock, 2, 55756, 3371, null]
-        - [163, 7, SimpleBlock, 2, 59130, 4602, null]
-        - [163, 7, SimpleBlock, 2, 63735, 5427, null]
-        - [163, 7, SimpleBlock, 2, 69165, 1735, null]
-        - [163, 7, SimpleBlock, 2, 70903, 4790, null]
-        - [163, 7, SimpleBlock, 2, 75696, 772, null]
-        - [163, 7, SimpleBlock, 2, 76471, 4905, null]
-        - [163, 7, SimpleBlock, 2, 81379, 1639, null]
-        - [163, 7, SimpleBlock, 2, 83021, 5052, null]
-        - [163, 7, SimpleBlock, 2, 88076, 2697, null]
-        - [163, 7, SimpleBlock, 2, 90776, 5215, null]
-        - [163, 7, SimpleBlock, 2, 95994, 3371, null]
-        - [163, 7, SimpleBlock, 2, 99368, 5630, null]
-        - [163, 7, SimpleBlock, 2, 105001, 5582, null]
-        - [163, 7, SimpleBlock, 2, 110586, 5696, null]
-        - [163, 7, SimpleBlock, 2, 116285, 2505, null]
-        - [163, 7, SimpleBlock, 2, 118793, 6002, null]
-        - [163, 7, SimpleBlock, 2, 124798, 5794, null]
-        - [163, 7, SimpleBlock, 2, 130595, 2601, null]
-        - [163, 7, SimpleBlock, 2, 133199, 6520, null]
-        - [163, 7, SimpleBlock, 2, 139722, 5892, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 145621
-      - 41405
-      - - [231, 1, Timecode, 2, 145623, 2, 1042]
-        - [163, 7, SimpleBlock, 2, 145628, 964, null]
-        - [163, 7, SimpleBlock, 2, 146595, 2504, null]
-        - [163, 7, SimpleBlock, 2, 149102, 7082, null]
-        - [163, 7, SimpleBlock, 2, 156187, 6024, null]
-        - [163, 7, SimpleBlock, 2, 162214, 4237, null]
-        - [163, 7, SimpleBlock, 2, 166454, 7739, null]
-        - [163, 7, SimpleBlock, 2, 174196, 6210, null]
-        - [163, 7, SimpleBlock, 2, 180409, 6617, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 187034
-      - 2944550
-      - - [231, 1, Timecode, 2, 187036, 2, 1250]
-        - [163, 7, SimpleBlock, 2, 187041, 772, null]
-        - [163, 7, SimpleBlock, 2, 187816, 6736, null]
-        - [163, 7, SimpleBlock, 2, 194555, 8731, null]
-        - [163, 7, SimpleBlock, 2, 203289, 6522, null]
-        - [163, 7, SimpleBlock, 2, 209814, 7087, null]
-        - [163, 7, SimpleBlock, 2, 216904, 7323, null]
-        - [163, 7, SimpleBlock, 2, 224230, 7629, null]
-        - [163, 7, SimpleBlock, 2, 231862, 6546, null]
-        - [163, 7, SimpleBlock, 2, 238411, 7860, null]
-        - [163, 7, SimpleBlock, 2, 246274, 7989, null]
-        - [163, 7, SimpleBlock, 2, 254266, 8281, null]
-        - [163, 7, SimpleBlock, 2, 262550, 8399, null]
-        - [163, 7, SimpleBlock, 2, 270952, 5967, null]
-        - [163, 7, SimpleBlock, 2, 276922, 8557, null]
-        - [163, 7, SimpleBlock, 2, 285482, 8820, null]
-        - [163, 7, SimpleBlock, 2, 294305, 8886, null]
-        - [163, 7, SimpleBlock, 2, 303194, 8997, null]
-        - [163, 7, SimpleBlock, 2, 312194, 9160, null]
-        - [163, 7, SimpleBlock, 2, 321357, 6643, null]
-        - [163, 7, SimpleBlock, 2, 328003, 9359, null]
-        - [163, 7, SimpleBlock, 2, 337365, 9630, null]
-        - [163, 7, SimpleBlock, 2, 346998, 10035, null]
-        - [163, 7, SimpleBlock, 2, 357036, 10450, null]
-        - [163, 7, SimpleBlock, 2, 367489, 6641, null]
-        - [163, 7, SimpleBlock, 2, 374133, 11054, null]
-        - [163, 7, SimpleBlock, 2, 385190, 11571, null]
-        - [163, 7, SimpleBlock, 2, 396764, 11910, null]
-        - [163, 7, SimpleBlock, 2, 408677, 4492, null]
-        - [163, 7, SimpleBlock, 2, 413172, 4513, null]
-        - [163, 7, SimpleBlock, 2, 417688, 6931, null]
-        - [163, 7, SimpleBlock, 2, 424622, 5450, null]
-        - [163, 7, SimpleBlock, 2, 430075, 5226, null]
-        - [163, 7, SimpleBlock, 2, 435304, 5387, null]
-        - [163, 7, SimpleBlock, 2, 440694, 5433, null]
-        - [163, 7, SimpleBlock, 2, 446130, 5557, null]
-        - [163, 7, SimpleBlock, 2, 451690, 6163, null]
-        - [163, 7, SimpleBlock, 2, 457856, 5576, null]
-        - [163, 7, SimpleBlock, 2, 463435, 5832, null]
-        - [163, 7, SimpleBlock, 2, 469270, 5718, null]
-        - [163, 7, SimpleBlock, 2, 474991, 5658, null]
-        - [163, 7, SimpleBlock, 2, 480652, 6161, null]
-        - [163, 7, SimpleBlock, 2, 486816, 5455, null]
-        - [163, 7, SimpleBlock, 2, 492274, 5361, null]
-        - [163, 7, SimpleBlock, 2, 497638, 5391, null]
-        - [163, 7, SimpleBlock, 2, 503032, 5249, null]
-        - [163, 7, SimpleBlock, 2, 508284, 5241, null]
-        - [163, 7, SimpleBlock, 2, 513528, 6161, null]
-        - [163, 7, SimpleBlock, 2, 519692, 5189, null]
-        - [163, 7, SimpleBlock, 2, 524884, 5186, null]
-        - [163, 7, SimpleBlock, 2, 530073, 5185, null]
-        - [163, 7, SimpleBlock, 2, 535261, 5443, null]
-        - [163, 7, SimpleBlock, 2, 540707, 5587, null]
-        - [163, 7, SimpleBlock, 2, 546297, 5559, null]
-        - [163, 7, SimpleBlock, 2, 551859, 5899, null]
-        - [163, 7, SimpleBlock, 2, 557761, 6247, null]
-        - [163, 7, SimpleBlock, 2, 564011, 6210, null]
-        - [163, 7, SimpleBlock, 2, 570224, 6362, null]
-        - [163, 7, SimpleBlock, 2, 576589, 5776, null]
-        - [163, 7, SimpleBlock, 2, 582368, 6608, null]
-        - [163, 7, SimpleBlock, 2, 588979, 6560, null]
-        - [163, 7, SimpleBlock, 2, 595542, 6658, null]
-        - [163, 7, SimpleBlock, 2, 602203, 7020, null]
-        - [163, 7, SimpleBlock, 2, 609226, 7107, null]
-        - [163, 7, SimpleBlock, 2, 616336, 6063, null]
-        - [163, 7, SimpleBlock, 2, 622402, 7022, null]
-        - [163, 7, SimpleBlock, 2, 629427, 7149, null]
-        - [163, 7, SimpleBlock, 2, 636579, 7180, null]
-        - [163, 7, SimpleBlock, 2, 643762, 7213, null]
-        - [163, 7, SimpleBlock, 2, 650978, 5967, null]
-        - [163, 7, SimpleBlock, 2, 656948, 7189, null]
-        - [163, 7, SimpleBlock, 2, 664140, 7478, null]
-        - [163, 7, SimpleBlock, 2, 671621, 7488, null]
-        - [163, 7, SimpleBlock, 2, 679112, 7491, null]
-        - [163, 7, SimpleBlock, 2, 686606, 7515, null]
-        - [163, 7, SimpleBlock, 2, 694124, 5873, null]
-        - [163, 7, SimpleBlock, 2, 700000, 7718, null]
-        - [163, 7, SimpleBlock, 2, 707721, 7485, null]
-        - [163, 7, SimpleBlock, 2, 715209, 7448, null]
-        - [163, 7, SimpleBlock, 2, 722660, 7483, null]
-        - [163, 7, SimpleBlock, 2, 730146, 7497, null]
-        - [163, 7, SimpleBlock, 2, 737646, 5682, null]
-        - [163, 7, SimpleBlock, 2, 743331, 7583, null]
-        - [163, 7, SimpleBlock, 2, 750917, 7666, null]
-        - [163, 7, SimpleBlock, 2, 758586, 7792, null]
-        - [163, 7, SimpleBlock, 2, 766381, 7810, null]
-        - [163, 7, SimpleBlock, 2, 774194, 5778, null]
-        - [163, 7, SimpleBlock, 2, 779975, 7823, null]
-        - [163, 7, SimpleBlock, 2, 787801, 7962, null]
-        - [163, 7, SimpleBlock, 2, 795766, 8032, null]
-        - [163, 7, SimpleBlock, 2, 803801, 8119, null]
-        - [163, 7, SimpleBlock, 2, 811923, 8142, null]
-        - [163, 7, SimpleBlock, 2, 820068, 5874, null]
-        - [163, 7, SimpleBlock, 2, 825945, 8045, null]
-        - [163, 7, SimpleBlock, 2, 833993, 8247, null]
-        - [163, 7, SimpleBlock, 2, 842243, 8393, null]
-        - [163, 7, SimpleBlock, 2, 850639, 8264, null]
-        - [163, 7, SimpleBlock, 2, 858906, 6062, null]
-        - [163, 7, SimpleBlock, 2, 864971, 8456, null]
-        - [163, 7, SimpleBlock, 2, 873430, 8595, null]
-        - [163, 7, SimpleBlock, 2, 882028, 8604, null]
-        - [163, 7, SimpleBlock, 2, 890635, 8690, null]
-        - [163, 7, SimpleBlock, 2, 899328, 8682, null]
-        - [163, 7, SimpleBlock, 2, 908013, 5874, null]
-        - [163, 7, SimpleBlock, 2, 913890, 8927, null]
-        - [163, 7, SimpleBlock, 2, 922820, 8768, null]
-        - [163, 7, SimpleBlock, 2, 931591, 9073, null]
-        - [163, 7, SimpleBlock, 2, 940667, 9001, null]
-        - [163, 7, SimpleBlock, 2, 949671, 8907, null]
-        - [163, 7, SimpleBlock, 2, 958581, 5873, null]
-        - [163, 7, SimpleBlock, 2, 964457, 8930, null]
-        - [163, 7, SimpleBlock, 2, 973390, 8900, null]
-        - [163, 7, SimpleBlock, 2, 982293, 9019, null]
-        - [163, 7, SimpleBlock, 2, 991315, 9005, null]
-        - [163, 7, SimpleBlock, 2, 1000323, 5873, null]
-        - [163, 7, SimpleBlock, 2, 1006199, 9000, null]
-        - [163, 7, SimpleBlock, 2, 1015202, 9075, null]
-        - [163, 7, SimpleBlock, 2, 1024280, 9002, null]
-        - [163, 7, SimpleBlock, 2, 1033285, 9161, null]
-        - [163, 7, SimpleBlock, 2, 1042449, 9136, null]
-        - [163, 7, SimpleBlock, 2, 1051588, 5682, null]
-        - [163, 7, SimpleBlock, 2, 1057273, 9178, null]
-        - [163, 7, SimpleBlock, 2, 1066454, 9207, null]
-        - [163, 7, SimpleBlock, 2, 1075664, 9305, null]
-        - [163, 7, SimpleBlock, 2, 1084972, 9626, null]
-        - [163, 7, SimpleBlock, 2, 1094601, 5873, null]
-        - [163, 7, SimpleBlock, 2, 1100477, 9755, null]
-        - [163, 7, SimpleBlock, 2, 1110235, 9724, null]
-        - [163, 7, SimpleBlock, 2, 1119962, 9933, null]
-        - [163, 7, SimpleBlock, 2, 1129898, 9880, null]
-        - [163, 7, SimpleBlock, 2, 1139781, 10249, null]
-        - [163, 7, SimpleBlock, 2, 1150033, 6350, null]
-        - [163, 7, SimpleBlock, 2, 1156386, 10265, null]
-        - [163, 7, SimpleBlock, 2, 1166654, 10385, null]
-        - [163, 7, SimpleBlock, 2, 1177042, 10350, null]
-        - [163, 7, SimpleBlock, 2, 1187395, 10340, null]
-        - [163, 7, SimpleBlock, 2, 1197738, 10483, null]
-        - [163, 7, SimpleBlock, 2, 1208224, 6739, null]
-        - [163, 7, SimpleBlock, 2, 1214966, 10579, null]
-        - [163, 7, SimpleBlock, 2, 1225548, 10512, null]
-        - [163, 7, SimpleBlock, 2, 1236063, 10449, null]
-        - [163, 7, SimpleBlock, 2, 1246515, 10633, null]
-        - [163, 7, SimpleBlock, 2, 1257151, 6642, null]
-        - [163, 7, SimpleBlock, 2, 1263796, 10454, null]
-        - [163, 7, SimpleBlock, 2, 1274253, 10695, null]
-        - [163, 7, SimpleBlock, 2, 1284951, 10452, null]
-        - [163, 7, SimpleBlock, 2, 1295406, 10663, null]
-        - [163, 7, SimpleBlock, 2, 1306072, 10309, null]
-        - [163, 7, SimpleBlock, 2, 1316384, 6547, null]
-        - [163, 7, SimpleBlock, 2, 1322934, 10359, null]
-        - [163, 7, SimpleBlock, 2, 1333296, 10337, null]
-        - [163, 7, SimpleBlock, 2, 1343636, 10027, null]
-        - [163, 7, SimpleBlock, 2, 1353666, 9883, null]
-        - [163, 7, SimpleBlock, 2, 1363552, 6451, null]
-        - [163, 7, SimpleBlock, 2, 1370006, 9643, null]
-        - [163, 7, SimpleBlock, 2, 1379652, 9148, null]
-        - [163, 7, SimpleBlock, 2, 1388803, 8794, null]
-        - [163, 7, SimpleBlock, 2, 1397600, 8468, null]
-        - [163, 7, SimpleBlock, 2, 1406071, 8372, null]
-        - [163, 7, SimpleBlock, 2, 1414446, 6835, null]
-        - [163, 7, SimpleBlock, 2, 1421284, 8121, null]
-        - [163, 7, SimpleBlock, 2, 1429408, 8022, null]
-        - [163, 7, SimpleBlock, 2, 1437433, 8096, null]
-        - [163, 7, SimpleBlock, 2, 1445532, 7920, null]
-        - [163, 7, SimpleBlock, 2, 1453455, 7699, null]
-        - [163, 7, SimpleBlock, 2, 1461157, 6545, null]
-        - [163, 7, SimpleBlock, 2, 1467705, 7707, null]
-        - [163, 7, SimpleBlock, 2, 1475415, 7821, null]
-        - [163, 7, SimpleBlock, 2, 1483239, 7978, null]
-        - [163, 7, SimpleBlock, 2, 1491220, 8241, null]
-        - [163, 7, SimpleBlock, 2, 1499464, 5778, null]
-        - [163, 7, SimpleBlock, 2, 1505245, 8282, null]
-        - [163, 7, SimpleBlock, 2, 1513530, 8598, null]
-        - [163, 7, SimpleBlock, 2, 1522131, 9098, null]
-        - [163, 7, SimpleBlock, 2, 1531232, 9644, null]
-        - [163, 7, SimpleBlock, 2, 1540879, 10086, null]
-        - [163, 7, SimpleBlock, 2, 1550968, 5779, null]
-        - [163, 7, SimpleBlock, 2, 1556750, 10191, null]
-        - [163, 7, SimpleBlock, 2, 1566944, 10458, null]
-        - [163, 7, SimpleBlock, 2, 1577405, 10570, null]
-        - [163, 7, SimpleBlock, 2, 1587978, 11074, null]
-        - [163, 7, SimpleBlock, 2, 1599055, 6158, null]
-        - [163, 7, SimpleBlock, 2, 1605216, 11120, null]
-        - [163, 7, SimpleBlock, 2, 1616339, 11421, null]
-        - [163, 7, SimpleBlock, 2, 1627763, 11589, null]
-        - [163, 7, SimpleBlock, 2, 1639355, 11727, null]
-        - [163, 7, SimpleBlock, 2, 1651085, 11990, null]
-        - [163, 7, SimpleBlock, 2, 1663078, 6352, null]
-        - [163, 7, SimpleBlock, 2, 1669433, 12178, null]
-        - [163, 7, SimpleBlock, 2, 1681614, 12242, null]
-        - [163, 7, SimpleBlock, 2, 1693859, 12403, null]
-        - [163, 7, SimpleBlock, 2, 1706265, 12268, null]
-        - [163, 7, SimpleBlock, 2, 1718536, 12507, null]
-        - [163, 7, SimpleBlock, 2, 1731046, 6450, null]
-        - [163, 7, SimpleBlock, 2, 1737499, 12548, null]
-        - [163, 7, SimpleBlock, 2, 1750050, 12540, null]
-        - [163, 7, SimpleBlock, 2, 1762593, 12616, null]
-        - [163, 7, SimpleBlock, 2, 1775212, 12497, null]
-        - [163, 7, SimpleBlock, 2, 1787712, 5586, null]
-        - [163, 7, SimpleBlock, 2, 1793301, 12619, null]
-        - [163, 7, SimpleBlock, 2, 1805923, 12645, null]
-        - [163, 7, SimpleBlock, 2, 1818571, 12819, null]
-        - [163, 7, SimpleBlock, 2, 1831393, 12553, null]
-        - [163, 7, SimpleBlock, 2, 1843949, 12186, null]
-        - [163, 7, SimpleBlock, 2, 1856138, 6349, null]
-        - [163, 7, SimpleBlock, 2, 1862490, 12232, null]
-        - [163, 7, SimpleBlock, 2, 1874725, 11787, null]
-        - [163, 7, SimpleBlock, 2, 1886515, 12022, null]
-        - [163, 7, SimpleBlock, 2, 1898540, 11715, null]
-        - [163, 7, SimpleBlock, 2, 1910258, 11778, null]
-        - [163, 7, SimpleBlock, 2, 1922039, 6258, null]
-        - [163, 7, SimpleBlock, 2, 1928300, 11504, null]
-        - [163, 7, SimpleBlock, 2, 1939807, 11427, null]
-        - [163, 7, SimpleBlock, 2, 1951237, 11323, null]
-        - [163, 7, SimpleBlock, 2, 1962563, 10800, null]
-        - [163, 7, SimpleBlock, 2, 1973366, 6258, null]
-        - [163, 7, SimpleBlock, 2, 1979627, 10602, null]
-        - [163, 7, SimpleBlock, 2, 1990232, 10219, null]
-        - [163, 7, SimpleBlock, 2, 2000454, 9952, null]
-        - [163, 7, SimpleBlock, 2, 2010409, 10054, null]
-        - [163, 7, SimpleBlock, 2, 2020466, 10129, null]
-        - [163, 7, SimpleBlock, 2, 2030598, 6065, null]
-        - [163, 7, SimpleBlock, 2, 2036666, 10124, null]
-        - [163, 7, SimpleBlock, 2, 2046793, 10209, null]
-        - [163, 7, SimpleBlock, 2, 2057005, 10584, null]
-        - [163, 7, SimpleBlock, 2, 2067592, 10618, null]
-        - [163, 7, SimpleBlock, 2, 2078213, 5970, null]
-        - [163, 7, SimpleBlock, 2, 2084186, 11182, null]
-        - [163, 7, SimpleBlock, 2, 2095371, 11631, null]
-        - [163, 7, SimpleBlock, 2, 2107005, 12268, null]
-        - [163, 7, SimpleBlock, 2, 2119276, 13038, null]
-        - [163, 7, SimpleBlock, 2, 2132317, 13455, null]
-        - [163, 7, SimpleBlock, 2, 2145775, 5970, null]
-        - [163, 7, SimpleBlock, 2, 2151748, 13833, null]
-        - [163, 7, SimpleBlock, 2, 2165584, 13984, null]
-        - [163, 7, SimpleBlock, 2, 2179571, 13708, null]
-        - [163, 7, SimpleBlock, 2, 2193282, 13782, null]
-        - [163, 7, SimpleBlock, 2, 2207067, 14245, null]
-        - [163, 7, SimpleBlock, 2, 2221315, 5680, null]
-        - [163, 7, SimpleBlock, 2, 2226998, 14394, null]
-        - [163, 7, SimpleBlock, 2, 2241395, 14877, null]
-        - [163, 7, SimpleBlock, 2, 2256275, 15072, null]
-        - [163, 7, SimpleBlock, 2, 2271350, 15391, null]
-        - [163, 7, SimpleBlock, 2, 2286744, 5680, null]
-        - [163, 7, SimpleBlock, 2, 2292427, 15642, null]
-        - [163, 7, SimpleBlock, 2, 2308072, 15860, null]
-        - [163, 7, SimpleBlock, 2, 2323935, 16213, null]
-        - [163, 7, SimpleBlock, 2, 2340152, 16528, null]
-        - [163, 7, SimpleBlock, 2, 2356684, 16926, null]
-        - [163, 7, SimpleBlock, 2, 2373613, 5585, null]
-        - [163, 7, SimpleBlock, 2, 2379202, 16873, null]
-        - [163, 7, SimpleBlock, 2, 2396079, 17018, null]
-        - [163, 7, SimpleBlock, 2, 2413101, 16919, null]
-        - [163, 7, SimpleBlock, 2, 2430024, 17045, null]
-        - [163, 7, SimpleBlock, 2, 2447072, 5392, null]
-        - [163, 7, SimpleBlock, 2, 2452468, 16885, null]
-        - [163, 7, SimpleBlock, 2, 2469357, 16916, null]
-        - [163, 7, SimpleBlock, 2, 2486277, 16981, null]
-        - [163, 7, SimpleBlock, 2, 2503262, 16714, null]
-        - [163, 7, SimpleBlock, 2, 2519980, 16876, null]
-        - [163, 7, SimpleBlock, 2, 2536859, 5583, null]
-        - [163, 7, SimpleBlock, 2, 2542446, 16975, null]
-        - [163, 7, SimpleBlock, 2, 2559425, 17112, null]
-        - [163, 7, SimpleBlock, 2, 2576541, 17040, null]
-        - [163, 7, SimpleBlock, 2, 2593585, 17198, null]
-        - [163, 7, SimpleBlock, 2, 2610787, 17325, null]
-        - [163, 7, SimpleBlock, 2, 2628115, 5967, null]
-        - [163, 7, SimpleBlock, 2, 2634086, 17301, null]
-        - [163, 7, SimpleBlock, 2, 2651391, 17363, null]
-        - [163, 7, SimpleBlock, 2, 2668758, 17444, null]
-        - [163, 7, SimpleBlock, 2, 2686206, 17214, null]
-        - [163, 7, SimpleBlock, 2, 2703423, 5968, null]
-        - [163, 7, SimpleBlock, 2, 2709395, 16998, null]
-        - [163, 7, SimpleBlock, 2, 2726397, 16808, null]
-        - [163, 7, SimpleBlock, 2, 2743208, 16300, null]
-        - [163, 7, SimpleBlock, 2, 2759511, 16046, null]
-        - [163, 7, SimpleBlock, 2, 2775560, 15219, null]
-        - [163, 7, SimpleBlock, 2, 2790782, 2313, null]
-        - [163, 7, SimpleBlock, 2, 2793098, 15047, null]
-        - [163, 7, SimpleBlock, 2, 2808148, 14767, null]
-        - [163, 7, SimpleBlock, 2, 2822918, 6352, null]
-        - [163, 7, SimpleBlock, 2, 2829273, 14386, null]
-        - [163, 7, SimpleBlock, 2, 2843662, 14226, null]
-        - [163, 7, SimpleBlock, 2, 2857891, 14208, null]
-        - [163, 7, SimpleBlock, 2, 2872102, 14241, null]
-        - [163, 7, SimpleBlock, 2, 2886346, 5970, null]
-        - [163, 7, SimpleBlock, 2, 2892319, 13992, null]
-        - [163, 7, SimpleBlock, 2, 2906314, 14075, null]
-        - [163, 7, SimpleBlock, 2, 2920392, 13939, null]
-        - [163, 7, SimpleBlock, 2, 2934334, 13791, null]
-        - [163, 7, SimpleBlock, 2, 2948128, 13671, null]
-        - [163, 7, SimpleBlock, 2, 2961802, 5874, null]
-        - [163, 7, SimpleBlock, 2, 2967679, 13547, null]
-        - [163, 7, SimpleBlock, 2, 2981229, 13453, null]
-        - [163, 7, SimpleBlock, 2, 2994685, 13272, null]
-        - [163, 7, SimpleBlock, 2, 3007960, 12962, null]
-        - [163, 7, SimpleBlock, 2, 3020925, 5777, null]
-        - [163, 7, SimpleBlock, 2, 3026705, 12709, null]
-        - [163, 7, SimpleBlock, 2, 3039417, 12244, null]
-        - [163, 7, SimpleBlock, 2, 3051664, 12266, null]
-        - [163, 7, SimpleBlock, 2, 3063933, 12052, null]
-        - [163, 7, SimpleBlock, 2, 3075988, 11674, null]
-        - [163, 7, SimpleBlock, 2, 3087665, 4334, null]
-        - [163, 7, SimpleBlock, 2, 3092002, 10707, null]
-        - [163, 7, SimpleBlock, 2, 3102712, 10379, null]
-        - [163, 7, SimpleBlock, 2, 3113094, 9656, null]
-        - [163, 7, SimpleBlock, 2, 3122753, 8831, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 3131592
-      - 2522776
-      - - [231, 1, Timecode, 2, 3131594, 2, 11667]
-        - [163, 7, SimpleBlock, 2, 3131599, 676, null]
-        - [163, 7, SimpleBlock, 2, 3132278, 6066, null]
-        - [163, 7, SimpleBlock, 2, 3138348, 76018, null]
-        - [163, 7, SimpleBlock, 2, 3214369, 1660, null]
-        - [163, 7, SimpleBlock, 2, 3216032, 2664, null]
-        - [163, 7, SimpleBlock, 2, 3218699, 2864, null]
-        - [163, 7, SimpleBlock, 2, 3221566, 2369, null]
-        - [163, 7, SimpleBlock, 2, 3223938, 6547, null]
-        - [163, 7, SimpleBlock, 2, 3230489, 91368, null]
-        - [163, 7, SimpleBlock, 2, 3321860, 8748, null]
-        - [163, 7, SimpleBlock, 2, 3330611, 13105, null]
-        - [163, 7, SimpleBlock, 2, 3343719, 13051, null]
-        - [163, 7, SimpleBlock, 2, 3356773, 6641, null]
-        - [163, 7, SimpleBlock, 2, 3363417, 13474, null]
-        - [163, 7, SimpleBlock, 2, 3376894, 14246, null]
-        - [163, 7, SimpleBlock, 2, 3391143, 14613, null]
-        - [163, 7, SimpleBlock, 2, 3405759, 15195, null]
-        - [163, 7, SimpleBlock, 2, 3420957, 15310, null]
-        - [163, 7, SimpleBlock, 2, 3436270, 6546, null]
-        - [163, 7, SimpleBlock, 2, 3442819, 15441, null]
-        - [163, 7, SimpleBlock, 2, 3458263, 15653, null]
-        - [163, 7, SimpleBlock, 2, 3473919, 15680, null]
-        - [163, 7, SimpleBlock, 2, 3489602, 15627, null]
-        - [163, 7, SimpleBlock, 2, 3505232, 6547, null]
-        - [163, 7, SimpleBlock, 2, 3511782, 15376, null]
-        - [163, 7, SimpleBlock, 2, 3527161, 15431, null]
-        - [163, 7, SimpleBlock, 2, 3542595, 15411, null]
-        - [163, 7, SimpleBlock, 2, 3558009, 15211, null]
-        - [163, 7, SimpleBlock, 2, 3573223, 15589, null]
-        - [163, 7, SimpleBlock, 2, 3588815, 6353, null]
-        - [163, 7, SimpleBlock, 2, 3595171, 15450, null]
-        - [163, 7, SimpleBlock, 2, 3610624, 15443, null]
-        - [163, 7, SimpleBlock, 2, 3626070, 15422, null]
-        - [163, 7, SimpleBlock, 2, 3641495, 15484, null]
-        - [163, 7, SimpleBlock, 2, 3656982, 15369, null]
-        - [163, 7, SimpleBlock, 2, 3672354, 6543, null]
-        - [163, 7, SimpleBlock, 2, 3678900, 15472, null]
-        - [163, 7, SimpleBlock, 2, 3694375, 15538, null]
-        - [163, 7, SimpleBlock, 2, 3709916, 15403, null]
-        - [163, 7, SimpleBlock, 2, 3725322, 15527, null]
-        - [163, 7, SimpleBlock, 2, 3740852, 6353, null]
-        - [163, 7, SimpleBlock, 2, 3747208, 15560, null]
-        - [163, 7, SimpleBlock, 2, 3762771, 15725, null]
-        - [163, 7, SimpleBlock, 2, 3778499, 15805, null]
-        - [163, 7, SimpleBlock, 2, 3794307, 16012, null]
-        - [163, 7, SimpleBlock, 2, 3810322, 15586, null]
-        - [163, 7, SimpleBlock, 2, 3825911, 6355, null]
-        - [163, 7, SimpleBlock, 2, 3832269, 15751, null]
-        - [163, 7, SimpleBlock, 2, 3848023, 15878, null]
-        - [163, 7, SimpleBlock, 2, 3863904, 16069, null]
-        - [163, 7, SimpleBlock, 2, 3879976, 16014, null]
-        - [163, 7, SimpleBlock, 2, 3895993, 6641, null]
-        - [163, 7, SimpleBlock, 2, 3902637, 15962, null]
-        - [163, 7, SimpleBlock, 2, 3918602, 16056, null]
-        - [163, 7, SimpleBlock, 2, 3934661, 16113, null]
-        - [163, 7, SimpleBlock, 2, 3950777, 15808, null]
-        - [163, 7, SimpleBlock, 2, 3966588, 15957, null]
-        - [163, 7, SimpleBlock, 2, 3982548, 5872, null]
-        - [163, 7, SimpleBlock, 2, 3988423, 16047, null]
-        - [163, 7, SimpleBlock, 2, 4004473, 15885, null]
-        - [163, 7, SimpleBlock, 2, 4020361, 15939, null]
-        - [163, 7, SimpleBlock, 2, 4036303, 16219, null]
-        - [163, 7, SimpleBlock, 2, 4052525, 16099, null]
-        - [163, 7, SimpleBlock, 2, 4068627, 5969, null]
-        - [163, 7, SimpleBlock, 2, 4074599, 16044, null]
-        - [163, 7, SimpleBlock, 2, 4090646, 15843, null]
-        - [163, 7, SimpleBlock, 2, 4106492, 15565, null]
-        - [163, 7, SimpleBlock, 2, 4122060, 15513, null]
-        - [163, 7, SimpleBlock, 2, 4137576, 5969, null]
-        - [163, 7, SimpleBlock, 2, 4143548, 15671, null]
-        - [163, 7, SimpleBlock, 2, 4159222, 15472, null]
-        - [163, 7, SimpleBlock, 2, 4174697, 15694, null]
-        - [163, 7, SimpleBlock, 2, 4190394, 15367, null]
-        - [163, 7, SimpleBlock, 2, 4205764, 15550, null]
-        - [163, 7, SimpleBlock, 2, 4221317, 5874, null]
-        - [163, 7, SimpleBlock, 2, 4227194, 15799, null]
-        - [163, 7, SimpleBlock, 2, 4242996, 15468, null]
-        - [163, 7, SimpleBlock, 2, 4258467, 15683, null]
-        - [163, 7, SimpleBlock, 2, 4274153, 15831, null]
-        - [163, 7, SimpleBlock, 2, 4289987, 15649, null]
-        - [163, 7, SimpleBlock, 2, 4305639, 6161, null]
-        - [163, 7, SimpleBlock, 2, 4311803, 15674, null]
-        - [163, 7, SimpleBlock, 2, 4327480, 15947, null]
-        - [163, 7, SimpleBlock, 2, 4343430, 15950, null]
-        - [163, 7, SimpleBlock, 2, 4359383, 16024, null]
-        - [163, 7, SimpleBlock, 2, 4375410, 6546, null]
-        - [163, 7, SimpleBlock, 2, 4381959, 15905, null]
-        - [163, 7, SimpleBlock, 2, 4397867, 15804, null]
-        - [163, 7, SimpleBlock, 2, 4413674, 15923, null]
-        - [163, 7, SimpleBlock, 2, 4429600, 16016, null]
-        - [163, 7, SimpleBlock, 2, 4445619, 15976, null]
-        - [163, 7, SimpleBlock, 2, 4461598, 6161, null]
-        - [163, 7, SimpleBlock, 2, 4467762, 15653, null]
-        - [163, 7, SimpleBlock, 2, 4483418, 15624, null]
-        - [163, 7, SimpleBlock, 2, 4499045, 15816, null]
-        - [163, 7, SimpleBlock, 2, 4514864, 15789, null]
-        - [163, 7, SimpleBlock, 2, 4530656, 6065, null]
-        - [163, 7, SimpleBlock, 2, 4536724, 15807, null]
-        - [163, 7, SimpleBlock, 2, 4552534, 15778, null]
-        - [163, 7, SimpleBlock, 2, 4568315, 16016, null]
-        - [163, 7, SimpleBlock, 2, 4584335, 16391, null]
-        - [163, 7, SimpleBlock, 2, 4600729, 16213, null]
-        - [163, 7, SimpleBlock, 2, 4616945, 5968, null]
-        - [163, 7, SimpleBlock, 2, 4622917, 16515, null]
-        - [163, 7, SimpleBlock, 2, 4639436, 16489, null]
-        - [163, 7, SimpleBlock, 2, 4655928, 16261, null]
-        - [163, 7, SimpleBlock, 2, 4672193, 16569, null]
-        - [163, 7, SimpleBlock, 2, 4688766, 16611, null]
-        - [163, 7, SimpleBlock, 2, 4705380, 6162, null]
-        - [163, 7, SimpleBlock, 2, 4711545, 16272, null]
-        - [163, 7, SimpleBlock, 2, 4727821, 16456, null]
-        - [163, 7, SimpleBlock, 2, 4744281, 16625, null]
-        - [163, 7, SimpleBlock, 2, 4760909, 16309, null]
-        - [163, 7, SimpleBlock, 2, 4777221, 6257, null]
-        - [163, 7, SimpleBlock, 2, 4783481, 16124, null]
-        - [163, 7, SimpleBlock, 2, 4799608, 16054, null]
-        - [163, 7, SimpleBlock, 2, 4815665, 16133, null]
-        - [163, 7, SimpleBlock, 2, 4831801, 16104, null]
-        - [163, 7, SimpleBlock, 2, 4847908, 16074, null]
-        - [163, 7, SimpleBlock, 2, 4863985, 6257, null]
-        - [163, 7, SimpleBlock, 2, 4870245, 15985, null]
-        - [163, 7, SimpleBlock, 2, 4886234, 30557, null]
-        - [163, 7, SimpleBlock, 2, 4916794, 1070, null]
-        - [163, 7, SimpleBlock, 2, 4917867, 1018, null]
-        - [163, 7, SimpleBlock, 2, 4918888, 6547, null]
-        - [163, 7, SimpleBlock, 2, 4925438, 999, null]
-        - [163, 7, SimpleBlock, 2, 4926440, 978, null]
-        - [163, 7, SimpleBlock, 2, 4927421, 1346, null]
-        - [163, 7, SimpleBlock, 2, 4928770, 961, null]
-        - [163, 7, SimpleBlock, 2, 4929734, 2286, null]
-        - [163, 7, SimpleBlock, 2, 4932023, 6739, null]
-        - [163, 7, SimpleBlock, 2, 4938765, 4122, null]
-        - [163, 7, SimpleBlock, 2, 4942890, 4871, null]
-        - [163, 7, SimpleBlock, 2, 4947764, 4809, null]
-        - [163, 7, SimpleBlock, 2, 4952576, 3777, null]
-        - [163, 7, SimpleBlock, 2, 4956356, 4788, null]
-        - [163, 7, SimpleBlock, 2, 4961147, 6451, null]
-        - [163, 7, SimpleBlock, 2, 4967601, 5463, null]
-        - [163, 7, SimpleBlock, 2, 4973067, 6989, null]
-        - [163, 7, SimpleBlock, 2, 4980059, 8594, null]
-        - [163, 7, SimpleBlock, 2, 4988656, 8170, null]
-        - [163, 7, SimpleBlock, 2, 4996829, 6545, null]
-        - [163, 7, SimpleBlock, 2, 5003377, 3838, null]
-        - [163, 7, SimpleBlock, 2, 5007218, 3437, null]
-        - [163, 7, SimpleBlock, 2, 5010658, 2846, null]
-        - [163, 7, SimpleBlock, 2, 5013507, 2664, null]
-        - [163, 7, SimpleBlock, 2, 5016174, 2312, null]
-        - [163, 7, SimpleBlock, 2, 5018489, 6449, null]
-        - [163, 7, SimpleBlock, 2, 5024941, 2172, null]
-        - [163, 7, SimpleBlock, 2, 5027116, 2268, null]
-        - [163, 7, SimpleBlock, 2, 5029387, 2394, null]
-        - [163, 7, SimpleBlock, 2, 5031784, 2501, null]
-        - [163, 7, SimpleBlock, 2, 5034288, 6450, null]
-        - [163, 7, SimpleBlock, 2, 5040741, 2616, null]
-        - [163, 7, SimpleBlock, 2, 5043360, 2571, null]
-        - [163, 7, SimpleBlock, 2, 5045934, 2547, null]
-        - [163, 7, SimpleBlock, 2, 5048484, 2487, null]
-        - [163, 7, SimpleBlock, 2, 5050974, 2602, null]
-        - [163, 7, SimpleBlock, 2, 5053579, 6354, null]
-        - [163, 7, SimpleBlock, 2, 5059936, 2173, null]
-        - [163, 7, SimpleBlock, 2, 5062112, 2151, null]
-        - [163, 7, SimpleBlock, 2, 5064266, 2176, null]
-        - [163, 7, SimpleBlock, 2, 5066445, 2030, null]
-        - [163, 7, SimpleBlock, 2, 5068478, 1997, null]
-        - [163, 7, SimpleBlock, 2, 5070478, 6257, null]
-        - [163, 7, SimpleBlock, 2, 5076738, 1716, null]
-        - [163, 7, SimpleBlock, 2, 5078457, 3963, null]
-        - [163, 7, SimpleBlock, 2, 5082423, 6863, null]
-        - [163, 7, SimpleBlock, 2, 5089289, 5119, null]
-        - [163, 7, SimpleBlock, 2, 5094411, 5199, null]
-        - [163, 7, SimpleBlock, 2, 5099613, 3255, null]
-        - [163, 7, SimpleBlock, 2, 5102871, 4286, null]
-        - [163, 7, SimpleBlock, 2, 5107160, 5759, null]
-        - [163, 7, SimpleBlock, 2, 5112922, 6331, null]
-        - [163, 7, SimpleBlock, 2, 5119256, 6585, null]
-        - [163, 7, SimpleBlock, 2, 5125844, 5201, null]
-        - [163, 7, SimpleBlock, 2, 5131048, 5612, null]
-        - [163, 7, SimpleBlock, 2, 5136663, 4421, null]
-        - [163, 7, SimpleBlock, 2, 5141087, 4525, null]
-        - [163, 7, SimpleBlock, 2, 5145615, 4141, null]
-        - [163, 7, SimpleBlock, 2, 5149759, 5490, null]
-        - [163, 7, SimpleBlock, 2, 5155252, 3473, null]
-        - [163, 7, SimpleBlock, 2, 5158728, 2837, null]
-        - [163, 7, SimpleBlock, 2, 5161568, 3132, null]
-        - [163, 7, SimpleBlock, 2, 5164703, 3646, null]
-        - [163, 7, SimpleBlock, 2, 5168352, 5469, null]
-        - [163, 7, SimpleBlock, 2, 5173824, 5873, null]
-        - [163, 7, SimpleBlock, 2, 5179700, 8756, null]
-        - [163, 7, SimpleBlock, 2, 5188459, 9327, null]
-        - [163, 7, SimpleBlock, 2, 5197789, 8557, null]
-        - [163, 7, SimpleBlock, 2, 5206349, 6774, null]
-        - [163, 7, SimpleBlock, 2, 5213126, 2800, null]
-        - [163, 7, SimpleBlock, 2, 5215929, 6159, null]
-        - [163, 7, SimpleBlock, 2, 5222091, 2426, null]
-        - [163, 7, SimpleBlock, 2, 5224520, 2308, null]
-        - [163, 7, SimpleBlock, 2, 5226831, 2065, null]
-        - [163, 7, SimpleBlock, 2, 5228899, 1848, null]
-        - [163, 7, SimpleBlock, 2, 5230750, 5969, null]
-        - [163, 7, SimpleBlock, 2, 5236722, 1791, null]
-        - [163, 7, SimpleBlock, 2, 5238516, 1759, null]
-        - [163, 7, SimpleBlock, 2, 5240278, 2394, null]
-        - [163, 7, SimpleBlock, 2, 5242675, 2589, null]
-        - [163, 7, SimpleBlock, 2, 5245267, 2474, null]
-        - [163, 7, SimpleBlock, 2, 5247744, 6062, null]
-        - [163, 7, SimpleBlock, 2, 5253809, 2594, null]
-        - [163, 7, SimpleBlock, 2, 5256406, 2693, null]
-        - [163, 7, SimpleBlock, 2, 5259102, 2275, null]
-        - [163, 7, SimpleBlock, 2, 5261380, 1749, null]
-        - [163, 7, SimpleBlock, 2, 5263132, 5968, null]
-        - [163, 7, SimpleBlock, 2, 5269103, 1866, null]
-        - [163, 7, SimpleBlock, 2, 5270972, 1849, null]
-        - [163, 7, SimpleBlock, 2, 5272824, 1718, null]
-        - [163, 7, SimpleBlock, 2, 5274545, 2034, null]
-        - [163, 7, SimpleBlock, 2, 5276582, 1945, null]
-        - [163, 7, SimpleBlock, 2, 5278530, 5969, null]
-        - [163, 7, SimpleBlock, 2, 5284502, 1836, null]
-        - [163, 7, SimpleBlock, 2, 5286341, 2041, null]
-        - [163, 7, SimpleBlock, 2, 5288385, 2254, null]
-        - [163, 7, SimpleBlock, 2, 5290642, 1765, null]
-        - [163, 7, SimpleBlock, 2, 5292410, 1135, null]
-        - [163, 7, SimpleBlock, 2, 5293548, 5872, null]
-        - [163, 7, SimpleBlock, 2, 5299423, 1202, null]
-        - [163, 7, SimpleBlock, 2, 5300628, 1294, null]
-        - [163, 7, SimpleBlock, 2, 5301925, 1459, null]
-        - [163, 7, SimpleBlock, 2, 5303387, 1521, null]
-        - [163, 7, SimpleBlock, 2, 5304911, 6066, null]
-        - [163, 7, SimpleBlock, 2, 5310980, 1531, null]
-        - [163, 7, SimpleBlock, 2, 5312514, 1475, null]
-        - [163, 7, SimpleBlock, 2, 5313992, 1411, null]
-        - [163, 7, SimpleBlock, 2, 5315406, 1211, null]
-        - [163, 7, SimpleBlock, 2, 5316620, 2324, null]
-        - [163, 7, SimpleBlock, 2, 5318947, 6257, null]
-        - [163, 7, SimpleBlock, 2, 5325207, 2000, null]
-        - [163, 7, SimpleBlock, 2, 5327210, 1445, null]
-        - [163, 7, SimpleBlock, 2, 5328658, 1469, null]
-        - [163, 7, SimpleBlock, 2, 5330130, 1727, null]
-        - [163, 7, SimpleBlock, 2, 5331860, 1755, null]
-        - [163, 7, SimpleBlock, 2, 5333618, 6162, null]
-        - [163, 7, SimpleBlock, 2, 5339783, 1839, null]
-        - [163, 7, SimpleBlock, 2, 5341625, 1878, null]
-        - [163, 7, SimpleBlock, 2, 5343506, 4785, null]
-        - [163, 7, SimpleBlock, 2, 5348294, 7508, null]
-        - [163, 7, SimpleBlock, 2, 5355805, 5489, null]
-        - [163, 7, SimpleBlock, 2, 5361297, 9645, null]
-        - [163, 7, SimpleBlock, 2, 5370945, 7838, null]
-        - [163, 7, SimpleBlock, 2, 5378786, 5736, null]
-        - [163, 7, SimpleBlock, 2, 5384525, 5252, null]
-        - [163, 7, SimpleBlock, 2, 5389780, 4668, null]
-        - [163, 7, SimpleBlock, 2, 5394451, 676, null]
-        - [163, 7, SimpleBlock, 2, 5395130, 6160, null]
-        - [163, 7, SimpleBlock, 2, 5401293, 5740, null]
-        - [163, 7, SimpleBlock, 2, 5407036, 5130, null]
-        - [163, 7, SimpleBlock, 2, 5412169, 4879, null]
-        - [163, 7, SimpleBlock, 2, 5417051, 4866, null]
-        - [163, 7, SimpleBlock, 2, 5421920, 6009, null]
-        - [163, 7, SimpleBlock, 2, 5427932, 5490, null]
-        - [163, 7, SimpleBlock, 2, 5433425, 6863, null]
-        - [163, 7, SimpleBlock, 2, 5440291, 7796, null]
-        - [163, 7, SimpleBlock, 2, 5448090, 11253, null]
-        - [163, 7, SimpleBlock, 2, 5459346, 15567, null]
-        - [163, 7, SimpleBlock, 2, 5474916, 12076, null]
-        - [163, 7, SimpleBlock, 2, 5486995, 4531, null]
-        - [163, 7, SimpleBlock, 2, 5491529, 13816, null]
-        - [163, 7, SimpleBlock, 2, 5505348, 11914, null]
-        - [163, 7, SimpleBlock, 2, 5517265, 10621, null]
-        - [163, 7, SimpleBlock, 2, 5527889, 9203, null]
-        - [163, 7, SimpleBlock, 2, 5537095, 4432, null]
-        - [163, 7, SimpleBlock, 2, 5541530, 11010, null]
-        - [163, 7, SimpleBlock, 2, 5552543, 10400, null]
-        - [163, 7, SimpleBlock, 2, 5562946, 10182, null]
-        - [163, 7, SimpleBlock, 2, 5573131, 10107, null]
-        - [163, 7, SimpleBlock, 2, 5583241, 7515, null]
-        - [163, 7, SimpleBlock, 2, 5590759, 4613, null]
-        - [163, 7, SimpleBlock, 2, 5595375, 2891, null]
-        - [163, 7, SimpleBlock, 2, 5598269, 2262, null]
-        - [163, 7, SimpleBlock, 2, 5600534, 2210, null]
-        - [163, 7, SimpleBlock, 2, 5602747, 1779, null]
-        - [163, 7, SimpleBlock, 2, 5604529, 5009, null]
-        - [163, 7, SimpleBlock, 2, 5609541, 1401, null]
-        - [163, 7, SimpleBlock, 2, 5610945, 1046, null]
-        - [163, 7, SimpleBlock, 2, 5611994, 882, null]
-        - [163, 7, SimpleBlock, 2, 5612879, 877, null]
-        - [163, 7, SimpleBlock, 2, 5613759, 984, null]
-        - [163, 7, SimpleBlock, 2, 5614746, 5104, null]
-        - [163, 7, SimpleBlock, 2, 5619853, 1173, null]
-        - [163, 7, SimpleBlock, 2, 5621029, 1175, null]
-        - [163, 7, SimpleBlock, 2, 5622207, 1082, null]
-        - [163, 7, SimpleBlock, 2, 5623292, 1103, null]
-        - [163, 7, SimpleBlock, 2, 5624398, 864, null]
-        - [163, 7, SimpleBlock, 2, 5625265, 5586, null]
-        - [163, 7, SimpleBlock, 2, 5630854, 766, null]
-        - [163, 7, SimpleBlock, 2, 5631623, 867, null]
-        - [163, 7, SimpleBlock, 2, 5632493, 866, null]
-        - [163, 7, SimpleBlock, 2, 5633362, 826, null]
-        - [163, 7, SimpleBlock, 2, 5634191, 5104, null]
-        - [163, 7, SimpleBlock, 2, 5639298, 929, null]
-        - [163, 7, SimpleBlock, 2, 5640230, 984, null]
-        - [163, 7, SimpleBlock, 2, 5641217, 893, null]
-        - [163, 7, SimpleBlock, 2, 5642113, 849, null]
-        - [163, 7, SimpleBlock, 2, 5642965, 751, null]
-        - [163, 7, SimpleBlock, 2, 5643719, 5874, null]
-        - [163, 7, SimpleBlock, 2, 5649596, 664, null]
-        - [163, 7, SimpleBlock, 2, 5650263, 704, null]
-        - [163, 7, SimpleBlock, 2, 5650970, 866, null]
-        - [163, 7, SimpleBlock, 2, 5651839, 932, null]
-        - [163, 7, SimpleBlock, 2, 5652774, 580, null]
-        - [163, 7, SimpleBlock, 2, 5653357, 1011, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 5654376
-      - 4042030
-      - - [231, 1, Timecode, 2, 5654378, 2, 22083]
-        - [163, 7, SimpleBlock, 2, 5654383, 5487, null]
-        - [163, 7, SimpleBlock, 2, 5659874, 25991, null]
-        - [163, 7, SimpleBlock, 2, 5685868, 1412, null]
-        - [163, 7, SimpleBlock, 2, 5687283, 875, null]
-        - [163, 7, SimpleBlock, 2, 5688161, 752, null]
-        - [163, 7, SimpleBlock, 2, 5688916, 652, null]
-        - [163, 7, SimpleBlock, 2, 5689571, 4816, null]
-        - [163, 7, SimpleBlock, 2, 5694390, 631, null]
-        - [163, 7, SimpleBlock, 2, 5695024, 780, null]
-        - [163, 7, SimpleBlock, 2, 5695807, 795, null]
-        - [163, 7, SimpleBlock, 2, 5696605, 832, null]
-        - [163, 7, SimpleBlock, 2, 5697440, 5491, null]
-        - [163, 7, SimpleBlock, 2, 5702934, 816, null]
-        - [163, 7, SimpleBlock, 2, 5703753, 840, null]
-        - [163, 7, SimpleBlock, 2, 5704596, 781, null]
-        - [163, 7, SimpleBlock, 2, 5705380, 678, null]
-        - [163, 7, SimpleBlock, 2, 5706061, 624, null]
-        - [163, 7, SimpleBlock, 2, 5706688, 5585, null]
-        - [163, 7, SimpleBlock, 2, 5712276, 496, null]
-        - [163, 7, SimpleBlock, 2, 5712775, 531, null]
-        - [163, 7, SimpleBlock, 2, 5713309, 559, null]
-        - [163, 7, SimpleBlock, 2, 5713871, 566, null]
-        - [163, 7, SimpleBlock, 2, 5714440, 6450, null]
-        - [163, 7, SimpleBlock, 2, 5720893, 513, null]
-        - [163, 7, SimpleBlock, 2, 5721409, 429, null]
-        - [163, 7, SimpleBlock, 2, 5721841, 485, null]
-        - [163, 7, SimpleBlock, 2, 5722329, 554, null]
-        - [163, 7, SimpleBlock, 2, 5722886, 512, null]
-        - [163, 7, SimpleBlock, 2, 5723401, 6450, null]
-        - [163, 7, SimpleBlock, 2, 5729855, 85844, null]
-        - [163, 7, SimpleBlock, 2, 5815702, 9241, null]
-        - [163, 7, SimpleBlock, 2, 5824946, 13021, null]
-        - [163, 7, SimpleBlock, 2, 5837970, 13020, null]
-        - [163, 7, SimpleBlock, 2, 5850993, 14475, null]
-        - [163, 7, SimpleBlock, 2, 5865471, 6835, null]
-        - [163, 7, SimpleBlock, 2, 5872309, 14579, null]
-        - [163, 7, SimpleBlock, 2, 5886891, 15342, null]
-        - [163, 7, SimpleBlock, 2, 5902236, 15053, null]
-        - [163, 7, SimpleBlock, 2, 5917292, 15560, null]
-        - [163, 7, SimpleBlock, 2, 5932855, 6642, null]
-        - [163, 7, SimpleBlock, 2, 5939500, 15399, null]
-        - [163, 7, SimpleBlock, 2, 5954902, 15560, null]
-        - [163, 7, SimpleBlock, 2, 5970465, 15577, null]
-        - [163, 7, SimpleBlock, 2, 5986045, 15647, null]
-        - [163, 7, SimpleBlock, 2, 6001695, 15358, null]
-        - [163, 7, SimpleBlock, 2, 6017056, 6835, null]
-        - [163, 7, SimpleBlock, 2, 6023894, 15537, null]
-        - [163, 7, SimpleBlock, 2, 6039434, 15598, null]
-        - [163, 7, SimpleBlock, 2, 6055035, 15730, null]
-        - [163, 7, SimpleBlock, 2, 6070768, 15582, null]
-        - [163, 7, SimpleBlock, 2, 6086353, 6351, null]
-        - [163, 7, SimpleBlock, 2, 6092707, 15441, null]
-        - [163, 7, SimpleBlock, 2, 6108151, 15429, null]
-        - [163, 7, SimpleBlock, 2, 6123583, 15534, null]
-        - [163, 7, SimpleBlock, 2, 6139120, 15550, null]
-        - [163, 7, SimpleBlock, 2, 6154673, 15537, null]
-        - [163, 7, SimpleBlock, 2, 6170213, 6546, null]
-        - [163, 7, SimpleBlock, 2, 6176762, 15619, null]
-        - [163, 7, SimpleBlock, 2, 6192384, 15707, null]
-        - [163, 7, SimpleBlock, 2, 6208094, 15679, null]
-        - [163, 7, SimpleBlock, 2, 6223776, 15407, null]
-        - [163, 7, SimpleBlock, 2, 6239186, 15554, null]
-        - [163, 7, SimpleBlock, 2, 6254743, 6448, null]
-        - [163, 7, SimpleBlock, 2, 6261194, 15613, null]
-        - [163, 7, SimpleBlock, 2, 6276810, 15697, null]
-        - [163, 7, SimpleBlock, 2, 6292510, 15583, null]
-        - [163, 7, SimpleBlock, 2, 6308096, 15663, null]
-        - [163, 7, SimpleBlock, 2, 6323762, 5971, null]
-        - [163, 7, SimpleBlock, 2, 6329736, 15636, null]
-        - [163, 7, SimpleBlock, 2, 6345375, 15711, null]
-        - [163, 7, SimpleBlock, 2, 6361089, 15877, null]
-        - [163, 7, SimpleBlock, 2, 6376969, 15632, null]
-        - [163, 7, SimpleBlock, 2, 6392604, 15880, null]
-        - [163, 7, SimpleBlock, 2, 6408487, 5299, null]
-        - [163, 7, SimpleBlock, 2, 6413789, 15875, null]
-        - [163, 7, SimpleBlock, 2, 6429667, 15671, null]
-        - [163, 7, SimpleBlock, 2, 6445341, 15803, null]
-        - [163, 7, SimpleBlock, 2, 6461147, 15793, null]
-        - [163, 7, SimpleBlock, 2, 6476943, 5872, null]
-        - [163, 7, SimpleBlock, 2, 6482818, 16039, null]
-        - [163, 7, SimpleBlock, 2, 6498860, 16305, null]
-        - [163, 7, SimpleBlock, 2, 6515169, 16465, null]
-        - [163, 7, SimpleBlock, 2, 6531638, 16499, null]
-        - [163, 7, SimpleBlock, 2, 6548141, 16741, null]
-        - [163, 7, SimpleBlock, 2, 6564885, 5584, null]
-        - [163, 7, SimpleBlock, 2, 6570473, 17095, null]
-        - [163, 7, SimpleBlock, 2, 6587572, 17131, null]
-        - [163, 7, SimpleBlock, 2, 6604707, 17304, null]
-        - [163, 7, SimpleBlock, 2, 6622015, 17294, null]
-        - [163, 7, SimpleBlock, 2, 6639313, 17485, null]
-        - [163, 7, SimpleBlock, 2, 6656801, 5490, null]
-        - [163, 7, SimpleBlock, 2, 6662295, 17982, null]
-        - [163, 7, SimpleBlock, 2, 6680281, 18072, null]
-        - [163, 7, SimpleBlock, 2, 6698357, 17845, null]
-        - [163, 7, SimpleBlock, 2, 6716206, 18220, null]
-        - [163, 7, SimpleBlock, 2, 6734429, 4914, null]
-        - [163, 7, SimpleBlock, 2, 6739347, 18198, null]
-        - [163, 7, SimpleBlock, 2, 6757549, 18229, null]
-        - [163, 7, SimpleBlock, 2, 6775782, 18246, null]
-        - [163, 7, SimpleBlock, 2, 6794032, 18232, null]
-        - [163, 7, SimpleBlock, 2, 6812268, 18081, null]
-        - [163, 7, SimpleBlock, 2, 6830352, 5586, null]
-        - [163, 7, SimpleBlock, 2, 6835942, 17839, null]
-        - [163, 7, SimpleBlock, 2, 6853785, 18150, null]
-        - [163, 7, SimpleBlock, 2, 6871939, 17811, null]
-        - [163, 7, SimpleBlock, 2, 6889754, 17733, null]
-        - [163, 7, SimpleBlock, 2, 6907491, 17342, null]
-        - [163, 7, SimpleBlock, 2, 6924836, 5393, null]
-        - [163, 7, SimpleBlock, 2, 6930233, 17401, null]
-        - [163, 7, SimpleBlock, 2, 6947638, 17334, null]
-        - [163, 7, SimpleBlock, 2, 6964976, 17208, null]
-        - [163, 7, SimpleBlock, 2, 6982188, 16806, null]
-        - [163, 7, SimpleBlock, 2, 6998997, 5199, null]
-        - [163, 7, SimpleBlock, 2, 7004200, 16683, null]
-        - [163, 7, SimpleBlock, 2, 7020887, 16765, null]
-        - [163, 7, SimpleBlock, 2, 7037656, 16489, null]
-        - [163, 7, SimpleBlock, 2, 7054149, 16415, null]
-        - [163, 7, SimpleBlock, 2, 7070567, 16272, null]
-        - [163, 7, SimpleBlock, 2, 7086842, 4910, null]
-        - [163, 7, SimpleBlock, 2, 7091755, 16065, null]
-        - [163, 7, SimpleBlock, 2, 7107823, 15683, null]
-        - [163, 7, SimpleBlock, 2, 7123509, 15669, null]
-        - [163, 7, SimpleBlock, 2, 7139181, 15353, null]
-        - [163, 7, SimpleBlock, 2, 7154537, 5395, null]
-        - [163, 7, SimpleBlock, 2, 7159935, 15367, null]
-        - [163, 7, SimpleBlock, 2, 7175305, 14998, null]
-        - [163, 7, SimpleBlock, 2, 7190306, 14862, null]
-        - [163, 7, SimpleBlock, 2, 7205171, 15044, null]
-        - [163, 7, SimpleBlock, 2, 7220218, 5872, null]
-        - [163, 7, SimpleBlock, 2, 7226093, 15078, null]
-        - [163, 7, SimpleBlock, 2, 7241174, 14735, null]
-        - [163, 7, SimpleBlock, 2, 7255912, 14895, null]
-        - [163, 7, SimpleBlock, 2, 7270810, 15001, null]
-        - [163, 7, SimpleBlock, 2, 7285814, 14921, null]
-        - [163, 7, SimpleBlock, 2, 7300738, 5778, null]
-        - [163, 7, SimpleBlock, 2, 7306519, 14923, null]
-        - [163, 7, SimpleBlock, 2, 7321445, 14971, null]
-        - [163, 7, SimpleBlock, 2, 7336419, 14927, null]
-        - [163, 7, SimpleBlock, 2, 7351349, 14900, null]
-        - [163, 7, SimpleBlock, 2, 7366252, 15092, null]
-        - [163, 7, SimpleBlock, 2, 7381347, 5485, null]
-        - [163, 7, SimpleBlock, 2, 7386835, 14913, null]
-        - [163, 7, SimpleBlock, 2, 7401751, 14865, null]
-        - [163, 7, SimpleBlock, 2, 7416619, 15019, null]
-        - [163, 7, SimpleBlock, 2, 7431641, 14883, null]
-        - [163, 7, SimpleBlock, 2, 7446527, 5682, null]
-        - [163, 7, SimpleBlock, 2, 7452212, 15002, null]
-        - [163, 7, SimpleBlock, 2, 7467217, 14870, null]
-        - [163, 7, SimpleBlock, 2, 7482090, 14810, null]
-        - [163, 7, SimpleBlock, 2, 7496903, 14940, null]
-        - [163, 7, SimpleBlock, 2, 7511846, 15141, null]
-        - [163, 7, SimpleBlock, 2, 7526990, 5874, null]
-        - [163, 7, SimpleBlock, 2, 7532867, 15044, null]
-        - [163, 7, SimpleBlock, 2, 7547914, 14799, null]
-        - [163, 7, SimpleBlock, 2, 7562716, 14863, null]
-        - [163, 7, SimpleBlock, 2, 7577582, 14982, null]
-        - [163, 7, SimpleBlock, 2, 7592567, 5873, null]
-        - [163, 7, SimpleBlock, 2, 7598443, 14843, null]
-        - [163, 7, SimpleBlock, 2, 7613289, 14979, null]
-        - [163, 7, SimpleBlock, 2, 7628271, 14680, null]
-        - [163, 7, SimpleBlock, 2, 7642954, 14874, null]
-        - [163, 7, SimpleBlock, 2, 7657831, 14871, null]
-        - [163, 7, SimpleBlock, 2, 7672705, 5491, null]
-        - [163, 7, SimpleBlock, 2, 7678199, 14981, null]
-        - [163, 7, SimpleBlock, 2, 7693183, 14699, null]
-        - [163, 7, SimpleBlock, 2, 7707885, 15065, null]
-        - [163, 7, SimpleBlock, 2, 7722953, 14820, null]
-        - [163, 7, SimpleBlock, 2, 7737776, 14760, null]
-        - [163, 7, SimpleBlock, 2, 7752539, 5584, null]
-        - [163, 7, SimpleBlock, 2, 7758126, 14847, null]
-        - [163, 7, SimpleBlock, 2, 7772976, 14937, null]
-        - [163, 7, SimpleBlock, 2, 7787916, 14800, null]
-        - [163, 7, SimpleBlock, 2, 7802719, 15108, null]
-        - [163, 7, SimpleBlock, 2, 7817830, 5107, null]
-        - [163, 7, SimpleBlock, 2, 7822940, 14980, null]
-        - [163, 7, SimpleBlock, 2, 7837923, 15035, null]
-        - [163, 7, SimpleBlock, 2, 7852961, 14959, null]
-        - [163, 7, SimpleBlock, 2, 7867923, 14964, null]
-        - [163, 7, SimpleBlock, 2, 7882890, 14914, null]
-        - [163, 7, SimpleBlock, 2, 7897807, 5490, null]
-        - [163, 7, SimpleBlock, 2, 7903300, 15071, null]
-        - [163, 7, SimpleBlock, 2, 7918374, 14910, null]
-        - [163, 7, SimpleBlock, 2, 7933287, 15206, null]
-        - [163, 7, SimpleBlock, 2, 7948496, 14820, null]
-        - [163, 7, SimpleBlock, 2, 7963319, 5583, null]
-        - [163, 7, SimpleBlock, 2, 7968905, 15074, null]
-        - [163, 7, SimpleBlock, 2, 7983982, 14970, null]
-        - [163, 7, SimpleBlock, 2, 7998955, 15396, null]
-        - [163, 7, SimpleBlock, 2, 8014354, 15402, null]
-        - [163, 7, SimpleBlock, 2, 8029759, 15417, null]
-        - [163, 7, SimpleBlock, 2, 8045179, 5873, null]
-        - [163, 7, SimpleBlock, 2, 8051055, 15643, null]
-        - [163, 7, SimpleBlock, 2, 8066701, 15741, null]
-        - [163, 7, SimpleBlock, 2, 8082445, 15823, null]
-        - [163, 7, SimpleBlock, 2, 8098271, 15968, null]
-        - [163, 7, SimpleBlock, 2, 8114242, 16024, null]
-        - [163, 7, SimpleBlock, 2, 8130269, 5681, null]
-        - [163, 7, SimpleBlock, 2, 8135953, 16190, null]
-        - [163, 7, SimpleBlock, 2, 8152146, 16229, null]
-        - [163, 7, SimpleBlock, 2, 8168378, 16320, null]
-        - [163, 7, SimpleBlock, 2, 8184702, 16427, null]
-        - [163, 7, SimpleBlock, 2, 8201132, 5487, null]
-        - [163, 7, SimpleBlock, 2, 8206623, 16674, null]
-        - [163, 7, SimpleBlock, 2, 8223301, 16862, null]
-        - [163, 7, SimpleBlock, 2, 8240167, 16715, null]
-        - [163, 7, SimpleBlock, 2, 8256886, 17261, null]
-        - [163, 7, SimpleBlock, 2, 8274151, 17477, null]
-        - [163, 7, SimpleBlock, 2, 8291631, 5778, null]
-        - [163, 7, SimpleBlock, 2, 8297413, 17112, null]
-        - [163, 7, SimpleBlock, 2, 8314529, 17366, null]
-        - [163, 7, SimpleBlock, 2, 8331899, 17553, null]
-        - [163, 7, SimpleBlock, 2, 8349456, 17762, null]
-        - [163, 7, SimpleBlock, 2, 8367222, 17629, null]
-        - [163, 7, SimpleBlock, 2, 8384854, 5778, null]
-        - [163, 7, SimpleBlock, 2, 8390636, 17749, null]
-        - [163, 7, SimpleBlock, 2, 8408389, 18009, null]
-        - [163, 7, SimpleBlock, 2, 8426402, 17943, null]
-        - [163, 7, SimpleBlock, 2, 8444349, 17886, null]
-        - [163, 7, SimpleBlock, 2, 8462238, 5777, null]
-        - [163, 7, SimpleBlock, 2, 8468019, 18051, null]
-        - [163, 7, SimpleBlock, 2, 8486074, 17991, null]
-        - [163, 7, SimpleBlock, 2, 8504069, 17883, null]
-        - [163, 7, SimpleBlock, 2, 8521956, 17835, null]
-        - [163, 7, SimpleBlock, 2, 8539795, 17949, null]
-        - [163, 7, SimpleBlock, 2, 8557747, 5775, null]
-        - [163, 7, SimpleBlock, 2, 8563526, 17955, null]
-        - [163, 7, SimpleBlock, 2, 8581485, 17740, null]
-        - [163, 7, SimpleBlock, 2, 8599229, 17608, null]
-        - [163, 7, SimpleBlock, 2, 8616841, 17646, null]
-        - [163, 7, SimpleBlock, 2, 8634490, 1541, null]
-        - [163, 7, SimpleBlock, 2, 8636035, 17615, null]
-        - [163, 7, SimpleBlock, 2, 8653653, 6065, null]
-        - [163, 7, SimpleBlock, 2, 8659722, 17519, null]
-        - [163, 7, SimpleBlock, 2, 8677245, 17265, null]
-        - [163, 7, SimpleBlock, 2, 8694514, 17299, null]
-        - [163, 7, SimpleBlock, 2, 8711817, 17000, null]
-        - [163, 7, SimpleBlock, 2, 8728821, 17199, null]
-        - [163, 7, SimpleBlock, 2, 8746023, 5776, null]
-        - [163, 7, SimpleBlock, 2, 8751803, 16948, null]
-        - [163, 7, SimpleBlock, 2, 8768755, 16961, null]
-        - [163, 7, SimpleBlock, 2, 8785720, 16941, null]
-        - [163, 7, SimpleBlock, 2, 8802665, 16778, null]
-        - [163, 7, SimpleBlock, 2, 8819447, 16807, null]
-        - [163, 7, SimpleBlock, 2, 8836257, 5488, null]
-        - [163, 7, SimpleBlock, 2, 8841749, 16640, null]
-        - [163, 7, SimpleBlock, 2, 8858393, 16528, null]
-        - [163, 7, SimpleBlock, 2, 8874925, 16728, null]
-        - [163, 7, SimpleBlock, 2, 8891656, 16326, null]
-        - [163, 7, SimpleBlock, 2, 8907985, 5489, null]
-        - [163, 7, SimpleBlock, 2, 8913478, 16478, null]
-        - [163, 7, SimpleBlock, 2, 8929959, 16373, null]
-        - [163, 7, SimpleBlock, 2, 8946336, 16728, null]
-        - [163, 7, SimpleBlock, 2, 8963068, 16548, null]
-        - [163, 7, SimpleBlock, 2, 8979620, 16730, null]
-        - [163, 7, SimpleBlock, 2, 8996353, 5779, null]
-        - [163, 7, SimpleBlock, 2, 9002136, 16702, null]
-        - [163, 7, SimpleBlock, 2, 9018842, 16695, null]
-        - [163, 7, SimpleBlock, 2, 9035541, 16575, null]
-        - [163, 7, SimpleBlock, 2, 9052120, 16558, null]
-        - [163, 7, SimpleBlock, 2, 9068682, 16576, null]
-        - [163, 7, SimpleBlock, 2, 9085261, 5585, null]
-        - [163, 7, SimpleBlock, 2, 9090850, 16410, null]
-        - [163, 7, SimpleBlock, 2, 9107264, 16615, null]
-        - [163, 7, SimpleBlock, 2, 9123883, 16629, null]
-        - [163, 7, SimpleBlock, 2, 9140516, 16572, null]
-        - [163, 7, SimpleBlock, 2, 9157091, 5487, null]
-        - [163, 7, SimpleBlock, 2, 9162582, 16740, null]
-        - [163, 7, SimpleBlock, 2, 9179326, 16688, null]
-        - [163, 7, SimpleBlock, 2, 9196018, 16625, null]
-        - [163, 7, SimpleBlock, 2, 9212647, 17417, null]
-        - [163, 7, SimpleBlock, 2, 9230068, 17527, null]
-        - [163, 7, SimpleBlock, 2, 9247598, 5487, null]
-        - [163, 7, SimpleBlock, 2, 9253089, 17348, null]
-        - [163, 7, SimpleBlock, 2, 9270441, 17060, null]
-        - [163, 7, SimpleBlock, 2, 9287505, 16552, null]
-        - [163, 7, SimpleBlock, 2, 9304060, 16344, null]
-        - [163, 7, SimpleBlock, 2, 9320407, 5296, null]
-        - [163, 7, SimpleBlock, 2, 9325706, 16256, null]
-        - [163, 7, SimpleBlock, 2, 9341965, 15758, null]
-        - [163, 7, SimpleBlock, 2, 9357726, 15896, null]
-        - [163, 7, SimpleBlock, 2, 9373625, 15296, null]
-        - [163, 7, SimpleBlock, 2, 9388924, 15339, null]
-        - [163, 7, SimpleBlock, 2, 9404266, 5489, null]
-        - [163, 7, SimpleBlock, 2, 9409758, 14934, null]
-        - [163, 7, SimpleBlock, 2, 9424695, 14798, null]
-        - [163, 7, SimpleBlock, 2, 9439496, 14636, null]
-        - [163, 7, SimpleBlock, 2, 9454135, 14532, null]
-        - [163, 7, SimpleBlock, 2, 9468670, 14366, null]
-        - [163, 7, SimpleBlock, 2, 9483039, 5583, null]
-        - [163, 7, SimpleBlock, 2, 9488625, 14350, null]
-        - [163, 7, SimpleBlock, 2, 9502978, 14273, null]
-        - [163, 7, SimpleBlock, 2, 9517254, 14005, null]
-        - [163, 7, SimpleBlock, 2, 9531262, 14068, null]
-        - [163, 7, SimpleBlock, 2, 9545333, 5583, null]
-        - [163, 7, SimpleBlock, 2, 9550919, 14134, null]
-        - [163, 7, SimpleBlock, 2, 9565056, 13834, null]
-        - [163, 7, SimpleBlock, 2, 9578893, 13920, null]
-        - [163, 7, SimpleBlock, 2, 9592816, 13837, null]
-        - [163, 7, SimpleBlock, 2, 9606656, 13788, null]
-        - [163, 7, SimpleBlock, 2, 9620447, 5487, null]
-        - [163, 7, SimpleBlock, 2, 9625937, 13746, null]
-        - [163, 7, SimpleBlock, 2, 9639686, 13819, null]
-        - [163, 7, SimpleBlock, 2, 9653508, 13907, null]
-        - [163, 7, SimpleBlock, 2, 9667418, 13995, null]
-        - [163, 7, SimpleBlock, 2, 9681416, 964, null]
-        - [163, 7, SimpleBlock, 2, 9682383, 14023, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 9696414
-      - 3744132
-      - - [231, 1, Timecode, 2, 9696416, 2, 32500]
-        - [163, 7, SimpleBlock, 2, 9696421, 5584, null]
-        - [163, 7, SimpleBlock, 2, 9702009, 58285, null]
-        - [163, 7, SimpleBlock, 2, 9760297, 8074, null]
-        - [163, 7, SimpleBlock, 2, 9768374, 11664, null]
-        - [163, 7, SimpleBlock, 2, 9780041, 12436, null]
-        - [163, 7, SimpleBlock, 2, 9792480, 13719, null]
-        - [163, 7, SimpleBlock, 2, 9806202, 5679, null]
-        - [163, 7, SimpleBlock, 2, 9811884, 14117, null]
-        - [163, 7, SimpleBlock, 2, 9826004, 14219, null]
-        - [163, 7, SimpleBlock, 2, 9840226, 14317, null]
-        - [163, 7, SimpleBlock, 2, 9854546, 14481, null]
-        - [163, 7, SimpleBlock, 2, 9869030, 5490, null]
-        - [163, 7, SimpleBlock, 2, 9874523, 14660, null]
-        - [163, 7, SimpleBlock, 2, 9889186, 14854, null]
-        - [163, 7, SimpleBlock, 2, 9904043, 14980, null]
-        - [163, 7, SimpleBlock, 2, 9919026, 15380, null]
-        - [163, 7, SimpleBlock, 2, 9934409, 15522, null]
-        - [163, 7, SimpleBlock, 2, 9949934, 5779, null]
-        - [163, 7, SimpleBlock, 2, 9955716, 15621, null]
-        - [163, 7, SimpleBlock, 2, 9971340, 15669, null]
-        - [163, 7, SimpleBlock, 2, 9987012, 15890, null]
-        - [163, 7, SimpleBlock, 2, 10002905, 16299, null]
-        - [163, 7, SimpleBlock, 2, 10019207, 5969, null]
-        - [163, 7, SimpleBlock, 2, 10025179, 16299, null]
-        - [163, 7, SimpleBlock, 2, 10041482, 16612, null]
-        - [163, 7, SimpleBlock, 2, 10058098, 17028, null]
-        - [163, 7, SimpleBlock, 2, 10075130, 17286, null]
-        - [163, 7, SimpleBlock, 2, 10092420, 17238, null]
-        - [163, 7, SimpleBlock, 2, 10109661, 5585, null]
-        - [163, 7, SimpleBlock, 2, 10115250, 17673, null]
-        - [163, 7, SimpleBlock, 2, 10132927, 17702, null]
-        - [163, 7, SimpleBlock, 2, 10150633, 18215, null]
-        - [163, 7, SimpleBlock, 2, 10168852, 18454, null]
-        - [163, 7, SimpleBlock, 2, 10187310, 18997, null]
-        - [163, 7, SimpleBlock, 2, 10206310, 5969, null]
-        - [163, 7, SimpleBlock, 2, 10212283, 19148, null]
-        - [163, 7, SimpleBlock, 2, 10231435, 19526, null]
-        - [163, 7, SimpleBlock, 2, 10250965, 16685, null]
-        - [163, 7, SimpleBlock, 2, 10267654, 16395, null]
-        - [163, 7, SimpleBlock, 2, 10284052, 5778, null]
-        - [163, 7, SimpleBlock, 2, 10289833, 16114, null]
-        - [163, 7, SimpleBlock, 2, 10305950, 16376, null]
-        - [163, 7, SimpleBlock, 2, 10322329, 16074, null]
-        - [163, 7, SimpleBlock, 2, 10338406, 16202, null]
-        - [163, 7, SimpleBlock, 2, 10354611, 16277, null]
-        - [163, 7, SimpleBlock, 2, 10370891, 5873, null]
-        - [163, 7, SimpleBlock, 2, 10376767, 16211, null]
-        - [163, 7, SimpleBlock, 2, 10392981, 16342, null]
-        - [163, 7, SimpleBlock, 2, 10409326, 16294, null]
-        - [163, 7, SimpleBlock, 2, 10425623, 16227, null]
-        - [163, 7, SimpleBlock, 2, 10441853, 5969, null]
-        - [163, 7, SimpleBlock, 2, 10447825, 16131, null]
-        - [163, 7, SimpleBlock, 2, 10463959, 16256, null]
-        - [163, 7, SimpleBlock, 2, 10480219, 16528, null]
-        - [163, 7, SimpleBlock, 2, 10496750, 16212, null]
-        - [163, 7, SimpleBlock, 2, 10512965, 16157, null]
-        - [163, 7, SimpleBlock, 2, 10529125, 5872, null]
-        - [163, 7, SimpleBlock, 2, 10535000, 16235, null]
-        - [163, 7, SimpleBlock, 2, 10551238, 15984, null]
-        - [163, 7, SimpleBlock, 2, 10567225, 16158, null]
-        - [163, 7, SimpleBlock, 2, 10583386, 16233, null]
-        - [163, 7, SimpleBlock, 2, 10599622, 16083, null]
-        - [163, 7, SimpleBlock, 2, 10615708, 6162, null]
-        - [163, 7, SimpleBlock, 2, 10621873, 16186, null]
-        - [163, 7, SimpleBlock, 2, 10638062, 16047, null]
-        - [163, 7, SimpleBlock, 2, 10654112, 15948, null]
-        - [163, 7, SimpleBlock, 2, 10670063, 16103, null]
-        - [163, 7, SimpleBlock, 2, 10686169, 6065, null]
-        - [163, 7, SimpleBlock, 2, 10692237, 16048, null]
-        - [163, 7, SimpleBlock, 2, 10708288, 16064, null]
-        - [163, 7, SimpleBlock, 2, 10724355, 15899, null]
-        - [163, 7, SimpleBlock, 2, 10740257, 15995, null]
-        - [163, 7, SimpleBlock, 2, 10756255, 16002, null]
-        - [163, 7, SimpleBlock, 2, 10772260, 6065, null]
-        - [163, 7, SimpleBlock, 2, 10778328, 16105, null]
-        - [163, 7, SimpleBlock, 2, 10794436, 15916, null]
-        - [163, 7, SimpleBlock, 2, 10810355, 16022, null]
-        - [163, 7, SimpleBlock, 2, 10826380, 15944, null]
-        - [163, 7, SimpleBlock, 2, 10842327, 6066, null]
-        - [163, 7, SimpleBlock, 2, 10848396, 15894, null]
-        - [163, 7, SimpleBlock, 2, 10864293, 15821, null]
-        - [163, 7, SimpleBlock, 2, 10880117, 15998, null]
-        - [163, 7, SimpleBlock, 2, 10896118, 15774, null]
-        - [163, 7, SimpleBlock, 2, 10911895, 15840, null]
-        - [163, 7, SimpleBlock, 2, 10927738, 6546, null]
-        - [163, 7, SimpleBlock, 2, 10934287, 15857, null]
-        - [163, 7, SimpleBlock, 2, 10950147, 15871, null]
-        - [163, 7, SimpleBlock, 2, 10966021, 15709, null]
-        - [163, 7, SimpleBlock, 2, 10981733, 15836, null]
-        - [163, 7, SimpleBlock, 2, 10997572, 15847, null]
-        - [163, 7, SimpleBlock, 2, 11013422, 6162, null]
-        - [163, 7, SimpleBlock, 2, 11019587, 15943, null]
-        - [163, 7, SimpleBlock, 2, 11035533, 15875, null]
-        - [163, 7, SimpleBlock, 2, 11051411, 15777, null]
-        - [163, 7, SimpleBlock, 2, 11067191, 15896, null]
-        - [163, 7, SimpleBlock, 2, 11083090, 6255, null]
-        - [163, 7, SimpleBlock, 2, 11089348, 15755, null]
-        - [163, 7, SimpleBlock, 2, 11105106, 15815, null]
-        - [163, 7, SimpleBlock, 2, 11120924, 15742, null]
-        - [163, 7, SimpleBlock, 2, 11136669, 15718, null]
-        - [163, 7, SimpleBlock, 2, 11152390, 15628, null]
-        - [163, 7, SimpleBlock, 2, 11168021, 964, null]
-        - [163, 7, SimpleBlock, 2, 11168988, 6161, null]
-        - [163, 7, SimpleBlock, 2, 11175152, 15743, null]
-        - [163, 7, SimpleBlock, 2, 11190898, 15651, null]
-        - [163, 7, SimpleBlock, 2, 11206552, 15646, null]
-        - [163, 7, SimpleBlock, 2, 11222201, 15666, null]
-        - [163, 7, SimpleBlock, 2, 11237870, 15600, null]
-        - [163, 7, SimpleBlock, 2, 11253473, 6354, null]
-        - [163, 7, SimpleBlock, 2, 11259830, 15472, null]
-        - [163, 7, SimpleBlock, 2, 11275305, 15276, null]
-        - [163, 7, SimpleBlock, 2, 11290584, 15429, null]
-        - [163, 7, SimpleBlock, 2, 11306016, 15363, null]
-        - [163, 7, SimpleBlock, 2, 11321382, 15264, null]
-        - [163, 7, SimpleBlock, 2, 11336649, 6256, null]
-        - [163, 7, SimpleBlock, 2, 11342908, 15189, null]
-        - [163, 7, SimpleBlock, 2, 11358100, 15429, null]
-        - [163, 7, SimpleBlock, 2, 11373532, 15182, null]
-        - [163, 7, SimpleBlock, 2, 11388717, 15176, null]
-        - [163, 7, SimpleBlock, 2, 11403896, 6258, null]
-        - [163, 7, SimpleBlock, 2, 11410157, 15160, null]
-        - [163, 7, SimpleBlock, 2, 11425320, 15084, null]
-        - [163, 7, SimpleBlock, 2, 11440407, 15027, null]
-        - [163, 7, SimpleBlock, 2, 11455437, 15087, null]
-        - [163, 7, SimpleBlock, 2, 11470527, 15308, null]
-        - [163, 7, SimpleBlock, 2, 11485838, 6643, null]
-        - [163, 7, SimpleBlock, 2, 11492484, 14960, null]
-        - [163, 7, SimpleBlock, 2, 11507447, 15005, null]
-        - [163, 7, SimpleBlock, 2, 11522455, 15128, null]
-        - [163, 7, SimpleBlock, 2, 11537586, 15124, null]
-        - [163, 7, SimpleBlock, 2, 11552713, 15103, null]
-        - [163, 7, SimpleBlock, 2, 11567819, 6354, null]
-        - [163, 7, SimpleBlock, 2, 11574176, 15080, null]
-        - [163, 7, SimpleBlock, 2, 11589259, 15014, null]
-        - [163, 7, SimpleBlock, 2, 11604276, 14951, null]
-        - [163, 7, SimpleBlock, 2, 11619230, 14915, null]
-        - [163, 7, SimpleBlock, 2, 11634148, 6256, null]
-        - [163, 7, SimpleBlock, 2, 11640407, 14819, null]
-        - [163, 7, SimpleBlock, 2, 11655229, 14729, null]
-        - [163, 7, SimpleBlock, 2, 11669961, 14715, null]
-        - [163, 7, SimpleBlock, 2, 11684679, 14801, null]
-        - [163, 7, SimpleBlock, 2, 11699483, 14828, null]
-        - [163, 7, SimpleBlock, 2, 11714314, 6258, null]
-        - [163, 7, SimpleBlock, 2, 11720575, 14604, null]
-        - [163, 7, SimpleBlock, 2, 11735182, 14649, null]
-        - [163, 7, SimpleBlock, 2, 11749834, 14712, null]
-        - [163, 7, SimpleBlock, 2, 11764549, 14456, null]
-        - [163, 7, SimpleBlock, 2, 11779008, 6162, null]
-        - [163, 7, SimpleBlock, 2, 11785173, 14612, null]
-        - [163, 7, SimpleBlock, 2, 11799788, 14464, null]
-        - [163, 7, SimpleBlock, 2, 11814255, 14548, null]
-        - [163, 7, SimpleBlock, 2, 11828806, 14477, null]
-        - [163, 7, SimpleBlock, 2, 11843286, 14547, null]
-        - [163, 7, SimpleBlock, 2, 11857836, 6162, null]
-        - [163, 7, SimpleBlock, 2, 11864001, 14432, null]
-        - [163, 7, SimpleBlock, 2, 11878436, 14322, null]
-        - [163, 7, SimpleBlock, 2, 11892761, 14270, null]
-        - [163, 7, SimpleBlock, 2, 11907034, 14174, null]
-        - [163, 7, SimpleBlock, 2, 11921211, 14244, null]
-        - [163, 7, SimpleBlock, 2, 11935458, 5968, null]
-        - [163, 7, SimpleBlock, 2, 11941429, 14147, null]
-        - [163, 7, SimpleBlock, 2, 11955579, 14105, null]
-        - [163, 7, SimpleBlock, 2, 11969687, 14050, null]
-        - [163, 7, SimpleBlock, 2, 11983740, 14133, null]
-        - [163, 7, SimpleBlock, 2, 11997876, 5966, null]
-        - [163, 7, SimpleBlock, 2, 12003845, 14114, null]
-        - [163, 7, SimpleBlock, 2, 12017962, 13853, null]
-        - [163, 7, SimpleBlock, 2, 12031818, 14074, null]
-        - [163, 7, SimpleBlock, 2, 12045895, 13788, null]
-        - [163, 7, SimpleBlock, 2, 12059686, 13645, null]
-        - [163, 7, SimpleBlock, 2, 12073334, 5872, null]
-        - [163, 7, SimpleBlock, 2, 12079209, 13645, null]
-        - [163, 7, SimpleBlock, 2, 12092857, 13742, null]
-        - [163, 7, SimpleBlock, 2, 12106602, 13511, null]
-        - [163, 7, SimpleBlock, 2, 12120116, 13642, null]
-        - [163, 7, SimpleBlock, 2, 12133761, 5871, null]
-        - [163, 7, SimpleBlock, 2, 12139635, 13525, null]
-        - [163, 7, SimpleBlock, 2, 12153163, 13417, null]
-        - [163, 7, SimpleBlock, 2, 12166583, 13399, null]
-        - [163, 7, SimpleBlock, 2, 12179985, 13388, null]
-        - [163, 7, SimpleBlock, 2, 12193376, 13531, null]
-        - [163, 7, SimpleBlock, 2, 12206910, 5776, null]
-        - [163, 7, SimpleBlock, 2, 12212689, 13370, null]
-        - [163, 7, SimpleBlock, 2, 12226062, 13288, null]
-        - [163, 7, SimpleBlock, 2, 12239353, 13187, null]
-        - [163, 7, SimpleBlock, 2, 12252543, 13400, null]
-        - [163, 7, SimpleBlock, 2, 12265946, 13324, null]
-        - [163, 7, SimpleBlock, 2, 12279273, 5776, null]
-        - [163, 7, SimpleBlock, 2, 12285052, 13383, null]
-        - [163, 7, SimpleBlock, 2, 12298438, 13326, null]
-        - [163, 7, SimpleBlock, 2, 12311767, 13233, null]
-        - [163, 7, SimpleBlock, 2, 12325003, 13224, null]
-        - [163, 7, SimpleBlock, 2, 12338230, 5393, null]
-        - [163, 7, SimpleBlock, 2, 12343626, 13395, null]
-        - [163, 7, SimpleBlock, 2, 12357024, 13150, null]
-        - [163, 7, SimpleBlock, 2, 12370177, 13085, null]
-        - [163, 7, SimpleBlock, 2, 12383265, 13142, null]
-        - [163, 7, SimpleBlock, 2, 12396410, 12979, null]
-        - [163, 7, SimpleBlock, 2, 12409392, 5391, null]
-        - [163, 7, SimpleBlock, 2, 12414786, 13151, null]
-        - [163, 7, SimpleBlock, 2, 12427940, 12982, null]
-        - [163, 7, SimpleBlock, 2, 12440925, 12778, null]
-        - [163, 7, SimpleBlock, 2, 12453706, 12569, null]
-        - [163, 7, SimpleBlock, 2, 12466278, 5681, null]
-        - [163, 7, SimpleBlock, 2, 12471962, 12672, null]
-        - [163, 7, SimpleBlock, 2, 12484637, 12435, null]
-        - [163, 7, SimpleBlock, 2, 12497075, 12408, null]
-        - [163, 7, SimpleBlock, 2, 12509486, 12258, null]
-        - [163, 7, SimpleBlock, 2, 12521747, 12327, null]
-        - [163, 7, SimpleBlock, 2, 12534077, 5970, null]
-        - [163, 7, SimpleBlock, 2, 12540050, 12242, null]
-        - [163, 7, SimpleBlock, 2, 12552295, 12099, null]
-        - [163, 7, SimpleBlock, 2, 12564397, 12248, null]
-        - [163, 7, SimpleBlock, 2, 12576648, 11962, null]
-        - [163, 7, SimpleBlock, 2, 12588613, 11927, null]
-        - [163, 7, SimpleBlock, 2, 12600543, 5874, null]
-        - [163, 7, SimpleBlock, 2, 12606420, 11981, null]
-        - [163, 7, SimpleBlock, 2, 12618404, 11902, null]
-        - [163, 7, SimpleBlock, 2, 12630309, 11921, null]
-        - [163, 7, SimpleBlock, 2, 12642233, 11689, null]
-        - [163, 7, SimpleBlock, 2, 12653925, 2794, null]
-        - [163, 7, SimpleBlock, 2, 12656722, 11864, null]
-        - [163, 7, SimpleBlock, 2, 12668589, 11531, null]
-        - [163, 7, SimpleBlock, 2, 12680123, 11632, null]
-        - [163, 7, SimpleBlock, 2, 12691758, 5777, null]
-        - [163, 7, SimpleBlock, 2, 12697538, 11429, null]
-        - [163, 7, SimpleBlock, 2, 12708970, 11526, null]
-        - [163, 7, SimpleBlock, 2, 12720499, 11214, null]
-        - [163, 7, SimpleBlock, 2, 12731716, 11362, null]
-        - [163, 7, SimpleBlock, 2, 12743081, 5585, null]
-        - [163, 7, SimpleBlock, 2, 12748669, 11338, null]
-        - [163, 7, SimpleBlock, 2, 12760010, 11249, null]
-        - [163, 7, SimpleBlock, 2, 12771262, 11295, null]
-        - [163, 7, SimpleBlock, 2, 12782560, 11137, null]
-        - [163, 7, SimpleBlock, 2, 12793700, 11203, null]
-        - [163, 7, SimpleBlock, 2, 12804906, 5395, null]
-        - [163, 7, SimpleBlock, 2, 12810304, 11042, null]
-        - [163, 7, SimpleBlock, 2, 12821349, 11145, null]
-        - [163, 7, SimpleBlock, 2, 12832497, 10864, null]
-        - [163, 7, SimpleBlock, 2, 12843364, 10885, null]
-        - [163, 7, SimpleBlock, 2, 12854252, 5680, null]
-        - [163, 7, SimpleBlock, 2, 12859935, 10829, null]
-        - [163, 7, SimpleBlock, 2, 12870767, 10656, null]
-        - [163, 7, SimpleBlock, 2, 12881426, 10698, null]
-        - [163, 7, SimpleBlock, 2, 12892127, 10718, null]
-        - [163, 7, SimpleBlock, 2, 12902848, 10663, null]
-        - [163, 7, SimpleBlock, 2, 12913514, 5393, null]
-        - [163, 7, SimpleBlock, 2, 12918910, 10615, null]
-        - [163, 7, SimpleBlock, 2, 12929528, 10614, null]
-        - [163, 7, SimpleBlock, 2, 12940145, 10528, null]
-        - [163, 7, SimpleBlock, 2, 12950676, 10479, null]
-        - [163, 7, SimpleBlock, 2, 12961158, 10358, null]
-        - [163, 7, SimpleBlock, 2, 12971519, 5487, null]
-        - [163, 7, SimpleBlock, 2, 12977009, 10405, null]
-        - [163, 7, SimpleBlock, 2, 12987417, 10152, null]
-        - [163, 7, SimpleBlock, 2, 12997572, 10226, null]
-        - [163, 7, SimpleBlock, 2, 13007801, 10300, null]
-        - [163, 7, SimpleBlock, 2, 13018104, 5104, null]
-        - [163, 7, SimpleBlock, 2, 13023211, 10268, null]
-        - [163, 7, SimpleBlock, 2, 13033482, 10148, null]
-        - [163, 7, SimpleBlock, 2, 13043633, 10309, null]
-        - [163, 7, SimpleBlock, 2, 13053945, 10178, null]
-        - [163, 7, SimpleBlock, 2, 13064126, 10096, null]
-        - [163, 7, SimpleBlock, 2, 13074225, 5201, null]
-        - [163, 7, SimpleBlock, 2, 13079429, 10085, null]
-        - [163, 7, SimpleBlock, 2, 13089517, 10239, null]
-        - [163, 7, SimpleBlock, 2, 13099759, 10113, null]
-        - [163, 7, SimpleBlock, 2, 13109875, 10129, null]
-        - [163, 7, SimpleBlock, 2, 13120007, 5008, null]
-        - [163, 7, SimpleBlock, 2, 13125018, 10090, null]
-        - [163, 7, SimpleBlock, 2, 13135111, 10152, null]
-        - [163, 7, SimpleBlock, 2, 13145266, 10211, null]
-        - [163, 7, SimpleBlock, 2, 13155480, 9935, null]
-        - [163, 7, SimpleBlock, 2, 13165418, 10088, null]
-        - [163, 7, SimpleBlock, 2, 13175509, 5202, null]
-        - [163, 7, SimpleBlock, 2, 13180714, 9887, null]
-        - [163, 7, SimpleBlock, 2, 13190604, 9798, null]
-        - [163, 7, SimpleBlock, 2, 13200405, 9855, null]
-        - [163, 7, SimpleBlock, 2, 13210263, 9677, null]
-        - [163, 7, SimpleBlock, 2, 13219943, 9497, null]
-        - [163, 7, SimpleBlock, 2, 13229443, 5585, null]
-        - [163, 7, SimpleBlock, 2, 13235031, 9425, null]
-        - [163, 7, SimpleBlock, 2, 13244459, 9598, null]
-        - [163, 7, SimpleBlock, 2, 13254060, 9206, null]
-        - [163, 7, SimpleBlock, 2, 13263269, 9294, null]
-        - [163, 7, SimpleBlock, 2, 13272566, 6354, null]
-        - [163, 7, SimpleBlock, 2, 13278923, 9149, null]
-        - [163, 7, SimpleBlock, 2, 13288075, 9011, null]
-        - [163, 7, SimpleBlock, 2, 13297089, 8892, null]
-        - [163, 7, SimpleBlock, 2, 13305984, 8677, null]
-        - [163, 7, SimpleBlock, 2, 13314664, 8752, null]
-        - [163, 7, SimpleBlock, 2, 13323419, 6547, null]
-        - [163, 7, SimpleBlock, 2, 13329969, 8803, null]
-        - [163, 7, SimpleBlock, 2, 13338775, 8670, null]
-        - [163, 7, SimpleBlock, 2, 13347448, 8642, null]
-        - [163, 7, SimpleBlock, 2, 13356093, 8631, null]
-        - [163, 7, SimpleBlock, 2, 13364727, 5682, null]
-        - [163, 7, SimpleBlock, 2, 13370412, 8570, null]
-        - [163, 7, SimpleBlock, 2, 13378985, 8420, null]
-        - [163, 7, SimpleBlock, 2, 13387408, 8489, null]
-        - [163, 7, SimpleBlock, 2, 13395900, 8492, null]
-        - [163, 7, SimpleBlock, 2, 13404395, 8290, null]
-        - [163, 7, SimpleBlock, 2, 13412688, 2793, null]
-        - [163, 7, SimpleBlock, 2, 13415484, 8296, null]
-        - [163, 7, SimpleBlock, 2, 13423783, 8405, null]
-        - [163, 7, SimpleBlock, 2, 13432191, 8355, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 13440554
-      - 3249549
-      - - [231, 1, Timecode, 2, 13440556, 2, 42917]
-        - [163, 7, SimpleBlock, 2, 13440561, 676, null]
-        - [163, 7, SimpleBlock, 2, 13441240, 5489, null]
-        - [163, 7, SimpleBlock, 2, 13446733, 51446, null]
-        - [163, 7, SimpleBlock, 2, 13498182, 4220, null]
-        - [163, 7, SimpleBlock, 2, 13502405, 6526, null]
-        - [163, 7, SimpleBlock, 2, 13508934, 6389, null]
-        - [163, 7, SimpleBlock, 2, 13515326, 7048, null]
-        - [163, 7, SimpleBlock, 2, 13522377, 5490, null]
-        - [163, 7, SimpleBlock, 2, 13527870, 6914, null]
-        - [163, 7, SimpleBlock, 2, 13534787, 7116, null]
-        - [163, 7, SimpleBlock, 2, 13541906, 7356, null]
-        - [163, 7, SimpleBlock, 2, 13549265, 7645, null]
-        - [163, 7, SimpleBlock, 2, 13556913, 5585, null]
-        - [163, 7, SimpleBlock, 2, 13562501, 7360, null]
-        - [163, 7, SimpleBlock, 2, 13569864, 7213, null]
-        - [163, 7, SimpleBlock, 2, 13577080, 7193, null]
-        - [163, 7, SimpleBlock, 2, 13584276, 7232, null]
-        - [163, 7, SimpleBlock, 2, 13591511, 7281, null]
-        - [163, 7, SimpleBlock, 2, 13598795, 5680, null]
-        - [163, 7, SimpleBlock, 2, 13604478, 7357, null]
-        - [163, 7, SimpleBlock, 2, 13611838, 7403, null]
-        - [163, 7, SimpleBlock, 2, 13619244, 7427, null]
-        - [163, 7, SimpleBlock, 2, 13626674, 7673, null]
-        - [163, 7, SimpleBlock, 2, 13634350, 5489, null]
-        - [163, 7, SimpleBlock, 2, 13639842, 7819, null]
-        - [163, 7, SimpleBlock, 2, 13647664, 8057, null]
-        - [163, 7, SimpleBlock, 2, 13655724, 8240, null]
-        - [163, 7, SimpleBlock, 2, 13663967, 8555, null]
-        - [163, 7, SimpleBlock, 2, 13672525, 8858, null]
-        - [163, 7, SimpleBlock, 2, 13681386, 5381, null]
-        - [163, 7, SimpleBlock, 2, 13686770, 9423, null]
-        - [163, 7, SimpleBlock, 2, 13696196, 9727, null]
-        - [163, 7, SimpleBlock, 2, 13705926, 10330, null]
-        - [163, 7, SimpleBlock, 2, 13716259, 10596, null]
-        - [163, 7, SimpleBlock, 2, 13726858, 10571, null]
-        - [163, 7, SimpleBlock, 2, 13737432, 5683, null]
-        - [163, 7, SimpleBlock, 2, 13743118, 10613, null]
-        - [163, 7, SimpleBlock, 2, 13753734, 10893, null]
-        - [163, 7, SimpleBlock, 2, 13764630, 11063, null]
-        - [163, 7, SimpleBlock, 2, 13775696, 10909, null]
-        - [163, 7, SimpleBlock, 2, 13786608, 5486, null]
-        - [163, 7, SimpleBlock, 2, 13792097, 10803, null]
-        - [163, 7, SimpleBlock, 2, 13802903, 10949, null]
-        - [163, 7, SimpleBlock, 2, 13813855, 10796, null]
-        - [163, 7, SimpleBlock, 2, 13824654, 10855, null]
-        - [163, 7, SimpleBlock, 2, 13835512, 10674, null]
-        - [163, 7, SimpleBlock, 2, 13846189, 5200, null]
-        - [163, 7, SimpleBlock, 2, 13851392, 10485, null]
-        - [163, 7, SimpleBlock, 2, 13861880, 10737, null]
-        - [163, 7, SimpleBlock, 2, 13872620, 10837, null]
-        - [163, 7, SimpleBlock, 2, 13883460, 11119, null]
-        - [163, 7, SimpleBlock, 2, 13894582, 6161, null]
-        - [163, 7, SimpleBlock, 2, 13900746, 11117, null]
-        - [163, 7, SimpleBlock, 2, 13911866, 11056, null]
-        - [163, 7, SimpleBlock, 2, 13922925, 11181, null]
-        - [163, 7, SimpleBlock, 2, 13934109, 11216, null]
-        - [163, 7, SimpleBlock, 2, 13945328, 11249, null]
-        - [163, 7, SimpleBlock, 2, 13956580, 6161, null]
-        - [163, 7, SimpleBlock, 2, 13962744, 11095, null]
-        - [163, 7, SimpleBlock, 2, 13973842, 11220, null]
-        - [163, 7, SimpleBlock, 2, 13985065, 11147, null]
-        - [163, 7, SimpleBlock, 2, 13996215, 11224, null]
-        - [163, 7, SimpleBlock, 2, 14007442, 11201, null]
-        - [163, 7, SimpleBlock, 2, 14018646, 5491, null]
-        - [163, 7, SimpleBlock, 2, 14024140, 11272, null]
-        - [163, 7, SimpleBlock, 2, 14035415, 11123, null]
-        - [163, 7, SimpleBlock, 2, 14046541, 11354, null]
-        - [163, 7, SimpleBlock, 2, 14057898, 11251, null]
-        - [163, 7, SimpleBlock, 2, 14069152, 5873, null]
-        - [163, 7, SimpleBlock, 2, 14075028, 11219, null]
-        - [163, 7, SimpleBlock, 2, 14086250, 11150, null]
-        - [163, 7, SimpleBlock, 2, 14097403, 11010, null]
-        - [163, 7, SimpleBlock, 2, 14108416, 11187, null]
-        - [163, 7, SimpleBlock, 2, 14119606, 11061, null]
-        - [163, 7, SimpleBlock, 2, 14130670, 5680, null]
-        - [163, 7, SimpleBlock, 2, 14136353, 10999, null]
-        - [163, 7, SimpleBlock, 2, 14147355, 10922, null]
-        - [163, 7, SimpleBlock, 2, 14158280, 10778, null]
-        - [163, 7, SimpleBlock, 2, 14169061, 10831, null]
-        - [163, 7, SimpleBlock, 2, 14179895, 5969, null]
-        - [163, 7, SimpleBlock, 2, 14185867, 10646, null]
-        - [163, 7, SimpleBlock, 2, 14196516, 10783, null]
-        - [163, 7, SimpleBlock, 2, 14207302, 10694, null]
-        - [163, 7, SimpleBlock, 2, 14217999, 10551, null]
-        - [163, 7, SimpleBlock, 2, 14228553, 10232, null]
-        - [163, 7, SimpleBlock, 2, 14238788, 5488, null]
-        - [163, 7, SimpleBlock, 2, 14244279, 10166, null]
-        - [163, 7, SimpleBlock, 2, 14254448, 10369, null]
-        - [163, 7, SimpleBlock, 2, 14264820, 10309, null]
-        - [163, 7, SimpleBlock, 2, 14275132, 10050, null]
-        - [163, 7, SimpleBlock, 2, 14285185, 9831, null]
-        - [163, 7, SimpleBlock, 2, 14295019, 5381, null]
-        - [163, 7, SimpleBlock, 2, 14300403, 9790, null]
-        - [163, 7, SimpleBlock, 2, 14310196, 9781, null]
-        - [163, 7, SimpleBlock, 2, 14319980, 9691, null]
-        - [163, 7, SimpleBlock, 2, 14329674, 9640, null]
-        - [163, 7, SimpleBlock, 2, 14339317, 5968, null]
-        - [163, 7, SimpleBlock, 2, 14345288, 9567, null]
-        - [163, 7, SimpleBlock, 2, 14354858, 9593, null]
-        - [163, 7, SimpleBlock, 2, 14364454, 9481, null]
-        - [163, 7, SimpleBlock, 2, 14373938, 9196, null]
-        - [163, 7, SimpleBlock, 2, 14383137, 9329, null]
-        - [163, 7, SimpleBlock, 2, 14392469, 5970, null]
-        - [163, 7, SimpleBlock, 2, 14398442, 9824, null]
-        - [163, 7, SimpleBlock, 2, 14408269, 10746, null]
-        - [163, 7, SimpleBlock, 2, 14419018, 11335, null]
-        - [163, 7, SimpleBlock, 2, 14430356, 10367, null]
-        - [163, 7, SimpleBlock, 2, 14440726, 12364, null]
-        - [163, 7, SimpleBlock, 2, 14453093, 5489, null]
-        - [163, 7, SimpleBlock, 2, 14458585, 12120, null]
-        - [163, 7, SimpleBlock, 2, 14470708, 12407, null]
-        - [163, 7, SimpleBlock, 2, 14483118, 11822, null]
-        - [163, 7, SimpleBlock, 2, 14494943, 9897, null]
-        - [163, 7, SimpleBlock, 2, 14504843, 5873, null]
-        - [163, 7, SimpleBlock, 2, 14510719, 9439, null]
-        - [163, 7, SimpleBlock, 2, 14520161, 8651, null]
-        - [163, 7, SimpleBlock, 2, 14528815, 7775, null]
-        - [163, 7, SimpleBlock, 2, 14536593, 6840, null]
-        - [163, 7, SimpleBlock, 2, 14543436, 6619, null]
-        - [163, 7, SimpleBlock, 2, 14550058, 5583, null]
-        - [163, 7, SimpleBlock, 2, 14555644, 6141, null]
-        - [163, 7, SimpleBlock, 2, 14561788, 6076, null]
-        - [163, 7, SimpleBlock, 2, 14567867, 6044, null]
-        - [163, 7, SimpleBlock, 2, 14573914, 5676, null]
-        - [163, 7, SimpleBlock, 2, 14579593, 5490, null]
-        - [163, 7, SimpleBlock, 2, 14585086, 5501, null]
-        - [163, 7, SimpleBlock, 2, 14590590, 5301, null]
-        - [163, 7, SimpleBlock, 2, 14595894, 5021, null]
-        - [163, 7, SimpleBlock, 2, 14600918, 4890, null]
-        - [163, 7, SimpleBlock, 2, 14605811, 4454, null]
-        - [163, 7, SimpleBlock, 2, 14610268, 5295, null]
-        - [163, 7, SimpleBlock, 2, 14615566, 4043, null]
-        - [163, 7, SimpleBlock, 2, 14619612, 3760, null]
-        - [163, 7, SimpleBlock, 2, 14623375, 3239, null]
-        - [163, 7, SimpleBlock, 2, 14626617, 3786, null]
-        - [163, 7, SimpleBlock, 2, 14630406, 6048, null]
-        - [163, 7, SimpleBlock, 2, 14636457, 5393, null]
-        - [163, 7, SimpleBlock, 2, 14641853, 7637, null]
-        - [163, 7, SimpleBlock, 2, 14649493, 9427, null]
-        - [163, 7, SimpleBlock, 2, 14658923, 10261, null]
-        - [163, 7, SimpleBlock, 2, 14669187, 10309, null]
-        - [163, 7, SimpleBlock, 2, 14679499, 5485, null]
-        - [163, 7, SimpleBlock, 2, 14684988, 81128, null]
-        - [163, 7, SimpleBlock, 2, 14766119, 2985, null]
-        - [163, 7, SimpleBlock, 2, 14769107, 4541, null]
-        - [163, 7, SimpleBlock, 2, 14773651, 5172, null]
-        - [163, 7, SimpleBlock, 2, 14778826, 7922, null]
-        - [163, 7, SimpleBlock, 2, 14786751, 5392, null]
-        - [163, 7, SimpleBlock, 2, 14792146, 9646, null]
-        - [163, 7, SimpleBlock, 2, 14801795, 12038, null]
-        - [163, 7, SimpleBlock, 2, 14813836, 13795, null]
-        - [163, 7, SimpleBlock, 2, 14827634, 14528, null]
-        - [163, 7, SimpleBlock, 2, 14842165, 5681, null]
-        - [163, 7, SimpleBlock, 2, 14847849, 15597, null]
-        - [163, 7, SimpleBlock, 2, 14863450, 16822, null]
-        - [163, 7, SimpleBlock, 2, 14880276, 18050, null]
-        - [163, 7, SimpleBlock, 2, 14898330, 18837, null]
-        - [163, 7, SimpleBlock, 2, 14917171, 19247, null]
-        - [163, 7, SimpleBlock, 2, 14936421, 5392, null]
-        - [163, 7, SimpleBlock, 2, 14941817, 19069, null]
-        - [163, 7, SimpleBlock, 2, 14960890, 19463, null]
-        - [163, 7, SimpleBlock, 2, 14980357, 19931, null]
-        - [163, 7, SimpleBlock, 2, 15000292, 20799, null]
-        - [163, 7, SimpleBlock, 2, 15021095, 21176, null]
-        - [163, 7, SimpleBlock, 2, 15042274, 5487, null]
-        - [163, 7, SimpleBlock, 2, 15047765, 21537, null]
-        - [163, 7, SimpleBlock, 2, 15069306, 22280, null]
-        - [163, 7, SimpleBlock, 2, 15091590, 22732, null]
-        - [163, 7, SimpleBlock, 2, 15114326, 23371, null]
-        - [163, 7, SimpleBlock, 2, 15137700, 5678, null]
-        - [163, 7, SimpleBlock, 2, 15143382, 23440, null]
-        - [163, 7, SimpleBlock, 2, 15166826, 22016, null]
-        - [163, 7, SimpleBlock, 2, 15188846, 21824, null]
-        - [163, 7, SimpleBlock, 2, 15210674, 21546, null]
-        - [163, 7, SimpleBlock, 2, 15232224, 21501, null]
-        - [163, 7, SimpleBlock, 2, 15253728, 5585, null]
-        - [163, 7, SimpleBlock, 2, 15259317, 22122, null]
-        - [163, 7, SimpleBlock, 2, 15281443, 21956, null]
-        - [163, 7, SimpleBlock, 2, 15303403, 22514, null]
-        - [163, 7, SimpleBlock, 2, 15325921, 22574, null]
-        - [163, 7, SimpleBlock, 2, 15348498, 5489, null]
-        - [163, 7, SimpleBlock, 2, 15353991, 22991, null]
-        - [163, 7, SimpleBlock, 2, 15376986, 23508, null]
-        - [163, 7, SimpleBlock, 2, 15400498, 23870, null]
-        - [163, 7, SimpleBlock, 2, 15424372, 24440, null]
-        - [163, 7, SimpleBlock, 2, 15448816, 25013, null]
-        - [163, 7, SimpleBlock, 2, 15473832, 5199, null]
-        - [163, 7, SimpleBlock, 2, 15479035, 25337, null]
-        - [163, 7, SimpleBlock, 2, 15504376, 24717, null]
-        - [163, 7, SimpleBlock, 2, 15529097, 24623, null]
-        - [163, 7, SimpleBlock, 2, 15553724, 24344, null]
-        - [163, 7, SimpleBlock, 2, 15578072, 23717, null]
-        - [163, 7, SimpleBlock, 2, 15601792, 5680, null]
-        - [163, 7, SimpleBlock, 2, 15607476, 23417, null]
-        - [163, 7, SimpleBlock, 2, 15630897, 23226, null]
-        - [163, 7, SimpleBlock, 2, 15654127, 22676, null]
-        - [163, 7, SimpleBlock, 2, 15676807, 21990, null]
-        - [163, 7, SimpleBlock, 2, 15698800, 5776, null]
-        - [163, 7, SimpleBlock, 2, 15704580, 21261, null]
-        - [163, 7, SimpleBlock, 2, 15725845, 20986, null]
-        - [163, 7, SimpleBlock, 2, 15746835, 20141, null]
-        - [163, 7, SimpleBlock, 2, 15766980, 19845, null]
-        - [163, 7, SimpleBlock, 2, 15786829, 19632, null]
-        - [163, 7, SimpleBlock, 2, 15806464, 5875, null]
-        - [163, 7, SimpleBlock, 2, 15812343, 19280, null]
-        - [163, 7, SimpleBlock, 2, 15831627, 19167, null]
-        - [163, 7, SimpleBlock, 2, 15850798, 19204, null]
-        - [163, 7, SimpleBlock, 2, 15870006, 18863, null]
-        - [163, 7, SimpleBlock, 2, 15888872, 5682, null]
-        - [163, 7, SimpleBlock, 2, 15894558, 18701, null]
-        - [163, 7, SimpleBlock, 2, 15913263, 18677, null]
-        - [163, 7, SimpleBlock, 2, 15931944, 18223, null]
-        - [163, 7, SimpleBlock, 2, 15950171, 18362, null]
-        - [163, 7, SimpleBlock, 2, 15968537, 17943, null]
-        - [163, 7, SimpleBlock, 2, 15986483, 5681, null]
-        - [163, 7, SimpleBlock, 2, 15992168, 17666, null]
-        - [163, 7, SimpleBlock, 2, 16009838, 17249, null]
-        - [163, 7, SimpleBlock, 2, 16027091, 16713, null]
-        - [163, 7, SimpleBlock, 2, 16043807, 16212, null]
-        - [163, 7, SimpleBlock, 2, 16060022, 15866, null]
-        - [163, 7, SimpleBlock, 2, 16075891, 5779, null]
-        - [163, 7, SimpleBlock, 2, 16081673, 15394, null]
-        - [163, 7, SimpleBlock, 2, 16097070, 15150, null]
-        - [163, 7, SimpleBlock, 2, 16112223, 14891, null]
-        - [163, 7, SimpleBlock, 2, 16127117, 14570, null]
-        - [163, 7, SimpleBlock, 2, 16141690, 5777, null]
-        - [163, 7, SimpleBlock, 2, 16147470, 14314, null]
-        - [163, 7, SimpleBlock, 2, 16161787, 13823, null]
-        - [163, 7, SimpleBlock, 2, 16175613, 13404, null]
-        - [163, 7, SimpleBlock, 2, 16189020, 12774, null]
-        - [163, 7, SimpleBlock, 2, 16201797, 12584, null]
-        - [163, 7, SimpleBlock, 2, 16214384, 5584, null]
-        - [163, 7, SimpleBlock, 2, 16219971, 12212, null]
-        - [163, 7, SimpleBlock, 2, 16232186, 11618, null]
-        - [163, 7, SimpleBlock, 2, 16243807, 11021, null]
-        - [163, 7, SimpleBlock, 2, 16254831, 10348, null]
-        - [163, 7, SimpleBlock, 2, 16265182, 2602, null]
-        - [163, 7, SimpleBlock, 2, 16267787, 9776, null]
-        - [163, 7, SimpleBlock, 2, 16277566, 9134, null]
-        - [163, 7, SimpleBlock, 2, 16286703, 8473, null]
-        - [163, 7, SimpleBlock, 2, 16295179, 5872, null]
-        - [163, 7, SimpleBlock, 2, 16301054, 8042, null]
-        - [163, 7, SimpleBlock, 2, 16309099, 6886, null]
-        - [163, 7, SimpleBlock, 2, 16315988, 6278, null]
-        - [163, 7, SimpleBlock, 2, 16322269, 5524, null]
-        - [163, 7, SimpleBlock, 2, 16327796, 5777, null]
-        - [163, 7, SimpleBlock, 2, 16333576, 4813, null]
-        - [163, 7, SimpleBlock, 2, 16338392, 4026, null]
-        - [163, 7, SimpleBlock, 2, 16342421, 3079, null]
-        - [163, 7, SimpleBlock, 2, 16345503, 2908, null]
-        - [163, 7, SimpleBlock, 2, 16348414, 2809, null]
-        - [163, 7, SimpleBlock, 2, 16351226, 5585, null]
-        - [163, 7, SimpleBlock, 2, 16356814, 2952, null]
-        - [163, 7, SimpleBlock, 2, 16359769, 2981, null]
-        - [163, 7, SimpleBlock, 2, 16362753, 3155, null]
-        - [163, 7, SimpleBlock, 2, 16365911, 3321, null]
-        - [163, 7, SimpleBlock, 2, 16369235, 3586, null]
-        - [163, 7, SimpleBlock, 2, 16372824, 5779, null]
-        - [163, 7, SimpleBlock, 2, 16378606, 3776, null]
-        - [163, 7, SimpleBlock, 2, 16382385, 4020, null]
-        - [163, 7, SimpleBlock, 2, 16386408, 4418, null]
-        - [163, 7, SimpleBlock, 2, 16390829, 5383, null]
-        - [163, 7, SimpleBlock, 2, 16396215, 5295, null]
-        - [163, 7, SimpleBlock, 2, 16401513, 6085, null]
-        - [163, 7, SimpleBlock, 2, 16407601, 6943, null]
-        - [163, 7, SimpleBlock, 2, 16414547, 7869, null]
-        - [163, 7, SimpleBlock, 2, 16422419, 8274, null]
-        - [163, 7, SimpleBlock, 2, 16430696, 7703, null]
-        - [163, 7, SimpleBlock, 2, 16438402, 5969, null]
-        - [163, 7, SimpleBlock, 2, 16444374, 7035, null]
-        - [163, 7, SimpleBlock, 2, 16451412, 7128, null]
-        - [163, 7, SimpleBlock, 2, 16458543, 6985, null]
-        - [163, 7, SimpleBlock, 2, 16465531, 6926, null]
-        - [163, 7, SimpleBlock, 2, 16472460, 5872, null]
-        - [163, 7, SimpleBlock, 2, 16478335, 6447, null]
-        - [163, 7, SimpleBlock, 2, 16484785, 5798, null]
-        - [163, 7, SimpleBlock, 2, 16490586, 5291, null]
-        - [163, 7, SimpleBlock, 2, 16495880, 5001, null]
-        - [163, 7, SimpleBlock, 2, 16500884, 4734, null]
-        - [163, 7, SimpleBlock, 2, 16505621, 7311, null]
-        - [163, 7, SimpleBlock, 2, 16512935, 4435, null]
-        - [163, 7, SimpleBlock, 2, 16517373, 6151, null]
-        - [163, 7, SimpleBlock, 2, 16523527, 5456, null]
-        - [163, 7, SimpleBlock, 2, 16528986, 4818, null]
-        - [163, 7, SimpleBlock, 2, 16533807, 5462, null]
-        - [163, 7, SimpleBlock, 2, 16539272, 6159, null]
-        - [163, 7, SimpleBlock, 2, 16545434, 5465, null]
-        - [163, 7, SimpleBlock, 2, 16550902, 5201, null]
-        - [163, 7, SimpleBlock, 2, 16556106, 5002, null]
-        - [163, 7, SimpleBlock, 2, 16561111, 5430, null]
-        - [163, 7, SimpleBlock, 2, 16566544, 5970, null]
-        - [163, 7, SimpleBlock, 2, 16572517, 6146, null]
-        - [163, 7, SimpleBlock, 2, 16578666, 6690, null]
-        - [163, 7, SimpleBlock, 2, 16585359, 7086, null]
-        - [163, 7, SimpleBlock, 2, 16592448, 8078, null]
-        - [163, 7, SimpleBlock, 2, 16600529, 8723, null]
-        - [163, 7, SimpleBlock, 2, 16609255, 6067, null]
-        - [163, 7, SimpleBlock, 2, 16615325, 9113, null]
-        - [163, 7, SimpleBlock, 2, 16624441, 9253, null]
-        - [163, 7, SimpleBlock, 2, 16633697, 10193, null]
-        - [163, 7, SimpleBlock, 2, 16643893, 9354, null]
-        - [163, 7, SimpleBlock, 2, 16653250, 3948, null]
-        - [163, 7, SimpleBlock, 2, 16657201, 9131, null]
-        - [163, 7, SimpleBlock, 2, 16666335, 8881, null]
-        - [163, 7, SimpleBlock, 2, 16675219, 7845, null]
-        - [163, 7, SimpleBlock, 2, 16683067, 7036, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 16690110
-      - 778801
-      - - [231, 1, Timecode, 2, 16690112, 2, 53333]
-        - [163, 7, SimpleBlock, 2, 16690117, 772, null]
-        - [163, 7, SimpleBlock, 2, 16690892, 6162, null]
-        - [163, 7, SimpleBlock, 2, 16697058, 73939, null]
-        - [163, 7, SimpleBlock, 2, 16771000, 2203, null]
-        - [163, 7, SimpleBlock, 2, 16773206, 2982, null]
-        - [163, 7, SimpleBlock, 2, 16776191, 3662, null]
-        - [163, 7, SimpleBlock, 2, 16779856, 4237, null]
-        - [163, 7, SimpleBlock, 2, 16784096, 5968, null]
-        - [163, 7, SimpleBlock, 2, 16790067, 4508, null]
-        - [163, 7, SimpleBlock, 2, 16794578, 4745, null]
-        - [163, 7, SimpleBlock, 2, 16799326, 4972, null]
-        - [163, 7, SimpleBlock, 2, 16804301, 5076, null]
-        - [163, 7, SimpleBlock, 2, 16809380, 6063, null]
-        - [163, 7, SimpleBlock, 2, 16815446, 5439, null]
-        - [163, 7, SimpleBlock, 2, 16820888, 5560, null]
-        - [163, 7, SimpleBlock, 2, 16826451, 5676, null]
-        - [163, 7, SimpleBlock, 2, 16832130, 5844, null]
-        - [163, 7, SimpleBlock, 2, 16837977, 6061, null]
-        - [163, 7, SimpleBlock, 2, 16844041, 5969, null]
-        - [163, 7, SimpleBlock, 2, 16850013, 6557, null]
-        - [163, 7, SimpleBlock, 2, 16856573, 7227, null]
-        - [163, 7, SimpleBlock, 2, 16863803, 7725, null]
-        - [163, 7, SimpleBlock, 2, 16871531, 8407, null]
-        - [163, 7, SimpleBlock, 2, 16879941, 5582, null]
-        - [163, 7, SimpleBlock, 2, 16885526, 8767, null]
-        - [163, 7, SimpleBlock, 2, 16894296, 9382, null]
-        - [163, 7, SimpleBlock, 2, 16903681, 9861, null]
-        - [163, 7, SimpleBlock, 2, 16913545, 10355, null]
-        - [163, 7, SimpleBlock, 2, 16923903, 9733, null]
-        - [163, 7, SimpleBlock, 2, 16933639, 5873, null]
-        - [163, 7, SimpleBlock, 2, 16939515, 9873, null]
-        - [163, 7, SimpleBlock, 2, 16949391, 9813, null]
-        - [163, 7, SimpleBlock, 2, 16959207, 9508, null]
-        - [163, 7, SimpleBlock, 2, 16968718, 11810, null]
-        - [163, 7, SimpleBlock, 2, 16980531, 12852, null]
-        - [163, 7, SimpleBlock, 2, 16993386, 5393, null]
-        - [163, 7, SimpleBlock, 2, 16998782, 11068, null]
-        - [163, 7, SimpleBlock, 2, 17009853, 10499, null]
-        - [163, 7, SimpleBlock, 2, 17020355, 10353, null]
-        - [163, 7, SimpleBlock, 2, 17030711, 9915, null]
-        - [163, 7, SimpleBlock, 2, 17040629, 5873, null]
-        - [163, 7, SimpleBlock, 2, 17046505, 9921, null]
-        - [163, 7, SimpleBlock, 2, 17056429, 9995, null]
-        - [163, 7, SimpleBlock, 2, 17066427, 10146, null]
-        - [163, 7, SimpleBlock, 2, 17076576, 10535, null]
-        - [163, 7, SimpleBlock, 2, 17087114, 10775, null]
-        - [163, 7, SimpleBlock, 2, 17097892, 5873, null]
-        - [163, 7, SimpleBlock, 2, 17103768, 11200, null]
-        - [163, 7, SimpleBlock, 2, 17114971, 12237, null]
-        - [163, 7, SimpleBlock, 2, 17127211, 12523, null]
-        - [163, 7, SimpleBlock, 2, 17139737, 12799, null]
-        - [163, 7, SimpleBlock, 2, 17152539, 6353, null]
-        - [163, 7, SimpleBlock, 2, 17158895, 12844, null]
-        - [163, 7, SimpleBlock, 2, 17171742, 13331, null]
-        - [163, 7, SimpleBlock, 2, 17185076, 13494, null]
-        - [163, 7, SimpleBlock, 2, 17198573, 13391, null]
-        - [163, 7, SimpleBlock, 2, 17211967, 13210, null]
-        - [163, 7, SimpleBlock, 2, 17225180, 5776, null]
-        - [163, 7, SimpleBlock, 2, 17230959, 12707, null]
-        - [163, 7, SimpleBlock, 2, 17243669, 12771, null]
-        - [163, 7, SimpleBlock, 2, 17256443, 12524, null]
-        - [163, 7, SimpleBlock, 2, 17268970, 12340, null]
-        - [163, 7, SimpleBlock, 2, 17281313, 12283, null]
-        - [163, 7, SimpleBlock, 2, 17293599, 5297, null]
-        - [163, 7, SimpleBlock, 2, 17298899, 12150, null]
-        - [163, 7, SimpleBlock, 2, 17311052, 12123, null]
-        - [163, 7, SimpleBlock, 2, 17323178, 11543, null]
-        - [163, 7, SimpleBlock, 2, 17334724, 10955, null]
-        - [163, 7, SimpleBlock, 2, 17345682, 5487, null]
-        - [163, 7, SimpleBlock, 2, 17351172, 10655, null]
-        - [163, 7, SimpleBlock, 2, 17361830, 10831, null]
-        - [163, 7, SimpleBlock, 2, 17372664, 11824, null]
-        - [163, 7, SimpleBlock, 2, 17384491, 12199, null]
-        - [163, 7, SimpleBlock, 2, 17396693, 10612, null]
-        - [163, 7, SimpleBlock, 2, 17407308, 6162, null]
-        - [163, 7, SimpleBlock, 2, 17413473, 10118, null]
-        - [163, 7, SimpleBlock, 2, 17423594, 9704, null]
-        - [163, 7, SimpleBlock, 2, 17433301, 8930, null]
-        - [163, 7, SimpleBlock, 2, 17442234, 8418, null]
-        - [163, 7, SimpleBlock, 2, 17450655, 676, null]
-        - [163, 7, SimpleBlock, 2, 17451334, 8595, null]
-        - [163, 7, SimpleBlock, 2, 17459932, 8979, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 17468918
-      - 1159873
-      - - [231, 1, Timecode, 2, 17468920, 2, 56083]
-        - [163, 7, SimpleBlock, 2, 17468925, 676, null]
-        - [163, 7, SimpleBlock, 2, 17469604, 5777, null]
-        - [163, 7, SimpleBlock, 2, 17475385, 22461, null]
-        - [163, 7, SimpleBlock, 2, 17497849, 2509, null]
-        - [163, 7, SimpleBlock, 2, 17500361, 1485, null]
-        - [163, 7, SimpleBlock, 2, 17501849, 320, null]
-        - [163, 7, SimpleBlock, 2, 17502172, 6258, null]
-        - [163, 7, SimpleBlock, 2, 17508433, 245, null]
-        - [163, 7, SimpleBlock, 2, 17508681, 248, null]
-        - [163, 7, SimpleBlock, 2, 17508932, 233, null]
-        - [163, 7, SimpleBlock, 2, 17509168, 218, null]
-        - [163, 7, SimpleBlock, 2, 17509389, 238, null]
-        - [163, 7, SimpleBlock, 2, 17509630, 6353, null]
-        - [163, 7, SimpleBlock, 2, 17515986, 232, null]
-        - [163, 7, SimpleBlock, 2, 17516221, 224, null]
-        - [163, 7, SimpleBlock, 2, 17516448, 235, null]
-        - [163, 7, SimpleBlock, 2, 17516686, 330, null]
-        - [163, 7, SimpleBlock, 2, 17517019, 6545, null]
-        - [163, 7, SimpleBlock, 2, 17523567, 414, null]
-        - [163, 7, SimpleBlock, 2, 17523984, 499, null]
-        - [163, 7, SimpleBlock, 2, 17524486, 554, null]
-        - [163, 7, SimpleBlock, 2, 17525043, 614, null]
-        - [163, 7, SimpleBlock, 2, 17525660, 598, null]
-        - [163, 7, SimpleBlock, 2, 17526261, 5487, null]
-        - [163, 7, SimpleBlock, 2, 17531751, 640, null]
-        - [163, 7, SimpleBlock, 2, 17532394, 771, null]
-        - [163, 7, SimpleBlock, 2, 17533168, 715, null]
-        - [163, 7, SimpleBlock, 2, 17533886, 643, null]
-        - [163, 7, SimpleBlock, 2, 17534532, 686, null]
-        - [163, 7, SimpleBlock, 2, 17535221, 5779, null]
-        - [163, 7, SimpleBlock, 2, 17541003, 763, null]
-        - [163, 7, SimpleBlock, 2, 17541769, 1464, null]
-        - [163, 7, SimpleBlock, 2, 17543236, 1514, null]
-        - [163, 7, SimpleBlock, 2, 17544753, 1618, null]
-        - [163, 7, SimpleBlock, 2, 17546374, 5969, null]
-        - [163, 7, SimpleBlock, 2, 17552346, 1804, null]
-        - [163, 7, SimpleBlock, 2, 17554153, 1848, null]
-        - [163, 7, SimpleBlock, 2, 17556004, 1926, null]
-        - [163, 7, SimpleBlock, 2, 17557933, 1630, null]
-        - [163, 7, SimpleBlock, 2, 17559566, 1113, null]
-        - [163, 7, SimpleBlock, 2, 17560682, 5295, null]
-        - [163, 7, SimpleBlock, 2, 17565980, 1004, null]
-        - [163, 7, SimpleBlock, 2, 17566987, 1093, null]
-        - [163, 7, SimpleBlock, 2, 17568083, 1153, null]
-        - [163, 7, SimpleBlock, 2, 17569239, 1172, null]
-        - [163, 7, SimpleBlock, 2, 17570414, 6354, null]
-        - [163, 7, SimpleBlock, 2, 17576771, 1405, null]
-        - [163, 7, SimpleBlock, 2, 17578179, 2397, null]
-        - [163, 7, SimpleBlock, 2, 17580579, 2799, null]
-        - [163, 7, SimpleBlock, 2, 17583381, 3471, null]
-        - [163, 7, SimpleBlock, 2, 17586855, 3841, null]
-        - [163, 7, SimpleBlock, 2, 17590699, 5779, null]
-        - [163, 7, SimpleBlock, 2, 17596481, 4169, null]
-        - [163, 7, SimpleBlock, 2, 17600653, 4518, null]
-        - [163, 7, SimpleBlock, 2, 17605174, 4830, null]
-        - [163, 7, SimpleBlock, 2, 17610007, 5034, null]
-        - [163, 7, SimpleBlock, 2, 17615044, 4699, null]
-        - [163, 7, SimpleBlock, 2, 17619746, 5776, null]
-        - [163, 7, SimpleBlock, 2, 17625525, 3001, null]
-        - [163, 7, SimpleBlock, 2, 17628529, 2180, null]
-        - [163, 7, SimpleBlock, 2, 17630712, 2302, null]
-        - [163, 7, SimpleBlock, 2, 17633017, 2170, null]
-        - [163, 7, SimpleBlock, 2, 17635190, 6062, null]
-        - [163, 7, SimpleBlock, 2, 17641255, 2343, null]
-        - [163, 7, SimpleBlock, 2, 17643601, 2435, null]
-        - [163, 7, SimpleBlock, 2, 17646039, 2472, null]
-        - [163, 7, SimpleBlock, 2, 17648514, 2495, null]
-        - [163, 7, SimpleBlock, 2, 17651012, 2687, null]
-        - [163, 7, SimpleBlock, 2, 17653702, 5393, null]
-        - [163, 7, SimpleBlock, 2, 17659098, 2627, null]
-        - [163, 7, SimpleBlock, 2, 17661728, 2740, null]
-        - [163, 7, SimpleBlock, 2, 17664471, 2819, null]
-        - [163, 7, SimpleBlock, 2, 17667293, 2973, null]
-        - [163, 7, SimpleBlock, 2, 17670269, 3204, null]
-        - [163, 7, SimpleBlock, 2, 17673476, 6255, null]
-        - [163, 7, SimpleBlock, 2, 17679734, 3227, null]
-        - [163, 7, SimpleBlock, 2, 17682964, 3096, null]
-        - [163, 7, SimpleBlock, 2, 17686063, 2755, null]
-        - [163, 7, SimpleBlock, 2, 17688821, 2414, null]
-        - [163, 7, SimpleBlock, 2, 17691238, 5966, null]
-        - [163, 7, SimpleBlock, 2, 17697207, 2199, null]
-        - [163, 7, SimpleBlock, 2, 17699409, 1988, null]
-        - [163, 7, SimpleBlock, 2, 17701400, 1875, null]
-        - [163, 7, SimpleBlock, 2, 17703278, 1877, null]
-        - [163, 7, SimpleBlock, 2, 17705158, 1855, null]
-        - [163, 7, SimpleBlock, 2, 17707016, 5872, null]
-        - [163, 7, SimpleBlock, 2, 17712891, 1753, null]
-        - [163, 7, SimpleBlock, 2, 17714647, 1698, null]
-        - [163, 7, SimpleBlock, 2, 17716348, 1681, null]
-        - [163, 7, SimpleBlock, 2, 17718032, 1668, null]
-        - [163, 7, SimpleBlock, 2, 17719703, 6449, null]
-        - [163, 7, SimpleBlock, 2, 17726155, 1643, null]
-        - [163, 7, SimpleBlock, 2, 17727801, 1573, null]
-        - [163, 7, SimpleBlock, 2, 17729377, 1510, null]
-        - [163, 7, SimpleBlock, 2, 17730890, 1414, null]
-        - [163, 7, SimpleBlock, 2, 17732307, 1290, null]
-        - [163, 7, SimpleBlock, 2, 17733600, 6066, null]
-        - [163, 7, SimpleBlock, 2, 17739669, 1199, null]
-        - [163, 7, SimpleBlock, 2, 17740871, 1170, null]
-        - [163, 7, SimpleBlock, 2, 17742044, 1056, null]
-        - [163, 7, SimpleBlock, 2, 17743103, 914, null]
-        - [163, 7, SimpleBlock, 2, 17744020, 895, null]
-        - [163, 7, SimpleBlock, 2, 17744918, 6256, null]
-        - [163, 7, SimpleBlock, 2, 17751177, 772, null]
-        - [163, 7, SimpleBlock, 2, 17751952, 686, null]
-        - [163, 7, SimpleBlock, 2, 17752641, 801, null]
-        - [163, 7, SimpleBlock, 2, 17753445, 810, null]
-        - [163, 7, SimpleBlock, 2, 17754258, 5201, null]
-        - [163, 7, SimpleBlock, 2, 17759462, 816, null]
-        - [163, 7, SimpleBlock, 2, 17760281, 773, null]
-        - [163, 7, SimpleBlock, 2, 17761057, 767, null]
-        - [163, 7, SimpleBlock, 2, 17761827, 819, null]
-        - [163, 7, SimpleBlock, 2, 17762649, 878, null]
-        - [163, 7, SimpleBlock, 2, 17763530, 5777, null]
-        - [163, 7, SimpleBlock, 2, 17769310, 1042, null]
-        - [163, 7, SimpleBlock, 2, 17770355, 1207, null]
-        - [163, 7, SimpleBlock, 2, 17771565, 1260, null]
-        - [163, 7, SimpleBlock, 2, 17772828, 1224, null]
-        - [163, 7, SimpleBlock, 2, 17774055, 5679, null]
-        - [163, 7, SimpleBlock, 2, 17779737, 1156, null]
-        - [163, 7, SimpleBlock, 2, 17780896, 1212, null]
-        - [163, 7, SimpleBlock, 2, 17782111, 1231, null]
-        - [163, 7, SimpleBlock, 2, 17783345, 1228, null]
-        - [163, 7, SimpleBlock, 2, 17784576, 1295, null]
-        - [163, 7, SimpleBlock, 2, 17785874, 5010, null]
-        - [163, 7, SimpleBlock, 2, 17790887, 1319, null]
-        - [163, 7, SimpleBlock, 2, 17792209, 1331, null]
-        - [163, 7, SimpleBlock, 2, 17793543, 1360, null]
-        - [163, 7, SimpleBlock, 2, 17794906, 1380, null]
-        - [163, 7, SimpleBlock, 2, 17796289, 1470, null]
-        - [163, 7, SimpleBlock, 2, 17797762, 5968, null]
-        - [163, 7, SimpleBlock, 2, 17803733, 1471, null]
-        - [163, 7, SimpleBlock, 2, 17805207, 1209, null]
-        - [163, 7, SimpleBlock, 2, 17806419, 1172, null]
-        - [163, 7, SimpleBlock, 2, 17807594, 1246, null]
-        - [163, 7, SimpleBlock, 2, 17808843, 5874, null]
-        - [163, 7, SimpleBlock, 2, 17814721, 29320, null]
-        - [163, 7, SimpleBlock, 2, 17844044, 4031, null]
-        - [163, 7, SimpleBlock, 2, 17848078, 3115, null]
-        - [163, 7, SimpleBlock, 2, 17851196, 2426, null]
-        - [163, 7, SimpleBlock, 2, 17853625, 3597, null]
-        - [163, 7, SimpleBlock, 2, 17857225, 5586, null]
-        - [163, 7, SimpleBlock, 2, 17862814, 4146, null]
-        - [163, 7, SimpleBlock, 2, 17866963, 4167, null]
-        - [163, 7, SimpleBlock, 2, 17871133, 3878, null]
-        - [163, 7, SimpleBlock, 2, 17875014, 2803, null]
-        - [163, 7, SimpleBlock, 2, 17877820, 772, null]
-        - [163, 7, SimpleBlock, 2, 17878595, 2519, null]
-        - [163, 7, SimpleBlock, 2, 17881117, 5586, null]
-        - [163, 7, SimpleBlock, 2, 17886706, 3475, null]
-        - [163, 7, SimpleBlock, 2, 17890184, 3964, null]
-        - [163, 7, SimpleBlock, 2, 17894151, 3802, null]
-        - [163, 7, SimpleBlock, 2, 17897956, 3434, null]
-        - [163, 7, SimpleBlock, 2, 17901393, 2679, null]
-        - [163, 7, SimpleBlock, 2, 17904075, 5489, null]
-        - [163, 7, SimpleBlock, 2, 17909567, 2362, null]
-        - [163, 7, SimpleBlock, 2, 17911932, 2821, null]
-        - [163, 7, SimpleBlock, 2, 17914756, 3440, null]
-        - [163, 7, SimpleBlock, 2, 17918199, 3659, null]
-        - [163, 7, SimpleBlock, 2, 17921861, 6737, null]
-        - [163, 7, SimpleBlock, 2, 17928601, 3649, null]
-        - [163, 7, SimpleBlock, 2, 17932253, 2521, null]
-        - [163, 7, SimpleBlock, 2, 17934777, 1893, null]
-        - [163, 7, SimpleBlock, 2, 17936673, 2836, null]
-        - [163, 7, SimpleBlock, 2, 17939512, 3377, null]
-        - [163, 7, SimpleBlock, 2, 17942892, 5392, null]
-        - [163, 7, SimpleBlock, 2, 17948287, 3391, null]
-        - [163, 7, SimpleBlock, 2, 17951681, 3300, null]
-        - [163, 7, SimpleBlock, 2, 17954984, 2321, null]
-        - [163, 7, SimpleBlock, 2, 17957308, 1850, null]
-        - [163, 7, SimpleBlock, 2, 17959161, 5585, null]
-        - [163, 7, SimpleBlock, 2, 17964749, 2256, null]
-        - [163, 7, SimpleBlock, 2, 17967008, 2635, null]
-        - [163, 7, SimpleBlock, 2, 17969646, 2856, null]
-        - [163, 7, SimpleBlock, 2, 17972505, 2837, null]
-        - [163, 7, SimpleBlock, 2, 17975345, 2829, null]
-        - [163, 7, SimpleBlock, 2, 17978177, 6256, null]
-        - [163, 7, SimpleBlock, 2, 17984436, 2566, null]
-        - [163, 7, SimpleBlock, 2, 17987005, 2308, null]
-        - [163, 7, SimpleBlock, 2, 17989316, 2064, null]
-        - [163, 7, SimpleBlock, 2, 17991383, 2007, null]
-        - [163, 7, SimpleBlock, 2, 17993393, 2166, null]
-        - [163, 7, SimpleBlock, 2, 17995562, 5393, null]
-        - [163, 7, SimpleBlock, 2, 18000958, 1772, null]
-        - [163, 7, SimpleBlock, 2, 18002733, 1579, null]
-        - [163, 7, SimpleBlock, 2, 18004315, 1467, null]
-        - [163, 7, SimpleBlock, 2, 18005785, 1362, null]
-        - [163, 7, SimpleBlock, 2, 18007150, 5776, null]
-        - [163, 7, SimpleBlock, 2, 18012929, 1411, null]
-        - [163, 7, SimpleBlock, 2, 18014343, 1553, null]
-        - [163, 7, SimpleBlock, 2, 18015899, 1874, null]
-        - [163, 7, SimpleBlock, 2, 18017776, 2222, null]
-        - [163, 7, SimpleBlock, 2, 18020001, 2455, null]
-        - [163, 7, SimpleBlock, 2, 18022459, 5298, null]
-        - [163, 7, SimpleBlock, 2, 18027760, 2969, null]
-        - [163, 7, SimpleBlock, 2, 18030732, 2986, null]
-        - [163, 7, SimpleBlock, 2, 18033721, 2874, null]
-        - [163, 7, SimpleBlock, 2, 18036598, 3500, null]
-        - [163, 7, SimpleBlock, 2, 18040101, 5299, null]
-        - [163, 7, SimpleBlock, 2, 18045403, 5436, null]
-        - [163, 7, SimpleBlock, 2, 18050842, 5687, null]
-        - [163, 7, SimpleBlock, 2, 18056532, 4865, null]
-        - [163, 7, SimpleBlock, 2, 18061400, 4047, null]
-        - [163, 7, SimpleBlock, 2, 18065450, 4424, null]
-        - [163, 7, SimpleBlock, 2, 18069877, 5776, null]
-        - [163, 7, SimpleBlock, 2, 18075656, 4828, null]
-        - [163, 7, SimpleBlock, 2, 18080487, 5030, null]
-        - [163, 7, SimpleBlock, 2, 18085520, 4587, null]
-        - [163, 7, SimpleBlock, 2, 18090110, 3823, null]
-        - [163, 7, SimpleBlock, 2, 18093936, 3266, null]
-        - [163, 7, SimpleBlock, 2, 18097205, 5969, null]
-        - [163, 7, SimpleBlock, 2, 18103177, 2943, null]
-        - [163, 7, SimpleBlock, 2, 18106123, 2733, null]
-        - [163, 7, SimpleBlock, 2, 18108859, 2523, null]
-        - [163, 7, SimpleBlock, 2, 18111385, 2499, null]
-        - [163, 7, SimpleBlock, 2, 18113887, 6162, null]
-        - [163, 7, SimpleBlock, 2, 18120052, 2530, null]
-        - [163, 7, SimpleBlock, 2, 18122585, 2361, null]
-        - [163, 7, SimpleBlock, 2, 18124949, 2255, null]
-        - [163, 7, SimpleBlock, 2, 18127207, 2069, null]
-        - [163, 7, SimpleBlock, 2, 18129279, 1886, null]
-        - [163, 7, SimpleBlock, 2, 18131168, 5874, null]
-        - [163, 7, SimpleBlock, 2, 18137045, 1818, null]
-        - [163, 7, SimpleBlock, 2, 18138866, 1694, null]
-        - [163, 7, SimpleBlock, 2, 18140563, 1791, null]
-        - [163, 7, SimpleBlock, 2, 18142357, 1640, null]
-        - [163, 7, SimpleBlock, 2, 18144000, 5679, null]
-        - [163, 7, SimpleBlock, 2, 18149682, 1477, null]
-        - [163, 7, SimpleBlock, 2, 18151163, 60499, null]
-        - [163, 7, SimpleBlock, 2, 18211665, 2228, null]
-        - [163, 7, SimpleBlock, 2, 18213896, 2038, null]
-        - [163, 7, SimpleBlock, 2, 18215937, 1445, null]
-        - [163, 7, SimpleBlock, 2, 18217385, 5871, null]
-        - [163, 7, SimpleBlock, 2, 18223259, 1144, null]
-        - [163, 7, SimpleBlock, 2, 18224406, 966, null]
-        - [163, 7, SimpleBlock, 2, 18225375, 1554, null]
-        - [163, 7, SimpleBlock, 2, 18226932, 2149, null]
-        - [163, 7, SimpleBlock, 2, 18229084, 2465, null]
-        - [163, 7, SimpleBlock, 2, 18231552, 5872, null]
-        - [163, 7, SimpleBlock, 2, 18237427, 2573, null]
-        - [163, 7, SimpleBlock, 2, 18240003, 2777, null]
-        - [163, 7, SimpleBlock, 2, 18242783, 2284, null]
-        - [163, 7, SimpleBlock, 2, 18245070, 1929, null]
-        - [163, 7, SimpleBlock, 2, 18247002, 6066, null]
-        - [163, 7, SimpleBlock, 2, 18253071, 1909, null]
-        - [163, 7, SimpleBlock, 2, 18254983, 1855, null]
-        - [163, 7, SimpleBlock, 2, 18256841, 1852, null]
-        - [163, 7, SimpleBlock, 2, 18258696, 1983, null]
-        - [163, 7, SimpleBlock, 2, 18260682, 1894, null]
-        - [163, 7, SimpleBlock, 2, 18262579, 5583, null]
-        - [163, 7, SimpleBlock, 2, 18268165, 2739, null]
-        - [163, 7, SimpleBlock, 2, 18270907, 3644, null]
-        - [163, 7, SimpleBlock, 2, 18274554, 4230, null]
-        - [163, 7, SimpleBlock, 2, 18278787, 3657, null]
-        - [163, 7, SimpleBlock, 2, 18282447, 3713, null]
-        - [163, 7, SimpleBlock, 2, 18286163, 5680, null]
-        - [163, 7, SimpleBlock, 2, 18291846, 4481, null]
-        - [163, 7, SimpleBlock, 2, 18296330, 4123, null]
-        - [163, 7, SimpleBlock, 2, 18300456, 3651, null]
-        - [163, 7, SimpleBlock, 2, 18304110, 3533, null]
-        - [163, 7, SimpleBlock, 2, 18307646, 5681, null]
-        - [163, 7, SimpleBlock, 2, 18313330, 4339, null]
-        - [163, 7, SimpleBlock, 2, 18317672, 5237, null]
-        - [163, 7, SimpleBlock, 2, 18322912, 5918, null]
-        - [163, 7, SimpleBlock, 2, 18328833, 5993, null]
-        - [163, 7, SimpleBlock, 2, 18334829, 4395, null]
-        - [163, 7, SimpleBlock, 2, 18339227, 6066, null]
-        - [163, 7, SimpleBlock, 2, 18345296, 4935, null]
-        - [163, 7, SimpleBlock, 2, 18350234, 6037, null]
-        - [163, 7, SimpleBlock, 2, 18356274, 6409, null]
-        - [163, 7, SimpleBlock, 2, 18362686, 6297, null]
-        - [163, 7, SimpleBlock, 2, 18368986, 5585, null]
-        - [163, 7, SimpleBlock, 2, 18374574, 5557, null]
-        - [163, 7, SimpleBlock, 2, 18380134, 3974, null]
-        - [163, 7, SimpleBlock, 2, 18384111, 3783, null]
-        - [163, 7, SimpleBlock, 2, 18387897, 5331, null]
-        - [163, 7, SimpleBlock, 2, 18393231, 6651, null]
-        - [163, 7, SimpleBlock, 2, 18399885, 6256, null]
-        - [163, 7, SimpleBlock, 2, 18406144, 7153, null]
-        - [163, 7, SimpleBlock, 2, 18413300, 6988, null]
-        - [163, 7, SimpleBlock, 2, 18420291, 5408, null]
-        - [163, 7, SimpleBlock, 2, 18425702, 5607, null]
-        - [163, 7, SimpleBlock, 2, 18431312, 6685, null]
-        - [163, 7, SimpleBlock, 2, 18438000, 5585, null]
-        - [163, 7, SimpleBlock, 2, 18443588, 6995, null]
-        - [163, 7, SimpleBlock, 2, 18450586, 7063, null]
-        - [163, 7, SimpleBlock, 2, 18457652, 7432, null]
-        - [163, 7, SimpleBlock, 2, 18465087, 6784, null]
-        - [163, 7, SimpleBlock, 2, 18471874, 6257, null]
-        - [163, 7, SimpleBlock, 2, 18478134, 5355, null]
-        - [163, 7, SimpleBlock, 2, 18483492, 7557, null]
-        - [163, 7, SimpleBlock, 2, 18491052, 8650, null]
-        - [163, 7, SimpleBlock, 2, 18499705, 8923, null]
-        - [163, 7, SimpleBlock, 2, 18508631, 8857, null]
-        - [163, 7, SimpleBlock, 2, 18517491, 772, null]
-        - [163, 7, SimpleBlock, 2, 18518266, 5487, null]
-        - [163, 7, SimpleBlock, 2, 18523756, 7441, null]
-        - [163, 7, SimpleBlock, 2, 18531200, 8855, null]
-        - [163, 7, SimpleBlock, 2, 18540058, 9921, null]
-        - [163, 7, SimpleBlock, 2, 18549982, 10005, null]
-        - [163, 7, SimpleBlock, 2, 18559990, 10304, null]
-        - [163, 7, SimpleBlock, 2, 18570297, 5199, null]
-        - [163, 7, SimpleBlock, 2, 18575499, 10952, null]
-        - [163, 7, SimpleBlock, 2, 18586454, 11010, null]
-        - [163, 7, SimpleBlock, 2, 18597467, 9866, null]
-        - [163, 7, SimpleBlock, 2, 18607336, 10617, null]
-        - [163, 7, SimpleBlock, 2, 18617956, 10835, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 18628799
-      - 2103666
-      - - [231, 1, Timecode, 2, 18628801, 3, 66500]
-        - [163, 7, SimpleBlock, 2, 18628807, 772, null]
-        - [163, 7, SimpleBlock, 2, 18629582, 5776, null]
-        - [163, 7, SimpleBlock, 2, 18635362, 36579, null]
-        - [163, 7, SimpleBlock, 2, 18671944, 10731, null]
-        - [163, 7, SimpleBlock, 2, 18682678, 10066, null]
-        - [163, 7, SimpleBlock, 2, 18692747, 10462, null]
-        - [163, 7, SimpleBlock, 2, 18703212, 5680, null]
-        - [163, 7, SimpleBlock, 2, 18708895, 10053, null]
-        - [163, 7, SimpleBlock, 2, 18718951, 9561, null]
-        - [163, 7, SimpleBlock, 2, 18728515, 9702, null]
-        - [163, 7, SimpleBlock, 2, 18738220, 9853, null]
-        - [163, 7, SimpleBlock, 2, 18748076, 10160, null]
-        - [163, 7, SimpleBlock, 2, 18758239, 5777, null]
-        - [163, 7, SimpleBlock, 2, 18764019, 10507, null]
-        - [163, 7, SimpleBlock, 2, 18774529, 9258, null]
-        - [163, 7, SimpleBlock, 2, 18783790, 9531, null]
-        - [163, 7, SimpleBlock, 2, 18793324, 9334, null]
-        - [163, 7, SimpleBlock, 2, 18802661, 5971, null]
-        - [163, 7, SimpleBlock, 2, 18808635, 9543, null]
-        - [163, 7, SimpleBlock, 2, 18818181, 9583, null]
-        - [163, 7, SimpleBlock, 2, 18827767, 9467, null]
-        - [163, 7, SimpleBlock, 2, 18837237, 8785, null]
-        - [163, 7, SimpleBlock, 2, 18846025, 8852, null]
-        - [163, 7, SimpleBlock, 2, 18854880, 5585, null]
-        - [163, 7, SimpleBlock, 2, 18860468, 8937, null]
-        - [163, 7, SimpleBlock, 2, 18869408, 9268, null]
-        - [163, 7, SimpleBlock, 2, 18878679, 9540, null]
-        - [163, 7, SimpleBlock, 2, 18888222, 9647, null]
-        - [163, 7, SimpleBlock, 2, 18897872, 10183, null]
-        - [163, 7, SimpleBlock, 2, 18908058, 6063, null]
-        - [163, 7, SimpleBlock, 2, 18914124, 10189, null]
-        - [163, 7, SimpleBlock, 2, 18924316, 10411, null]
-        - [163, 7, SimpleBlock, 2, 18934730, 10327, null]
-        - [163, 7, SimpleBlock, 2, 18945060, 8746, null]
-        - [163, 7, SimpleBlock, 2, 18953809, 5778, null]
-        - [163, 7, SimpleBlock, 2, 18959590, 6640, null]
-        - [163, 7, SimpleBlock, 2, 18966233, 4282, null]
-        - [163, 7, SimpleBlock, 2, 18970518, 4188, null]
-        - [163, 7, SimpleBlock, 2, 18974709, 4033, null]
-        - [163, 7, SimpleBlock, 2, 18978745, 3879, null]
-        - [163, 7, SimpleBlock, 2, 18982627, 6161, null]
-        - [163, 7, SimpleBlock, 2, 18988791, 3886, null]
-        - [163, 7, SimpleBlock, 2, 18992680, 3964, null]
-        - [163, 7, SimpleBlock, 2, 18996647, 4541, null]
-        - [163, 7, SimpleBlock, 2, 19001191, 6260, null]
-        - [163, 7, SimpleBlock, 2, 19007454, 6063, null]
-        - [163, 7, SimpleBlock, 2, 19013520, 6518, null]
-        - [163, 7, SimpleBlock, 2, 19020041, 7033, null]
-        - [163, 7, SimpleBlock, 2, 19027077, 7616, null]
-        - [163, 7, SimpleBlock, 2, 19034696, 8183, null]
-        - [163, 7, SimpleBlock, 2, 19042882, 8734, null]
-        - [163, 7, SimpleBlock, 2, 19051619, 6067, null]
-        - [163, 7, SimpleBlock, 2, 19057689, 9116, null]
-        - [163, 7, SimpleBlock, 2, 19066808, 9011, null]
-        - [163, 7, SimpleBlock, 2, 19075822, 8607, null]
-        - [163, 7, SimpleBlock, 2, 19084432, 6087, null]
-        - [163, 7, SimpleBlock, 2, 19090522, 4136, null]
-        - [163, 7, SimpleBlock, 2, 19094661, 6061, null]
-        - [163, 7, SimpleBlock, 2, 19100725, 4215, null]
-        - [163, 7, SimpleBlock, 2, 19104943, 8273, null]
-        - [163, 7, SimpleBlock, 2, 19113219, 8929, null]
-        - [163, 7, SimpleBlock, 2, 19122151, 8522, null]
-        - [163, 7, SimpleBlock, 2, 19130676, 5682, null]
-        - [163, 7, SimpleBlock, 2, 19136361, 8505, null]
-        - [163, 7, SimpleBlock, 2, 19144869, 8264, null]
-        - [163, 7, SimpleBlock, 2, 19153136, 8036, null]
-        - [163, 7, SimpleBlock, 2, 19161175, 8376, null]
-        - [163, 7, SimpleBlock, 2, 19169554, 7908, null]
-        - [163, 7, SimpleBlock, 2, 19177465, 6255, null]
-        - [163, 7, SimpleBlock, 2, 19183723, 8350, null]
-        - [163, 7, SimpleBlock, 2, 19192076, 10471, null]
-        - [163, 7, SimpleBlock, 2, 19202550, 12007, null]
-        - [163, 7, SimpleBlock, 2, 19214560, 12700, null]
-        - [163, 7, SimpleBlock, 2, 19227263, 6449, null]
-        - [163, 7, SimpleBlock, 2, 19233715, 13211, null]
-        - [163, 7, SimpleBlock, 2, 19246929, 13350, null]
-        - [163, 7, SimpleBlock, 2, 19260282, 12895, null]
-        - [163, 7, SimpleBlock, 2, 19273180, 11656, null]
-        - [163, 7, SimpleBlock, 2, 19284839, 10411, null]
-        - [163, 7, SimpleBlock, 2, 19295253, 6451, null]
-        - [163, 7, SimpleBlock, 2, 19301707, 9994, null]
-        - [163, 7, SimpleBlock, 2, 19311704, 9523, null]
-        - [163, 7, SimpleBlock, 2, 19321230, 9295, null]
-        - [163, 7, SimpleBlock, 2, 19330528, 8371, null]
-        - [163, 7, SimpleBlock, 2, 19338902, 7294, null]
-        - [163, 7, SimpleBlock, 2, 19346199, 6161, null]
-        - [163, 7, SimpleBlock, 2, 19352363, 6261, null]
-        - [163, 7, SimpleBlock, 2, 19358628, 36475, null]
-        - [163, 7, SimpleBlock, 2, 19395106, 1597, null]
-        - [163, 7, SimpleBlock, 2, 19396706, 1054, null]
-        - [163, 7, SimpleBlock, 2, 19397763, 5967, null]
-        - [163, 7, SimpleBlock, 2, 19403733, 639, null]
-        - [163, 7, SimpleBlock, 2, 19404375, 654, null]
-        - [163, 7, SimpleBlock, 2, 19405032, 649, null]
-        - [163, 7, SimpleBlock, 2, 19405684, 671, null]
-        - [163, 7, SimpleBlock, 2, 19406358, 546, null]
-        - [163, 7, SimpleBlock, 2, 19406907, 6257, null]
-        - [163, 7, SimpleBlock, 2, 19413167, 564, null]
-        - [163, 7, SimpleBlock, 2, 19413734, 551, null]
-        - [163, 7, SimpleBlock, 2, 19414288, 644, null]
-        - [163, 7, SimpleBlock, 2, 19414935, 691, null]
-        - [163, 7, SimpleBlock, 2, 19415629, 559, null]
-        - [163, 7, SimpleBlock, 2, 19416191, 5872, null]
-        - [163, 7, SimpleBlock, 2, 19422066, 619, null]
-        - [163, 7, SimpleBlock, 2, 19422688, 773, null]
-        - [163, 7, SimpleBlock, 2, 19423464, 853, null]
-        - [163, 7, SimpleBlock, 2, 19424320, 816, null]
-        - [163, 7, SimpleBlock, 2, 19425139, 5488, null]
-        - [163, 7, SimpleBlock, 2, 19430630, 760, null]
-        - [163, 7, SimpleBlock, 2, 19431393, 977, null]
-        - [163, 7, SimpleBlock, 2, 19432373, 729, null]
-        - [163, 7, SimpleBlock, 2, 19433105, 915, null]
-        - [163, 7, SimpleBlock, 2, 19434023, 803, null]
-        - [163, 7, SimpleBlock, 2, 19434829, 5967, null]
-        - [163, 7, SimpleBlock, 2, 19440799, 629, null]
-        - [163, 7, SimpleBlock, 2, 19441431, 971, null]
-        - [163, 7, SimpleBlock, 2, 19442405, 704, null]
-        - [163, 7, SimpleBlock, 2, 19443112, 899, null]
-        - [163, 7, SimpleBlock, 2, 19444014, 5584, null]
-        - [163, 7, SimpleBlock, 2, 19449601, 814, null]
-        - [163, 7, SimpleBlock, 2, 19450418, 724, null]
-        - [163, 7, SimpleBlock, 2, 19451145, 950, null]
-        - [163, 7, SimpleBlock, 2, 19452098, 770, null]
-        - [163, 7, SimpleBlock, 2, 19452871, 973, null]
-        - [163, 7, SimpleBlock, 2, 19453847, 5776, null]
-        - [163, 7, SimpleBlock, 2, 19459626, 901, null]
-        - [163, 7, SimpleBlock, 2, 19460530, 719, null]
-        - [163, 7, SimpleBlock, 2, 19461252, 947, null]
-        - [163, 7, SimpleBlock, 2, 19462202, 662, null]
-        - [163, 7, SimpleBlock, 2, 19462867, 836, null]
-        - [163, 7, SimpleBlock, 2, 19463706, 6160, null]
-        - [163, 7, SimpleBlock, 2, 19469869, 723, null]
-        - [163, 7, SimpleBlock, 2, 19470595, 592, null]
-        - [163, 7, SimpleBlock, 2, 19471190, 837, null]
-        - [163, 7, SimpleBlock, 2, 19472030, 607, null]
-        - [163, 7, SimpleBlock, 2, 19472640, 5681, null]
-        - [163, 7, SimpleBlock, 2, 19478324, 784, null]
-        - [163, 7, SimpleBlock, 2, 19479111, 699, null]
-        - [163, 7, SimpleBlock, 2, 19479813, 517, null]
-        - [163, 7, SimpleBlock, 2, 19480333, 787, null]
-        - [163, 7, SimpleBlock, 2, 19481123, 554, null]
-        - [163, 7, SimpleBlock, 2, 19481680, 6065, null]
-        - [163, 7, SimpleBlock, 2, 19487749, 48229, null]
-        - [163, 7, SimpleBlock, 2, 19535981, 1795, null]
-        - [163, 7, SimpleBlock, 2, 19537779, 1433, null]
-        - [163, 7, SimpleBlock, 2, 19539215, 829, null]
-        - [163, 7, SimpleBlock, 2, 19540047, 772, null]
-        - [163, 7, SimpleBlock, 2, 19540822, 647, null]
-        - [163, 7, SimpleBlock, 2, 19541472, 5680, null]
-        - [163, 7, SimpleBlock, 2, 19547155, 757, null]
-        - [163, 7, SimpleBlock, 2, 19547915, 771, null]
-        - [163, 7, SimpleBlock, 2, 19548689, 699, null]
-        - [163, 7, SimpleBlock, 2, 19549391, 793, null]
-        - [163, 7, SimpleBlock, 2, 19550187, 904, null]
-        - [163, 7, SimpleBlock, 2, 19551094, 5872, null]
-        - [163, 7, SimpleBlock, 2, 19556969, 862, null]
-        - [163, 7, SimpleBlock, 2, 19557834, 1178, null]
-        - [163, 7, SimpleBlock, 2, 19559015, 1055, null]
-        - [163, 7, SimpleBlock, 2, 19560073, 1098, null]
-        - [163, 7, SimpleBlock, 2, 19561174, 5774, null]
-        - [163, 7, SimpleBlock, 2, 19566951, 1242, null]
-        - [163, 7, SimpleBlock, 2, 19568196, 1272, null]
-        - [163, 7, SimpleBlock, 2, 19569471, 1209, null]
-        - [163, 7, SimpleBlock, 2, 19570683, 1540, null]
-        - [163, 7, SimpleBlock, 2, 19572226, 1541, null]
-        - [163, 7, SimpleBlock, 2, 19573770, 5680, null]
-        - [163, 7, SimpleBlock, 2, 19579453, 1583, null]
-        - [163, 7, SimpleBlock, 2, 19581039, 1724, null]
-        - [163, 7, SimpleBlock, 2, 19582766, 1947, null]
-        - [163, 7, SimpleBlock, 2, 19584716, 1810, null]
-        - [163, 7, SimpleBlock, 2, 19586529, 5394, null]
-        - [163, 7, SimpleBlock, 2, 19591926, 1931, null]
-        - [163, 7, SimpleBlock, 2, 19593860, 1872, null]
-        - [163, 7, SimpleBlock, 2, 19595735, 1897, null]
-        - [163, 7, SimpleBlock, 2, 19597635, 1731, null]
-        - [163, 7, SimpleBlock, 2, 19599369, 1852, null]
-        - [163, 7, SimpleBlock, 2, 19601224, 5393, null]
-        - [163, 7, SimpleBlock, 2, 19606620, 1761, null]
-        - [163, 7, SimpleBlock, 2, 19608384, 1845, null]
-        - [163, 7, SimpleBlock, 2, 19610232, 1851, null]
-        - [163, 7, SimpleBlock, 2, 19612086, 1824, null]
-        - [163, 7, SimpleBlock, 2, 19613913, 1842, null]
-        - [163, 7, SimpleBlock, 2, 19615758, 5295, null]
-        - [163, 7, SimpleBlock, 2, 19621056, 1767, null]
-        - [163, 7, SimpleBlock, 2, 19622826, 1774, null]
-        - [163, 7, SimpleBlock, 2, 19624603, 1688, null]
-        - [163, 7, SimpleBlock, 2, 19626294, 1683, null]
-        - [163, 7, SimpleBlock, 2, 19627980, 5584, null]
-        - [163, 7, SimpleBlock, 2, 19633567, 1458, null]
-        - [163, 7, SimpleBlock, 2, 19635028, 1661, null]
-        - [163, 7, SimpleBlock, 2, 19636692, 1559, null]
-        - [163, 7, SimpleBlock, 2, 19638254, 1429, null]
-        - [163, 7, SimpleBlock, 2, 19639686, 1566, null]
-        - [163, 7, SimpleBlock, 2, 19641255, 5294, null]
-        - [163, 7, SimpleBlock, 2, 19646552, 1519, null]
-        - [163, 7, SimpleBlock, 2, 19648074, 1356, null]
-        - [163, 7, SimpleBlock, 2, 19649433, 1292, null]
-        - [163, 7, SimpleBlock, 2, 19650728, 1177, null]
-        - [163, 7, SimpleBlock, 2, 19651908, 5488, null]
-        - [163, 7, SimpleBlock, 2, 19657399, 1208, null]
-        - [163, 7, SimpleBlock, 2, 19658610, 1022, null]
-        - [163, 7, SimpleBlock, 2, 19659635, 943, null]
-        - [163, 7, SimpleBlock, 2, 19660581, 804, null]
-        - [163, 7, SimpleBlock, 2, 19661388, 783, null]
-        - [163, 7, SimpleBlock, 2, 19662174, 5393, null]
-        - [163, 7, SimpleBlock, 2, 19667570, 856, null]
-        - [163, 7, SimpleBlock, 2, 19668430, 28957, null]
-        - [163, 7, SimpleBlock, 2, 19697390, 3375, null]
-        - [163, 7, SimpleBlock, 2, 19700768, 3741, null]
-        - [163, 7, SimpleBlock, 2, 19704512, 3590, null]
-        - [163, 7, SimpleBlock, 2, 19708105, 6545, null]
-        - [163, 7, SimpleBlock, 2, 19714653, 3476, null]
-        - [163, 7, SimpleBlock, 2, 19718132, 3331, null]
-        - [163, 7, SimpleBlock, 2, 19721466, 3305, null]
-        - [163, 7, SimpleBlock, 2, 19724774, 3474, null]
-        - [163, 7, SimpleBlock, 2, 19728251, 6063, null]
-        - [163, 7, SimpleBlock, 2, 19734317, 3784, null]
-        - [163, 7, SimpleBlock, 2, 19738104, 3987, null]
-        - [163, 7, SimpleBlock, 2, 19742094, 4201, null]
-        - [163, 7, SimpleBlock, 2, 19746298, 3595, null]
-        - [163, 7, SimpleBlock, 2, 19749896, 3209, null]
-        - [163, 7, SimpleBlock, 2, 19753108, 6256, null]
-        - [163, 7, SimpleBlock, 2, 19759367, 3317, null]
-        - [163, 7, SimpleBlock, 2, 19762687, 3767, null]
-        - [163, 7, SimpleBlock, 2, 19766457, 3811, null]
-        - [163, 7, SimpleBlock, 2, 19770271, 3951, null]
-        - [163, 7, SimpleBlock, 2, 19774225, 6546, null]
-        - [163, 7, SimpleBlock, 2, 19780774, 3884, null]
-        - [163, 7, SimpleBlock, 2, 19784661, 3807, null]
-        - [163, 7, SimpleBlock, 2, 19788471, 4301, null]
-        - [163, 7, SimpleBlock, 2, 19792775, 5472, null]
-        - [163, 7, SimpleBlock, 2, 19798250, 6931, null]
-        - [163, 7, SimpleBlock, 2, 19805184, 5968, null]
-        - [163, 7, SimpleBlock, 2, 19811155, 8261, null]
-        - [163, 7, SimpleBlock, 2, 19819419, 9448, null]
-        - [163, 7, SimpleBlock, 2, 19828870, 9747, null]
-        - [163, 7, SimpleBlock, 2, 19838620, 10939, null]
-        - [163, 7, SimpleBlock, 2, 19849562, 11581, null]
-        - [163, 7, SimpleBlock, 2, 19861146, 6257, null]
-        - [163, 7, SimpleBlock, 2, 19867406, 12226, null]
-        - [163, 7, SimpleBlock, 2, 19879635, 12386, null]
-        - [163, 7, SimpleBlock, 2, 19892024, 12288, null]
-        - [163, 7, SimpleBlock, 2, 19904315, 12342, null]
-        - [163, 7, SimpleBlock, 2, 19916660, 6066, null]
-        - [163, 7, SimpleBlock, 2, 19922729, 12606, null]
-        - [163, 7, SimpleBlock, 2, 19935338, 12670, null]
-        - [163, 7, SimpleBlock, 2, 19948011, 14314, null]
-        - [163, 7, SimpleBlock, 2, 19962328, 14351, null]
-        - [163, 7, SimpleBlock, 2, 19976682, 14816, null]
-        - [163, 7, SimpleBlock, 2, 19991501, 6065, null]
-        - [163, 7, SimpleBlock, 2, 19997569, 14730, null]
-        - [163, 7, SimpleBlock, 2, 20012302, 14115, null]
-        - [163, 7, SimpleBlock, 2, 20026420, 12301, null]
-        - [163, 7, SimpleBlock, 2, 20038724, 9600, null]
-        - [163, 7, SimpleBlock, 2, 20048327, 7875, null]
-        - [163, 7, SimpleBlock, 2, 20056205, 6258, null]
-        - [163, 7, SimpleBlock, 2, 20062466, 7045, null]
-        - [163, 7, SimpleBlock, 2, 20069514, 5980, null]
-        - [163, 7, SimpleBlock, 2, 20075497, 5767, null]
-        - [163, 7, SimpleBlock, 2, 20081267, 5984, null]
-        - [163, 7, SimpleBlock, 2, 20087254, 5874, null]
-        - [163, 7, SimpleBlock, 2, 20093131, 6340, null]
-        - [163, 7, SimpleBlock, 2, 20099474, 6302, null]
-        - [163, 7, SimpleBlock, 2, 20105779, 6276, null]
-        - [163, 7, SimpleBlock, 2, 20112058, 6033, null]
-        - [163, 7, SimpleBlock, 2, 20118094, 5439, null]
-        - [163, 7, SimpleBlock, 2, 20123536, 5873, null]
-        - [163, 7, SimpleBlock, 2, 20129413, 60579, null]
-        - [163, 7, SimpleBlock, 2, 20189995, 7029, null]
-        - [163, 7, SimpleBlock, 2, 20197027, 9336, null]
-        - [163, 7, SimpleBlock, 2, 20206366, 10592, null]
-        - [163, 7, SimpleBlock, 2, 20216961, 5490, null]
-        - [163, 7, SimpleBlock, 2, 20222454, 12788, null]
-        - [163, 7, SimpleBlock, 2, 20235245, 13783, null]
-        - [163, 7, SimpleBlock, 2, 20249031, 14231, null]
-        - [163, 7, SimpleBlock, 2, 20263265, 15363, null]
-        - [163, 7, SimpleBlock, 2, 20278632, 17133, null]
-        - [163, 7, SimpleBlock, 2, 20295768, 5296, null]
-        - [163, 7, SimpleBlock, 2, 20301068, 17470, null]
-        - [163, 7, SimpleBlock, 2, 20318541, 14994, null]
-        - [163, 7, SimpleBlock, 2, 20333538, 14941, null]
-        - [163, 7, SimpleBlock, 2, 20348482, 14520, null]
-        - [163, 7, SimpleBlock, 2, 20363005, 15205, null]
-        - [163, 7, SimpleBlock, 2, 20378213, 5201, null]
-        - [163, 7, SimpleBlock, 2, 20383417, 15907, null]
-        - [163, 7, SimpleBlock, 2, 20399328, 16652, null]
-        - [163, 7, SimpleBlock, 2, 20415984, 17361, null]
-        - [163, 7, SimpleBlock, 2, 20433349, 17414, null]
-        - [163, 7, SimpleBlock, 2, 20450766, 5296, null]
-        - [163, 7, SimpleBlock, 2, 20456066, 17770, null]
-        - [163, 7, SimpleBlock, 2, 20473840, 17844, null]
-        - [163, 7, SimpleBlock, 2, 20491688, 16842, null]
-        - [163, 7, SimpleBlock, 2, 20508534, 16488, null]
-        - [163, 7, SimpleBlock, 2, 20525026, 16936, null]
-        - [163, 7, SimpleBlock, 2, 20541965, 676, null]
-        - [163, 7, SimpleBlock, 2, 20542644, 5870, null]
-        - [163, 7, SimpleBlock, 2, 20548518, 17904, null]
-        - [163, 7, SimpleBlock, 2, 20566426, 17748, null]
-        - [163, 7, SimpleBlock, 2, 20584178, 17327, null]
-        - [163, 7, SimpleBlock, 2, 20601509, 16804, null]
-        - [163, 7, SimpleBlock, 2, 20618317, 16609, null]
-        - [163, 7, SimpleBlock, 2, 20634929, 4620, null]
-        - [163, 7, SimpleBlock, 2, 20639553, 17718, null]
-        - [163, 7, SimpleBlock, 2, 20657275, 19234, null]
-        - [163, 7, SimpleBlock, 2, 20676513, 18749, null]
-        - [163, 7, SimpleBlock, 2, 20695266, 18574, null]
-        - [163, 7, SimpleBlock, 2, 20713844, 18621, null]
-    - - 524531317
-      - 6
-      - Cluster
-      - 1
-      - 20732473
-      - 2606864
-      - - [231, 1, Timecode, 2, 20732475, 3, 76917]
-        - [163, 7, SimpleBlock, 2, 20732481, 676, null]
-        - [163, 7, SimpleBlock, 2, 20733160, 5296, null]
-        - [163, 7, SimpleBlock, 2, 20738460, 57870, null]
-        - [163, 7, SimpleBlock, 2, 20796333, 14161, null]
-        - [163, 7, SimpleBlock, 2, 20810497, 16283, null]
-        - [163, 7, SimpleBlock, 2, 20826784, 18643, null]
-        - [163, 7, SimpleBlock, 2, 20845430, 5584, null]
-        - [163, 7, SimpleBlock, 2, 20851018, 20094, null]
-        - [163, 7, SimpleBlock, 2, 20871116, 20936, null]
-        - [163, 7, SimpleBlock, 2, 20892056, 21547, null]
-        - [163, 7, SimpleBlock, 2, 20913607, 21831, null]
-        - [163, 7, SimpleBlock, 2, 20935442, 21619, null]
-        - [163, 7, SimpleBlock, 2, 20957064, 5967, null]
-        - [163, 7, SimpleBlock, 2, 20963035, 21935, null]
-        - [163, 7, SimpleBlock, 2, 20984974, 21975, null]
-        - [163, 7, SimpleBlock, 2, 21006953, 21595, null]
-        - [163, 7, SimpleBlock, 2, 21028552, 22352, null]
-        - [163, 7, SimpleBlock, 2, 21050907, 5971, null]
-        - [163, 7, SimpleBlock, 2, 21056882, 21241, null]
-        - [163, 7, SimpleBlock, 2, 21078127, 20004, null]
-        - [163, 7, SimpleBlock, 2, 21098135, 18589, null]
-        - [163, 7, SimpleBlock, 2, 21116728, 18088, null]
-        - [163, 7, SimpleBlock, 2, 21134820, 18065, null]
-        - [163, 7, SimpleBlock, 2, 21152888, 5680, null]
-        - [163, 7, SimpleBlock, 2, 21158572, 18783, null]
-        - [163, 7, SimpleBlock, 2, 21177359, 19388, null]
-        - [163, 7, SimpleBlock, 2, 21196751, 19874, null]
-        - [163, 7, SimpleBlock, 2, 21216629, 20500, null]
-        - [163, 7, SimpleBlock, 2, 21237133, 20363, null]
-        - [163, 7, SimpleBlock, 2, 21257499, 5683, null]
-        - [163, 7, SimpleBlock, 2, 21263186, 20383, null]
-        - [163, 7, SimpleBlock, 2, 21283573, 19364, null]
-        - [163, 7, SimpleBlock, 2, 21302941, 19084, null]
-        - [163, 7, SimpleBlock, 2, 21322029, 19510, null]
-        - [163, 7, SimpleBlock, 2, 21341542, 5779, null]
-        - [163, 7, SimpleBlock, 2, 21347325, 19655, null]
-        - [163, 7, SimpleBlock, 2, 21366984, 19150, null]
-        - [163, 7, SimpleBlock, 2, 21386138, 18882, null]
-        - [163, 7, SimpleBlock, 2, 21405024, 18431, null]
-        - [163, 7, SimpleBlock, 2, 21423459, 18037, null]
-        - [163, 7, SimpleBlock, 2, 21441499, 5393, null]
-        - [163, 7, SimpleBlock, 2, 21446896, 18017, null]
-        - [163, 7, SimpleBlock, 2, 21464917, 17810, null]
-        - [163, 7, SimpleBlock, 2, 21482731, 17480, null]
-        - [163, 7, SimpleBlock, 2, 21500215, 17218, null]
-        - [163, 7, SimpleBlock, 2, 21517436, 5968, null]
-        - [163, 7, SimpleBlock, 2, 21523408, 16822, null]
-        - [163, 7, SimpleBlock, 2, 21540234, 16542, null]
-        - [163, 7, SimpleBlock, 2, 21556779, 16000, null]
-        - [163, 7, SimpleBlock, 2, 21572783, 17251, null]
-        - [163, 7, SimpleBlock, 2, 21590038, 18254, null]
-        - [163, 7, SimpleBlock, 2, 21608295, 6451, null]
-        - [163, 7, SimpleBlock, 2, 21614750, 19178, null]
-        - [163, 7, SimpleBlock, 2, 21633932, 19684, null]
-        - [163, 7, SimpleBlock, 2, 21653620, 20370, null]
-        - [163, 7, SimpleBlock, 2, 21673994, 21172, null]
-        - [163, 7, SimpleBlock, 2, 21695170, 21607, null]
-        - [163, 7, SimpleBlock, 2, 21716780, 6162, null]
-        - [163, 7, SimpleBlock, 2, 21722946, 21284, null]
-        - [163, 7, SimpleBlock, 2, 21744234, 19858, null]
-        - [163, 7, SimpleBlock, 2, 21764096, 20103, null]
-        - [163, 7, SimpleBlock, 2, 21784203, 20348, null]
-        - [163, 7, SimpleBlock, 2, 21804554, 5775, null]
-        - [163, 7, SimpleBlock, 2, 21810333, 20136, null]
-        - [163, 7, SimpleBlock, 2, 21830473, 18094, null]
-        - [163, 7, SimpleBlock, 2, 21848571, 17128, null]
-        - [163, 7, SimpleBlock, 2, 21865703, 16497, null]
-        - [163, 7, SimpleBlock, 2, 21882204, 16452, null]
-        - [163, 7, SimpleBlock, 2, 21898659, 6161, null]
-        - [163, 7, SimpleBlock, 2, 21904824, 17164, null]
-        - [163, 7, SimpleBlock, 2, 21921992, 17846, null]
-        - [163, 7, SimpleBlock, 2, 21939842, 18903, null]
-        - [163, 7, SimpleBlock, 2, 21958749, 19244, null]
-        - [163, 7, SimpleBlock, 2, 21977996, 5874, null]
-        - [163, 7, SimpleBlock, 2, 21983874, 18813, null]
-        - [163, 7, SimpleBlock, 2, 22002691, 18540, null]
-        - [163, 7, SimpleBlock, 2, 22021235, 17596, null]
-        - [163, 7, SimpleBlock, 2, 22038835, 17222, null]
-        - [163, 7, SimpleBlock, 2, 22056061, 17986, null]
-        - [163, 7, SimpleBlock, 2, 22074050, 5871, null]
-        - [163, 7, SimpleBlock, 2, 22079925, 19782, null]
-        - [163, 7, SimpleBlock, 2, 22099711, 20933, null]
-        - [163, 7, SimpleBlock, 2, 22120648, 20789, null]
-        - [163, 7, SimpleBlock, 2, 22141441, 19381, null]
-        - [163, 7, SimpleBlock, 2, 22160826, 17254, null]
-        - [163, 7, SimpleBlock, 2, 22178083, 3660, null]
-        - [163, 7, SimpleBlock, 2, 22181746, 15440, null]
-        - [163, 7, SimpleBlock, 2, 22197189, 14453, null]
-        - [163, 7, SimpleBlock, 2, 22211645, 13030, null]
-        - [163, 7, SimpleBlock, 2, 22224678, 5682, null]
-        - [163, 7, SimpleBlock, 2, 22230363, 11253, null]
-        - [163, 7, SimpleBlock, 2, 22241620, 32873, null]
-        - [163, 7, SimpleBlock, 2, 22274496, 6159, null]
-        - [163, 7, SimpleBlock, 2, 22280658, 7281, null]
-        - [163, 7, SimpleBlock, 2, 22287942, 5585, null]
-        - [163, 7, SimpleBlock, 2, 22293530, 7922, null]
-        - [163, 7, SimpleBlock, 2, 22301455, 7943, null]
-        - [163, 7, SimpleBlock, 2, 22309401, 7994, null]
-        - [163, 7, SimpleBlock, 2, 22317398, 7958, null]
-        - [163, 7, SimpleBlock, 2, 22325359, 8355, null]
-        - [163, 7, SimpleBlock, 2, 22333717, 5777, null]
-        - [163, 7, SimpleBlock, 2, 22339497, 8199, null]
-        - [163, 7, SimpleBlock, 2, 22347699, 8535, null]
-        - [163, 7, SimpleBlock, 2, 22356237, 8831, null]
-        - [163, 7, SimpleBlock, 2, 22365071, 9248, null]
-        - [163, 7, SimpleBlock, 2, 22374322, 5680, null]
-        - [163, 7, SimpleBlock, 2, 22380005, 9220, null]
-        - [163, 7, SimpleBlock, 2, 22389228, 9075, null]
-        - [163, 7, SimpleBlock, 2, 22398306, 9125, null]
-        - [163, 7, SimpleBlock, 2, 22407434, 9290, null]
-        - [163, 7, SimpleBlock, 2, 22416727, 9547, null]
-        - [163, 7, SimpleBlock, 2, 22426277, 5969, null]
-        - [163, 7, SimpleBlock, 2, 22432249, 9837, null]
-        - [163, 7, SimpleBlock, 2, 22442089, 10391, null]
-        - [163, 7, SimpleBlock, 2, 22452483, 10332, null]
-        - [163, 7, SimpleBlock, 2, 22462818, 10531, null]
-        - [163, 7, SimpleBlock, 2, 22473352, 10458, null]
-        - [163, 7, SimpleBlock, 2, 22483813, 5872, null]
-        - [163, 7, SimpleBlock, 2, 22489688, 10067, null]
-        - [163, 7, SimpleBlock, 2, 22499758, 10130, null]
-        - [163, 7, SimpleBlock, 2, 22509891, 9976, null]
-        - [163, 7, SimpleBlock, 2, 22519870, 10480, null]
-        - [163, 7, SimpleBlock, 2, 22530353, 6162, null]
-        - [163, 7, SimpleBlock, 2, 22536518, 11192, null]
-        - [163, 7, SimpleBlock, 2, 22547713, 10621, null]
-        - [163, 7, SimpleBlock, 2, 22558337, 10069, null]
-        - [163, 7, SimpleBlock, 2, 22568409, 10229, null]
-        - [163, 7, SimpleBlock, 2, 22578641, 10237, null]
-        - [163, 7, SimpleBlock, 2, 22588881, 6162, null]
-        - [163, 7, SimpleBlock, 2, 22595046, 10671, null]
-        - [163, 7, SimpleBlock, 2, 22605720, 10010, null]
-        - [163, 7, SimpleBlock, 2, 22615733, 9164, null]
-        - [163, 7, SimpleBlock, 2, 22624900, 8803, null]
-        - [163, 7, SimpleBlock, 2, 22633706, 6255, null]
-        - [163, 7, SimpleBlock, 2, 22639964, 8877, null]
-        - [163, 7, SimpleBlock, 2, 22648844, 8534, null]
-        - [163, 7, SimpleBlock, 2, 22657381, 7929, null]
-        - [163, 7, SimpleBlock, 2, 22665313, 8015, null]
-        - [163, 7, SimpleBlock, 2, 22673331, 8173, null]
-        - [163, 7, SimpleBlock, 2, 22681507, 6065, null]
-        - [163, 7, SimpleBlock, 2, 22687575, 7984, null]
-        - [163, 7, SimpleBlock, 2, 22695562, 7944, null]
-        - [163, 7, SimpleBlock, 2, 22703509, 8216, null]
-        - [163, 7, SimpleBlock, 2, 22711728, 8112, null]
-        - [163, 7, SimpleBlock, 2, 22719843, 7928, null]
-        - [163, 7, SimpleBlock, 2, 22727774, 6354, null]
-        - [163, 7, SimpleBlock, 2, 22734131, 7622, null]
-        - [163, 7, SimpleBlock, 2, 22741756, 8231, null]
-        - [163, 7, SimpleBlock, 2, 22749990, 8319, null]
-        - [163, 7, SimpleBlock, 2, 22758312, 7903, null]
-        - [163, 7, SimpleBlock, 2, 22766218, 6063, null]
-        - [163, 7, SimpleBlock, 2, 22772284, 8053, null]
-        - [163, 7, SimpleBlock, 2, 22780340, 8385, null]
-        - [163, 7, SimpleBlock, 2, 22788728, 8056, null]
-        - [163, 7, SimpleBlock, 2, 22796787, 8148, null]
-        - [163, 7, SimpleBlock, 2, 22804938, 8115, null]
-        - [163, 7, SimpleBlock, 2, 22813056, 6065, null]
-        - [163, 7, SimpleBlock, 2, 22819124, 8290, null]
-        - [163, 7, SimpleBlock, 2, 22827417, 7985, null]
-        - [163, 7, SimpleBlock, 2, 22835405, 8685, null]
-        - [163, 7, SimpleBlock, 2, 22844093, 9660, null]
-        - [163, 7, SimpleBlock, 2, 22853756, 5874, null]
-        - [163, 7, SimpleBlock, 2, 22859633, 10373, null]
-        - [163, 7, SimpleBlock, 2, 22870009, 9892, null]
-        - [163, 7, SimpleBlock, 2, 22879904, 9637, null]
-        - [163, 7, SimpleBlock, 2, 22889544, 9721, null]
-        - [163, 7, SimpleBlock, 2, 22899268, 9824, null]
-        - [163, 7, SimpleBlock, 2, 22909095, 5874, null]
-        - [163, 7, SimpleBlock, 2, 22914972, 9921, null]
-        - [163, 7, SimpleBlock, 2, 22924896, 8744, null]
-        - [163, 7, SimpleBlock, 2, 22933643, 7978, null]
-        - [163, 7, SimpleBlock, 2, 22941624, 7648, null]
-        - [163, 7, SimpleBlock, 2, 22949275, 7314, null]
-        - [163, 7, SimpleBlock, 2, 22956592, 5969, null]
-        - [163, 7, SimpleBlock, 2, 22962564, 7531, null]
-        - [163, 7, SimpleBlock, 2, 22970098, 7095, null]
-        - [163, 7, SimpleBlock, 2, 22977196, 6685, null]
-        - [163, 7, SimpleBlock, 2, 22983884, 6591, null]
-        - [163, 7, SimpleBlock, 2, 22990478, 5776, null]
-        - [163, 7, SimpleBlock, 2, 22996257, 6599, null]
-        - [163, 7, SimpleBlock, 2, 23002859, 5858, null]
-        - [163, 7, SimpleBlock, 2, 23008720, 5549, null]
-        - [163, 7, SimpleBlock, 2, 23014272, 4881, null]
-        - [163, 7, SimpleBlock, 2, 23019156, 4488, null]
-        - [163, 7, SimpleBlock, 2, 23023647, 5968, null]
-        - [163, 7, SimpleBlock, 2, 23029618, 4335, null]
-        - [163, 7, SimpleBlock, 2, 23033956, 4173, null]
-        - [163, 7, SimpleBlock, 2, 23038132, 3829, null]
-        - [163, 7, SimpleBlock, 2, 23041964, 3577, null]
-        - [163, 7, SimpleBlock, 2, 23045544, 3307, null]
-        - [163, 7, SimpleBlock, 2, 23048854, 5969, null]
-        - [163, 7, SimpleBlock, 2, 23054826, 3053, null]
-        - [163, 7, SimpleBlock, 2, 23057882, 2805, null]
-        - [163, 7, SimpleBlock, 2, 23060690, 2487, null]
-        - [163, 7, SimpleBlock, 2, 23063180, 2142, null]
-        - [163, 7, SimpleBlock, 2, 23065325, 6257, null]
-        - [163, 7, SimpleBlock, 2, 23071585, 1832, null]
-        - [163, 7, SimpleBlock, 2, 23073420, 1453, null]
-        - [163, 7, SimpleBlock, 2, 23074876, 1818, null]
-        - [163, 7, SimpleBlock, 2, 23076697, 1981, null]
-        - [163, 7, SimpleBlock, 2, 23078681, 3420, null]
-        - [163, 7, SimpleBlock, 2, 23082104, 5587, null]
-        - [163, 7, SimpleBlock, 2, 23087694, 3332, null]
-        - [163, 7, SimpleBlock, 2, 23091029, 2137, null]
-        - [163, 7, SimpleBlock, 2, 23093169, 2342, null]
-        - [163, 7, SimpleBlock, 2, 23095514, 3714, null]
-        - [163, 7, SimpleBlock, 2, 23099231, 4914, null]
-        - [163, 7, SimpleBlock, 2, 23104148, 2562, null]
-        - [163, 7, SimpleBlock, 2, 23106713, 3019, null]
-        - [163, 7, SimpleBlock, 2, 23109735, 2231, null]
-        - [163, 7, SimpleBlock, 2, 23111969, 1798, null]
-        - [163, 7, SimpleBlock, 2, 23113770, 1799, null]
-        - [163, 7, SimpleBlock, 2, 23115572, 5775, null]
-        - [163, 7, SimpleBlock, 2, 23121350, 1502, null]
-        - [163, 7, SimpleBlock, 2, 23122855, 1437, null]
-        - [163, 7, SimpleBlock, 2, 23124295, 1266, null]
-        - [163, 7, SimpleBlock, 2, 23125564, 1279, null]
-        - [163, 7, SimpleBlock, 2, 23126846, 1276, null]
-        - [163, 7, SimpleBlock, 2, 23128125, 4816, null]
-        - [163, 7, SimpleBlock, 2, 23132944, 1198, null]
-        - [163, 7, SimpleBlock, 2, 23134145, 1034, null]
-        - [163, 7, SimpleBlock, 2, 23135182, 876, null]
-        - [163, 7, SimpleBlock, 2, 23136061, 721, null]
-        - [163, 7, SimpleBlock, 2, 23136785, 4719, null]
-        - [163, 7, SimpleBlock, 2, 23141507, 541, null]
-        - [163, 7, SimpleBlock, 2, 23142051, 616, null]
-        - [163, 7, SimpleBlock, 2, 23142670, 618, null]
-        - [163, 7, SimpleBlock, 2, 23143291, 597, null]
-        - [163, 7, SimpleBlock, 2, 23143891, 440, null]
-        - [163, 7, SimpleBlock, 2, 23144334, 4527, null]
-        - [163, 7, SimpleBlock, 2, 23148864, 435, null]
-        - [163, 7, SimpleBlock, 2, 23149302, 424, null]
-        - [163, 7, SimpleBlock, 2, 23149729, 432, null]
-        - [163, 7, SimpleBlock, 2, 23150164, 417, null]
-        - [163, 7, SimpleBlock, 2, 23150584, 580, null]
-        - [163, 7, SimpleBlock, 2, 23151167, 405, null]
-        - [163, 7, SimpleBlock, 2, 23151575, 4525, null]
-        - [163, 7, SimpleBlock, 2, 23156103, 413, null]
-        - [163, 7, SimpleBlock, 2, 23156519, 411, null]
-        - [163, 7, SimpleBlock, 2, 23156933, 437, null]
-        - [163, 7, SimpleBlock, 2, 23157373, 422, null]
-        - [163, 7, SimpleBlock, 2, 23157798, 437, null]
-        - [163, 7, SimpleBlock, 2, 23158238, 4613, null]
-        - [163, 7, SimpleBlock, 2, 23162854, 458, null]
-        - [163, 7, SimpleBlock, 2, 23163315, 479, null]
-        - [163, 7, SimpleBlock, 2, 23163797, 478, null]
-        - [163, 7, SimpleBlock, 2, 23164278, 477, null]
-        - [163, 7, SimpleBlock, 2, 23164758, 4613, null]
-        - [163, 7, SimpleBlock, 2, 23169374, 489, null]
-        - [163, 7, SimpleBlock, 2, 23169866, 550, null]
-        - [163, 7, SimpleBlock, 2, 23170419, 541, null]
-        - [163, 7, SimpleBlock, 2, 23170963, 549, null]
-        - [163, 7, SimpleBlock, 2, 23171515, 583, null]
-        - [163, 7, SimpleBlock, 2, 23172101, 4613, null]
-        - [163, 7, SimpleBlock, 2, 23176717, 629, null]
-        - [163, 7, SimpleBlock, 2, 23177349, 728, null]
-        - [163, 7, SimpleBlock, 2, 23178080, 756, null]
-        - [163, 7, SimpleBlock, 2, 23178839, 1142, null]
-        - [163, 7, SimpleBlock, 2, 23179984, 5009, null]
-        - [163, 7, SimpleBlock, 2, 23184996, 1271, null]
-        - [163, 7, SimpleBlock, 2, 23186270, 1824, null]
-        - [163, 7, SimpleBlock, 2, 23188097, 3163, null]
-        - [163, 7, SimpleBlock, 2, 23191263, 3939, null]
-        - [163, 7, SimpleBlock, 2, 23195205, 4206, null]
-        - [163, 7, SimpleBlock, 2, 23199414, 5106, null]
-        - [163, 7, SimpleBlock, 2, 23204523, 4020, null]
-        - [163, 7, SimpleBlock, 2, 23208546, 4034, null]
-        - [163, 7, SimpleBlock, 2, 23212583, 4006, null]
-        - [163, 7, SimpleBlock, 2, 23216592, 3598, null]
-        - [163, 7, SimpleBlock, 2, 23220193, 3320, null]
-        - [163, 7, SimpleBlock, 2, 23223516, 5011, null]
-        - [163, 7, SimpleBlock, 2, 23228530, 2597, null]
-        - [163, 7, SimpleBlock, 2, 23231130, 2306, null]
-        - [163, 7, SimpleBlock, 2, 23233439, 1768, null]
-        - [163, 7, SimpleBlock, 2, 23235210, 1621, null]
-        - [163, 7, SimpleBlock, 2, 23236834, 4913, null]
-        - [163, 7, SimpleBlock, 2, 23241750, 1419, null]
-        - [163, 7, SimpleBlock, 2, 23243172, 1361, null]
-        - [163, 7, SimpleBlock, 2, 23244536, 1402, null]
-        - [163, 7, SimpleBlock, 2, 23245941, 1566, null]
-        - [163, 7, SimpleBlock, 2, 23247510, 1694, null]
-        - [163, 7, SimpleBlock, 2, 23249207, 4914, null]
-        - [163, 7, SimpleBlock, 2, 23254124, 2301, null]
-        - [163, 7, SimpleBlock, 2, 23256428, 2605, null]
-        - [163, 7, SimpleBlock, 2, 23259036, 2604, null]
-        - [163, 7, SimpleBlock, 2, 23261643, 2765, null]
-        - [163, 7, SimpleBlock, 2, 23264411, 4816, null]
-        - [163, 7, SimpleBlock, 2, 23269230, 2631, null]
-        - [163, 7, SimpleBlock, 2, 23271864, 2608, null]
-        - [163, 7, SimpleBlock, 2, 23274475, 2650, null]
-        - [163, 7, SimpleBlock, 2, 23277128, 3690, null]
-        - [163, 7, SimpleBlock, 2, 23280821, 4659, null]
-        - [163, 7, SimpleBlock, 2, 23285483, 5586, null]
-        - [163, 7, SimpleBlock, 2, 23291072, 5402, null]
-        - [163, 7, SimpleBlock, 2, 23296477, 5586, null]
-        - [163, 7, SimpleBlock, 2, 23302066, 4918, null]
-        - [163, 7, SimpleBlock, 2, 23306987, 3245, null]
-        - [163, 7, SimpleBlock, 2, 23310235, 3349, null]
-        - [163, 7, SimpleBlock, 2, 23313587, 5393, null]
-        - [163, 7, SimpleBlock, 2, 23318983, 3421, null]
-        - [163, 7, SimpleBlock, 2, 23322407, 3249, null]
-        - [163, 7, SimpleBlock, 2, 23325659, 2877, null]
-        - [163, 7, SimpleBlock, 2, 23328539, 2659, null]
-        - [163, 7, SimpleBlock, 2, 23331201, 2410, null]
-        - [163, 7, SimpleBlock, 2, 23333614, 2300, null]
-        - [163, 7, SimpleBlock, 2, 23335917, 1891, null]
-        - [163, 7, SimpleBlock, 2, 23337811, 1526, null]
diff --git a/lib/enzyme/tests/test_mkv.py b/lib/enzyme/tests/test_mkv.py
deleted file mode 100644
index 2403661e5296203f8d2fb52a5e38ef13cae2b68d..0000000000000000000000000000000000000000
--- a/lib/enzyme/tests/test_mkv.py
+++ /dev/null
@@ -1,607 +0,0 @@
-# -*- coding: utf-8 -*-
-from datetime import timedelta, datetime
-from enzyme.mkv import MKV, VIDEO_TRACK, AUDIO_TRACK, SUBTITLE_TRACK
-import io
-import os.path
-import requests
-import unittest
-import zipfile
-
-
-# Test directory
-TEST_DIR = os.path.join(os.path.dirname(__file__), os.path.splitext(__file__)[0])
-
-
-def setUpModule():
-    if not os.path.exists(TEST_DIR):
-        r = requests.get('http://downloads.sourceforge.net/project/matroska/test_files/matroska_test_w1_1.zip')
-        with zipfile.ZipFile(io.BytesIO(r.content), 'r') as f:
-            f.extractall(TEST_DIR)
-
-
-class MKVTestCase(unittest.TestCase):
-    def test_test1(self):
-        with io.open(os.path.join(TEST_DIR, 'test1.mkv'), 'rb') as stream:
-            mkv = MKV(stream)
-        # info
-        self.assertTrue(mkv.info.title is None)
-        self.assertTrue(mkv.info.duration == timedelta(minutes=1, seconds=27, milliseconds=336))
-        self.assertTrue(mkv.info.date_utc == datetime(2010, 8, 21, 7, 23, 3))
-        self.assertTrue(mkv.info.muxing_app == 'libebml2 v0.10.0 + libmatroska2 v0.10.1')
-        self.assertTrue(mkv.info.writing_app == 'mkclean 0.5.5 ru from libebml v1.0.0 + libmatroska v1.0.0 + mkvmerge v4.1.1 (\'Bouncin\' Back\') built on Jul  3 2010 22:54:08')
-        # video track
-        self.assertTrue(len(mkv.video_tracks) == 1)
-        self.assertTrue(mkv.video_tracks[0].type == VIDEO_TRACK)
-        self.assertTrue(mkv.video_tracks[0].number == 1)
-        self.assertTrue(mkv.video_tracks[0].name is None)
-        self.assertTrue(mkv.video_tracks[0].language == 'und')
-        self.assertTrue(mkv.video_tracks[0].enabled == True)
-        self.assertTrue(mkv.video_tracks[0].default == True)
-        self.assertTrue(mkv.video_tracks[0].forced == False)
-        self.assertTrue(mkv.video_tracks[0].lacing == False)
-        self.assertTrue(mkv.video_tracks[0].codec_id == 'V_MS/VFW/FOURCC')
-        self.assertTrue(mkv.video_tracks[0].codec_name is None)
-        self.assertTrue(mkv.video_tracks[0].width == 854)
-        self.assertTrue(mkv.video_tracks[0].height == 480)
-        self.assertTrue(mkv.video_tracks[0].interlaced == False)
-        self.assertTrue(mkv.video_tracks[0].stereo_mode is None)
-        self.assertTrue(mkv.video_tracks[0].crop == {})
-        self.assertTrue(mkv.video_tracks[0].display_width is None)
-        self.assertTrue(mkv.video_tracks[0].display_height is None)
-        self.assertTrue(mkv.video_tracks[0].display_unit is None)
-        self.assertTrue(mkv.video_tracks[0].aspect_ratio_type is None)
-        # audio track
-        self.assertTrue(len(mkv.audio_tracks) == 1)
-        self.assertTrue(mkv.audio_tracks[0].type == AUDIO_TRACK)
-        self.assertTrue(mkv.audio_tracks[0].number == 2)
-        self.assertTrue(mkv.audio_tracks[0].name is None)
-        self.assertTrue(mkv.audio_tracks[0].language == 'und')
-        self.assertTrue(mkv.audio_tracks[0].enabled == True)
-        self.assertTrue(mkv.audio_tracks[0].default == True)
-        self.assertTrue(mkv.audio_tracks[0].forced == False)
-        self.assertTrue(mkv.audio_tracks[0].lacing == True)
-        self.assertTrue(mkv.audio_tracks[0].codec_id == 'A_MPEG/L3')
-        self.assertTrue(mkv.audio_tracks[0].codec_name is None)
-        self.assertTrue(mkv.audio_tracks[0].sampling_frequency == 48000.0)
-        self.assertTrue(mkv.audio_tracks[0].channels == 2)
-        self.assertTrue(mkv.audio_tracks[0].output_sampling_frequency is None)
-        self.assertTrue(mkv.audio_tracks[0].bit_depth is None)
-        # subtitle track
-        self.assertTrue(len(mkv.subtitle_tracks) == 0)
-        # chapters
-        self.assertTrue(len(mkv.chapters) == 0)
-        # tags
-        self.assertTrue(len(mkv.tags) == 1)
-        self.assertTrue(len(mkv.tags[0].simpletags) == 3)
-        self.assertTrue(mkv.tags[0].simpletags[0].name == 'TITLE')
-        self.assertTrue(mkv.tags[0].simpletags[0].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[0].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[0].string == 'Big Buck Bunny - test 1')
-        self.assertTrue(mkv.tags[0].simpletags[0].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[1].name == 'DATE_RELEASED')
-        self.assertTrue(mkv.tags[0].simpletags[1].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[1].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[1].string == '2010')
-        self.assertTrue(mkv.tags[0].simpletags[1].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[2].name == 'COMMENT')
-        self.assertTrue(mkv.tags[0].simpletags[2].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[2].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[2].string == 'Matroska Validation File1, basic MPEG4.2 and MP3 with only SimpleBlock')
-        self.assertTrue(mkv.tags[0].simpletags[2].binary is None)
-
-    def test_test2(self):
-        with io.open(os.path.join(TEST_DIR, 'test2.mkv'), 'rb') as stream:
-            mkv = MKV(stream)
-        # info
-        self.assertTrue(mkv.info.title is None)
-        self.assertTrue(mkv.info.duration == timedelta(seconds=47, milliseconds=509))
-        self.assertTrue(mkv.info.date_utc == datetime(2011, 6, 2, 12, 45, 20))
-        self.assertTrue(mkv.info.muxing_app == 'libebml2 v0.21.0 + libmatroska2 v0.22.1')
-        self.assertTrue(mkv.info.writing_app == 'mkclean 0.8.3 ru from libebml2 v0.10.0 + libmatroska2 v0.10.1 + mkclean 0.5.5 ru from libebml v1.0.0 + libmatroska v1.0.0 + mkvmerge v4.1.1 (\'Bouncin\' Back\') built on Jul  3 2010 22:54:08')
-        # video track
-        self.assertTrue(len(mkv.video_tracks) == 1)
-        self.assertTrue(mkv.video_tracks[0].type == VIDEO_TRACK)
-        self.assertTrue(mkv.video_tracks[0].number == 1)
-        self.assertTrue(mkv.video_tracks[0].name is None)
-        self.assertTrue(mkv.video_tracks[0].language == 'und')
-        self.assertTrue(mkv.video_tracks[0].enabled == True)
-        self.assertTrue(mkv.video_tracks[0].default == True)
-        self.assertTrue(mkv.video_tracks[0].forced == False)
-        self.assertTrue(mkv.video_tracks[0].lacing == False)
-        self.assertTrue(mkv.video_tracks[0].codec_id == 'V_MPEG4/ISO/AVC')
-        self.assertTrue(mkv.video_tracks[0].codec_name is None)
-        self.assertTrue(mkv.video_tracks[0].width == 1024)
-        self.assertTrue(mkv.video_tracks[0].height == 576)
-        self.assertTrue(mkv.video_tracks[0].interlaced == False)
-        self.assertTrue(mkv.video_tracks[0].stereo_mode is None)
-        self.assertTrue(mkv.video_tracks[0].crop == {})
-        self.assertTrue(mkv.video_tracks[0].display_width == 1354)
-        self.assertTrue(mkv.video_tracks[0].display_height is None)
-        self.assertTrue(mkv.video_tracks[0].display_unit is None)
-        self.assertTrue(mkv.video_tracks[0].aspect_ratio_type is None)
-        # audio track
-        self.assertTrue(len(mkv.audio_tracks) == 1)
-        self.assertTrue(mkv.audio_tracks[0].type == AUDIO_TRACK)
-        self.assertTrue(mkv.audio_tracks[0].number == 2)
-        self.assertTrue(mkv.audio_tracks[0].name is None)
-        self.assertTrue(mkv.audio_tracks[0].language == 'und')
-        self.assertTrue(mkv.audio_tracks[0].enabled == True)
-        self.assertTrue(mkv.audio_tracks[0].default == True)
-        self.assertTrue(mkv.audio_tracks[0].forced == False)
-        self.assertTrue(mkv.audio_tracks[0].lacing == True)
-        self.assertTrue(mkv.audio_tracks[0].codec_id == 'A_AAC')
-        self.assertTrue(mkv.audio_tracks[0].codec_name is None)
-        self.assertTrue(mkv.audio_tracks[0].sampling_frequency == 48000.0)
-        self.assertTrue(mkv.audio_tracks[0].channels == 2)
-        self.assertTrue(mkv.audio_tracks[0].output_sampling_frequency is None)
-        self.assertTrue(mkv.audio_tracks[0].bit_depth is None)
-        # subtitle track
-        self.assertTrue(len(mkv.subtitle_tracks) == 0)
-        # chapters
-        self.assertTrue(len(mkv.chapters) == 0)
-        # tags
-        self.assertTrue(len(mkv.tags) == 1)
-        self.assertTrue(len(mkv.tags[0].simpletags) == 3)
-        self.assertTrue(mkv.tags[0].simpletags[0].name == 'TITLE')
-        self.assertTrue(mkv.tags[0].simpletags[0].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[0].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[0].string == 'Elephant Dream - test 2')
-        self.assertTrue(mkv.tags[0].simpletags[0].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[1].name == 'DATE_RELEASED')
-        self.assertTrue(mkv.tags[0].simpletags[1].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[1].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[1].string == '2010')
-        self.assertTrue(mkv.tags[0].simpletags[1].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[2].name == 'COMMENT')
-        self.assertTrue(mkv.tags[0].simpletags[2].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[2].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[2].string == 'Matroska Validation File 2, 100,000 timecode scale, odd aspect ratio, and CRC-32. Codecs are AVC and AAC')
-        self.assertTrue(mkv.tags[0].simpletags[2].binary is None)
-
-    def test_test3(self):
-        with io.open(os.path.join(TEST_DIR, 'test3.mkv'), 'rb') as stream:
-            mkv = MKV(stream)
-        # info
-        self.assertTrue(mkv.info.title is None)
-        self.assertTrue(mkv.info.duration == timedelta(seconds=49, milliseconds=64))
-        self.assertTrue(mkv.info.date_utc == datetime(2010, 8, 21, 21, 43, 25))
-        self.assertTrue(mkv.info.muxing_app == 'libebml2 v0.11.0 + libmatroska2 v0.10.1')
-        self.assertTrue(mkv.info.writing_app == 'mkclean 0.5.5 ro from libebml v1.0.0 + libmatroska v1.0.0 + mkvmerge v4.1.1 (\'Bouncin\' Back\') built on Jul  3 2010 22:54:08')
-        # video track
-        self.assertTrue(len(mkv.video_tracks) == 1)
-        self.assertTrue(mkv.video_tracks[0].type == VIDEO_TRACK)
-        self.assertTrue(mkv.video_tracks[0].number == 1)
-        self.assertTrue(mkv.video_tracks[0].name is None)
-        self.assertTrue(mkv.video_tracks[0].language == 'und')
-        self.assertTrue(mkv.video_tracks[0].enabled == True)
-        self.assertTrue(mkv.video_tracks[0].default == True)
-        self.assertTrue(mkv.video_tracks[0].forced == False)
-        self.assertTrue(mkv.video_tracks[0].lacing == False)
-        self.assertTrue(mkv.video_tracks[0].codec_id == 'V_MPEG4/ISO/AVC')
-        self.assertTrue(mkv.video_tracks[0].codec_name is None)
-        self.assertTrue(mkv.video_tracks[0].width == 1024)
-        self.assertTrue(mkv.video_tracks[0].height == 576)
-        self.assertTrue(mkv.video_tracks[0].interlaced == False)
-        self.assertTrue(mkv.video_tracks[0].stereo_mode is None)
-        self.assertTrue(mkv.video_tracks[0].crop == {})
-        self.assertTrue(mkv.video_tracks[0].display_width is None)
-        self.assertTrue(mkv.video_tracks[0].display_height is None)
-        self.assertTrue(mkv.video_tracks[0].display_unit is None)
-        self.assertTrue(mkv.video_tracks[0].aspect_ratio_type is None)
-        # audio track
-        self.assertTrue(len(mkv.audio_tracks) == 1)
-        self.assertTrue(mkv.audio_tracks[0].type == AUDIO_TRACK)
-        self.assertTrue(mkv.audio_tracks[0].number == 2)
-        self.assertTrue(mkv.audio_tracks[0].name is None)
-        self.assertTrue(mkv.audio_tracks[0].language is None)
-        self.assertTrue(mkv.audio_tracks[0].enabled == True)
-        self.assertTrue(mkv.audio_tracks[0].default == True)
-        self.assertTrue(mkv.audio_tracks[0].forced == False)
-        self.assertTrue(mkv.audio_tracks[0].lacing == True)
-        self.assertTrue(mkv.audio_tracks[0].codec_id == 'A_MPEG/L3')
-        self.assertTrue(mkv.audio_tracks[0].codec_name is None)
-        self.assertTrue(mkv.audio_tracks[0].sampling_frequency == 48000.0)
-        self.assertTrue(mkv.audio_tracks[0].channels == 2)
-        self.assertTrue(mkv.audio_tracks[0].output_sampling_frequency is None)
-        self.assertTrue(mkv.audio_tracks[0].bit_depth is None)
-        # subtitle track
-        self.assertTrue(len(mkv.subtitle_tracks) == 0)
-        # chapters
-        self.assertTrue(len(mkv.chapters) == 0)
-        # tags
-        self.assertTrue(len(mkv.tags) == 1)
-        self.assertTrue(len(mkv.tags[0].simpletags) == 3)
-        self.assertTrue(mkv.tags[0].simpletags[0].name == 'TITLE')
-        self.assertTrue(mkv.tags[0].simpletags[0].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[0].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[0].string == 'Elephant Dream - test 3')
-        self.assertTrue(mkv.tags[0].simpletags[0].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[1].name == 'DATE_RELEASED')
-        self.assertTrue(mkv.tags[0].simpletags[1].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[1].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[1].string == '2010')
-        self.assertTrue(mkv.tags[0].simpletags[1].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[2].name == 'COMMENT')
-        self.assertTrue(mkv.tags[0].simpletags[2].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[2].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[2].string == 'Matroska Validation File 3, header stripping on the video track and no SimpleBlock')
-        self.assertTrue(mkv.tags[0].simpletags[2].binary is None)
-
-    def test_test5(self):
-        with io.open(os.path.join(TEST_DIR, 'test5.mkv'), 'rb') as stream:
-            mkv = MKV(stream)
-        # info
-        self.assertTrue(mkv.info.title is None)
-        self.assertTrue(mkv.info.duration == timedelta(seconds=46, milliseconds=665))
-        self.assertTrue(mkv.info.date_utc == datetime(2010, 8, 21, 18, 6, 43))
-        self.assertTrue(mkv.info.muxing_app == 'libebml v1.0.0 + libmatroska v1.0.0')
-        self.assertTrue(mkv.info.writing_app == 'mkvmerge v4.0.0 (\'The Stars were mine\') built on Jun  6 2010 16:18:42')
-        # video track
-        self.assertTrue(len(mkv.video_tracks) == 1)
-        self.assertTrue(mkv.video_tracks[0].type == VIDEO_TRACK)
-        self.assertTrue(mkv.video_tracks[0].number == 1)
-        self.assertTrue(mkv.video_tracks[0].name is None)
-        self.assertTrue(mkv.video_tracks[0].language == 'und')
-        self.assertTrue(mkv.video_tracks[0].enabled == True)
-        self.assertTrue(mkv.video_tracks[0].default == True)
-        self.assertTrue(mkv.video_tracks[0].forced == False)
-        self.assertTrue(mkv.video_tracks[0].lacing == False)
-        self.assertTrue(mkv.video_tracks[0].codec_id == 'V_MPEG4/ISO/AVC')
-        self.assertTrue(mkv.video_tracks[0].codec_name is None)
-        self.assertTrue(mkv.video_tracks[0].width == 1024)
-        self.assertTrue(mkv.video_tracks[0].height == 576)
-        self.assertTrue(mkv.video_tracks[0].interlaced == False)
-        self.assertTrue(mkv.video_tracks[0].stereo_mode is None)
-        self.assertTrue(mkv.video_tracks[0].crop == {})
-        self.assertTrue(mkv.video_tracks[0].display_width == 1024)
-        self.assertTrue(mkv.video_tracks[0].display_height == 576)
-        self.assertTrue(mkv.video_tracks[0].display_unit is None)
-        self.assertTrue(mkv.video_tracks[0].aspect_ratio_type is None)
-        # audio tracks
-        self.assertTrue(len(mkv.audio_tracks) == 2)
-        self.assertTrue(mkv.audio_tracks[0].type == AUDIO_TRACK)
-        self.assertTrue(mkv.audio_tracks[0].number == 2)
-        self.assertTrue(mkv.audio_tracks[0].name is None)
-        self.assertTrue(mkv.audio_tracks[0].language == 'und')
-        self.assertTrue(mkv.audio_tracks[0].enabled == True)
-        self.assertTrue(mkv.audio_tracks[0].default == True)
-        self.assertTrue(mkv.audio_tracks[0].forced == False)
-        self.assertTrue(mkv.audio_tracks[0].lacing == True)
-        self.assertTrue(mkv.audio_tracks[0].codec_id == 'A_AAC')
-        self.assertTrue(mkv.audio_tracks[0].codec_name is None)
-        self.assertTrue(mkv.audio_tracks[0].sampling_frequency == 48000.0)
-        self.assertTrue(mkv.audio_tracks[0].channels == 2)
-        self.assertTrue(mkv.audio_tracks[0].output_sampling_frequency is None)
-        self.assertTrue(mkv.audio_tracks[0].bit_depth is None)
-        self.assertTrue(mkv.audio_tracks[1].type == AUDIO_TRACK)
-        self.assertTrue(mkv.audio_tracks[1].number == 10)
-        self.assertTrue(mkv.audio_tracks[1].name == 'Commentary')
-        self.assertTrue(mkv.audio_tracks[1].language is None)
-        self.assertTrue(mkv.audio_tracks[1].enabled == True)
-        self.assertTrue(mkv.audio_tracks[1].default == False)
-        self.assertTrue(mkv.audio_tracks[1].forced == False)
-        self.assertTrue(mkv.audio_tracks[1].lacing == True)
-        self.assertTrue(mkv.audio_tracks[1].codec_id == 'A_AAC')
-        self.assertTrue(mkv.audio_tracks[1].codec_name is None)
-        self.assertTrue(mkv.audio_tracks[1].sampling_frequency == 22050.0)
-        self.assertTrue(mkv.audio_tracks[1].channels == 1)
-        self.assertTrue(mkv.audio_tracks[1].output_sampling_frequency == 44100.0)
-        self.assertTrue(mkv.audio_tracks[1].bit_depth is None)
-        # subtitle tracks
-        self.assertTrue(len(mkv.subtitle_tracks) == 8)
-        self.assertTrue(mkv.subtitle_tracks[0].type == SUBTITLE_TRACK)
-        self.assertTrue(mkv.subtitle_tracks[0].number == 3)
-        self.assertTrue(mkv.subtitle_tracks[0].name is None)
-        self.assertTrue(mkv.subtitle_tracks[0].language is None)
-        self.assertTrue(mkv.subtitle_tracks[0].enabled == True)
-        self.assertTrue(mkv.subtitle_tracks[0].default == True)
-        self.assertTrue(mkv.subtitle_tracks[0].forced == False)
-        self.assertTrue(mkv.subtitle_tracks[0].lacing == False)
-        self.assertTrue(mkv.subtitle_tracks[0].codec_id == 'S_TEXT/UTF8')
-        self.assertTrue(mkv.subtitle_tracks[0].codec_name is None)
-        self.assertTrue(mkv.subtitle_tracks[1].type == SUBTITLE_TRACK)
-        self.assertTrue(mkv.subtitle_tracks[1].number == 4)
-        self.assertTrue(mkv.subtitle_tracks[1].name is None)
-        self.assertTrue(mkv.subtitle_tracks[1].language == 'hun')
-        self.assertTrue(mkv.subtitle_tracks[1].enabled == True)
-        self.assertTrue(mkv.subtitle_tracks[1].default == False)
-        self.assertTrue(mkv.subtitle_tracks[1].forced == False)
-        self.assertTrue(mkv.subtitle_tracks[1].lacing == False)
-        self.assertTrue(mkv.subtitle_tracks[1].codec_id == 'S_TEXT/UTF8')
-        self.assertTrue(mkv.subtitle_tracks[1].codec_name is None)
-        self.assertTrue(mkv.subtitle_tracks[2].type == SUBTITLE_TRACK)
-        self.assertTrue(mkv.subtitle_tracks[2].number == 5)
-        self.assertTrue(mkv.subtitle_tracks[2].name is None)
-        self.assertTrue(mkv.subtitle_tracks[2].language == 'ger')
-        self.assertTrue(mkv.subtitle_tracks[2].enabled == True)
-        self.assertTrue(mkv.subtitle_tracks[2].default == False)
-        self.assertTrue(mkv.subtitle_tracks[2].forced == False)
-        self.assertTrue(mkv.subtitle_tracks[2].lacing == False)
-        self.assertTrue(mkv.subtitle_tracks[2].codec_id == 'S_TEXT/UTF8')
-        self.assertTrue(mkv.subtitle_tracks[2].codec_name is None)
-        self.assertTrue(mkv.subtitle_tracks[3].type == SUBTITLE_TRACK)
-        self.assertTrue(mkv.subtitle_tracks[3].number == 6)
-        self.assertTrue(mkv.subtitle_tracks[3].name is None)
-        self.assertTrue(mkv.subtitle_tracks[3].language == 'fre')
-        self.assertTrue(mkv.subtitle_tracks[3].enabled == True)
-        self.assertTrue(mkv.subtitle_tracks[3].default == False)
-        self.assertTrue(mkv.subtitle_tracks[3].forced == False)
-        self.assertTrue(mkv.subtitle_tracks[3].lacing == False)
-        self.assertTrue(mkv.subtitle_tracks[3].codec_id == 'S_TEXT/UTF8')
-        self.assertTrue(mkv.subtitle_tracks[3].codec_name is None)
-        self.assertTrue(mkv.subtitle_tracks[4].type == SUBTITLE_TRACK)
-        self.assertTrue(mkv.subtitle_tracks[4].number == 8)
-        self.assertTrue(mkv.subtitle_tracks[4].name is None)
-        self.assertTrue(mkv.subtitle_tracks[4].language == 'spa')
-        self.assertTrue(mkv.subtitle_tracks[4].enabled == True)
-        self.assertTrue(mkv.subtitle_tracks[4].default == False)
-        self.assertTrue(mkv.subtitle_tracks[4].forced == False)
-        self.assertTrue(mkv.subtitle_tracks[4].lacing == False)
-        self.assertTrue(mkv.subtitle_tracks[4].codec_id == 'S_TEXT/UTF8')
-        self.assertTrue(mkv.subtitle_tracks[4].codec_name is None)
-        self.assertTrue(mkv.subtitle_tracks[5].type == SUBTITLE_TRACK)
-        self.assertTrue(mkv.subtitle_tracks[5].number == 9)
-        self.assertTrue(mkv.subtitle_tracks[5].name is None)
-        self.assertTrue(mkv.subtitle_tracks[5].language == 'ita')
-        self.assertTrue(mkv.subtitle_tracks[5].enabled == True)
-        self.assertTrue(mkv.subtitle_tracks[5].default == False)
-        self.assertTrue(mkv.subtitle_tracks[5].forced == False)
-        self.assertTrue(mkv.subtitle_tracks[5].lacing == False)
-        self.assertTrue(mkv.subtitle_tracks[5].codec_id == 'S_TEXT/UTF8')
-        self.assertTrue(mkv.subtitle_tracks[5].codec_name is None)
-        self.assertTrue(mkv.subtitle_tracks[6].type == SUBTITLE_TRACK)
-        self.assertTrue(mkv.subtitle_tracks[6].number == 11)
-        self.assertTrue(mkv.subtitle_tracks[6].name is None)
-        self.assertTrue(mkv.subtitle_tracks[6].language == 'jpn')
-        self.assertTrue(mkv.subtitle_tracks[6].enabled == True)
-        self.assertTrue(mkv.subtitle_tracks[6].default == False)
-        self.assertTrue(mkv.subtitle_tracks[6].forced == False)
-        self.assertTrue(mkv.subtitle_tracks[6].lacing == False)
-        self.assertTrue(mkv.subtitle_tracks[6].codec_id == 'S_TEXT/UTF8')
-        self.assertTrue(mkv.subtitle_tracks[6].codec_name is None)
-        self.assertTrue(mkv.subtitle_tracks[7].type == SUBTITLE_TRACK)
-        self.assertTrue(mkv.subtitle_tracks[7].number == 7)
-        self.assertTrue(mkv.subtitle_tracks[7].name is None)
-        self.assertTrue(mkv.subtitle_tracks[7].language == 'und')
-        self.assertTrue(mkv.subtitle_tracks[7].enabled == True)
-        self.assertTrue(mkv.subtitle_tracks[7].default == False)
-        self.assertTrue(mkv.subtitle_tracks[7].forced == False)
-        self.assertTrue(mkv.subtitle_tracks[7].lacing == False)
-        self.assertTrue(mkv.subtitle_tracks[7].codec_id == 'S_TEXT/UTF8')
-        self.assertTrue(mkv.subtitle_tracks[7].codec_name is None)
-        # chapters
-        self.assertTrue(len(mkv.chapters) == 0)
-        # tags
-        self.assertTrue(len(mkv.tags) == 1)
-        self.assertTrue(len(mkv.tags[0].simpletags) == 3)
-        self.assertTrue(mkv.tags[0].simpletags[0].name == 'TITLE')
-        self.assertTrue(mkv.tags[0].simpletags[0].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[0].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[0].string == 'Big Buck Bunny - test 8')
-        self.assertTrue(mkv.tags[0].simpletags[0].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[1].name == 'DATE_RELEASED')
-        self.assertTrue(mkv.tags[0].simpletags[1].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[1].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[1].string == '2010')
-        self.assertTrue(mkv.tags[0].simpletags[1].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[2].name == 'COMMENT')
-        self.assertTrue(mkv.tags[0].simpletags[2].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[2].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[2].string == 'Matroska Validation File 8, secondary audio commentary track, misc subtitle tracks')
-        self.assertTrue(mkv.tags[0].simpletags[2].binary is None)
-
-    def test_test6(self):
-        with io.open(os.path.join(TEST_DIR, 'test6.mkv'), 'rb') as stream:
-            mkv = MKV(stream)
-        # info
-        self.assertTrue(mkv.info.title is None)
-        self.assertTrue(mkv.info.duration == timedelta(seconds=87, milliseconds=336))
-        self.assertTrue(mkv.info.date_utc == datetime(2010, 8, 21, 16, 31, 55))
-        self.assertTrue(mkv.info.muxing_app == 'libebml2 v0.10.1 + libmatroska2 v0.10.1')
-        self.assertTrue(mkv.info.writing_app == 'mkclean 0.5.5 r from libebml v1.0.0 + libmatroska v1.0.0 + mkvmerge v4.0.0 (\'The Stars were mine\') built on Jun  6 2010 16:18:42')
-        # video track
-        self.assertTrue(len(mkv.video_tracks) == 1)
-        self.assertTrue(mkv.video_tracks[0].type == VIDEO_TRACK)
-        self.assertTrue(mkv.video_tracks[0].number == 1)
-        self.assertTrue(mkv.video_tracks[0].name is None)
-        self.assertTrue(mkv.video_tracks[0].language == 'und')
-        self.assertTrue(mkv.video_tracks[0].enabled == True)
-        self.assertTrue(mkv.video_tracks[0].default == False)
-        self.assertTrue(mkv.video_tracks[0].forced == False)
-        self.assertTrue(mkv.video_tracks[0].lacing == False)
-        self.assertTrue(mkv.video_tracks[0].codec_id == 'V_MS/VFW/FOURCC')
-        self.assertTrue(mkv.video_tracks[0].codec_name is None)
-        self.assertTrue(mkv.video_tracks[0].width == 854)
-        self.assertTrue(mkv.video_tracks[0].height == 480)
-        self.assertTrue(mkv.video_tracks[0].interlaced == False)
-        self.assertTrue(mkv.video_tracks[0].stereo_mode is None)
-        self.assertTrue(mkv.video_tracks[0].crop == {})
-        self.assertTrue(mkv.video_tracks[0].display_width is None)
-        self.assertTrue(mkv.video_tracks[0].display_height is None)
-        self.assertTrue(mkv.video_tracks[0].display_unit is None)
-        self.assertTrue(mkv.video_tracks[0].aspect_ratio_type is None)
-        # audio track
-        self.assertTrue(len(mkv.audio_tracks) == 1)
-        self.assertTrue(mkv.audio_tracks[0].type == AUDIO_TRACK)
-        self.assertTrue(mkv.audio_tracks[0].number == 2)
-        self.assertTrue(mkv.audio_tracks[0].name is None)
-        self.assertTrue(mkv.audio_tracks[0].language == 'und')
-        self.assertTrue(mkv.audio_tracks[0].enabled == True)
-        self.assertTrue(mkv.audio_tracks[0].default == False)
-        self.assertTrue(mkv.audio_tracks[0].forced == False)
-        self.assertTrue(mkv.audio_tracks[0].lacing == True)
-        self.assertTrue(mkv.audio_tracks[0].codec_id == 'A_MPEG/L3')
-        self.assertTrue(mkv.audio_tracks[0].codec_name is None)
-        self.assertTrue(mkv.audio_tracks[0].sampling_frequency == 48000.0)
-        self.assertTrue(mkv.audio_tracks[0].channels == 2)
-        self.assertTrue(mkv.audio_tracks[0].output_sampling_frequency is None)
-        self.assertTrue(mkv.audio_tracks[0].bit_depth is None)
-        # subtitle track
-        self.assertTrue(len(mkv.subtitle_tracks) == 0)
-        # chapters
-        self.assertTrue(len(mkv.chapters) == 0)
-        # tags
-        self.assertTrue(len(mkv.tags) == 1)
-        self.assertTrue(len(mkv.tags[0].simpletags) == 3)
-        self.assertTrue(mkv.tags[0].simpletags[0].name == 'TITLE')
-        self.assertTrue(mkv.tags[0].simpletags[0].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[0].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[0].string == 'Big Buck Bunny - test 6')
-        self.assertTrue(mkv.tags[0].simpletags[0].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[1].name == 'DATE_RELEASED')
-        self.assertTrue(mkv.tags[0].simpletags[1].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[1].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[1].string == '2010')
-        self.assertTrue(mkv.tags[0].simpletags[1].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[2].name == 'COMMENT')
-        self.assertTrue(mkv.tags[0].simpletags[2].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[2].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[2].string == 'Matroska Validation File 6, random length to code the size of Clusters and Blocks, no Cues for seeking')
-        self.assertTrue(mkv.tags[0].simpletags[2].binary is None)
-
-    def test_test7(self):
-        with io.open(os.path.join(TEST_DIR, 'test7.mkv'), 'rb') as stream:
-            mkv = MKV(stream)
-        # info
-        self.assertTrue(mkv.info.title is None)
-        self.assertTrue(mkv.info.duration == timedelta(seconds=37, milliseconds=43))
-        self.assertTrue(mkv.info.date_utc == datetime(2010, 8, 21, 17, 0, 23))
-        self.assertTrue(mkv.info.muxing_app == 'libebml2 v0.10.1 + libmatroska2 v0.10.1')
-        self.assertTrue(mkv.info.writing_app == 'mkclean 0.5.5 r from libebml v1.0.0 + libmatroska v1.0.0 + mkvmerge v4.0.0 (\'The Stars were mine\') built on Jun  6 2010 16:18:42')
-        # video track
-        self.assertTrue(len(mkv.video_tracks) == 1)
-        self.assertTrue(mkv.video_tracks[0].type == VIDEO_TRACK)
-        self.assertTrue(mkv.video_tracks[0].number == 1)
-        self.assertTrue(mkv.video_tracks[0].name is None)
-        self.assertTrue(mkv.video_tracks[0].language == 'und')
-        self.assertTrue(mkv.video_tracks[0].enabled == True)
-        self.assertTrue(mkv.video_tracks[0].default == False)
-        self.assertTrue(mkv.video_tracks[0].forced == False)
-        self.assertTrue(mkv.video_tracks[0].lacing == False)
-        self.assertTrue(mkv.video_tracks[0].codec_id == 'V_MPEG4/ISO/AVC')
-        self.assertTrue(mkv.video_tracks[0].codec_name is None)
-        self.assertTrue(mkv.video_tracks[0].width == 1024)
-        self.assertTrue(mkv.video_tracks[0].height == 576)
-        self.assertTrue(mkv.video_tracks[0].interlaced == False)
-        self.assertTrue(mkv.video_tracks[0].stereo_mode is None)
-        self.assertTrue(mkv.video_tracks[0].crop == {})
-        self.assertTrue(mkv.video_tracks[0].display_width is None)
-        self.assertTrue(mkv.video_tracks[0].display_height is None)
-        self.assertTrue(mkv.video_tracks[0].display_unit is None)
-        self.assertTrue(mkv.video_tracks[0].aspect_ratio_type is None)
-        # audio track
-        self.assertTrue(len(mkv.audio_tracks) == 1)
-        self.assertTrue(mkv.audio_tracks[0].type == AUDIO_TRACK)
-        self.assertTrue(mkv.audio_tracks[0].number == 2)
-        self.assertTrue(mkv.audio_tracks[0].name is None)
-        self.assertTrue(mkv.audio_tracks[0].language == 'und')
-        self.assertTrue(mkv.audio_tracks[0].enabled == True)
-        self.assertTrue(mkv.audio_tracks[0].default == False)
-        self.assertTrue(mkv.audio_tracks[0].forced == False)
-        self.assertTrue(mkv.audio_tracks[0].lacing == True)
-        self.assertTrue(mkv.audio_tracks[0].codec_id == 'A_AAC')
-        self.assertTrue(mkv.audio_tracks[0].codec_name is None)
-        self.assertTrue(mkv.audio_tracks[0].sampling_frequency == 48000.0)
-        self.assertTrue(mkv.audio_tracks[0].channels == 2)
-        self.assertTrue(mkv.audio_tracks[0].output_sampling_frequency is None)
-        self.assertTrue(mkv.audio_tracks[0].bit_depth is None)
-        # subtitle track
-        self.assertTrue(len(mkv.subtitle_tracks) == 0)
-        # chapters
-        self.assertTrue(len(mkv.chapters) == 0)
-        # tags
-        self.assertTrue(len(mkv.tags) == 1)
-        self.assertTrue(len(mkv.tags[0].simpletags) == 3)
-        self.assertTrue(mkv.tags[0].simpletags[0].name == 'TITLE')
-        self.assertTrue(mkv.tags[0].simpletags[0].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[0].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[0].string == 'Big Buck Bunny - test 7')
-        self.assertTrue(mkv.tags[0].simpletags[0].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[1].name == 'DATE_RELEASED')
-        self.assertTrue(mkv.tags[0].simpletags[1].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[1].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[1].string == '2010')
-        self.assertTrue(mkv.tags[0].simpletags[1].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[2].name == 'COMMENT')
-        self.assertTrue(mkv.tags[0].simpletags[2].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[2].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[2].string == 'Matroska Validation File 7, junk elements are present at the beggining or end of clusters, the parser should skip it. There is also a damaged element at 451418')
-        self.assertTrue(mkv.tags[0].simpletags[2].binary is None)
-
-    def test_test8(self):
-        with io.open(os.path.join(TEST_DIR, 'test8.mkv'), 'rb') as stream:
-            mkv = MKV(stream)
-        # info
-        self.assertTrue(mkv.info.title is None)
-        self.assertTrue(mkv.info.duration == timedelta(seconds=47, milliseconds=341))
-        self.assertTrue(mkv.info.date_utc == datetime(2010, 8, 21, 17, 22, 14))
-        self.assertTrue(mkv.info.muxing_app == 'libebml2 v0.10.1 + libmatroska2 v0.10.1')
-        self.assertTrue(mkv.info.writing_app == 'mkclean 0.5.5 r from libebml v1.0.0 + libmatroska v1.0.0 + mkvmerge v4.0.0 (\'The Stars were mine\') built on Jun  6 2010 16:18:42')
-        # video track
-        self.assertTrue(len(mkv.video_tracks) == 1)
-        self.assertTrue(mkv.video_tracks[0].type == VIDEO_TRACK)
-        self.assertTrue(mkv.video_tracks[0].number == 1)
-        self.assertTrue(mkv.video_tracks[0].name is None)
-        self.assertTrue(mkv.video_tracks[0].language == 'und')
-        self.assertTrue(mkv.video_tracks[0].enabled == True)
-        self.assertTrue(mkv.video_tracks[0].default == False)
-        self.assertTrue(mkv.video_tracks[0].forced == False)
-        self.assertTrue(mkv.video_tracks[0].lacing == False)
-        self.assertTrue(mkv.video_tracks[0].codec_id == 'V_MPEG4/ISO/AVC')
-        self.assertTrue(mkv.video_tracks[0].codec_name is None)
-        self.assertTrue(mkv.video_tracks[0].width == 1024)
-        self.assertTrue(mkv.video_tracks[0].height == 576)
-        self.assertTrue(mkv.video_tracks[0].interlaced == False)
-        self.assertTrue(mkv.video_tracks[0].stereo_mode is None)
-        self.assertTrue(mkv.video_tracks[0].crop == {})
-        self.assertTrue(mkv.video_tracks[0].display_width is None)
-        self.assertTrue(mkv.video_tracks[0].display_height is None)
-        self.assertTrue(mkv.video_tracks[0].display_unit is None)
-        self.assertTrue(mkv.video_tracks[0].aspect_ratio_type is None)
-        # audio track
-        self.assertTrue(len(mkv.audio_tracks) == 1)
-        self.assertTrue(mkv.audio_tracks[0].type == AUDIO_TRACK)
-        self.assertTrue(mkv.audio_tracks[0].number == 2)
-        self.assertTrue(mkv.audio_tracks[0].name is None)
-        self.assertTrue(mkv.audio_tracks[0].language == 'und')
-        self.assertTrue(mkv.audio_tracks[0].enabled == True)
-        self.assertTrue(mkv.audio_tracks[0].default == False)
-        self.assertTrue(mkv.audio_tracks[0].forced == False)
-        self.assertTrue(mkv.audio_tracks[0].lacing == True)
-        self.assertTrue(mkv.audio_tracks[0].codec_id == 'A_AAC')
-        self.assertTrue(mkv.audio_tracks[0].codec_name is None)
-        self.assertTrue(mkv.audio_tracks[0].sampling_frequency == 48000.0)
-        self.assertTrue(mkv.audio_tracks[0].channels == 2)
-        self.assertTrue(mkv.audio_tracks[0].output_sampling_frequency is None)
-        self.assertTrue(mkv.audio_tracks[0].bit_depth is None)
-        # subtitle track
-        self.assertTrue(len(mkv.subtitle_tracks) == 0)
-        # chapters
-        self.assertTrue(len(mkv.chapters) == 0)
-        # tags
-        self.assertTrue(len(mkv.tags) == 1)
-        self.assertTrue(len(mkv.tags[0].simpletags) == 3)
-        self.assertTrue(mkv.tags[0].simpletags[0].name == 'TITLE')
-        self.assertTrue(mkv.tags[0].simpletags[0].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[0].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[0].string == 'Big Buck Bunny - test 8')
-        self.assertTrue(mkv.tags[0].simpletags[0].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[1].name == 'DATE_RELEASED')
-        self.assertTrue(mkv.tags[0].simpletags[1].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[1].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[1].string == '2010')
-        self.assertTrue(mkv.tags[0].simpletags[1].binary is None)
-        self.assertTrue(mkv.tags[0].simpletags[2].name == 'COMMENT')
-        self.assertTrue(mkv.tags[0].simpletags[2].default == True)
-        self.assertTrue(mkv.tags[0].simpletags[2].language == 'und')
-        self.assertTrue(mkv.tags[0].simpletags[2].string == 'Matroska Validation File 8, audio missing between timecodes 6.019s and 6.360s')
-        self.assertTrue(mkv.tags[0].simpletags[2].binary is None)
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(MKVTestCase))
-    return suite
-
-if __name__ == '__main__':
-    unittest.TextTestRunner().run(suite())
diff --git a/lib/enzyme/tests/test_parsers.py b/lib/enzyme/tests/test_parsers.py
deleted file mode 100644
index 0fa320ce013b6767e8e70ad537c01499f45968ed..0000000000000000000000000000000000000000
--- a/lib/enzyme/tests/test_parsers.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# -*- coding: utf-8 -*-
-from enzyme.parsers import ebml
-import io
-import os.path
-import requests
-import unittest
-import yaml
-import zipfile
-
-
-# Test directory
-TEST_DIR = os.path.join(os.path.dirname(__file__), os.path.splitext(__file__)[0])
-
-# EBML validation directory
-EBML_VALIDATION_DIR = os.path.join(os.path.dirname(__file__), 'parsers', 'ebml')
-
-
-def setUpModule():
-    if not os.path.exists(TEST_DIR):
-        r = requests.get('http://downloads.sourceforge.net/project/matroska/test_files/matroska_test_w1_1.zip')
-        with zipfile.ZipFile(io.BytesIO(r.content), 'r') as f:
-            f.extractall(TEST_DIR)
-
-
-class EBMLTestCase(unittest.TestCase):
-    def setUp(self):
-        self.stream = io.open(os.path.join(TEST_DIR, 'test1.mkv'), 'rb')
-        with io.open(os.path.join(EBML_VALIDATION_DIR, 'test1.mkv.yml'), 'r') as yml:
-            self.validation = yaml.safe_load(yml)
-        self.specs = ebml.get_matroska_specs()
-
-    def tearDown(self):
-        self.stream.close()
-
-    def check_element(self, element_id, element_type, element_name, element_level, element_position, element_size, element_data, element,
-                      ignore_element_types=None, ignore_element_names=None, max_level=None):
-        """Recursively check an element"""
-        # base
-        self.assertTrue(element.id == element_id)
-        self.assertTrue(element.type == element_type)
-        self.assertTrue(element.name == element_name)
-        self.assertTrue(element.level == element_level)
-        self.assertTrue(element.position == element_position)
-        self.assertTrue(element.size == element_size)
-        # Element
-        if not isinstance(element_data, list):
-            self.assertTrue(type(element) == ebml.Element)
-            if element_type != ebml.BINARY:
-                self.assertTrue(element.data == element_data)
-            return
-        # MasterElement
-        if ignore_element_types is not None:  # filter validation on element types
-            element_data = [e for e in element_data if e[1] not in ignore_element_types]
-        if ignore_element_names is not None:  # filter validation on element names
-            element_data = [e for e in element_data if e[2] not in ignore_element_names]
-        if element.level == max_level:  # special check when maximum level is reached
-            self.assertTrue(element.data is None)
-            return
-        self.assertTrue(len(element.data) == len(element_data))
-        for i in range(len(element.data)):
-            self.check_element(element_data[i][0], element_data[i][1], element_data[i][2], element_data[i][3],
-                               element_data[i][4], element_data[i][5], element_data[i][6], element.data[i], ignore_element_types,
-                               ignore_element_names, max_level)
-
-    def test_parse_full(self):
-        result = ebml.parse(self.stream, self.specs)
-        self.assertTrue(len(result) == len(self.validation))
-        for i in range(len(self.validation)):
-            self.check_element(self.validation[i][0], self.validation[i][1], self.validation[i][2], self.validation[i][3],
-                               self.validation[i][4], self.validation[i][5], self.validation[i][6], result[i])
-
-    def test_parse_ignore_element_types(self):
-        ignore_element_types = [ebml.INTEGER, ebml.BINARY]
-        result = ebml.parse(self.stream, self.specs, ignore_element_types=ignore_element_types)
-        self.validation = [e for e in self.validation if e[1] not in ignore_element_types]
-        self.assertTrue(len(result) == len(self.validation))
-        for i in range(len(self.validation)):
-            self.check_element(self.validation[i][0], self.validation[i][1], self.validation[i][2], self.validation[i][3],
-                               self.validation[i][4], self.validation[i][5], self.validation[i][6], result[i], ignore_element_types=ignore_element_types)
-
-    def test_parse_ignore_element_names(self):
-        ignore_element_names = ['EBML', 'SimpleBlock']
-        result = ebml.parse(self.stream, self.specs, ignore_element_names=ignore_element_names)
-        self.validation = [e for e in self.validation if e[2] not in ignore_element_names]
-        self.assertTrue(len(result) == len(self.validation))
-        for i in range(len(self.validation)):
-            self.check_element(self.validation[i][0], self.validation[i][1], self.validation[i][2], self.validation[i][3],
-                               self.validation[i][4], self.validation[i][5], self.validation[i][6], result[i], ignore_element_names=ignore_element_names)
-
-    def test_parse_max_level(self):
-        max_level = 3
-        result = ebml.parse(self.stream, self.specs, max_level=max_level)
-        self.validation = [e for e in self.validation if e[3] <= max_level]
-        self.assertTrue(len(result) == len(self.validation))
-        for i in range(len(self.validation)):
-            self.check_element(self.validation[i][0], self.validation[i][1], self.validation[i][2], self.validation[i][3],
-                               self.validation[i][4], self.validation[i][5], self.validation[i][6], result[i], max_level=max_level)
-
-
-def generate_yml(filename, specs):
-    """Generate  a validation file for the test video"""
-    def _to_builtin(elements):
-        """Recursively convert elements to built-in types"""
-        result = []
-        for e in elements:
-            if isinstance(e, ebml.MasterElement):
-                result.append((e.id, e.type, e.name, e.level, e.position, e.size, _to_builtin(e.data)))
-            else:
-                result.append((e.id, e.type, e.name, e.level, e.position, e.size, None if isinstance(e.data, io.BytesIO) else e.data))
-        return result
-    video = io.open(os.path.join(TEST_DIR, filename), 'rb')
-    yml = io.open(os.path.join(EBML_VALIDATION_DIR, filename + '.yml'), 'w')
-    yaml.safe_dump(_to_builtin(ebml.parse(video, specs)), yml)
-
-
-def suite():
-    suite = unittest.TestSuite()
-    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(EBMLTestCase))
-    return suite
-
-if __name__ == '__main__':
-    unittest.TextTestRunner().run(suite())
diff --git a/lib/fanart/tests/__init__.py b/lib/fanart/tests/__init__.py
deleted file mode 100644
index 957cbe388c57468295787d5cde0dad6b381dffbf..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-import os
-
-LOCALDIR = os.path.dirname(__file__)
diff --git a/lib/fanart/tests/json/wilfred.json b/lib/fanart/tests/json/wilfred.json
deleted file mode 100644
index 2065f9cfe054c6c6da717405fa0fc2fb9d33252e..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/json/wilfred.json
+++ /dev/null
@@ -1,196 +0,0 @@
-{
-    "logos": [
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-4e04b6495dfd3.png",
-            "likes": 2,
-            "id": 11977
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-517ac36e39f67.png",
-            "likes": 1,
-            "id": 28249
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-51f557082cfde.png",
-            "likes": 0,
-            "id": 31817
-        }
-    ],
-    "arts": [
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/clearart/wilfred-us-4e05f10e87711.png",
-            "likes": 2,
-            "id": 11987
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/clearart/wilfred-us-4e2f151d5ed62.png",
-            "likes": 1,
-            "id": 12470
-        }
-    ],
-    "name": "Wilfred (US)",
-    "hdarts": [
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/hdclearart/wilfred-us-505f94ed0ba13.png",
-            "likes": 1,
-            "id": 21112
-        },
-        {
-            "lang": "he",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/hdclearart/wilfred-us-52403264aa3ec.png",
-            "likes": 1,
-            "id": 33751
-        }
-    ],
-    "backgrounds": [
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-5034dbd49115e.jpg",
-            "id": 19965,
-            "season": 0,
-            "likes": 0
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92db6973.jpg",
-            "id": 23166,
-            "season": 0,
-            "likes": 0
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92dbb46b.jpg",
-            "id": 23167,
-            "season": 0,
-            "likes": 0
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92dbb9d1.jpg",
-            "id": 23168,
-            "season": 0,
-            "likes": 0
-        }
-    ],
-    "thumbs": [
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/tvthumb/wilfred-us-501cf526174fe.jpg",
-            "likes": 1,
-            "id": 19596
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/tvthumb/wilfred-us-51bfb4a105904.jpg",
-            "likes": 0,
-            "id": 30060
-        }
-    ],
-    "characters": [],
-    "posters": [
-        {
-            "lang": "he",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/tvposter/wilfred-us-525d893230d7c.jpg",
-            "likes": 1,
-            "id": 34584
-        }
-    ],
-    "seasons": [
-        {
-            "lang": "he",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-52403782bab55.jpg",
-            "id": 33752,
-            "season": 1,
-            "likes": 1
-        },
-        {
-            "lang": "he",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-5240379335232.jpg",
-            "id": 33753,
-            "season": 2,
-            "likes": 1
-        },
-        {
-            "lang": "he",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-524037bc83c7d.jpg",
-            "id": 33754,
-            "season": 3,
-            "likes": 1
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb0a8e60f9.jpg",
-            "id": 19586,
-            "season": 1,
-            "likes": 0
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb0b4bf229.jpg",
-            "id": 19587,
-            "season": 2,
-            "likes": 0
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb144e6a46.jpg",
-            "id": 19588,
-            "season": 0,
-            "likes": 0
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-51c953105ef77.jpg",
-            "id": 30309,
-            "season": 3,
-            "likes": 0
-        }
-    ],
-    "banners": [
-        {
-            "lang": "he",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/tvbanner/wilfred-us-52403a7185070.jpg",
-            "likes": 1,
-            "id": 33755
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/tvbanner/wilfred-us-5265193db51f7.jpg",
-            "likes": 0,
-            "id": 34716
-        }
-    ],
-    "hdlogos": [
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-505f373be58e6.png",
-            "likes": 1,
-            "id": 21101
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-517ac360def17.png",
-            "likes": 1,
-            "id": 28248
-        },
-        {
-            "lang": "he",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-52402df7ed945.png",
-            "likes": 1,
-            "id": 33750
-        },
-        {
-            "lang": "en",
-            "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-51f556fb4abd3.png",
-            "likes": 0,
-            "id": 31816
-        }
-    ],
-    "tvdbid": "239761"
-}
diff --git a/lib/fanart/tests/response/50x50.png b/lib/fanart/tests/response/50x50.png
deleted file mode 100644
index 112875e86d8f45e615a61461608ccd9f19a3dd08..0000000000000000000000000000000000000000
Binary files a/lib/fanart/tests/response/50x50.png and /dev/null differ
diff --git a/lib/fanart/tests/response/movie_thg.json b/lib/fanart/tests/response/movie_thg.json
deleted file mode 100644
index 77b6130e9983e5e07922e6ebbf0caed068824740..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/response/movie_thg.json
+++ /dev/null
@@ -1,174 +0,0 @@
-{
-    "The Hunger Games": {
-        "tmdb_id": "70160",
-        "imdb_id": "tt1392170",
-        "movieart": [
-            {
-                "id": "1226",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/movieart/the-hunger-games-4f6dc995edb8f.png",
-                "lang": "en",
-                "likes": "3"
-            },
-            {
-                "id": "1225",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/movieart/the-hunger-games-4f6dc980b4514.png",
-                "lang": "en",
-                "likes": "1"
-            }
-        ],
-        "movielogo": [
-            {
-                "id": "1230",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/movielogo/the-hunger-games-4f6e0e63a9d29.png",
-                "lang": "en",
-                "likes": "2"
-            },
-            {
-                "id": "8020",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/movielogo/the-hunger-games-5018f873b5188.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "1224",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/movielogo/the-hunger-games-4f6dc95a08de1.png",
-                "lang": "en",
-                "likes": "0"
-            }
-        ],
-        "moviedisc": [
-            {
-                "id": "8431",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviedisc/the-hunger-games-501db4437623f.png",
-                "lang": "en",
-                "likes": "1",
-                "disc": "1",
-                "disc_type": "dvd"
-            },
-            {
-                "id": "9787",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviedisc/the-hunger-games-502fd6d695a60.png",
-                "lang": "en",
-                "likes": "1",
-                "disc": "1",
-                "disc_type": "bluray"
-            }
-        ],
-        "moviethumb": [
-            {
-                "id": "10687",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviethumb/the-hunger-games-503c88b32cf66.jpg",
-                "lang": "en",
-                "likes": "0"
-            }
-        ],
-        "hdmovielogo": [
-            {
-                "id": "13004",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/hdmovielogo/the-hunger-games-50500118613e3.png",
-                "lang": "en",
-                "likes": "0"
-            }
-        ],
-        "moviebackground": [
-            {
-                "id": "14043",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5057c79ad3c56.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "14044",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5057c79ad5526.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15911",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071de49311d1.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15914",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071df619b835.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15917",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e01fee856.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15918",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e0adcc57a.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15919",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e12006159.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15921",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e206aa2ac.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15922",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e2869d774.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15925",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e30069b72.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15927",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e3c4979b7.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15930",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e5b3f039b.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15931",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e6369e812.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15936",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e8749e73a.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "15937",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/moviebackground/the-hunger-games-5071e9913bfeb.jpg",
-                "lang": "en",
-                "likes": "0"
-            }
-        ],
-        "hdmovieclearart": [
-            {
-                "id": "14104",
-                "url": "http://assets.fanart.tv/fanart/movies/70160/hdmovieclearart/the-hunger-games-50582453b1375.png",
-                "lang": "en",
-                "likes": "0"
-            }
-        ]
-    }
-}
diff --git a/lib/fanart/tests/response/music_a7f.json b/lib/fanart/tests/response/music_a7f.json
deleted file mode 100644
index de1a123fca3349dee52157e43d28ca057d458f99..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/response/music_a7f.json
+++ /dev/null
@@ -1,171 +0,0 @@
-{
-    "Avenged Sevenfold": {
-        "mbid_id": "24e1b53c-3085-4581-8472-0b0088d2508c",
-        "artistbackground": [
-            {
-                "id": "3027",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/artistbackground/avenged-sevenfold-4ddd7889a0fcf.jpg",
-                "likes": "0"
-            },
-            {
-                "id": "64046",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/artistbackground/avenged-sevenfold-50c4db9a2c6e2.jpg",
-                "likes": "0"
-            },
-            {
-                "id": "64048",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/artistbackground/avenged-sevenfold-50c4dc653f004.jpg",
-                "likes": "0"
-            }
-        ],
-        "albums": {
-            "180560ee-2d9d-33cf-8de7-cdaaba610739": {
-                "albumcover": [
-                    {
-                        "id": "3028",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/albumcover/city-of-evil-4ddd79ca0beea.jpg",
-                        "likes": "0"
-                    }
-                ],
-                "cdart": [
-                    {
-                        "id": "9921",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/cdart/city-of-evil-4e5f7b9f50d37.png",
-                        "likes": "0",
-                        "disc": "1",
-                        "size": "1000"
-                    }
-                ]
-            },
-            "1c7120ae-32b6-3693-8974-599977b01601": {
-                "albumcover": [
-                    {
-                        "id": "3029",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/albumcover/waking-the-fallen-4ddd79ca1b11e.jpg",
-                        "likes": "0"
-                    }
-                ],
-                "cdart": [
-                    {
-                        "id": "9922",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/cdart/waking-the-fallen-4e5f7b9f5ebdf.png",
-                        "likes": "0",
-                        "disc": "1",
-                        "size": "1000"
-                    }
-                ]
-            },
-            "94672194-7f42-3965-a489-f2f3cdc1c79e": {
-                "albumcover": [
-                    {
-                        "id": "3030",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/albumcover/avenged-sevenfold-4ddd79ca1bcd6.jpg",
-                        "likes": "0"
-                    }
-                ],
-                "cdart": [
-                    {
-                        "id": "9923",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/cdart/avenged-sevenfold-4e5f7b9f5fb7f.png",
-                        "likes": "0",
-                        "disc": "1",
-                        "size": "1000"
-                    }
-                ]
-            },
-            "9d642393-0005-3e89-b3d4-35d89c2f6ad6": {
-                "albumcover": [
-                    {
-                        "id": "3031",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/albumcover/sounding-the-seventh-trumpet-4ddd79ca1d05e.jpg",
-                        "likes": "0"
-                    }
-                ],
-                "cdart": [
-                    {
-                        "id": "9924",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/cdart/sounding-the-seventh-trumpet-4e5f7b9f62e47.png",
-                        "likes": "0",
-                        "disc": "1",
-                        "size": "1000"
-                    }
-                ]
-            },
-            "fe4373ed-5e89-46b3-b4c0-31433ce217df": {
-                "albumcover": [
-                    {
-                        "id": "3032",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/albumcover/nightmare-4ddd79ca1dffe.jpg",
-                        "likes": "0"
-                    }
-                ],
-                "cdart": [
-                    {
-                        "id": "11630",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/cdart/nightmare-4e8059a3c581c.png",
-                        "likes": "0",
-                        "disc": "1",
-                        "size": "1000"
-                    }
-                ]
-            },
-            "41d1b72b-1eee-3319-937f-c85d6d2fcfbb": {
-                "albumcover": [
-                    {
-                        "id": "61014",
-                        "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/albumcover/warmness-on-the-soul-509d2e9150bf4.jpg",
-                        "likes": "0"
-                    }
-                ]
-            }
-        },
-        "musiclogo": [
-            {
-                "id": "5712",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/musiclogo/avenged-sevenfold-4dfc8aee78b49.png",
-                "likes": "0"
-            },
-            {
-                "id": "41835",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/musiclogo/avenged-sevenfold-4ffc75f3a7e54.png",
-                "likes": "0"
-            },
-            {
-                "id": "41836",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/musiclogo/avenged-sevenfold-4ffc75f3a8473.png",
-                "likes": "0"
-            }
-        ],
-        "artistthumb": [
-            {
-                "id": "31109",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/artistthumb/avenged-sevenfold-4fb2b533bc73a.jpg",
-                "likes": "0"
-            },
-            {
-                "id": "64042",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/artistthumb/avenged-sevenfold-50c4d9279d6e9.jpg",
-                "likes": "0"
-            }
-        ],
-        "hdmusiclogo": [
-            {
-                "id": "49644",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/hdmusiclogo/avenged-sevenfold-503fcebece042.png",
-                "likes": "0"
-            },
-            {
-                "id": "49645",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/hdmusiclogo/avenged-sevenfold-503fcebecf17e.png",
-                "likes": "0"
-            }
-        ],
-        "musicbanner": [
-            {
-                "id": "52630",
-                "url": "http://assets.fanart.tv/fanart/music/24e1b53c-3085-4581-8472-0b0088d2508c/musicbanner/avenged-sevenfold-505b2346a559d.jpg",
-                "likes": "0"
-            }
-        ]
-    }
-}
diff --git a/lib/fanart/tests/response/tv_239761.json b/lib/fanart/tests/response/tv_239761.json
deleted file mode 100644
index bce4fda2574eeaf78f39b89c3fdaca575a4c7558..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/response/tv_239761.json
+++ /dev/null
@@ -1,196 +0,0 @@
-{
-    "Wilfred (US)": {
-        "hdclearart": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/hdclearart/wilfred-us-505f94ed0ba13.png",
-                "lang": "en",
-                "id": "21112",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/hdclearart/wilfred-us-52403264aa3ec.png",
-                "lang": "he",
-                "id": "33751",
-                "likes": "1"
-            }
-        ],
-        "seasonthumb": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-52403782bab55.jpg",
-                "lang": "he",
-                "id": "33752",
-                "season": "1",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-5240379335232.jpg",
-                "lang": "he",
-                "id": "33753",
-                "season": "2",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-524037bc83c7d.jpg",
-                "lang": "he",
-                "id": "33754",
-                "season": "3",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb0a8e60f9.jpg",
-                "lang": "en",
-                "id": "19586",
-                "season": "1",
-                "likes": "0"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb0b4bf229.jpg",
-                "lang": "en",
-                "id": "19587",
-                "season": "2",
-                "likes": "0"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-501bb144e6a46.jpg",
-                "lang": "en",
-                "id": "19588",
-                "season": "0",
-                "likes": "0"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/seasonthumb/wilfred-us-51c953105ef77.jpg",
-                "lang": "en",
-                "id": "30309",
-                "season": "3",
-                "likes": "0"
-            }
-        ],
-        "tvbanner": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/tvbanner/wilfred-us-52403a7185070.jpg",
-                "lang": "he",
-                "id": "33755",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/tvbanner/wilfred-us-5265193db51f7.jpg",
-                "lang": "en",
-                "id": "34716",
-                "likes": "0"
-            }
-        ],
-        "thetvdb_id": "239761",
-        "clearlogo": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-4e04b6495dfd3.png",
-                "lang": "en",
-                "id": "11977",
-                "likes": "2"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-517ac36e39f67.png",
-                "lang": "en",
-                "id": "28249",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/clearlogo/wilfred-us-51f557082cfde.png",
-                "lang": "en",
-                "id": "31817",
-                "likes": "0"
-            }
-        ],
-        "tvposter": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/tvposter/wilfred-us-525d893230d7c.jpg",
-                "lang": "he",
-                "id": "34584",
-                "likes": "1"
-            }
-        ],
-        "showbackground": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-5034dbd49115e.jpg",
-                "lang": "en",
-                "id": "19965",
-                "season": "all",
-                "likes": "0"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92db6973.jpg",
-                "lang": "en",
-                "id": "23166",
-                "season": "all",
-                "likes": "0"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92dbb46b.jpg",
-                "lang": "en",
-                "id": "23167",
-                "season": "all",
-                "likes": "0"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/showbackground/wilfred-us-50b0c92dbb9d1.jpg",
-                "lang": "en",
-                "id": "23168",
-                "season": "all",
-                "likes": "0"
-            }
-        ],
-        "tvthumb": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/tvthumb/wilfred-us-501cf526174fe.jpg",
-                "lang": "en",
-                "id": "19596",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/tvthumb/wilfred-us-51bfb4a105904.jpg",
-                "lang": "en",
-                "id": "30060",
-                "likes": "0"
-            }
-        ],
-        "clearart": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/clearart/wilfred-us-4e05f10e87711.png",
-                "lang": "en",
-                "id": "11987",
-                "likes": "2"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/clearart/wilfred-us-4e2f151d5ed62.png",
-                "lang": "en",
-                "id": "12470",
-                "likes": "1"
-            }
-        ],
-        "hdtvlogo": [
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-505f373be58e6.png",
-                "lang": "en",
-                "id": "21101",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-517ac360def17.png",
-                "lang": "en",
-                "id": "28248",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-52402df7ed945.png",
-                "lang": "he",
-                "id": "33750",
-                "likes": "1"
-            },
-            {
-                "url": "http://assets.fanart.tv/fanart/tv/239761/hdtvlogo/wilfred-us-51f556fb4abd3.png",
-                "lang": "en",
-                "id": "31816",
-                "likes": "0"
-            }
-        ]
-    }
-}
diff --git a/lib/fanart/tests/response/tv_79349.json b/lib/fanart/tests/response/tv_79349.json
deleted file mode 100644
index 73d1698b9123f30bd5eed4b98d7f13445ba1ccdc..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/response/tv_79349.json
+++ /dev/null
@@ -1,756 +0,0 @@
-{
-    "Dexter": {
-        "thetvdb_id": "79349",
-        "hdtvlogo": [
-            {
-                "id": "20959",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdtvlogo/dexter-50575994eb118.png",
-                "lang": "en",
-                "likes": "10"
-            },
-            {
-                "id": "20378",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdtvlogo/dexter-503fc2f24d9b3.png",
-                "lang": "en",
-                "likes": "5"
-            }
-        ],
-        "hdclearart": [
-            {
-                "id": "23059",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-50af98e73b0a5.png",
-                "lang": "en",
-                "likes": "8"
-            },
-            {
-                "id": "24313",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-50eb4363da522.png",
-                "lang": "en",
-                "likes": "5"
-            },
-            {
-                "id": "20560",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-504775fd50557.png",
-                "lang": "en",
-                "likes": "4"
-            },
-            {
-                "id": "29495",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-51aa63100548b.png",
-                "lang": "en",
-                "likes": "3"
-            },
-            {
-                "id": "26712",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-51400b1672938.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "29496",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-51aa724f0a2ab.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "29505",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-51aab23851368.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "29594",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-51afbcdf38d5e.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "29595",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/hdclearart/dexter-51afbcdf3ea8e.png",
-                "lang": "en",
-                "likes": "1"
-            }
-        ],
-        "clearlogo": [
-            {
-                "id": "20958",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearlogo/dexter-5057573260826.png",
-                "lang": "en",
-                "likes": "6"
-            },
-            {
-                "id": "2114",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearlogo/Dexter-79349-2.png",
-                "lang": "en",
-                "likes": "4"
-            },
-            {
-                "id": "14577",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearlogo/dexter-4ecdf0c030189.png",
-                "lang": "en",
-                "likes": "3"
-            },
-            {
-                "id": "16685",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearlogo/dexter-4f6879db58edf.png",
-                "lang": "ru",
-                "likes": "1"
-            }
-        ],
-        "characterart": [
-            {
-                "id": "16825",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/characterart/dexter-4f76318ae4410.png",
-                "lang": "en",
-                "likes": "5"
-            },
-            {
-                "id": "29497",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/characterart/dexter-51aa726346bcf.png",
-                "lang": "en",
-                "likes": "3"
-            },
-            {
-                "id": "14981",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/characterart/dexter-4eface5cee809.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "16996",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/characterart/dexter-4f8189d220d4b.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "26713",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/characterart/dexter-51400b26c65de.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "29597",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/characterart/dexter-51afbcf6002a7.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "29598",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/characterart/dexter-51afbcf6006e6.png",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "29646",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/characterart/dexter-51b0fc45e0dc0.png",
-                "lang": "en",
-                "likes": "1"
-            }
-        ],
-        "clearart": [
-            {
-                "id": "4980",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearart/D_79349 (3).png",
-                "lang": "en",
-                "likes": "4"
-            },
-            {
-                "id": "14579",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearart/dexter-4ecdf0db2adf1.png",
-                "lang": "en",
-                "likes": "3"
-            },
-            {
-                "id": "16682",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearart/dexter-4f68753540f2d.png",
-                "lang": "ru",
-                "likes": "1"
-            },
-            {
-                "id": "4982",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearart/D_79349.png",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "4983",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearart/D_79349 (1).png",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "4984",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearart/D_79349 (0).png",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "14578",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearart/dexter-4ecdf0cf3fb38.png",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "17196",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/clearart/dexter-4f8af83f3bde7.png",
-                "lang": "en",
-                "likes": "0"
-            }
-        ],
-        "showbackground": [
-            {
-                "id": "18467",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-4fc683691dea7.jpg",
-                "lang": "en",
-                "likes": "4",
-                "season": "1"
-            },
-            {
-                "id": "18950",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-4fdf608e2df53.jpg",
-                "lang": "en",
-                "likes": "2",
-                "season": "3"
-            },
-            {
-                "id": "18466",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-4fc6830dc2ccc.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "4"
-            },
-            {
-                "id": "18468",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-4fc683a5ab451.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "6"
-            },
-            {
-                "id": "21524",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-506bdd9c35771.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "all"
-            },
-            {
-                "id": "21526",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-506bddc9f04cb.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": ""
-            },
-            {
-                "id": "21530",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-506bde2654668.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "all"
-            },
-            {
-                "id": "24058",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-50def777ea9c8.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "all"
-            },
-            {
-                "id": "18515",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-4fc8eab16803c.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "18947",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-4fdf5e107be0d.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "5"
-            },
-            {
-                "id": "18949",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-4fdf601385517.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "18952",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-4fdf6386ce1c1.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "21525",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-506bddb3bd3f4.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "21527",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-506bdddc3f476.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "21529",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-506bde113406e.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "24046",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-50de1f84e736f.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "24048",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-50de1f84e7d57.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "24049",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-50de21ac3ae25.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "24054",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-50def777e84d0.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "24055",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-50def777e8dbc.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "24056",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-50def777e9762.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "24986",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/showbackground/dexter-5101fa187c857.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            }
-        ],
-        "seasonthumb": [
-            {
-                "id": "18986",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fe21b7708ebe.jpg",
-                "lang": "en",
-                "likes": "3",
-                "season": "6"
-            },
-            {
-                "id": "5002",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (6).jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "3"
-            },
-            {
-                "id": "5003",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (5).jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "1"
-            },
-            {
-                "id": "17802",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fa981a7251d7.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "5"
-            },
-            {
-                "id": "17823",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4faab0bccbfb6.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "6"
-            },
-            {
-                "id": "18980",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fe21a6955116.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "1"
-            },
-            {
-                "id": "18982",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fe21b0767edb.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "2"
-            },
-            {
-                "id": "18983",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fe21b292d661.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "3"
-            },
-            {
-                "id": "18984",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fe21b42d983d.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "4"
-            },
-            {
-                "id": "18985",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fe21b5847d7b.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "5"
-            },
-            {
-                "id": "21883",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-5071800d37e80.jpg",
-                "lang": "en",
-                "likes": "1",
-                "season": "7"
-            },
-            {
-                "id": "4989",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (9).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "4990",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (19).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "4"
-            },
-            {
-                "id": "4991",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (18).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "4"
-            },
-            {
-                "id": "4992",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (17).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "3"
-            },
-            {
-                "id": "4993",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (16).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "2"
-            },
-            {
-                "id": "4994",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (15).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "1"
-            },
-            {
-                "id": "4995",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (14).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "4996",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (13).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "4"
-            },
-            {
-                "id": "4997",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (12).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "3"
-            },
-            {
-                "id": "4998",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (11).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "2"
-            },
-            {
-                "id": "4999",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (10).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "1"
-            },
-            {
-                "id": "5000",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (8).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "2"
-            },
-            {
-                "id": "5001",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (7).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "4"
-            },
-            {
-                "id": "5004",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "all"
-            },
-            {
-                "id": "5005",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (4).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "5"
-            },
-            {
-                "id": "5006",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (3).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "4"
-            },
-            {
-                "id": "5007",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (2).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "3"
-            },
-            {
-                "id": "5008",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (1).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "2"
-            },
-            {
-                "id": "5009",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/Dexter (0).jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "1"
-            },
-            {
-                "id": "17803",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fa981a7258fb.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "5"
-            },
-            {
-                "id": "17804",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fa981a725c14.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "5"
-            },
-            {
-                "id": "17805",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fa981c6607e4.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "0"
-            },
-            {
-                "id": "17807",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fa98ac2b811d.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "6"
-            },
-            {
-                "id": "17808",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fa98ac2b87ab.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "6"
-            },
-            {
-                "id": "17810",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fa994697afa3.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "6"
-            },
-            {
-                "id": "18514",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-4fc8e9fa79bf8.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "7"
-            },
-            {
-                "id": "31022",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-51dc720661cb7.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "8"
-            },
-            {
-                "id": "31023",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/seasonthumb/dexter-51dc72a19a0bb.jpg",
-                "lang": "en",
-                "likes": "0",
-                "season": "8"
-            }
-        ],
-        "tvthumb": [
-            {
-                "id": "5012",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (10).jpg",
-                "lang": "en",
-                "likes": "2"
-            },
-            {
-                "id": "5023",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (0).jpg",
-                "lang": "en",
-                "likes": "2"
-            },
-            {
-                "id": "14580",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/dexter-4ecdf5027a53c.jpg",
-                "lang": "en",
-                "likes": "2"
-            },
-            {
-                "id": "5013",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (9).jpg",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "5016",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (6).jpg",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "5020",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (2).jpg",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "29341",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/dexter-51a338d376b4a.jpg",
-                "lang": "de",
-                "likes": "1"
-            },
-            {
-                "id": "31722",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/dexter-51f27112a2a89.jpg",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "5010",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (12).jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "5011",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (11).jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "5014",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (8).jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "5015",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (7).jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "5017",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (5).jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "5018",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (4).jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "5019",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (3).jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "5021",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349.jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "5022",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/D_79349 (1).jpg",
-                "lang": "en",
-                "likes": "0"
-            },
-            {
-                "id": "14277",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvthumb/dexter-4ead4375923fd.jpg",
-                "lang": "en",
-                "likes": "0"
-            }
-        ],
-        "tvbanner": [
-            {
-                "id": "30062",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvbanner/dexter-51bfc857c84fd.jpg",
-                "lang": "en",
-                "likes": "1"
-            },
-            {
-                "id": "30063",
-                "url": "http://assets.fanart.tv/fanart/tv/79349/tvbanner/dexter-51bfc89667267.jpg",
-                "lang": "en",
-                "likes": "1"
-            }
-        ]
-    }
-}
\ No newline at end of file
diff --git a/lib/fanart/tests/test_core.py b/lib/fanart/tests/test_core.py
deleted file mode 100644
index 2cdb71fbceaea55e9d1828927e26a607c8cfad04..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/test_core.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from unittest import TestCase
-from fanart.core import Request
-from fanart.errors import RequestFanartError, ResponseFanartError
-from httpretty import httprettified, HTTPretty
-
-
-class RequestTestCase(TestCase):
-    def test_valitate_error(self):
-        self.assertRaises(RequestFanartError, Request, 'key', 'id', 'sport')
-
-    @httprettified
-    def test_response_error(self):
-        request = Request('apikey', 'objid', 'series')
-        HTTPretty.register_uri(
-            HTTPretty.GET,
-            'http://api.fanart.tv/webservice/series/apikey/objid/JSON/all/1/2',
-            body='Please specify a valid API key',
-        )
-        try:
-            request.response()
-        except ResponseFanartError as e:
-            self.assertEqual(repr(e), "ResponseFanartError('No JSON object could be decoded',)")
-            self.assertEqual(str(e), 'No JSON object could be decoded')
diff --git a/lib/fanart/tests/test_immutable.py b/lib/fanart/tests/test_immutable.py
deleted file mode 100644
index 8a0149dbe94c65f899d2e67400a5f130958a9b77..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/test_immutable.py
+++ /dev/null
@@ -1,49 +0,0 @@
-from unittest import TestCase
-from fanart.immutable import Immutable
-
-
-class TestMutable(object):
-    def __init__(self, spam, ham, eggs):
-        self.spam = spam
-        self.ham = ham
-        self.eggs = eggs
-
-    @Immutable.mutablemethod
-    def anyway(self):
-        self.spam = self.ham + self.eggs
-
-
-class TestImmutable(TestMutable, Immutable):
-    @Immutable.mutablemethod
-    def __init__(self, *args, **kwargs):
-        super(TestImmutable, self).__init__(*args, **kwargs)
-
-
-class ImmutableTestCase(TestCase):
-    def setUp(self):
-        self.instance = TestImmutable('spam', 'ham', 'eggs')
-
-    def test_set_raises(self):
-        self.assertRaises(TypeError, self.instance.__setattr__, 'spam', 'ham')
-
-    def test_set(self):
-        self.instance._mutable = True
-        self.instance.spam = 'ham'
-        self.assertEqual(self.instance.spam, 'ham')
-
-    def test_del_raises(self):
-        self.assertRaises(TypeError, self.instance.__delattr__, 'spam')
-
-    def test_del(self):
-        self.instance._mutable = True
-        del self.instance.spam
-        self.assertRaises(AttributeError, self.instance.__getattribute__, 'spam')
-
-    def test_equal(self):
-        new_instance = TestImmutable('spam', 'ham', 'eggs')
-        self.assertEqual(self.instance, new_instance)
-
-    def test_mutable_dec(self):
-        instance = TestMutable('spam', 'ham', 'eggs')
-        instance.anyway()
-        self.assertEqual(instance.spam, 'hameggs')
diff --git a/lib/fanart/tests/test_items.py b/lib/fanart/tests/test_items.py
deleted file mode 100644
index e6c304389bba2d00d7b52ba2071e64bff0859f0a..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/test_items.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from unittest import TestCase
-import os
-from fanart.items import LeafItem
-from httpretty import httprettified, HTTPretty
-from fanart.tests import LOCALDIR
-
-
-class LeafItemTestCase(TestCase):
-    def setUp(self):
-        self.leaf = LeafItem(id=11977, likes=2, url='http://test.tv/50x50.txt')
-
-    def test_str(self):
-        self.assertEqual(str(self.leaf), 'http://test.tv/50x50.txt')
-
-    @httprettified
-    def test_content(self):
-        with open(os.path.join(LOCALDIR, 'response/50x50.png')) as fp:
-            body = fp.read()
-        HTTPretty.register_uri(
-            HTTPretty.GET,
-            'http://test.tv/50x50.txt',
-            body=body
-        )
-        self.assertEqual(self.leaf.content(), body)
-        self.assertEqual(len(HTTPretty.latest_requests), 1)
-        self.assertEqual(self.leaf.content(), body)  # Cached
-        self.assertEqual(len(HTTPretty.latest_requests), 1)
diff --git a/lib/fanart/tests/test_movie.py b/lib/fanart/tests/test_movie.py
deleted file mode 100644
index f127c28e2a0c417781cac4d3159aa3f1a1be2e0b..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/test_movie.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import os
-import unittest
-from httpretty import HTTPretty, httprettified
-from fanart.movie import *
-from fanart.tests import LOCALDIR
-os.environ['FANART_APIKEY'] = 'e3c7f0d0beeaf45b3a0dd3b9dd8a3338'
-
-
-class TvItemTestCase(unittest.TestCase):
-    @httprettified
-    def test_get(self):
-        with open(os.path.join(LOCALDIR, 'response/movie_thg.json')) as fp:
-            body = fp.read()
-        HTTPretty.register_uri(
-            HTTPretty.GET,
-            'http://api.fanart.tv/webservice/movie/e3c7f0d0beeaf45b3a0dd3b9dd8a3338/70160/JSON/all/1/2',
-            body=body
-        )
-        hunger_games = Movie.get(id=70160)
-        self.assertEqual(hunger_games.tmdbid, '70160')
-        self.assertEqual(hunger_games, eval(repr(hunger_games)))
diff --git a/lib/fanart/tests/test_music.py b/lib/fanart/tests/test_music.py
deleted file mode 100644
index 8c5d107d8406d93c288ec5236aeb4a8a9460eca9..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/test_music.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import os
-import unittest
-from httpretty import HTTPretty, httprettified
-from fanart.music import *
-from fanart.tests import LOCALDIR
-os.environ['FANART_APIKEY'] = 'e3c7f0d0beeaf45b3a0dd3b9dd8a3338'
-
-
-class ArtistItemTestCase(unittest.TestCase):
-    @httprettified
-    def test_get(self):
-        with open(os.path.join(LOCALDIR, 'response/music_a7f.json')) as fp:
-            body = fp.read()
-        HTTPretty.register_uri(
-            HTTPretty.GET,
-            'http://api.fanart.tv/webservice/artist/e3c7f0d0beeaf45b3a0dd3b9dd8a3338/24e1b53c-3085-4581-8472-0b0088d2508c/JSON/all/1/2',
-            body=body
-        )
-        a7f = Artist.get(id='24e1b53c-3085-4581-8472-0b0088d2508c')
-        self.assertEqual(a7f.mbid, '24e1b53c-3085-4581-8472-0b0088d2508c')
-        self.assertEqual(a7f, eval(repr(a7f)))
-        self.assertEqual(len(a7f.thumbs), 2)
diff --git a/lib/fanart/tests/test_tv.py b/lib/fanart/tests/test_tv.py
deleted file mode 100644
index eb5e742576028be765eee84fa132bb4bb9569002..0000000000000000000000000000000000000000
--- a/lib/fanart/tests/test_tv.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import json
-from fanart.errors import ResponseFanartError
-import os
-import unittest
-from httpretty import HTTPretty, httprettified
-from fanart.tv import *
-from fanart.tests import LOCALDIR
-os.environ['FANART_APIKEY'] = 'e3c7f0d0beeaf45b3a0dd3b9dd8a3338'
-
-
-class TvItemTestCase(unittest.TestCase):
-    @httprettified
-    def test_get_wilfred(self):
-        with open(os.path.join(LOCALDIR, 'response/tv_239761.json')) as fp:
-            body = fp.read()
-        HTTPretty.register_uri(
-            HTTPretty.GET,
-            'http://api.fanart.tv/webservice/series/e3c7f0d0beeaf45b3a0dd3b9dd8a3338/239761/JSON/all/1/2',
-            body=body
-        )
-        wilfred = TvShow.get(id=239761)
-        self.assertEqual(wilfred.tvdbid, '239761')
-        with open(os.path.join(LOCALDIR, 'json/wilfred.json')) as fp:
-            self.assertEqual(json.loads(wilfred.json()), json.load(fp))
-
-    @httprettified
-    def test_get_dexter(self):
-        with open(os.path.join(LOCALDIR, 'response/tv_79349.json')) as fp:
-            body = fp.read()
-        HTTPretty.register_uri(
-            HTTPretty.GET,
-            'http://api.fanart.tv/webservice/series/e3c7f0d0beeaf45b3a0dd3b9dd8a3338/79349/JSON/all/1/2',
-            body=body
-        )
-        dexter = TvShow.get(id=79349)
-        self.assertEqual(dexter.tvdbid, '79349')
-        self.assertEqual(dexter, eval(repr(dexter)))
-
-    @httprettified
-    def test_get_null(self):
-        HTTPretty.register_uri(
-            HTTPretty.GET,
-            'http://api.fanart.tv/webservice/series/e3c7f0d0beeaf45b3a0dd3b9dd8a3338/79349/JSON/all/1/2',
-            body='null'
-        )
-        self.assertRaises(ResponseFanartError, TvShow.get, id=79349)
diff --git a/lib/guessit/test/__init__.py b/lib/guessit/test/__init__.py
deleted file mode 100644
index e5be370e4be5007b33fd87ec270e91eea041b66a..0000000000000000000000000000000000000000
--- a/lib/guessit/test/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
diff --git a/lib/guessit/test/episodes.yml b/lib/guessit/test/episodes.yml
deleted file mode 100644
index adc4755e1208a4deb5f3d780a41e77a9811aa1e2..0000000000000000000000000000000000000000
--- a/lib/guessit/test/episodes.yml
+++ /dev/null
@@ -1,2048 +0,0 @@
-? __default__
-: type: episode
-
-? Series/Californication/Season 2/Californication.2x05.Vaginatown.HDTV.XviD-0TV.avi
-: title: Californication
-  season: 2
-  episode: 5
-  episode_title: Vaginatown
-  format: HDTV
-  video_codec: XviD
-  release_group: 0TV
-  container: avi
-
-? Series/dexter/Dexter.5x02.Hello,.Bandit.ENG.-.sub.FR.HDTV.XviD-AlFleNi-TeaM.[tvu.org.ru].avi
-: title: Dexter
-  season: 5
-  episode: 2
-  episode_title: Hello, Bandit
-  language: English
-  subtitle_language: French
-  format: HDTV
-  video_codec: XviD
-  release_group: AlFleNi-TeaM
-  website: tvu.org.ru
-  container: avi
-
-? Series/Treme/Treme.1x03.Right.Place,.Wrong.Time.HDTV.XviD-NoTV.avi
-: title: Treme
-  season: 1
-  episode: 3
-  episode_title: Right Place, Wrong Time
-  format: HDTV
-  video_codec: XviD
-  release_group: NoTV
-
-? Series/Duckman/Duckman - S1E13 Joking The Chicken (unedited).avi
-: title: Duckman
-  season: 1
-  episode: 13
-  episode_title: Joking The Chicken
-
-? Series/Simpsons/Saison 12 Français/Simpsons,.The.12x08.A.Bas.Le.Sergent.Skinner.FR.avi
-: title: The Simpsons
-  season: 12
-  episode: 8
-  episode_title: A Bas Le Sergent Skinner
-  language: French
-
-? Series/Duckman/Duckman - 101 (01) - 20021107 - I, Duckman.avi
-: title: Duckman
-  season: 1
-  episode: 1
-  episode_title: I, Duckman
-  date: 2002-11-07
-
-? Series/Simpsons/Saison 12 Français/Simpsons,.The.12x08.A.Bas.Le.Sergent.Skinner.FR.avi
-: title: The Simpsons
-  season: 12
-  episode: 8
-  episode_title: A Bas Le Sergent Skinner
-  language: French
-
-? Series/Futurama/Season 3 (mkv)/[™] Futurama - S03E22 - Le chef de fer à 30% ( 30 Percent Iron Chef ).mkv
-: title: Futurama
-  season: 3
-  episode: 22
-  episode_title: Le chef de fer à 30%
-
-? Series/The Office/Season 6/The Office - S06xE01.avi
-: title: The Office
-  season: 6
-  episode: 1
-
-? series/The Office/Season 4/The Office [401] Fun Run.avi
-: title: The Office
-  season: 4
-  episode: 1
-  episode_title: Fun Run
-
-? Series/Mad Men Season 1 Complete/Mad.Men.S01E01.avi
-: title: Mad Men
-  season: 1
-  episode: 1
-  other: Complete
-
-? series/Psych/Psych S02 Season 2 Complete English DVD/Psych.S02E02.65.Million.Years.Off.avi
-: title: Psych
-  season: 2
-  episode: 2
-  episode_title: 65 Million Years Off
-  language: english
-  format: DVD
-  other: Complete
-
-? series/Psych/Psych S02 Season 2 Complete English DVD/Psych.S02E03.Psy.Vs.Psy.Français.srt
-: title: Psych
-  season: 2
-  episode: 3
-  episode_title: Psy Vs Psy
-  format: DVD
-  language: English
-  subtitle_language: French
-  other: Complete
-
-? Series/Pure Laine/Pure.Laine.1x01.Toutes.Couleurs.Unies.FR.(Québec).DVB-Kceb.[tvu.org.ru].avi
-: title: Pure Laine
-  season: 1
-  episode: 1
-  episode_title: Toutes Couleurs Unies
-  format: DVB
-  release_group: Kceb
-  language: french
-  website: tvu.org.ru
-
-? Series/Pure Laine/2x05 - Pure Laine - Je Me Souviens.avi
-: title: Pure Laine
-  season: 2
-  episode: 5
-  episode_title: Je Me Souviens
-
-? Series/Tout sur moi/Tout sur moi - S02E02 - Ménage à trois (14-01-2008) [Rip by Ampli].avi
-: title: Tout sur moi
-  season: 2
-  episode: 2
-  episode_title: Ménage à trois
-  date: 2008-01-14
-
-? The.Mentalist.2x21.18-5-4.ENG.-.sub.FR.HDTV.XviD-AlFleNi-TeaM.[tvu.org.ru].avi
-: title: The Mentalist
-  season: 2
-  episode: 21
-  episode_title: 18-5-4
-  language: english
-  subtitle_language: french
-  format: HDTV
-  video_codec: XviD
-  release_group: AlFleNi-TeaM
-  website: tvu.org.ru
-
-? series/__ Incomplete __/Dr Slump (Catalan)/Dr._Slump_-_003_DVB-Rip_Catalan_by_kelf.avi
-: title: Dr Slump
-  episode: 3
-  format: DVB
-  language: catalan
-
-# Disabling this test because it just doesn't looks like a serie ...
-#? series/Ren and Stimpy - Black_hole_[DivX].avi
-#: title: Ren and Stimpy
-#  episode_title: Black hole
-#  video_codec: DivX
-
-# Disabling this test because it just doesn't looks like a serie ...
-# ? Series/Walt Disney/Donald.Duck.-.Good.Scouts.[www.bigernie.jump.to].avi
-#: title: Donald Duck
-#  episode_title: Good Scouts
-#  website: www.bigernie.jump.to
-
-? Series/Neverwhere/Neverwhere.05.Down.Street.[tvu.org.ru].avi
-: title: Neverwhere
-  episode: 5
-  episode_title: Down Street
-  website: tvu.org.ru
-
-? Series/South Park/Season 4/South.Park.4x07.Cherokee.Hair.Tampons.DVDRip.[tvu.org.ru].avi
-: title: South Park
-  season: 4
-  episode: 7
-  episode_title: Cherokee Hair Tampons
-  format: DVD
-  website: tvu.org.ru
-
-? Series/Kaamelott/Kaamelott - Livre V - Ep 23 - Le Forfait.avi
-: title: Kaamelott
-  alternative_title: Livre V
-  episode: 23
-  episode_title: Le Forfait
-
-? Series/Duckman/Duckman - 110 (10) - 20021218 - Cellar Beware.avi
-: title: Duckman
-  season: 1
-  episode: 10
-  date: 2002-12-18
-  episode_title: Cellar Beware
-
-# Removing this test because it doesn't look like a series
-# ? Series/Ren & Stimpy/Ren And Stimpy - Onward & Upward-Adult Party Cartoon.avi
-# : title: Ren And Stimpy
-#   episode_title: Onward & Upward-Adult Party Cartoon
-
-? Series/Breaking Bad/Minisodes/Breaking.Bad.(Minisodes).01.Good.Cop.Bad.Cop.WEBRip.XviD.avi
-: title: Breaking Bad
-  episode_format: Minisode
-  episode: 1
-  episode_title: Good Cop Bad Cop
-  format: WEBRip
-  video_codec: XviD
-
-? Series/My Name Is Earl/My.Name.Is.Earl.S01Extras.-.Bad.Karma.DVDRip.XviD.avi
-: title: My Name Is Earl
-  season: 1
-  episode_title: Extras - Bad Karma
-  format: DVD
-  episode_details: Extras
-  video_codec: XviD
-
-? series/Freaks And Geeks/Season 1/Episode 4 - Kim Kelly Is My Friend-eng(1).srt
-: title: Freaks And Geeks
-  season: 1
-  episode: 4
-  episode_title: Kim Kelly Is My Friend
-  subtitle_language: English  # This is really a subtitle_language, despite guessit 1.x assert for language.
-
-? /mnt/series/The Big Bang Theory/S01/The.Big.Bang.Theory.S01E01.mkv
-: title: The Big Bang Theory
-  season: 1
-  episode: 1
-
-? /media/Parks_and_Recreation-s03-e01.mkv
-: title: Parks and Recreation
-  season: 3
-  episode: 1
-
-? /media/Parks_and_Recreation-s03-e02-Flu_Season.mkv
-: title: Parks and Recreation
-  season: 3
-  episode_title: Flu Season
-  episode: 2
-
-? /media/Parks_and_Recreation-s03-x01.mkv
-: title: Parks and Recreation
-  season: 3
-  episode: 1
-
-? /media/Parks_and_Recreation-s03-x02-Gag_Reel.mkv
-: title: Parks and Recreation
-  season: 3
-  episode: 2
-  episode_title: Gag Reel
-
-? /media/Band_of_Brothers-e01-Currahee.mkv
-: title: Band of Brothers
-  episode: 1
-  episode_title: Currahee
-
-? /media/Band_of_Brothers-x02-We_Stand_Alone_Together.mkv
-: title: Band of Brothers
-  bonus: 2
-  bonus_title: We Stand Alone Together
-
-? /TV Shows/Mad.M-5x9.mkv
-: title: Mad M
-  season: 5
-  episode: 9
-
-? /TV Shows/new.girl.117.hdtv-lol.mp4
-: title: new girl
-  season: 1
-  episode: 17
-  format: HDTV
-  release_group: lol
-
-? Kaamelott - 5x44x45x46x47x48x49x50.avi
-: title: Kaamelott
-  season: 5
-  episode: [44, 45, 46, 47, 48, 49, 50]
-
-? Example S01E01-02.avi
-? Example S01E01E02.avi
-: title: Example
-  season: 1
-  episode: [1, 2]
-
-? Series/Baccano!/Baccano!_-_T1_-_Trailer_-_[Ayu](dae8173e).mkv
-: title: Baccano!
-  other: Trailer
-  release_group: Ayu
-  episode_title: T1
-  crc32: dae8173e
-
-? Series/Doctor Who (2005)/Season 06/Doctor Who (2005) - S06E01 - The Impossible Astronaut (1).avi
-: title: Doctor Who
-  year: 2005
-  season: 6
-  episode: 1
-  episode_title: The Impossible Astronaut
-
-? The Sopranos - [05x07] - In Camelot.mp4
-: title: The Sopranos
-  season: 5
-  episode: 7
-  episode_title: In Camelot
-
-? The.Office.(US).1x03.Health.Care.HDTV.XviD-LOL.avi
-: title: The Office
-  country: US
-  season: 1
-  episode: 3
-  episode_title: Health Care
-  format: HDTV
-  video_codec: XviD
-  release_group: LOL
-
-? /Volumes/data-1/Series/Futurama/Season 3/Futurama_-_S03_DVD_Bonus_-_Deleted_Scenes_Part_3.ogm
-: title: Futurama
-  season: 3
-  part: 3
-  other: Bonus
-  episode_title: Deleted Scenes
-  format: DVD
-
-? Ben.and.Kate.S01E02.720p.HDTV.X264-DIMENSION.mkv
-: title: Ben and Kate
-  season: 1
-  episode: 2
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: DIMENSION
-
-? /volume1/TV Series/Drawn Together/Season 1/Drawn Together 1x04 Requiem for a Reality Show.avi
-: title: Drawn Together
-  season: 1
-  episode: 4
-  episode_title: Requiem for a Reality Show
-
-? Sons.of.Anarchy.S05E06.720p.WEB.DL.DD5.1.H.264-CtrlHD.mkv
-: title: Sons of Anarchy
-  season: 5
-  episode: 6
-  screen_size: 720p
-  format: WEB-DL
-  audio_channels: "5.1"
-  audio_codec: DolbyDigital
-  video_codec: h264
-  release_group: CtrlHD
-
-? /media/bdc64bfe-e36f-4af8-b550-e6fd2dfaa507/TV_Shows/Doctor Who (2005)/Saison 6/Doctor Who (2005) - S06E13 - The Wedding of River Song.mkv
-: title: Doctor Who
-  season: 6
-  episode: 13
-  year: 2005
-  episode_title: The Wedding of River Song
-  uuid: bdc64bfe-e36f-4af8-b550-e6fd2dfaa507
-
-? /mnt/videos/tvshows/Doctor Who/Season 06/E13 - The Wedding of River Song.mkv
-: title: Doctor Who
-  season: 6
-  episode: 13
-  episode_title: The Wedding of River Song
-
-? The.Simpsons.S24E03.Adventures.in.Baby-Getting.720p.WEB-DL.DD5.1.H.264-CtrlHD.mkv
-: title: The Simpsons
-  season: 24
-  episode: 3
-  episode_title: Adventures in Baby-Getting
-  screen_size: 720p
-  format: WEB-DL
-  audio_channels: "5.1"
-  audio_codec: DolbyDigital
-  video_codec: h264
-  release_group: CtrlHD
-
-? /home/disaster/Videos/TV/Merlin/merlin_2008.5x02.arthurs_bane_part_two.repack.720p_hdtv_x264-fov.mkv
-: title: merlin
-  season: 5
-  episode: 2
-  part: 2
-  episode_title: arthurs bane
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: fov
-  year: 2008
-  other: Proper
-  proper_count: 1
-
-? "Da Vinci's Demons - 1x04 - The Magician.mkv"
-: title: "Da Vinci's Demons"
-  season: 1
-  episode: 4
-  episode_title: The Magician
-
-? CSI.S013E18.Sheltered.720p.WEB-DL.DD5.1.H.264.mkv
-: title: CSI
-  season: 13
-  episode: 18
-  episode_title: Sheltered
-  screen_size: 720p
-  format: WEB-DL
-  audio_channels: "5.1"
-  audio_codec: DolbyDigital
-  video_codec: h264
-
-? Game of Thrones S03E06 1080i HDTV DD5.1 MPEG2-TrollHD.ts
-: title: Game of Thrones
-  season: 3
-  episode: 6
-  screen_size: 1080i
-  format: HDTV
-  audio_channels: "5.1"
-  audio_codec: DolbyDigital
-  video_codec: Mpeg2
-  release_group: TrollHD
-
-? gossip.girl.s01e18.hdtv.xvid-2hd.eng.srt
-: title: gossip girl
-  season: 1
-  episode: 18
-  format: HDTV
-  video_codec: XviD
-  release_group: 2hd
-  subtitle_language: english
-
-? Wheels.S03E01E02.720p.HDTV.x264-IMMERSE.mkv
-: title: Wheels
-  season: 3
-  episode: [1, 2]
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: IMMERSE
-
-? Wheels.S03E01-02.720p.HDTV.x264-IMMERSE.mkv
-: title: Wheels
-  season: 3
-  episode: [1, 2]
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: IMMERSE
-
-? Wheels.S03E01-E02.720p.HDTV.x264-IMMERSE.mkv
-: title: Wheels
-  season: 3
-  episode: [1, 2]
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: IMMERSE
-
-? Wheels.S03E01-04.720p.HDTV.x264-IMMERSE.mkv
-: title: Wheels
-  season: 3
-  episode: [1, 2, 3, 4]
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: IMMERSE
-
-? Marvels.Agents.of.S.H.I.E.L.D-S01E06.720p.HDTV.X264-DIMENSION.mkv
-: title: Marvels Agents of S.H.I.E.L.D
-  season: 1
-  episode: 6
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: DIMENSION
-
-? Marvels.Agents.of.S.H.I.E.L.D.S01E06.720p.HDTV.X264-DIMENSION.mkv
-: title: Marvels Agents of S.H.I.E.L.D.
-  season: 1
-  episode: 6
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: DIMENSION
-
-? Marvels.Agents.of.S.H.I.E.L.D..S01E06.720p.HDTV.X264-DIMENSION.mkv
-: title: Marvels Agents of S.H.I.E.L.D.
-  season: 1
-  episode: 6
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: DIMENSION
-
-? Series/Friday Night Lights/Season 1/Friday Night Lights S01E19 - Ch-Ch-Ch-Ch-Changes.avi
-: title: Friday Night Lights
-  season: 1
-  episode: 19
-  episode_title: Ch-Ch-Ch-Ch-Changes
-
-? Dexter Saison VII FRENCH.BDRip.XviD-MiND.nfo
-: title: Dexter
-  season: 7
-  video_codec: XviD
-  language: French
-  format: BluRay
-  release_group: MiND
-
-? Dexter Saison sept FRENCH.BDRip.XviD-MiND.nfo
-: title: Dexter
-  season: 7
-  video_codec: XviD
-  language: French
-  format: BluRay
-  release_group: MiND
-
-? "Pokémon S16 - E29 - 1280*720 HDTV VF.mkv"
-: title: Pokémon
-  format: HDTV
-  language: French
-  season: 16
-  episode: 29
-  screen_size: 720p
-
-? One.Piece.E576.VOSTFR.720p.HDTV.x264-MARINE-FORD.mkv
-: episode: 576
-  video_codec: h264
-  format: HDTV
-  title: One Piece
-  release_group: MARINE-FORD
-  subtitle_language: French
-  screen_size: 720p
-
-? Dexter.S08E12.FINAL.MULTi.1080p.BluRay.x264-MiND.mkv
-: video_codec: h264
-  episode: 12
-  season: 8
-  format: BluRay
-  title: Dexter
-  other: FINAL
-  language: Multiple languages
-  release_group: MiND
-  screen_size: 1080p
-
-? One Piece - E623 VOSTFR HD [www.manga-ddl-free.com].mkv
-: website: www.manga-ddl-free.com
-  episode: 623
-  subtitle_language: French
-  title: One Piece
-  other: HD
-
-? Falling Skies Saison 1.HDLight.720p.x264.VFF.mkv
-: language: French
-  screen_size: 720p
-  season: 1
-  title: Falling Skies
-  video_codec: h264
-  other: HDLight
-
-? Sleepy.Hollow.S01E09.720p.WEB-DL.DD5.1.H.264-BP.mkv
-: episode: 9
-  video_codec: h264
-  format: WEB-DL
-  title: Sleepy Hollow
-  audio_channels: "5.1"
-  screen_size: 720p
-  season: 1
-  video_profile: BP
-  audio_codec: DolbyDigital
-
-? Sleepy.Hollow.S01E09.720p.WEB-DL.DD5.1.H.264-BS.mkv
-: episode: 9
-  video_codec: h264
-  format: WEB-DL
-  title: Sleepy Hollow
-  audio_channels: "5.1"
-  screen_size: 720p
-  season: 1
-  release_group: BS
-  audio_codec: DolbyDigital
-
-? Battlestar.Galactica.S00.Pilot.FRENCH.DVDRip.XviD-NOTAG.avi
-: title: Battlestar Galactica
-  season: 0
-  episode_details: Pilot
-  episode_title: Pilot
-  language: French
-  format: DVD
-  video_codec: XviD
-  release_group: NOTAG
-
-? The Big Bang Theory S00E00 Unaired Pilot VOSTFR TVRip XviD-VioCs
-: title: The Big Bang Theory
-  season: 0
-  episode: 0
-  subtitle_language: French
-  format: TV
-  video_codec: XviD
-  release_group: VioCs
-  episode_details: [Unaired, Pilot]
-
-? The Big Bang Theory S01E00 PROPER Unaired Pilot TVRip XviD-GIGGITY
-: title: The Big Bang Theory
-  season: 1
-  episode: 0
-  format: TV
-  video_codec: XviD
-  release_group: GIGGITY
-  other: Proper
-  proper_count: 1
-  episode_details: [Unaired, Pilot]
-
-? Pawn.Stars.S2014E18.720p.HDTV.x264-KILLERS
-: title: Pawn Stars
-  season: 2014
-  year: 2014
-  episode: 18
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: KILLERS
-
-? 2.Broke.Girls.S03E10.480p.HDTV.x264-mSD.mkv
-: title: 2 Broke Girls
-  season: 3
-  episode: 10
-  screen_size: 480p
-  format: HDTV
-  video_codec: h264
-  release_group: mSD
-
-? House.of.Cards.2013.S02E03.1080p.NF.WEBRip.DD5.1.x264-NTb.mkv
-: title: House of Cards
-  year: 2013
-  season: 2
-  episode: 3
-  screen_size: 1080p
-  other: Netflix
-  format: WEBRip
-  audio_channels: "5.1"
-  audio_codec: DolbyDigital
-  video_codec: h264
-  release_group: NTb
-
-? the.100.109.hdtv-lol.mp4
-: title: the 100
-  season: 1
-  episode: 9
-  format: HDTV
-  release_group: lol
-
-? Criminal.Minds.5x03.Reckoner.ENG.-.sub.FR.HDTV.XviD-STi.[tvu.org.ru].avi
-: title: Criminal Minds
-  language: English
-  subtitle_language: French
-  season: 5
-  episode: 3
-  video_codec: XviD
-  format: HDTV
-  website: tvu.org.ru
-  release_group: STi
-  episode_title: Reckoner
-
-? 03-Criminal.Minds.avi
-: title: Criminal Minds
-  episode: 3
-
-? '[Evil-Saizen]_Laughing_Salesman_14_[DVD][1C98686A].mkv'
-: crc32: 1C98686A
-  episode: 14
-  format: DVD
-  release_group: Evil-Saizen
-  title: Laughing Salesman
-
-? '[Kaylith] Zankyou no Terror - 04 [480p][B4D4514E].mp4'
-: crc32: B4D4514E
-  episode: 4
-  release_group: Kaylith
-  screen_size: 480p
-  title: Zankyou no Terror
-
-? '[PuyaSubs!] Seirei Tsukai no Blade Dance - 05 [720p][32DD560E].mkv'
-: crc32: 32DD560E
-  episode: 5
-  release_group: PuyaSubs!
-  screen_size: 720p
-  title: Seirei Tsukai no Blade Dance
-
-? '[Doremi].Happiness.Charge.Precure.27.[1280x720].[DC91581A].mkv'
-: crc32: DC91581A
-  episode: 27
-  release_group: Doremi
-  screen_size: 720p
-  title: Happiness Charge Precure
-
-? "[Daisei] Free!:Iwatobi Swim Club - 01 ~ (BD 720p 10-bit AAC) [99E8E009].mkv"
-: audio_codec: AAC
-  crc32: 99E8E009
-  episode: 1
-  format: BluRay
-  release_group: Daisei
-  screen_size: 720p
-  title: Free!:Iwatobi Swim Club
-  video_profile: 10bit
-
-? '[Tsundere] Boku wa Tomodachi ga Sukunai - 03 [BDRip h264 1920x1080 10bit FLAC][AF0C22CC].mkv'
-: audio_codec: FLAC
-  crc32: AF0C22CC
-  episode: 3
-  format: BluRay
-  release_group: Tsundere
-  screen_size: 1080p
-  title: Boku wa Tomodachi ga Sukunai
-  video_codec: h264
-  video_profile: 10bit
-
-? '[t.3.3.d]_Mikakunin_de_Shinkoukei_-_12_[720p][5DDC1352].mkv'
-: crc32: 5DDC1352
-  episode: 12
-  screen_size: 720p
-  title: Mikakunin de Shinkoukei
-  release_group: t.3.3.d
-
-? '[Anime-Koi] Sabagebu! - 06 [h264-720p][ABB3728A].mkv'
-: crc32: ABB3728A
-  episode: 6
-  release_group: Anime-Koi
-  screen_size: 720p
-  title: Sabagebu!
-  video_codec: h264
-
-? '[aprm-Diogo4D] [BD][1080p] Nagi no Asukara 08 [4D102B7C].mkv'
-: crc32: 4D102B7C
-  episode: 8
-  format: BluRay
-  release_group: aprm-Diogo4D
-  screen_size: 1080p
-  title: Nagi no Asukara
-
-? '[Akindo-SSK] Zankyou no Terror - 05 [720P][Sub_ITA][F5CCE87C].mkv'
-: crc32: F5CCE87C
-  episode: 5
-  release_group: Akindo-SSK
-  screen_size: 720p
-  title: Zankyou no Terror
-  subtitle_language: it
-
-? Naruto Shippuden Episode 366 VOSTFR.avi
-: episode: 366
-  title: Naruto Shippuden
-  subtitle_language: fr
-
-? Naruto Shippuden Episode 366v2 VOSTFR.avi
-: episode: 366
-  version: 2
-  title: Naruto Shippuden
-  subtitle_language: fr
-
-? '[HorribleSubs] Ao Haru Ride - 06 [480p].mkv'
-: episode: 6
-  release_group: HorribleSubs
-  screen_size: 480p
-  title: Ao Haru Ride
-
-? '[DeadFish] Tari Tari - 01 [BD][720p][AAC].mp4'
-: audio_codec: AAC
-  episode: 1
-  format: BluRay
-  release_group: DeadFish
-  screen_size: 720p
-  title: Tari Tari
-
-? '[NoobSubs] Sword Art Online II 06 (720p 8bit AAC).mp4'
-: audio_codec: AAC
-  episode: 6
-  release_group: NoobSubs
-  screen_size: 720p
-  title: Sword Art Online II
-  video_profile: 8bit
-
-? '[DeadFish] 01 - Tari Tari [BD][720p][AAC].mp4'
-: audio_codec: AAC
-  episode: 1
-  format: BluRay
-  release_group: DeadFish
-  screen_size: 720p
-  title: Tari Tari
-
-? '[NoobSubs] 06 Sword Art Online II (720p 8bit AAC).mp4'
-: audio_codec: AAC
-  episode: 6
-  release_group: NoobSubs
-  screen_size: 720p
-  title: Sword Art Online II
-  video_profile: 8bit
-
-? '[DeadFish] 12 - Tari Tari [BD][720p][AAC].mp4'
-: audio_codec: AAC
-  episode: 12
-  format: BluRay
-  release_group: DeadFish
-  screen_size: 720p
-  title: Tari Tari
-
-? Something.Season.2.1of4.Ep.Title.HDTV.torrent
-: episode_count: 4
-  episode: 1
-  format: HDTV
-  season: 2
-  title: Something
-  episode_title: Title
-  container: torrent
-
-? Something.Season.2of5.3of9.Ep.Title.HDTV.torrent
-: episode_count: 9
-  episode: 3
-  format: HDTV
-  season: 2
-  season_count: 5
-  title: Something
-  episode_title: Title
-  container: torrent
-
-? Something.Other.Season.3of5.Complete.HDTV.torrent
-: format: HDTV
-  other: Complete
-  season: 3
-  season_count: 5
-  title: Something Other
-  container: torrent
-
-? Something.Other.Season.1-3.avi
-: season: [1, 2, 3]
-  title: Something Other
-
-? Something.Other.Season.1&3.avi
-: season: [1, 3]
-  title: Something Other
-
-? Something.Other.Season.1&3-1to12ep.avi
-: season: [1, 3]
-  title: Something Other
-
-? W2Test.123.HDTV.XViD-FlexGet
-: episode: 23
-  season: 1
-  format: HDTV
-  release_group: FlexGet
-  title: W2Test
-  video_codec: XviD
-
-? W2Test.123.HDTV.XViD-FlexGet
-: options: --episode-prefer-number
-  episode: 123
-  format: HDTV
-  release_group: FlexGet
-  title: W2Test
-  video_codec: XviD
-
-? FooBar.0307.PDTV-FlexGet
-: episode: 7
-  format: DVB
-  release_group: FlexGet
-  season: 3
-  title: FooBar
-
-? FooBar.0307.PDTV-FlexGet
-? FooBar.307.PDTV-FlexGet
-: options: --episode-prefer-number
-  episode: 307
-  format: DVB
-  release_group: FlexGet
-  title: FooBar
-
-? FooBar.07.PDTV-FlexGet
-: options: --episode-prefer-number
-  episode: 7
-  format: DVB
-  release_group: FlexGet
-  title: FooBar
-
-? FooBar.7.PDTV-FlexGet
-: options: --episode-prefer-number
-  episode: 7
-  format: DVB
-  release_group: FlexGet
-  title: FooBar
-
-? FooBar.0307.PDTV-FlexGet
-: episode: 7
-  format: DVB
-  release_group: FlexGet
-  season: 3
-  title: FooBar
-
-? FooBar.307.PDTV-FlexGet
-: episode: 7
-  format: DVB
-  release_group: FlexGet
-  season: 3
-  title: FooBar
-
-? FooBar.07.PDTV-FlexGet
-: episode: 7
-  format: DVB
-  release_group: FlexGet
-  title: FooBar
-
-? FooBar.07v4.PDTV-FlexGet
-: episode: 7
-  version: 4
-  format: DVB
-  release_group: FlexGet
-  title: FooBar
-
-? FooBar.7.PDTV-FlexGet
-: format: DVB
-  release_group: FlexGet
-  title: FooBar 7
-  type: movie
-
-? FooBar.7.PDTV-FlexGet
-: options: -t episode
-  episode: 7
-  format: DVB
-  release_group: FlexGet
-  title: FooBar
-
-? FooBar.7v3.PDTV-FlexGet
-: options: -t episode
-  episode: 7
-  version: 3
-  format: DVB
-  release_group: FlexGet
-  title: FooBar
-
-? Test.S02E01.hdtv.real.proper
-: episode: 1
-  format: HDTV
-  other: Proper
-  proper_count: 2
-  season: 2
-  title: Test
-
-? Real.Test.S02E01.hdtv.proper
-: episode: 1
-  format: HDTV
-  other: Proper
-  proper_count: 1
-  season: 2
-  title: Real Test
-
-? Test.Real.S02E01.hdtv.proper
-: episode: 1
-  format: HDTV
-  other: Proper
-  proper_count: 1
-  season: 2
-  title: Test Real
-
-? Test.S02E01.hdtv.proper
-: episode: 1
-  format: HDTV
-  other: Proper
-  proper_count: 1
-  season: 2
-  title: Test
-
-? Test.S02E01.hdtv.real.repack.proper
-: episode: 1
-  format: HDTV
-  other: Proper
-  proper_count: 3
-  season: 2
-  title: Test
-
-? Date.Show.03-29-2012.HDTV.XViD-FlexGet
-: date: 2012-03-29
-  format: HDTV
-  release_group: FlexGet
-  title: Date Show
-  video_codec: XviD
-
-? Something.1x5.Season.Complete-FlexGet
-: episode: 5
-  other: Complete
-  season: 1
-  title: Something
-  release_group: FlexGet
-
-? Something Seasons 1 & 2 - Complete
-: other: Complete
-  season:
-  - 1
-  - 2
-  title: Something
-
-? Something Seasons 4 Complete
-: other: Complete
-  season: 4
-  title: Something
-
-? Something.1xAll.Season.Complete-FlexGet
-: other: Complete
-  season: 1
-  title: Something
-  release_group: FlexGet
-
-? Something.1xAll-FlexGet
-: other: Complete
-  season: 1
-  title: Something
-  release_group: FlexGet
-
-? FlexGet.US.S2013E14.Title.Here.720p.HDTV.AAC5.1.x264-NOGRP
-: audio_channels: '5.1'
-  audio_codec: AAC
-  country: US
-  episode: 14
-  format: HDTV
-  release_group: NOGRP
-  screen_size: 720p
-  season: 2013
-  title: FlexGet
-  episode_title: Title Here
-  video_codec: h264
-  year: 2013
-
-? FlexGet.14.of.21.Title.Here.720p.HDTV.AAC5.1.x264-NOGRP
-: audio_channels: '5.1'
-  audio_codec: AAC
-  episode_count: 21
-  episode: 14
-  format: HDTV
-  release_group: NOGRP
-  screen_size: 720p
-  title: FlexGet
-  episode_title: Title Here
-  video_codec: h264
-
-? FlexGet.Series.2013.14.of.21.Title.Here.720p.HDTV.AAC5.1.x264-NOGRP
-: audio_channels: '5.1'
-  audio_codec: AAC
-  episode_count: 21
-  episode: 14
-  format: HDTV
-  release_group: NOGRP
-  screen_size: 720p
-  season: 2013
-  title: FlexGet
-  episode_title: Title Here
-  video_codec: h264
-  year: 2013
-
-? Something.S04E05E09
-: episode: # guessit 1.x guessed this as a range from 5 to 9, but it's not clear whether it should ...
-  - 5
-  - 9
-  season: 4
-  title: Something
-
-? FooBar 360 1080i
-: options: --episode-prefer-number
-  episode: 360
-  screen_size: 1080i
-  title: FooBar
-
-? FooBar 360 1080i
-: episode: 60
-  season: 3
-  screen_size: 1080i
-  title: FooBar
-
-? FooBar 360
-: screen_size: 360p
-  title: FooBar
-
-? BarFood christmas special HDTV
-: options: --expected-title BarFood
-  format: HDTV
-  title: BarFood
-  episode_title: christmas special
-  episode_details: Special
-
-? Something.2008x12.13-FlexGet
-: title: Something
-  date: 2008-12-13
-  episode_title: FlexGet
-
-? '[Ignored] Test 12'
-: episode: 12
-  release_group: Ignored
-  title: Test
-
-? '[FlexGet] Test 12'
-: episode: 12
-  release_group: FlexGet
-  title: Test
-
-? Test.13.HDTV-Ignored
-: episode: 13
-  format: HDTV
-  release_group: Ignored
-  title: Test
-
-? Test.13.HDTV-Ignored
-: options: --expected-series test
-  episode: 13
-  format: HDTV
-  release_group: Ignored
-  title: Test
-
-? Test.13.HDTV-Ignored
-: title: Test
-  episode: 13
-  format: HDTV
-  release_group: Ignored
-
-? Test.13.HDTV-Ignored
-: episode: 13
-  format: HDTV
-  release_group: Ignored
-  title: Test
-
-? Test.13.HDTV-FlexGet
-: episode: 13
-  format: HDTV
-  release_group: FlexGet
-  title: Test
-
-? Test.14.HDTV-Name
-: episode: 14
-  format: HDTV
-  release_group: Name
-  title: Test
-
-? Real.Time.With.Bill.Maher.2014.10.31.HDTV.XviD-AFG.avi
-: date: 2014-10-31
-  format: HDTV
-  release_group: AFG
-  title: Real Time With Bill Maher
-  video_codec: XviD
-
-? Arrow.S03E21.Al.Sah-Him.1080p.WEB-DL.DD5.1.H.264-BS.mkv
-: title: Arrow
-  season: 3
-  episode: 21
-  episode_title: Al Sah-Him
-  screen_size: 1080p
-  audio_codec: DolbyDigital
-  audio_channels: "5.1"
-  video_codec: h264
-  release_group: BS
-  format: WEB-DL
-
-? How to Make It in America - S02E06 - I'm Sorry, Who's Yosi?.mkv
-: title: How to Make It in America
-  season: 2
-  episode: 6
-  episode_title: I'm Sorry, Who's Yosi?
-
-? 24.S05E07.FRENCH.DVDRip.XviD-FiXi0N.avi
-: episode: 7
-  format: DVD
-  language: fr
-  season: 5
-  title: '24'
-  video_codec: XviD
-  release_group: FiXi0N
-
-? 12.Monkeys.S01E12.FRENCH.BDRip.x264-VENUE.mkv
-: episode: 12
-  format: BluRay
-  language: fr
-  release_group: VENUE
-  season: 1
-  title: 12 Monkeys
-  video_codec: h264
-
-? The.Daily.Show.2015.07.01.Kirsten.Gillibrand.Extended.720p.CC.WEBRip.AAC2.0.x264-BTW.mkv
-: audio_channels: '2.0'
-  audio_codec: AAC
-  date: 2015-07-01
-  format: WEBRip
-  other: [Extended, CC]
-  release_group: BTW
-  screen_size: 720p
-  title: The Daily Show
-  episode_title: Kirsten Gillibrand
-  video_codec: h264
-
-? The.Daily.Show.2015.07.01.Kirsten.Gillibrand.Extended.Interview.720p.CC.WEBRip.AAC2.0.x264-BTW.mkv
-: audio_channels: '2.0'
-  audio_codec: AAC
-  date: 2015-07-01
-  format: WEBRip
-  other: CC
-  release_group: BTW
-  screen_size: 720p
-  title: The Daily Show
-  episode_title: Kirsten Gillibrand Extended Interview
-  video_codec: h264
-
-? The.Daily.Show.2015.07.02.Sarah.Vowell.CC.WEBRip.AAC2.0.x264-BTW.mkv
-: audio_channels: '2.0'
-  audio_codec: AAC
-  date: 2015-07-02
-  format: WEBRip
-  other: CC
-  release_group: BTW
-  title: The Daily Show
-  episode_title: Sarah Vowell
-  video_codec: h264
-
-? 90.Day.Fiance.S02E07.I.Have.To.Tell.You.Something.720p.HDTV.x264-W4F
-: episode: 7
-  format: HDTV
-  screen_size: 720p
-  season: 2
-  title: 90 Day Fiance
-  episode_title: I Have To Tell You Something
-  release_group: W4F
-
-? Doctor.Who.2005.S04E06.FRENCH.LD.DVDRip.XviD-TRACKS.avi
-: episode: 6
-  format: DVD
-  language: fr
-  release_group: TRACKS
-  season: 4
-  title: Doctor Who
-  other: LD
-  video_codec: XviD
-  year: 2005
-
-? Astro.Le.Petit.Robot.S01E01+02.FRENCH.DVDRiP.X264.INT-BOOLZ.mkv
-: episode: [1, 2]
-  format: DVD
-  language: fr
-  release_group: INT-BOOLZ
-  season: 1
-  title: Astro Le Petit Robot
-  video_codec: h264
-
-? Annika.Bengtzon.2012.E01.Le.Testament.De.Nobel.FRENCH.DVDRiP.XViD-STVFRV.avi
-: episode: 1
-  format: DVD
-  language: fr
-  release_group: STVFRV
-  title: Annika Bengtzon
-  episode_title: Le Testament De Nobel
-  video_codec: XviD
-  year: 2012
-
-? Dead.Set.02.FRENCH.LD.DVDRip.XviD-EPZ.avi
-: episode: 2
-  format: DVD
-  language: fr
-  other: LD
-  release_group: EPZ
-  title: Dead Set
-  video_codec: XviD
-
-? Phineas and Ferb S01E00 & S01E01 & S01E02
-: episode: [0, 1, 2]
-  season: 1
-  title: Phineas and Ferb
-
-? Show.Name.S01E02.S01E03.HDTV.XViD.Etc-Group
-: episode: [2, 3]
-  format: HDTV
-  release_group: Etc-Group
-  season: 1
-  title: Show Name
-  video_codec: XviD
-
-? Show Name - S01E02 - S01E03 - S01E04 - Ep Name
-: episode: [2, 3, 4]
-  season: 1
-  title: Show Name
-  episode_title: Ep Name
-
-? Show.Name.1x02.1x03.HDTV.XViD.Etc-Group
-: episode: [2, 3]
-  format: HDTV
-  release_group: Etc-Group
-  season: 1
-  title: Show Name
-  video_codec: XviD
-
-? Show Name - 1x02 - 1x03 - 1x04 - Ep Name
-: episode: [2, 3, 4]
-  season: 1
-  title: Show Name
-  episode_title: Ep Name
-
-? Show.Name.S01E02.HDTV.XViD.Etc-Group
-: episode: 2
-  format: HDTV
-  release_group: Etc-Group
-  season: 1
-  title: Show Name
-  video_codec: XviD
-
-? Show Name - S01E02 - My Ep Name
-: episode: 2
-  season: 1
-  title: Show Name
-  episode_title: My Ep Name
-
-? Show Name - S01.E03 - My Ep Name
-: episode: 3
-  season: 1
-  title: Show Name
-  episode_title: My Ep Name
-
-? Show.Name.S01E02E03.HDTV.XViD.Etc-Group
-: episode: [2, 3]
-  format: HDTV
-  release_group: Etc-Group
-  season: 1
-  title: Show Name
-  video_codec: XviD
-
-? Show Name - S01E02-03 - My Ep Name
-: episode: [2, 3]
-  season: 1
-  title: Show Name
-  episode_title: My Ep Name
-
-? Show.Name.S01.E02.E03
-: episode: [2, 3]
-  season: 1
-  title: Show Name
-
-? Show_Name.1x02.HDTV_XViD_Etc-Group
-: episode: 2
-  format: HDTV
-  release_group: Etc-Group
-  season: 1
-  title: Show Name
-  video_codec: XviD
-
-? Show Name - 1x02 - My Ep Name
-: episode: 2
-  season: 1
-  title: Show Name
-  episode_title: My Ep Name
-
-? Show_Name.1x02x03x04.HDTV_XViD_Etc-Group
-: episode: [2, 3, 4]
-  format: HDTV
-  release_group: Etc-Group
-  season: 1
-  title: Show Name
-  video_codec: XviD
-
-? Show Name - 1x02-03-04 - My Ep Name
-: episode: [2, 3, 4]
-  season: 1
-  title: Show Name
-  episode_title: My Ep Name
-
-# guessit 1.x guessed this as episode 100, but guessed 101 as season 1 episode 1.
-? Show.Name.100.Event.2010.11.23.HDTV.XViD.Etc-Group
-: date: 2010-11-23
-  season: 1
-  episode: 0
-  format: HDTV
-  release_group: Etc-Group
-  title: Show Name
-  episode_title: Event
-  video_codec: XviD
-
-? Show.Name.101.Event.2010.11.23.HDTV.XViD.Etc-Group
-: date: 2010-11-23
-  season: 1
-  episode: 1
-  format: HDTV
-  release_group: Etc-Group
-  title: Show Name
-  episode_title: Event
-  video_codec: XviD
-
-? Show.Name.2010.11.23.HDTV.XViD.Etc-Group
-: date: 2010-11-23
-  format: HDTV
-  release_group: Etc-Group
-  title: Show Name
-
-? Show Name - 2010-11-23 - Ep Name
-: date: 2010-11-23
-  title: Show Name
-  episode_title: Ep Name
-
-? Show Name Season 1 Episode 2 Ep Name
-: episode: 2
-  season: 1
-  title: Show Name
-  episode_title: Ep Name
-
-? Show.Name.S01.HDTV.XViD.Etc-Group
-: format: HDTV
-  release_group: Etc-Group
-  season: 1
-  title: Show Name
-  video_codec: XviD
-
-? Show.Name.E02-03
-: episode: [2, 3]
-  title: Show Name
-
-? Show.Name.E02.2010
-: episode: 2
-  year: 2010
-  title: Show Name
-
-? Show.Name.E23.Test
-: episode: 23
-  title: Show Name
-  episode_title: Test
-
-? Show.Name.Part.3.HDTV.XViD.Etc-Group
-: part: 3
-  title: Show Name
-  format: HDTV
-  video_codec: XviD
-  release_group: Etc-Group
-  type: movie
-  # Fallback to movie type because we can't tell it's a series ...
-
-? Show.Name.Part.1.and.Part.2.Blah-Group
-: part: [1, 2]
-  title: Show Name
-  type: movie
-  # Fallback to movie type because we can't tell it's a series ...
-
-? Show Name - 01 - Ep Name
-: episode: 1
-  title: Show Name
-  episode_title: Ep Name
-
-? 01 - Ep Name
-: episode: 1
-  title: Ep Name
-
-? Show.Name.102.HDTV.XViD.Etc-Group
-: episode: 2
-  format: HDTV
-  release_group: Etc-Group
-  season: 1
-  title: Show Name
-  video_codec: XviD
-
-? '[HorribleSubs] Maria the Virgin Witch - 01 [720p].mkv'
-: episode: 1
-  release_group: HorribleSubs
-  screen_size: 720p
-  title: Maria the Virgin Witch
-
-? '[ISLAND]One_Piece_679_[VOSTFR]_[V1]_[8bit]_[720p]_[EB7838FC].mp4'
-: options: -E
-  crc32: EB7838FC
-  episode: 679
-  release_group: ISLAND
-  screen_size: 720p
-  title: One Piece
-  subtitle_language: fr
-  video_profile: 8bit
-  version: 1
-
-? '[ISLAND]One_Piece_679_[VOSTFR]_[8bit]_[720p]_[EB7838FC].mp4'
-: options: -E
-  crc32: EB7838FC
-  episode: 679
-  release_group: ISLAND
-  screen_size: 720p
-  title: One Piece
-  subtitle_language: fr
-  video_profile: 8bit
-
-? '[Kaerizaki-Fansub]_One_Piece_679_[VOSTFR][HD_1280x720].mp4'
-: options: -E
-  episode: 679
-  other: HD
-  release_group: Kaerizaki-Fansub
-  screen_size: 720p
-  title: One Piece
-  subtitle_language: fr
-
-? '[Kaerizaki-Fansub]_One_Piece_679_[VOSTFR][FANSUB][HD_1280x720].mp4'
-: options: -E
-  episode: 679
-  other:
-  - Fansub
-  - HD
-  release_group: Kaerizaki-Fansub
-  screen_size: 720p
-  title: One Piece
-  subtitle_language: fr
-
-? '[Kaerizaki-Fansub]_One_Piece_681_[VOSTFR][HD_1280x720]_V2.mp4'
-: options: -E
-  episode: 681
-  other: HD
-  release_group: Kaerizaki-Fansub
-  screen_size: 720p
-  title: One Piece
-  subtitle_language: fr
-  version: 2
-
-? '[Kaerizaki-Fansub] High School DxD New 04 VOSTFR HD (1280x720) V2.mp4'
-: options: -E
-  episode: 4
-  other: HD
-  release_group: Kaerizaki-Fansub
-  screen_size: 720p
-  title: High School DxD New
-  subtitle_language: fr
-  version: 2
-
-? '[Kaerizaki-Fansub] One Piece 603 VOSTFR PS VITA (960x544) V2.mp4'
-: options: -E
-  episode: 603
-  release_group:
-    - Kaerizaki-Fansub
-    - PS VITA
-  screen_size: 960x544
-  title: One Piece
-  subtitle_language: fr
-  version: 2
-
-? '[Group Name] Show Name.13'
-: episode: 13
-  release_group: Group Name
-  title: Show Name
-
-? '[Group Name] Show Name - 13'
-: episode: 13
-  release_group: Group Name
-  title: Show Name
-
-? '[Group Name] Show Name 13'
-: episode: 13
-  release_group: Group Name
-  title: Show Name
-
-# [Group Name] Show Name.13-14
-# [Group Name] Show Name - 13-14
-# Show Name 13-14
-
-? '[Stratos-Subs]_Infinite_Stratos_-_12_(1280x720_H.264_AAC)_[379759DB]'
-: audio_codec: AAC
-  crc32: 379759DB
-  episode: 12
-  release_group: Stratos-Subs
-  screen_size: 720p
-  title: Infinite Stratos
-  video_codec: h264
-
-# [ShinBunBu-Subs] Bleach - 02-03 (CX 1280x720 x264 AAC)
-
-? '[SGKK] Bleach 312v1 [720p/MKV]'
-: options: -E  # guessit 1.x guessed an episode only when a version was guessed, but that doesn't make it consistent.
-  episode: 312
-  release_group: SGKK
-  screen_size: 720p
-  title: Bleach
-  version: 1
-
-? '[Ayako]_Infinite_Stratos_-_IS_-_07_[H264][720p][EB7838FC]'
-: crc32: EB7838FC
-  episode: 7
-  release_group: Ayako
-  screen_size: 720p
-  title: Infinite Stratos
-  video_codec: h264
-
-? '[Ayako] Infinite Stratos - IS - 07v2 [H264][720p][44419534]'
-: crc32: '44419534'
-  episode: 7
-  release_group: Ayako
-  screen_size: 720p
-  title: Infinite Stratos
-  video_codec: h264
-  version: 2
-
-? '[Ayako-Shikkaku] Oniichan no Koto Nanka Zenzen Suki Janain Dakara ne - 10 [LQ][h264][720p] [8853B21C]'
-: crc32: 8853B21C
-  episode: 10
-  release_group: Ayako-Shikkaku
-  screen_size: 720p
-  title: Oniichan no Koto Nanka Zenzen Suki Janain Dakara ne
-  video_codec: h264
-
-# TODO: Add support for absolute episodes
-? Bleach - s16e03-04 - 313-314
-? Bleach.s16e03-04.313-314
-? Bleach.s16e03-04.313-314
-? Bleach - s16e03-04 - 313-314
-? Bleach.s16e03-04.313-314
-? Bleach s16e03e04 313-314
-: episode: [3, 4]
-  season: 16
-  title: Bleach
-
-? Bleach - 313-314
-: options: -E
-  episode: [313, 314]
-  title: Bleach
-
-? '[ShinBunBu-Subs] Bleach - 02-03 (CX 1280x720 x264 AAC)'
-: audio_codec: AAC
-  episode: [2, 3]
-  release_group: ShinBunBu-Subs
-  screen_size: 720p
-  title: Bleach
-  video_codec: h264
-
-? 003. Show Name - Ep Name.avi
-: episode: 3
-  title: Show Name
-  episode_title: Ep Name
-
-? 003-004. Show Name - Ep Name.avi
-: episode: [3, 4]
-  title: Show Name
-  episode_title: Ep Name
-
-? One Piece - 102
-: episode: 2
-  season: 1
-  title: One Piece
-
-? "[ACX]_Wolf's_Spirit_001.mkv"
-: episode: 1
-  release_group: ACX
-  title: "Wolf's Spirit"
-
-? Project.Runway.S14E00.and.S14E01.(Eng.Subs).SDTV.x264-[2Maverick].mp4
-: episode: [0, 1]
-  format: TV
-  release_group: 2Maverick
-  season: 14
-  title: Project Runway
-  subtitle_language: en
-  video_codec: h264
-
-? '[Hatsuyuki-Kaitou]_Fairy_Tail_2_-_16-20_[720p][10bit].torrent'
-: episode: [16, 17, 18, 19, 20]
-  release_group: Hatsuyuki-Kaitou
-  screen_size: 720p
-  title: Fairy Tail 2
-  video_profile: 10bit
-
-? '[Hatsuyuki-Kaitou]_Fairy_Tail_2_-_16-20_(191-195)_[720p][10bit].torrent'
-: options: -E
-  episode: [16, 17, 18, 19, 20, 191, 192, 193, 194, 195]
-  release_group: Hatsuyuki-Kaitou
-  screen_size: 720p
-  title: Fairy Tail 2
-
-? "Looney Tunes 1940x01 Porky's Last Stand.mkv"
-: episode: 1
-  season: 1940
-  title: Looney Tunes
-  episode_title: Porky's Last Stand
-  year: 1940
-
-? The.Good.Wife.S06E01.E10.720p.WEB-DL.DD5.1.H.264-CtrlHD/The.Good.Wife.S06E09.Trust.Issues.720p.WEB-DL.DD5.1.H.264-CtrlHD.mkv
-: audio_channels: '5.1'
-  audio_codec: DolbyDigital
-  episode: 9
-  format: WEB-DL
-  release_group: CtrlHD
-  screen_size: 720p
-  season: 6
-  title: The Good Wife
-  episode_title: Trust Issues
-  video_codec: h264
-
-? Fear the Walking Dead - 01x02 - So Close, Yet So Far.REPACK-KILLERS.French.C.updated.Addic7ed.com.mkv
-: episode: 2
-  language: fr
-  other: Proper
-  proper_count: 1
-  season: 1
-  title: Fear the Walking Dead
-  episode_title: So Close, Yet So Far
-
-? Fear the Walking Dead - 01x02 - En Close, Yet En Far.REPACK-KILLERS.French.C.updated.Addic7ed.com.mkv
-: episode: 2
-  language: fr
-  other: Proper
-  proper_count: 1
-  season: 1
-  title: Fear the Walking Dead
-  episode_title: En Close, Yet En Far
-
-? /av/unsorted/The.Daily.Show.2015.07.22.Jake.Gyllenhaal.720p.HDTV.x264-BATV.mkv
-: date: 2015-07-22
-  format: HDTV
-  release_group: BATV
-  screen_size: 720p
-  title: The Daily Show
-  episode_title: Jake Gyllenhaal
-  video_codec: h264
-
-? "[7.1.7.8.5] Foo Bar - 11 (H.264) [5235532D].mkv"
-: options: -E
-  episode: 11
-
-? my 720p show S01E02
-: options: -T "my 720p show"
-  title: my 720p show
-  season: 1
-  episode: 2
-
-? my 720p show S01E02 720p
-: options: -T "my 720p show"
-  title: my 720p show
-  season: 1
-  episode: 2
-  screen_size: 720p
-
-? -my 720p show S01E02
-: options: -T "re:my \d+p show"
-  screen_size: 720p
-
-? Show S01E02
-: options: -T "The Show"
-  title: Show
-  season: 1
-  episode: 2
-
-? Foo's &amp; Bars (2009) S01E01 720p XviD-2HD[AOEU]
-: episode: 1
-  release_group: 2HD[AOEU]
-  screen_size: 720p
-  season: 1
-  title: Foo's &amp; Bars
-  video_codec: XviD
-  year: 2009
-
-? Date.Series.10-11-2008.XViD
-: date: 2008-11-10
-  title: Date
-  video_codec: XviD
-
-? Scrubs/SEASON-06/Scrubs.S06E09.My.Perspective.DVDRip.XviD-WAT/scrubs.s06e09.dvdrip.xvid-wat.avi
-: container: avi
-  episode: 9
-  episode_title: My Perspective
-  format: DVD
-  mimetype: video/x-msvideo
-  release_group: WAT
-  season: 6
-  title: Scrubs
-  video_codec: XviD
-
-? '[PuyaSubs!] Digimon Adventure tri - 01 [720p][F9967949].mkv'
-: container: mkv
-  crc32: F9967949
-  episode: 1
-  mimetype: video/x-matroska
-  release_group: PuyaSubs!
-  screen_size: 720p
-  title: Digimon Adventure tri
-
-? Sherlock.S01.720p.BluRay.x264-AVCHD
-: format: BluRay
-  screen_size: 720p
-  season: 1
-  title: Sherlock
-  video_codec: h264
-
-? Running.Wild.With.Bear.Grylls.S02E07.Michael.B.Jordan.PROPER.HDTV.x264-W4F.avi
-: container: avi
-  episode: 7
-  episode_title: Michael B Jordan
-  format: HDTV
-  mimetype: video/x-msvideo
-  other: Proper
-  proper_count: 1
-  release_group: W4F
-  season: 2
-  title: Running Wild With Bear Grylls
-  video_codec: h264
-
-? Homeland.S05E11.Our.Man.in.Damascus.German.Sub.720p.HDTV.x264.iNTERNAL-BaCKToRG
-: episode: 11
-  episode_title: Our Man in Damascus
-  format: HDTV
-  release_group: iNTERNAL-BaCKToRG
-  screen_size: 720p
-  season: 5
-  subtitle_language: de
-  title: Homeland
-  type: episode
-  video_codec: h264
-
-? Breaking.Bad.S01E01.2008.BluRay.VC1.1080P.5.1.WMV-NOVO
-: title: Breaking Bad
-  season: 1
-  episode: 1
-  year: 2008
-  format: BluRay
-  screen_size: 1080p
-  audio_channels: '5.1'
-  container: WMV
-  release_group: NOVO
-  type: episode
-
-? Cosmos.A.Space.Time.Odyssey.S01E02.HDTV.x264.PROPER-LOL
-: title: Cosmos A Space Time Odyssey
-  season: 1
-  episode: 2
-  format: HDTV
-  video_codec: h264
-  other: Proper
-  proper_count: 1
-  release_group: LOL
-  type: episode
-
-? Fear.The.Walking.Dead.S02E01.HDTV.x264.AAC.MP4-k3n
-: title: Fear The Walking Dead
-  season: 2
-  episode: 1
-  format: HDTV
-  video_codec: h264
-  audio_codec: AAC
-  container: MP4
-  release_group: k3n
-  type: episode
-
-? Elementary.S01E01.Pilot.DVDSCR.x264.PREAiR-NoGRP
-: title: Elementary
-  season: 1
-  episode: 1
-  episode_details: Pilot
-  episode_title: Pilot
-  format: DVD
-  video_codec: h264
-  other: [Screener, Preair]
-  release_group: NoGRP
-  type: episode
-
-? Once.Upon.a.Time.S05E19.HDTV.x264.REPACK-LOL[ettv]
-: title: Once Upon a Time
-  season: 5
-  episode: 19
-  format: HDTV
-  video_codec: h264
-  other: Proper
-  proper_count: 1
-  release_group: LOL[ettv]
-  type: episode
-
-? Show.Name.S01E03.WEB-DL.x264.HUN-nIk
-: title: Show Name
-  season: 1
-  episode: 3
-  format: WEB-DL
-  video_codec: h264
-  language: hu
-  release_group: nIk
-  type: episode
-
-? Game.of.Thrones.S6.Ep5.X265.Dolby.2.0.KTM3.mp4
-: audio_channels: '2.0'
-  audio_codec: DolbyDigital
-  container: mp4
-  episode: 5
-  release_group: KTM3
-  season: 6
-  title: Game of Thrones
-  type: episode
-  video_codec: h265
-
-? Fargo.-.Season.1.-.720p.BluRay.-.x264.-.ShAaNiG
-: format: BluRay
-  release_group: ShAaNiG
-  screen_size: 720p
-  season: 1
-  title: Fargo
-  type: episode
-  video_codec: h264
-
-? Show.Name.S02E02.Episode.Title.1080p.WEB-DL.x264.5.1Ch.-.Group
-: audio_channels: '5.1'
-  episode: 2
-  episode_title: Episode Title
-  format: WEB-DL
-  release_group: Group
-  screen_size: 1080p
-  season: 2
-  title: Show Name
-  type: episode
-  video_codec: h264
-
-? Breaking.Bad.S01E01.2008.BluRay.VC1.1080P.5.1.WMV-NOVO
-: audio_channels: '5.1'
-  container: WMV
-  episode: 1
-  format: BluRay
-  release_group: NOVO
-  screen_size: 1080p
-  season: 1
-  title: Breaking Bad
-  type: episode
-  year: 2008
-
-? Cosmos.A.Space.Time.Odyssey.S01E02.HDTV.x264.PROPER-LOL
-: episode: 2
-  format: HDTV
-  other: Proper
-  proper_count: 1
-  release_group: LOL
-  season: 1
-  title: Cosmos A Space Time Odyssey
-  type: episode
-  video_codec: h264
-
-? Elementary.S01E01.Pilot.DVDSCR.x264.PREAiR-NoGRP
-: episode: 1
-  episode_details: Pilot
-  episode_title: Pilot
-  format: DVD
-  other:
-  - Screener
-  - Preair
-  release_group: NoGRP
-  season: 1
-  title: Elementary
-  type: episode
-  video_codec: h264
-
-? Fear.The.Walking.Dead.S02E01.HDTV.x264.AAC.MP4-k3n.mp4
-: audio_codec: AAC
-  container:
-  - MP4
-  - mp4
-  episode: 1
-  format: HDTV
-  mimetype: video/mp4
-  release_group: k3n
-  season: 2
-  title: Fear The Walking Dead
-  type: episode
-  video_codec: h264
-
-? Game.of.Thrones.S03.1080p.BluRay.DTS-HD.MA.5.1.AVC.REMUX-FraMeSToR
-: audio_channels: '5.1'
-  audio_codec: DTS
-  audio_profile: HDMA
-  format: BluRay
-  other: Remux
-  release_group: FraMeSToR
-  screen_size: 1080p
-  season: 3
-  title: Game of Thrones
-  type: episode
-
-? Show.Name.S01E02.HDTV.x264.NL-subs-ABC
-: episode: 2
-  format: HDTV
-  release_group: ABC
-  season: 1
-  subtitle_language: nl
-  title: Show Name
-  type: episode
-  video_codec: h264
-
-? Friends.S01-S10.COMPLETE.720p.BluRay.x264-PtM
-: format: BluRay
-  other: Complete
-  release_group: PtM
-  screen_size: 720p
-  season: # Should it be [1, 2, 3, 4, 5, 6, 7, 8, 9, 10] ?
-  - 1
-  - 2
-  - 3
-  - 4
-  - 5
-  - 6
-  - 7
-  - 8
-  - 9
-  - 10
-  title: Friends
-  type: episode
-  video_codec: h264
-
-? Duck.Dynasty.S02E07.Streik.German.DOKU.DL.WS.DVDRiP.x264-CDP
-: episode: 7
-  episode_title: Streik German DOKU
-  format: DVD
-  language: mul
-  other: WideScreen
-  release_group: CDP
-  season: 2
-  title: Duck Dynasty
-  type: episode
-  video_codec: h264
-
-? Family.Guy.S13E14.JOLO.German.AC3D.DL.720p.WebHD.x264-CDD
-: audio_codec: AC3
-  episode: 14
-  episode_title: JOLO German
-  format: WEB-DL
-  language: mul
-  release_group: CDD
-  screen_size: 720p
-  season: 13
-  title: Family Guy
-  type: episode
-  video_codec: h264
-
-? How.I.Met.Your.Mother.COMPLETE.SERIES.DVDRip.XviD-AR
-: options: -L en -C us
-  format: DVD
-  other: Complete
-  release_group: AR
-  title: How I Met Your Mother
-  type: movie
-  video_codec: XviD
-
-? Show Name The Complete Seasons 1 to 5 720p BluRay x265 HEVC-SUJAIDR[UTR]
-: format: BluRay
-  other: Complete
-  release_group: SUJAIDR[UTR]
-  screen_size: 720p
-  season:
-  - 1
-  - 2
-  - 3
-  - 4
-  - 5
-  title: Show Name
-  type: episode
-  video_codec: h265
-
-? Fear.the.Walking.Dead.-.Season.2.epi.02.XviD.Eng.Ac3-5.1.sub.ita.eng.iCV-MIRCrew
-: options: -t episode
-  audio_channels: '5.1'
-  audio_codec: AC3
-  episode: 2
-  episode_title: epi
-  language: en
-  release_group: iCV-MIRCrew
-  season: 2
-  subtitle_language: it
-  title: Fear the Walking Dead
-  type: episode
-  video_codec: XviD
-
-? Game.Of.Thrones.S06E04.720p.PROPER.HDTV.x264-HDD
-: episode: 4
-  format: HDTV
-  other: Proper
-  proper_count: 1
-  release_group: HDD
-  screen_size: 720p
-  season: 6
-  title: Game Of Thrones
-  type: episode
-  video_codec: h264
\ No newline at end of file
diff --git a/lib/guessit/test/movies.yml b/lib/guessit/test/movies.yml
deleted file mode 100644
index a132b1167c3948a9e6533dda4c2d4992a8bbf24f..0000000000000000000000000000000000000000
--- a/lib/guessit/test/movies.yml
+++ /dev/null
@@ -1,837 +0,0 @@
-? __default__
-: type: movie
-
-? Movies/Fear and Loathing in Las Vegas (1998)/Fear.and.Loathing.in.Las.Vegas.720p.HDDVD.DTS.x264-ESiR.mkv
-: title: Fear and Loathing in Las Vegas
-  year: 1998
-  screen_size: 720p
-  format: HD-DVD
-  audio_codec: DTS
-  video_codec: h264
-  container: mkv
-  release_group: ESiR
-
-? Movies/El Dia de la Bestia (1995)/El.dia.de.la.bestia.DVDrip.Spanish.DivX.by.Artik[SEDG].avi
-: title: El Dia de la Bestia
-  year: 1995
-  format: DVD
-  language: spanish
-  video_codec: DivX
-  release_group: Artik[SEDG]
-  container: avi
-
-? Movies/Dark City (1998)/Dark.City.(1998).DC.BDRip.720p.DTS.X264-CHD.mkv
-: title: Dark City
-  year: 1998
-  format: BluRay
-  screen_size: 720p
-  audio_codec: DTS
-  video_codec: h264
-  release_group: CHD
-
-? Movies/Sin City (BluRay) (2005)/Sin.City.2005.BDRip.720p.x264.AC3-SEPTiC.mkv
-: title: Sin City
-  year: 2005
-  format: BluRay
-  screen_size: 720p
-  video_codec: h264
-  audio_codec: AC3
-  release_group: SEPTiC
-
-? Movies/Borat (2006)/Borat.(2006).R5.PROPER.REPACK.DVDRip.XviD-PUKKA.avi
-: title: Borat
-  year: 2006
-  proper_count: 2
-  format: DVD
-  other: [ R5, Proper ]
-  video_codec: XviD
-  release_group: PUKKA
-
-? "[XCT].Le.Prestige.(The.Prestige).DVDRip.[x264.HP.He-Aac.{Fr-Eng}.St{Fr-Eng}.Chaps].mkv"
-: title: Le Prestige
-  format: DVD
-  video_codec: h264
-  video_profile: HP
-  audio_codec: AAC
-  audio_profile: HE
-  language: [ french, english ]
-  subtitle_language: [ french, english ]
-  release_group: Chaps
-
-? Battle Royale (2000)/Battle.Royale.(Batoru.Rowaiaru).(2000).(Special.Edition).CD1of2.DVDRiP.XviD-[ZeaL].avi
-: title: Battle Royale
-  year: 2000
-  edition: Special Edition
-  cd: 1
-  cd_count: 2
-  format: DVD
-  video_codec: XviD
-  release_group: ZeaL
-
-? Movies/Brazil (1985)/Brazil_Criterion_Edition_(1985).CD2.avi
-: title: Brazil
-  edition: Criterion Edition
-  year: 1985
-  cd: 2
-
-? Movies/Persepolis (2007)/[XCT] Persepolis [H264+Aac-128(Fr-Eng)+ST(Fr-Eng)+Ind].mkv
-: title: Persepolis
-  year: 2007
-  video_codec: h264
-  audio_codec: AAC
-  language: [ French, English ]
-  subtitle_language: [ French, English ]
-  release_group: Ind
-
-? Movies/Toy Story (1995)/Toy Story [HDTV 720p English-Spanish].mkv
-: title: Toy Story
-  year: 1995
-  format: HDTV
-  screen_size: 720p
-  language: [ english, spanish ]
-
-? Movies/Office Space (1999)/Office.Space.[Dual-DVDRip].[Spanish-English].[XviD-AC3-AC3].[by.Oswald].avi
-: title: Office Space
-  year: 1999
-  format: DVD
-  language: [ english, spanish ]
-  video_codec: XviD
-  audio_codec: AC3
-
-? Movies/Wild Zero (2000)/Wild.Zero.DVDivX-EPiC.avi
-: title: Wild Zero
-  year: 2000
-  video_codec: DivX
-  release_group: EPiC
-
-? movies/Baraka_Edition_Collector.avi
-: title: Baraka
-  edition: Collector Edition
-
-? Movies/Blade Runner (1982)/Blade.Runner.(1982).(Director's.Cut).CD1.DVDRip.XviD.AC3-WAF.avi
-: title: Blade Runner
-  year: 1982
-  edition: Director's cut
-  cd: 1
-  format: DVD
-  video_codec: XviD
-  audio_codec: AC3
-  release_group: WAF
-
-? movies/American.The.Bill.Hicks.Story.2009.DVDRip.XviD-EPiSODE.[UsaBit.com]/UsaBit.com_esd-americanbh.avi
-: title: American The Bill Hicks Story
-  year: 2009
-  format: DVD
-  video_codec: XviD
-  release_group: EPiSODE
-  website: UsaBit.com
-
-? movies/Charlie.And.Boots.DVDRip.XviD-TheWretched/wthd-cab.avi
-: title: Charlie And Boots
-  format: DVD
-  video_codec: XviD
-  release_group: TheWretched
-
-? movies/Steig Larsson Millenium Trilogy (2009) BRrip 720 AAC x264/(1)The Girl With The Dragon Tattoo (2009) BRrip 720 AAC x264.mkv
-: title: The Girl With The Dragon Tattoo
-  #film_title: Steig Larsson Millenium Trilogy
-  #film: 1
-  year: 2009
-  format: BluRay
-  audio_codec: AAC
-  video_codec: h264
-  screen_size: 720p
-
-? movies/Greenberg.REPACK.LiMiTED.DVDRip.XviD-ARROW/arw-repack-greenberg.dvdrip.xvid.avi
-: title: Greenberg
-  format: DVD
-  video_codec: XviD
-  release_group: ARROW
-  other: ['Proper', 'Limited']
-  proper_count: 1
-
-? Movies/Fr - Paris 2054, Renaissance (2005) - De Christian Volckman - (Film Divx Science Fiction Fantastique Thriller Policier N&B).avi
-: title: Paris 2054, Renaissance
-  year: 2005
-  language: french
-  video_codec: DivX
-
-? Movies/[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi
-: title: Avida
-  year: 2006
-  language: french
-  format: DVD
-  video_codec: XviD
-  release_group: PROD
-
-? Movies/Alice in Wonderland DVDRip.XviD-DiAMOND/dmd-aw.avi
-: title: Alice in Wonderland
-  format: DVD
-  video_codec: XviD
-  release_group: DiAMOND
-
-? Movies/Ne.Le.Dis.A.Personne.Fr 2 cd/personnea_mp.avi
-: title: Ne Le Dis A Personne
-  language: french
-  cd_count: 2
-
-? Movies/Bunker Palace Hôtel (Enki Bilal) (1989)/Enki Bilal - Bunker Palace Hotel (Fr Vhs Rip).avi
-: title: Bunker Palace Hôtel
-  year: 1989
-  language: french
-  format: VHS
-
-? Movies/21 (2008)/21.(2008).DVDRip.x264.AC3-FtS.[sharethefiles.com].mkv
-: title: "21"
-  year: 2008
-  format: DVD
-  video_codec: h264
-  audio_codec: AC3
-  release_group: FtS
-  website: sharethefiles.com
-
-? Movies/9 (2009)/9.2009.Blu-ray.DTS.720p.x264.HDBRiSe.[sharethefiles.com].mkv
-: title: "9"
-  year: 2009
-  format: BluRay
-  audio_codec: DTS
-  screen_size: 720p
-  video_codec: h264
-  release_group: HDBRiSe
-  website: sharethefiles.com
-
-? Movies/Mamma.Mia.2008.DVDRip.AC3.XviD-CrazyTeam/Mamma.Mia.2008.DVDRip.AC3.XviD-CrazyTeam.avi
-: title: Mamma Mia
-  year: 2008
-  format: DVD
-  audio_codec: AC3
-  video_codec: XviD
-  release_group: CrazyTeam
-
-? Movies/M.A.S.H. (1970)/MASH.(1970).[Divx.5.02][Dual-Subtitulos][DVDRip].ogm
-: title: MASH
-  year: 1970
-  video_codec: DivX
-  format: DVD
-
-? Movies/The Doors (1991)/09.03.08.The.Doors.(1991).BDRip.720p.AC3.X264-HiS@SiLUHD-English.[sharethefiles.com].mkv
-: title: The Doors
-  year: 1991
-  date: 2008-03-09
-  format: BluRay
-  screen_size: 720p
-  audio_codec: AC3
-  video_codec: h264
-  release_group: HiS@SiLUHD
-  language: english
-  website: sharethefiles.com
-
-? Movies/The Doors (1991)/08.03.09.The.Doors.(1991).BDRip.720p.AC3.X264-HiS@SiLUHD-English.[sharethefiles.com].mkv
-: options: --date-year-first
-  title: The Doors
-  year: 1991
-  date: 2008-03-09
-  format: BluRay
-  screen_size: 720p
-  audio_codec: AC3
-  video_codec: h264
-  release_group: HiS@SiLUHD
-  language: english
-  website: sharethefiles.com
-
-? Movies/Ratatouille/video_ts-ratatouille.srt
-: title: Ratatouille
-  format: DVD
-
-# Removing this one because 001 is guessed as an episode number.
-# ? Movies/001 __ A classer/Fantomas se déchaine - Louis de Funès.avi
-# : title: Fantomas se déchaine
-
-? Movies/Comme une Image (2004)/Comme.Une.Image.FRENCH.DVDRiP.XViD-NTK.par-www.divx-overnet.com.avi
-: title: Comme une Image
-  year: 2004
-  language: french
-  format: DVD
-  video_codec: XviD
-  release_group: NTK
-  website: www.divx-overnet.com
-
-? Movies/Fantastic Mr Fox/Fantastic.Mr.Fox.2009.DVDRip.{x264+LC-AAC.5.1}{Fr-Eng}{Sub.Fr-Eng}-™.[sharethefiles.com].mkv
-: title: Fantastic Mr Fox
-  year: 2009
-  format: DVD
-  video_codec: h264
-  audio_codec: AAC
-  audio_profile: LC
-  audio_channels: "5.1"
-  language: [ french, english ]
-  subtitle_language: [ french, english ]
-  website: sharethefiles.com
-
-? Movies/Somewhere.2010.DVDRip.XviD-iLG/i-smwhr.avi
-: title: Somewhere
-  year: 2010
-  format: DVD
-  video_codec: XviD
-  release_group: iLG
-
-? Movies/Moon_(2009).mkv
-: title: Moon
-  year: 2009
-
-? Movies/Moon_(2009)-x02-Making_Of.mkv
-: title: Moon
-  year: 2009
-  bonus: 2
-  bonus_title: Making Of
-
-? movies/James_Bond-f17-Goldeneye.mkv
-: title: Goldeneye
-  film_title: James Bond
-  film: 17
-
-
-? /movies/James_Bond-f21-Casino_Royale.mkv
-: title: Casino Royale
-  film_title: James Bond
-  film: 21
-
-? /movies/James_Bond-f21-Casino_Royale-x01-Becoming_Bond.mkv
-: title: Casino Royale
-  film_title: James Bond
-  film: 21
-  bonus: 1
-  bonus_title: Becoming Bond
-
-? /movies/James_Bond-f21-Casino_Royale-x02-Stunts.mkv
-: title: Casino Royale
-  film_title: James Bond
-  film: 21
-  bonus: 2
-  bonus_title: Stunts
-
-? OSS_117--Cairo,_Nest_of_Spies.mkv
-: title: OSS 117
-# TODO: Implement subTitle for movies.
-
-? The Godfather Part 3.mkv
-? The Godfather Part III.mkv
-: title: The Godfather
-  part: 3
-
-? Foobar Part VI.mkv
-: title: Foobar
-  part: 6
-
-? The_Insider-(1999)-x02-60_Minutes_Interview-1996.mp4
-: title: The Insider
-  year: 1999
-  bonus: 2
-  bonus_title: 60 Minutes Interview-1996
-
-? Rush.._Beyond_The_Lighted_Stage-x09-Between_Sun_and_Moon-2002_Hartford.mkv
-: title: Rush Beyond The Lighted Stage
-  bonus: 9
-  bonus_title: Between Sun and Moon
-  year: 2002
-
-? /public/uTorrent/Downloads Finished/Movies/Indiana.Jones.and.the.Temple.of.Doom.1984.HDTV.720p.x264.AC3.5.1-REDµX/Indiana.Jones.and.the.Temple.of.Doom.1984.HDTV.720p.x264.AC3.5.1-REDµX.mkv
-: title: Indiana Jones and the Temple of Doom
-  year: 1984
-  format: HDTV
-  screen_size: 720p
-  video_codec: h264
-  audio_codec: AC3
-  audio_channels: "5.1"
-  release_group: REDµX
-
-? The.Director’s.Notebook.2006.Blu-Ray.x264.DXVA.720p.AC3-de[42].mkv
-: title: The Director’s Notebook
-  year: 2006
-  format: BluRay
-  video_codec: h264
-  video_api: DXVA
-  screen_size: 720p
-  audio_codec: AC3
-  release_group: de[42]
-
-
-? Movies/Cosmopolis.2012.LiMiTED.720p.BluRay.x264-AN0NYM0US[bb]/ano-cosmo.720p.mkv
-: title: Cosmopolis
-  year: 2012
-  screen_size: 720p
-  video_codec: h264
-  release_group: AN0NYM0US[bb]
-  format: BluRay
-  other: Limited
-
-? movies/La Science des Rêves (2006)/La.Science.Des.Reves.FRENCH.DVDRip.XviD-MP-AceBot.avi
-: title: La Science des Rêves
-  year: 2006
-  format: DVD
-  video_codec: XviD
-  video_profile: MP
-  release_group: AceBot
-  language: French
-
-? The_Italian_Job.mkv
-: title: The Italian Job
-
-? The.Rum.Diary.2011.1080p.BluRay.DTS.x264.D-Z0N3.mkv
-: title: The Rum Diary
-  year: 2011
-  screen_size: 1080p
-  format: BluRay
-  video_codec: h264
-  audio_codec: DTS
-  release_group: D-Z0N3
-
-? Life.Of.Pi.2012.1080p.BluRay.DTS.x264.D-Z0N3.mkv
-: title: Life Of Pi
-  year: 2012
-  screen_size: 1080p
-  format: BluRay
-  video_codec: h264
-  audio_codec: DTS
-  release_group: D-Z0N3
-
-? The.Kings.Speech.2010.1080p.BluRay.DTS.x264.D Z0N3.mkv
-: title: The Kings Speech
-  year: 2010
-  screen_size: 1080p
-  format: BluRay
-  audio_codec: DTS
-  video_codec: h264
-  release_group: D Z0N3
-
-? Street.Kings.2008.BluRay.1080p.DTS.x264.dxva EuReKA.mkv
-: title: Street Kings
-  year: 2008
-  format: BluRay
-  screen_size: 1080p
-  audio_codec: DTS
-  video_codec: h264
-  video_api: DXVA
-  release_group: EuReKA
-
-? 2001.A.Space.Odyssey.1968.HDDVD.1080p.DTS.x264.dxva EuReKA.mkv
-: title: 2001 A Space Odyssey
-  year: 1968
-  format: HD-DVD
-  screen_size: 1080p
-  audio_codec: DTS
-  video_codec: h264
-  video_api: DXVA
-  release_group: EuReKA
-
-? 2012.2009.720p.BluRay.x264.DTS WiKi.mkv
-: title: "2012"
-  year: 2009
-  screen_size: 720p
-  format: BluRay
-  video_codec: h264
-  audio_codec: DTS
-  release_group: WiKi
-
-? /share/Download/movie/Dead Man Down (2013) BRRiP XViD DD5_1 Custom NLSubs =-_lt Q_o_Q gt-=_/XD607ebb-BRc59935-5155473f-1c5f49/XD607ebb-BRc59935-5155473f-1c5f49.avi
-: title: Dead Man Down
-  year: 2013
-  format: BluRay
-  video_codec: XviD
-  audio_channels: "5.1"
-  audio_codec: DolbyDigital
-  uuid: XD607ebb-BRc59935-5155473f-1c5f49
-
-? Pacific.Rim.3D.2013.COMPLETE.BLURAY-PCH.avi
-: title: Pacific Rim
-  year: 2013
-  format: BluRay
-  other:
-   - Complete
-   - 3D
-  release_group: PCH
-
-? Immersion.French.2011.STV.READNFO.QC.FRENCH.ENGLISH.NTSC.DVDR.nfo
-: title: Immersion French
-  year: 2011
-  language:
-    - French
-    - English
-  format: DVD
-  other: NTSC
-
-? Immersion.French.2011.STV.READNFO.QC.FRENCH.NTSC.DVDR.nfo
-: title: Immersion French
-  year: 2011
-  language: French
-  format: DVD
-  other: NTSC
-
-? Immersion.French.2011.STV.READNFO.QC.NTSC.DVDR.nfo
-: title: Immersion
-  language: French
-  year: 2011
-  format: DVD
-  other: NTSC
-
-? French.Immersion.2011.STV.READNFO.QC.ENGLISH.NTSC.DVDR.nfo
-: title: French Immersion
-  year: 2011
-  language: ENGLISH
-  format: DVD
-  other: NTSC
-
-? Howl's_Moving_Castle_(2004)_[720p,HDTV,x264,DTS]-FlexGet.avi
-: video_codec: h264
-  format: HDTV
-  title: Howl's Moving Castle
-  screen_size: 720p
-  year: 2004
-  audio_codec: DTS
-  release_group: FlexGet
-
-? Pirates de langkasuka.2008.FRENCH.1920X1080.h264.AVC.AsiaRa.mkv
-: screen_size: 1080p
-  year: 2008
-  language: French
-  video_codec: h264
-  title: Pirates de langkasuka
-  release_group: AsiaRa
-
-? Masala (2013) Telugu Movie HD DVDScr XviD - Exclusive.avi
-: year: 2013
-  video_codec: XviD
-  title: Masala
-  format: HD-DVD
-  other: Screener
-  language: Telugu
-  release_group: Exclusive
-
-? Django Unchained 2012 DVDSCR X264 AAC-P2P.nfo
-: year: 2012
-  other: Screener
-  video_codec: h264
-  title: Django Unchained
-  audio_codec: AAC
-  format: DVD
-  release_group: P2P
-
-? Ejecutiva.En.Apuros(2009).BLURAY.SCR.Xvid.Spanish.LanzamientosD.nfo
-: year: 2009
-  other: Screener
-  format: BluRay
-  video_codec: XviD
-  language: Spanish
-  title: Ejecutiva En Apuros
-
-? Die.Schluempfe.2.German.DL.1080p.BluRay.x264-EXQUiSiTE.mkv
-: title: Die Schluempfe 2
-  format: BluRay
-  language:
-    - Multiple languages
-    - German
-  video_codec: h264
-  release_group: EXQUiSiTE
-  screen_size: 1080p
-
-? Rocky 1976 French SubForced BRRip x264 AC3-FUNKY.mkv
-: title: Rocky
-  year: 1976
-  subtitle_language: French
-  format: BluRay
-  video_codec: h264
-  audio_codec: AC3
-  release_group: FUNKY
-
-? REDLINE (BD 1080p H264 10bit FLAC) [3xR].mkv
-: title: REDLINE
-  format: BluRay
-  video_codec: h264
-  video_profile: 10bit
-  audio_codec: FLAC
-  screen_size: 1080p
-
-? The.Lizzie.McGuire.Movie.(2003).HR.DVDRiP.avi
-: title: The Lizzie McGuire Movie
-  year: 2003
-  format: DVD
-  other: HR
-
-? Hua.Mulan.BRRIP.MP4.x264.720p-HR.avi
-: title: Hua Mulan
-  video_codec: h264
-  format: BluRay
-  screen_size: 720p
-  other: HR
-
-? Dr.Seuss.The.Lorax.2012.DVDRip.LiNE.XviD.AC3.HQ.Hive-CM8.mp4
-: video_codec: XviD
-  title: Dr Seuss The Lorax
-  format: DVD
-  other: LiNE
-  year: 2012
-  audio_codec: AC3
-  audio_profile: HQ
-  release_group: Hive-CM8
-
-? "Star Wars: Episode IV - A New Hope (2004) Special Edition.MKV"
-: title: "Star Wars: Episode IV"
-  alternative_title: A New Hope
-  year: 2004
-  edition: Special Edition
-
-? Dr.LiNE.The.Lorax.2012.DVDRip.LiNE.XviD.AC3.HQ.Hive-CM8.mp4
-: video_codec: XviD
-  title: Dr LiNE The Lorax
-  format: DVD
-  other: LiNE
-  year: 2012
-  audio_codec: AC3
-  audio_profile: HQ
-  release_group: Hive-CM8
-
-? Dr.LiNE.The.Lorax.2012.DVDRip.XviD.AC3.HQ.Hive-CM8.mp4
-: video_codec: XviD
-  title: Dr LiNE The Lorax
-  format: DVD
-  year: 2012
-  audio_codec: AC3
-  audio_profile: HQ
-  release_group: Hive-CM8
-
-? Perfect Child-2007-TRUEFRENCH-TVRip.Xvid-h@mster.avi
-: release_group: h@mster
-  title: Perfect Child
-  video_codec: XviD
-  language: French
-  format: TV
-  year: 2007
-
-? entre.ciel.et.terre.(1994).dvdrip.h264.aac-psypeon.avi
-: audio_codec: AAC
-  format: DVD
-  release_group: psypeon
-  title: entre ciel et terre
-  video_codec: h264
-  year: 1994
-
-? Yves.Saint.Laurent.2013.FRENCH.DVDSCR.MD.XviD-ViVARiUM.avi
-: format: DVD
-  language: French
-  other:
-      - MD
-      - Screener
-  release_group: ViVARiUM
-  title: Yves Saint Laurent
-  video_codec: XviD
-  year: 2013
-
-? Echec et Mort - Hard to Kill - Steven Seagal Multi 1080p BluRay x264 CCATS.avi
-: format: BluRay
-  language: Multiple languages
-  release_group: CCATS
-  screen_size: 1080p
-  title: Echec et Mort
-  alternative_title:
-    - Hard to Kill
-    - Steven Seagal
-  video_codec: h264
-
-? Paparazzi - Timsit/Lindon (MKV 1080p tvripHD)
-: options: -n
-  title: Paparazzi
-  alternative_title:
-    - Timsit
-    - Lindon
-  screen_size: 1080p
-  container: MKV
-  format: HDTV
-
-? some.movie.720p.bluray.x264-mind
-: title: some movie
-  screen_size: 720p
-  video_codec: h264
-  release_group: mind
-  format: BluRay
-
-? Dr LiNE The Lorax 720p h264 BluRay
-: title: Dr LiNE The Lorax
-  screen_size: 720p
-  video_codec: h264
-  format: BluRay
-
-# TODO: Camelcase implementation
-#? BeatdownFrenchDVDRip.mkv
-#: options: -c
-#  title: Beatdown
-#  language: French
-#  format: DVD
-
-#? YvesSaintLaurent2013FrenchDVDScrXvid.avi
-#: options: -c
-#  format: DVD
-#  language: French
-#  other: Screener
-#  title: Yves saint laurent
-#  video_codec: XviD
-#  year: 2013
-
-
-? Elle.s.en.va.720p.mkv
-: screen_size: 720p
-  title: Elle s en va
-
-? FooBar.7.PDTV-FlexGet
-: format: DVB
-  release_group: FlexGet
-  title: FooBar 7
-
-? h265 - HEVC Riddick Unrated Director Cut French 1080p DTS.mkv
-: audio_codec: DTS
-  edition: Director's cut
-  language: fr
-  screen_size: 1080p
-  title: Riddick
-  other: Unrated
-  video_codec: h265
-
-? "[h265 - HEVC] Riddick Unrated Director Cut French [1080p DTS].mkv"
-: audio_codec: DTS
-  edition: Director's cut
-  language: fr
-  screen_size: 1080p
-  title: Riddick
-  other: Unrated
-  video_codec: h265
-
-? Barbecue-2014-French-mHD-1080p
-: language: fr
-  other: mHD
-  screen_size: 1080p
-  title: Barbecue
-  year: 2014
-
-? Underworld Quadrilogie VO+VFF+VFQ 1080p HDlight.x264~Tonyk~Monde Infernal
-: language: fr
-  other:
-    - HDLight
-    - OV
-  screen_size: 1080p
-  title: Underworld Quadrilogie
-  video_codec: h264
-
-? A Bout Portant (The Killers).PAL.Multi.DVD-R-KZ
-: format: DVD
-  language: mul
-  release_group: KZ
-  title: A Bout Portant
-
-? "Mise à Sac (Alain Cavalier, 1967) [Vhs.Rip.Vff]"
-: format: VHS
-  language: fr
-  title: "Mise à Sac"
-  year: 1967
-
-? A Bout Portant (The Killers).PAL.Multi.DVD-R-KZ
-: format: DVD
-  other: PAL
-  language: mul
-  release_group: KZ
-  title: A Bout Portant
-
-? Youth.In.Revolt.(Be.Bad).2009.MULTI.1080p.LAME3*92-MEDIOZZ
-: audio_codec: MP3
-  language: mul
-  release_group: MEDIOZZ
-  screen_size: 1080p
-  title: Youth In Revolt
-  year: 2009
-
-? La Defense Lincoln (The Lincoln Lawyer) 2011 [DVDRIP][Vostfr]
-: format: DVD
-  subtitle_language: fr
-  title: La Defense Lincoln
-  year: 2011
-
-? '[h265 - HEVC] Fight Club French 1080p DTS.'
-: audio_codec: DTS
-  language: fr
-  screen_size: 1080p
-  title: Fight Club
-  video_codec: h265
-
-? Love Gourou (Mike Myers) - FR
-: language: fr
-  title: Love Gourou
-
-? '[h265 - hevc] transformers 2 1080p french ac3 6ch.'
-: audio_channels: '5.1'
-  audio_codec: AC3
-  language: fr
-  screen_size: 1080p
-  title: transformers 2
-  video_codec: h265
-
-? 1.Angry.Man.1957.mkv
-: title: 1 Angry Man
-  year: 1957
-
-? 12.Angry.Men.1957.mkv
-: title: 12 Angry Men
-  year: 1957
-
-? 123.Angry.Men.1957.mkv
-: title: 123 Angry Men
-  year: 1957
-
-? "Looney Tunes 1444x866 Porky's Last Stand.mkv"
-: screen_size: 1444x866
-  title: Looney Tunes
-
-? Das.Appartement.German.AC3D.DL.720p.BluRay.x264-TVP
-: audio_codec: AC3
-  format: BluRay
-  language: mul
-  release_group: TVP
-  screen_size: 720p
-  title: Das Appartement German
-  type: movie
-  video_codec: h264
-
-? Das.Appartement.GERMAN.AC3D.DL.720p.BluRay.x264-TVP
-: audio_codec: AC3
-  format: BluRay
-  language:
-  - de
-  - mul
-  release_group: TVP
-  screen_size: 720p
-  title: Das Appartement
-  video_codec: h264
-
-? Hyena.Road.2015.German.1080p.DL.DTSHD.Bluray.x264-pmHD
-: audio_codec: DTS
-  audio_profile: HD
-  format: BluRay
-  language:
-  - de
-  - mul
-  release_group: pmHD
-  screen_size: 1080p
-  title: Hyena Road
-  type: movie
-  video_codec: h264
-  year: 2015
-
-? Hyena.Road.2015.German.Ep.Title.1080p.DL.DTSHD.Bluray.x264-pmHD
-: audio_codec: DTS
-  audio_profile: HD
-  episode_title: German Ep Title
-  format: BluRay
-  language: mul
-  release_group: pmHD
-  screen_size: 1080p
-  title: Hyena Road
-  type: movie
-  video_codec: h264
-  year: 2015
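A side note on the two 'Das Appartement' fixtures just above: only the casing of 'German' differs, and the expected titles differ accordingly. A minimal sketch of the same contrast, assuming the vendored guessit release these fixtures were written against:

    from guessit import guessit

    # Mixed-case "German" stays in the title...
    assert guessit('Das.Appartement.German.AC3D.DL.720p.BluRay.x264-TVP')['title'] == 'Das Appartement German'
    # ...while upper-case "GERMAN" is detected as a language and dropped from it.
    assert guessit('Das.Appartement.GERMAN.AC3D.DL.720p.BluRay.x264-TVP')['title'] == 'Das Appartement'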
diff --git a/lib/guessit/test/rules/__init__.py b/lib/guessit/test/rules/__init__.py
deleted file mode 100644
index e5be370e4be5007b33fd87ec270e91eea041b66a..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
diff --git a/lib/guessit/test/rules/audio_codec.yml b/lib/guessit/test/rules/audio_codec.yml
deleted file mode 100644
index b744d7bf85259c29e95eb56a6848601ea7b3715d..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/audio_codec.yml
+++ /dev/null
@@ -1,83 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-
-
-? +MP3
-? +lame
-? +lame3.12
-? +lame3.100
-: audio_codec: MP3
-
-? +DolbyDigital
-? +DD
-? +Dolby Digital
-: audio_codec: DolbyDigital
-
-? +DolbyAtmos
-? +Dolby Atmos
-? +Atmos
-? -Atmosphere
-: audio_codec: DolbyAtmos
-
-? +AAC
-: audio_codec: AAC
-
-? +AC3
-: audio_codec: AC3
-
-? +Flac
-: audio_codec: FLAC
-
-? +DTS
-: audio_codec: DTS
-
-? +True-HD
-? +trueHD
-: audio_codec: TrueHD
-
-? +DTS-HD
-: audio_codec: DTS
-  audio_profile: HD
-
-? +DTS-HDma
-: audio_codec: DTS
-  audio_profile: HDMA
-
-? +AC3-hq
-: audio_codec: AC3
-  audio_profile: HQ
-
-? +AAC-HE
-: audio_codec: AAC
-  audio_profile: HE
-
-? +AAC-LC
-: audio_codec: AAC
-  audio_profile: LC
-
-? +AAC2.0
-: audio_codec: AAC
-  audio_channels: '2.0'
-
-? +7.1
-? +7ch
-? +8ch
-: audio_channels: '7.1'
-
-? +5.1
-? +5ch
-? +6ch
-: audio_channels: '5.1'
-
-? +2ch
-? +2.0
-? +stereo
-: audio_channels: '2.0'
-
-? +1ch
-? +mono
-: audio_channels: '1.0'
-
-? DD5.1
-: audio_codec: DolbyDigital
-  audio_channels: '5.1'
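For readers new to this fixture format: every '?' input listed before a ':' block must produce that same result map. A minimal Python equivalent of the last entry above, assuming the vendored guessit (the harness in test_yml.py always passes implicit=True, which matters for bare tokens like these):

    from guessit import guessit

    # "DD5.1" chains into both an audio_codec and an audio_channels expectation.
    result = guessit('DD5.1', {'implicit': True})
    assert result['audio_codec'] == 'DolbyDigital'
    assert result['audio_channels'] == '5.1'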
diff --git a/lib/guessit/test/rules/bonus.yml b/lib/guessit/test/rules/bonus.yml
deleted file mode 100644
index 6ef6f5b254a02753caf30efa0bf769f107619b8b..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/bonus.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? Movie Title-x01-Other Title.mkv
-? Movie Title-x01-Other Title
-? directory/Movie Title-x01-Other Title/file.mkv
-: title: Movie Title
-  bonus_title: Other Title
-  bonus: 1
-
diff --git a/lib/guessit/test/rules/cds.yml b/lib/guessit/test/rules/cds.yml
deleted file mode 100644
index cc63765e5184fc98e4ded51a0a739760f7eed7f1..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/cds.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? cd 1of3
-: cd: 1
-  cd_count: 3
-
-? Some.Title-DVDRIP-x264-CDP
-: cd: !!null
-  release_group: CDP
-  video_codec: h264
diff --git a/lib/guessit/test/rules/country.yml b/lib/guessit/test/rules/country.yml
deleted file mode 100644
index f2da1b2057c7e1ceea7b62e87bfaf9b30edfdb05..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/country.yml
+++ /dev/null
@@ -1,10 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? Us.this.is.title
-? this.is.title.US
-: country: US
-  title: this is title
-
-? This.is.us.title
-: title: This is us title
-
diff --git a/lib/guessit/test/rules/date.yml b/lib/guessit/test/rules/date.yml
deleted file mode 100644
index d7379f03c123498a087f76bd72581fc2ca5305f5..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/date.yml
+++ /dev/null
@@ -1,50 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? +09.03.08
-? +09.03.2008
-? +2008.03.09
-: date: 2008-03-09
-
-? +31.01.15
-? +31.01.2015
-? +15.01.31
-? +2015.01.31
-: date: 2015-01-31
-
-? +01.02.03
-: date: 2003-02-01
-
-? +01.02.03
-: options: --date-year-first
-  date: 2001-02-03
-
-? +01.02.03
-: options: --date-day-first
-  date: 2003-02-01
-
-? 1919
-? 2030
-: !!map {}
-
-? 2029
-: year: 2029
-
-? (1920)
-: year: 1920
-
-? 2012
-: year: 2012
-
-? 2011 2013 (2012) (2015)  # first marked year is guessed.
-: title: "2011 2013"
-  year: 2012
-
-? 2012 2009 S01E02 2015  # If no year is marked, the second one is guessed.
-: title: "2012"
-  year: 2009
-  episode_title: "2015"
-
-? Something 2 mar 2013)
-: title: Something
-  date: 2013-03-02
-  type: episode
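The three '+01.02.03' entries above rely on per-entry 'options:' strings to steer date disambiguation. A sketch of how the harness resolves such a string; parse_options is the helper test_yml.py imports, and the exact flag name is taken from the fixture itself:

    from guessit import guessit
    from guessit.options import parse_options

    opts = parse_options('--date-year-first')   # same option string as in the fixture
    opts['implicit'] = True                     # the YAML harness always forces this
    assert guessit('01.02.03', opts)['date'].isoformat() == '2001-02-03'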
diff --git a/lib/guessit/test/rules/edition.yml b/lib/guessit/test/rules/edition.yml
deleted file mode 100644
index bc35b85e6b1afd88b18ec630f8e2be9f0cd62860..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/edition.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? Director's cut
-? Edition Director's cut
-: edition: Director's cut
-
-? Collector
-? Collector Edition
-? Edition Collector
-: edition: Collector Edition
-
-? Special Edition
-? Edition Special
-? -Special
-: edition: Special Edition
-
-? Criterion Edition
-? Edition Criterion
-? -Criterion
-: edition: Criterion Edition
-
-? Deluxe
-? Deluxe Edition
-? Edition Deluxe
-: edition: Deluxe Edition
diff --git a/lib/guessit/test/rules/episodes.yml b/lib/guessit/test/rules/episodes.yml
deleted file mode 100644
index a75e67029f4af250acc85ba8e34991579405472e..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/episodes.yml
+++ /dev/null
@@ -1,247 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? +2x5
-? +2X5
-? +02x05
-? +2X05
-? +02x5
-? S02E05
-? s02e05
-? s02e5
-? s2e05
-? s02ep05
-? s2EP5
-? -s03e05
-? -s02e06
-? -3x05
-? -2x06
-: season: 2
-  episode: 5
-
-? "+0102"
-? "+102"
-: season: 1
-  episode: 2
-
-? "0102 S03E04"
-? "S03E04 102"
-: season: 3
-  episode: 4
-
-? +serie Saison 2 other
-? +serie Season 2 other
-? +serie Saisons 2 other
-? +serie Seasons 2 other
-? +serie Serie 2 other
-? +serie Series 2 other
-? +serie Season Two other
-? +serie Season II other
-: season: 2
-
-? Some Series.S02E01.Episode.title.mkv
-? Some Series/Season 02/E01-Episode title.mkv
-? Some Series/Season 02/Some Series-E01-Episode title.mkv
-? Some Dummy Directory/Season 02/Some Series-E01-Episode title.mkv
-? -Some Dummy Directory/Season 02/E01-Episode title.mkv
-? Some Series/Unsafe Season 02/Some Series-E01-Episode title.mkv
-? -Some Series/Unsafe Season 02/E01-Episode title.mkv
-? Some Series/Season 02/E01-Episode title.mkv
-? Some Series/ Season 02/E01-Episode title.mkv
-? Some Dummy Directory/Some Series S02/E01-Episode title.mkv
-? Some Dummy Directory/S02 Some Series/E01-Episode title.mkv
-: title: Some Series
-  episode_title: Episode title
-  season: 2
-  episode: 1
-
-? Some Series.S02E01.mkv
-? Some Series/Season 02/E01.mkv
-? Some Series/Season 02/Some Series-E01.mkv
-? Some Dummy Directory/Season 02/Some Series-E01.mkv
-? -Some Dummy Directory/Season 02/E01.mkv
-? Some Series/Unsafe Season 02/Some Series-E01.mkv
-? -Some Series/Unsafe Season 02/E01.mkv
-? Some Series/Season 02/E01.mkv
-? Some Series/ Season 02/E01.mkv
-? Some Dummy Directory/Some Series S02/E01-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.mkv
-: title: Some Series
-  season: 2
-  episode: 1
-
-? Some Series S03E01E02
-: title: Some Series
-  season: 3
-  episode: [1, 2]
-
-? Some Series S01S02S03
-? Some Series S01-02-03
-? Some Series S01 S02 S03
-? Some Series S01 02 03
-: title: Some Series
-  season: [1, 2, 3]
-
-? Some Series E01E02E03
-? Some Series E01-02-03
-? Some Series E01-03
-? Some Series E01 E02 E03
-? Some Series E01 02 03
-: title: Some Series
-  episode: [1, 2, 3]
-
-? Some Series E01E02E04
-? Some Series E01 E02 E04
-? Some Series E01 02 04
-: title: Some Series
-  episode: [1, 2, 4]
-
-? Some Series E01-02-04
-? Some Series E01-04
-? Some Series E01-04
-: title: Some Series
-  episode: [1, 2, 3, 4]
-
-? Some Series E01-02-E04
-: title: Some Series
-  episode: [1, 2, 3, 4]
-
-? Episode 3
-? -Episode III
-: episode: 3
-
-? Episode 3
-? Episode III
-: options: -t episode
-  episode: 3
-
-? -A very special movie
-: episode_details: Special
-
-? A very special episode
-: options: -t episode
-  episode_details: Special
-
-? 12 Monkeys\Season 01\Episode 05\12 Monkeys - S01E05 - The Night Room.mkv
-: container: mkv
-  title: 12 Monkeys
-  episode: 5
-  season: 1
-
-? S03E02.X.1080p
-: episode: 2
-  screen_size: 1080p
-  season: 3
-
-? Something 1 x 2-FlexGet
-: options: -t episode
-  title: Something
-  season: 1
-  episode: 2
-  episode_title: FlexGet
-
-? Show.Name.-.Season.1.to.3.-.Mp4.1080p
-? Show.Name.-.Season.1~3.-.Mp4.1080p
-? Show.Name.-.Saison.1.a.3.-.Mp4.1080p
-: container: MP4
-  screen_size: 1080p
-  season:
-  - 1
-  - 2
-  - 3
-  title: Show Name
-
-? Show.Name.Season.1.3&5.HDTV.XviD-GoodGroup[SomeTrash]
-? Show.Name.Season.1.3 and 5.HDTV.XviD-GoodGroup[SomeTrash]
-: format: HDTV
-  release_group: GoodGroup[SomeTrash]
-  season:
-  - 1
-  - 3
-  - 5
-  title: Show Name
-  type: episode
-  video_codec: XviD
-
-? Show.Name.Season.1.2.3-5.HDTV.XviD-GoodGroup[SomeTrash]
-? Show.Name.Season.1.2.3~5.HDTV.XviD-GoodGroup[SomeTrash]
-? Show.Name.Season.1.2.3 to 5.HDTV.XviD-GoodGroup[SomeTrash]
-: format: HDTV
-  release_group: GoodGroup[SomeTrash]
-  season:
-  - 1
-  - 2
-  - 3
-  - 4
-  - 5
-  title: Show Name
-  type: episode
-  video_codec: XviD
-
-? The.Get.Down.S01EP01.FRENCH.720p.WEBRIP.XVID-STR
-: episode: 1
-  format: WEBRip
-  language: fr
-  release_group: STR
-  screen_size: 720p
-  season: 1
-  title: The Get Down
-  type: episode
-  video_codec: XviD
-
-? My.Name.Is.Earl.S01E01-S01E21.SWE-SUB
-: episode:
-  - 1
-  - 2
-  - 3
-  - 4
-  - 5
-  - 6
-  - 7
-  - 8
-  - 9
-  - 10
-  - 11
-  - 12
-  - 13
-  - 14
-  - 15
-  - 16
-  - 17
-  - 18
-  - 19
-  - 20
-  - 21
-  season: 1
-  subtitle_language: sv
-  title: My Name Is Earl
-  type: episode
-
-? Show.Name.Season.4.Episodes.1-12
-: episode:
-  - 1
-  - 2
-  - 3
-  - 4
-  - 5
-  - 6
-  - 7
-  - 8
-  - 9
-  - 10
-  - 11
-  - 12
-  season: 4
-  title: Show Name
-  type: episode
-
-? show name s01.to.s04
-: season:
-  - 1
-  - 2
-  - 3
-  - 4
-  title: show name
-  type: episode
-
-? epi
-: options: -t episode
-  title: epi
\ No newline at end of file
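One behaviour worth calling out from the entries above is range expansion: 'Season.1.2.3-5' is expected to yield seasons 1 through 5. A sketch, assuming the vendored guessit:

    from guessit import guessit

    result = guessit('Show.Name.Season.1.2.3-5.HDTV.XviD-GoodGroup[SomeTrash]', {'implicit': True})
    # The season range in the name expands to the full list, per the fixture above.
    assert set(result['season']) == {1, 2, 3, 4, 5}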
diff --git a/lib/guessit/test/rules/film.yml b/lib/guessit/test/rules/film.yml
deleted file mode 100644
index 1f77433185b8b169c3749f9e88895233a661f293..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/film.yml
+++ /dev/null
@@ -1,9 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? Film Title-f01-Series Title.mkv
-? Film Title-f01-Series Title
-? directory/Film Title-f01-Series Title/file.mkv
-: title: Series Title
-  film_title: Film Title
-  film: 1
-
diff --git a/lib/guessit/test/rules/format.yml b/lib/guessit/test/rules/format.yml
deleted file mode 100644
index cf3dea921354a7c006185580335d6d3b1a05ab2c..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/format.yml
+++ /dev/null
@@ -1,112 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? +VHS
-? +VHSRip
-? +VHS-Rip
-? +VhS_rip
-? +VHS.RIP
-? -VHSAnythingElse
-? -SomeVHS stuff
-? -VH
-? -VHx
-? -VHxRip
-: format: VHS
-
-? +Cam
-? +CamRip
-? +CaM Rip
-? +Cam_Rip
-? +cam.rip
-: format: Cam
-
-? +Telesync
-? +TS
-? +HD TS
-? -Hd.Ts # ts file extension
-? -HD.TS # ts file extension
-? +Hd-Ts
-: format: Telesync
-
-? +Workprint
-? +workPrint
-? +WorkPrint
-? +WP
-? -Work Print
-: format: Workprint
-
-? +Telecine
-? +teleCine
-? +TC
-? -Tele Cine
-: format: Telecine
-
-? +PPV
-? +ppv-rip
-: format: PPV
-
-? -TV
-? +SDTV
-? +SDTVRIP
-? +Rip sd tv
-? +TvRip
-? +Rip TV
-: format: TV
-
-? +DVB
-? +DVB-Rip
-? +DvBRiP
-? +pdTV
-? +Pd Tv
-: format: DVB
-
-? +DVD
-? +DVD-RIP
-? +video ts
-? +DVDR
-? +DVD 9
-? +dvd 5
-? -dvd ts
-: format: DVD
-  -format: ts
-
-? +HDTV
-? +tv rip hd
-? +HDtv Rip
-? +HdRip
-: format: HDTV
-
-? +VOD
-? +VodRip
-? +vod rip
-: format: VOD
-
-? +webrip
-? +Web Rip
-: format: WEBRip
-
-? +webdl
-? +Web DL
-? +webHD
-? +WEB hd
-? +web
-: format: WEB-DL
-
-? +HDDVD
-? +hd dvd
-? +hdDvdRip
-: format: HD-DVD
-
-? +BluRay
-? +BluRay rip
-? +BD
-? +BR
-? +BDRip
-? +BR rip
-? +BD5
-? +BD9
-? +BD25
-? +bd50
-: format: BluRay
-
-? XVID.NTSC.DVDR.nfo
-: format: DVD
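Note the '-format: ts' line in the DVD entry above: a leading '-' on an expected key marks a negated expectation (the key must not resolve to that value), handled by TestYml.check_expected later in this diff. In plain Python terms, for the '+video ts' input:

    from guessit import guessit

    # "video ts" must be reported as a DVD source, not as a ts/Telesync match.
    result = guessit('video ts', {'implicit': True})
    assert result.get('format') == 'DVD'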
diff --git a/lib/guessit/test/rules/language.yml b/lib/guessit/test/rules/language.yml
deleted file mode 100644
index 51bbd8da87ec635486257b709883c53eebf0eed8..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/language.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? +English
-? .ENG.
-: language: English
-
-? +French
-: language: French
-
-? +SubFrench
-? +SubFr
-? +STFr
-? ST.FR
-: subtitle_language: French
-
-? +ENG.-.sub.FR
-? ENG.-.FR Sub
-? +ENG.-.SubFR
-? +ENG.-.FRSUB
-? +ENG.-.FRSUBS
-? +ENG.-.FR-SUBS
-: language: English
-  subtitle_language: French
-
-? "{Fr-Eng}.St{Fr-Eng}"
-? "Le.Prestige[x264.{Fr-Eng}.St{Fr-Eng}.Chaps].mkv"
-: language: [French, English]
-  subtitle_language: [French, English]
-
-? +ENG.-.sub.SWE
-? ENG.-.SWE Sub
-? +ENG.-.SubSWE
-? +ENG.-.SWESUB
-? +ENG.-.sub.SV
-? ENG.-.SV Sub
-? +ENG.-.SubSV
-? +ENG.-.SVSUB
-: language: English
-  subtitle_language: Swedish
\ No newline at end of file
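The language values above ('English', 'French', 'Swedish', or lists of them) are not compared as raw strings; test_yml.py resolves them through babelfish before comparing. A sketch of the same comparison, assuming the vendored guessit and babelfish:

    import babelfish
    from guessit import guessit

    result = guessit('ENG.-.FRSUB', {'implicit': True})
    # guessit returns babelfish Language objects; the fixture strings resolve the same way.
    assert result['language'] == babelfish.Language.fromguessit('english')
    assert result['subtitle_language'] == babelfish.Language.fromguessit('french')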
diff --git a/lib/guessit/test/rules/other.yml b/lib/guessit/test/rules/other.yml
deleted file mode 100644
index cce8cbd05f1bbd6464394f336f50b4028b1b053b..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/other.yml
+++ /dev/null
@@ -1,137 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? +DVDSCR
-? +DVDScreener
-? +DVD-SCR
-? +DVD Screener
-? +DVD AnythingElse Screener
-? -DVD AnythingElse SCR
-: other: Screener
-
-? +AudioFix
-? +AudioFixed
-? +Audio Fix
-? +Audio Fixed
-: other: AudioFix
-
-? +SyncFix
-? +SyncFixed
-? +Sync Fix
-? +Sync Fixed
-: other: SyncFix
-
-? +DualAudio
-? +Dual Audio
-: other: DualAudio
-
-? +ws
-? +WideScreen
-? +Wide Screen
-: other: WideScreen
-
-? +NF
-? +Netflix
-: other: Netflix
-
-# Fix and Real must be surrounded by other properties to be matched.
-? DVD.Real.XViD
-? DVD.fix.XViD
-? -DVD.Real
-? -DVD.Fix
-? -Real.XViD
-? -Fix.XViD
-: other: Proper
-  proper_count: 1
-
-? -DVD.BlablaBla.Fix.Blablabla.XVID
-? -DVD.BlablaBla.Fix.XVID
-? -DVD.Fix.Blablabla.XVID
-: other: Proper
-  proper_count: 1
-
-
-? DVD.Real.PROPER.REPACK
-: other: Proper
-  proper_count: 3
-
-
-? Proper
-? +Repack
-? +Rerip
-: other: Proper
-  proper_count: 1
-
-? XViD.Fansub
-: other: Fansub
-
-? XViD.Fastsub
-: other: Fastsub
-
-? +Season Complete
-? -Complete
-: other: Complete
-
-? R5
-? RC
-: other: R5
-
-? PreAir
-? Pre Air
-: other: Preair
-
-? Screener
-: other: Screener
-
-? Remux
-: other: Remux
-
-? 3D
-: other: 3D
-
-? HD
-: other: HD
-
-? mHD # ??
-: other: mHD
-
-? HDLight
-: other: HDLight
-
-? HQ
-: other: HQ
-
-? ddc
-: other: DDC
-
-? hr
-: other: HR
-
-? PAL
-: other: PAL
-
-? SECAM
-: other: SECAM
-
-? NTSC
-: other: NTSC
-
-? CC
-: other: CC
-
-? LD
-: other: LD
-
-? MD
-: other: MD
-
-? -The complete movie
-: other: Complete
-
-? +The complete movie
-: title: The complete movie
-
-? +AC3-HQ
-: audio_profile: HQ
-
-? Other-HQ
-: other: HQ
diff --git a/lib/guessit/test/rules/part.yml b/lib/guessit/test/rules/part.yml
deleted file mode 100644
index 72f3d98a8343e6267d5567f26d03c0c29847b624..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/part.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? Filename Part 3.mkv
-? Filename Part III.mkv
-? Filename Part Three.mkv
-? Filename Part Trois.mkv
-: title: Filename
-  part: 3
-
-? Part 3
-? Part III
-? Part Three
-? Part Trois
-? Part3
-: part: 3
-
-? -Something.Apt.1
-: part: 1
\ No newline at end of file
diff --git a/lib/guessit/test/rules/processors.yml b/lib/guessit/test/rules/processors.yml
deleted file mode 100644
index ee906b2c3f7dd3cbbded8859e276c1730844bd05..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/processors.yml
+++ /dev/null
@@ -1,8 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-
-# Prefer information for last path.
-? Some movie (2000)/Some movie (2001).mkv
-? Some movie (2001)/Some movie.mkv
-: year: 2001
-  container: mkv
diff --git a/lib/guessit/test/rules/release_group.yml b/lib/guessit/test/rules/release_group.yml
deleted file mode 100644
index d048ff716854d6c3004174ef6e95c1825937d459..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/release_group.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? Some.Title.XViD-ReleaseGroup
-? Some.Title.XViD-ReleaseGroup.mkv
-: release_group: ReleaseGroup
-
-? Some.Title.XViD-by.Artik[SEDG].avi
-: release_group: Artik[SEDG]
-
-? "[ABC] Some.Title.avi"
-? some/folder/[ABC]Some.Title.avi
-: release_group: ABC
-
-? "[ABC] Some.Title.XViD-GRP.avi"
-? some/folder/[ABC]Some.Title.XViD-GRP.avi
-: release_group: GRP
-
-? "[ABC] Some.Title.S01E02.avi"
-? some/folder/[ABC]Some.Title.S01E02.avi
-: release_group: ABC
-
-? Some.Title.XViD-S2E02.NoReleaseGroup.avi
-: release_group: !!null
-
-? Test.S01E01-FooBar-Group
-: options: -G group -G xxxx
-  episode: 1
-  episode_title: FooBar
-  release_group: Group
-  season: 1
-  title: Test
-  type: episode
-
-? Test.S01E01-FooBar-Group
-: options: -G re:gr.?up -G xxxx
-  episode: 1
-  episode_title: FooBar
-  release_group: Group
-  season: 1
-  title: Test
-  type: episode
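The 'options: -G …' strings above inject expected release groups (literal, or a regex when prefixed with 're:'). A sketch resolving it the way the harness does; that repeated -G flags accumulate into a list is an assumption read off the fixture itself:

    from guessit import guessit
    from guessit.options import parse_options

    opts = parse_options('-G group -G xxxx')    # same option string as the fixture
    opts['implicit'] = True
    assert guessit('Test.S01E01-FooBar-Group', opts)['release_group'] == 'Group'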
diff --git a/lib/guessit/test/rules/screen_size.yml b/lib/guessit/test/rules/screen_size.yml
deleted file mode 100644
index 1145dd7eb3dc8676328aa6fc5e320d46d90f4653..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/screen_size.yml
+++ /dev/null
@@ -1,69 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? +360p
-? +360px
-? +360i
-? "+360"
-? +500x360
-: screen_size: 360p
-
-? +368p
-? +368px
-? +368i
-? "+368"
-? +500x368
-: screen_size: 368p
-
-? +480p
-? +480px
-? +480i
-? "+480"
-? +500x480
-: screen_size: 480p
-
-? +576p
-? +576px
-? +576i
-? "+576"
-? +500x576
-: screen_size: 576p
-
-? +720p
-? +720px
-? 720hd
-? 720pHD
-? +720i
-? "+720"
-? +500x720
-: screen_size: 720p
-
-? +900p
-? +900px
-? +900i
-? "+900"
-? +500x900
-: screen_size: 900p
-
-? +1080p
-? +1080px
-? +1080hd
-? +1080pHD
-? -1080i
-? "+1080"
-? +500x1080
-: screen_size: 1080p
-
-? +1080i
-? -1080p
-: screen_size: 1080i
-
-? +2160p
-? +2160px
-? +2160i
-? "+2160"
-? +4096x2160
-: screen_size: 4K
-
-? Test.File.720hd.bluray
-? Test.File.720p50
-: screen_size: 720p
diff --git a/lib/guessit/test/rules/title.yml b/lib/guessit/test/rules/title.yml
deleted file mode 100644
index fffaf8a259d4422477369d498a79784c8cd9d31b..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/title.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? Title Only
-? -Title XViD 720p Only
-? sub/folder/Title Only
-? -sub/folder/Title XViD 720p Only
-? Title Only.mkv
-? Title Only.avi
-: title: Title Only
-
-? Title Only/title_only.mkv
-: title: Title Only
-
-? title_only.mkv
-: title: title only
-
-? Some Title/some.title.mkv
-? some.title/Some.Title.mkv
-: title: Some Title
-
-? SOME TITLE/Some.title.mkv
-? Some.title/SOME TITLE.mkv
-: title: Some title
-
-? some title/Some.title.mkv
-? Some.title/some title.mkv
-: title: Some title
-
-? Some other title/Some.Other.title.mkv
-? Some.Other title/Some other title.mkv
-: title: Some Other title
-
diff --git a/lib/guessit/test/rules/video_codec.yml b/lib/guessit/test/rules/video_codec.yml
deleted file mode 100644
index d195eaafe9e97313c44c2638d3133556c104ebbd..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/video_codec.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? rv10
-? rv13
-? RV20
-? Rv30
-? rv40
-? -xrv40
-: video_codec: Real
-
-? mpeg2
-? MPEG2
-? -mpeg
-? -mpeg 2  # Not sure if we should ignore this one ...
-? -xmpeg2
-? -mpeg2x
-: video_codec: Mpeg2
-
-? DivX
-? -div X
-? divx
-? dvdivx
-? DVDivX
-: video_codec: DivX
-
-? XviD
-? xvid
-? -x vid
-: video_codec: XviD
-
-? h264
-? x264
-? h.264
-? x.264
-? mpeg4-AVC
-? -MPEG-4
-? -mpeg4
-? -mpeg
-? -h 265
-? -x265
-: video_codec: h264
-
-? h265
-? x265
-? h.265
-? x.265
-? hevc
-? -h 264
-? -x264
-: video_codec: h265
-
-? h265-HP
-: video_codec: h265
-  video_profile: HP
\ No newline at end of file
diff --git a/lib/guessit/test/rules/website.yml b/lib/guessit/test/rules/website.yml
deleted file mode 100644
index 11d434d2a966b4e2810570dc94617144b4742e78..0000000000000000000000000000000000000000
--- a/lib/guessit/test/rules/website.yml
+++ /dev/null
@@ -1,23 +0,0 @@
-# Multiple input strings having same expected results can be chained.
-# Use - marker to check inputs that should not match results.
-? +tvu.org.ru
-? -tvu.unsafe.ru
-: website: tvu.org.ru
-
-? +www.nimp.na
-? -somewww.nimp.na
-? -www.nimp.nawouak
-? -nimp.na
-: website: www.nimp.na
-
-? +wawa.co.uk
-? -wawa.uk
-: website: wawa.co.uk
-
-? -Dark.Net.S01E06.720p.HDTV.x264-BATV
-? -Dark.Net.2015.720p.HDTV.x264-BATV
-: website: Dark.Net
-
-? Dark.Net.S01E06.720p.HDTV.x264-BATV
-? Dark.Net.2015.720p.HDTV.x264-BATV
-: title: Dark Net
diff --git a/lib/guessit/test/test-input-file.txt b/lib/guessit/test/test-input-file.txt
deleted file mode 100644
index 656bc9317ddb3b44bfcf1e7784b2cc840299885e..0000000000000000000000000000000000000000
--- a/lib/guessit/test/test-input-file.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv
-SecondFile.avi
\ No newline at end of file
diff --git a/lib/guessit/test/test_api.py b/lib/guessit/test/test_api.py
deleted file mode 100644
index ca33df044af72d89391d5308ca21b189f7b98ecd..0000000000000000000000000000000000000000
--- a/lib/guessit/test/test_api.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name, pointless-string-statement
-
-import os
-
-import pytest
-import six
-
-from ..api import guessit, properties, GuessitException
-
-__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
-
-
-def test_default():
-    ret = guessit('Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
-    assert ret and 'title' in ret
-
-
-def test_forced_unicode():
-    ret = guessit(u'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
-    assert ret and 'title' in ret and isinstance(ret['title'], six.text_type)
-
-
-def test_forced_binary():
-    ret = guessit(b'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
-    assert ret and 'title' in ret and isinstance(ret['title'], six.binary_type)
-
-
-def test_unicode_japanese():
-    ret = guessit('[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi')
-    assert ret and 'title' in ret
-
-
-def test_unicode_japanese_options():
-    ret = guessit("[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": ["阿维达"]})
-    assert ret and 'title' in ret and ret['title'] == "阿维达"
-
-
-def test_forced_unicode_japanese_options():
-    ret = guessit(u"[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": [u"阿维达"]})
-    assert ret and 'title' in ret and ret['title'] == u"阿维达"
-
-# TODO: This doesn't compile on python 3, but should be tested on python 2.
-"""
-if six.PY2:
-    def test_forced_binary_japanese_options():
-        ret = guessit(b"[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": [b"阿维达"]})
-        assert ret and 'title' in ret and ret['title'] == b"阿维达"
-"""
-
-
-def test_properties():
-    props = properties()
-    assert 'video_codec' in props.keys()
-
-
-def test_exception():
-    with pytest.raises(GuessitException) as excinfo:
-        guessit(object())
-    assert "An internal error has occured in guessit" in str(excinfo.value)
-    assert "Guessit Exception Report" in str(excinfo.value)
-    assert "Please report at https://github.com/guessit-io/guessit/issues" in str(excinfo.value)
diff --git a/lib/guessit/test/test_api_unicode_literals.py b/lib/guessit/test/test_api_unicode_literals.py
deleted file mode 100644
index 3347a7d8924920c06af544cd1236bf31e91d6f4e..0000000000000000000000000000000000000000
--- a/lib/guessit/test/test_api_unicode_literals.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name, pointless-string-statement
-
-
-from __future__ import unicode_literals
-
-import os
-
-import pytest
-import six
-
-from ..api import guessit, properties, GuessitException
-
-__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
-
-
-def test_default():
-    ret = guessit('Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
-    assert ret and 'title' in ret
-
-
-def test_forced_unicode():
-    ret = guessit(u'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
-    assert ret and 'title' in ret and isinstance(ret['title'], six.text_type)
-
-
-def test_forced_binary():
-    ret = guessit(b'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
-    assert ret and 'title' in ret and isinstance(ret['title'], six.binary_type)
-
-
-def test_unicode_japanese():
-    ret = guessit('[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi')
-    assert ret and 'title' in ret
-
-
-def test_unicode_japanese_options():
-    ret = guessit("[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": ["阿维达"]})
-    assert ret and 'title' in ret and ret['title'] == "阿维达"
-
-
-def test_forced_unicode_japanese_options():
-    ret = guessit(u"[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": [u"阿维达"]})
-    assert ret and 'title' in ret and ret['title'] == u"阿维达"
-
-# TODO: This doesn't compile on python 3, but should be tested on python 2.
-"""
-if six.PY2:
-    def test_forced_binary_japanese_options():
-        ret = guessit(b"[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi", options={"expected_title": [b"阿维达"]})
-        assert ret and 'title' in ret and ret['title'] == b"阿维达"
-"""
-
-
-def test_properties():
-    props = properties()
-    assert 'video_codec' in props.keys()
-
-
-def test_exception():
-    with pytest.raises(GuessitException) as excinfo:
-        guessit(object())
-    assert "An internal error has occured in guessit" in str(excinfo.value)
-    assert "Guessit Exception Report" in str(excinfo.value)
-    assert "Please report at https://github.com/guessit-io/guessit/issues" in str(excinfo.value)
diff --git a/lib/guessit/test/test_benchmark.py b/lib/guessit/test/test_benchmark.py
deleted file mode 100644
index 34386e307538f1ef12a3a5b4ef3688702d7d0285..0000000000000000000000000000000000000000
--- a/lib/guessit/test/test_benchmark.py
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use,pointless-statement,missing-docstring,invalid-name,line-too-long
-import time
-
-import pytest
-
-from ..api import guessit
-
-
-def case1():
-    return guessit('Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv')
-
-
-def case2():
-    return guessit('Movies/Fantastic Mr Fox/Fantastic.Mr.Fox.2009.DVDRip.{x264+LC-AAC.5.1}{Fr-Eng}{Sub.Fr-Eng}-™.[sharethefiles.com].mkv')
-
-
-def case3():
-    return guessit('Series/dexter/Dexter.5x02.Hello,.Bandit.ENG.-.sub.FR.HDTV.XviD-AlFleNi-TeaM.[tvu.org.ru].avi')
-
-
-def case4():
-    return guessit('Movies/The Doors (1991)/09.03.08.The.Doors.(1991).BDRip.720p.AC3.X264-HiS@SiLUHD-English.[sharethefiles.com].mkv')
-
-
-@pytest.mark.benchmark(
-    group="Performance Tests",
-    min_time=1,
-    max_time=2,
-    min_rounds=5,
-    timer=time.time,
-    disable_gc=True,
-    warmup=False
-)
-@pytest.mark.skipif(True, reason="Disabled")
-class TestBenchmark(object):
-    def test_case1(self, benchmark):
-        ret = benchmark(case1)
-        assert ret
-
-    def test_case2(self, benchmark):
-        ret = benchmark(case2)
-        assert ret
-
-    def test_case3(self, benchmark):
-        ret = benchmark(case3)
-        assert ret
-
-    def test_case4(self, benchmark):
-        ret = benchmark(case4)
-        assert ret
diff --git a/lib/guessit/test/test_main.py b/lib/guessit/test/test_main.py
deleted file mode 100644
index cbdba7aa4cf635e4778d5d435de1c40a94c8f8d6..0000000000000000000000000000000000000000
--- a/lib/guessit/test/test_main.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
-
-import os
-
-import pytest
-
-from ..__main__ import main
-
-__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
-
-
-def test_main_no_args():
-    main([])
-
-
-def test_main():
-    main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv'])
-
-
-def test_main_unicode():
-    main(['[阿维达].Avida.2006.FRENCH.DVDRiP.XViD-PROD.avi'])
-
-
-def test_main_forced_unicode():
-    main([u'Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv'])
-
-
-def test_main_verbose():
-    main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv', '--verbose'])
-
-
-def test_main_yaml():
-    main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv', '--yaml'])
-
-
-def test_main_json():
-    main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv', '--json'])
-
-
-def test_main_show_property():
-    main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv', '-P', 'title'])
-
-
-def test_main_advanced():
-    main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv', '-a'])
-
-
-def test_main_input():
-    main(['--input', os.path.join(__location__, 'test-input-file.txt')])
-
-
-def test_main_properties():
-    main(['-p'])
-    main(['-p', '--json'])
-    main(['-p', '--yaml'])
-
-
-def test_main_values():
-    main(['-V'])
-    main(['-V', '--json'])
-    main(['-V', '--yaml'])
-
-
-def test_main_help():
-    with pytest.raises(SystemExit):
-        main(['--help'])
-
-
-def test_main_version():
-    main(['--version'])
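These tests drive guessit's command-line entry point directly. For a quick manual check, the equivalent calls (arguments taken verbatim from test_main_json and test_main_show_property above) are:

    from guessit.__main__ import main

    main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv', '--json'])
    main(['Fear.and.Loathing.in.Las.Vegas.FRENCH.ENGLISH.720p.HDDVD.DTS.x264-ESiR.mkv', '-P', 'title'])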
diff --git a/lib/guessit/test/test_yml.py b/lib/guessit/test/test_yml.py
deleted file mode 100644
index c8e3d193de5b64630aca456240216278e5fb39c9..0000000000000000000000000000000000000000
--- a/lib/guessit/test/test_yml.py
+++ /dev/null
@@ -1,285 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
-import logging
-
-# io.open supports encoding= in python 2.7
-from io import open  # pylint: disable=redefined-builtin
-import os
-import yaml
-
-import six
-
-import babelfish
-import pytest
-
-from rebulk.remodule import re
-from rebulk.utils import is_iterable
-
-from guessit.options import parse_options
-from ..yamlutils import OrderedDictYAMLLoader
-from .. import guessit
-
-
-logger = logging.getLogger(__name__)
-
-__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
-
-filename_predicate = None
-string_predicate = None
-
-
-# filename_predicate = lambda filename: 'episode_title' in filename
-# string_predicate = lambda string: '-DVD.BlablaBla.Fix.Blablabla.XVID' in string
-
-
-class EntryResult(object):
-    def __init__(self, string, negates=False):
-        self.string = string
-        self.negates = negates
-        self.valid = []
-        self.missing = []
-        self.different = []
-        self.extra = []
-        self.others = []
-
-    @property
-    def ok(self):
-        if self.negates:
-            return self.missing or self.different
-        return not self.missing and not self.different and not self.extra and not self.others
-
-    @property
-    def warning(self):
-        if self.negates:
-            return False
-        return not self.missing and not self.different and self.extra
-
-    @property
-    def error(self):
-        if self.negates:
-            return not self.missing and not self.different and not self.others
-        return self.missing or self.different or self.others
-
-    def __repr__(self):
-        if self.ok:
-            return self.string + ': OK!'
-        elif self.warning:
-            return '%s%s: WARNING! (valid=%i, extra=%i)' % ('-' if self.negates else '', self.string, len(self.valid),
-                                                            len(self.extra))
-        elif self.error:
-            return '%s%s: ERROR! (valid=%i, missing=%i, different=%i, extra=%i, others=%i)' % \
-                   ('-' if self.negates else '', self.string, len(self.valid), len(self.missing), len(self.different),
-                    len(self.extra), len(self.others))
-        else:
-            return '%s%s: UNKNOWN! (valid=%i, missing=%i, different=%i, extra=%i, others=%i)' % \
-                   ('-' if self.negates else '', self.string, len(self.valid), len(self.missing), len(self.different),
-                    len(self.extra), len(self.others))
-
-    @property
-    def details(self):
-        ret = []
-        if self.valid:
-            ret.append('valid=' + str(len(self.valid)))
-        for valid in self.valid:
-            ret.append(' ' * 4 + str(valid))
-        if self.missing:
-            ret.append('missing=' + str(len(self.missing)))
-        for missing in self.missing:
-            ret.append(' ' * 4 + str(missing))
-        if self.different:
-            ret.append('different=' + str(len(self.different)))
-        for different in self.different:
-            ret.append(' ' * 4 + str(different))
-        if self.extra:
-            ret.append('extra=' + str(len(self.extra)))
-        for extra in self.extra:
-            ret.append(' ' * 4 + str(extra))
-        if self.others:
-            ret.append('others=' + str(len(self.others)))
-        for other in self.others:
-            ret.append(' ' * 4 + str(other))
-        return ret
-
-
-class Results(list):
-    def assert_ok(self):
-        errors = [entry for entry in self if entry.error]
-        assert not errors
-
-
-def files_and_ids(predicate=None):
-    files = []
-    ids = []
-
-    for (dirpath, _, filenames) in os.walk(__location__):
-        if dirpath == __location__:
-            dirpath_rel = ''
-        else:
-            dirpath_rel = os.path.relpath(dirpath, __location__)
-        for filename in filenames:
-            name, ext = os.path.splitext(filename)
-            filepath = os.path.join(dirpath_rel, filename)
-            if ext == '.yml' and (not predicate or predicate(filepath)):
-                files.append(filepath)
-                ids.append(os.path.join(dirpath_rel, name))
-
-    return files, ids
-
-
-class TestYml(object):
-    """
-    Run tests from yaml files.
-    Multiple input strings having same expected results can be chained.
-    Use - marker to check inputs that should not match results.
-    """
-
-    options_re = re.compile(r'^([ \+-]+)(.*)')
-
-    files, ids = files_and_ids(filename_predicate)
-
-    @staticmethod
-    def set_default(expected, default):
-        if default:
-            for k, v in default.items():
-                if k not in expected:
-                    expected[k] = v
-
-    @pytest.mark.parametrize('filename', files, ids=ids)
-    def test(self, filename, caplog):
-        caplog.setLevel(logging.INFO)
-        with open(os.path.join(__location__, filename), 'r', encoding='utf-8') as infile:
-            data = yaml.load(infile, OrderedDictYAMLLoader)
-        entries = Results()
-
-        last_expected = None
-        for string, expected in reversed(list(data.items())):
-            if expected is None:
-                data[string] = last_expected
-            else:
-                last_expected = expected
-
-        default = None
-        try:
-            default = data['__default__']
-            del data['__default__']
-        except KeyError:
-            pass
-
-        for string, expected in data.items():
-            TestYml.set_default(expected, default)
-            entry = self.check_data(filename, string, expected)
-            entries.append(entry)
-        entries.assert_ok()
-
-    def check_data(self, filename, string, expected):
-        if six.PY2 and isinstance(string, six.text_type):
-            string = string.encode('utf-8')
-            converts = []
-            for k, v in expected.items():
-                if isinstance(v, six.text_type):
-                    v = v.encode('utf-8')
-                    converts.append((k, v))
-            for k, v in converts:
-                expected[k] = v
-        if not isinstance(string, str):
-            string = str(string)
-        if not string_predicate or string_predicate(string):  # pylint: disable=not-callable
-            entry = self.check(string, expected)
-            if entry.ok:
-                logger.debug('[' + filename + '] ' + str(entry))
-            elif entry.warning:
-                logger.warning('[' + filename + '] ' + str(entry))
-            elif entry.error:
-                logger.error('[' + filename + '] ' + str(entry))
-                for line in entry.details:
-                    logger.error('[' + filename + '] ' + ' ' * 4 + line)
-        return entry
-
-    def check(self, string, expected):
-        negates, global_, string = self.parse_token_options(string)
-
-        options = expected.get('options')
-        if options is None:
-            options = {}
-        if not isinstance(options, dict):
-            options = parse_options(options)
-        if 'implicit' not in options:
-            options['implicit'] = True
-        try:
-            result = guessit(string, options)
-        except Exception as exc:
-            logger.error('[' + string + '] Exception: ' + str(exc))
-            raise exc
-
-        entry = EntryResult(string, negates)
-
-        if global_:
-            self.check_global(string, result, entry)
-
-        self.check_expected(result, expected, entry)
-
-        return entry
-
-    def parse_token_options(self, string):
-        matches = self.options_re.search(string)
-        negates = False
-        global_ = False
-        if matches:
-            string = matches.group(2)
-            for opt in matches.group(1):
-                if '-' in opt:
-                    negates = True
-                if '+' in opt:
-                    global_ = True
-        return negates, global_, string
-
-    def check_global(self, string, result, entry):
-        global_span = []
-        for result_matches in result.matches.values():
-            for result_match in result_matches:
-                if not global_span:
-                    global_span = list(result_match.span)
-                else:
-                    if global_span[0] > result_match.span[0]:
-                        global_span[0] = result_match.span[0]
-                    if global_span[1] < result_match.span[1]:
-                        global_span[1] = result_match.span[1]
-        if global_span and global_span[1] - global_span[0] < len(string):
-            entry.others.append("Match is not global")
-
-    def is_same(self, value, expected):
-        values = set(value) if is_iterable(value) else set((value,))
-        expecteds = set(expected) if is_iterable(expected) else set((expected,))
-        if len(values) != len(expecteds):
-            return False
-        if isinstance(next(iter(values)), babelfish.Language):
-            # pylint: disable=no-member
-            expecteds = set([babelfish.Language.fromguessit(expected) for expected in expecteds])
-        elif isinstance(next(iter(values)), babelfish.Country):
-            # pylint: disable=no-member
-            expecteds = set([babelfish.Country.fromguessit(expected) for expected in expecteds])
-        return values == expecteds
-
-    def check_expected(self, result, expected, entry):
-        if expected:
-            for expected_key, expected_value in expected.items():
-                if expected_key and expected_key != 'options' and expected_value is not None:
-                    negates_key, _, result_key = self.parse_token_options(expected_key)
-                    if result_key in result.keys():
-                        if not self.is_same(result[result_key], expected_value):
-                            if negates_key:
-                                entry.valid.append((expected_key, expected_value))
-                            else:
-                                entry.different.append((expected_key, expected_value, result[expected_key]))
-                        else:
-                            if negates_key:
-                                entry.different.append((expected_key, expected_value, result[result_key]))
-                            else:
-                                entry.valid.append((expected_key, expected_value))
-                    elif not negates_key:
-                        entry.missing.append((expected_key, expected_value))
-
-        for result_key, result_value in result.items():
-            if result_key not in expected.keys():
-                entry.extra.append((result_key, result_value))
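For reference, the input markers handled by parse_token_options above work as follows; this is a standalone restatement of the same regex and flags, runnable on its own:

    import re

    options_re = re.compile(r'^([ \+-]+)(.*)')   # same pattern as TestYml.options_re

    def parse_token_options(string):
        # "-" negates the input (it must NOT yield the expected values);
        # "+" additionally requires the matches to cover the whole input string.
        matches = options_re.search(string)
        negates = global_ = False
        if matches:
            string = matches.group(2)
            negates = '-' in matches.group(1)
            global_ = '+' in matches.group(1)
        return negates, global_, string

    assert parse_token_options('+DD5.1') == (False, True, 'DD5.1')
    assert parse_token_options('-Atmosphere') == (True, False, 'Atmosphere')
    assert parse_token_options('Title Only') == (False, False, 'Title Only')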
diff --git a/lib/guessit/test/various.yml b/lib/guessit/test/various.yml
deleted file mode 100644
index 72e2f602f68ab5a2ad01122c9b59308a34d2e128..0000000000000000000000000000000000000000
--- a/lib/guessit/test/various.yml
+++ /dev/null
@@ -1,800 +0,0 @@
-? Movies/Fear and Loathing in Las Vegas (1998)/Fear.and.Loathing.in.Las.Vegas.720p.HDDVD.DTS.x264-ESiR.mkv
-: type: movie
-  title: Fear and Loathing in Las Vegas
-  year: 1998
-  screen_size: 720p
-  format: HD-DVD
-  audio_codec: DTS
-  video_codec: h264
-  release_group: ESiR
-
-? Series/Duckman/Duckman - 101 (01) - 20021107 - I, Duckman.avi
-: type: episode
-  title: Duckman
-  season: 1
-  episode: 1
-  episode_title: I, Duckman
-  date: 2002-11-07
-
-? Series/Neverwhere/Neverwhere.05.Down.Street.[tvu.org.ru].avi
-: type: episode
-  title: Neverwhere
-  episode: 5
-  episode_title: Down Street
-  website: tvu.org.ru
-
-? Neverwhere.05.Down.Street.[tvu.org.ru].avi
-: type: episode
-  title: Neverwhere
-  episode: 5
-  episode_title: Down Street
-  website: tvu.org.ru
-
-? Series/Breaking Bad/Minisodes/Breaking.Bad.(Minisodes).01.Good.Cop.Bad.Cop.WEBRip.XviD.avi
-: type: episode
-  title: Breaking Bad
-  episode_format: Minisode
-  episode: 1
-  episode_title: Good Cop Bad Cop
-  format: WEBRip
-  video_codec: XviD
-
-? Series/Kaamelott/Kaamelott - Livre V - Ep 23 - Le Forfait.avi
-: type: episode
-  title: Kaamelott
-  episode: 23
-  episode_title: Le Forfait
-
-? Movies/The Doors (1991)/09.03.08.The.Doors.(1991).BDRip.720p.AC3.X264-HiS@SiLUHD-English.[sharethefiles.com].mkv
-: type: movie
-  title: The Doors
-  year: 1991
-  date: 2008-03-09
-  format: BluRay
-  screen_size: 720p
-  audio_codec: AC3
-  video_codec: h264
-  release_group: HiS@SiLUHD
-  language: english
-  website: sharethefiles.com
-
-? Movies/M.A.S.H. (1970)/MASH.(1970).[Divx.5.02][Dual-Subtitulos][DVDRip].ogm
-: type: movie
-  title: MASH
-  year: 1970
-  video_codec: DivX
-  format: DVD
-
-? the.mentalist.501.hdtv-lol.mp4
-: type: episode
-  title: the mentalist
-  season: 5
-  episode: 1
-  format: HDTV
-  release_group: lol
-
-? the.simpsons.2401.hdtv-lol.mp4
-: type: episode
-  title: the simpsons
-  season: 24
-  episode: 1
-  format: HDTV
-  release_group: lol
-
-? Homeland.S02E01.HDTV.x264-EVOLVE.mp4
-: type: episode
-  title: Homeland
-  season: 2
-  episode: 1
-  format: HDTV
-  video_codec: h264
-  release_group: EVOLVE
-
-? /media/Band_of_Brothers-e01-Currahee.mkv
-: type: episode
-  title: Band of Brothers
-  episode: 1
-  episode_title: Currahee
-
-? /media/Band_of_Brothers-x02-We_Stand_Alone_Together.mkv
-: type: episode
-  title: Band of Brothers
-  bonus: 2
-  bonus_title: We Stand Alone Together
-
-? /movies/James_Bond-f21-Casino_Royale-x02-Stunts.mkv
-: type: movie
-  title: Casino Royale
-  film_title: James Bond
-  film: 21
-  bonus: 2
-  bonus_title: Stunts
-
-? /TV Shows/new.girl.117.hdtv-lol.mp4
-: type: episode
-  title: new girl
-  season: 1
-  episode: 17
-  format: HDTV
-  release_group: lol
-
-? The.Office.(US).1x03.Health.Care.HDTV.XviD-LOL.avi
-: type: episode
-  title: The Office
-  country: US
-  season: 1
-  episode: 3
-  episode_title: Health Care
-  format: HDTV
-  video_codec: XviD
-  release_group: LOL
-
-? The_Insider-(1999)-x02-60_Minutes_Interview-1996.mp4
-: type: movie
-  title: The Insider
-  year: 1999
-  bonus: 2
-  bonus_title: 60 Minutes Interview-1996
-
-? OSS_117--Cairo,_Nest_of_Spies.mkv
-: type: movie
-  title: OSS 117
-  alternative_title: Cairo, Nest of Spies
-
-? Rush.._Beyond_The_Lighted_Stage-x09-Between_Sun_and_Moon-2002_Hartford.mkv
-: type: movie
-  title: Rush Beyond The Lighted Stage
-  bonus: 9
-  bonus_title: Between Sun and Moon
-  year: 2002
-
-? House.Hunters.International.S56E06.720p.hdtv.x264.mp4
-: type: episode
-  title: House Hunters International
-  season: 56
-  episode: 6
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-
-? White.House.Down.2013.1080p.BluRay.DTS-HD.MA.5.1.x264-PublicHD.mkv
-: type: movie
-  title: White House Down
-  year: 2013
-  screen_size: 1080p
-  format: BluRay
-  audio_codec: DTS
-  audio_profile: HDMA
-  video_codec: h264
-  release_group: PublicHD
-  audio_channels: "5.1"
-
-? White.House.Down.2013.1080p.BluRay.DTSHD.MA.5.1.x264-PublicHD.mkv
-: type: movie
-  title: White House Down
-  year: 2013
-  screen_size: 1080p
-  format: BluRay
-  audio_codec: DTS
-  audio_profile: HDMA
-  video_codec: h264
-  release_group: PublicHD
-  audio_channels: "5.1"
-
-? Hostages.S01E01.Pilot.for.Air.720p.WEB-DL.DD5.1.H.264-NTb.nfo
-: type: episode
-  title: Hostages
-  episode_title: Pilot for Air
-  season: 1
-  episode: 1
-  screen_size: 720p
-  format: WEB-DL
-  audio_channels: "5.1"
-  video_codec: h264
-  audio_codec: DolbyDigital
-  release_group: NTb
-
-? Despicable.Me.2.2013.1080p.BluRay.x264-VeDeTT.nfo
-: type: movie
-  title: Despicable Me 2
-  year: 2013
-  screen_size: 1080p
-  format: BluRay
-  video_codec: h264
-  release_group: VeDeTT
-
-? Le Cinquieme Commando 1971 SUBFORCED FRENCH DVDRiP XViD AC3 Bandix.mkv
-: type: movie
-  audio_codec: AC3
-  format: DVD
-  release_group: Bandix
-  subtitle_language: French
-  title: Le Cinquieme Commando
-  video_codec: XviD
-  year: 1971
-
-? Le Seigneur des Anneaux - La Communauté de l'Anneau - Version Longue - BDRip.mkv
-: type: movie
-  format: BluRay
-  title: Le Seigneur des Anneaux
-
-? La petite bande (Michel Deville - 1983) VF PAL MP4 x264 AAC.mkv
-: type: movie
-  audio_codec: AAC
-  language: French
-  title: La petite bande
-  video_codec: h264
-  year: 1983
-  other: PAL
-
-? Retour de Flammes (Gregor Schnitzler 2003) FULL DVD.iso
-: type: movie
-  format: DVD
-  title: Retour de Flammes
-  type: movie
-  year: 2003
-
-? A.Common.Title.Special.2014.avi
-: type: movie
-  year: 2014
-  title: A Common Title Special
-
-? A.Common.Title.2014.Special.avi
-: type: episode
-  year: 2014
-  title: A Common Title
-  episode_title: Special
-  episode_details: Special
-
-? A.Common.Title.2014.Special.Edition.avi
-: type: movie
-  year: 2014
-  title: A Common Title
-  edition: Special Edition
-
-? Downton.Abbey.2013.Christmas.Special.HDTV.x264-FoV.mp4
-: type: episode
-  year: 2013
-  title: Downton Abbey
-  episode_title: Christmas Special
-  video_codec: h264
-  release_group: FoV
-  format: HDTV
-  episode_details: Special
-
-? Doctor_Who_2013_Christmas_Special.The_Time_of_The_Doctor.HD
-: type: episode
-  title: Doctor Who
-  other: HD
-  episode_details: Special
-  episode_title: Christmas Special The Time of The Doctor
-  year: 2013
-
-? Doctor Who 2005 50th Anniversary Special The Day of the Doctor 3.avi
-: type: episode
-  title: Doctor Who
-  episode_details: Special
-  episode_title: 50th Anniversary Special The Day of the Doctor 3
-  year: 2005
-
-? Robot Chicken S06-Born Again Virgin Christmas Special HDTV x264.avi
-: type: episode
-  title: Robot Chicken
-  format: HDTV
-  season: 6
-  episode_title: Born Again Virgin Christmas Special
-  video_codec: h264
-  episode_details: Special
-
-? Wicked.Tuna.S03E00.Head.To.Tail.Special.HDTV.x264-YesTV
-: type: episode
-  title: Wicked Tuna
-  episode_title: Head To Tail Special
-  release_group: YesTV
-  season: 3
-  episode: 0
-  video_codec: h264
-  format: HDTV
-  episode_details: Special
-
-? The.Voice.UK.S03E12.HDTV.x264-C4TV
-: episode: 12
-  video_codec: h264
-  format: HDTV
-  title: The Voice
-  release_group: C4TV
-  season: 3
-  country: United Kingdom
-  type: episode
-
-? /tmp/star.trek.9/star.trek.9.mkv
-: type: movie
-  title: star trek 9
-
-? star.trek.9.mkv
-: type: movie
-  title: star trek 9
-
-? FlexGet.S01E02.TheName.HDTV.xvid
-: episode: 2
-  format: HDTV
-  season: 1
-  title: FlexGet
-  episode_title: TheName
-  type: episode
-  video_codec: XviD
-
-? FlexGet.S01E02.TheName.HDTV.xvid
-: episode: 2
-  format: HDTV
-  season: 1
-  title: FlexGet
-  episode_title: TheName
-  type: episode
-  video_codec: XviD
-
-? some.series.S03E14.Title.Here.720p
-: episode: 14
-  screen_size: 720p
-  season: 3
-  title: some series
-  episode_title: Title Here
-  type: episode
-
-? '[the.group] Some.Series.S03E15.Title.Two.720p'
-: episode: 15
-  release_group: the.group
-  screen_size: 720p
-  season: 3
-  title: Some Series
-  episode_title: Title Two
-  type: episode
-
-? 'HD 720p: Some series.S03E16.Title.Three'
-: episode: 16
-  other: HD
-  screen_size: 720p
-  season: 3
-  title: Some series
-  episode_title: Title Three
-  type: episode
-
-? Something.Season.2.1of4.Ep.Title.HDTV.torrent
-: episode_count: 4
-  episode: 1
-  format: HDTV
-  season: 2
-  title: Something
-  episode_title: Title
-  type: episode
-  container: torrent
-
-? Show-A (US) - Episode Title S02E09 hdtv
-: country: US
-  episode: 9
-  format: HDTV
-  season: 2
-  title: Show-A
-  type: episode
-
-? Jack's.Show.S03E01.blah.1080p
-: episode: 1
-  screen_size: 1080p
-  season: 3
-  title: Jack's Show
-  episode_title: blah
-  type: episode
-
-? FlexGet.epic
-: title: FlexGet epic
-  type: movie
-
-? FlexGet.Apt.1
-: title: FlexGet Apt 1
-  type: movie
-
-? FlexGet.aptitude
-: title: FlexGet aptitude
-  type: movie
-
-? FlexGet.Step1
-: title: FlexGet Step1
-  type: movie
-
-? Movies/El Bosque Animado (1987)/El.Bosque.Animado.[Jose.Luis.Cuerda.1987].[Xvid-Dvdrip-720 * 432].avi
-: format: DVD
-  screen_size: 720x432
-  title: El Bosque Animado
-  video_codec: XviD
-  year: 1987
-  type: movie
-
-? Movies/El Bosque Animado (1987)/El.Bosque.Animado.[Jose.Luis.Cuerda.1987].[Xvid-Dvdrip-720x432].avi
-: format: DVD
-  screen_size: 720x432
-  title: El Bosque Animado
-  video_codec: XviD
-  year: 1987
-  type: movie
-
-? 2009.shoot.fruit.chan.multi.dvd9.pal
-: format: DVD
-  language: mul
-  other: PAL
-  title: shoot fruit chan
-  type: movie
-  year: 2009
-
-? 2009.shoot.fruit.chan.multi.dvd5.pal
-: format: DVD
-  language: mul
-  other: PAL
-  title: shoot fruit chan
-  type: movie
-  year: 2009
-
-? The.Flash.2014.S01E01.PREAIR.WEBRip.XviD-EVO.avi
-: episode: 1
-  format: WEBRip
-  other: Preair
-  release_group: EVO
-  season: 1
-  title: The Flash
-  type: episode
-  video_codec: XviD
-  year: 2014
-
-? Ice.Lake.Rebels.S01E06.Ice.Lake.Games.720p.HDTV.x264-DHD
-: episode: 6
-  format: HDTV
-  release_group: DHD
-  screen_size: 720p
-  season: 1
-  title: Ice Lake Rebels
-  episode_title: Ice Lake Games
-  type: episode
-  video_codec: h264
-
-? The League - S06E10 - Epi Sexy.mkv
-: episode: 10
-  season: 6
-  title: The League
-  episode_title: Epi Sexy
-  type: episode
-
-? Stay (2005) [1080p]/Stay.2005.1080p.BluRay.x264.YIFY.mp4
-: format: BluRay
-  release_group: YIFY
-  screen_size: 1080p
-  title: Stay
-  type: movie
-  video_codec: h264
-  year: 2005
-
-? /media/live/A/Anger.Management.S02E82.720p.HDTV.X264-DIMENSION.mkv
-: format: HDTV
-  release_group: DIMENSION
-  screen_size: 720p
-  title: Anger Management
-  type: episode
-  season: 2
-  episode: 82
-  video_codec: h264
-
-? "[Figmentos] Monster 34 - At the End of Darkness [781219F1].mkv"
-: type: episode
-  release_group: Figmentos
-  title: Monster
-  episode: 34
-  episode_title: At the End of Darkness
-  crc32: 781219F1
-
-? Game.of.Thrones.S05E07.720p.HDTV-KILLERS.mkv
-: type: episode
-  episode: 7
-  format: HDTV
-  release_group: KILLERS
-  screen_size: 720p
-  season: 5
-  title: Game of Thrones
-
-? Game.of.Thrones.S05E07.HDTV.720p-KILLERS.mkv
-: type: episode
-  episode: 7
-  format: HDTV
-  release_group: KILLERS
-  screen_size: 720p
-  season: 5
-  title: Game of Thrones
-
-? Parks and Recreation - [04x12] - Ad Campaign.avi
-: type: episode
-  title: Parks and Recreation
-  season: 4
-  episode: 12
-  episode_title: Ad Campaign
-
-? Star Trek Into Darkness (2013)/star.trek.into.darkness.2013.720p.web-dl.h264-publichd.mkv
-: type: movie
-  title: Star Trek Into Darkness
-  year: 2013
-  screen_size: 720p
-  format: WEB-DL
-  video_codec: h264
-  release_group: publichd
-
-? /var/medias/series/The Originals/Season 02/The.Originals.S02E15.720p.HDTV.X264-DIMENSION.mkv
-: type: episode
-  title: The Originals
-  season: 2
-  episode: 15
-  screen_size: 720p
-  format: HDTV
-  video_codec: h264
-  release_group: DIMENSION
-
-? Test.S01E01E07-FooBar-Group.avi
-: container: avi
-  episode:
-  - 1
-  - 7
-  episode_title: FooBar-Group  # Make sure it doesn't conflict with uuid
-  mimetype: video/x-msvideo
-  season: 1
-  title: Test
-  type: episode
-
-? TEST.S01E02.2160p.NF.WEBRip.x264.DD5.1-ABC
-: audio_channels: '5.1'
-  audio_codec: DolbyDigital
-  episode: 2
-  format: WEBRip
-  other: Netflix
-  release_group: ABC
-  screen_size: 4K
-  season: 1
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.2015.12.30.720p.WEBRip.h264-ABC
-: date: 2015-12-30
-  format: WEBRip
-  release_group: ABC
-  screen_size: 720p
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.S01E10.24.1080p.NF.WEBRip.AAC2.0.x264-ABC
-: audio_channels: '2.0'
-  audio_codec: AAC
-  episode: 10
-  episode_title: '24'
-  format: WEBRip
-  other: Netflix
-  release_group: ABC
-  screen_size: 1080p
-  season: 1
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.S01E10.24.1080p.NF.WEBRip.AAC2.0.x264-ABC
-: audio_channels: '2.0'
-  audio_codec: AAC
-  episode: 10
-  episode_title: '24'
-  format: WEBRip
-  other: Netflix
-  release_group: ABC
-  screen_size: 1080p
-  season: 1
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.S01E10.24.1080p.NF.WEBRip.AAC.2.0.x264-ABC
-: audio_channels: '2.0'
-  audio_codec: AAC
-  episode: 10
-  episode_title: '24'
-  format: WEBRip
-  other: Netflix
-  release_group: ABC
-  screen_size: 1080p
-  season: 1
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.S05E02.720p.iP.WEBRip.AAC2.0.H264-ABC
-: audio_channels: '2.0'
-  audio_codec: AAC
-  episode: 2
-  format: WEBRip
-  release_group: ABC
-  screen_size: 720p
-  season: 5
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.S03E07.720p.WEBRip.AAC2.0.x264-ABC
-: audio_channels: '2.0'
-  audio_codec: AAC
-  episode: 7
-  format: WEBRip
-  release_group: ABC
-  screen_size: 720p
-  season: 3
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.S15E15.24.1080p.FREE.WEBRip.AAC2.0.x264-ABC
-: audio_channels: '2.0'
-  audio_codec: AAC
-  episode: 15
-  episode_title: '24'
-  format: WEBRip
-  release_group: ABC
-  screen_size: 1080p
-  season: 15
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.S11E11.24.720p.ETV.WEBRip.AAC2.0.x264-ABC
-: audio_channels: '2.0'
-  audio_codec: AAC
-  episode: 11
-  episode_title: '24'
-  format: WEBRip
-  release_group: ABC
-  screen_size: 720p
-  season: 11
-  title: TEST
-  type: episode
-  video_codec: h264
-
-? TEST.2015.1080p.HC.WEBRip.x264.AAC2.0-ABC
-: audio_channels: '2.0'
-  audio_codec: AAC
-  format: WEBRip
-  release_group: ABC
-  screen_size: 1080p
-  title: TEST
-  type: movie
-  video_codec: h264
-  year: 2015
-
-? TEST.2015.1080p.3D.BluRay.Half-SBS.x264.DTS-HD.MA.7.1-ABC
-: audio_channels: '7.1'
-  audio_codec: DTS
-  audio_profile: HDMA
-  format: BluRay
-  other: 3D
-  release_group: ABC
-  screen_size: 1080p
-  title: TEST
-  type: movie
-  video_codec: h264
-  year: 2015
-
-? TEST.2015.1080p.3D.BluRay.Half-OU.x264.DTS-HD.MA.7.1-ABC
-: audio_channels: '7.1'
-  audio_codec: DTS
-  audio_profile: HDMA
-  format: BluRay
-  other: 3D
-  release_group: ABC
-  screen_size: 1080p
-  title: TEST
-  type: movie
-  video_codec: h264
-  year: 2015
-
-? TEST.2015.1080p.3D.BluRay.Half-OU.x264.DTS-HD.MA.TrueHD.7.1.Atmos-ABC
-: audio_channels: '7.1'
-  audio_codec:
-  - DTS
-  - TrueHD
-  - DolbyAtmos
-  audio_profile: HDMA
-  format: BluRay
-  other: 3D
-  release_group: ABC
-  screen_size: 1080p
-  title: TEST
-  type: movie
-  video_codec: h264
-  year: 2015
-
-? TEST.2015.1080p.3D.BluRay.Half-SBS.x264.DTS-HD.MA.TrueHD.7.1.Atmos-ABC
-: audio_channels: '7.1'
-  audio_codec:
-  - DTS
-  - TrueHD
-  - DolbyAtmos
-  audio_profile: HDMA
-  format: BluRay
-  other: 3D
-  release_group: ABC
-  screen_size: 1080p
-  title: TEST
-  type: movie
-  video_codec: h264
-  year: 2015
-
-? TEST.2015.1080p.BluRay.REMUX.AVC.DTS-HD.MA.TrueHD.7.1.Atmos-ABC
-: audio_channels: '7.1'
-  audio_codec:
-  - DTS
-  - TrueHD
-  - DolbyAtmos
-  audio_profile: HDMA
-  format: BluRay
-  other: Remux
-  release_group: ABC
-  screen_size: 1080p
-  title: TEST
-  type: movie
-  year: 2015
-
-? Gangs of New York 2002 REMASTERED 1080p BluRay x264-AVCHD
-: format: BluRay
-  other: Remastered
-  screen_size: 1080p
-  title: Gangs of New York
-  type: movie
-  video_codec: h264
-  year: 2002
-
-? Peep.Show.S06E02.DVDrip.x264-faks86.mkv
-: container: mkv
-  episode: 2
-  format: DVD
-  release_group: faks86
-  season: 6
-  title: Peep Show
-  type: episode
-  video_codec: h264
-
-? The Soup - 11x41 - October 8, 2014.mp4
-: container: mp4
-  episode: 41
-  episode_title: October 8
-  season: 11
-  title: The Soup
-  type: episode
-  year: 2014
-
-? Red.Rock.S02E59.WEB-DLx264-JIVE
-: episode: 59
-  season: 2
-  format: WEB-DL
-  release_group: JIVE
-  title: Red Rock
-  type: episode
-  video_codec: h264
-
-? Pawn.Stars.S12E31.Deals.On.Wheels.PDTVx264-JIVE
-: episode: 31
-  episode_title: Deals On Wheels
-  season: 12
-  format: DVB
-  release_group: JIVE
-  title: Pawn Stars
-  type: episode
-  video_codec: h264
-
-? Duck.Dynasty.S09E09.Van.He-llsing.HDTVx264-JIVE
-: episode: 9
-  episode_title: Van He-llsing
-  season: 9
-  format: HDTV
-  release_group: JIVE
-  title: Duck Dynasty
-  type: episode
-  video_codec: h264
\ No newline at end of file
diff --git a/lib/httplib2/test/__init__.py b/lib/httplib2/test/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/lib/httplib2/test/brokensocket/socket.py b/lib/httplib2/test/brokensocket/socket.py
deleted file mode 100644
index ff7c0b74001a2d18e22a2a10407a90bec918f8ad..0000000000000000000000000000000000000000
--- a/lib/httplib2/test/brokensocket/socket.py
+++ /dev/null
@@ -1 +0,0 @@
-from realsocket import gaierror, error, getaddrinfo, SOCK_STREAM
diff --git a/lib/httplib2/test/functional/test_proxies.py b/lib/httplib2/test/functional/test_proxies.py
deleted file mode 100644
index 0b7880fe52db48637291ebd17e0e585c230f2153..0000000000000000000000000000000000000000
--- a/lib/httplib2/test/functional/test_proxies.py
+++ /dev/null
@@ -1,88 +0,0 @@
-import unittest
-import errno
-import os
-import signal
-import subprocess
-import tempfile
-
-import nose
-
-import httplib2
-from httplib2 import socks
-from httplib2.test import miniserver
-
-tinyproxy_cfg = """
-User "%(user)s"
-Port %(port)s
-Listen 127.0.0.1
-PidFile "%(pidfile)s"
-LogFile "%(logfile)s"
-MaxClients 2
-StartServers 1
-LogLevel Info
-"""
-
-
-class FunctionalProxyHttpTest(unittest.TestCase):
-    def setUp(self):
-        if not socks:
-            raise nose.SkipTest('socks module unavailable')
-        if not subprocess:
-            raise nose.SkipTest('subprocess module unavailable')
-
-        # start a short-lived miniserver so we can get a likely port
-        # for the proxy
-        self.httpd, self.proxyport = miniserver.start_server(
-            miniserver.ThisDirHandler)
-        self.httpd.shutdown()
-        self.httpd, self.port = miniserver.start_server(
-            miniserver.ThisDirHandler)
-
-        self.pidfile = tempfile.mktemp()
-        self.logfile = tempfile.mktemp()
-        fd, self.conffile = tempfile.mkstemp()
-        f = os.fdopen(fd, 'w')
-        our_cfg = tinyproxy_cfg % {'user': os.getlogin(),
-                                   'pidfile': self.pidfile,
-                                   'port': self.proxyport,
-                                   'logfile': self.logfile}
-        f.write(our_cfg)
-        f.close()
-        try:
-            # TODO use subprocess.check_call when 2.4 is dropped
-            ret = subprocess.call(['tinyproxy', '-c', self.conffile])
-            self.assertEqual(0, ret)
-        except OSError, e:
-            if e.errno == errno.ENOENT:
-                raise nose.SkipTest('tinyproxy not available')
-            raise
-
-    def tearDown(self):
-        self.httpd.shutdown()
-        try:
-            pid = int(open(self.pidfile).read())
-            os.kill(pid, signal.SIGTERM)
-        except OSError, e:
-            if e.errno == errno.ESRCH:
-                print '\n\n\nTinyProxy Failed to start, log follows:'
-                print open(self.logfile).read()
-                print 'end tinyproxy log\n\n\n'
-            raise
-        map(os.unlink, (self.pidfile,
-                        self.logfile,
-                        self.conffile))
-
-    def testSimpleProxy(self):
-        proxy_info = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP,
-                                        'localhost', self.proxyport)
-        client = httplib2.Http(proxy_info=proxy_info)
-        src = 'miniserver.py'
-        response, body = client.request('http://localhost:%d/%s' %
-                                        (self.port, src))
-        self.assertEqual(response.status, 200)
-        self.assertEqual(body, open(os.path.join(miniserver.HERE, src)).read())
-        lf = open(self.logfile).read()
-        expect = ('Established connection to host "127.0.0.1" '
-                  'using file descriptor')
-        self.assertTrue(expect in lf,
-                        'tinyproxy did not proxy a request for miniserver')
diff --git a/lib/httplib2/test/miniserver.py b/lib/httplib2/test/miniserver.py
deleted file mode 100644
index e32bf5e5f9bf59763623408baae5ef634c88d51b..0000000000000000000000000000000000000000
--- a/lib/httplib2/test/miniserver.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import logging
-import os
-import select
-import SimpleHTTPServer
-import SocketServer
-import threading
-
-HERE = os.path.dirname(__file__)
-logger = logging.getLogger(__name__)
-
-
-class ThisDirHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
-    def translate_path(self, path):
-        path = path.split('?', 1)[0].split('#', 1)[0]
-        return os.path.join(HERE, *filter(None, path.split('/')))
-
-    def log_message(self, s, *args):
-        # output via logging so nose can catch it
-        logger.info(s, *args)
-
-
-class ShutdownServer(SocketServer.TCPServer):
-    """Mixin that allows serve_forever to be shut down.
-
-    The methods in this mixin are backported from SocketServer.py in the Python
-    2.6.4 standard library. The mixin is unnecessary in 2.6 and later, when
-    BaseServer supports the shutdown method directly.
-    """
-
-    def __init__(self, *args, **kwargs):
-        SocketServer.TCPServer.__init__(self, *args, **kwargs)
-        self.__is_shut_down = threading.Event()
-        self.__serving = False
-
-    def serve_forever(self, poll_interval=0.1):
-        """Handle one request at a time until shutdown.
-
-        Polls for shutdown every poll_interval seconds. Ignores
-        self.timeout. If you need to do periodic tasks, do them in
-        another thread.
-        """
-        self.__serving = True
-        self.__is_shut_down.clear()
-        while self.__serving:
-            r, w, e = select.select([self.socket], [], [], poll_interval)
-            if r:
-                self._handle_request_noblock()
-        self.__is_shut_down.set()
-
-    def shutdown(self):
-        """Stops the serve_forever loop.
-
-        Blocks until the loop has finished. This must be called while
-        serve_forever() is running in another thread, or it will deadlock.
-        """
-        self.__serving = False
-        self.__is_shut_down.wait()
-
-    def handle_request(self):
-        """Handle one request, possibly blocking.
-
-        Respects self.timeout.
-        """
-        # Support people who used socket.settimeout() to escape
-        # handle_request before self.timeout was available.
-        timeout = self.socket.gettimeout()
-        if timeout is None:
-            timeout = self.timeout
-        elif self.timeout is not None:
-            timeout = min(timeout, self.timeout)
-        fd_sets = select.select([self], [], [], timeout)
-        if not fd_sets[0]:
-            self.handle_timeout()
-            return
-        self._handle_request_noblock()
-
-    def _handle_request_noblock(self):
-        """Handle one request, without blocking.
-
-        I assume that select.select has returned that the socket is
-        readable before this function was called, so there should be
-        no risk of blocking in get_request().
-        """
-        try:
-            request, client_address = self.get_request()
-        except socket.error:
-            return
-        if self.verify_request(request, client_address):
-            try:
-                self.process_request(request, client_address)
-            except:
-                self.handle_error(request, client_address)
-                self.close_request(request)
-
-
-def start_server(handler):
-    httpd = ShutdownServer(("", 0), handler)
-    threading.Thread(target=httpd.serve_forever).start()
-    _, port = httpd.socket.getsockname()
-    return httpd, port
diff --git a/lib/httplib2/test/other_cacerts.txt b/lib/httplib2/test/other_cacerts.txt
deleted file mode 100644
index 360954a2868212ae1625b65f598c7ed76cdfc78b..0000000000000000000000000000000000000000
--- a/lib/httplib2/test/other_cacerts.txt
+++ /dev/null
@@ -1,70 +0,0 @@
-# Certificate Authority certificates for validating SSL connections.
-#
-# This file contains PEM format certificates generated from
-# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt
-#
-# ***** BEGIN LICENSE BLOCK *****
-# Version: MPL 1.1/GPL 2.0/LGPL 2.1
-#
-# The contents of this file are subject to the Mozilla Public License Version
-# 1.1 (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-# http://www.mozilla.org/MPL/
-#
-# Software distributed under the License is distributed on an "AS IS" basis,
-# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
-# for the specific language governing rights and limitations under the
-# License.
-#
-# The Original Code is the Netscape security libraries.
-#
-# The Initial Developer of the Original Code is
-# Netscape Communications Corporation.
-# Portions created by the Initial Developer are Copyright (C) 1994-2000
-# the Initial Developer. All Rights Reserved.
-#
-# Contributor(s):
-#
-# Alternatively, the contents of this file may be used under the terms of
-# either the GNU General Public License Version 2 or later (the "GPL"), or
-# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
-# in which case the provisions of the GPL or the LGPL are applicable instead
-# of those above. If you wish to allow use of your version of this file only
-# under the terms of either the GPL or the LGPL, and not to allow others to
-# use your version of this file under the terms of the MPL, indicate your
-# decision by deleting the provisions above and replace them with the notice
-# and other provisions required by the GPL or the LGPL. If you do not delete
-# the provisions above, a recipient may use your version of this file under
-# the terms of any one of the MPL, the GPL or the LGPL.
-#
-# ***** END LICENSE BLOCK *****
-
-
-Comodo CA Limited, CN=Trusted Certificate Services
-==================================================
-
------BEGIN CERTIFICATE-----
-MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
-MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
-GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
-aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
-MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
-BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
-VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
-AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
-fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
-TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
-fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
-1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
-kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
-A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
-VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
-ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
-dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
-Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
-HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
-pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
-jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
-xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
-dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
------END CERTIFICATE-----
diff --git a/lib/httplib2/test/smoke_test.py b/lib/httplib2/test/smoke_test.py
deleted file mode 100644
index 9f1e6f01d6dacb3c28faa942853487bf21afd77e..0000000000000000000000000000000000000000
--- a/lib/httplib2/test/smoke_test.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import os
-import unittest
-
-import httplib2
-
-from httplib2.test import miniserver
-
-
-class HttpSmokeTest(unittest.TestCase):
-    def setUp(self):
-        self.httpd, self.port = miniserver.start_server(
-            miniserver.ThisDirHandler)
-
-    def tearDown(self):
-        self.httpd.shutdown()
-
-    def testGetFile(self):
-        client = httplib2.Http()
-        src = 'miniserver.py'
-        response, body = client.request('http://localhost:%d/%s' %
-                                        (self.port, src))
-        self.assertEqual(response.status, 200)
-        self.assertEqual(body, open(os.path.join(miniserver.HERE, src)).read())
diff --git a/lib/httplib2/test/test_no_socket.py b/lib/httplib2/test/test_no_socket.py
deleted file mode 100644
index 66ba05637648b13b01f64a9d238dbb5182bbc303..0000000000000000000000000000000000000000
--- a/lib/httplib2/test/test_no_socket.py
+++ /dev/null
@@ -1,24 +0,0 @@
-"""Tests for httplib2 when the socket module is missing.
-
-This helps ensure compatibility with environments such as AppEngine.
-"""
-import os
-import sys
-import unittest
-
-import httplib2
-
-class MissingSocketTest(unittest.TestCase):
-    def setUp(self):
-        self._oldsocks = httplib2.socks
-        httplib2.socks = None
-
-    def tearDown(self):
-        httplib2.socks = self._oldsocks
-
-    def testProxyDisabled(self):
-        proxy_info = httplib2.ProxyInfo('blah',
-                                        'localhost', 0)
-        client = httplib2.Http(proxy_info=proxy_info)
-        self.assertRaises(httplib2.ProxiesUnavailableError,
-                          client.request, 'http://localhost:-1/')
diff --git a/lib/ndg/httpsclient/test/README b/lib/ndg/httpsclient/test/README
deleted file mode 100644
index cc10f870530c4d86de776429d112d561b3ae4856..0000000000000000000000000000000000000000
--- a/lib/ndg/httpsclient/test/README
+++ /dev/null
@@ -1,26 +0,0 @@
-NDG HTTPS Client Unit tests directory
-=====================================
-The unit tests expect to connect to a simple HTTPS server listening on port 
-4443.  An OpenSSL script is provided for this purpose in scripts/.  To run,
-
-$ ./scripts/openssl_https_server.sh
-
-Unit tests
-----------
-Run, for example:
-
-$ python ./test_urllib2.py
-
-Troubleshooting
----------------
- * Run the openssl script from *this* directory.  
- * Also ensure it has execute bits set, e.g.
-
- $ chmod 755 ./scripts/openssl_https_server.sh
- 
- * You may need to set the no_proxy environment variable if you have an HTTPS
- proxy in place:
- 
- $ export no_proxy=localhost
-
-
diff --git a/lib/ndg/httpsclient/test/__init__.py b/lib/ndg/httpsclient/test/__init__.py
deleted file mode 100644
index 4e8196a5c6002e0e7c8259250b50446774e7eaff..0000000000000000000000000000000000000000
--- a/lib/ndg/httpsclient/test/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""Unit tests package for ndg_httpsclient
-
-PyOpenSSL utility to make a httplib-like interface suitable for use with 
-urllib2
-"""
-__author__ = "P J Kershaw (STFC)"
-__date__ = "05/01/12"
-__copyright__ = "(C) 2012 Science and Technology Facilities Council"
-__license__ = "BSD - see LICENSE file in top-level directory"
-__contact__ = "Philip.Kershaw@stfc.ac.uk"
-__revision__ = '$Id$'
-import os
-import unittest
-    
-class Constants(object):
-    '''Convenience base class which other unit tests can extend.  It sets
-    the generic data directory path.'''
-    PORT = 4443
-    PORT2 = 4444
-    HOSTNAME = 'localhost'
-    TEST_URI = 'https://%s:%d' % (HOSTNAME, PORT)
-    TEST_URI2 = 'https://%s:%d' % (HOSTNAME, PORT2)
-
-    UNITTEST_DIR = os.path.dirname(os.path.abspath(__file__))
-    CACERT_DIR = os.path.join(UNITTEST_DIR, 'pki', 'ca')
-    SSL_CERT_FILENAME = 'localhost.crt'
-    SSL_CERT_FILEPATH = os.path.join(UNITTEST_DIR, 'pki', SSL_CERT_FILENAME)
-    SSL_PRIKEY_FILENAME = 'localhost.key'
-    SSL_PRIKEY_FILEPATH = os.path.join(UNITTEST_DIR, 'pki', SSL_PRIKEY_FILENAME)
diff --git a/lib/ndg/httpsclient/test/pki/localhost.crt b/lib/ndg/httpsclient/test/pki/localhost.crt
deleted file mode 100644
index 257a5d5d096264d9040cc65c3d95312e1565636b..0000000000000000000000000000000000000000
--- a/lib/ndg/httpsclient/test/pki/localhost.crt
+++ /dev/null
@@ -1,14 +0,0 @@
------BEGIN CERTIFICATE-----
-MIICFjCCAX+gAwIBAgIBCjANBgkqhkiG9w0BAQQFADAzMQwwCgYDVQQKEwNOREcx
-ETAPBgNVBAsTCFNlY3VyaXR5MRAwDgYDVQQDEwdUZXN0IENBMB4XDTEyMDIwODE2
-MTE1M1oXDTE3MDIwNjE2MTE1M1owNTERMA8GA1UECxMIU2VjdXJpdHkxEjAQBgNV
-BAMTCWxvY2FsaG9zdDEMMAoGA1UEChMDTkRHMIGfMA0GCSqGSIb3DQEBAQUAA4GN
-ADCBiQKBgQCdhZgzD0xusZqzdphETJPgb4QK/sdDpF8EOT/20bAuyRgGt7papJmc
-6UtdgS5b9bGh6sRXx+vSKiTqq1ZFLOjnn3OQKhdrK2VU8XiD5rjuwTuNzser0uba
-lTOW5/2yVab+uZ/vw4yxR64+KdyBuVopXV9STuh12Q0JSrXzdH82iQIDAQABozgw
-NjAMBgNVHRMBAf8EAjAAMCYGA1UdEQQfMB2CCWxvY2FsaG9zdIIQbG9jYWxob3N0
-LmRvbWFpbjANBgkqhkiG9w0BAQQFAAOBgQBAAQCTkLfgYAjvm63KRXcE8djkYIVQ
-LleHNrCad/v3zNFK0PPCjIeBSWlI/1bPhJDCpfwpvJLk86DrB97Q3IafU2ml7DkC
-93bi3iaDy4jI1uskvlM516iaBQx1DCIa4gesluBAnZFvby8HX9y/A7tn5Ew2vdQJ
-upkcCUswsU4MSA==
------END CERTIFICATE-----
diff --git a/lib/ndg/httpsclient/test/pki/localhost.key b/lib/ndg/httpsclient/test/pki/localhost.key
deleted file mode 100644
index 17fe929e1ffeac81f08bdfd685707c02143c0627..0000000000000000000000000000000000000000
--- a/lib/ndg/httpsclient/test/pki/localhost.key
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIICWwIBAAKBgQCdhZgzD0xusZqzdphETJPgb4QK/sdDpF8EOT/20bAuyRgGt7pa
-pJmc6UtdgS5b9bGh6sRXx+vSKiTqq1ZFLOjnn3OQKhdrK2VU8XiD5rjuwTuNzser
-0ubalTOW5/2yVab+uZ/vw4yxR64+KdyBuVopXV9STuh12Q0JSrXzdH82iQIDAQAB
-AoGAejr+HTDT2FlMd9Gg2e6qGM+voHCO4vgbGsXp0nZnxgYY9K2Al3F+GXoWFxp0
-hLsj+UaY0Jy7art1JfuJ1+e/WTR+0s4c6IbZCy0fHF4i29wUI5lc0zSmtePgITOD
-tvgtJ8ji+ESq7sRyXO0Eb8wFJPyLj3efoeBQUl8Om1XMYGECQQDLayMY8dgqZCMK
-iRU0wrCgzu/1tNBv1hRwip+rOTiqqL+MAKSYg1XtWSlm2RojiNmBfvPo+7VrXZMu
-Nt1cBoOtAkEAxj1TuJRmZMf1QFuvv6DLloMmhilGkFobWysUZW18J8FyM+vI5kvH
-TjRp2ZGkSw7Fsl+MUpQdfNOkd7pilJd5zQJAPofWqCpf2tghdXGiVS+sACLc3NkS
-Ye6bJeVXI9lZNAzfpPfloQRue6G2+miuglHlGsudyvblU/XV8pTnAwz1mQJACyu3
-hQYvwuwVoNvJyoWYE1IuoI7A4C+DrR5/VrvVrDPVaKGXv4pzn6+Ka20ukeAyObvy
-n1CjXL5cXTbOiUsD3QJAPe8Rw/Nu3o76tZfWB3irvjZ/mUDPhEppSis5oJY/exoB
-O96/99UXZNwSbDII0gjBPN2pd2kf/Ik3EQlxiryZuw==
------END RSA PRIVATE KEY-----
diff --git a/lib/ndg/httpsclient/test/scripts/openssl_https_server.sh b/lib/ndg/httpsclient/test/scripts/openssl_https_server.sh
deleted file mode 100755
index 4f9c339074af4a3aa37c656a72ed0cc2d5c6efc4..0000000000000000000000000000000000000000
--- a/lib/ndg/httpsclient/test/scripts/openssl_https_server.sh
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-openssl s_server -www -cert pki/localhost.crt -key pki/localhost.key -accept 4443
diff --git a/lib/ndg/httpsclient/test/test_https.py b/lib/ndg/httpsclient/test/test_https.py
deleted file mode 100644
index 8400e807a4489a13adb5a1bd51ca42070876d4f6..0000000000000000000000000000000000000000
--- a/lib/ndg/httpsclient/test/test_https.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""unit tests module for ndg.httpsclient.https.HTTPSconnection class
-
-PyOpenSSL utility to make a httplib-like interface suitable for use with 
-urllib2
-"""
-__author__ = "P J Kershaw (STFC)"
-__date__ = "06/01/12"
-__copyright__ = "(C) 2012 Science and Technology Facilities Council"
-__license__ = "BSD - see LICENSE file in top-level directory"
-__contact__ = "Philip.Kershaw@stfc.ac.uk"
-__revision__ = '$Id$'
-import logging
-logging.basicConfig(level=logging.DEBUG)
-log = logging.getLogger(__name__)
-import unittest
-import socket
-
-from OpenSSL import SSL
-
-from ndg.httpsclient.test import Constants
-from ndg.httpsclient.https import HTTPSConnection
-from ndg.httpsclient.ssl_peer_verification import ServerSSLCertVerification
-
-
-class TestHTTPSConnection(unittest.TestCase):
-    '''Test ndg HTTPS client HTTPSConnection class'''
-
-    def test01_open(self):
-        conn = HTTPSConnection(Constants.HOSTNAME, port=Constants.PORT)
-        conn.connect()
-        conn.request('GET', '/')
-        resp = conn.getresponse()
-        print('Response = %s' % resp.read())
-        conn.close()
-
-    def test02_open_fails(self):
-        conn = HTTPSConnection(Constants.HOSTNAME, port=Constants.PORT2)
-        self.failUnlessRaises(socket.error, conn.connect)
-
-    def test03_ssl_verification_of_peer_fails(self):
-        ctx = SSL.Context(SSL.SSLv3_METHOD)
-        
-        def verify_callback(conn, x509, errnum, errdepth, preverify_ok): 
-            log.debug('SSL peer certificate verification failed for %r',
-                      x509.get_subject())
-            return preverify_ok 
-            
-        ctx.set_verify(SSL.VERIFY_PEER, verify_callback)
-        ctx.set_verify_depth(9)
-        
-        # Set bad location - unit test dir has no CA certs to verify with
-        ctx.load_verify_locations(None, Constants.UNITTEST_DIR)
-        
-        conn = HTTPSConnection(Constants.HOSTNAME, port=Constants.PORT,
-                               ssl_context=ctx)
-        conn.connect()        
-        self.failUnlessRaises(SSL.Error, conn.request, 'GET', '/')
-
-    def test03_ssl_verification_of_peer_succeeds(self):
-        ctx = SSL.Context(SSL.SSLv3_METHOD)
-        
-        verify_callback = lambda conn, x509, errnum, errdepth, preverify_ok: \
-            preverify_ok 
-            
-        ctx.set_verify(SSL.VERIFY_PEER, verify_callback)
-        ctx.set_verify_depth(9)
-        
-        # Set correct location for CA certs to verify with
-        ctx.load_verify_locations(None, Constants.CACERT_DIR)
-        
-        conn = HTTPSConnection(Constants.HOSTNAME, port=Constants.PORT,
-                               ssl_context=ctx)
-        conn.connect()
-        conn.request('GET', '/')
-        resp = conn.getresponse()
-        print('Response = %s' % resp.read())
-
-    def test04_ssl_verification_with_subj_alt_name(self):
-        ctx = SSL.Context(SSL.SSLv3_METHOD)
-        
-        verify_callback = ServerSSLCertVerification(hostname='localhost')
-            
-        ctx.set_verify(SSL.VERIFY_PEER, verify_callback)
-        ctx.set_verify_depth(9)
-        
-        # Set correct location for CA certs to verify with
-        ctx.load_verify_locations(None, Constants.CACERT_DIR)
-        
-        conn = HTTPSConnection(Constants.HOSTNAME, port=Constants.PORT,
-                               ssl_context=ctx)
-        conn.connect()
-        conn.request('GET', '/')
-        resp = conn.getresponse()
-        print('Response = %s' % resp.read())
-
-    def test04_ssl_verification_with_subj_common_name(self):
-        ctx = SSL.Context(SSL.SSLv3_METHOD)
-        
-        # Explicitly set verification of peer hostname using peer certificate
-        # subject common name
-        verify_callback = ServerSSLCertVerification(hostname='localhost',
-                                                    subj_alt_name_match=False)
-
-        ctx.set_verify(SSL.VERIFY_PEER, verify_callback)
-        ctx.set_verify_depth(9)
-        
-        # Set correct location for CA certs to verify with
-        ctx.load_verify_locations(None, Constants.CACERT_DIR)
-        
-        conn = HTTPSConnection(Constants.HOSTNAME, port=Constants.PORT,
-                               ssl_context=ctx)
-        conn.connect()
-        conn.request('GET', '/')
-        resp = conn.getresponse()
-        print('Response = %s' % resp.read())
-
-        
-if __name__ == "__main__":
-    unittest.main()
\ No newline at end of file
diff --git a/lib/ndg/httpsclient/test/test_urllib2.py b/lib/ndg/httpsclient/test/test_urllib2.py
deleted file mode 100644
index 9c1ef8df73f02564c1500492fc7f8bf58939b5fc..0000000000000000000000000000000000000000
--- a/lib/ndg/httpsclient/test/test_urllib2.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""unit tests module for ndg.httpsclient.urllib2_build_opener module
-
-PyOpenSSL utility to make a httplib-like interface suitable for use with 
-urllib2
-"""
-__author__ = "P J Kershaw (STFC)"
-__date__ = "06/01/12"
-__copyright__ = "(C) 2012 Science and Technology Facilities Council"
-__license__ = "BSD - see LICENSE file in top-level directory"
-__contact__ = "Philip.Kershaw@stfc.ac.uk"
-__revision__ = '$Id$'
-from urllib2 import URLError
-import unittest
-
-from OpenSSL import SSL
-from ndg.httpsclient.test import Constants
-from ndg.httpsclient.urllib2_build_opener import build_opener
-
-
-class Urllib2TestCase(unittest.TestCase):
-    """Unit tests for urllib2 functionality"""
-    
-    def test01_urllib2_build_opener(self):     
-        opener = build_opener()
-        self.assert_(opener)
-
-    def test02_open(self):
-        opener = build_opener()
-        res = opener.open(Constants.TEST_URI)
-        self.assert_(res)
-        print("res = %s" % res.read())
-
-    def test03_open_fails_unknown_loc(self):
-        opener = build_opener()
-        self.failUnlessRaises(URLError, opener.open, Constants.TEST_URI2)
-        
-    def test04_open_peer_cert_verification_fails(self):
-        # Explicitly set empty CA directory to make verification fail
-        ctx = SSL.Context(SSL.SSLv3_METHOD)
-        verify_callback = lambda conn, x509, errnum, errdepth, preverify_ok: \
-            preverify_ok 
-            
-        ctx.set_verify(SSL.VERIFY_PEER, verify_callback)
-        ctx.load_verify_locations(None, './')
-        opener = build_opener(ssl_context=ctx)
-        self.failUnlessRaises(SSL.Error, opener.open, Constants.TEST_URI)
-        
-        
-if __name__ == "__main__":
-    unittest.main()
diff --git a/lib/ndg/httpsclient/test/test_utils.py b/lib/ndg/httpsclient/test/test_utils.py
deleted file mode 100644
index fe496a69052f67acc1216fdac2f4a84e10344576..0000000000000000000000000000000000000000
--- a/lib/ndg/httpsclient/test/test_utils.py
+++ /dev/null
@@ -1,61 +0,0 @@
-"""unit tests module for ndg.httpsclient.utils module
-
-PyOpenSSL utility to make a httplib-like interface suitable for use with 
-urllib2
-"""
-__author__ = "P J Kershaw (STFC)"
-__date__ = "06/01/12"
-__copyright__ = "(C) 2012 Science and Technology Facilities Council"
-__license__ = "BSD - see LICENSE file in top-level directory"
-__contact__ = "Philip.Kershaw@stfc.ac.uk"
-__revision__ = '$Id$'
-import unittest
-import os
-
-from OpenSSL import SSL
-
-from ndg.httpsclient.test import Constants
-from ndg.httpsclient.utils import (Configuration, fetch_from_url, open_url,
-                                   _should_use_proxy)
-
-
-class TestUtilsModule(unittest.TestCase):
-    '''Test ndg.httpsclient.utils module'''
-
-    def test01_configuration(self):
-        config = Configuration(SSL.Context(SSL.SSLv3_METHOD), True)
-        self.assert_(config.ssl_context)
-        self.assertEquals(config.debug, True)
-
-    def test02_fetch_from_url(self):
-        config = Configuration(SSL.Context(SSL.SSLv3_METHOD), True)
-        res = fetch_from_url(Constants.TEST_URI, config)
-        self.assert_(res)
-        
-    def test03_open_url(self):
-        config = Configuration(SSL.Context(SSL.SSLv3_METHOD), True)
-        res = open_url(Constants.TEST_URI, config)
-        self.assertEqual(res[0], 200, 
-                         'open_url for %r failed' % Constants.TEST_URI)
-        
-    def test04__should_use_proxy(self):
-        if 'no_proxy' in os.environ:
-            no_proxy = os.environ['no_proxy']
-            del os.environ['no_proxy']
-        else:
-            no_proxy = None
-               
-        self.assertTrue(_should_use_proxy(Constants.TEST_URI), 
-                        'Expecting use proxy = True')
-        
-        os.environ['no_proxy'] = 'localhost,localhost.localdomain'
-        self.assertFalse(_should_use_proxy(Constants.TEST_URI), 
-                         'Expecting use proxy = False')
-        
-        if no_proxy is not None:
-            os.environ['no_proxy'] = no_proxy
-        else:
-            del os.environ['no_proxy']
-    
-if __name__ == "__main__":
-    unittest.main()
\ No newline at end of file
diff --git a/lib/rarfile/test/__init__.py b/lib/rarfile/test/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/lib/rarfile/test/files/ctime0.rar b/lib/rarfile/test/files/ctime0.rar
deleted file mode 100644
index d72c62ddf23fc85552ce3ad3ab79a89fddbce452..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/ctime0.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/ctime0.rar.exp b/lib/rarfile/test/files/ctime0.rar.exp
deleted file mode 100644
index 8095f121d16ed7a491dfc927907e62da7361cde6..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/ctime0.rar.exp
+++ /dev/null
@@ -1,7 +0,0 @@
-Archive: test/files/ctime0.rar
-FILE: hdrlen=46 datlen=0
-  flags=0x9020:EXTTIME,LONG,D128
-  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2011-05-10 21:28:47
-  name=afile.txt
-  mtime=2011-05-10T21:28:47.899345
diff --git a/lib/rarfile/test/files/ctime1.rar b/lib/rarfile/test/files/ctime1.rar
deleted file mode 100644
index 89d82557950158571dace5962d58f61f5113bde4..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/ctime1.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/ctime1.rar.exp b/lib/rarfile/test/files/ctime1.rar.exp
deleted file mode 100644
index 39154118cf7cb99b42e29402ffb5064e637d5078..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/ctime1.rar.exp
+++ /dev/null
@@ -1,8 +0,0 @@
-Archive: test/files/ctime1.rar
-FILE: hdrlen=50 datlen=0
-  flags=0x9020:EXTTIME,LONG,D128
-  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2011-05-10 21:28:47
-  name=afile.txt
-  mtime=2011-05-10T21:28:47.899345
-  ctime=2011-05-10T21:28:47
diff --git a/lib/rarfile/test/files/ctime2.rar b/lib/rarfile/test/files/ctime2.rar
deleted file mode 100644
index 09c91371028ce52fdfa47ecc2ccf57c30ae32d73..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/ctime2.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/ctime2.rar.exp b/lib/rarfile/test/files/ctime2.rar.exp
deleted file mode 100644
index 09d5ba3b81908c83e4e1d4cefb63294d8838adb4..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/ctime2.rar.exp
+++ /dev/null
@@ -1,8 +0,0 @@
-Archive: test/files/ctime2.rar
-FILE: hdrlen=51 datlen=0
-  flags=0x9020:EXTTIME,LONG,D128
-  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2011-05-10 21:28:47
-  name=afile.txt
-  mtime=2011-05-10T21:28:47.899345
-  ctime=2011-05-10T21:28:47.897843
diff --git a/lib/rarfile/test/files/ctime3.rar b/lib/rarfile/test/files/ctime3.rar
deleted file mode 100644
index a32fa14f4af42673911345542a0150ad51d9dbed..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/ctime3.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/ctime3.rar.exp b/lib/rarfile/test/files/ctime3.rar.exp
deleted file mode 100644
index 537160974bfc3a01eef8fddb1bb72ec85d42febb..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/ctime3.rar.exp
+++ /dev/null
@@ -1,8 +0,0 @@
-Archive: test/files/ctime3.rar
-FILE: hdrlen=52 datlen=0
-  flags=0x9020:EXTTIME,LONG,D128
-  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2011-05-10 21:28:47
-  name=afile.txt
-  mtime=2011-05-10T21:28:47.899345
-  ctime=2011-05-10T21:28:47.899328
diff --git a/lib/rarfile/test/files/ctime4.rar b/lib/rarfile/test/files/ctime4.rar
deleted file mode 100644
index 921e0da6a652e8cc1d63970f4d14d327cf87566d..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/ctime4.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/ctime4.rar.exp b/lib/rarfile/test/files/ctime4.rar.exp
deleted file mode 100644
index 3f756151fcaae0a09fa2e40f4a682fab03eca3ef..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/ctime4.rar.exp
+++ /dev/null
@@ -1,8 +0,0 @@
-Archive: test/files/ctime4.rar
-FILE: hdrlen=53 datlen=0
-  flags=0x9020:EXTTIME,LONG,D128
-  os=2:WIN ver=29 mode=0x20 meth=0 cmp=0 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2011-05-10 21:28:47
-  name=afile.txt
-  mtime=2011-05-10T21:28:47.899345
-  ctime=2011-05-10T21:28:47.899345
diff --git a/lib/rarfile/test/files/rar15-comment-lock.rar b/lib/rarfile/test/files/rar15-comment-lock.rar
deleted file mode 100644
index 462f2625a039b271e9131f605a67af17b5198e0c..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar15-comment-lock.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar15-comment-lock.rar.exp b/lib/rarfile/test/files/rar15-comment-lock.rar.exp
deleted file mode 100644
index 13824a329153a0dad53666bc1a2c9a0cd55ca573..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar15-comment-lock.rar.exp
+++ /dev/null
@@ -1,14 +0,0 @@
-Archive: test/files/rar15-comment-lock.rar
-  comment='RARcomment -----'
-FILE: hdrlen=72 datlen=7
-  flags=0x8008:COMMENT,LONG,D64
-  os=0:DOS ver=15 mode=0x20 meth=3 cmp=7 dec=7 vol=0
-  crc=0xe27f07a9 (3799975849) date_time=2010-11-03 19:49:32
-  name=FILE1.TXT
-  comment='file1comment -----'
-FILE: hdrlen=72 datlen=8
-  flags=0x8008:COMMENT,LONG,D64
-  os=0:DOS ver=15 mode=0x20 meth=0 cmp=8 dec=8 vol=0
-  crc=0x3c4306f7 (1011025655) date_time=2010-11-03 19:49:38
-  name=FILE2.TXT
-  comment='file2comment -----'
diff --git a/lib/rarfile/test/files/rar15-comment.rar b/lib/rarfile/test/files/rar15-comment.rar
deleted file mode 100644
index f193bb0f52b4721a1a417cb2e6275d4b0f532b1e..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar15-comment.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar15-comment.rar.exp b/lib/rarfile/test/files/rar15-comment.rar.exp
deleted file mode 100644
index f9eb010cae9870df4fd5a1c040231aa09aa9f9a3..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar15-comment.rar.exp
+++ /dev/null
@@ -1,14 +0,0 @@
-Archive: test/files/rar15-comment.rar
-  comment='RARcomment -----'
-FILE: hdrlen=72 datlen=7
-  flags=0x8008:COMMENT,LONG,D64
-  os=0:DOS ver=15 mode=0x20 meth=3 cmp=7 dec=7 vol=0
-  crc=0xe27f07a9 (3799975849) date_time=2010-11-03 19:49:32
-  name=FILE1.TXT
-  comment='file1comment -----'
-FILE: hdrlen=72 datlen=8
-  flags=0x8008:COMMENT,LONG,D64
-  os=0:DOS ver=15 mode=0x20 meth=0 cmp=8 dec=8 vol=0
-  crc=0x3c4306f7 (1011025655) date_time=2010-11-03 19:49:38
-  name=FILE2.TXT
-  comment='file2comment -----'
diff --git a/lib/rarfile/test/files/rar202-comment-nopsw.rar b/lib/rarfile/test/files/rar202-comment-nopsw.rar
deleted file mode 100644
index 329dc72af0e52e6d105984fa5b49e5d0d41ee6da..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar202-comment-nopsw.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar202-comment-nopsw.rar.exp b/lib/rarfile/test/files/rar202-comment-nopsw.rar.exp
deleted file mode 100644
index 2fbb69d0a57d52a1f30769a276512c8972cc75db..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar202-comment-nopsw.rar.exp
+++ /dev/null
@@ -1,14 +0,0 @@
-Archive: test/files/rar202-comment-nopsw.rar
-  comment='RARcomment'
-FILE: hdrlen=66 datlen=7
-  flags=0x8008:COMMENT,LONG,D64
-  os=0:DOS ver=20 mode=0x20 meth=0 cmp=7 dec=7 vol=0
-  crc=0x7a197dba (2048490938) date_time=2010-11-03 00:27:28
-  name=FILE1.TXT
-  comment='file1comment'
-FILE: hdrlen=66 datlen=7
-  flags=0x8008:COMMENT,LONG,D64
-  os=0:DOS ver=20 mode=0x20 meth=0 cmp=7 dec=7 vol=0
-  crc=0x785fc3e3 (2019541987) date_time=2010-11-03 00:27:34
-  name=FILE2.TXT
-  comment='file2comment'
diff --git a/lib/rarfile/test/files/rar202-comment-psw.rar b/lib/rarfile/test/files/rar202-comment-psw.rar
deleted file mode 100644
index 60fb14f42b4063ba32f65125061dc36222956bd9..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar202-comment-psw.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar202-comment-psw.rar.exp b/lib/rarfile/test/files/rar202-comment-psw.rar.exp
deleted file mode 100644
index 137533679d6cb73fa17f1f144d1b6e9eeba85dcb..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar202-comment-psw.rar.exp
+++ /dev/null
@@ -1,14 +0,0 @@
-Archive: test/files/rar202-comment-psw.rar
-  comment='RARcomment'
-FILE: hdrlen=66 datlen=32
-  flags=0x800c:PASSWORD,COMMENT,LONG,D64
-  os=0:DOS ver=20 mode=0x20 meth=3 cmp=32 dec=7 vol=0
-  crc=0x7a197dba (2048490938) date_time=2010-11-03 00:27:28
-  name=FILE1.TXT
-  comment='file1comment'
-FILE: hdrlen=66 datlen=32
-  flags=0x800c:PASSWORD,COMMENT,LONG,D64
-  os=0:DOS ver=20 mode=0x20 meth=3 cmp=32 dec=7 vol=0
-  crc=0x785fc3e3 (2019541987) date_time=2010-11-03 00:27:34
-  name=FILE2.TXT
-  comment='file2comment'
diff --git a/lib/rarfile/test/files/rar3-comment-hpsw.rar b/lib/rarfile/test/files/rar3-comment-hpsw.rar
deleted file mode 100644
index 37210ad622d6e4530340a232143344bf722733a9..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-comment-hpsw.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-comment-hpsw.rar.exp b/lib/rarfile/test/files/rar3-comment-hpsw.rar.exp
deleted file mode 100644
index c24a2291c49ed28a5a8f4a4873d52914de00cd82..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar3-comment-hpsw.rar.exp
+++ /dev/null
@@ -1,16 +0,0 @@
-Archive: test/files/rar3-comment-hpsw.rar
-  comment='RARcomment\n'
-FILE: hdrlen=51 datlen=16
-  flags=0x9424:PASSWORD,SALT,EXTTIME,LONG,D128
-  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=16 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2010-11-02 10:03:25
-  name=file1.txt
-  mtime=2010-11-02T10:03:25
-  comment='Comment1v2\n'
-FILE: hdrlen=51 datlen=16
-  flags=0x9424:PASSWORD,SALT,EXTTIME,LONG,D128
-  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=16 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2010-11-02 10:03:25
-  name=file2.txt
-  mtime=2010-11-02T10:03:25
-  comment='Comment2v2\n'
diff --git a/lib/rarfile/test/files/rar3-comment-plain.rar b/lib/rarfile/test/files/rar3-comment-plain.rar
deleted file mode 100644
index 29d8cb00d605e93e3cdfae4a198a9e3ec7424dfb..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-comment-plain.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-comment-plain.rar.exp b/lib/rarfile/test/files/rar3-comment-plain.rar.exp
deleted file mode 100644
index ed9a8a27e146ec6c88a047ffd7a933b1dccfef25..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar3-comment-plain.rar.exp
+++ /dev/null
@@ -1,16 +0,0 @@
-Archive: test/files/rar3-comment-plain.rar
-  comment='RARcomment\n'
-FILE: hdrlen=43 datlen=8
-  flags=0x9020:EXTTIME,LONG,D128
-  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=8 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2010-11-02 10:03:25
-  name=file1.txt
-  mtime=2010-11-02T10:03:25
-  comment='Comment1v2\n'
-FILE: hdrlen=43 datlen=8
-  flags=0x9020:EXTTIME,LONG,D128
-  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=8 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2010-11-02 10:03:25
-  name=file2.txt
-  mtime=2010-11-02T10:03:25
-  comment='Comment2v2\n'
diff --git a/lib/rarfile/test/files/rar3-comment-psw.rar b/lib/rarfile/test/files/rar3-comment-psw.rar
deleted file mode 100644
index dd1beabf48e5981759fe94cae31ed5261447b02a..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-comment-psw.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-comment-psw.rar.exp b/lib/rarfile/test/files/rar3-comment-psw.rar.exp
deleted file mode 100644
index 9e70920709f79d069d6a63c6bfa242fbf9f69339..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar3-comment-psw.rar.exp
+++ /dev/null
@@ -1,16 +0,0 @@
-Archive: test/files/rar3-comment-psw.rar
-  comment='RARcomment\n'
-FILE: hdrlen=51 datlen=16
-  flags=0x9424:PASSWORD,SALT,EXTTIME,LONG,D128
-  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=16 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2010-11-02 10:03:25
-  name=file1.txt
-  mtime=2010-11-02T10:03:25
-  comment='Comment1v2\n'
-FILE: hdrlen=51 datlen=16
-  flags=0x9424:PASSWORD,SALT,EXTTIME,LONG,D128
-  os=3:UNIX ver=29 mode=0100644 meth=3 cmp=16 dec=0 vol=0
-  crc=0x00000000 (0) date_time=2010-11-02 10:03:25
-  name=file2.txt
-  mtime=2010-11-02T10:03:25
-  comment='Comment2v2\n'
diff --git a/lib/rarfile/test/files/rar3-old.r00 b/lib/rarfile/test/files/rar3-old.r00
deleted file mode 100644
index 0ba628367eaae36c92a648217e9baf96125c2acb..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-old.r00 and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-old.r01 b/lib/rarfile/test/files/rar3-old.r01
deleted file mode 100644
index 9bc19dde8cb2e46d3e064b83074c7fc4e7e5c3da..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-old.r01 and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-old.rar b/lib/rarfile/test/files/rar3-old.rar
deleted file mode 100644
index 962d8b6f4b9d7e60396c1b5366f8962868c71fe1..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-old.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-old.rar.exp b/lib/rarfile/test/files/rar3-old.rar.exp
deleted file mode 100644
index 711c3128eda5e9222e5cbea8d419670abea6e6d3..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar3-old.rar.exp
+++ /dev/null
@@ -1,13 +0,0 @@
-Archive: test/files/rar3-old.rar
-FILE: hdrlen=50 datlen=102310
-  flags=0x9022:SPLIT_AFTER,EXTTIME,LONG,D128
-  os=3:UNIX ver=20 mode=0100664 meth=0 cmp=205000 dec=205000 vol=0
-  crc=0x509ad74c (1352324940) date_time=2016-05-24 11:42:37
-  name=vols/bigfile.txt
-  mtime=2016-05-24T11:42:37
-FILE: hdrlen=52 datlen=2050
-  flags=0x9020:EXTTIME,LONG,D128
-  os=3:UNIX ver=20 mode=0100664 meth=0 cmp=2050 dec=2050 vol=2
-  crc=0xd08a1f86 (3498712966) date_time=2016-05-24 11:42:43
-  name=vols/smallfile.txt
-  mtime=2016-05-24T11:42:43
diff --git a/lib/rarfile/test/files/rar3-vols.part1.rar b/lib/rarfile/test/files/rar3-vols.part1.rar
deleted file mode 100644
index 483812fcb1c3630bdaa006dac709a4327862b8f6..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-vols.part1.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-vols.part1.rar.exp b/lib/rarfile/test/files/rar3-vols.part1.rar.exp
deleted file mode 100644
index 0a2921b6388daf8845c284f7767d75df194590df..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar3-vols.part1.rar.exp
+++ /dev/null
@@ -1,13 +0,0 @@
-Archive: test/files/rar3-vols.part1.rar
-FILE: hdrlen=50 datlen=102310
-  flags=0x9022:SPLIT_AFTER,EXTTIME,LONG,D128
-  os=3:UNIX ver=20 mode=0100664 meth=0 cmp=205000 dec=205000 vol=0
-  crc=0x509ad74c (1352324940) date_time=2016-05-24 11:42:37
-  name=vols/bigfile.txt
-  mtime=2016-05-24T11:42:37
-FILE: hdrlen=52 datlen=2050
-  flags=0x9020:EXTTIME,LONG,D128
-  os=3:UNIX ver=20 mode=0100664 meth=0 cmp=2050 dec=2050 vol=2
-  crc=0xd08a1f86 (3498712966) date_time=2016-05-24 11:42:43
-  name=vols/smallfile.txt
-  mtime=2016-05-24T11:42:43
diff --git a/lib/rarfile/test/files/rar3-vols.part2.rar b/lib/rarfile/test/files/rar3-vols.part2.rar
deleted file mode 100644
index 0fa8f87fae09ce30ac1e25b2b1f5610fdc706ea0..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-vols.part2.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-vols.part2.rar.exp b/lib/rarfile/test/files/rar3-vols.part2.rar.exp
deleted file mode 100644
index 0fd1a78db09f1719136318b3f432f0eb6b77f020..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar3-vols.part2.rar.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-Archive: test/files/rar3-vols.part2.rar
- --- test/files/rar3-vols.part2.rar is middle part of multi-vol archive ---
diff --git a/lib/rarfile/test/files/rar3-vols.part3.rar b/lib/rarfile/test/files/rar3-vols.part3.rar
deleted file mode 100644
index 5d32e160fded21ab5cd2a34a91d13305419c30dd..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar3-vols.part3.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar3-vols.part3.rar.exp b/lib/rarfile/test/files/rar3-vols.part3.rar.exp
deleted file mode 100644
index 9a125f41de94b4c82b38ccdbd6bd536a206f11d9..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar3-vols.part3.rar.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-Archive: test/files/rar3-vols.part3.rar
- --- test/files/rar3-vols.part3.rar is middle part of multi-vol archive ---
diff --git a/lib/rarfile/test/files/rar5-blake.rar b/lib/rarfile/test/files/rar5-blake.rar
deleted file mode 100644
index 48aa9233beb7d55efcf0defec1109f74d15b7c5a..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-blake.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-blake.rar.exp b/lib/rarfile/test/files/rar5-blake.rar.exp
deleted file mode 100644
index f6d606f33a020f1871c977979afc7572742d138c..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-blake.rar.exp
+++ /dev/null
@@ -1,22 +0,0 @@
-Archive: test/files/rar5-blake.rar
-  comment='RAR5 archive - blake\n'
-R5_FILE: hdrlen=76 datlen=55 hdr_extra=46
-  block_flags=0x0003:EXTRA,DATA
-  name=stest1.txt
-  file_flags=0x0000:-
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=55 dec=2048 vol=0
-  blake2sp=7cd5c1ac31f0cf58844a57fb9072c44768dbea1456e37c21e491f4853982ede0
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-R5_FILE: hdrlen=92 datlen=2048 hdr_extra=62
-  block_flags=0x0003:EXTRA,DATA
-  name=stest2.txt
-  file_flags=0x0000:-
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=2048 dec=2048 vol=0
-  blake2sp=7cd5c1ac31f0cf58844a57fb9072c44768dbea1456e37c21e491f4853982ede0
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  ctime=2016-05-22T09:12:33+00:00
-  atime=2016-05-22T09:12:37+00:00
diff --git a/lib/rarfile/test/files/rar5-crc.rar b/lib/rarfile/test/files/rar5-crc.rar
deleted file mode 100644
index 2b087d70462137d93b62ac976fa39a4b67fbc06b..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-crc.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-crc.rar.exp b/lib/rarfile/test/files/rar5-crc.rar.exp
deleted file mode 100644
index d7d41ee9e6431a6fb30326339a9a4d46090b8a6d..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-crc.rar.exp
+++ /dev/null
@@ -1,22 +0,0 @@
-Archive: test/files/rar5-crc.rar
-  comment='RAR5 archive - crc\n'
-R5_FILE: hdrlen=45 datlen=55 hdr_extra=11
-  block_flags=0x0003:EXTRA,DATA
-  name=stest1.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=55 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-R5_FILE: hdrlen=61 datlen=2048 hdr_extra=27
-  block_flags=0x0003:EXTRA,DATA
-  name=stest2.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=2048 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  ctime=2016-05-22T09:12:33+00:00
-  atime=2016-05-22T09:12:37+00:00
diff --git a/lib/rarfile/test/files/rar5-dups.rar b/lib/rarfile/test/files/rar5-dups.rar
deleted file mode 100644
index 8085bcd0c5a826ab78c46d83d252ac3d7f8f0513..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-dups.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-dups.rar.exp b/lib/rarfile/test/files/rar5-dups.rar.exp
deleted file mode 100644
index 5e0f3a44f402959ac9dee259c7f538aef52b575c..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-dups.rar.exp
+++ /dev/null
@@ -1,90 +0,0 @@
-Archive: test/files/rar5-dups.rar
-R5_FILE: hdrlen=43 datlen=55 hdr_extra=11
-  block_flags=0x0003:EXTRA,DATA
-  name=stest1.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=55 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-R5_FILE: hdrlen=58 datlen=0 hdr_extra=26
-  block_flags=0x0003:EXTRA,DATA
-  name=stest2.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=FILE_COPY flags=0:- destination=stest1.txt
-R5_FILE: hdrlen=58 datlen=0 hdr_extra=26
-  block_flags=0x0003:EXTRA,DATA
-  name=stest3.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=FILE_COPY flags=0:- destination=stest1.txt
-R5_FILE: hdrlen=58 datlen=0 hdr_extra=26
-  block_flags=0x0003:EXTRA,DATA
-  name=stest4.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=FILE_COPY flags=0:- destination=stest1.txt
-R5_FILE: hdrlen=58 datlen=0 hdr_extra=26
-  block_flags=0x0003:EXTRA,DATA
-  name=stest5.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=FILE_COPY flags=0:- destination=stest1.txt
-R5_FILE: hdrlen=58 datlen=0 hdr_extra=26
-  block_flags=0x0003:EXTRA,DATA
-  name=stest6.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=FILE_COPY flags=0:- destination=stest1.txt
-R5_FILE: hdrlen=58 datlen=0 hdr_extra=26
-  block_flags=0x0003:EXTRA,DATA
-  name=stest7.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=FILE_COPY flags=0:- destination=stest1.txt
-R5_FILE: hdrlen=58 datlen=0 hdr_extra=26
-  block_flags=0x0003:EXTRA,DATA
-  name=stest8.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=FILE_COPY flags=0:- destination=stest1.txt
-R5_FILE: hdrlen=58 datlen=0 hdr_extra=26
-  block_flags=0x0003:EXTRA,DATA
-  name=stest9.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=FILE_COPY flags=0:- destination=stest1.txt
diff --git a/lib/rarfile/test/files/rar5-hlink.rar b/lib/rarfile/test/files/rar5-hlink.rar
deleted file mode 100644
index bc1d2f321ed95a0002fe734ffbacdd82b47dcb9e..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-hlink.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-hlink.rar.exp b/lib/rarfile/test/files/rar5-hlink.rar.exp
deleted file mode 100644
index 45664d225a19e8b5d45a1bff59930496c973f0b8..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-hlink.rar.exp
+++ /dev/null
@@ -1,30 +0,0 @@
-Archive: test/files/rar5-hlink.rar
-R5_FILE: hdrlen=37 datlen=55 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=stest1.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100664 cmp=55 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-R5_FILE: hdrlen=53 datlen=0 hdr_extra=15
-  block_flags=0x0003:EXTRA,DATA
-  name=stest2.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100664 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=HARD_LINK flags=0:- destination=stest1.txt
-R5_FILE: hdrlen=53 datlen=0 hdr_extra=15
-  block_flags=0x0003:EXTRA,DATA
-  name=stest3.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100664 cmp=0 dec=2048 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  redir: type=HARD_LINK flags=0:- destination=stest1.txt
diff --git a/lib/rarfile/test/files/rar5-hpsw.rar b/lib/rarfile/test/files/rar5-hpsw.rar
deleted file mode 100644
index ed6d2f1b4bfb7555897312f8687b6026bb2ed432..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-hpsw.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-hpsw.rar.exp b/lib/rarfile/test/files/rar5-hpsw.rar.exp
deleted file mode 100644
index fc6b85ed2d3d0615a2d8346bf96f883c9132885f..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-hpsw.rar.exp
+++ /dev/null
@@ -1,24 +0,0 @@
-Archive: test/files/rar5-hpsw.rar
-  comment='RAR5 archive - hdr-password\n'
-R5_FILE: hdrlen=94 datlen=64 hdr_extra=60
-  block_flags=0x0003:EXTRA,DATA
-  name=stest1.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=64 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  algo=0:AES256 enc_flags=0001:CHECKVAL kdf_lg=15 kdf_count=32768 salt=a798cbfb5c85ea540a42d4d4c2872790 iv=ef62e5dedb427780eb0de68de9c00a88 checkval=1d29a48fc21e63ea4a5f40d3
-R5_FILE: hdrlen=110 datlen=2048 hdr_extra=76
-  block_flags=0x0003:EXTRA,DATA
-  name=stest2.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=2048 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  ctime=2016-05-22T09:12:33+00:00
-  atime=2016-05-22T09:12:37+00:00
-  algo=0:AES256 enc_flags=0001:CHECKVAL kdf_lg=15 kdf_count=32768 salt=a798cbfb5c85ea540a42d4d4c2872790 iv=0c41c02fa9c09e63bb0dda405e7f10b8 checkval=1d29a48fc21e63ea4a5f40d3
diff --git a/lib/rarfile/test/files/rar5-psw-blake.rar b/lib/rarfile/test/files/rar5-psw-blake.rar
deleted file mode 100644
index 4a3300819e58c95c16d043c4ec1a36ff80ff59fa..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-psw-blake.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-psw-blake.rar.exp b/lib/rarfile/test/files/rar5-psw-blake.rar.exp
deleted file mode 100644
index 88e07c01c70581fe7035d04c124461ed4e69a4e0..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-psw-blake.rar.exp
+++ /dev/null
@@ -1,24 +0,0 @@
-Archive: test/files/rar5-psw-blake.rar
-  comment='RAR5 archive - nohdr-password-blake\n'
-R5_FILE: hdrlen=125 datlen=64 hdr_extra=95
-  block_flags=0x0003:EXTRA,DATA
-  name=stest1.txt
-  file_flags=0x0000:-
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=64 dec=2048 vol=0
-  blake2sp=5cdbd3f49f594a35edb63c923b64abdadba3bf1468b6259ee1e1c54ba2f0d65e
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  algo=0:AES256 enc_flags=0003:CHECKVAL,TWEAKED kdf_lg=15 kdf_count=32768 salt=9db5988f74a009519a4073bdfda0e046 iv=4209dcd38ab7be9299ab3e74d3abc760 checkval=3978909499ddfdb80dfc899e
-R5_FILE: hdrlen=142 datlen=2048 hdr_extra=111
-  block_flags=0x0003:EXTRA,DATA
-  name=stest2.txt
-  file_flags=0x0000:-
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=2048 dec=2048 vol=0
-  blake2sp=5cdbd3f49f594a35edb63c923b64abdadba3bf1468b6259ee1e1c54ba2f0d65e
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  ctime=2016-05-22T09:12:33+00:00
-  atime=2016-05-22T09:12:37+00:00
-  algo=0:AES256 enc_flags=0003:CHECKVAL,TWEAKED kdf_lg=15 kdf_count=32768 salt=9db5988f74a009519a4073bdfda0e046 iv=e420cdee55a90d84a6341ba90d41d6e0 checkval=3978909499ddfdb80dfc899e
diff --git a/lib/rarfile/test/files/rar5-psw.rar b/lib/rarfile/test/files/rar5-psw.rar
deleted file mode 100644
index f01e63aac6e3f899cf30c505fcf50bef46e44154..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-psw.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-psw.rar.exp b/lib/rarfile/test/files/rar5-psw.rar.exp
deleted file mode 100644
index 3250a418afd9ecc97022f2e62825579d2299dbb5..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-psw.rar.exp
+++ /dev/null
@@ -1,24 +0,0 @@
-Archive: test/files/rar5-psw.rar
-  comment='RAR5 archive - nohdr-password\n'
-R5_FILE: hdrlen=94 datlen=64 hdr_extra=60
-  block_flags=0x0003:EXTRA,DATA
-  name=stest1.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=64 dec=2048 vol=0
-  crc=0xba28eeea (3123244778)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  algo=0:AES256 enc_flags=0003:CHECKVAL,TWEAKED kdf_lg=15 kdf_count=32768 salt=7e982cdd1ae21c36c7a391da8d088a68 iv=a03aad26f4827b87d54c725ce73b6967 checkval=8151cff63649c16e186a220f
-R5_FILE: hdrlen=110 datlen=2048 hdr_extra=76
-  block_flags=0x0003:EXTRA,DATA
-  name=stest2.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=2048 dec=2048 vol=0
-  crc=0xba28eeea (3123244778)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  ctime=2016-05-22T09:12:33+00:00
-  atime=2016-05-22T09:12:37+00:00
-  algo=0:AES256 enc_flags=0003:CHECKVAL,TWEAKED kdf_lg=15 kdf_count=32768 salt=7e982cdd1ae21c36c7a391da8d088a68 iv=df83fa0cd86e88b8b6f851467d7949d2 checkval=8151cff63649c16e186a220f
diff --git a/lib/rarfile/test/files/rar5-quick-open.rar b/lib/rarfile/test/files/rar5-quick-open.rar
deleted file mode 100644
index ef9a1b21e54de94cfa78c57523d190eefc5d7525..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-quick-open.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-quick-open.rar.exp b/lib/rarfile/test/files/rar5-quick-open.rar.exp
deleted file mode 100644
index 7cd41fd150137a35740dfc39ed2ec2decf215758..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-quick-open.rar.exp
+++ /dev/null
@@ -1,19 +0,0 @@
-Archive: test/files/rar5-quick-open.rar
-R5_FILE: hdrlen=37 datlen=55 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=stest1.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=55 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-R5_FILE: hdrlen=37 datlen=55 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=stest2.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=55 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
diff --git a/lib/rarfile/test/files/rar5-solid-qo.rar b/lib/rarfile/test/files/rar5-solid-qo.rar
deleted file mode 100644
index 084caaef017d97ee363eb698c99cc4e7d7dbc7c2..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-solid-qo.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-solid-qo.rar.exp b/lib/rarfile/test/files/rar5-solid-qo.rar.exp
deleted file mode 100644
index 9b217c85e33de00dd31e2410f588e01c87541aab..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-solid-qo.rar.exp
+++ /dev/null
@@ -1,37 +0,0 @@
-Archive: test/files/rar5-solid-qo.rar
-R5_FILE: hdrlen=45 datlen=59 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=somedir/stest1.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=3 dict=3 solid=False
-  os=1:UNIX mode=0100644 cmp=59 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2016-05-22 09:48:36
-  mtime=2016-05-22T09:48:36+00:00
-R5_FILE: hdrlen=37 datlen=12 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=stest1.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=3 dict=3 solid=True
-  os=1:UNIX mode=0100644 cmp=12 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-R5_FILE: hdrlen=45 datlen=12 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=somedir/stest2.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=3 dict=3 solid=True
-  os=1:UNIX mode=0100644 cmp=12 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2016-05-22 09:48:36
-  mtime=2016-05-22T09:48:36+00:00
-R5_FILE: hdrlen=37 datlen=12 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=stest2.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=3 dict=3 solid=True
-  os=1:UNIX mode=0100644 cmp=12 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
diff --git a/lib/rarfile/test/files/rar5-solid.rar b/lib/rarfile/test/files/rar5-solid.rar
deleted file mode 100644
index 277cfe7206417067faf2fbd7c3f0bd7a1bf878a2..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-solid.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-solid.rar.exp b/lib/rarfile/test/files/rar5-solid.rar.exp
deleted file mode 100644
index bffb410c85c19b3b464f159a20a4e859f508f1ab..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-solid.rar.exp
+++ /dev/null
@@ -1,15 +0,0 @@
-Archive: test/files/rar5-solid.rar
-R5_FILE: hdrlen=33 datlen=58 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=stest1.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=3 dict=3 solid=False
-  os=1:UNIX mode=0100666 cmp=58 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-R5_FILE: hdrlen=33 datlen=13 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=stest2.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=3 dict=3 solid=True
-  os=1:UNIX mode=0100666 cmp=13 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
diff --git a/lib/rarfile/test/files/rar5-times.rar b/lib/rarfile/test/files/rar5-times.rar
deleted file mode 100644
index 999c60631f9b4a7e766d9a963295461992804611..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-times.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-times.rar.exp b/lib/rarfile/test/files/rar5-times.rar.exp
deleted file mode 100644
index 312016618878fec3cfb007a41f2a87c382b2f767..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-times.rar.exp
+++ /dev/null
@@ -1,11 +0,0 @@
-Archive: test/files/rar5-times.rar
-R5_FILE: hdrlen=45 datlen=55 hdr_extra=11
-  block_flags=0x0003:EXTRA,DATA
-  name=stest1.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=3 dict=0 solid=False
-  os=1:UNIX mode=0100644 cmp=55 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682)
-  date_time=2011-06-12 09:53:33
-  mtime=2011-06-12T09:53:33+00:00
-  atime=2016-05-22T09:12:36+00:00
diff --git a/lib/rarfile/test/files/rar5-times2.rar b/lib/rarfile/test/files/rar5-times2.rar
deleted file mode 100644
index 8f150660a316bbc0826c51b6f293b97733bee25b..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-times2.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-times2.rar.exp b/lib/rarfile/test/files/rar5-times2.rar.exp
deleted file mode 100644
index 9acdf2b3cf8c0132a6805b12af20b2858b47b9d1..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-times2.rar.exp
+++ /dev/null
@@ -1,10 +0,0 @@
-Archive: test/files/rar5-times2.rar
-R5_FILE: hdrlen=42 datlen=0 hdr_extra=11
-  block_flags=0x0003:EXTRA,DATA
-  name=afile.txt
-  file_flags=0x0004:CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=0:WINDOWS mode=0x20 cmp=0 dec=0 vol=0
-  crc=0x00000000 (0)
-  date_time=2011-05-10 18:28:47
-  mtime=2011-05-10T18:28:47.899345+00:00
diff --git a/lib/rarfile/test/files/rar5-vols.part1.rar b/lib/rarfile/test/files/rar5-vols.part1.rar
deleted file mode 100644
index 0926f2f8f6d74bd1a7e8c4c1ebe74432c0b8d4e2..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-vols.part1.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-vols.part1.rar.exp b/lib/rarfile/test/files/rar5-vols.part1.rar.exp
deleted file mode 100644
index 669eb90f92a16c5b0e174685e53b3dcd0653b2dc..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-vols.part1.rar.exp
+++ /dev/null
@@ -1,19 +0,0 @@
-Archive: test/files/rar5-vols.part1.rar
-R5_FILE: hdrlen=45 datlen=205000 hdr_extra=0
-  block_flags=0x0012:DATA,SPLIT_AFTER
-  name=vols/bigfile.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100664 cmp=205000 dec=205000 vol=0
-  crc=0x509ad74c (1352324940)
-  date_time=2016-05-24 08:42:37
-  mtime=2016-05-24T08:42:37+00:00
-R5_FILE: hdrlen=45 datlen=2050 hdr_extra=0
-  block_flags=0x0002:DATA
-  name=vols/smallfile.txt
-  file_flags=0x0006:MTIME,CRC32
-  cmp_algo=0 cmp_meth=0 dict=0 solid=False
-  os=1:UNIX mode=0100664 cmp=2050 dec=2050 vol=2
-  crc=0xd08a1f86 (3498712966)
-  date_time=2016-05-24 08:42:43
-  mtime=2016-05-24T08:42:43+00:00
diff --git a/lib/rarfile/test/files/rar5-vols.part2.rar b/lib/rarfile/test/files/rar5-vols.part2.rar
deleted file mode 100644
index d4f55a2f859d0c7b422b99e111db4fc5e8b7e34f..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-vols.part2.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-vols.part2.rar.exp b/lib/rarfile/test/files/rar5-vols.part2.rar.exp
deleted file mode 100644
index 8bdd5793f5994a1809245b08549b89db4038cb9e..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-vols.part2.rar.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-Archive: test/files/rar5-vols.part2.rar
- --- test/files/rar5-vols.part2.rar is middle part of multi-vol archive ---
diff --git a/lib/rarfile/test/files/rar5-vols.part3.rar b/lib/rarfile/test/files/rar5-vols.part3.rar
deleted file mode 100644
index ba6e9924a6db984d321364b8788301cda8c3bfba..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/rar5-vols.part3.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/rar5-vols.part3.rar.exp b/lib/rarfile/test/files/rar5-vols.part3.rar.exp
deleted file mode 100644
index 3ffe8c3734792001f7d8605e49be6adbb73ffa39..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/rar5-vols.part3.rar.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-Archive: test/files/rar5-vols.part3.rar
- --- test/files/rar5-vols.part3.rar is middle part of multi-vol archive ---
diff --git a/lib/rarfile/test/files/seektest.rar b/lib/rarfile/test/files/seektest.rar
deleted file mode 100644
index b1d72bb722beb61852858674bfd00c7df2106fa0..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/seektest.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/seektest.rar.exp b/lib/rarfile/test/files/seektest.rar.exp
deleted file mode 100644
index d77e9aecd93ebf76a292045c4590a3a19263e3f3..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/seektest.rar.exp
+++ /dev/null
@@ -1,13 +0,0 @@
-Archive: test/files/seektest.rar
-FILE: hdrlen=44 datlen=90
-  flags=0x9020:EXTTIME,LONG,D128
-  os=3:UNIX ver=29 mode=0100644 meth=5 cmp=90 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682) date_time=2011-06-12 12:53:33
-  name=stest1.txt
-  mtime=2011-06-12T12:53:33
-FILE: hdrlen=44 datlen=2048
-  flags=0x9020:EXTTIME,LONG,D128
-  os=3:UNIX ver=20 mode=0100644 meth=0 cmp=2048 dec=2048 vol=0
-  crc=0xc5b7e6a2 (3317163682) date_time=2011-06-12 12:53:33
-  name=stest2.txt
-  mtime=2011-06-12T12:53:33
diff --git a/lib/rarfile/test/files/unicode.rar b/lib/rarfile/test/files/unicode.rar
deleted file mode 100644
index 7453ac0fb56b441aa72513c14d38f8a098525a66..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/unicode.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/unicode.rar.exp b/lib/rarfile/test/files/unicode.rar.exp
deleted file mode 100644
index 131d7830af7cae734bef68001fdf725961a38815..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/unicode.rar.exp
+++ /dev/null
@@ -1,11 +0,0 @@
-Archive: test/files/unicode.rar
-FILE: hdrlen=54 datlen=17
-  flags=0x8080:LONG,D1024
-  os=3:UNIX ver=29 mode=0100644 meth=5 cmp=17 dec=2 vol=0
-  crc=0x6751fc53 (1733426259) date_time=2011-07-06 16:48:04
-  name=уииоотивл.txt
-FILE: hdrlen=52 datlen=13
-  flags=0x8090:SOLID,LONG,D1024
-  os=3:UNIX ver=29 mode=0100644 meth=5 cmp=13 dec=2 vol=0
-  crc=0x6751fc53 (1733426259) date_time=2011-07-06 16:48:04
-  name=𝐀𝐁𝐁𝐂.txt
diff --git a/lib/rarfile/test/files/unicode2.rar b/lib/rarfile/test/files/unicode2.rar
deleted file mode 100644
index 93de5b1f02f023abfd35e8577fc90ea899ef4f89..0000000000000000000000000000000000000000
Binary files a/lib/rarfile/test/files/unicode2.rar and /dev/null differ
diff --git a/lib/rarfile/test/files/unicode2.rar.exp b/lib/rarfile/test/files/unicode2.rar.exp
deleted file mode 100644
index bed334c86ff8cc74f4339d12c811b6075525d9f4..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/files/unicode2.rar.exp
+++ /dev/null
@@ -1,11 +0,0 @@
-Archive: test/files/unicode2.rar
-FILE: hdrlen=62 datlen=2
-  flags=0x8220:UNICODE,LONG,D128
-  os=2:WIN ver=29 mode=0x20 meth=0 cmp=2 dec=2 vol=0
-  crc=0x6751fc53 (1733426259) date_time=2011-07-06 16:48:04
-  name=𝐀𝐁𝐁𝐂.txt
-FILE: hdrlen=59 datlen=2
-  flags=0x8220:UNICODE,LONG,D128
-  os=2:WIN ver=29 mode=0x20 meth=0 cmp=2 dec=2 vol=0
-  crc=0x6751fc53 (1733426259) date_time=2011-07-06 16:48:04
-  name=уииоотивл.txt
diff --git a/lib/rarfile/test/run_dump.sh b/lib/rarfile/test/run_dump.sh
deleted file mode 100755
index 59d29dda5495b18122a12ac31fa3358417957410..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/run_dump.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#! /bin/sh
-
-PYTHON="$1"
-tag="$2"
-
-test -n "$tag" || { echo "usage: $0 PY TAG"; exit 1; }
-
-mkdir -p tmp
-diffs="tmp/output.$tag.diffs"
-rm -f "$diffs"
-
-quiet=""
-quiet="1"
-
-vprintf=printf
-vecho=echo
-
-if test -n "$quiet"; then
-  echo "[$tag] testing structure dump"
-  vprintf=true
-  vecho=true
-fi
-
-result=0
-for f in test/files/*.rar; do
-  $vprintf "%s -> %-30s .. " "$tag" "$f"
-  "$PYTHON" dumprar.py -v -ppassword "$f" > "$f.$tag"
-  if diff -uw "$f.exp" "$f.$tag" > /dev/null; then
-    $vecho "ok"
-    rm -f "$f.$tag"
-  else
-    $vecho "FAIL"
-    if test -n "$quiet"; then
-      printf "[%s] %-30s .. FAILED\n" "$tag" "$f"
-    fi
-    echo "#### $py ####" >> "$diffs"
-    diff -uw "$f.exp" "$f.$tag" >> "$diffs"
-    result=1
-  fi
-done
-
-exit $result
-
diff --git a/lib/rarfile/test/run_dump_all.sh b/lib/rarfile/test/run_dump_all.sh
deleted file mode 100755
index 2e55fc5f30e2c2d3fdcf0be93c941fffc4f6b7f2..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/run_dump_all.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#! /bin/sh
-
-JAVA_OPTIONS="-Dpython.path=`pwd`/.."
-export JAVA_OPTIONS
-
-plist="python2.7 python3.2 python3.3 python3.4 python3.5 python3.6 pypy jython jython2.7"
-
-result=0
-for py in $plist; do
-  if which $py > /dev/null; then
-    ./test/run_dump.sh "$py" "$py" || result=1
-    echo ""
-  else
-    echo $py not available
-    echo ""
-  fi
-done
-
diff --git a/lib/rarfile/test/test_api.py b/lib/rarfile/test/test_api.py
deleted file mode 100644
index b3a60524104e996d79f16aba3bc80af54f729608..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/test_api.py
+++ /dev/null
@@ -1,233 +0,0 @@
-"""API tests.
-"""
-
-import sys
-import io
-import os
-
-from nose.tools import *
-
-import rarfile
-
-#
-# test start
-#
-
-@raises(NotImplementedError)
-def test_bad_arc_mode_w():
-    rarfile.RarFile('test/files/rar3-comment-plain.rar', 'w')
-
-@raises(NotImplementedError)
-def test_bad_arc_mode_rb():
-    rarfile.RarFile('test/files/rar3-comment-plain.rar', 'rb')
-
-@raises(ValueError)
-def test_bad_errs():
-    rarfile.RarFile('test/files/rar3-comment-plain.rar', 'r', errors='foo')
-
-@raises(NotImplementedError)
-def test_bad_open_mode_w():
-    rf = rarfile.RarFile('test/files/rar3-comment-plain.rar')
-    rf.open('qwe', 'w')
-
-@raises(rarfile.PasswordRequired)
-def test_bad_open_psw():
-    rf = rarfile.RarFile('test/files/rar3-comment-psw.rar')
-    rf.open('file1.txt')
-
-@raises(ValueError)
-def test_bad_filelike():
-    rarfile.is_rarfile(bytearray(10))
-
-def test_open_psw_late_rar3():
-    rf = rarfile.RarFile('test/files/rar3-comment-psw.rar')
-    rf.open('file1.txt', 'r', 'password').read()
-    rf.open('file1.txt', 'r', u'password').read()
-
-def test_open_psw_late_rar5():
-    rf = rarfile.RarFile('test/files/rar5-psw.rar')
-    rf.open('stest1.txt', 'r', 'password').read()
-    rf.open('stest1.txt', 'r', u'password').read()
-
-def test_read_psw_late_rar3():
-    rf = rarfile.RarFile('test/files/rar3-comment-psw.rar')
-    rf.read('file1.txt', 'password')
-    rf.read('file1.txt', u'password')
-
-def test_read_psw_late_rar5():
-    rf = rarfile.RarFile('test/files/rar5-psw.rar')
-    rf.read('stest1.txt', 'password')
-    rf.read('stest1.txt', u'password')
-
-@raises(rarfile.BadRarFile) # needs better error
-def test_open_psw_late():
-    rf = rarfile.RarFile('test/files/rar5-psw.rar')
-    rf.read('stest1.txt', 'password222')
-
-def test_detection():
-    eq_(rarfile.is_rarfile('test/files/ctime4.rar.exp'), False)
-    eq_(rarfile.is_rarfile('test/files/ctime4.rar'), True)
-    eq_(rarfile.is_rarfile('test/files/rar5-crc.rar'), True)
-
-@raises(rarfile.BadRarFile)
-def test_signature_error():
-    rarfile.RarFile('test/files/ctime4.rar.exp')
-
-@raises(rarfile.BadRarFile)
-def test_signature_error_mem():
-    data = io.BytesIO(b'x'*40)
-    rarfile.RarFile(data)
-
-def test_with():
-    with rarfile.RarFile('test/files/rar5-crc.rar') as rf:
-        with rf.open('stest1.txt') as f:
-            while 1:
-                buf = f.read(7)
-                if not buf:
-                    break
-
-def test_readline():
-    def load_readline(rf, fn):
-        with rf.open(fn) as f:
-            tr = io.TextIOWrapper(io.BufferedReader(f))
-            res = []
-            while 1:
-                ln = tr.readline()
-                if not ln:
-                    break
-                res.append(ln)
-        return res
-
-    rf = rarfile.RarFile('test/files/seektest.rar')
-    v1 = load_readline(rf, 'stest1.txt')
-    v2 = load_readline(rf, 'stest2.txt')
-    eq_(len(v1), 512)
-    eq_(v1, v2)
-
-_old_stdout = None
-_buf_stdout = None
-
-def install_buf():
-    global _old_stdout, _buf_stdout
-    _buf_stdout = io.StringIO()
-    _old_stdout = sys.stdout
-    sys.stdout = _buf_stdout
-
-def uninstall_buf():
-    sys.stdout = _old_stdout
-
-@with_setup(install_buf, uninstall_buf)
-def test_printdir():
-    rf = rarfile.RarFile('test/files/seektest.rar')
-    rf.printdir()
-    eq_(_buf_stdout.getvalue(), u'stest1.txt\nstest2.txt\n')
-
-def test_testrar():
-    rf = rarfile.RarFile('test/files/seektest.rar')
-    rf.testrar()
-
-def test_testrar_mem():
-    arc = open('test/files/seektest.rar', 'rb').read()
-    rf = rarfile.RarFile(io.BytesIO(arc))
-    rf.testrar()
-
-def clean_extract_dirs():
-    for dn in ['tmp/extract1', 'tmp/extract2', 'tmp/extract3']:
-        for fn in ['stest1.txt', 'stest2.txt']:
-            try:
-                os.unlink(os.path.join(dn, fn))
-            except OSError:
-                pass
-        try:
-            os.rmdir(dn)
-        except OSError:
-            pass
-
-@with_setup(clean_extract_dirs, clean_extract_dirs)
-def test_extract():
-    os.makedirs('tmp/extract1')
-    os.makedirs('tmp/extract2')
-    os.makedirs('tmp/extract3')
-    rf = rarfile.RarFile('test/files/seektest.rar')
-
-    rf.extractall('tmp/extract1')
-    assert_true(os.path.isfile('tmp/extract1/stest1.txt'))
-    assert_true(os.path.isfile('tmp/extract1/stest2.txt'))
-
-    rf.extract('stest1.txt', 'tmp/extract2')
-    assert_true(os.path.isfile('tmp/extract2/stest1.txt'))
-    assert_false(os.path.isfile('tmp/extract2/stest2.txt'))
-
-    inf = rf.getinfo('stest2.txt')
-    rf.extract(inf, 'tmp/extract3')
-    assert_false(os.path.isfile('tmp/extract3/stest1.txt'))
-    assert_true(os.path.isfile('tmp/extract3/stest2.txt'))
-
-    rf.extractall('tmp/extract2', ['stest1.txt'])
-    assert_true(os.path.isfile('tmp/extract2/stest1.txt'))
-
-    rf.extractall('tmp/extract3', [rf.getinfo('stest2.txt')])
-    assert_true(os.path.isfile('tmp/extract3/stest2.txt'))
-
-@with_setup(clean_extract_dirs, clean_extract_dirs)
-def test_extract_mem():
-    os.makedirs('tmp/extract1')
-    os.makedirs('tmp/extract2')
-    os.makedirs('tmp/extract3')
-    arc = open('test/files/seektest.rar', 'rb').read()
-    rf = rarfile.RarFile(io.BytesIO(arc))
-
-    rf.extractall('tmp/extract1')
-    assert_true(os.path.isfile('tmp/extract1/stest1.txt'))
-    assert_true(os.path.isfile('tmp/extract1/stest2.txt'))
-
-    rf.extract('stest1.txt', 'tmp/extract2')
-    assert_true(os.path.isfile('tmp/extract2/stest1.txt'))
-    assert_false(os.path.isfile('tmp/extract2/stest2.txt'))
-
-    inf = rf.getinfo('stest2.txt')
-    rf.extract(inf, 'tmp/extract3')
-    assert_false(os.path.isfile('tmp/extract3/stest1.txt'))
-    assert_true(os.path.isfile('tmp/extract3/stest2.txt'))
-
-def test_infocb():
-    infos = []
-    def info_cb(info):
-        infos.append( (info.type, info.needs_password(), info.isdir(), info._must_disable_hack()) )
-
-    rf = rarfile.RarFile('test/files/seektest.rar', info_callback=info_cb)
-    eq_(infos, [
-        (rarfile.RAR_BLOCK_MAIN, False, False, False),
-        (rarfile.RAR_BLOCK_FILE, False, False, False),
-        (rarfile.RAR_BLOCK_FILE, False, False, False),
-        (rarfile.RAR_BLOCK_ENDARC, False, False, False)])
-
-    infos = []
-    rf = rarfile.RarFile('test/files/rar5-solid-qo.rar', info_callback=info_cb)
-    eq_(infos, [
-        (rarfile.RAR_BLOCK_MAIN, False, False, True),
-        (rarfile.RAR_BLOCK_FILE, False, False, False),
-        (rarfile.RAR_BLOCK_FILE, False, False, True),
-        (rarfile.RAR_BLOCK_FILE, False, False, True),
-        (rarfile.RAR_BLOCK_FILE, False, False, True),
-        (rarfile.RAR_BLOCK_SUB, False, False, False),
-        (rarfile.RAR_BLOCK_ENDARC, False, False, False)])
-
-def install_alt_tool():
-    rarfile.ORIG_UNRAR_TOOL = 'x_unrar_missing'
-    rarfile._check_unrar_tool()
-
-def uninstall_alt_tool():
-    rarfile.ORIG_UNRAR_TOOL = 'unrar'
-    rarfile._check_unrar_tool()
-
-def test_read_rar3():
-    with rarfile.RarFile('test/files/seektest.rar') as rf:
-        for fn in rf.namelist():
-            rf.read(fn)
-
-@with_setup(install_alt_tool, uninstall_alt_tool)
-def test_alt_tool():
-    #test_read_rar3()
-    pass
-
diff --git a/lib/rarfile/test/test_crypto.py b/lib/rarfile/test/test_crypto.py
deleted file mode 100644
index 909f0ee37f1e4823d5859a4da704c723c3fd564a..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/test_crypto.py
+++ /dev/null
@@ -1,44 +0,0 @@
-"""Crypto tests.
-"""
-
-from __future__ import division, print_function
-
-from binascii import unhexlify
-
-from nose.tools import *
-
-import rarfile
-
-try:
-    from cryptography.hazmat.primitives.ciphers import algorithms, modes, Cipher
-    from cryptography.hazmat.backends import default_backend
-    def aes_encrypt(key, iv, data):
-        ciph = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend())
-        enc = ciph.encryptor()
-        return enc.update(data)
-except ImportError:
-    pass
-
-if rarfile._have_crypto:
-    def test_aes128_cbc():
-        data = b'0123456789abcdef' * 2
-        key = b'\x02' * 16
-        iv = b'\x80' * 16
-
-        #encdata = aes_encrypt(key, iv, data)
-        encdata = unhexlify('4b0d438b4a1b972bd4ab81cd64674dcce4b0158090fbe616f455354284d53502')
-
-        ctx = rarfile.AES_CBC_Decrypt(key, iv)
-        eq_(ctx.decrypt(encdata), data)
-
-    def test_aes256_cbc():
-        data = b'0123456789abcdef' * 2
-        key = b'\x52' * 32
-        iv = b'\x70' * 16
-
-        #encdata = aes_encrypt(key, iv, data)
-        encdata = unhexlify('24988f387592e4d95b6eaab013137a221f81b25aa7ecde0ef4f4d7a95f92c250')
-
-        ctx = rarfile.AES_CBC_Decrypt(key, iv)
-        eq_(ctx.decrypt(encdata), data)
-
diff --git a/lib/rarfile/test/test_format.py b/lib/rarfile/test/test_format.py
deleted file mode 100644
index db88b639029fcaade5d7d51d6d11d2d85f7c0c9d..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/test_format.py
+++ /dev/null
@@ -1,223 +0,0 @@
-"""Format details.
-"""
-
-import sys
-import io
-import os
-
-from datetime import datetime
-from nose.tools import *
-
-import rarfile
-
-def render_date(dt):
-    if isinstance(dt, datetime):
-        return dt.isoformat('T')
-    elif isinstance(dt, tuple):
-        return '%04d-%02d-%02d %02d:%02d:%02d' % dt
-    else:
-        return dt
-
-def mkitem(**kwargs):
-    res = {}
-    for k in kwargs:
-        if kwargs[k] is not None:
-            res[k] = kwargs[k]
-    return res
-
-def dumparc(rf):
-    res = []
-    for item in rf.infolist():
-        info = mkitem(fn=item.filename,
-                      file_size=item.file_size,
-                      compress_size=item.compress_size,
-                      CRC=item.CRC,
-                      date_time=render_date(item.date_time),
-                      arctime=render_date(item.arctime),
-                      mtime=render_date(item.mtime),
-                      atime=render_date(item.atime),
-                      ctime=render_date(item.ctime),
-                      comment=item.comment,
-                      extract_version=item.extract_version,
-                      compress_type=item.compress_type,
-                      mode=item.mode,
-                      host_os=item.host_os)
-        res.append(info)
-    return res
-
-def diffs(a, b):
-    if len(a) != len(b):
-        return 'Different lengths'
-    problems = []
-    for i, xa in enumerate(a):
-        xb = b[i]
-        for k in xa:
-            if k not in xb:
-                problems.append('NewKey(%d,%s)=%r' % (i, k, xa[k]))
-        for k in xb:
-            if k not in xa:
-                problems.append('MissingKey(%d,%s)=%r' % (i, k, xb[k]))
-        for k in xa:
-            if k in xb and xa[k] != xb[k]:
-                problems.append('ErrValue(%d,%s):got=%r/exp=%r' % (i, k, xa[k], xb[k]))
-    return '; '.join(problems)
-
-def cmp_struct(a, b):
-    eq_(a, b, diffs(a, b))
-
-#
-# test start
-#
-
-def test_rar3_header_encryption():
-    r = rarfile.RarFile('test/files/rar3-comment-hpsw.rar', 'r')
-    eq_(r.needs_password(), True)
-    eq_(r.comment, None)
-    eq_(r.namelist(), [])
-
-    try:
-        r.setpassword('password')
-        assert_true(r.needs_password())
-        eq_(r.namelist(), [u'file1.txt', u'file2.txt'])
-        assert_not_equal(r.comment, None)
-        eq_(r.comment, 'RARcomment\n')
-    except rarfile.NoCrypto:
-        pass
-
-def test_rar5_header_encryption():
-    r = rarfile.RarFile('test/files/rar5-hpsw.rar')
-    eq_(r.needs_password(), True)
-    eq_(r.comment, None)
-    eq_(r.namelist(), [])
-
-    try:
-        r.setpassword('password')
-        assert_true(r.needs_password())
-        eq_(r.namelist(), [u'stest1.txt', u'stest2.txt'])
-        assert_not_equal(r.comment, None)
-        eq_(r.comment, 'RAR5 archive - hdr-password\n')
-    except rarfile.NoCrypto:
-        pass
-    r.close()
-
-def get_vol_info(extver=20, tz='', hr='11'):
-    return [
-        mkitem(CRC=1352324940,
-               date_time='2016-05-24 %s:42:37%s' % (hr, ''),
-               mtime='2016-05-24T%s:42:37%s' % (hr, tz),
-               compress_type=48,
-               compress_size=205000,
-               extract_version=extver,
-               file_size=205000,
-               mode=33204,
-               host_os=3,
-               fn=u'vols/bigfile.txt'),
-        mkitem(CRC=3498712966,
-               date_time='2016-05-24 %s:42:43%s' % (hr, ''),
-               mtime='2016-05-24T%s:42:43%s' % (hr, tz),
-               extract_version=extver,
-               compress_type=48,
-               compress_size=2050,
-               file_size=2050,
-               mode=33204,
-               host_os=3,
-               fn=u'vols/smallfile.txt')]
-
-def test_rar3_vols():
-    r = rarfile.RarFile('test/files/rar3-vols.part1.rar')
-    eq_(r.needs_password(), False)
-    eq_(r.comment, None)
-    eq_(r.strerror(), None)
-    cmp_struct(dumparc(r), get_vol_info())
-    eq_(r.volumelist(), [
-        'test/files/rar3-vols.part1.rar',
-        'test/files/rar3-vols.part2.rar',
-        'test/files/rar3-vols.part3.rar'])
-
-def test_rar3_oldvols():
-    r = rarfile.RarFile('test/files/rar3-old.rar')
-    eq_(r.needs_password(), False)
-    eq_(r.comment, None)
-    eq_(r.strerror(), None)
-    cmp_struct(dumparc(r), get_vol_info())
-    eq_(r.volumelist(), [
-        'test/files/rar3-old.rar',
-        'test/files/rar3-old.r00',
-        'test/files/rar3-old.r01'])
-
-def test_rar5_vols():
-    r = rarfile.RarFile('test/files/rar5-vols.part1.rar')
-    eq_(r.needs_password(), False)
-    eq_(r.comment, None)
-    eq_(r.strerror(), None)
-    cmp_struct(dumparc(r), get_vol_info(50, '+00:00', '08'))
-    eq_(r.volumelist(), [
-        'test/files/rar5-vols.part1.rar',
-        'test/files/rar5-vols.part2.rar',
-        'test/files/rar5-vols.part3.rar'])
-
-def expect_ctime(mtime, ctime):
-    return [mkitem(
-        mtime=mtime,
-        date_time=mtime.split('.')[0].replace('T', ' '),
-        ctime=ctime,
-        compress_size=0,
-        file_size=0,
-        CRC=0,
-        fn=u'afile.txt',
-        extract_version=29,
-        compress_type=48,
-        mode=32,
-        host_os=2)]
-
-def test_rar3_ctime0():
-    r = rarfile.RarFile('test/files/ctime0.rar')
-    cmp_struct(dumparc(r), expect_ctime('2011-05-10T21:28:47.899345', None))
-
-def test_rar3_ctime1():
-    r = rarfile.RarFile('test/files/ctime1.rar')
-    cmp_struct(dumparc(r), expect_ctime('2011-05-10T21:28:47.899345', '2011-05-10T21:28:47'))
-
-def test_rar3_ctime2():
-    r = rarfile.RarFile('test/files/ctime2.rar')
-    cmp_struct(dumparc(r), expect_ctime('2011-05-10T21:28:47.899345', '2011-05-10T21:28:47.897843'))
-
-def test_rar3_ctime3():
-    r = rarfile.RarFile('test/files/ctime3.rar')
-    cmp_struct(dumparc(r), expect_ctime('2011-05-10T21:28:47.899345', '2011-05-10T21:28:47.899328'))
-
-def test_rar3_ctime4():
-    r = rarfile.RarFile('test/files/ctime4.rar')
-    cmp_struct(dumparc(r), expect_ctime('2011-05-10T21:28:47.899345', '2011-05-10T21:28:47.899345'))
-
-def test_rar5_times():
-    r = rarfile.RarFile('test/files/rar5-times.rar')
-    cmp_struct(dumparc(r), [mkitem(
-            fn=u'stest1.txt',
-            file_size=2048,
-            compress_size=55,
-            compress_type=rarfile.RAR_M3,
-            extract_version=50,
-            host_os=rarfile.RAR_OS_UNIX,
-            mode=33188,
-            date_time='2011-06-12 09:53:33',
-            mtime='2011-06-12T09:53:33+00:00',
-            atime='2016-05-22T09:12:36+00:00',
-            CRC=3317163682
-        )])
-
-def test_oldvols():
-    eq_(rarfile._next_oldvol('qq00.part0.rar'), 'qq00.part0.r00')
-    eq_(rarfile._next_oldvol('qq00.part0.r00'), 'qq00.part0.r01')
-    eq_(rarfile._next_oldvol('qq00.part0.r29'), 'qq00.part0.r30')
-    eq_(rarfile._next_oldvol('qq00.part0.r99'), 'qq00.part0.s00')
-
-def test_newvols():
-    eq_(rarfile._next_newvol('qq00.part0.rar'), 'qq00.part1.rar')
-    eq_(rarfile._next_newvol('qq00.part09.rar'), 'qq00.part10.rar')
-    eq_(rarfile._next_newvol('qq00.part99.rar'), 'qq00.paru00.rar')
-
-@raises(rarfile.BadRarName)
-def test_newvols_err():
-    rarfile._next_newvol('xx.rar')
-
diff --git a/lib/rarfile/test/test_hashing.py b/lib/rarfile/test/test_hashing.py
deleted file mode 100644
index d2efb71a0060315e4500bd946adc3d70bd28e319..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/test_hashing.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""Hashing tests.
-"""
-
-from __future__ import division, print_function
-
-from binascii import unhexlify
-
-from nose.tools import *
-
-import rarfile
-
-from rarfile import Blake2SP, CRC32Context, NoHashContext, tohex
-
-def test_nohash():
-    eq_(NoHashContext('').hexdigest(), None)
-    eq_(NoHashContext('asd').hexdigest(), None)
-    md = NoHashContext()
-    md.update('asd')
-    eq_(md.digest(), None)
-
-def test_crc32():
-    eq_(CRC32Context(b'').hexdigest(), '00000000')
-    eq_(CRC32Context(b'Hello').hexdigest(), 'f7d18982')
-    eq_(CRC32Context(b'Bye').hexdigest(), '4f7ad7d4')
-
-    md = CRC32Context()
-    md.update(b'He')
-    md.update(b'll')
-    md.update(b'o')
-    eq_(md.hexdigest(), 'f7d18982')
-
-def xblake2sp(xdata):
-    data = unhexlify(xdata)
-    md = Blake2SP()
-    md.update(data)
-    return md.hexdigest()
-
-def xblake2sp_slow(xdata):
-    data = unhexlify(xdata)
-    md = Blake2SP()
-    buf = memoryview(data)
-    pos = 0
-    while pos < len(buf):
-        md.update(buf[pos : pos+3])
-        pos += 3
-    return md.hexdigest()
-
-
-if rarfile._have_blake2:
-    def test_blake2sp():
-        eq_(Blake2SP(b'').hexdigest(), 'dd0e891776933f43c7d032b08a917e25741f8aa9a12c12e1cac8801500f2ca4f')
-        eq_(Blake2SP(b'Hello').hexdigest(), '0d6bae0db99f99183d060f7994bb94b45c6490b2a0a628b8b1346ebea8ec1d66')
-
-        eq_(xblake2sp(''), 'dd0e891776933f43c7d032b08a917e25741f8aa9a12c12e1cac8801500f2ca4f')
-        eq_(xblake2sp('00'), 'a6b9eecc25227ad788c99d3f236debc8da408849e9a5178978727a81457f7239')
-
-        long1 = '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f3031'
-        eq_(xblake2sp(long1), '270affa6426f1a515c9b76dfc27d181fc2fd57d082a3ba2c1eef071533a6dfb7')
-
-        long2 = long1 * 20
-        eq_(xblake2sp(long2), '24a78d92592d0761a3681f32935225ca55ffb8eb16b55ab9481c89c59a985ff3')
-        eq_(xblake2sp_slow(long2), '24a78d92592d0761a3681f32935225ca55ffb8eb16b55ab9481c89c59a985ff3')
-
-def test_hmac_sha256():
-    eq_(tohex(rarfile.hmac_sha256(b'key', b'data')), '5031fe3d989c6d1537a013fa6e739da23463fdaec3b70137d828e36ace221bd0')
-
-def test_rar3_s2k():
-    exp = ('a160cb31cb262e9231c0b6fc984fbb0d', 'aa54a659fb0c359b30f353a6343fb11d')
-    key, iv = rarfile.rar3_s2k(b'password', unhexlify('00FF00'))
-    eq_((tohex(key), tohex(iv)), exp)
-    key, iv = rarfile.rar3_s2k(u'password', unhexlify('00FF00'))
-    eq_((tohex(key), tohex(iv)), exp)
-
-if rarfile._have_crypto:
-    def test_pbkdf2_hmac_sha256():
-        eq_(tohex(rarfile.pbkdf2_sha256(b'password', b'salt', 100)),
-            '07e6997180cf7f12904f04100d405d34888fdf62af6d506a0ecc23b196fe99d8')
-
diff --git a/lib/rarfile/test/test_korrupt.py b/lib/rarfile/test/test_korrupt.py
deleted file mode 100644
index 422b3b250236293f53b5c8147c356dfc807af89c..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/test_korrupt.py
+++ /dev/null
@@ -1,50 +0,0 @@
-"""test corrupt file parsing.
-"""
-
-import rarfile
-import glob
-import io
-
-def try_read(tmpfn):
-    try:
-        rf = rarfile.RarFile(tmpfn, errors='strict')
-        if rf.needs_password():
-            rf.setpassword('password')
-    except rarfile.Error:
-        return
-    for fn in rf.namelist():
-        try:
-            data = rf.read(fn)
-        except rarfile.Error:
-            pass
-
-def process_rar(rarfn, quick=False):
-    data = open(rarfn, "rb").read()
-    for n in range(len(data)):
-        bad = data[:n]
-        try_read(io.BytesIO(bad))
-
-    crap = b'\x00\xff\x01\x80\x7f'
-    if quick:
-        crap = b'\xff'
-    for n in range(1, len(data)):
-        for i in range(len(crap)):
-            c = crap[i:i+1]
-            bad = data[:n - 1] + c + data[n:]
-            try_read(io.BytesIO(bad))
-
-def test_corrupt_quick_rar3():
-    process_rar("test/files/rar3-comment-plain.rar", True)
-
-def test_corrupt_quick_rar5():
-    process_rar("test/files/rar5-times.rar", True)
-
-def test_corrupt_all():
-    test_rar_list = glob.glob('test/files/*.rar')
-    test_rar_list = []
-    for rar in test_rar_list:
-        process_rar(rar)
-
-if __name__ == '__main__':
-    test_corrupt_quick_rar5()
-
diff --git a/lib/rarfile/test/test_reading.py b/lib/rarfile/test/test_reading.py
deleted file mode 100644
index 15909003ae1c4c1a904aa86aa98d8f6fa5389bce..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/test_reading.py
+++ /dev/null
@@ -1,150 +0,0 @@
-"""Read all test files.
-"""
-
-import io
-
-from glob import glob
-
-import rarfile
-
-from nose.tools import *
-
-_done_reading = set()
-
-def run_reading_normal(fn, comment):
-    try:
-        rf = rarfile.RarFile(fn)
-    except rarfile.NeedFirstVolume:
-        return
-    if rf.needs_password():
-        rf.setpassword('password')
-    eq_(rf.strerror(), None)
-    eq_(rf.comment, comment)
-    for ifn in rf.namelist():
-
-        # full read
-        rf.read(ifn)
-
-        # read from stream
-        item = rf.getinfo(ifn)
-        f = rf.open(ifn)
-        total = 0
-        while 1:
-            buf = f.read(1024)
-            if not buf:
-                break
-            total += len(buf)
-        f.close()
-        eq_(total, item.file_size)
-
-        # read from stream with readinto
-        bbuf = bytearray(1024)
-        with rf.open(ifn) as f:
-            res = f.readinto(memoryview(bbuf))
-            if res == 0:
-                break
-
-def run_reading_inmem(fn, comment):
-    try:
-        rf = rarfile.RarFile(fn)
-    except rarfile.NeedFirstVolume:
-        return
-    if len(rf.volumelist()) > 1:
-        return
-
-    buf = io.open(fn, 'rb').read()
-    run_reading_normal(io.BytesIO(buf), comment)
-
-def run_reading(fn, comment=None):
-    _done_reading.add(fn)
-    run_reading_normal(fn, comment)
-    run_reading_inmem(fn, comment)
-
-def test_reading_rar3_ctime():
-    run_reading('test/files/ctime0.rar')
-    run_reading('test/files/ctime1.rar')
-    run_reading('test/files/ctime2.rar')
-    run_reading('test/files/ctime3.rar')
-    run_reading('test/files/ctime4.rar')
-
-def test_reading_rar2():
-    run_reading('test/files/rar15-comment-lock.rar', u'RARcomment -----')
-    run_reading('test/files/rar15-comment.rar', u'RARcomment -----')
-    run_reading('test/files/rar202-comment-nopsw.rar', u'RARcomment')
-
-def test_reading_rar3():
-    run_reading('test/files/rar3-comment-plain.rar', u'RARcomment\n')
-    run_reading('test/files/seektest.rar')
-    run_reading('test/files/unicode.rar')
-    run_reading('test/files/unicode2.rar')
-
-def test_reading_rar2_psw():
-    run_reading('test/files/rar202-comment-psw.rar', u'RARcomment')
-
-def test_reading_rar3_psw():
-    run_reading('test/files/rar3-comment-psw.rar', u'RARcomment\n')
-
-if rarfile._have_crypto:
-    def test_reading_rar3_hpsw():
-        run_reading('test/files/rar3-comment-hpsw.rar', u'RARcomment\n')
-else:
-    @raises(rarfile.NoCrypto)
-    def test_reading_rar3_hpsw_nocrypto():
-        run_reading('test/files/rar3-comment-hpsw.rar', u'RARcomment\n')
-
-def test_reading_rar3_vols():
-    run_reading('test/files/rar3-old.rar')
-    run_reading('test/files/rar3-vols.part1.rar')
-    run_reading('test/files/rar3-vols.part2.rar')
-    run_reading('test/files/rar3-vols.part3.rar')
-
-def test_reading_rar5_blake():
-    run_reading('test/files/rar5-blake.rar', u'RAR5 archive - blake\n')
-
-def test_reading_rar5_crc():
-    run_reading('test/files/rar5-crc.rar', u'RAR5 archive - crc\n')
-
-def test_reading_rar5_links():
-    run_reading('test/files/rar5-dups.rar')
-    run_reading('test/files/rar5-hlink.rar')
-
-def test_reading_rar5_quick_open():
-    run_reading('test/files/rar5-quick-open.rar')
-
-def test_reading_rar5_solid_qo():
-    run_reading('test/files/rar5-solid-qo.rar')
-
-def test_reading_rar5_solid():
-    run_reading('test/files/rar5-solid.rar')
-
-def test_reading_rar5_times():
-    run_reading('test/files/rar5-times.rar')
-    run_reading('test/files/rar5-times2.rar')
-
-def test_reading_rar5_vols():
-    run_reading('test/files/rar5-vols.part1.rar')
-    run_reading('test/files/rar5-vols.part2.rar')
-    run_reading('test/files/rar5-vols.part3.rar')
-
-if rarfile._have_crypto:
-    def test_reading_rar5_hpsw():
-        run_reading('test/files/rar5-hpsw.rar', u'RAR5 archive - hdr-password\n')
-else:
-    @raises(rarfile.NoCrypto)
-    def test_reading_rar5_hpsw():
-        run_reading('test/files/rar5-hpsw.rar', u'RAR5 archive - hdr-password\n')
-
-def test_reading_rar5_psw_blake():
-    run_reading('test/files/rar5-psw-blake.rar', u'RAR5 archive - nohdr-password-blake\n')
-
-def test_reading_rar5_psw():
-    run_reading('test/files/rar5-psw.rar', u'RAR5 archive - nohdr-password\n')
-
-def test_reading_missed():
-    problems = []
-    missed = []
-    for fn in glob('test/files/*.rar'):
-        if fn not in _done_reading:
-            missed.append(fn)
-    eq_(missed, problems)
-
diff --git a/lib/rarfile/test/test_seek.py b/lib/rarfile/test/test_seek.py
deleted file mode 100644
index 702870417a1f22611d4205cb4a08b1c2d120e709..0000000000000000000000000000000000000000
--- a/lib/rarfile/test/test_seek.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""Test seeking on files.
-"""
-
-import io
-import rarfile
-
-from nose.tools import *
-
-ARC = 'test/files/seektest.rar'
-
-def do_seek(f, pos, lim):
-    ofs = pos*4
-    fsize = lim*4
-
-    if ofs < 0:
-        exp = 0
-    elif ofs > fsize:
-        exp = fsize
-    else:
-        exp = ofs
-
-    f.seek(ofs)
-
-    got = f.tell()
-
-    eq_(got, exp)
-    ln = f.read(4)
-    if got == fsize and ln:
-        raise Exception('unexpected read')
-    if not ln and got < fsize:
-        raise Exception('unexpected read failure')
-    if ln:
-        spos = int(ln)
-        eq_(spos*4, got)
-
-def run_seek(rf, fn):
-    inf = rf.getinfo(fn)
-    cnt = int(inf.file_size / 4)
-    f = rf.open(fn)
-
-    do_seek(f, int(cnt/2), cnt)
-    do_seek(f, 0, cnt)
-
-    for i in range(int(cnt/2)):
-        do_seek(f, i*2, cnt)
-
-    for i in range(cnt):
-        do_seek(f, i*2 - int(cnt / 2), cnt)
-
-    for i in range(cnt + 10):
-        do_seek(f, cnt - i - 5, cnt)
-
-    f.close()
-
-def run_arc(arc, desc):
-    files = ['stest1.txt', 'stest2.txt']
-    rf = rarfile.RarFile(arc)
-    for fn in files:
-        run_seek(rf, fn)
-
-def test_seek_filename():
-    run_arc(ARC, "fn")
-
-def test_seek_stringio():
-    data = open(ARC, 'rb').read()
-
-    # filelike: cStringIO
-    try:
-        import cStringIO
-        run_arc(cStringIO.StringIO(data), "cStringIO")
-    except ImportError:
-        pass
-
-    # filelike: StringIO
-    try:
-        import StringIO
-        run_arc(StringIO.StringIO(data), "StringIO")
-    except ImportError:
-        pass
-
-def test_seek_bytesio():
-    # filelike: io.BytesIO, io.open()
-    data = open(ARC, 'rb').read()
-    run_arc(io.BytesIO(data), "io.BytesIO")
-
-def test_seek_open():
-    # filelike: file()
-    run_arc(open(ARC, 'rb'), "open")
-
-def test_seek_ioopen():
-    # filelike: io.open()
-    run_arc(io.open(ARC, 'rb'), "io.open")
-
diff --git a/lib/rebulk/test/__init__.py b/lib/rebulk/test/__init__.py
deleted file mode 100644
index 0ab48c94bc06fc5d0cbe9808646069afd5406c19..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring
diff --git a/lib/rebulk/test/default_rules_module.py b/lib/rebulk/test/default_rules_module.py
deleted file mode 100644
index 5eed8e0d83a89f642b50e4c0354e1b9f77553c98..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/default_rules_module.py
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
-from ..match import Match
-from ..rules import Rule, RemoveMatch, AppendMatch, RenameMatch, AppendTags, RemoveTags
-
-
-class RuleRemove0(Rule):
-    consequence = RemoveMatch
-    def when(self, matches, context):
-        return matches[0]
-
-
-class RuleAppend0(Rule):
-    consequence = AppendMatch()
-    def when(self, matches, context):
-        return Match(5, 10)
-
-class RuleRename0(Rule):
-    consequence = [RenameMatch('renamed')]
-    def when(self, matches, context):
-        return [Match(5, 10, name="original")]
-
-class RuleRemove1(Rule):
-    consequence = [RemoveMatch()]
-    def when(self, matches, context):
-        return [matches[0]]
-
-class RuleAppend1(Rule):
-    consequence = [AppendMatch]
-    def when(self, matches, context):
-        return [Match(5, 10)]
-
-class RuleRename1(Rule):
-    consequence = RenameMatch('renamed')
-    def when(self, matches, context):
-        return [Match(5, 10, name="original")]
-
-class RuleAppend2(Rule):
-    consequence = [AppendMatch('renamed')]
-    properties = {'renamed': [None]}
-    def when(self, matches, context):
-        return [Match(5, 10)]
-
-class RuleRename2(Rule):
-    consequence = RenameMatch('renamed')
-    def when(self, matches, context):
-        return Match(5, 10, name="original")
-
-class RuleAppend3(Rule):
-    consequence = AppendMatch('renamed')
-    properties = {'renamed': [None]}
-    def when(self, matches, context):
-        return [Match(5, 10)]
-
-class RuleRename3(Rule):
-    consequence = [RenameMatch('renamed')]
-    def when(self, matches, context):
-        return Match(5, 10, name="original")
-
-class RuleAppendTags0(Rule):
-    consequence = AppendTags(['new-tag'])
-    def when(self, matches, context):
-        return matches.named('tags', 0)
-
-class RuleRemoveTags0(Rule):
-    consequence = RemoveTags(['new-tag'])
-    def when(self, matches, context):
-        return matches.named('tags', 0)
-
-class RuleAppendTags1(Rule):
-    consequence = AppendTags(['new-tag'])
-    def when(self, matches, context):
-        return matches.named('tags')
-
-class RuleRemoveTags1(Rule):
-    consequence = RemoveTags(['new-tag'])
-    def when(self, matches, context):
-        return matches.named('tags')
diff --git a/lib/rebulk/test/rebulk_rules_module.py b/lib/rebulk/test/rebulk_rules_module.py
deleted file mode 100644
index 0bd5ef33a18e8e95fe4fb989f9e14df39692d57a..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/rebulk_rules_module.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
-from rebulk.rules import Rule, RemoveMatch, CustomRule
-
-
-class RemoveAllButLastYear(Rule):
-    consequence = RemoveMatch
-    def when(self, matches, context):
-        entries = matches.named('year')
-        return entries[:-1]
-
-
-class PrefixedSuffixedYear(CustomRule):
-    def when(self, matches, context):
-        toRemove = []
-        years = matches.named('year')
-        for year in years:
-            if not matches.previous(year, lambda p: p.name == 'yearPrefix') and \
-                   not matches.next(year, lambda n: n.name == 'yearSuffix'):
-                toRemove.append(year)
-        return toRemove
-
-    def then(self, matches, when_response, context):
-        for to_remove in when_response:
-            matches.remove(to_remove)
-
-
-class PrefixedSuffixedYearNoLambda(Rule):
-    consequence = RemoveMatch
-    def when(self, matches, context):
-        toRemove = []
-        years = matches.named('year')
-        for year in years:
-            if not [m for m in matches.previous(year) if m.name == 'yearPrefix'] and \
-                    not [m for m in matches.next(year) if m.name == 'yearSuffix']:
-                toRemove.append(year)
-        return toRemove
diff --git a/lib/rebulk/test/rules_module.py b/lib/rebulk/test/rules_module.py
deleted file mode 100644
index 887b81da8637c39f0acd303490b1d750a08dd930..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/rules_module.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
-from ..match import Match
-from ..rules import Rule
-
-
-class Rule3(Rule):
-    def when(self, matches, context):
-        return context.get('when')
-
-    def then(self, matches, when_response, context):
-        assert when_response in [True, False]
-        matches.append(Match(3, 4))
-
-
-class Rule2(Rule):
-    dependency = Rule3
-
-    def when(self, matches, context):
-        return True
-
-    def then(self, matches, when_response, context):
-        assert when_response
-        matches.append(Match(3, 4))
-
-
-class Rule1(Rule):
-    dependency = Rule2
-
-    def when(self, matches, context):
-        return True
-
-    def then(self, matches, when_response, context):
-        assert when_response
-        matches.clear()
-
-
-class Rule0(Rule):
-    dependency = Rule1
-
-    def when(self, matches, context):
-        return True
-
-    def then(self, matches, when_response, context):
-        assert when_response
-        matches.append(Match(3, 4))
-
-
-class Rule1Disabled(Rule1):
-    name = "Disabled Rule1"
-
-    def enabled(self, context):
-        return False
diff --git a/lib/rebulk/test/test_chain.py b/lib/rebulk/test/test_chain.py
deleted file mode 100644
index 8238ad638bd7dfd6454d28d3529439c08a682ce5..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_chain.py
+++ /dev/null
@@ -1,303 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, no-member
-import re
-
-from functools import partial
-
-from ..validators import chars_surround
-from ..rebulk import Rebulk, FunctionalPattern, RePattern, StringPattern
-
-
-def test_chain_close():
-    rebulk = Rebulk()
-    ret = rebulk.chain().close()
-
-    assert ret == rebulk
-    assert len(rebulk.effective_patterns()) == 1
-
-
-def test_build_chain():
-    rebulk = Rebulk()
-
-    def digit(input_string):
-        i = input_string.find("1849")
-        if i > -1:
-            return i, i + len("1849")
-
-    ret = rebulk.chain() \
-        .functional(digit) \
-        .string("test").repeater(2) \
-        .string("x").repeater('{1,3}') \
-        .string("optional").repeater('?') \
-        .regex("f?x").repeater('+') \
-        .close()
-
-    assert ret == rebulk
-    assert len(rebulk.effective_patterns()) == 1
-
-    chain = rebulk.effective_patterns()[0]
-
-    assert len(chain.parts) == 5
-
-    assert isinstance(chain.parts[0].pattern, FunctionalPattern)
-    assert chain.parts[0].repeater_start == 1
-    assert chain.parts[0].repeater_end == 1
-
-    assert isinstance(chain.parts[1].pattern, StringPattern)
-    assert chain.parts[1].repeater_start == 2
-    assert chain.parts[1].repeater_end == 2
-
-    assert isinstance(chain.parts[2].pattern, StringPattern)
-    assert chain.parts[2].repeater_start == 1
-    assert chain.parts[2].repeater_end == 3
-
-    assert isinstance(chain.parts[3].pattern, StringPattern)
-    assert chain.parts[3].repeater_start == 0
-    assert chain.parts[3].repeater_end == 1
-
-    assert isinstance(chain.parts[4].pattern, RePattern)
-    assert chain.parts[4].repeater_start == 1
-    assert chain.parts[4].repeater_end is None
-
-
-def test_chain_defaults():
-    rebulk = Rebulk()
-    rebulk.defaults(validator=lambda x: True, ignore_names=['testIgnore'], children=True)
-
-    rebulk.chain()\
-        .regex("(?P<test>test)") \
-        .regex(" ").repeater("*") \
-        .regex("(?P<testIgnore>testIgnore)")
-    matches = rebulk.matches("test testIgnore")
-
-    assert len(matches) == 1
-    assert matches[0].name == "test"
-
-
-def test_matches():
-    rebulk = Rebulk()
-
-    def digit(input_string):
-        i = input_string.find("1849")
-        if i > -1:
-            return i, i + len("1849")
-
-    input_string = "1849testtestxxfixfux_foxabc1849testtestxoptionalfoxabc"
-
-    chain = rebulk.chain() \
-        .functional(digit) \
-        .string("test").hidden().repeater(2) \
-        .string("x").hidden().repeater('{1,3}') \
-        .string("optional").hidden().repeater('?') \
-        .regex("f.?x", name='result').repeater('+') \
-        .close()
-
-    matches = chain.matches(input_string)
-
-    assert len(matches) == 2
-    children = matches[0].children
-
-    assert children[0].value == '1849'
-    assert children[1].value == 'fix'
-    assert children[2].value == 'fux'
-
-    children = matches[1].children
-    assert children[0].value == '1849'
-    assert children[1].value == 'fox'
-
-    input_string = "_1850testtestxoptionalfoxabc"
-    matches = chain.matches(input_string)
-
-    assert len(matches) == 0
-
-    input_string = "_1849testtesttesttestxoptionalfoxabc"
-    matches = chain.matches(input_string)
-
-    assert len(matches) == 0
-
-    input_string = "_1849testtestxxxxoptionalfoxabc"
-    matches = chain.matches(input_string)
-
-    assert len(matches) == 0
-
-    input_string = "_1849testtestoptionalfoxabc"
-    matches = chain.matches(input_string)
-
-    assert len(matches) == 0
-
-    input_string = "_1849testtestxoptionalabc"
-    matches = chain.matches(input_string)
-
-    assert len(matches) == 0
-
-    input_string = "_1849testtestxoptionalfaxabc"
-    matches = chain.matches(input_string)
-
-    assert len(matches) == 1
-    children = matches[0].children
-
-    assert children[0].value == '1849'
-    assert children[1].value == 'fax'
-
-
-def test_matches_2():
-    rebulk = Rebulk() \
-        .regex_defaults(flags=re.IGNORECASE) \
-        .chain(children=True, formatter={'episode': int}) \
-        .defaults(formatter={'version': int}) \
-        .regex(r'e(?P<episode>\d{1,4})') \
-        .regex(r'v(?P<version>\d+)').repeater('?') \
-        .regex(r'[ex-](?P<episode>\d{1,4})').repeater('*') \
-        .close()
-
-    matches = rebulk.matches("This is E14v2-15E16x17")
-    assert len(matches) == 5
-
-    assert matches[0].name == 'episode'
-    assert matches[0].value == 14
-
-    assert matches[1].name == 'version'
-    assert matches[1].value == 2
-
-    assert matches[2].name == 'episode'
-    assert matches[2].value == 15
-
-    assert matches[3].name == 'episode'
-    assert matches[3].value == 16
-
-    assert matches[4].name == 'episode'
-    assert matches[4].value == 17
-
-
-def test_matches_3():
-    alt_dash = (r'@', r'[\W_]')  # abbreviation
-
-    rebulk = Rebulk()
-
-    rebulk.chain(formatter={'season': int, 'episode': int},
-                 tags=['SxxExx'],
-                 abbreviations=[alt_dash],
-                 private_names=['episodeSeparator', 'seasonSeparator'],
-                 children=True,
-                 private_parent=True,
-                 conflict_solver=lambda match, other: match
-                 if match.name in ['season', 'episode'] and other.name in
-                 ['screen_size', 'video_codec', 'audio_codec',
-                  'audio_channels', 'container', 'date']
-                 else '__default__') \
-        .regex(r'(?P<season>\d+)@?x@?(?P<episode>\d+)') \
-        .regex(r'(?P<episodeSeparator>x|-|\+|&)(?P<episode>\d+)').repeater('*') \
-        .chain() \
-        .regex(r'S(?P<season>\d+)@?(?:xE|Ex|E|x)@?(?P<episode>\d+)') \
-        .regex(r'(?:(?P<episodeSeparator>xE|Ex|E|x|-|\+|&)(?P<episode>\d+))').repeater('*') \
-        .chain() \
-        .regex(r'S(?P<season>\d+)') \
-        .regex(r'(?P<seasonSeparator>S|-|\+|&)(?P<season>\d+)').repeater('*')
-
-    matches = rebulk.matches("test-01x02-03")
-    assert len(matches) == 3
-
-    assert matches[0].name == 'season'
-    assert matches[0].value == 1
-
-    assert matches[1].name == 'episode'
-    assert matches[1].value == 2
-
-    assert matches[2].name == 'episode'
-    assert matches[2].value == 3
-
-    matches = rebulk.matches("test-S01E02-03")
-
-    assert len(matches) == 3
-    assert matches[0].name == 'season'
-    assert matches[0].value == 1
-
-    assert matches[1].name == 'episode'
-    assert matches[1].value == 2
-
-    assert matches[2].name == 'episode'
-    assert matches[2].value == 3
-
-    matches = rebulk.matches("test-S01-02-03-04")
-
-    assert len(matches) == 4
-    assert matches[0].name == 'season'
-    assert matches[0].value == 1
-
-    assert matches[1].name == 'season'
-    assert matches[1].value == 2
-
-    assert matches[2].name == 'season'
-    assert matches[2].value == 3
-
-    assert matches[3].name == 'season'
-    assert matches[3].value == 4
-
-
-def test_matches_4():
-    seps_surround = partial(chars_surround, " ")
-
-    rebulk = Rebulk()
-    rebulk.regex_defaults(flags=re.IGNORECASE)
-    rebulk.defaults(private_names=['episodeSeparator', 'seasonSeparator'], validate_all=True,
-                    validator={'__parent__': seps_surround}, children=True, private_parent=True)
-
-    rebulk.chain(formatter={'episode': int, 'version': int}) \
-        .defaults(validator=None) \
-        .regex(r'e(?P<episode>\d{1,4})') \
-        .regex(r'v(?P<version>\d+)').repeater('?') \
-        .regex(r'(?P<episodeSeparator>e|x|-)(?P<episode>\d{1,4})').repeater('*')
-
-    matches = rebulk.matches("Some Series E01E02E03")
-    assert len(matches) == 3
-
-    assert matches[0].value == 1
-    assert matches[1].value == 2
-    assert matches[2].value == 3
-
-
-def test_matches_5():
-    seps_surround = partial(chars_surround, " ")
-
-    rebulk = Rebulk()
-    rebulk.regex_defaults(flags=re.IGNORECASE)
-    rebulk.defaults(private_names=['episodeSeparator', 'seasonSeparator'], validate_all=True,
-                    validator={'__parent__': seps_surround}, children=True, private_parent=True)
-
-    rebulk.chain(formatter={'episode': int, 'version': int}) \
-        .defaults(validator=None) \
-        .regex(r'e(?P<episode>\d{1,4})') \
-        .regex(r'v(?P<version>\d+)').repeater('?') \
-        .regex(r'(?P<episodeSeparator>e|x|-)(?P<episode>\d{1,4})').repeater('{2,3}')
-
-    matches = rebulk.matches("Some Series E01E02E03")
-    assert len(matches) == 3
-
-    matches = rebulk.matches("Some Series E01E02")
-    assert len(matches) == 0
-
-    matches = rebulk.matches("Some Series E01E02E03E04E05E06")  # Parent can't be validated, so no results at all
-    assert len(matches) == 0
-
-
-def test_matches_6():
-    rebulk = Rebulk()
-    rebulk.regex_defaults(flags=re.IGNORECASE)
-    rebulk.defaults(private_names=['episodeSeparator', 'seasonSeparator'], validate_all=True,
-                    validator=None, children=True, private_parent=True)
-
-    rebulk.chain(formatter={'episode': int, 'version': int}) \
-        .defaults(validator=None) \
-        .regex(r'e(?P<episode>\d{1,4})') \
-        .regex(r'v(?P<version>\d+)').repeater('?') \
-        .regex(r'(?P<episodeSeparator>e|x|-)(?P<episode>\d{1,4})').repeater('{2,3}')
-
-    matches = rebulk.matches("Some Series E01E02E03")
-    assert len(matches) == 3
-
-    matches = rebulk.matches("Some Series E01E02")
-    assert len(matches) == 0
-
-    matches = rebulk.matches("Some Series E01E02E03E04E05E06")  # No validator on parent, so it should give 4 episodes.
-    assert len(matches) == 4
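
The chain tests above revolve around two ideas: sub-patterns carry repeaters ('?', '*', '+', '{m,n}'), and a chain only yields matches when every required part is present, with an optional validator applied to the hidden parent match. A minimal sketch of that usage, using only the calls shown above (Rebulk.chain(), .regex(), .repeater(), .close(), .matches()); the top-level `from rebulk import Rebulk` import and the sample input string are assumptions, not taken from these files:

    import re
    from rebulk import Rebulk

    rebulk = Rebulk()
    rebulk.regex_defaults(flags=re.IGNORECASE)

    # One required episode number, an optional version tag, then any number
    # of additional episode numbers introduced by a separator.
    rebulk.chain(children=True, formatter={'episode': int, 'version': int}) \
        .regex(r'e(?P<episode>\d{1,4})') \
        .regex(r'v(?P<version>\d+)').repeater('?') \
        .regex(r'[ex-](?P<episode>\d{1,4})').repeater('*') \
        .close()

    for match in rebulk.matches("Show E01v2-02-03"):
        print(match.name, match.value)  # expected: episode 1, version 2, episode 2, episode 3
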
diff --git a/lib/rebulk/test/test_debug.py b/lib/rebulk/test/test_debug.py
deleted file mode 100644
index a35f95fdf345afc04d7c63baf21d2a3e15430e44..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_debug.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, protected-access, invalid-name
-
-from ..pattern import StringPattern
-from ..rebulk import Rebulk
-from ..match import Match
-from .. import debug
-from .default_rules_module import RuleRemove0
-
-
-class TestDebug(object):
-
-
-    #request.addfinalizer(disable_debug)
-
-
-
-    debug.DEBUG = True
-    pattern = StringPattern(1, 3, value="es")
-
-    match = Match(1, 3, value="es")
-    rule = RuleRemove0()
-
-    input_string = "This is a debug test"
-    rebulk = Rebulk().string("debug") \
-        .string("is")
-
-    matches = rebulk.matches(input_string)
-    debug.DEBUG = False
-
-    @classmethod
-    def setup_class(cls):
-        debug.DEBUG = True
-
-    @classmethod
-    def teardown_class(cls):
-        debug.DEBUG = False
-
-    def test_pattern(self):
-        assert self.pattern.defined_at.lineno == 20
-        assert self.pattern.defined_at.name == 'rebulk.test.test_debug'
-        assert self.pattern.defined_at.filename.endswith('test_debug.py')
-
-        assert str(self.pattern.defined_at) == 'test_debug.py#L20'
-        assert repr(self.pattern) == '<StringPattern@test_debug.py#L20:(1, 3)>'
-
-    def test_match(self):
-        assert self.match.defined_at.lineno == 22
-        assert self.match.defined_at.name == 'rebulk.test.test_debug'
-        assert self.match.defined_at.filename.endswith('test_debug.py')
-
-        assert str(self.match.defined_at) == 'test_debug.py#L22'
-
-    def test_rule(self):
-        assert self.rule.defined_at.lineno == 23
-        assert self.rule.defined_at.name == 'rebulk.test.test_debug'
-        assert self.rule.defined_at.filename.endswith('test_debug.py')
-
-        assert str(self.rule.defined_at) == 'test_debug.py#L23'
-        assert repr(self.rule) == '<RuleRemove0@test_debug.py#L23>'
-
-    def test_rebulk(self):
-        """
-        On Travis CI the reported line numbers are off by one for an unknown reason, so both possible values are accepted below.
-        """
-        assert self.rebulk._patterns[0].defined_at.lineno in [26, 27]
-        assert self.rebulk._patterns[0].defined_at.name == 'rebulk.test.test_debug'
-        assert self.rebulk._patterns[0].defined_at.filename.endswith('test_debug.py')
-
-        assert str(self.rebulk._patterns[0].defined_at) in ['test_debug.py#L26', 'test_debug.py#L27']
-
-        assert self.rebulk._patterns[1].defined_at.lineno in [27, 28]
-        assert self.rebulk._patterns[1].defined_at.name == 'rebulk.test.test_debug'
-        assert self.rebulk._patterns[1].defined_at.filename.endswith('test_debug.py')
-
-        assert str(self.rebulk._patterns[1].defined_at) in ['test_debug.py#L27', 'test_debug.py#L28']
-
-        assert self.matches[0].defined_at == self.rebulk._patterns[0].defined_at
-        assert self.matches[1].defined_at == self.rebulk._patterns[1].defined_at
-
-    def test_repr(self):
-        str(self.matches)
diff --git a/lib/rebulk/test/test_introspector.py b/lib/rebulk/test/test_introspector.py
deleted file mode 100644
index 24c0c5001a2e9f9692af28bf5a16bde8948b2332..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_introspector.py
+++ /dev/null
@@ -1,138 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-"""
-Introspector tests
-"""
-# pylint: disable=no-self-use,pointless-statement,missing-docstring,protected-access,invalid-name
-from ..rebulk import Rebulk
-from .. import introspector
-from .default_rules_module import RuleAppend2, RuleAppend3
-
-
-def test_string_introspector():
-    rebulk = Rebulk().string('One', 'Two', 'Three', name='first').string('1', '2', '3', name='second')
-
-    introspected = introspector.introspect(rebulk, None)
-
-    assert len(introspected.patterns) == 2
-
-    first_properties = introspected.patterns[0].properties
-    assert len(first_properties) == 1
-    first_properties['first'] == ['One', 'Two', 'Three']
-
-    second_properties = introspected.patterns[1].properties
-    assert len(second_properties) == 1
-    second_properties['second'] == ['1', '2', '3']
-
-    properties = introspected.properties
-    assert len(properties) == 2
-    assert properties['first'] == first_properties['first']
-    assert properties['second'] == second_properties['second']
-
-
-def test_string_properties():
-    rebulk = Rebulk()\
-        .string('One', 'Two', 'Three', name='first', properties={'custom': ['One']})\
-        .string('1', '2', '3', name='second', properties={'custom': [1]})
-
-    introspected = introspector.introspect(rebulk, None)
-
-    assert len(introspected.patterns) == 2
-    assert len(introspected.rules) == 2
-
-    first_properties = introspected.patterns[0].properties
-    assert len(first_properties) == 1
-    first_properties['custom'] == ['One']
-
-    second_properties = introspected.patterns[1].properties
-    assert len(second_properties) == 1
-    second_properties['custom'] == [1]
-
-    properties = introspected.properties
-    assert len(properties) == 1
-    assert properties['custom'] == ['One', 1]
-
-
-def test_various_pattern():
-    rebulk = Rebulk()\
-        .regex('One', 'Two', 'Three', name='first', value="string") \
-        .string('1', '2', '3', name='second', value="digit") \
-        .string('4', '5', '6', name='third') \
-        .string('private', private=True) \
-        .functional(lambda string: (0, 5), name='func', value='test') \
-        .regex('One', 'Two', 'Three', name='regex_name') \
-        .regex('(?P<one>One)(?P<two>Two)(?P<three>Three)') \
-        .functional(lambda string: (6, 10), name='func2') \
-        .string('7', name='third')
-
-    introspected = introspector.introspect(rebulk, None)
-
-    assert len(introspected.patterns) == 8
-    assert len(introspected.rules) == 2
-
-    first_properties = introspected.patterns[0].properties
-    assert len(first_properties) == 1
-    first_properties['first'] == ['string']
-
-    second_properties = introspected.patterns[1].properties
-    assert len(second_properties) == 1
-    second_properties['second'] == ['digit']
-
-    third_properties = introspected.patterns[2].properties
-    assert len(third_properties) == 1
-    third_properties['third'] == ['4', '5', '6']
-
-    func_properties = introspected.patterns[3].properties
-    assert len(func_properties) == 1
-    func_properties['func'] == ['test']
-
-    regex_name_properties = introspected.patterns[4].properties
-    assert len(regex_name_properties) == 1
-    regex_name_properties['regex_name'] == [None]
-
-    regex_groups_properties = introspected.patterns[5].properties
-    assert len(regex_groups_properties) == 3
-    regex_groups_properties['one'] == [None]
-    regex_groups_properties['two'] == [None]
-    regex_groups_properties['three'] == [None]
-
-    func2_properties = introspected.patterns[6].properties
-    assert len(func2_properties) == 1
-    func2_properties['func2'] == [None]
-
-    append_third_properties = introspected.patterns[7].properties
-    assert len(append_third_properties) == 1
-    append_third_properties['third'] == [None]
-
-    properties = introspected.properties
-    assert len(properties) == 9
-    assert properties['first'] == first_properties['first']
-    assert properties['second'] == second_properties['second']
-    assert properties['third'] == third_properties['third'] + append_third_properties['third']
-    assert properties['func'] == func_properties['func']
-    assert properties['regex_name'] == regex_name_properties['regex_name']
-    assert properties['one'] == regex_groups_properties['one']
-    assert properties['two'] == regex_groups_properties['two']
-    assert properties['three'] == regex_groups_properties['three']
-    assert properties['func2'] == func2_properties['func2']
-
-
-def test_rule_properties():
-    rebulk = Rebulk(default_rules=False).rules(RuleAppend2, RuleAppend3)
-
-    introspected = introspector.introspect(rebulk, None)
-
-    assert len(introspected.rules) == 2
-    assert len(introspected.patterns) == 0
-
-    rule_properties = introspected.rules[0].properties
-    assert len(rule_properties) == 1
-    assert rule_properties['renamed'] == [None]
-
-    rule_properties = introspected.rules[1].properties
-    assert len(rule_properties) == 1
-    assert rule_properties['renamed'] == [None]
-
-    properties = introspected.properties
-    assert len(properties) == 1
-    assert properties['renamed'] == [None]
diff --git a/lib/rebulk/test/test_loose.py b/lib/rebulk/test/test_loose.py
deleted file mode 100644
index bc0c6bca121de1c51624ac0d015da1bf53ec0fa5..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_loose.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
-
-from ..loose import call
-
-
-def test_loose_function():
-
-    def func(v1, v2, v3=3, v4=4):
-        return v1 + v2 + v3 + v4
-
-    assert call(func, 1, 2) == func(1, 2)
-    assert call(func, 1, 2, 3, 5) == func(1, 2, 3, 5)
-    assert call(func, 1, 2, v3=4, v4=5) == func(1, 2, v3=4, v4=5)
-    assert call(func, 1, 2, 3, 4, 5) == func(1, 2, 3, 4)
-    assert call(func, 1, 2, 3, 4, more=5) == func(1, 2, 3, 4)
-
-
-def test_loose_varargs_function():
-    def func(v1, v2, *args):
-        return v1 + v2 + (args[0] if len(args) > 0 else 3) + (args[1] if len(args) > 1 else 4)
-
-    assert call(func, 1, 2) == func(1, 2)
-    assert call(func, 1, 2, 3, 5) == func(1, 2, 3, 5)
-    assert call(func, 1, 2, 3, 4, 5) == func(1, 2, 3, 4)
-
-
-def test_loose_kwargs_function():
-    def func(v1, v2, **kwargs):
-        return v1 + v2 + kwargs.get('v3', 3) + kwargs.get('v4', 4)
-
-    assert call(func, v1=1, v2=2) == func(v1=1, v2=2)
-    assert call(func, v1=1, v2=2, v3=3, v4=5) == func(v1=1, v2=2, v3=3, v4=5)
-
-
-def test_loose_class():
-    class Dummy(object):
-        def __init__(self, v1, v2, v3=3, v4=4):
-            self.v1 = v1
-            self.v2 = v2
-            self.v3 = v3
-            self.v4 = v4
-
-        def call(self):
-            return self.v1 + self.v2 + self.v3 + self.v4
-
-    assert call(Dummy, 1, 2).call() == Dummy(1, 2).call()
-    assert call(Dummy, 1, 2, 3, 5).call() == Dummy(1, 2, 3, 5).call()
-    assert call(Dummy, 1, 2, v3=4, v4=5).call() == Dummy(1, 2, v3=4, v4=5).call()
-    assert call(Dummy, 1, 2, 3, 4, 5).call() == Dummy(1, 2, 3, 4).call()
-    assert call(Dummy, 1, 2, 3, 4, more=5).call() == Dummy(1, 2, 3, 4).call()
-
-
-def test_loose_varargs_class():
-    class Dummy(object):
-        def __init__(self, v1, v2, *args):
-            self.v1 = v1
-            self.v2 = v2
-            self.v3 = args[0] if len(args) > 0 else 3
-            self.v4 = args[1] if len(args) > 1 else 4
-
-        def call(self):
-            return self.v1 + self.v2 + self.v3 + self.v4
-
-    assert call(Dummy, 1, 2).call() == Dummy(1, 2).call()
-    assert call(Dummy, 1, 2, 3, 5).call() == Dummy(1, 2, 3, 5).call()
-    assert call(Dummy, 1, 2, 3, 4, 5).call() == Dummy(1, 2, 3, 4).call()
-
-
-def test_loose_kwargs_class():
-    class Dummy(object):
-        def __init__(self, v1, v2, **kwargs):
-            self.v1 = v1
-            self.v2 = v2
-            self.v3 = kwargs.get('v3', 3)
-            self.v4 = kwargs.get('v4', 4)
-
-        def call(self):
-            return self.v1 + self.v2 + self.v3 + self.v4
-
-    assert call(Dummy, v1=1, v2=2).call() == Dummy(v1=1, v2=2).call()
-    assert call(Dummy, v1=1, v2=2, v3=3, v4=5).call() == Dummy(v1=1, v2=2, v3=3, v4=5).call()
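
The loose-call tests above all check one behaviour: rebulk's loose call() invokes a function or class with only the arguments its signature can accept, silently dropping the rest, so one argument bundle can be forwarded to heterogeneous callbacks. A short sketch of that behaviour; the `area` helper and the top-level `rebulk.loose` import path are illustrative assumptions:

    from rebulk.loose import call

    def area(width, height=2):
        return width * height

    assert call(area, 3) == 6                       # only 'width' supplied
    assert call(area, 3, 4) == 12                   # both parameters supplied
    assert call(area, 3, 4, 99, unused=True) == 12  # extra args are dropped
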
diff --git a/lib/rebulk/test/test_match.py b/lib/rebulk/test/test_match.py
deleted file mode 100644
index efbc63d0e602fb1e930a86ceb1f549988909c992..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_match.py
+++ /dev/null
@@ -1,565 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, unneeded-not
-
-import pytest
-import six
-
-from ..match import Match, Matches
-from ..pattern import StringPattern, RePattern
-from ..formatters import formatters
-
-
-class TestMatchClass(object):
-    def test_repr(self):
-        match1 = Match(1, 3, value="es")
-
-        assert repr(match1) == '<es:(1, 3)>'
-
-        match2 = Match(0, 4, value="test", private=True, name="abc", tags=['one', 'two'])
-
-        assert repr(match2) == '<test:(0, 4)+private+name=abc+tags=[\'one\', \'two\']>'
-
-    def test_names(self):
-        parent = Match(0, 10, name="test")
-        parent.children.append(Match(0, 10, name="child1", parent=parent))
-        parent.children.append(Match(0, 10, name="child2", parent=parent))
-
-        assert set(parent.names) == set(["child1", "child2"])
-
-    def test_equality(self):
-        match1 = Match(1, 3, value="es")
-        match2 = Match(1, 3, value="es")
-
-        other = object()
-
-        assert hash(match1) == hash(match2)
-        assert hash(match1) != hash(other)
-
-        assert match1 == match2
-        assert not match1 == other
-
-    def test_inequality(self):
-        match1 = Match(0, 2, value="te")
-        match2 = Match(2, 4, value="st")
-        match3 = Match(0, 2, value="other")
-
-        other = object()
-
-        assert hash(match1) != hash(match2)
-        assert hash(match1) != hash(match3)
-
-        assert match1 != other
-        assert match1 != match2
-        assert match1 != match3
-
-    def test_length(self):
-        match1 = Match(0, 4, value="test")
-        match2 = Match(0, 2, value="spanIsUsed")
-
-        assert len(match1) == 4
-        assert len(match2) == 2
-
-    def test_compare(self):
-        match1 = Match(0, 2, value="te")
-        match2 = Match(2, 4, value="st")
-
-        other = object()
-
-        assert match1 < match2
-        assert match1 <= match2
-
-        assert match2 > match1
-        assert match2 >= match1
-
-        if six.PY3:
-            with pytest.raises(TypeError):
-                match1 < other
-
-            with pytest.raises(TypeError):
-                match1 <= other
-
-            with pytest.raises(TypeError):
-                match1 > other
-
-            with pytest.raises(TypeError):
-                match1 >= other
-        else:
-            assert match1 < other
-            assert match1 <= other
-            assert not match1 > other
-            assert not match1 >= other
-
-    def test_value(self):
-        match1 = Match(1, 3)
-        match1.value = "test"
-
-        assert match1.value == "test"
-
-
-class TestMatchesClass(object):
-    match1 = Match(0, 2, value="te", name="start")
-    match2 = Match(2, 3, value="s", tags="tag1")
-    match3 = Match(3, 4, value="t", tags=["tag1", "tag2"])
-    match4 = Match(2, 4, value="st", name="end")
-
-    def test_tag(self):
-        matches = Matches()
-        matches.append(self.match1)
-        matches.append(self.match2)
-        matches.append(self.match3)
-        matches.append(self.match4)
-
-        assert "start" in matches.names
-        assert "end" in matches.names
-
-        assert "tag1" in matches.tags
-        assert "tag2" in matches.tags
-
-        tag1 = matches.tagged("tag1")
-        assert len(tag1) == 2
-        assert tag1[0] == self.match2
-        assert tag1[1] == self.match3
-
-        tag2 = matches.tagged("tag2")
-        assert len(tag2) == 1
-        assert tag2[0] == self.match3
-
-        start = matches.named("start")
-        assert len(start) == 1
-        assert start[0] == self.match1
-
-        end = matches.named("end")
-        assert len(end) == 1
-        assert end[0] == self.match4
-
-    def test_base(self):
-        matches = Matches()
-        matches.append(self.match1)
-
-        assert len(matches) == 1
-        assert repr(matches) == repr([self.match1])
-        assert list(matches.starting(0)) == [self.match1]
-        assert list(matches.ending(2)) == [self.match1]
-
-        matches.append(self.match2)
-        matches.append(self.match3)
-        matches.append(self.match4)
-
-        assert len(matches) == 4
-        assert list(matches.starting(2)) == [self.match2, self.match4]
-        assert list(matches.starting(3)) == [self.match3]
-        assert list(matches.ending(3)) == [self.match2]
-        assert list(matches.ending(4)) == [self.match3, self.match4]
-        assert list(matches.range()) == [self.match1, self.match2, self.match4, self.match3]
-        assert list(matches.range(0)) == [self.match1, self.match2, self.match4, self.match3]
-        assert list(matches.range(0, 3)) == [self.match1, self.match2, self.match4]
-        assert list(matches.range(2, 3)) == [self.match2, self.match4]
-        assert list(matches.range(3, 4)) == [self.match4, self.match3]
-
-        matches.remove(self.match1)
-        assert len(matches) == 3
-        assert len(matches.starting(0)) == 0
-        assert len(matches.ending(2)) == 0
-
-        matches.clear()
-
-        assert len(matches) == 0
-        assert len(matches.starting(0)) == 0
-        assert len(matches.starting(2)) == 0
-        assert len(matches.starting(3)) == 0
-        assert len(matches.ending(2)) == 0
-        assert len(matches.ending(3)) == 0
-        assert len(matches.ending(4)) == 0
-
-    def test_get_slices(self):
-        matches = Matches()
-        matches.append(self.match1)
-        matches.append(self.match2)
-        matches.append(self.match3)
-        matches.append(self.match4)
-
-        slice_matches = matches[1:3]
-
-        assert isinstance(slice_matches, Matches)
-
-        assert len(slice_matches) == 2
-        assert slice_matches[0] == self.match2
-        assert slice_matches[1] == self.match3
-
-    def test_remove_slices(self):
-        matches = Matches()
-        matches.append(self.match1)
-        matches.append(self.match2)
-        matches.append(self.match3)
-        matches.append(self.match4)
-
-        del matches[1:3]
-
-        assert len(matches) == 2
-        assert matches[0] == self.match1
-        assert matches[1] == self.match4
-
-    def test_set_slices(self):
-        matches = Matches()
-        matches.append(self.match1)
-        matches.append(self.match2)
-        matches.append(self.match3)
-        matches.append(self.match4)
-
-        matches[1:3] = self.match1, self.match4
-
-        assert len(matches) == 4
-        assert matches[0] == self.match1
-        assert matches[1] == self.match1
-        assert matches[2] == self.match4
-        assert matches[3] == self.match4
-
-    def test_set_index(self):
-        matches = Matches()
-        matches.append(self.match1)
-        matches.append(self.match2)
-        matches.append(self.match3)
-
-        matches[1] = self.match4
-
-        assert len(matches) == 3
-        assert matches[0] == self.match1
-        assert matches[1] == self.match4
-        assert matches[2] == self.match3
-
-    def test_constructor(self):
-        matches = Matches([self.match1, self.match2, self.match3, self.match4])
-
-        assert len(matches) == 4
-        assert list(matches.starting(0)) == [self.match1]
-        assert list(matches.ending(2)) == [self.match1]
-        assert list(matches.starting(2)) == [self.match2, self.match4]
-        assert list(matches.starting(3)) == [self.match3]
-        assert list(matches.ending(3)) == [self.match2]
-        assert list(matches.ending(4)) == [self.match3, self.match4]
-
-    def test_constructor_kwargs(self):
-        matches = Matches([self.match1, self.match2, self.match3, self.match4], input_string="test")
-
-        assert len(matches) == 4
-        assert matches.input_string == "test"
-        assert list(matches.starting(0)) == [self.match1]
-        assert list(matches.ending(2)) == [self.match1]
-        assert list(matches.starting(2)) == [self.match2, self.match4]
-        assert list(matches.starting(3)) == [self.match3]
-        assert list(matches.ending(3)) == [self.match2]
-        assert list(matches.ending(4)) == [self.match3, self.match4]
-
-    def test_crop(self):
-        input_string = "abcdefghijklmnopqrstuvwxyz"
-
-        match1 = Match(1, 10, input_string=input_string)
-        match2 = Match(0, 2, input_string=input_string)
-        match3 = Match(8, 15, input_string=input_string)
-
-        ret = match1.crop([match2, match3.span])
-
-        assert len(ret) == 1
-
-        assert ret[0].span == (2, 8)
-        assert ret[0].value == "cdefgh"
-
-        ret = match1.crop((1, 10))
-        assert len(ret) == 0
-
-        ret = match1.crop((1, 3))
-        assert len(ret) == 1
-        assert ret[0].span == (3, 10)
-
-        ret = match1.crop((7, 10))
-        assert len(ret) == 1
-        assert ret[0].span == (1, 7)
-
-        ret = match1.crop((0, 12))
-        assert len(ret) == 0
-
-        ret = match1.crop((4, 6))
-        assert len(ret) == 2
-
-        assert ret[0].span == (1, 4)
-        assert ret[1].span == (6, 10)
-
-        ret = match1.crop([(3, 5), (7, 9)])
-        assert len(ret) == 3
-
-        assert ret[0].span == (1, 3)
-        assert ret[1].span == (5, 7)
-        assert ret[2].span == (9, 10)
-
-    def test_split(self):
-        input_string = "123 +word1  -  word2  + word3  456"
-        match = Match(3, len(input_string) - 3, input_string=input_string)
-        splitted = match.split(" -+")
-
-        assert len(splitted) == 3
-        assert [split.value for split in splitted] == ["word1", "word2", "word3"]
-
-
-class TestMatches(object):
-    def test_names(self):
-        input_string = "One Two Three"
-
-        matches = Matches()
-
-        matches.extend(StringPattern("One", name="1-str", tags=["One", "str"]).matches(input_string))
-        matches.extend(RePattern("One", name="1-re", tags=["One", "re"]).matches(input_string))
-        matches.extend(StringPattern("Two", name="2-str", tags=["Two", "str"]).matches(input_string))
-        matches.extend(RePattern("Two", name="2-re", tags=["Two", "re"]).matches(input_string))
-        matches.extend(StringPattern("Three", name="3-str", tags=["Three", "str"]).matches(input_string))
-        matches.extend(RePattern("Three", name="3-re", tags=["Three", "re"]).matches(input_string))
-
-        assert set(matches.names) == set(["1-str", "1-re", "2-str", "2-re", "3-str", "3-re"])
-
-    def test_filters(self):
-        input_string = "One Two Three"
-
-        matches = Matches()
-
-        matches.extend(StringPattern("One", name="1-str", tags=["One", "str"]).matches(input_string))
-        matches.extend(RePattern("One", name="1-re", tags=["One", "re"]).matches(input_string))
-        matches.extend(StringPattern("Two", name="2-str", tags=["Two", "str"]).matches(input_string))
-        matches.extend(RePattern("Two", name="2-re", tags=["Two", "re"]).matches(input_string))
-        matches.extend(StringPattern("Three", name="3-str", tags=["Three", "str"]).matches(input_string))
-        matches.extend(RePattern("Three", name="3-re", tags=["Three", "re"]).matches(input_string))
-
-        selection = matches.starting(0)
-        assert len(selection) == 2
-
-        selection = matches.starting(0, lambda m: "str" in m.tags)
-        assert len(selection) == 1
-        assert selection[0].pattern.name == "1-str"
-
-        selection = matches.ending(7, predicate=lambda m: "str" in m.tags)
-        assert len(selection) == 1
-        assert selection[0].pattern.name == "2-str"
-
-        selection = matches.previous(matches.named("2-str")[0])
-        assert len(selection) == 2
-        assert selection[0].pattern.name == "1-str"
-        assert selection[1].pattern.name == "1-re"
-
-        selection = matches.previous(matches.named("2-str", 0), lambda m: "str" in m.tags)
-        assert len(selection) == 1
-        assert selection[0].pattern.name == "1-str"
-
-        selection = matches.next(matches.named("2-str", 0))
-        assert len(selection) == 2
-        assert selection[0].pattern.name == "3-str"
-        assert selection[1].pattern.name == "3-re"
-
-        selection = matches.next(matches.named("2-str", 0), index=0, predicate=lambda m: "re" in m.tags)
-        assert selection is not None
-        assert selection.pattern.name == "3-re"
-
-        selection = matches.next(matches.named("2-str", index=0), lambda m: "re" in m.tags)
-        assert len(selection) == 1
-        assert selection[0].pattern.name == "3-re"
-
-        selection = matches.named("2-str", lambda m: "re" in m.tags)
-        assert len(selection) == 0
-
-        selection = matches.named("2-re", lambda m: "re" in m.tags, 0)
-        assert selection is not None
-        assert selection.name == "2-re"  # pylint:disable=no-member
-
-        selection = matches.named("2-re", lambda m: "re" in m.tags)
-        assert len(selection) == 1
-        assert selection[0].name == "2-re"
-
-        selection = matches.named("2-re", lambda m: "re" in m.tags, index=1000)
-        assert selection is None
-
-    def test_raw(self):
-        input_string = "0123456789"
-
-        match = Match(0, 10, input_string=input_string, formatter=lambda s: s*2)
-
-        assert match.value == match.raw * 2
-        assert match.raw == input_string
-
-        match.raw_end = 9
-        match.raw_start = 1
-
-        assert match.value == match.raw * 2
-        assert match.raw == input_string[1:9]
-
-        match.raw_end = None
-        match.raw_start = None
-
-        assert match.value == match.raw * 2
-        assert match.raw == input_string
-
-
-    def test_formatter_chain(self):
-        input_string = "100"
-
-        match = Match(0, 3, input_string=input_string, formatter=formatters(int, lambda s: s*2, lambda s: s+10))
-
-        assert match.raw == input_string
-        assert match.value == 100 * 2 + 10
-
-
-    def test_to_dict(self):
-        input_string = "One Two Two Three"
-
-        matches = Matches()
-
-        matches.extend(StringPattern("One", name="1", tags=["One", "str"]).matches(input_string))
-        matches.extend(RePattern("One", name="1", tags=["One", "re"]).matches(input_string))
-        matches.extend(StringPattern("Two", name="2", tags=["Two", "str"]).matches(input_string))
-        matches.extend(RePattern("Two", name="2", tags=["Two", "re"]).matches(input_string))
-        matches.extend(RePattern("Two", name="2", tags=["Two", "reBis"]).matches(input_string))
-        matches.extend(StringPattern("Three", name="3", tags=["Three", "str"]).matches(input_string))
-        matches.extend(RePattern("Three", name="3bis", tags=["Three", "re"]).matches(input_string))
-        matches.extend(RePattern(r"(\w+)", name="words").matches(input_string))
-
-        kvalues = matches.to_dict()
-        assert kvalues == {"1": "One",
-                           "2": "Two",
-                           "3": "Three",
-                           "3bis": "Three",
-                           "words": "One"}
-        assert kvalues.values_list["words"] == ["One", "Two", "Three"]
-
-        kvalues = matches.to_dict(details=True, implicit=True)
-        assert kvalues["1"].value == "One"
-
-        assert len(kvalues["2"]) == 2
-        assert kvalues["2"][0].value == "Two"
-        assert kvalues["2"][1].value == "Two"
-
-        assert kvalues["3"].value == "Three"
-        assert kvalues["3bis"].value == "Three"
-
-        assert len(kvalues["words"]) == 4
-        assert kvalues["words"][0].value == "One"
-        assert kvalues["words"][1].value == "Two"
-        assert kvalues["words"][2].value == "Two"
-        assert kvalues["words"][3].value == "Three"
-
-        kvalues = matches.to_dict(details=True)
-        assert kvalues["1"].value == "One"
-
-        assert len(kvalues.values_list["2"]) == 2
-        assert kvalues.values_list["2"][0].value == "Two"
-        assert kvalues.values_list["2"][1].value == "Two"
-
-        assert kvalues["3"].value == "Three"
-        assert kvalues["3bis"].value == "Three"
-
-        assert len(kvalues.values_list["words"]) == 4
-        assert kvalues.values_list["words"][0].value == "One"
-        assert kvalues.values_list["words"][1].value == "Two"
-        assert kvalues.values_list["words"][2].value == "Two"
-        assert kvalues.values_list["words"][3].value == "Three"
-
-    def test_chains(self):
-        input_string = "wordX 10 20 30 40 wordA, wordB, wordC 70 80 wordX"
-
-        matches = Matches(input_string=input_string)
-
-        matches.extend(RePattern(r"\d+", name="digit").matches(input_string))
-        matches.extend(RePattern("[a-zA-Z]+", name="word").matches(input_string))
-
-        assert len(matches) == 11
-
-        a_start = input_string.find('wordA')
-
-        b_start = input_string.find('wordB')
-        b_end = b_start + len('wordB')
-
-        c_start = input_string.find('wordC')
-        c_end = c_start + len('wordC')
-
-        chain_before = matches.chain_before(b_start, " ,", predicate=lambda match: match.name == "word")
-        assert len(chain_before) == 1
-        assert chain_before[0].value == 'wordA'
-
-        chain_before = matches.chain_before(Match(b_start, b_start), " ,", predicate=lambda match: match.name == "word")
-        assert len(chain_before) == 1
-        assert chain_before[0].value == 'wordA'
-
-        chain_before = matches.chain_before(b_start, " ,", predicate=lambda match: match.name == "digit")
-        assert len(chain_before) == 0
-
-        chain_before = matches.chain_before(a_start, " ,", predicate=lambda match: match.name == "digit")
-        assert len(chain_before) == 4
-        assert [match.value for match in chain_before] == ["40", "30", "20", "10"]
-
-        chain_after = matches.chain_after(b_end, " ,", predicate=lambda match: match.name == "word")
-        assert len(chain_after) == 1
-        assert chain_after[0].value == 'wordC'
-
-        chain_after = matches.chain_after(Match(b_end, b_end), " ,", predicate=lambda match: match.name == "word")
-        assert len(chain_after) == 1
-        assert chain_after[0].value == 'wordC'
-
-        chain_after = matches.chain_after(b_end, " ,", predicate=lambda match: match.name == "digit")
-        assert len(chain_after) == 0
-
-        chain_after = matches.chain_after(c_end, " ,", predicate=lambda match: match.name == "digit")
-        assert len(chain_after) == 2
-        assert [match.value for match in chain_after] == ["70", "80"]
-
-        chain_after = matches.chain_after(c_end, " ,", end=10000, predicate=lambda match: match.name == "digit")
-        assert len(chain_after) == 2
-        assert [match.value for match in chain_after] == ["70", "80"]
-
-    def test_holes(self):
-        input_string = '1'*10+'2'*10+'3'*10+'4'*10+'5'*10+'6'*10+'7'*10
-
-        hole1 = Match(0, 10, input_string=input_string)
-        hole2 = Match(20, 30, input_string=input_string)
-        hole3 = Match(30, 40, input_string=input_string)
-        hole4 = Match(60, 70, input_string=input_string)
-
-        matches = Matches([hole1, hole2], input_string=input_string)
-        matches.append(hole3)
-        matches.append(hole4)
-
-        holes = list(matches.holes())
-        assert len(holes) == 2
-        assert holes[0].span == (10, 20)
-        assert holes[0].value == '2'*10
-        assert holes[1].span == (40, 60)
-        assert holes[1].value == '5' * 10 + '6' * 10
-
-        holes = list(matches.holes(5, 15))
-        assert len(holes) == 1
-        assert holes[0].span == (10, 15)
-        assert holes[0].value == '2'*5
-
-        holes = list(matches.holes(5, 15, formatter=lambda value: "formatted"))
-        assert len(holes) == 1
-        assert holes[0].span == (10, 15)
-        assert holes[0].value == "formatted"
-
-        holes = list(matches.holes(5, 15, predicate=lambda hole: False))
-        assert len(holes) == 0
-
-    def test_holes_empty(self):
-        input_string = "Test hole on empty matches"
-        matches = Matches(input_string=input_string)
-        holes = matches.holes()
-        assert len(holes) == 1
-        assert holes[0].value == input_string
-
-    def test_holes_seps(self):
-        input_string = "Test hole - with many separators + included"
-        match = StringPattern("many").matches(input_string)
-
-        matches = Matches(match, input_string)
-        holes = matches.holes()
-
-        assert len(holes) == 2
-
-        holes = matches.holes(seps="-+")
-
-        assert len(holes) == 4
-        assert [hole.value for hole in holes] == ["Test hole ", " with ", " separators ", " included"]
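
The Matches tests above cover the container side of the API: a Matches object indexes Match objects by position, name and tag, exposes unmatched regions through holes(), and can be flattened with to_dict(). A small sketch of those two calls, assuming the behaviour asserted above; the top-level `rebulk.match` / `rebulk.pattern` import paths, the input string and the expected values are illustrative:

    from rebulk.match import Matches
    from rebulk.pattern import RePattern

    input_string = "One 1 Two 2"
    matches = Matches(input_string=input_string)
    matches.extend(RePattern(r"\d+", name="digit").matches(input_string))

    holes = matches.holes()                 # unmatched regions of the input
    print([hole.value for hole in holes])   # expected: ['One ', ' Two ']
    print(matches.to_dict())                # expected: {'digit': '1'}
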
diff --git a/lib/rebulk/test/test_pattern.py b/lib/rebulk/test/test_pattern.py
deleted file mode 100644
index fadca5f2c3ed0cec152fd249a0a8d8879782be30..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_pattern.py
+++ /dev/null
@@ -1,848 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, unbalanced-tuple-unpacking
-
-import re
-import pytest
-
-from ..pattern import StringPattern, RePattern, FunctionalPattern, REGEX_AVAILABLE
-from ..match import Match
-
-class TestStringPattern(object):
-    """
-    Tests for StringPattern matching
-    """
-
-    input_string = "An Abyssinian fly playing a Celtic violin was annoyed by trashy flags on " \
-                   "which were the Hebrew letter qoph."
-
-    def test_single(self):
-        pattern = StringPattern("Celtic")
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (28, 34)
-        assert matches[0].value == "Celtic"
-
-    def test_repr(self):
-        pattern = StringPattern("Celtic")
-
-        assert repr(pattern) == '<StringPattern:(\'Celtic\',)>'
-
-    def test_ignore_case(self):
-        pattern = StringPattern("celtic", ignore_case=False)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-        pattern = StringPattern("celtic", ignore_case=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert matches[0].value == "Celtic"
-
-    def test_private_names(self):
-        pattern = StringPattern("celtic", name="test", private_names=["test"], ignore_case=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert matches[0].private
-
-    def test_ignore_names(self):
-        pattern = StringPattern("celtic", name="test", ignore_names=["test"], ignore_case=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-    def test_no_match(self):
-        pattern = StringPattern("Python")
-
-        matches = list(pattern.matches(self.input_string))
-        assert not matches
-
-    def test_multiple_patterns(self):
-        pattern = StringPattern("playing", "annoyed", "Hebrew")
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 3
-
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (18, 25)
-        assert matches[0].value == "playing"
-
-        assert isinstance(matches[1], Match)
-        assert matches[1].pattern == pattern
-        assert matches[1].span == (46, 53)
-        assert matches[1].value == "annoyed"
-
-        assert isinstance(matches[2], Match)
-        assert matches[2].pattern == pattern
-        assert matches[2].span == (88, 94)
-        assert matches[2].value == "Hebrew"
-
-    def test_start_end_kwargs(self):
-        pattern = StringPattern("Abyssinian", start=20, end=40)
-        matches = list(pattern.matches(self.input_string))
-
-        assert len(matches) == 0
-
-    def test_matches_kwargs(self):
-        pattern = StringPattern("Abyssinian", name="test", value="AB")
-        matches = list(pattern.matches(self.input_string))
-
-        assert len(matches) == 1
-        assert matches[0].name == "test"
-        assert matches[0].value == "AB"
-
-
-class TestRePattern(object):
-    """
-    Tests for RePattern matching
-    """
-
-    input_string = "An Abyssinian fly playing a Celtic violin was annoyed by trashy flags on " \
-                   "which were the Hebrew letter qoph."
-
-    def test_single_compiled(self):
-        pattern = RePattern(re.compile("Celt.?c"))
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (28, 34)
-        assert matches[0].value == "Celtic"
-
-    def test_single_string(self):
-        pattern = RePattern("Celt.?c")
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (28, 34)
-        assert matches[0].value == "Celtic"
-
-    def test_single_kwargs(self):
-        pattern = RePattern({"pattern": "celt.?c", "flags": re.IGNORECASE})
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (28, 34)
-        assert matches[0].value == "Celtic"
-
-    def test_single_vargs(self):
-        pattern = RePattern(("celt.?c", re.IGNORECASE))
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (28, 34)
-        assert matches[0].value == "Celtic"
-
-    def test_no_match(self):
-        pattern = RePattern("abc.?def")
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-    def test_shortcuts(self):
-        pattern = RePattern("Celtic-violin", abbreviations=[("-", r"[\W_]+")])
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-        pattern = RePattern({"pattern": "celtic-violin", "flags": re.IGNORECASE}, abbreviations=[("-", r"[\W_]+")])
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-    def test_multiple_patterns(self):
-        pattern = RePattern("pla.?ing", "ann.?yed", "Heb.?ew")
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 3
-
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (18, 25)
-        assert matches[0].value == "playing"
-
-        assert isinstance(matches[1], Match)
-        assert matches[1].pattern == pattern
-        assert matches[1].span == (46, 53)
-        assert matches[1].value == "annoyed"
-
-        assert isinstance(matches[2], Match)
-        assert matches[2].pattern == pattern
-        assert matches[2].span == (88, 94)
-        assert matches[2].value == "Hebrew"
-
-    def test_unnamed_groups(self):
-        pattern = RePattern(r"(Celt.?c)\s+(\w+)")
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-        parent = matches[0]
-
-        assert isinstance(parent, Match)
-        assert parent.pattern == pattern
-        assert parent.span == (28, 41)
-        assert parent.name is None
-        assert parent.value == "Celtic violin"
-
-        assert len(parent.children) == 2
-
-        group1, group2 = parent.children
-
-        assert isinstance(group1, Match)
-        assert group1.pattern == pattern
-        assert group1.span == (28, 34)
-        assert group1.name is None
-        assert group1.value == "Celtic"
-        assert group1.parent == parent
-
-        assert isinstance(group2, Match)
-        assert group2.pattern == pattern
-        assert group2.span == (35, 41)
-        assert group2.name is None
-        assert group2.value == "violin"
-        assert group2.parent == parent
-
-    def test_named_groups(self):
-        pattern = RePattern(r"(?P<param1>Celt.?c)\s+(?P<param2>\w+)")
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-        parent = matches[0]
-
-        assert isinstance(parent, Match)
-        assert parent.pattern == pattern
-        assert parent.span == (28, 41)
-        assert parent.name is None
-        assert parent.value == "Celtic violin"
-
-        assert len(parent.children) == 2
-        group1, group2 = parent.children
-
-        assert isinstance(group1, Match)
-        assert group1.pattern == pattern
-        assert group1.span == (28, 34)
-        assert group1.name == "param1"
-        assert group1.value == "Celtic"
-        assert group1.parent == parent
-
-        assert isinstance(group2, Match)
-        assert group2.pattern == pattern
-        assert group2.span == (35, 41)
-        assert group2.name == "param2"
-        assert group2.value == "violin"
-        assert group2.parent == parent
-
-    def test_children(self):
-        pattern = RePattern(r"(?P<param1>Celt.?c)\s+(?P<param2>\w+)", children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 2
-        group1, group2 = matches
-
-        assert isinstance(group1, Match)
-        assert group1.pattern == pattern
-        assert group1.span == (28, 34)
-        assert group1.name == "param1"
-        assert group1.value == "Celtic"
-
-        assert isinstance(group2, Match)
-        assert group2.pattern == pattern
-        assert group2.span == (35, 41)
-        assert group2.name == "param2"
-        assert group2.value == "violin"
-
-    def test_children_parent_private(self):
-        pattern = RePattern(r"(?P<param1>Celt.?c)\s+(?P<param2>\w+)", children=True, private_parent=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 3
-        parent, group1, group2 = matches
-
-        assert isinstance(group1, Match)
-        assert parent.private
-        assert parent.pattern == pattern
-        assert parent.span == (28, 41)
-        assert parent.name is None
-        assert parent.value == "Celtic violin"
-
-        assert isinstance(group1, Match)
-        assert not group1.private
-        assert group1.pattern == pattern
-        assert group1.span == (28, 34)
-        assert group1.name == "param1"
-        assert group1.value == "Celtic"
-
-        assert isinstance(group2, Match)
-        assert not group2.private
-        assert group2.pattern == pattern
-        assert group2.span == (35, 41)
-        assert group2.name == "param2"
-        assert group2.value == "violin"
-
-    def test_parent_children_private(self):
-        pattern = RePattern(r"(?P<param1>Celt.?c)\s+(?P<param2>\w+)", private_children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 3
-        parent, group1, group2 = matches
-
-        assert isinstance(group1, Match)
-        assert not parent.private
-        assert parent.pattern == pattern
-        assert parent.span == (28, 41)
-        assert parent.name is None
-        assert parent.value == "Celtic violin"
-
-        assert isinstance(group1, Match)
-        assert group1.private
-        assert group1.pattern == pattern
-        assert group1.span == (28, 34)
-        assert group1.name == "param1"
-        assert group1.value == "Celtic"
-
-        assert isinstance(group2, Match)
-        assert group2.private
-        assert group2.pattern == pattern
-        assert group2.span == (35, 41)
-        assert group2.name == "param2"
-        assert group2.value == "violin"
-
-    def test_every(self):
-        pattern = RePattern(r"(?P<param1>Celt.?c)\s+(?P<param2>\w+)", every=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 3
-        parent, group1, group2 = matches
-
-        assert isinstance(group1, Match)
-        assert not parent.private
-        assert parent.pattern == pattern
-        assert parent.span == (28, 41)
-        assert parent.name is None
-        assert parent.value == "Celtic violin"
-
-        assert isinstance(group1, Match)
-        assert not group1.private
-        assert group1.pattern == pattern
-        assert group1.span == (28, 34)
-        assert group1.name == "param1"
-        assert group1.value == "Celtic"
-
-        assert isinstance(group2, Match)
-        assert not group2.private
-        assert group2.pattern == pattern
-        assert group2.span == (35, 41)
-        assert group2.name == "param2"
-        assert group2.value == "violin"
-
-    def test_private_names(self):
-        pattern = RePattern(r"(?P<param1>Celt.?c)\s+(?P<param2>\w+)", private_names=["param2"], children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 2
-        assert matches[0].name == "param1"
-        assert not matches[0].private
-        assert matches[1].name == "param2"
-        assert matches[1].private
-
-    def test_ignore_names(self):
-        pattern = RePattern(r"(?P<param1>Celt.?c)\s+(?P<param2>\w+)", ignore_names=["param2"], children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert matches[0].name == "param1"
-
-    def test_matches_kwargs(self):
-        pattern = RePattern("He.rew", name="test", value="HE")
-        matches = list(pattern.matches(self.input_string))
-
-        assert len(matches) == 1
-        assert matches[0].name == "test"
-        assert matches[0].value == "HE"
-
-        pattern = RePattern("H(e.)(rew)", name="test", value="HE")
-        matches = list(pattern.matches(self.input_string))
-
-        assert len(matches) == 1
-        assert matches[0].name == "test"
-        assert matches[0].value == "HE"
-
-        children = matches[0].children
-        assert len(children) == 2
-        assert children[0].name == "test"
-        assert children[0].value == "HE"
-
-        assert children[1].name == "test"
-        assert children[1].value == "HE"
-
-        pattern = RePattern("H(?P<first>e.)(?P<second>rew)", name="test", value="HE")
-        matches = list(pattern.matches(self.input_string))
-
-        assert len(matches) == 1
-        assert matches[0].name == "test"
-        assert matches[0].value == "HE"
-
-        children = matches[0].children
-        assert len(children) == 2
-        assert children[0].name == "first"
-        assert children[0].value == "HE"
-
-        assert children[1].name == "second"
-        assert children[1].value == "HE"
-
-
-class TestFunctionalPattern(object):
-    """
-    Tests for FunctionalPattern matching
-    """
-
-    input_string = "An Abyssinian fly playing a Celtic violin was annoyed by trashy flags on " \
-                   "which were the Hebrew letter qoph."
-
-    def test_single_vargs(self):
-        def func(input_string):
-            i = input_string.find("fly")
-            if i > -1:
-                return i, i + len("fly"), "fly", "functional"
-
-        pattern = FunctionalPattern(func)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (14, 17)
-        assert matches[0].name == "functional"
-        assert matches[0].value == "fly"
-
-    def test_single_kwargs(self):
-        def func(input_string):
-            i = input_string.find("fly")
-            if i > -1:
-                return {"start": i, "end": i + len("fly"), "name": "functional"}
-
-        pattern = FunctionalPattern(func)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (14, 17)
-        assert matches[0].name == "functional"
-        assert matches[0].value == "fly"
-
-    def test_multiple_objects(self):
-        def func(input_string):
-            i = input_string.find("fly")
-            matches = []
-            if i > -1:
-                matches.append((i, i + len("fly"), {'name': "functional"}))
-                i = input_string.find("annoyed")
-            if i > -1:
-                matches.append((i, i + len("annoyed")))
-            i = input_string.find("Hebrew")
-            if i > -1:
-                matches.append({"start": i, "end": i + len("Hebrew")})
-            return matches
-
-        pattern = FunctionalPattern(func)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 3
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (14, 17)
-        assert matches[0].name == "functional"
-        assert matches[0].value == "fly"
-
-        assert isinstance(matches[1], Match)
-        assert matches[1].pattern == pattern
-        assert matches[1].span == (46, 53)
-        assert matches[1].value == "annoyed"
-
-        assert isinstance(matches[2], Match)
-        assert matches[2].pattern == pattern
-        assert matches[2].span == (88, 94)
-        assert matches[2].value == "Hebrew"
-
-    def test_multiple_generator(self):
-        def func(input_string):
-            i = input_string.find("fly")
-            if i > -1:
-                yield (i, i + len("fly"), {'name': "functional"})
-            i = input_string.find("annoyed")
-            if i > -1:
-                yield (i, i + len("annoyed"))
-            i = input_string.find("Hebrew")
-            if i > -1:
-                yield (i, {"end": i + len("Hebrew")})
-
-        pattern = FunctionalPattern(func)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 3
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (14, 17)
-        assert matches[0].name == "functional"
-        assert matches[0].value == "fly"
-
-        assert isinstance(matches[1], Match)
-        assert matches[1].pattern == pattern
-        assert matches[1].span == (46, 53)
-        assert matches[1].value == "annoyed"
-
-        assert isinstance(matches[2], Match)
-        assert matches[2].pattern == pattern
-        assert matches[2].span == (88, 94)
-        assert matches[2].value == "Hebrew"
-
-    def test_no_match(self):
-        pattern = FunctionalPattern(lambda x: None)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-    def test_multiple_patterns(self):
-        def playing(input_string):
-            i = input_string.find("playing")
-            if i > -1:
-                return i, i + len("playing")
-
-        def annoyed(input_string):
-            i = input_string.find("annoyed")
-            if i > -1:
-                return i, i + len("annoyed")
-
-        def hebrew(input_string):
-            i = input_string.find("Hebrew")
-            if i > -1:
-                return i, i + len("Hebrew")
-
-        pattern = FunctionalPattern(playing, annoyed, hebrew)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 3
-
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (18, 25)
-        assert matches[0].value == "playing"
-
-        assert isinstance(matches[1], Match)
-        assert matches[1].pattern == pattern
-        assert matches[1].span == (46, 53)
-        assert matches[1].value == "annoyed"
-
-        assert isinstance(matches[2], Match)
-        assert matches[2].pattern == pattern
-        assert matches[2].span == (88, 94)
-        assert matches[2].value == "Hebrew"
-
-    def test_matches_kwargs(self):
-        def playing(input_string):
-            i = input_string.find("playing")
-            if i > -1:
-                return i, i + len("playing")
-
-        pattern = FunctionalPattern(playing, name="test", value="PLAY")
-        matches = list(pattern.matches(self.input_string))
-
-        assert len(matches) == 1
-        assert matches[0].name == "test"
-        assert matches[0].value == "PLAY"
-
-
-class TestValue(object):
-    """
-    Tests for value option
-    """
-
-    input_string = "This string contains 1849 a number"
-
-    def test_str_value(self):
-        pattern = StringPattern("1849", name="dummy", value="test")
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (21, 25)
-        assert matches[0].value == "test"
-
-    def test_dict_child_value(self):
-        pattern = RePattern(r"(?P<strParam>cont.?ins)\s+(?P<intParam>\d+)",
-                            formatter={'intParam': lambda x: int(x) * 2,
-                                       'strParam': lambda x: "really " + x},
-                            format_all=True,
-                            value={'intParam': 'INT_PARAM_VALUE'})
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-        parent = matches[0]
-        assert len(parent.children) == 2
-
-        group1, group2 = parent.children
-
-        assert isinstance(group1, Match)
-        assert group1.pattern == pattern
-        assert group1.span == (12, 20)
-        assert group1.value == "really contains"
-
-        assert isinstance(group2, Match)
-        assert group2.pattern == pattern
-        assert group2.span == (21, 25)
-        assert group2.value == 'INT_PARAM_VALUE'
-
-    def test_dict_default_value(self):
-        pattern = RePattern(r"(?P<strParam>cont.?ins)\s+(?P<intParam>\d+)",
-                            formatter={'intParam': lambda x: int(x) * 2,
-                                       'strParam': lambda x: "really " + x},
-                            format_all=True,
-                            value={'__children__': 'CHILD', 'strParam': 'STR_VALUE', '__parent__': 'PARENT'})
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-        parent = matches[0]
-        assert parent.value == "PARENT"
-        assert len(parent.children) == 2
-
-        group1, group2 = parent.children
-
-        assert isinstance(group1, Match)
-        assert group1.pattern == pattern
-        assert group1.span == (12, 20)
-        assert group1.value == "STR_VALUE"
-
-        assert isinstance(group2, Match)
-        assert group2.pattern == pattern
-        assert group2.span == (21, 25)
-        assert group2.value == "CHILD"
-
-
-class TestFormatter(object):
-    """
-    Tests for formatter option
-    """
-
-    input_string = "This string contains 1849 a number"
-
-    def test_single_string(self):
-        pattern = StringPattern("1849", name="dummy", formatter=lambda x: int(x) / 2)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (21, 25)
-        assert matches[0].value == 1849 / 2
-
-    def test_single_re_no_group(self):
-        pattern = RePattern(r"\d+", formatter=lambda x: int(x) * 2)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (21, 25)
-        assert matches[0].value == 1849 * 2
-
-    def test_single_re_named_groups(self):
-        pattern = RePattern(r"(?P<strParam>cont.?ins)\s+(?P<intParam>\d+)",
-                            formatter={'intParam': lambda x: int(x) * 2,
-                                       'strParam': lambda x: "really " + x}, format_all=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-        parent = matches[0]
-        assert len(parent.children) == 2
-
-        group1, group2 = parent.children
-
-        assert isinstance(group1, Match)
-        assert group1.pattern == pattern
-        assert group1.span == (12, 20)
-        assert group1.value == "really contains"
-
-        assert isinstance(group2, Match)
-        assert group2.pattern == pattern
-        assert group2.span == (21, 25)
-        assert group2.value == 1849 * 2
-
-    def test_repeated_captures_option(self):
-        pattern = RePattern(r"\[(\d+)\](?:-(\d+))*")
-
-        matches = list(pattern.matches("[02]-03-04-05-06"))
-        assert len(matches) == 1
-
-        match = matches[0]
-        if REGEX_AVAILABLE:
-            assert len(match.children) == 5
-            assert [child.value for child in match.children] == ["02", "03", "04", "05", "06"]
-        else:
-            assert len(match.children) == 2
-            assert [child.value for child in match.children] == ["02", "06"]
-
-            with pytest.raises(NotImplementedError):
-                RePattern(r"\[(\d+)\](?:-(\d+))*", repeated_captures=True)
-
-        pattern = RePattern(r"\[(\d+)\](?:-(\d+))*", repeated_captures=False)
-
-        matches = list(pattern.matches("[02]-03-04-05-06"))
-        assert len(matches) == 1
-
-        match = matches[0]
-        assert len(match.children) == 2
-        assert [child.value for child in match.children] == ["02", "06"]
-
-    def test_single_functional(self):
-        def digit(input_string):
-            i = input_string.find("1849")
-            if i > -1:
-                return i, i + len("1849")
-
-        pattern = FunctionalPattern(digit, formatter=lambda x: int(x) * 3)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        assert isinstance(matches[0], Match)
-        assert matches[0].pattern == pattern
-        assert matches[0].span == (21, 25)
-        assert matches[0].value == 1849 * 3
-
-
-class TestValidator(object):
-    """
-    Tests for validator option
-    """
-
-    input_string = "This string contains 1849 a number"
-
-    @staticmethod
-    def true_validator(match):
-        return int(match.value) < 1850
-
-    @staticmethod
-    def false_validator(match):
-        return int(match.value) >= 1850
-
-    def test_single_string(self):
-        pattern = StringPattern("1849", name="dummy", validator=self.false_validator)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-        pattern = StringPattern("1849", validator=self.true_validator)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-    def test_single_re_no_group(self):
-        pattern = RePattern(r"\d+", validator=self.false_validator)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-        pattern = RePattern(r"\d+", validator=self.true_validator)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-    def test_single_re_named_groups(self):
-        pattern = RePattern(r"(?P<strParam>cont.?ins)\s+(?P<intParam>\d+)",
-                            validator={'intParam': self.false_validator}, validate_all=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-        pattern = RePattern(r"(?P<strParam>cont.?ins)\s+(?P<intParam>\d+)",
-                            validator={'intParam': self.true_validator}, validate_all=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-    def test_validate_all(self):
-        pattern = RePattern(r"contains (?P<intParam>\d+)", formatter=int, validator=lambda match: match.value < 100,
-                            children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-        pattern = RePattern(r"contains (?P<intParam>\d+)", formatter=int, validator=lambda match: match.value > 100,
-                            children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-        def invalid_func(match):
-            if match.name == 'intParam':
-                return True
-            else:
-                return match.value.startswith('abc')
-
-        pattern = RePattern(r"contains (?P<intParam>\d+)", formatter=int, validator=invalid_func, validate_all=True,
-                            children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-        def func(match):
-            if match.name == 'intParam':
-                return True
-            else:
-                return match.value.startswith('contains')
-
-        pattern = RePattern(r"contains (?P<intParam>\d+)", formatter=int, validator=func, validate_all=True,
-                            children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-
-    def test_format_all(self):
-        pattern = RePattern(r"contains (?P<intParam>\d+)", formatter=int,
-                            children=True)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
-        for match in matches:
-            assert match.value is not None
-
-        with pytest.raises(ValueError):
-            pattern = RePattern(r"contains (?P<intParam>\d+)", formatter=int, format_all=True)
-            matches = list(pattern.matches(self.input_string))
-            for match in matches:
-                assert match.value is not None
-
-    def test_single_functional(self):
-        def digit(input_string):
-            i = input_string.find("1849")
-            if i > -1:
-                return i, i + len("1849")
-
-        pattern = FunctionalPattern(digit, validator=self.false_validator)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 0
-
-        pattern = FunctionalPattern(digit, validator=self.true_validator)
-
-        matches = list(pattern.matches(self.input_string))
-        assert len(matches) == 1
diff --git a/lib/rebulk/test/test_processors.py b/lib/rebulk/test/test_processors.py
deleted file mode 100644
index 7afd45352557d0b9d11e5035142efcbd994a39e8..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_processors.py
+++ /dev/null
@@ -1,215 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, no-member
-
-from ..pattern import StringPattern, RePattern
-from ..processors import ConflictSolver
-from ..rules import execute_rule
-from ..match import Matches
-
-
-def test_conflict_1():
-    input_string = "abcdefghijklmnopqrstuvwxyz"
-
-    pattern = StringPattern("ijklmn", "kl", "abcdef", "ab", "ef", "yz")
-    matches = Matches(pattern.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-
-    values = [x.value for x in matches]
-
-    assert values == ["ijklmn", "abcdef", "yz"]
-
-
-def test_conflict_2():
-    input_string = "abcdefghijklmnopqrstuvwxyz"
-
-    pattern = StringPattern("ijklmn", "jklmnopqrst")
-    matches = Matches(pattern.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-
-    values = [x.value for x in matches]
-
-    assert values == ["jklmnopqrst"]
-
-
-def test_conflict_3():
-    input_string = "abcdefghijklmnopqrstuvwxyz"
-
-    pattern = StringPattern("ijklmnopqrst", "jklmnopqrst")
-    matches = Matches(pattern.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-
-    values = [x.value for x in matches]
-
-    assert values == ["ijklmnopqrst"]
-
-
-def test_conflict_4():
-    input_string = "123456789"
-
-    pattern = StringPattern("123", "456789")
-    matches = Matches(pattern.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-
-    values = [x.value for x in matches]
-    assert values == ["123", "456789"]
-
-
-def test_conflict_5():
-    input_string = "123456789"
-
-    pattern = StringPattern("123456", "789")
-    matches = Matches(pattern.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-
-    values = [x.value for x in matches]
-    assert values == ["123456", "789"]
-
-
-def test_prefer_longer_parent():
-    input_string = "xxx.1x02.xxx"
-
-    re1 = RePattern("([0-9]+)x([0-9]+)", name='prefer', children=True, formatter=int)
-    re2 = RePattern("x([0-9]+)", name='skip', children=True)
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 2
-    assert matches[0].value == 1
-    assert matches[1].value == 2
-
-
-def test_conflict_solver_1():
-    input_string = "123456789"
-
-    re1 = StringPattern("2345678", conflict_solver=lambda match, conflicting: '__default__')
-    re2 = StringPattern("34567")
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 1
-    assert matches[0].value == "2345678"
-
-
-def test_conflict_solver_2():
-    input_string = "123456789"
-
-    re1 = StringPattern("2345678", conflict_solver=lambda match, conflicting: '__default__')
-    re2 = StringPattern("34567", conflict_solver=lambda match, conflicting: conflicting)
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 1
-    assert matches[0].value == "34567"
-
-
-def test_conflict_solver_3():
-    input_string = "123456789"
-
-    re1 = StringPattern("2345678", conflict_solver=lambda match, conflicting: match)
-    re2 = StringPattern("34567")
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 1
-    assert matches[0].value == "34567"
-
-
-def test_conflict_solver_4():
-    input_string = "123456789"
-
-    re1 = StringPattern("2345678")
-    re2 = StringPattern("34567", conflict_solver=lambda match, conflicting: conflicting)
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 1
-    assert matches[0].value == "34567"
-
-
-def test_conflict_solver_5():
-    input_string = "123456789"
-
-    re1 = StringPattern("2345678", conflict_solver=lambda match, conflicting: conflicting)
-    re2 = StringPattern("34567")
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 1
-    assert matches[0].value == "2345678"
-
-
-def test_conflict_solver_6():
-    input_string = "123456789"
-
-    re1 = StringPattern("2345678")
-    re2 = StringPattern("34567", conflict_solver=lambda match, conflicting: conflicting)
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 1
-    assert matches[0].value == "34567"
-
-
-def test_conflict_solver_7():
-    input_string = "102"
-
-    re1 = StringPattern("102")
-    re2 = StringPattern("02")
-
-    matches = Matches(re2.matches(input_string))
-    matches.extend(re1.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 1
-    assert matches[0].value == "102"
-
-
-def test_unresolved():
-    input_string = "123456789"
-
-    re1 = StringPattern("23456")
-    re2 = StringPattern("34567")
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 2
-
-    re1 = StringPattern("34567")
-    re2 = StringPattern("2345678", conflict_solver=lambda match, conflicting: None)
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 2
-
-    re1 = StringPattern("34567", conflict_solver=lambda match, conflicting: None)
-    re2 = StringPattern("2345678")
-
-    matches = Matches(re1.matches(input_string))
-    matches.extend(re2.matches(input_string))
-
-    execute_rule(ConflictSolver(), matches, None)
-    assert len(matches) == 2
diff --git a/lib/rebulk/test/test_rebulk.py b/lib/rebulk/test/test_rebulk.py
deleted file mode 100644
index bf0bc9669415cd5a92d2d26a20e986ecfe7d82c0..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_rebulk.py
+++ /dev/null
@@ -1,419 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, no-member
-
-from ..rebulk import Rebulk
-from ..rules import Rule
-from . import rebulk_rules_module as rm
-
-
-def test_rebulk_simple():
-    rebulk = Rebulk()
-
-    rebulk.string("quick")
-    rebulk.regex("f.x")
-
-    def func(input_string):
-        i = input_string.find("over")
-        if i > -1:
-            return i, i + len("over")
-
-    rebulk.functional(func)
-
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    matches = rebulk.matches(input_string)
-    assert len(matches) == 3
-
-    assert matches[0].value == "quick"
-    assert matches[1].value == "fox"
-    assert matches[2].value == "over"
-
-
-def test_rebulk_composition():
-    rebulk = Rebulk()
-
-    rebulk.string("quick")
-    rebulk.rebulk(Rebulk().regex("f.x"))
-
-    rebulk.rebulk(Rebulk(disabled=lambda context: True).functional(lambda string: None))
-
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    matches = rebulk.matches(input_string)
-    assert len(matches) == 2
-
-    assert matches[0].value == "quick"
-    assert matches[1].value == "fox"
-
-
-def test_rebulk_context():
-    rebulk = Rebulk()
-
-    context = {'nostring': True, 'word': 'lazy'}
-
-    rebulk.string("quick", disabled=lambda context: context.get('nostring', False))
-    rebulk.regex("f.x", disabled=lambda context: context.get('noregex', False))
-
-    def func(input_string, context):
-        word = context.get('word', 'over')
-        i = input_string.find(word)
-        if i > -1:
-            return i, i + len(word)
-
-    rebulk.functional(func)
-
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    matches = rebulk.matches(input_string, context)
-    assert len(matches) == 2
-
-    assert matches[0].value == "fox"
-    assert matches[1].value == "lazy"
-
-
-def test_rebulk_prefer_longer():
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    matches = Rebulk().string("quick").string("own").regex("br.{2}n").matches(input_string)
-
-    assert len(matches) == 2
-
-    assert matches[0].value == "quick"
-    assert matches[1].value == "brown"
-
-
-def test_rebulk_defaults():
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    def func(input_string):
-        i = input_string.find("fox")
-        if i > -1:
-            return i, i + len("fox")
-
-    matches = Rebulk()\
-        .string_defaults(name="string", tags=["a", "b"])\
-        .regex_defaults(name="regex") \
-        .functional_defaults(name="functional") \
-        .string("quick", tags=["c"])\
-        .functional(func)\
-        .regex("br.{2}n") \
-        .matches(input_string)
-    assert matches[0].name == "string"
-    assert matches[0].tags == ["a", "b", "c"]
-    assert matches[1].name == "functional"
-    assert matches[2].name == "regex"
-
-    matches = Rebulk() \
-        .defaults(name="default", tags=["0"])\
-        .string_defaults(name="string", tags=["a", "b"]) \
-        .functional_defaults(name="functional", tags=["1"]) \
-        .string("quick", tags=["c"]) \
-        .functional(func) \
-        .regex("br.{2}n") \
-        .matches(input_string)
-    assert matches[0].name == "string"
-    assert matches[0].tags == ["0", "a", "b", "c"]
-    assert matches[1].name == "functional"
-    assert matches[1].tags == ["0", "1"]
-    assert matches[2].name == "default"
-    assert matches[2].tags == ["0"]
-
-
-def test_rebulk_rebulk():
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    base = Rebulk().string("quick")
-    child = Rebulk().string("own").regex("br.{2}n")
-
-    matches = base.rebulk(child).matches(input_string)
-
-    assert len(matches) == 2
-
-    assert matches[0].value == "quick"
-    assert matches[1].value == "brown"
-
-
-def test_rebulk_no_default():
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    matches = Rebulk(default_rules=False).string("quick").string("own").regex("br.{2}n").matches(input_string)
-
-    assert len(matches) == 3
-
-    assert matches[0].value == "quick"
-    assert matches[1].value == "own"
-    assert matches[2].value == "brown"
-
-
-def test_rebulk_empty_match():
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    matches = Rebulk(default_rules=False).string("quick").string("own").regex("br(.*?)own", children=True)\
-        .matches(input_string)
-
-    assert len(matches) == 2
-
-    assert matches[0].value == "quick"
-    assert matches[1].value == "own"
-
-
-def test_rebulk_tags_names():
-    rebulk = Rebulk()
-
-    rebulk.string("quick", name="str", tags=["first", "other"])
-    rebulk.regex("f.x", tags="other")
-
-    def func(input_string):
-        i = input_string.find("over")
-        if i > -1:
-            return i, i + len("over"), {'tags': ['custom']}
-
-    rebulk.functional(func, name="fn")
-
-    def func2(input_string):
-        i = input_string.find("lazy")
-        if i > -1:
-            return {'start': i, 'end': i + len("lazy"), 'tags': ['custom']}
-
-    rebulk.functional(func2, name="fn")
-
-    input_string = "The quick brown fox jumps over the lazy dog"
-
-    matches = rebulk.matches(input_string)
-    assert len(matches) == 4
-
-    assert len(matches.named("str")) == 1
-    assert len(matches.named("fn")) == 2
-    assert len(matches.named("false")) == 0
-    assert len(matches.tagged("false")) == 0
-    assert len(matches.tagged("first")) == 1
-    assert len(matches.tagged("other")) == 2
-    assert len(matches.tagged("custom")) == 2
-
-
-def test_rebulk_rules_1():
-    rebulk = Rebulk()
-
-    rebulk.regex(r'\d{4}', name="year")
-    rebulk.rules(rm.RemoveAllButLastYear)
-
-    matches = rebulk.matches("1984 keep only last 1968 entry 1982 case")
-    assert len(matches) == 1
-    assert matches[0].value == "1982"
-
-
-def test_rebulk_rules_2():
-    rebulk = Rebulk()
-
-    rebulk.regex(r'\d{4}', name="year")
-    rebulk.string(r'year', name="yearPrefix", private=True)
-    rebulk.string(r'keep', name="yearSuffix", private=True)
-    rebulk.rules(rm.PrefixedSuffixedYear)
-
-    matches = rebulk.matches("Keep suffix 1984 keep prefixed year 1968 and remove the rest 1982")
-    assert len(matches) == 2
-    assert matches[0].value == "1984"
-    assert matches[1].value == "1968"
-
-
-def test_rebulk_rules_3():
-    rebulk = Rebulk()
-
-    rebulk.regex(r'\d{4}', name="year")
-    rebulk.string(r'year', name="yearPrefix", private=True)
-    rebulk.string(r'keep', name="yearSuffix", private=True)
-    rebulk.rules(rm.PrefixedSuffixedYearNoLambda)
-
-    matches = rebulk.matches("Keep suffix 1984 keep prefixed year 1968 and remove the rest 1982")
-    assert len(matches) == 2
-    assert matches[0].value == "1984"
-    assert matches[1].value == "1968"
-
-
-def test_rebulk_rules_4():
-    class FirstOnlyRule(Rule):
-        def when(self, matches, context):
-            grabbed = matches.named("grabbed", 0)
-            if grabbed and matches.previous(grabbed):
-                return grabbed
-
-        def then(self, matches, when_response, context):
-            matches.remove(when_response)
-
-    rebulk = Rebulk()
-
-    rebulk.regex("This match (.*?)grabbed", name="grabbed")
-    rebulk.regex("if it's (.*?)first match", private=True)
-
-    rebulk.rules(FirstOnlyRule)
-
-    matches = rebulk.matches("This match is grabbed only if it's the first match")
-    assert len(matches) == 1
-    assert matches[0].value == "This match is grabbed"
-
-    matches = rebulk.matches("if it's NOT the first match, This match is NOT grabbed")
-    assert len(matches) == 0
-
-
-class TestMarkers(object):
-    def test_one_marker(self):
-        class MarkerRule(Rule):
-            def when(self, matches, context):
-                word_match = matches.named("word", 0)
-                marker = matches.markers.at_match(word_match, lambda marker: marker.name == "mark1", 0)
-                if not marker:
-                    return word_match
-
-            def then(self, matches, when_response, context):
-                matches.remove(when_response)
-
-        rebulk = Rebulk().regex(r'\(.*?\)', marker=True, name="mark1") \
-            .regex(r'\[.*?\]', marker=True, name="mark2") \
-            .string("word", name="word") \
-            .rules(MarkerRule)
-
-        matches = rebulk.matches("grab (word) only if it's in parenthesis")
-
-        assert len(matches) == 1
-        assert matches[0].value == "word"
-
-        matches = rebulk.matches("don't grab [word] if it's in braket")
-        assert len(matches) == 0
-
-        matches = rebulk.matches("don't grab word at all")
-        assert len(matches) == 0
-
-    def test_multiple_marker(self):
-        class MarkerRule(Rule):
-            def when(self, matches, context):
-                word_match = matches.named("word", 0)
-                marker = matches.markers.at_match(word_match,
-                                                  lambda marker: marker.name == "mark1" or marker.name == "mark2")
-                if len(marker) < 2:
-                    return word_match
-
-            def then(self, matches, when_response, context):
-                matches.remove(when_response)
-
-        rebulk = Rebulk().regex(r'\(.*?\)', marker=True, name="mark1") \
-            .regex(r'\[.*?\]', marker=True, name="mark2") \
-            .regex("w.*?d", name="word") \
-            .rules(MarkerRule)
-
-        matches = rebulk.matches("[grab (word) only] if it's in parenthesis and brakets")
-
-        assert len(matches) == 1
-        assert matches[0].value == "word"
-
-        matches = rebulk.matches("[don't grab](word)[if brakets are outside]")
-        assert len(matches) == 0
-
-        matches = rebulk.matches("(grab w[or)d even] if it's partially in parenthesis and brakets")
-        assert len(matches) == 1
-        assert matches[0].value == "w[or)d"
-
-    def test_at_index_marker(self):
-        class MarkerRule(Rule):
-            def when(self, matches, context):
-                word_match = matches.named("word", 0)
-                marker = matches.markers.at_index(word_match.start,
-                                                  lambda marker: marker.name == "mark1", 0)
-                if not marker:
-                    return word_match
-
-            def then(self, matches, when_response, context):
-                matches.remove(when_response)
-
-        rebulk = Rebulk().regex(r'\(.*?\)', marker=True, name="mark1") \
-            .regex("w.*?d", name="word") \
-            .rules(MarkerRule)
-
-        matches = rebulk.matches("gr(ab wo)rd only if starting of match is inside parenthesis")
-
-        assert len(matches) == 1
-        assert matches[0].value == "wo)rd"
-
-        matches = rebulk.matches("don't grab wo(rd if starting of match is not inside parenthesis")
-
-        assert len(matches) == 0
-
-    def test_remove_marker(self):
-        class MarkerRule(Rule):
-            def when(self, matches, context):
-                marker = matches.markers.named("mark1", 0)
-                if marker:
-                    return marker
-
-            def then(self, matches, when_response, context):
-                matches.markers.remove(when_response)
-
-        rebulk = Rebulk().regex(r'\(.*?\)', marker=True, name="mark1") \
-            .regex("w.*?d", name="word") \
-            .rules(MarkerRule)
-
-        matches = rebulk.matches("grab word event (if it's not) inside parenthesis")
-
-        assert len(matches) == 1
-        assert matches[0].value == "word"
-
-        assert not matches.markers
-
-
-class TestUnicode(object):
-    def test_rebulk_simple(self):
-        input_string = u"敏捷的棕色狐狸跳過懶狗"
-
-        rebulk = Rebulk()
-
-        rebulk.string(u"敏")
-        rebulk.regex(u"捷")
-
-        def func(input_string):
-            i = input_string.find(u"的")
-            if i > -1:
-                return i, i + len(u"的")
-
-        rebulk.functional(func)
-
-        matches = rebulk.matches(input_string)
-        assert len(matches) == 3
-
-        assert matches[0].value == u"敏"
-        assert matches[1].value == u"捷"
-        assert matches[2].value == u"的"
-
-
-class TestImmutable(object):
-    def test_starting(self):
-        input_string = "The quick brown fox jumps over the lazy dog"
-        matches = Rebulk().string("quick").string("over").string("fox").matches(input_string)
-
-        for i in range(0, len(input_string)):
-            starting = matches.starting(i)
-            for match in list(starting):
-                starting.remove(match)
-
-        assert len(matches) == 3
-
-    def test_ending(self):
-        input_string = "The quick brown fox jumps over the lazy dog"
-        matches = Rebulk().string("quick").string("over").string("fox").matches(input_string)
-
-        for i in range(0, len(input_string)):
-            ending = matches.ending(i)
-            for match in list(ending):
-                ending.remove(match)
-
-        assert len(matches) == 3
-
-    def test_named(self):
-        input_string = "The quick brown fox jumps over the lazy dog"
-        matches = Rebulk().defaults(name='test').string("quick").string("over").string("fox").matches(input_string)
-
-        named = matches.named('test')
-        for match in list(named):
-            named.remove(match)
-
-        assert len(named) == 0
-        assert len(matches) == 3
diff --git a/lib/rebulk/test/test_rules.py b/lib/rebulk/test/test_rules.py
deleted file mode 100644
index 47b6f5fcacf8948d6816bf04a201f08bf763299c..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_rules.py
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name, no-member
-import pytest
-from rebulk.test.default_rules_module import RuleRemove0, RuleAppend0, RuleRename0, RuleAppend1, RuleRemove1, \
-    RuleRename1, RuleAppend2, RuleRename2, RuleAppend3, RuleRename3, RuleAppendTags0, RuleRemoveTags0, \
-    RuleAppendTags1, RuleRemoveTags1
-
-from ..rules import Rules
-from ..match import Matches, Match
-
-from .rules_module import Rule1, Rule2, Rule3, Rule0, Rule1Disabled
-from . import rules_module as rm
-
-
-def test_rule_priority():
-    matches = Matches([Match(1, 2)])
-
-    rules = Rules(Rule1, Rule2())
-
-    rules.execute_all_rules(matches, {})
-    assert len(matches) == 0
-    matches = Matches([Match(1, 2)])
-
-    rules = Rules(Rule1(), Rule0)
-
-    rules.execute_all_rules(matches, {})
-    assert len(matches) == 1
-    assert matches[0] == Match(3, 4)
-
-
-def test_rules_duplicates():
-    matches = Matches([Match(1, 2)])
-
-    rules = Rules(Rule1, Rule1)
-
-    with pytest.raises(ValueError):
-        rules.execute_all_rules(matches, {})
-
-
-def test_rule_disabled():
-    matches = Matches([Match(1, 2)])
-
-    rules = Rules(Rule1Disabled(), Rule2())
-
-    rules.execute_all_rules(matches, {})
-    assert len(matches) == 2
-    assert matches[0] == Match(1, 2)
-    assert matches[1] == Match(3, 4)
-
-
-def test_rule_when():
-    matches = Matches([Match(1, 2)])
-
-    rules = Rules(Rule3())
-
-    rules.execute_all_rules(matches, {'when': False})
-    assert len(matches) == 1
-    assert matches[0] == Match(1, 2)
-
-    matches = Matches([Match(1, 2)])
-
-    rules.execute_all_rules(matches, {'when': True})
-    assert len(matches) == 2
-    assert matches[0] == Match(1, 2)
-    assert matches[1] == Match(3, 4)
-
-
-class TestDefaultRules(object):
-    def test_remove(self):
-        rules = Rules(RuleRemove0)
-
-        matches = Matches([Match(1, 2)])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches) == 0
-
-        rules = Rules(RuleRemove1)
-
-        matches = Matches([Match(1, 2)])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches) == 0
-
-    def test_append(self):
-        rules = Rules(RuleAppend0)
-
-        matches = Matches([Match(1, 2)])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches) == 2
-
-        rules = Rules(RuleAppend1)
-
-        matches = Matches([Match(1, 2)])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches) == 2
-
-        rules = Rules(RuleAppend2)
-
-        matches = Matches([Match(1, 2)])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches) == 2
-        assert len(matches.named('renamed')) == 1
-
-        rules = Rules(RuleAppend3)
-
-        matches = Matches([Match(1, 2)])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches) == 2
-        assert len(matches.named('renamed')) == 1
-
-    def test_rename(self):
-        rules = Rules(RuleRename0)
-
-        matches = Matches([Match(1, 2, name='original')])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches.named('original')) == 1
-        assert len(matches.named('renamed')) == 0
-
-        rules = Rules(RuleRename1)
-
-        matches = Matches([Match(5, 10, name='original')])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches.named('original')) == 0
-        assert len(matches.named('renamed')) == 1
-
-        rules = Rules(RuleRename2)
-
-        matches = Matches([Match(5, 10, name='original')])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches.named('original')) == 0
-        assert len(matches.named('renamed')) == 1
-
-        rules = Rules(RuleRename3)
-
-        matches = Matches([Match(5, 10, name='original')])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches.named('original')) == 0
-        assert len(matches.named('renamed')) == 1
-
-    def test_append_tags(self):
-        rules = Rules(RuleAppendTags0)
-
-        matches = Matches([Match(1, 2, name='tags', tags=['other'])])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches.named('tags')) == 1
-        assert matches.named('tags', index=0).tags == ['other', 'new-tag']
-
-        rules = Rules(RuleAppendTags1)
-
-        matches = Matches([Match(1, 2, name='tags', tags=['other'])])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches.named('tags')) == 1
-        assert matches.named('tags', index=0).tags == ['other', 'new-tag']
-
-    def test_remove_tags(self):
-        rules = Rules(RuleRemoveTags0)
-
-        matches = Matches([Match(1, 2, name='tags', tags=['other', 'new-tag'])])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches.named('tags')) == 1
-        assert matches.named('tags', index=0).tags == ['other']
-
-        rules = Rules(RuleRemoveTags1)
-
-        matches = Matches([Match(1, 2, name='tags', tags=['other', 'new-tag'])])
-        rules.execute_all_rules(matches, {})
-
-        assert len(matches.named('tags')) == 1
-        assert matches.named('tags', index=0).tags == ['other']
-
-
-def test_rule_module():
-    rules = Rules(rm)
-
-    matches = Matches([Match(1, 2)])
-    rules.execute_all_rules(matches, {})
-
-    assert len(matches) == 1
-
-
-def test_rule_repr():
-    assert str(Rule0()) == "<Rule0>"
-    assert str(Rule1()) == "<Rule1>"
-    assert str(Rule2()) == "<Rule2>"
-    assert str(Rule1Disabled()) == "<Disabled Rule1>"
diff --git a/lib/rebulk/test/test_toposort.py b/lib/rebulk/test/test_toposort.py
deleted file mode 100644
index 76ea60313bc4fcfd9b79dd94c8dd867c079aacb3..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_toposort.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright 2014 True Blade Systems, Inc.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Original:
-#   - https://bitbucket.org/ericvsmith/toposort (1.4)
-# Modifications:
-#   - port to pytest
-# pylint: skip-file
-
-import pytest
-from ..toposort import toposort, toposort_flatten, CyclicDependency
-
-
-class TestCase(object):
-    def test_simple(self):
-        results = list(toposort({2: set([11]), 9: set([11, 8]), 10: set([11, 3]), 11: set([7, 5]), 8: set([7, 3])}))
-        expected = [set([3, 5, 7]), set([8, 11]), set([2, 9, 10])]
-        assert results == expected
-
-        # make sure self dependencies are ignored
-        results = list(toposort({2: set([2, 11]), 9: set([11, 8]), 10: set([10, 11, 3]), 11: set([7, 5]), 8: set([7, 3])}))
-        expected = [set([3, 5, 7]), set([8, 11]), set([2, 9, 10])]
-        assert results == expected
-
-        assert list(toposort({1: set()})) == [set([1])]
-        assert list(toposort({1: set([1])})) == [set([1])]
-
-    def test_no_dependencies(self):
-        assert list(toposort({1: set([2]), 3: set([4]), 5: set([6])})) == [set([2, 4, 6]), set([1, 3, 5])]
-        assert list(toposort({1: set(), 3: set(), 5: set()})) == [set([1, 3, 5])]
-
-    def test_empty(self):
-        assert list(toposort({})) == []
-
-    def test_strings(self):
-        results = list(toposort({'2': set(['11']), '9': set(['11', '8']), '10': set(['11', '3']), '11': set(['7', '5']), '8': set(['7', '3'])}))
-        expected = [set(['3', '5', '7']), set(['8', '11']), set(['2', '9', '10'])]
-        assert results == expected
-
-    def test_objects(self):
-        o2 = object()
-        o3 = object()
-        o5 = object()
-        o7 = object()
-        o8 = object()
-        o9 = object()
-        o10 = object()
-        o11 = object()
-        results = list(toposort({o2: set([o11]), o9: set([o11, o8]), o10: set([o11, o3]), o11: set([o7, o5]), o8: set([o7, o3, o8])}))
-        expected = [set([o3, o5, o7]), set([o8, o11]), set([o2, o9, o10])]
-        assert results == expected
-
-    def test_cycle(self):
-        # a simple, 2 element cycle
-        with pytest.raises(CyclicDependency):
-            list(toposort({1: set([2]), 2: set([1])}))
-
-        # an indirect cycle
-        with pytest.raises(CyclicDependency):
-            list(toposort({1: set([2]), 2: set([3]), 3: set([1])}))
-
-    def test_input_not_modified(self):
-        data = {2: set([11]),
-                9: set([11, 8]),
-                10: set([11, 3]),
-                11: set([7, 5]),
-                8: set([7, 3, 8]),  # includes something self-referential
-                }
-        orig = data.copy()
-        results = list(toposort(data))
-        assert data == orig
-
-    def test_input_not_modified_when_cycle_error(self):
-        data = {1: set([2]),
-                2: set([1]),
-                3: set([4]),
-                }
-        orig = data.copy()
-        with pytest.raises(CyclicDependency):
-            list(toposort(data))
-        assert data == orig
-
-
-class TestCaseAll(object):
-    def test_sort_flatten(self):
-        data = {2: set([11]),
-                9: set([11, 8]),
-                10: set([11, 3]),
-                11: set([7, 5]),
-                8: set([7, 3, 8]),  # includes something self-referential
-                }
-        expected = [set([3, 5, 7]), set([8, 11]), set([2, 9, 10])]
-        assert list(toposort(data)) == expected
-
-        # now check the sorted results
-        results = []
-        for item in expected:
-            results.extend(sorted(item))
-        assert toposort_flatten(data) == results
-
-        # and the unsorted results. break the results up into groups to compare them
-        actual = toposort_flatten(data, False)
-        results = [set([i for i in actual[0:3]]), set([i for i in actual[3:5]]), set([i for i in actual[5:8]])]
-        assert results == expected
diff --git a/lib/rebulk/test/test_validators.py b/lib/rebulk/test/test_validators.py
deleted file mode 100644
index 38511cbffb609133aebfa17ac05668a47f1bec09..0000000000000000000000000000000000000000
--- a/lib/rebulk/test/test_validators.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
-
-from functools import partial
-
-from rebulk.pattern import StringPattern
-
-from ..validators import chars_before, chars_after, chars_surround, validators
-
-chars = ' _.'
-left = partial(chars_before, chars)
-right = partial(chars_after, chars)
-surrounding = partial(chars_surround, chars)
-
-
-def test_left_chars():
-    matches = list(StringPattern("word", validator=left).matches("xxxwordxxx"))
-    assert len(matches) == 0
-
-    matches = list(StringPattern("word", validator=left).matches("xxx_wordxxx"))
-    assert len(matches) == 1
-
-    matches = list(StringPattern("word", validator=left).matches("wordxxx"))
-    assert len(matches) == 1
-
-
-def test_right_chars():
-    matches = list(StringPattern("word", validator=right).matches("xxxwordxxx"))
-    assert len(matches) == 0
-
-    matches = list(StringPattern("word", validator=right).matches("xxxword.xxx"))
-    assert len(matches) == 1
-
-    matches = list(StringPattern("word", validator=right).matches("xxxword"))
-    assert len(matches) == 1
-
-
-def test_surrounding_chars():
-    matches = list(StringPattern("word", validator=surrounding).matches("xxxword xxx"))
-    assert len(matches) == 0
-
-    matches = list(StringPattern("word", validator=surrounding).matches("xxx.wordxxx"))
-    assert len(matches) == 0
-
-    matches = list(StringPattern("word", validator=surrounding).matches("xxx word_xxx"))
-    assert len(matches) == 1
-
-    matches = list(StringPattern("word", validator=surrounding).matches("word"))
-    assert len(matches) == 1
-
-
-def test_chain():
-    matches = list(StringPattern("word", validator=validators(left, right)).matches("xxxword xxx"))
-    assert len(matches) == 0
-
-    matches = list(StringPattern("word", validator=validators(left, right)).matches("xxx.wordxxx"))
-    assert len(matches) == 0
-
-    matches = list(StringPattern("word", validator=validators(left, right)).matches("xxx word_xxx"))
-    assert len(matches) == 1
-
-    matches = list(StringPattern("word", validator=validators(left, right)).matches("word"))
-    assert len(matches) == 1
diff --git a/lib/stevedore/tests/__init__.py b/lib/stevedore/tests/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/lib/stevedore/tests/extension_unimportable.py b/lib/stevedore/tests/extension_unimportable.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/lib/stevedore/tests/manager.py b/lib/stevedore/tests/manager.py
deleted file mode 100644
index 28c37321762dfb549872083f64d120f46d080e9c..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/manager.py
+++ /dev/null
@@ -1,59 +0,0 @@
-"""TestExtensionManager
-
-Extension manager used only for testing.
-"""
-
-import logging
-import warnings
-
-from stevedore import extension
-
-
-LOG = logging.getLogger(__name__)
-
-
-class TestExtensionManager(extension.ExtensionManager):
-    """ExtensionManager that is explicitly initialized for tests.
-
-    .. deprecated:: 0.13
-
-       Use the :func:`make_test_instance` class method of the class
-       being replaced by the test instance instead of using this class
-       directly.
-
-    :param extensions: Pre-configured Extension instances to use
-                       instead of loading them from entry points.
-    :type extensions: list of :class:`~stevedore.extension.Extension`
-    :param namespace: The namespace for the entry points.
-    :type namespace: str
-    :param invoke_on_load: Boolean controlling whether to invoke the
-        object returned by the entry point after the driver is loaded.
-    :type invoke_on_load: bool
-    :param invoke_args: Positional arguments to pass when invoking
-        the object returned by the entry point. Only used if invoke_on_load
-        is True.
-    :type invoke_args: tuple
-    :param invoke_kwds: Named arguments to pass when invoking
-        the object returned by the entry point. Only used if invoke_on_load
-        is True.
-    :type invoke_kwds: dict
-
-    """
-
-    def __init__(self, extensions,
-                 namespace='test',
-                 invoke_on_load=False,
-                 invoke_args=(),
-                 invoke_kwds={}):
-        super(TestExtensionManager, self).__init__(namespace,
-                                                   invoke_on_load,
-                                                   invoke_args,
-                                                   invoke_kwds,
-                                                   )
-        self.extensions = extensions
-        warnings.warn(
-            'TestExtensionManager has been replaced by make_test_instance()',
-            DeprecationWarning)
-
-    def _load_plugins(self, *args, **kwds):
-        return []
diff --git a/lib/stevedore/tests/test_callback.py b/lib/stevedore/tests/test_callback.py
deleted file mode 100644
index 1e9f5b18ac5ea96ea0b3074b51efda4bf930e5a7..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_callback.py
+++ /dev/null
@@ -1,25 +0,0 @@
-"""Tests for failure loading callback
-"""
-from testtools.matchers import GreaterThan
-
-from stevedore import extension
-from stevedore.tests import utils
-
-
-class TestCallback(utils.TestCase):
-    def test_extension_failure_custom_callback(self):
-        errors = []
-
-        def failure_callback(manager, entrypoint, error):
-            errors.append((manager, entrypoint, error))
-
-        em = extension.ExtensionManager('stevedore.test.extension',
-                                        invoke_on_load=True,
-                                        on_load_failure_callback=
-                                        failure_callback)
-        extensions = list(em.extensions)
-        self.assertThat(len(extensions), GreaterThan(0))
-        self.assertEqual(len(errors), 2)
-        for manager, entrypoint, error in errors:
-            self.assertIs(manager, em)
-            self.assertIsInstance(error, (IOError, ImportError))
diff --git a/lib/stevedore/tests/test_dispatch.py b/lib/stevedore/tests/test_dispatch.py
deleted file mode 100644
index 1cd3bd43cd9bda3a9d2d583f1f36d97be18edbb1..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_dispatch.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from stevedore.tests import utils
-from stevedore import dispatch
-
-
-def check_dispatch(ep, *args, **kwds):
-    return ep.name == 't2'
-
-
-class TestDispatch(utils.TestCase):
-    def check_dispatch(ep, *args, **kwds):
-        return ep.name == 't2'
-
-    def test_dispatch(self):
-
-        def invoke(ep, *args, **kwds):
-            return (ep.name, args, kwds)
-
-        em = dispatch.DispatchExtensionManager('stevedore.test.extension',
-                                               lambda *args, **kwds: True,
-                                               invoke_on_load=True,
-                                               invoke_args=('a',),
-                                               invoke_kwds={'b': 'B'},
-                                               )
-        self.assertEqual(len(em.extensions), 2)
-        self.assertEqual(set(em.names()), set(['t1', 't2']))
-
-        results = em.map(check_dispatch,
-                         invoke,
-                         'first',
-                         named='named value',
-                         )
-        expected = [('t2', ('first',), {'named': 'named value'})]
-        self.assertEqual(results, expected)
-
-    def test_dispatch_map_method(self):
-        em = dispatch.DispatchExtensionManager('stevedore.test.extension',
-                                               lambda *args, **kwds: True,
-                                               invoke_on_load=True,
-                                               invoke_args=('a',),
-                                               invoke_kwds={'b': 'B'},
-                                               )
-
-        results = em.map_method(check_dispatch, 'get_args_and_data', 'first')
-        self.assertEqual(results, [(('a',), {'b': 'B'}, 'first')])
-
-    def test_name_dispatch(self):
-
-        def invoke(ep, *args, **kwds):
-            return (ep.name, args, kwds)
-
-        em = dispatch.NameDispatchExtensionManager('stevedore.test.extension',
-                                                   lambda *args, **kwds: True,
-                                                   invoke_on_load=True,
-                                                   invoke_args=('a',),
-                                                   invoke_kwds={'b': 'B'},
-                                                   )
-        self.assertEqual(len(em.extensions), 2)
-        self.assertEqual(set(em.names()), set(['t1', 't2']))
-
-        results = em.map(['t2'], invoke, 'first', named='named value',)
-        expected = [('t2', ('first',), {'named': 'named value'})]
-        self.assertEqual(results, expected)
-
-    def test_name_dispatch_ignore_missing(self):
-
-        def invoke(ep, *args, **kwds):
-            return (ep.name, args, kwds)
-
-        em = dispatch.NameDispatchExtensionManager(
-            'stevedore.test.extension',
-            lambda *args, **kwds: True,
-            invoke_on_load=True,
-            invoke_args=('a',),
-            invoke_kwds={'b': 'B'},
-        )
-
-        results = em.map(['t3', 't1'], invoke, 'first', named='named value',)
-        expected = [('t1', ('first',), {'named': 'named value'})]
-        self.assertEqual(results, expected)
-
-    def test_name_dispatch_map_method(self):
-        em = dispatch.NameDispatchExtensionManager(
-            'stevedore.test.extension',
-            lambda *args, **kwds: True,
-            invoke_on_load=True,
-            invoke_args=('a',),
-            invoke_kwds={'b': 'B'},
-        )
-
-        results = em.map_method(['t3', 't1'], 'get_args_and_data', 'first')
-        self.assertEqual(results, [(('a',), {'b': 'B'}, 'first')])
diff --git a/lib/stevedore/tests/test_driver.py b/lib/stevedore/tests/test_driver.py
deleted file mode 100644
index 0a919cf76b1980263d067560eae9f54f6b7f2263..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_driver.py
+++ /dev/null
@@ -1,77 +0,0 @@
-"""Tests for stevedore.extension
-"""
-
-import pkg_resources
-
-from stevedore import driver
-from stevedore import exception
-from stevedore import extension
-from stevedore.tests import test_extension
-from stevedore.tests import utils
-
-
-class TestCallback(utils.TestCase):
-    def test_detect_plugins(self):
-        em = driver.DriverManager('stevedore.test.extension', 't1')
-        names = sorted(em.names())
-        self.assertEqual(names, ['t1'])
-
-    def test_call(self):
-        def invoke(ext, *args, **kwds):
-            return (ext.name, args, kwds)
-        em = driver.DriverManager('stevedore.test.extension', 't1')
-        result = em(invoke, 'a', b='C')
-        self.assertEqual(result, ('t1', ('a',), {'b': 'C'}))
-
-    def test_driver_property_not_invoked_on_load(self):
-        em = driver.DriverManager('stevedore.test.extension', 't1',
-                                  invoke_on_load=False)
-        d = em.driver
-        self.assertIs(d, test_extension.FauxExtension)
-
-    def test_driver_property_invoked_on_load(self):
-        em = driver.DriverManager('stevedore.test.extension', 't1',
-                                  invoke_on_load=True)
-        d = em.driver
-        self.assertIsInstance(d, test_extension.FauxExtension)
-
-    def test_no_drivers(self):
-        try:
-            driver.DriverManager('stevedore.test.extension.none', 't1')
-        except exception.NoMatches as err:
-            self.assertIn("No 'stevedore.test.extension.none' driver found",
-                          str(err))
-
-    def test_bad_driver(self):
-        try:
-            driver.DriverManager('stevedore.test.extension', 'e2')
-        except ImportError:
-            pass
-        else:
-            self.fail("No error raised")
-
-    def test_multiple_drivers(self):
-        # The idea for this test was contributed by clayg:
-        # https://gist.github.com/clayg/6311348
-        extensions = [
-            extension.Extension(
-                'backend',
-                pkg_resources.EntryPoint.parse('backend = pkg1:driver'),
-                'pkg backend',
-                None,
-            ),
-            extension.Extension(
-                'backend',
-                pkg_resources.EntryPoint.parse('backend = pkg2:driver'),
-                'pkg backend',
-                None,
-            ),
-        ]
-        try:
-            dm = driver.DriverManager.make_test_instance(extensions[0])
-            # Call the initialization code that verifies the extension
-            dm._init_plugins(extensions)
-        except exception.MultipleMatches as err:
-            self.assertIn("Multiple", str(err))
-        else:
-            self.fail('Should have had an error')
diff --git a/lib/stevedore/tests/test_enabled.py b/lib/stevedore/tests/test_enabled.py
deleted file mode 100644
index 7040d032efa27a286f9b8503362d31525462ea13..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_enabled.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from stevedore import enabled
-from stevedore.tests import utils
-
-
-class TestEnabled(utils.TestCase):
-    def test_enabled(self):
-        def check_enabled(ep):
-            return ep.name == 't2'
-        em = enabled.EnabledExtensionManager(
-            'stevedore.test.extension',
-            check_enabled,
-            invoke_on_load=True,
-            invoke_args=('a',),
-            invoke_kwds={'b': 'B'},
-        )
-        self.assertEqual(len(em.extensions), 1)
-        self.assertEqual(em.names(), ['t2'])
-
-    def test_enabled_after_load(self):
-        def check_enabled(ext):
-            return ext.obj and ext.name == 't2'
-        em = enabled.EnabledExtensionManager(
-            'stevedore.test.extension',
-            check_enabled,
-            invoke_on_load=True,
-            invoke_args=('a',),
-            invoke_kwds={'b': 'B'},
-        )
-        self.assertEqual(len(em.extensions), 1)
-        self.assertEqual(em.names(), ['t2'])
diff --git a/lib/stevedore/tests/test_example_fields.py b/lib/stevedore/tests/test_example_fields.py
deleted file mode 100644
index 86aebf912f0f469d657531b40be4fc71c63cf626..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_example_fields.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""Tests for stevedore.example2.fields
-"""
-
-from stevedore.example2 import fields
-from stevedore.tests import utils
-
-
-class TestExampleFields(utils.TestCase):
-    def test_simple_items(self):
-        f = fields.FieldList(100)
-        text = ''.join(f.format({'a': 'A', 'b': 'B'}))
-        expected = '\n'.join([
-            ': a : A',
-            ': b : B',
-            '',
-        ])
-        self.assertEqual(text, expected)
-
-    def test_long_item(self):
-        f = fields.FieldList(25)
-        text = ''.join(f.format({'name':
-                       'a value longer than the allowed width'}))
-        expected = '\n'.join([
-            ': name : a value longer',
-            '    than the allowed',
-            '    width',
-            '',
-        ])
-        self.assertEqual(text, expected)
diff --git a/lib/stevedore/tests/test_example_simple.py b/lib/stevedore/tests/test_example_simple.py
deleted file mode 100644
index 2558fb7badb3d75dc1439d0536593dd61683cadf..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_example_simple.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Tests for stevedore.example.simple
-"""
-
-from stevedore.example import simple
-from stevedore.tests import utils
-
-
-class TestExampleSimple(utils.TestCase):
-    def test_simple_items(self):
-        f = simple.Simple(100)
-        text = ''.join(f.format({'a': 'A', 'b': 'B'}))
-        expected = '\n'.join([
-            'a = A',
-            'b = B',
-            '',
-        ])
-        self.assertEqual(text, expected)
diff --git a/lib/stevedore/tests/test_extension.py b/lib/stevedore/tests/test_extension.py
deleted file mode 100644
index 1fe02422e1e70f5b007ab09803767ff748d2e58c..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_extension.py
+++ /dev/null
@@ -1,212 +0,0 @@
-"""Tests for stevedore.extension
-"""
-
-import mock
-
-from stevedore import exception
-from stevedore import extension
-from stevedore.tests import utils
-
-
-ALL_NAMES = ['e1', 't1', 't2']
-WORKING_NAMES = ['t1', 't2']
-
-
-class FauxExtension(object):
-    def __init__(self, *args, **kwds):
-        self.args = args
-        self.kwds = kwds
-
-    def get_args_and_data(self, data):
-        return self.args, self.kwds, data
-
-
-class BrokenExtension(object):
-    def __init__(self, *args, **kwds):
-        raise IOError("Did not create")
-
-
-class TestCallback(utils.TestCase):
-    def test_detect_plugins(self):
-        em = extension.ExtensionManager('stevedore.test.extension')
-        names = sorted(em.names())
-        self.assertEqual(names, ALL_NAMES)
-
-    def test_get_by_name(self):
-        em = extension.ExtensionManager('stevedore.test.extension')
-        e = em['t1']
-        self.assertEqual(e.name, 't1')
-
-    def test_contains_by_name(self):
-        em = extension.ExtensionManager('stevedore.test.extension')
-        self.assertEqual('t1' in em, True)
-
-    def test_get_by_name_missing(self):
-        em = extension.ExtensionManager('stevedore.test.extension')
-        try:
-            em['t3']
-        except KeyError:
-            pass
-        else:
-            assert False, 'Failed to raise KeyError'
-
-    def test_load_multiple_times_entry_points(self):
-        # We expect to get the same EntryPoint object because we save them
-        # in the cache.
-        em1 = extension.ExtensionManager('stevedore.test.extension')
-        eps1 = [ext.entry_point for ext in em1]
-        em2 = extension.ExtensionManager('stevedore.test.extension')
-        eps2 = [ext.entry_point for ext in em2]
-        self.assertIs(eps1[0], eps2[0])
-
-    def test_load_multiple_times_plugins(self):
-        # We expect to get the same plugin object (module or class)
-        # because the underlying import machinery will cache the values.
-        em1 = extension.ExtensionManager('stevedore.test.extension')
-        plugins1 = [ext.plugin for ext in em1]
-        em2 = extension.ExtensionManager('stevedore.test.extension')
-        plugins2 = [ext.plugin for ext in em2]
-        self.assertIs(plugins1[0], plugins2[0])
-
-    def test_use_cache(self):
-        # If we insert something into the cache of entry points,
-        # the manager should not have to call into pkg_resources
-        # to find the plugins.
-        cache = extension.ExtensionManager.ENTRY_POINT_CACHE
-        cache['stevedore.test.faux'] = []
-        with mock.patch('pkg_resources.iter_entry_points',
-                        side_effect=AssertionError(
-                            'called iter_entry_points')):
-            em = extension.ExtensionManager('stevedore.test.faux')
-            names = em.names()
-        self.assertEqual(names, [])
-
-    def test_iterable(self):
-        em = extension.ExtensionManager('stevedore.test.extension')
-        names = sorted(e.name for e in em)
-        self.assertEqual(names, ALL_NAMES)
-
-    def test_invoke_on_load(self):
-        em = extension.ExtensionManager('stevedore.test.extension',
-                                        invoke_on_load=True,
-                                        invoke_args=('a',),
-                                        invoke_kwds={'b': 'B'},
-                                        )
-        self.assertEqual(len(em.extensions), 2)
-        for e in em.extensions:
-            self.assertEqual(e.obj.args, ('a',))
-            self.assertEqual(e.obj.kwds, {'b': 'B'})
-
-    def test_map_return_values(self):
-        def mapped(ext, *args, **kwds):
-            return ext.name
-
-        em = extension.ExtensionManager('stevedore.test.extension',
-                                        invoke_on_load=True,
-                                        )
-        results = em.map(mapped)
-        self.assertEqual(sorted(results), WORKING_NAMES)
-
-    def test_map_arguments(self):
-        objs = []
-
-        def mapped(ext, *args, **kwds):
-            objs.append((ext, args, kwds))
-
-        em = extension.ExtensionManager('stevedore.test.extension',
-                                        invoke_on_load=True,
-                                        )
-        em.map(mapped, 1, 2, a='A', b='B')
-        self.assertEqual(len(objs), 2)
-        names = sorted([o[0].name for o in objs])
-        self.assertEqual(names, WORKING_NAMES)
-        for o in objs:
-            self.assertEqual(o[1], (1, 2))
-            self.assertEqual(o[2], {'a': 'A', 'b': 'B'})
-
-    def test_map_eats_errors(self):
-        def mapped(ext, *args, **kwds):
-            raise RuntimeError('hard coded error')
-
-        em = extension.ExtensionManager('stevedore.test.extension',
-                                        invoke_on_load=True,
-                                        )
-        results = em.map(mapped, 1, 2, a='A', b='B')
-        self.assertEqual(results, [])
-
-    def test_map_propagate_exceptions(self):
-        def mapped(ext, *args, **kwds):
-            raise RuntimeError('hard coded error')
-
-        em = extension.ExtensionManager('stevedore.test.extension',
-                                        invoke_on_load=True,
-                                        propagate_map_exceptions=True
-                                        )
-
-        try:
-            em.map(mapped, 1, 2, a='A', b='B')
-            assert False
-        except RuntimeError:
-            pass
-
-    def test_map_errors_when_no_plugins(self):
-        expected_str = 'No stevedore.test.extension.none extensions found'
-
-        def mapped(ext, *args, **kwds):
-            pass
-
-        em = extension.ExtensionManager('stevedore.test.extension.none',
-                                        invoke_on_load=True,
-                                        )
-        try:
-            em.map(mapped, 1, 2, a='A', b='B')
-        except exception.NoMatches as err:
-            self.assertEqual(expected_str, str(err))
-        else:
-            self.fail('Expected NoMatches to be raised')
-
-    def test_map_method(self):
-        em = extension.ExtensionManager('stevedore.test.extension',
-                                        invoke_on_load=True,
-                                        )
-
-        result = em.map_method('get_args_and_data', 42)
-        self.assertEqual(set(r[2] for r in result), set([42]))
-
-
-class TestLoadRequirementsNewSetuptools(utils.TestCase):
-    # setuptools 11.3 and later
-
-    def setUp(self):
-        super(TestLoadRequirementsNewSetuptools, self).setUp()
-        self.mock_ep = mock.Mock(spec=['require', 'resolve', 'load', 'name'])
-        self.em = extension.ExtensionManager.make_test_instance([])
-
-    def test_verify_requirements(self):
-        self.em._load_one_plugin(self.mock_ep, False, (), {},
-                                 verify_requirements=True)
-        self.mock_ep.require.assert_called_once_with()
-        self.mock_ep.resolve.assert_called_once_with()
-
-    def test_no_verify_requirements(self):
-        self.em._load_one_plugin(self.mock_ep, False, (), {},
-                                 verify_requirements=False)
-        self.assertEqual(0, self.mock_ep.require.call_count)
-        self.mock_ep.resolve.assert_called_once_with()
-
-
-class TestLoadRequirementsOldSetuptools(utils.TestCase):
-    # Before setuptools 11.3
-
-    def setUp(self):
-        super(TestLoadRequirementsOldSetuptools, self).setUp()
-        self.mock_ep = mock.Mock(spec=['load', 'name'])
-        self.em = extension.ExtensionManager.make_test_instance([])
-
-    def test_verify_requirements(self):
-        self.em._load_one_plugin(self.mock_ep, False, (), {},
-                                 verify_requirements=True)
-        self.mock_ep.load.assert_called_once_with(require=True)
-
-    def test_no_verify_requirements(self):
-        self.em._load_one_plugin(self.mock_ep, False, (), {},
-                                 verify_requirements=False)
-        self.mock_ep.load.assert_called_once_with(require=False)
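To complement the map()/map_method() tests above, a self-contained sketch using make_test_instance() so no entry points are needed; the Formatter class and extension names are illustrative:

    from stevedore.extension import Extension, ExtensionManager

    class Formatter(object):
        """Illustrative plugin exposing the method map_method() will call."""
        def __init__(self, prefix):
            self.prefix = prefix

        def render(self, text):
            return self.prefix + text

    exts = [
        Extension('star', None, Formatter, Formatter('* ')),
        Extension('dash', None, Formatter, Formatter('- ')),
    ]
    em = ExtensionManager.make_test_instance(exts)

    # map() calls func(extension, *args, **kwds) once per loaded extension.
    print(sorted(em.map(lambda ext, text: ext.obj.render(text), 'item')))

    # map_method() calls the named method on each extension's invoked object.
    print(sorted(em.map_method('render', 'item')))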
diff --git a/lib/stevedore/tests/test_hook.py b/lib/stevedore/tests/test_hook.py
deleted file mode 100644
index b95f4b84880d2ba72430978a0888fef3df7598d3..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_hook.py
+++ /dev/null
@@ -1,43 +0,0 @@
-from stevedore import hook
-from stevedore.tests import utils
-
-
-class TestHook(utils.TestCase):
-    def test_hook(self):
-        em = hook.HookManager(
-            'stevedore.test.extension',
-            't1',
-            invoke_on_load=True,
-            invoke_args=('a',),
-            invoke_kwds={'b': 'B'},
-        )
-        self.assertEqual(len(em.extensions), 1)
-        self.assertEqual(em.names(), ['t1'])
-
-    def test_get_by_name(self):
-        em = hook.HookManager(
-            'stevedore.test.extension',
-            't1',
-            invoke_on_load=True,
-            invoke_args=('a',),
-            invoke_kwds={'b': 'B'},
-        )
-        e_list = em['t1']
-        self.assertEqual(len(e_list), 1)
-        e = e_list[0]
-        self.assertEqual(e.name, 't1')
-
-    def test_get_by_name_missing(self):
-        em = hook.HookManager(
-            'stevedore.test.extension',
-            't1',
-            invoke_on_load=True,
-            invoke_args=('a',),
-            invoke_kwds={'b': 'B'},
-        )
-        try:
-            em['t2']
-        except KeyError:
-            pass
-        else:
-            assert False, 'Failed to raise KeyError'
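A short sketch of the property these hook tests check: indexing a HookManager by name yields every extension registered under that name (the extensions below are built by hand, so nothing needs to be installed):

    from stevedore.extension import Extension
    from stevedore.hook import HookManager

    # Two hooks sharing the same name; both are returned for that name.
    hooks = [
        Extension('on_save', None, None, 'first handler'),
        Extension('on_save', None, None, 'second handler'),
    ]
    hm = HookManager.make_test_instance(hooks)
    print([ext.obj for ext in hm['on_save']])   # ['first handler', 'second handler']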
diff --git a/lib/stevedore/tests/test_named.py b/lib/stevedore/tests/test_named.py
deleted file mode 100644
index bbac13709d33ddfe7bc09f263b4bd6d81c069b23..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_named.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from stevedore import named
-from stevedore.tests import utils
-
-import mock
-
-
-class TestNamed(utils.TestCase):
-    def test_named(self):
-        em = named.NamedExtensionManager(
-            'stevedore.test.extension',
-            names=['t1'],
-            invoke_on_load=True,
-            invoke_args=('a',),
-            invoke_kwds={'b': 'B'},
-        )
-        actual = em.names()
-        self.assertEqual(actual, ['t1'])
-
-    def test_enabled_before_load(self):
-        # Set up the FauxExtension constructor to raise an AssertionError so
-        # the test fails if the class is instantiated. That should only happen
-        # if the plugin is loaded before its name is compared against the
-        # names the manager was asked to load.
-        init_name = 'stevedore.tests.test_extension.FauxExtension.__init__'
-        with mock.patch(init_name) as m:
-            m.side_effect = AssertionError
-            em = named.NamedExtensionManager(
-                'stevedore.test.extension',
-                # Look for an extension that does not exist so the
-                # __init__ we mocked should never be invoked.
-                names=['no-such-extension'],
-                invoke_on_load=True,
-                invoke_args=('a',),
-                invoke_kwds={'b': 'B'},
-            )
-            actual = em.names()
-            self.assertEqual(actual, [])
-
-    def test_extensions_listed_in_name_order(self):
-        # Since we don't know the "natural" order of the extensions, run
-        # the test both ways: if the sorting is broken, one of them will
-        # fail
-        em = named.NamedExtensionManager(
-            'stevedore.test.extension',
-            names=['t1', 't2'],
-            name_order=True
-        )
-        actual = em.names()
-        self.assertEqual(actual, ['t1', 't2'])
-
-        em = named.NamedExtensionManager(
-            'stevedore.test.extension',
-            names=['t2', 't1'],
-            name_order=True
-        )
-        actual = em.names()
-        self.assertEqual(actual, ['t2', 't1'])
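A hedged sketch of the name_order behaviour covered above, against a hypothetical 'myapp.steps' namespace (the namespace and plugin names are illustrative):

    from stevedore import named

    mgr = named.NamedExtensionManager(
        'myapp.steps',                  # hypothetical namespace
        names=['validate', 'persist'],  # only these plugins are loaded
        name_order=True,                # report them in the order requested
    )
    # With name_order=True, names() follows the requested ordering rather than
    # the discovery order of the underlying entry points.
    print(mgr.names())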
diff --git a/lib/stevedore/tests/test_sphinxext.py b/lib/stevedore/tests/test_sphinxext.py
deleted file mode 100644
index 60b47944f7a85539dd916947a175b90a45a8558a..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_sphinxext.py
+++ /dev/null
@@ -1,120 +0,0 @@
-#    Licensed under the Apache License, Version 2.0 (the "License"); you may
-#    not use this file except in compliance with the License. You may obtain
-#    a copy of the License at
-#
-#         http://www.apache.org/licenses/LICENSE-2.0
-#
-#    Unless required by applicable law or agreed to in writing, software
-#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-#    License for the specific language governing permissions and limitations
-#    under the License.
-"""Tests for the sphinx extension
-"""
-
-from __future__ import unicode_literals
-
-from stevedore import extension
-from stevedore import sphinxext
-from stevedore.tests import utils
-
-import mock
-import pkg_resources
-
-
-def _make_ext(name, docstring):
-    def inner():
-        pass
-
-    inner.__doc__ = docstring
-    m1 = mock.Mock(spec=pkg_resources.EntryPoint)
-    m1.module_name = '%s_module' % name
-    s = mock.Mock(return_value='ENTRY_POINT(%s)' % name)
-    m1.__str__ = s
-    return extension.Extension(name, m1, inner, None)
-
-
-class TestSphinxExt(utils.TestCase):
-
-    def setUp(self):
-        super(TestSphinxExt, self).setUp()
-        self.exts = [
-            _make_ext('test1', 'One-line docstring'),
-            _make_ext('test2', 'Multi-line docstring\n\nAnother para'),
-        ]
-        self.em = extension.ExtensionManager.make_test_instance(self.exts)
-
-    def test_simple_list(self):
-        results = list(sphinxext._simple_list(self.em))
-        self.assertEqual(
-            [
-                ('* test1 -- One-line docstring', 'test1_module'),
-                ('* test2 -- Multi-line docstring', 'test2_module'),
-            ],
-            results,
-        )
-
-    def test_simple_list_no_docstring(self):
-        ext = [_make_ext('nodoc', None)]
-        em = extension.ExtensionManager.make_test_instance(ext)
-        results = list(sphinxext._simple_list(em))
-        self.assertEqual(
-            [
-                ('* nodoc -- ', 'nodoc_module'),
-            ],
-            results,
-        )
-
-    def test_detailed_list(self):
-        results = list(sphinxext._detailed_list(self.em))
-        self.assertEqual(
-            [
-                ('test1', 'test1_module'),
-                ('-----', 'test1_module'),
-                ('\n', 'test1_module'),
-                ('One-line docstring', 'test1_module'),
-                ('\n', 'test1_module'),
-                ('test2', 'test2_module'),
-                ('-----', 'test2_module'),
-                ('\n', 'test2_module'),
-                ('Multi-line docstring\n\nAnother para', 'test2_module'),
-                ('\n', 'test2_module'),
-            ],
-            results,
-        )
-
-    def test_detailed_list_format(self):
-        results = list(sphinxext._detailed_list(self.em, over='+', under='+'))
-        self.assertEqual(
-            [
-                ('+++++', 'test1_module'),
-                ('test1', 'test1_module'),
-                ('+++++', 'test1_module'),
-                ('\n', 'test1_module'),
-                ('One-line docstring', 'test1_module'),
-                ('\n', 'test1_module'),
-                ('+++++', 'test2_module'),
-                ('test2', 'test2_module'),
-                ('+++++', 'test2_module'),
-                ('\n', 'test2_module'),
-                ('Multi-line docstring\n\nAnother para', 'test2_module'),
-                ('\n', 'test2_module'),
-            ],
-            results,
-        )
-
-    def test_detailed_list_no_docstring(self):
-        ext = [_make_ext('nodoc', None)]
-        em = extension.ExtensionManager.make_test_instance(ext)
-        results = list(sphinxext._detailed_list(em))
-        self.assertEqual(
-            [
-                ('nodoc', 'nodoc_module'),
-                ('-----', 'nodoc_module'),
-                ('\n', 'nodoc_module'),
-                ('.. warning:: No documentation found in ENTRY_POINT(nodoc)',
-                 'nodoc_module'),
-                ('\n', 'nodoc_module'),
-            ],
-            results,
-        )
diff --git a/lib/stevedore/tests/test_test_manager.py b/lib/stevedore/tests/test_test_manager.py
deleted file mode 100644
index 70aa1003b30ce3740893b40396b6eb0aaf0dd99d..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/test_test_manager.py
+++ /dev/null
@@ -1,204 +0,0 @@
-from mock import Mock, sentinel
-from stevedore import (ExtensionManager, NamedExtensionManager, HookManager,
-                       DriverManager, EnabledExtensionManager)
-from stevedore.dispatch import (DispatchExtensionManager,
-                                NameDispatchExtensionManager)
-from stevedore.extension import Extension
-from stevedore.tests import utils
-
-
-test_extension = Extension('test_extension', None, None, None)
-test_extension2 = Extension('another_one', None, None, None)
-
-mock_entry_point = Mock(module_name='test.extension', attrs=['obj'])
-a_driver = Extension('test_driver', mock_entry_point, sentinel.driver_plugin,
-                     sentinel.driver_obj)
-
-
-# base ExtensionManager
-class TestTestManager(utils.TestCase):
-    def test_instance_should_use_supplied_extensions(self):
-        extensions = [test_extension, test_extension2]
-        em = ExtensionManager.make_test_instance(extensions)
-        self.assertEqual(extensions, em.extensions)
-
-    def test_instance_should_have_default_namespace(self):
-        em = ExtensionManager.make_test_instance([])
-        self.assertEqual(em.namespace, 'TESTING')
-
-    def test_instance_should_use_supplied_namespace(self):
-        namespace = 'testing.1.2.3'
-        em = ExtensionManager.make_test_instance([], namespace=namespace)
-        self.assertEqual(namespace, em.namespace)
-
-    def test_extension_name_should_be_listed(self):
-        em = ExtensionManager.make_test_instance([test_extension])
-        self.assertIn(test_extension.name, em.names())
-
-    def test_iterator_should_yield_extension(self):
-        em = ExtensionManager.make_test_instance([test_extension])
-        self.assertEqual(test_extension, next(iter(em)))
-
-    def test_manager_should_allow_name_access(self):
-        em = ExtensionManager.make_test_instance([test_extension])
-        self.assertEqual(test_extension, em[test_extension.name])
-
-    def test_manager_should_call(self):
-        em = ExtensionManager.make_test_instance([test_extension])
-        func = Mock()
-        em.map(func)
-        func.assert_called_once_with(test_extension)
-
-    def test_manager_should_call_all(self):
-        em = ExtensionManager.make_test_instance([test_extension2,
-                                                  test_extension])
-        func = Mock()
-        em.map(func)
-        func.assert_any_call(test_extension2)
-        func.assert_any_call(test_extension)
-
-    def test_manager_return_values(self):
-        def mapped(ext, *args, **kwds):
-            return ext.name
-
-        em = ExtensionManager.make_test_instance([test_extension2,
-                                                  test_extension])
-        results = em.map(mapped)
-        self.assertEqual(sorted(results), ['another_one', 'test_extension'])
-
-    def test_manager_should_eat_exceptions(self):
-        em = ExtensionManager.make_test_instance([test_extension])
-
-        func = Mock(side_effect=RuntimeError('hard coded error'))
-
-        results = em.map(func, 1, 2, a='A', b='B')
-        self.assertEqual(results, [])
-
-    def test_manager_should_propagate_exceptions(self):
-        em = ExtensionManager.make_test_instance([test_extension],
-                                                 propagate_map_exceptions=True)
-        self.skipTest('Skipping temporarily')
-        func = Mock(side_effect=RuntimeError('hard coded error'))
-        em.map(func, 1, 2, a='A', b='B')
-
-    # NamedExtensionManager
-    def test_named_manager_should_use_supplied_extensions(self):
-        extensions = [test_extension, test_extension2]
-        em = NamedExtensionManager.make_test_instance(extensions)
-        self.assertEqual(extensions, em.extensions)
-
-    def test_named_manager_should_have_default_namespace(self):
-        em = NamedExtensionManager.make_test_instance([])
-        self.assertEqual(em.namespace, 'TESTING')
-
-    def test_named_manager_should_use_supplied_namespace(self):
-        namespace = 'testing.1.2.3'
-        em = NamedExtensionManager.make_test_instance([], namespace=namespace)
-        self.assertEqual(namespace, em.namespace)
-
-    def test_named_manager_should_populate_names(self):
-        extensions = [test_extension, test_extension2]
-        em = NamedExtensionManager.make_test_instance(extensions)
-        self.assertEqual(em.names(), ['test_extension', 'another_one'])
-
-    # HookManager
-    def test_hook_manager_should_use_supplied_extensions(self):
-        extensions = [test_extension, test_extension2]
-        em = HookManager.make_test_instance(extensions)
-        self.assertEqual(extensions, em.extensions)
-
-    def test_hook_manager_should_be_first_extension_name(self):
-        extensions = [test_extension, test_extension2]
-        em = HookManager.make_test_instance(extensions)
-        # This will raise KeyError if the names don't match
-        assert(em[test_extension.name])
-
-    def test_hook_manager_should_have_default_namespace(self):
-        em = HookManager.make_test_instance([test_extension])
-        self.assertEqual(em.namespace, 'TESTING')
-
-    def test_hook_manager_should_use_supplied_namespace(self):
-        namespace = 'testing.1.2.3'
-        em = HookManager.make_test_instance([test_extension],
-                                            namespace=namespace)
-        self.assertEqual(namespace, em.namespace)
-
-    def test_hook_manager_should_return_named_extensions(self):
-        hook1 = Extension('captain', None, None, None)
-        hook2 = Extension('captain', None, None, None)
-        em = HookManager.make_test_instance([hook1, hook2])
-        self.assertEqual([hook1, hook2], em['captain'])
-
-    # DriverManager
-    def test_driver_manager_should_use_supplied_extension(self):
-        em = DriverManager.make_test_instance(a_driver)
-        self.assertEqual([a_driver], em.extensions)
-
-    def test_driver_manager_should_have_default_namespace(self):
-        em = DriverManager.make_test_instance(a_driver)
-        self.assertEqual(em.namespace, 'TESTING')
-
-    def test_driver_manager_should_use_supplied_namespace(self):
-        namespace = 'testing.1.2.3'
-        em = DriverManager.make_test_instance(a_driver, namespace=namespace)
-        self.assertEqual(namespace, em.namespace)
-
-    def test_instance_should_use_driver_name(self):
-        em = DriverManager.make_test_instance(a_driver)
-        self.assertEqual(['test_driver'], em.names())
-
-    def test_instance_call(self):
-        def invoke(ext, *args, **kwds):
-            return ext.name, args, kwds
-
-        em = DriverManager.make_test_instance(a_driver)
-        result = em(invoke, 'a', b='C')
-        self.assertEqual(result, ('test_driver', ('a',), {'b': 'C'}))
-
-    def test_instance_driver_property(self):
-        em = DriverManager.make_test_instance(a_driver)
-        self.assertEqual(sentinel.driver_obj, em.driver)
-
-    # EnabledExtensionManager
-    def test_enabled_instance_should_use_supplied_extensions(self):
-        extensions = [test_extension, test_extension2]
-        em = EnabledExtensionManager.make_test_instance(extensions)
-        self.assertEqual(extensions, em.extensions)
-
-    # DispatchExtensionManager
-    def test_dispatch_instance_should_use_supplied_extensions(self):
-        extensions = [test_extension, test_extension2]
-        em = DispatchExtensionManager.make_test_instance(extensions)
-        self.assertEqual(extensions, em.extensions)
-
-    def test_dispatch_map_should_invoke_filter_for_extensions(self):
-        em = DispatchExtensionManager.make_test_instance([test_extension,
-                                                          test_extension2])
-        filter_func = Mock(return_value=False)
-        args = ('A',)
-        kw = {'big': 'Cheese'}
-        em.map(filter_func, None, *args, **kw)
-        filter_func.assert_any_call(test_extension, *args, **kw)
-        filter_func.assert_any_call(test_extension2, *args, **kw)
-
-    # NameDispatchExtensionManager
-    def test_name_dispatch_instance_should_use_supplied_extensions(self):
-        extensions = [test_extension, test_extension2]
-        em = NameDispatchExtensionManager.make_test_instance(extensions)
-
-        self.assertEqual(extensions, em.extensions)
-
-    def test_name_dispatch_instance_should_build_extension_name_map(self):
-        extensions = [test_extension, test_extension2]
-        em = NameDispatchExtensionManager.make_test_instance(extensions)
-        self.assertEqual(test_extension, em.by_name[test_extension.name])
-        self.assertEqual(test_extension2, em.by_name[test_extension2.name])
-
-    def test_named_dispatch_map_should_invoke_filter_for_extensions(self):
-        em = NameDispatchExtensionManager.make_test_instance([test_extension,
-                                                              test_extension2])
-        func = Mock()
-        args = ('A',)
-        kw = {'BIGGER': 'Cheese'}
-        em.map(['test_extension'], func, *args, **kw)
-        func.assert_called_once_with(test_extension, *args, **kw)
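For the dispatch managers exercised at the end of this file, a small sketch of the filtered map() call: the first argument selects which extensions run, the second is the callable applied to them (extensions are again built by hand):

    from stevedore.dispatch import DispatchExtensionManager
    from stevedore.extension import Extension

    exts = [
        Extension('csv', None, None, None),
        Extension('json', None, None, None),
    ]
    dm = DispatchExtensionManager.make_test_instance(exts)

    def only_json(ext, *args, **kwds):
        # Filter callable: invoked with the same extra args as the mapped callable.
        return ext.name == 'json'

    # map(filter_func, func, ...): func runs only for extensions the filter accepts.
    results = dm.map(only_json, lambda ext, payload: (ext.name, payload), {'a': 1})
    print(results)   # [('json', {'a': 1})]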
diff --git a/lib/stevedore/tests/utils.py b/lib/stevedore/tests/utils.py
deleted file mode 100644
index 01e2a4645fc49dcafbc0f643e1895eccf781024a..0000000000000000000000000000000000000000
--- a/lib/stevedore/tests/utils.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from oslotest import base as test_base
-
-
-class TestCase(test_base.BaseTestCase):
-    pass
diff --git a/lib/tornado/test/__init__.py b/lib/tornado/test/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/lib/tornado/test/__main__.py b/lib/tornado/test/__main__.py
deleted file mode 100644
index c78478cbd3f201a50c3d1cd28781f8044b47de82..0000000000000000000000000000000000000000
--- a/lib/tornado/test/__main__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-"""Shim to allow python -m tornado.test.
-
-This only works in python 2.7+.
-"""
-from __future__ import absolute_import, division, print_function
-
-from tornado.test.runtests import all, main
-
-# tornado.testing.main autodiscovery relies on 'all' being present in
-# the main module, so import it here even though it is not used directly.
-# The following line prevents a pyflakes warning.
-all = all
-
-main()
diff --git a/lib/tornado/test/asyncio_test.py b/lib/tornado/test/asyncio_test.py
deleted file mode 100644
index d0e3f2b020dfdea6bc9d5b5b1c36a7cf9ec8cc96..0000000000000000000000000000000000000000
--- a/lib/tornado/test/asyncio_test.py
+++ /dev/null
@@ -1,119 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from __future__ import absolute_import, division, print_function
-
-from tornado import gen
-from tornado.testing import AsyncTestCase, gen_test
-from tornado.test.util import unittest, skipBefore33, skipBefore35, exec_test
-
-try:
-    from tornado.platform.asyncio import asyncio
-except ImportError:
-    asyncio = None
-else:
-    from tornado.platform.asyncio import AsyncIOLoop, to_asyncio_future
-    # This is used in dynamically-evaluated code, so silence pyflakes.
-    to_asyncio_future
-
-
-@unittest.skipIf(asyncio is None, "asyncio module not present")
-class AsyncIOLoopTest(AsyncTestCase):
-    def get_new_ioloop(self):
-        io_loop = AsyncIOLoop()
-        asyncio.set_event_loop(io_loop.asyncio_loop)
-        return io_loop
-
-    def test_asyncio_callback(self):
-        # Basic test that the asyncio loop is set up correctly.
-        asyncio.get_event_loop().call_soon(self.stop)
-        self.wait()
-
-    @gen_test
-    def test_asyncio_future(self):
-        # Test that we can yield an asyncio future from a tornado coroutine.
-        # Without 'yield from', we must wrap coroutines in ensure_future,
-        # which was introduced in Python 3.4.4 and deprecates the older
-        # name "async".
-        if hasattr(asyncio, 'ensure_future'):
-            ensure_future = asyncio.ensure_future
-        else:
-            ensure_future = asyncio.async
-
-        x = yield ensure_future(
-            asyncio.get_event_loop().run_in_executor(None, lambda: 42))
-        self.assertEqual(x, 42)
-
-    @skipBefore33
-    @gen_test
-    def test_asyncio_yield_from(self):
-        # Test that we can use asyncio coroutines with 'yield from'
-        # instead of asyncio.async(). This requires python 3.3 syntax.
-        namespace = exec_test(globals(), locals(), """
-        @gen.coroutine
-        def f():
-            event_loop = asyncio.get_event_loop()
-            x = yield from event_loop.run_in_executor(None, lambda: 42)
-            return x
-        """)
-        result = yield namespace['f']()
-        self.assertEqual(result, 42)
-
-    @skipBefore35
-    def test_asyncio_adapter(self):
-        # This test demonstrates that when using the asyncio coroutine
-        # runner (i.e. run_until_complete), the to_asyncio_future
-        # adapter is needed. No adapter is needed in the other direction,
-        # as demonstrated by other tests in the package.
-        @gen.coroutine
-        def tornado_coroutine():
-            yield gen.Task(self.io_loop.add_callback)
-            raise gen.Return(42)
-        native_coroutine_without_adapter = exec_test(globals(), locals(), """
-        async def native_coroutine_without_adapter():
-            return await tornado_coroutine()
-        """)["native_coroutine_without_adapter"]
-
-        native_coroutine_with_adapter = exec_test(globals(), locals(), """
-        async def native_coroutine_with_adapter():
-            return await to_asyncio_future(tornado_coroutine())
-        """)["native_coroutine_with_adapter"]
-
-        # Use the adapter, but two degrees from the tornado coroutine.
-        native_coroutine_with_adapter2 = exec_test(globals(), locals(), """
-        async def native_coroutine_with_adapter2():
-            return await to_asyncio_future(native_coroutine_without_adapter())
-        """)["native_coroutine_with_adapter2"]
-
-        # Tornado supports native coroutines both with and without adapters
-        self.assertEqual(
-            self.io_loop.run_sync(native_coroutine_without_adapter),
-            42)
-        self.assertEqual(
-            self.io_loop.run_sync(native_coroutine_with_adapter),
-            42)
-        self.assertEqual(
-            self.io_loop.run_sync(native_coroutine_with_adapter2),
-            42)
-
-        # Asyncio only supports coroutines that yield asyncio-compatible
-        # Futures.
-        with self.assertRaises(RuntimeError):
-            asyncio.get_event_loop().run_until_complete(
-                native_coroutine_without_adapter())
-        self.assertEqual(
-            asyncio.get_event_loop().run_until_complete(
-                native_coroutine_with_adapter()),
-            42)
-        self.assertEqual(
-            asyncio.get_event_loop().run_until_complete(
-                native_coroutine_with_adapter2()),
-            42)
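For context, a minimal sketch (assuming a tornado 4.x era install with asyncio available) of the bridge these tests exercise: a Tornado coroutine driven by asyncio's event loop through to_asyncio_future:

    from __future__ import absolute_import, division, print_function

    import asyncio

    from tornado import gen
    from tornado.platform.asyncio import AsyncIOMainLoop, to_asyncio_future

    AsyncIOMainLoop().install()     # let Tornado share asyncio's event loop

    @gen.coroutine
    def add(a, b):
        yield gen.moment            # yield control to the loop once
        raise gen.Return(a + b)

    loop = asyncio.get_event_loop()
    # asyncio only accepts asyncio-compatible futures, hence the adapter.
    print(loop.run_until_complete(to_asyncio_future(add(1, 2))))   # 3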
diff --git a/lib/tornado/test/auth_test.py b/lib/tornado/test/auth_test.py
deleted file mode 100644
index 400fc4f4582454d0b168422a27071ccbfe53a721..0000000000000000000000000000000000000000
--- a/lib/tornado/test/auth_test.py
+++ /dev/null
@@ -1,547 +0,0 @@
-# These tests do not currently do much to verify the correct implementation
-# of the openid/oauth protocols; they just exercise the major code paths
-# and ensure that nothing blows up (e.g. with unicode/bytes issues in
-# python 3).
-
-
-from __future__ import absolute_import, division, print_function
-from tornado.auth import OpenIdMixin, OAuthMixin, OAuth2Mixin, TwitterMixin, AuthError, GoogleOAuth2Mixin, FacebookGraphMixin
-from tornado.concurrent import Future
-from tornado.escape import json_decode
-from tornado import gen
-from tornado.httputil import url_concat
-from tornado.log import gen_log
-from tornado.testing import AsyncHTTPTestCase, ExpectLog
-from tornado.web import RequestHandler, Application, asynchronous, HTTPError
-
-
-class OpenIdClientLoginHandler(RequestHandler, OpenIdMixin):
-    def initialize(self, test):
-        self._OPENID_ENDPOINT = test.get_url('/openid/server/authenticate')
-
-    @asynchronous
-    def get(self):
-        if self.get_argument('openid.mode', None):
-            self.get_authenticated_user(
-                self.on_user, http_client=self.settings['http_client'])
-            return
-        res = self.authenticate_redirect()
-        assert isinstance(res, Future)
-        assert res.done()
-
-    def on_user(self, user):
-        if user is None:
-            raise Exception("user is None")
-        self.finish(user)
-
-
-class OpenIdServerAuthenticateHandler(RequestHandler):
-    def post(self):
-        mode = self.get_argument('openid.mode')
-        if mode != 'check_authentication':
-            raise Exception("incorrect openid.mode %r" % mode)
-        self.write('is_valid:true')
-
-
-class OAuth1ClientLoginHandler(RequestHandler, OAuthMixin):
-    def initialize(self, test, version):
-        self._OAUTH_VERSION = version
-        self._OAUTH_REQUEST_TOKEN_URL = test.get_url('/oauth1/server/request_token')
-        self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth1/server/authorize')
-        self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/oauth1/server/access_token')
-
-    def _oauth_consumer_token(self):
-        return dict(key='asdf', secret='qwer')
-
-    @asynchronous
-    def get(self):
-        if self.get_argument('oauth_token', None):
-            self.get_authenticated_user(
-                self.on_user, http_client=self.settings['http_client'])
-            return
-        res = self.authorize_redirect(http_client=self.settings['http_client'])
-        assert isinstance(res, Future)
-
-    def on_user(self, user):
-        if user is None:
-            raise Exception("user is None")
-        self.finish(user)
-
-    def _oauth_get_user(self, access_token, callback):
-        if self.get_argument('fail_in_get_user', None):
-            raise Exception("failing in get_user")
-        if access_token != dict(key='uiop', secret='5678'):
-            raise Exception("incorrect access token %r" % access_token)
-        callback(dict(email='foo@example.com'))
-
-
-class OAuth1ClientLoginCoroutineHandler(OAuth1ClientLoginHandler):
-    """Replaces OAuth1ClientLoginCoroutineHandler's get() with a coroutine."""
-    @gen.coroutine
-    def get(self):
-        if self.get_argument('oauth_token', None):
-            # Ensure that any exceptions are set on the returned Future,
-            # not simply thrown into the surrounding StackContext.
-            try:
-                yield self.get_authenticated_user()
-            except Exception as e:
-                self.set_status(503)
-                self.write("got exception: %s" % e)
-        else:
-            yield self.authorize_redirect()
-
-
-class OAuth1ClientRequestParametersHandler(RequestHandler, OAuthMixin):
-    def initialize(self, version):
-        self._OAUTH_VERSION = version
-
-    def _oauth_consumer_token(self):
-        return dict(key='asdf', secret='qwer')
-
-    def get(self):
-        params = self._oauth_request_parameters(
-            'http://www.example.com/api/asdf',
-            dict(key='uiop', secret='5678'),
-            parameters=dict(foo='bar'))
-        self.write(params)
-
-
-class OAuth1ServerRequestTokenHandler(RequestHandler):
-    def get(self):
-        self.write('oauth_token=zxcv&oauth_token_secret=1234')
-
-
-class OAuth1ServerAccessTokenHandler(RequestHandler):
-    def get(self):
-        self.write('oauth_token=uiop&oauth_token_secret=5678')
-
-
-class OAuth2ClientLoginHandler(RequestHandler, OAuth2Mixin):
-    def initialize(self, test):
-        self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth2/server/authorize')
-
-    def get(self):
-        res = self.authorize_redirect()
-        assert isinstance(res, Future)
-        assert res.done()
-
-
-class FacebookClientLoginHandler(RequestHandler, FacebookGraphMixin):
-    def initialize(self, test):
-        self._OAUTH_AUTHORIZE_URL = test.get_url('/facebook/server/authorize')
-        self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/facebook/server/access_token')
-        self._FACEBOOK_BASE_URL = test.get_url('/facebook/server')
-
-    @gen.coroutine
-    def get(self):
-        if self.get_argument("code", None):
-            user = yield self.get_authenticated_user(
-                redirect_uri=self.request.full_url(),
-                client_id=self.settings["facebook_api_key"],
-                client_secret=self.settings["facebook_secret"],
-                code=self.get_argument("code"))
-            self.write(user)
-        else:
-            yield self.authorize_redirect(
-                redirect_uri=self.request.full_url(),
-                client_id=self.settings["facebook_api_key"],
-                extra_params={"scope": "read_stream,offline_access"})
-
-
-class FacebookServerAccessTokenHandler(RequestHandler):
-    def get(self):
-        self.write(dict(access_token="asdf", expires_in=3600))
-
-
-class FacebookServerMeHandler(RequestHandler):
-    def get(self):
-        self.write('{}')
-
-
-class TwitterClientHandler(RequestHandler, TwitterMixin):
-    def initialize(self, test):
-        self._OAUTH_REQUEST_TOKEN_URL = test.get_url('/oauth1/server/request_token')
-        self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/twitter/server/access_token')
-        self._OAUTH_AUTHORIZE_URL = test.get_url('/oauth1/server/authorize')
-        self._TWITTER_BASE_URL = test.get_url('/twitter/api')
-
-    def get_auth_http_client(self):
-        return self.settings['http_client']
-
-
-class TwitterClientLoginHandler(TwitterClientHandler):
-    @asynchronous
-    def get(self):
-        if self.get_argument("oauth_token", None):
-            self.get_authenticated_user(self.on_user)
-            return
-        self.authorize_redirect()
-
-    def on_user(self, user):
-        if user is None:
-            raise Exception("user is None")
-        self.finish(user)
-
-
-class TwitterClientLoginGenEngineHandler(TwitterClientHandler):
-    @asynchronous
-    @gen.engine
-    def get(self):
-        if self.get_argument("oauth_token", None):
-            user = yield self.get_authenticated_user()
-            self.finish(user)
-        else:
-            # Old style: with @gen.engine we can ignore the Future from
-            # authorize_redirect.
-            self.authorize_redirect()
-
-
-class TwitterClientLoginGenCoroutineHandler(TwitterClientHandler):
-    @gen.coroutine
-    def get(self):
-        if self.get_argument("oauth_token", None):
-            user = yield self.get_authenticated_user()
-            self.finish(user)
-        else:
-            # New style: with @gen.coroutine the result must be yielded
-            # or else the request will be auto-finished too soon.
-            yield self.authorize_redirect()
-
-
-class TwitterClientShowUserHandler(TwitterClientHandler):
-    @asynchronous
-    @gen.engine
-    def get(self):
-        # TODO: would be nice to go through the login flow instead of
-        # cheating with a hard-coded access token.
-        response = yield gen.Task(self.twitter_request,
-                                  '/users/show/%s' % self.get_argument('name'),
-                                  access_token=dict(key='hjkl', secret='vbnm'))
-        if response is None:
-            self.set_status(500)
-            self.finish('error from twitter request')
-        else:
-            self.finish(response)
-
-
-class TwitterClientShowUserFutureHandler(TwitterClientHandler):
-    @asynchronous
-    @gen.engine
-    def get(self):
-        try:
-            response = yield self.twitter_request(
-                '/users/show/%s' % self.get_argument('name'),
-                access_token=dict(key='hjkl', secret='vbnm'))
-        except AuthError as e:
-            self.set_status(500)
-            self.finish(str(e))
-            return
-        assert response is not None
-        self.finish(response)
-
-
-class TwitterServerAccessTokenHandler(RequestHandler):
-    def get(self):
-        self.write('oauth_token=hjkl&oauth_token_secret=vbnm&screen_name=foo')
-
-
-class TwitterServerShowUserHandler(RequestHandler):
-    def get(self, screen_name):
-        if screen_name == 'error':
-            raise HTTPError(500)
-        assert 'oauth_nonce' in self.request.arguments
-        assert 'oauth_timestamp' in self.request.arguments
-        assert 'oauth_signature' in self.request.arguments
-        assert self.get_argument('oauth_consumer_key') == 'test_twitter_consumer_key'
-        assert self.get_argument('oauth_signature_method') == 'HMAC-SHA1'
-        assert self.get_argument('oauth_version') == '1.0'
-        assert self.get_argument('oauth_token') == 'hjkl'
-        self.write(dict(screen_name=screen_name, name=screen_name.capitalize()))
-
-
-class TwitterServerVerifyCredentialsHandler(RequestHandler):
-    def get(self):
-        assert 'oauth_nonce' in self.request.arguments
-        assert 'oauth_timestamp' in self.request.arguments
-        assert 'oauth_signature' in self.request.arguments
-        assert self.get_argument('oauth_consumer_key') == 'test_twitter_consumer_key'
-        assert self.get_argument('oauth_signature_method') == 'HMAC-SHA1'
-        assert self.get_argument('oauth_version') == '1.0'
-        assert self.get_argument('oauth_token') == 'hjkl'
-        self.write(dict(screen_name='foo', name='Foo'))
-
-
-class AuthTest(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application(
-            [
-                # test endpoints
-                ('/openid/client/login', OpenIdClientLoginHandler, dict(test=self)),
-                ('/oauth10/client/login', OAuth1ClientLoginHandler,
-                 dict(test=self, version='1.0')),
-                ('/oauth10/client/request_params',
-                 OAuth1ClientRequestParametersHandler,
-                 dict(version='1.0')),
-                ('/oauth10a/client/login', OAuth1ClientLoginHandler,
-                 dict(test=self, version='1.0a')),
-                ('/oauth10a/client/login_coroutine',
-                 OAuth1ClientLoginCoroutineHandler,
-                 dict(test=self, version='1.0a')),
-                ('/oauth10a/client/request_params',
-                 OAuth1ClientRequestParametersHandler,
-                 dict(version='1.0a')),
-                ('/oauth2/client/login', OAuth2ClientLoginHandler, dict(test=self)),
-
-                ('/facebook/client/login', FacebookClientLoginHandler, dict(test=self)),
-
-                ('/twitter/client/login', TwitterClientLoginHandler, dict(test=self)),
-                ('/twitter/client/login_gen_engine', TwitterClientLoginGenEngineHandler, dict(test=self)),
-                ('/twitter/client/login_gen_coroutine', TwitterClientLoginGenCoroutineHandler, dict(test=self)),
-                ('/twitter/client/show_user', TwitterClientShowUserHandler, dict(test=self)),
-                ('/twitter/client/show_user_future', TwitterClientShowUserFutureHandler, dict(test=self)),
-
-                # simulated servers
-                ('/openid/server/authenticate', OpenIdServerAuthenticateHandler),
-                ('/oauth1/server/request_token', OAuth1ServerRequestTokenHandler),
-                ('/oauth1/server/access_token', OAuth1ServerAccessTokenHandler),
-
-                ('/facebook/server/access_token', FacebookServerAccessTokenHandler),
-                ('/facebook/server/me', FacebookServerMeHandler),
-                ('/twitter/server/access_token', TwitterServerAccessTokenHandler),
-                (r'/twitter/api/users/show/(.*)\.json', TwitterServerShowUserHandler),
-                (r'/twitter/api/account/verify_credentials\.json', TwitterServerVerifyCredentialsHandler),
-            ],
-            http_client=self.http_client,
-            twitter_consumer_key='test_twitter_consumer_key',
-            twitter_consumer_secret='test_twitter_consumer_secret',
-            facebook_api_key='test_facebook_api_key',
-            facebook_secret='test_facebook_secret')
-
-    def test_openid_redirect(self):
-        response = self.fetch('/openid/client/login', follow_redirects=False)
-        self.assertEqual(response.code, 302)
-        self.assertTrue(
-            '/openid/server/authenticate?' in response.headers['Location'])
-
-    def test_openid_get_user(self):
-        response = self.fetch('/openid/client/login?openid.mode=blah&openid.ns.ax=http://openid.net/srv/ax/1.0&openid.ax.type.email=http://axschema.org/contact/email&openid.ax.value.email=foo@example.com')
-        response.rethrow()
-        parsed = json_decode(response.body)
-        self.assertEqual(parsed["email"], "foo@example.com")
-
-    def test_oauth10_redirect(self):
-        response = self.fetch('/oauth10/client/login', follow_redirects=False)
-        self.assertEqual(response.code, 302)
-        self.assertTrue(response.headers['Location'].endswith(
-            '/oauth1/server/authorize?oauth_token=zxcv'))
-        # the cookie is base64('zxcv')|base64('1234')
-        self.assertTrue(
-            '_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'],
-            response.headers['Set-Cookie'])
-
-    def test_oauth10_get_user(self):
-        response = self.fetch(
-            '/oauth10/client/login?oauth_token=zxcv',
-            headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
-        response.rethrow()
-        parsed = json_decode(response.body)
-        self.assertEqual(parsed['email'], 'foo@example.com')
-        self.assertEqual(parsed['access_token'], dict(key='uiop', secret='5678'))
-
-    def test_oauth10_request_parameters(self):
-        response = self.fetch('/oauth10/client/request_params')
-        response.rethrow()
-        parsed = json_decode(response.body)
-        self.assertEqual(parsed['oauth_consumer_key'], 'asdf')
-        self.assertEqual(parsed['oauth_token'], 'uiop')
-        self.assertTrue('oauth_nonce' in parsed)
-        self.assertTrue('oauth_signature' in parsed)
-
-    def test_oauth10a_redirect(self):
-        response = self.fetch('/oauth10a/client/login', follow_redirects=False)
-        self.assertEqual(response.code, 302)
-        self.assertTrue(response.headers['Location'].endswith(
-            '/oauth1/server/authorize?oauth_token=zxcv'))
-        # the cookie is base64('zxcv')|base64('1234')
-        self.assertTrue(
-            '_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'],
-            response.headers['Set-Cookie'])
-
-    def test_oauth10a_get_user(self):
-        response = self.fetch(
-            '/oauth10a/client/login?oauth_token=zxcv',
-            headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
-        response.rethrow()
-        parsed = json_decode(response.body)
-        self.assertEqual(parsed['email'], 'foo@example.com')
-        self.assertEqual(parsed['access_token'], dict(key='uiop', secret='5678'))
-
-    def test_oauth10a_request_parameters(self):
-        response = self.fetch('/oauth10a/client/request_params')
-        response.rethrow()
-        parsed = json_decode(response.body)
-        self.assertEqual(parsed['oauth_consumer_key'], 'asdf')
-        self.assertEqual(parsed['oauth_token'], 'uiop')
-        self.assertTrue('oauth_nonce' in parsed)
-        self.assertTrue('oauth_signature' in parsed)
-
-    def test_oauth10a_get_user_coroutine_exception(self):
-        response = self.fetch(
-            '/oauth10a/client/login_coroutine?oauth_token=zxcv&fail_in_get_user=true',
-            headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
-        self.assertEqual(response.code, 503)
-
-    def test_oauth2_redirect(self):
-        response = self.fetch('/oauth2/client/login', follow_redirects=False)
-        self.assertEqual(response.code, 302)
-        self.assertTrue('/oauth2/server/authorize?' in response.headers['Location'])
-
-    def test_facebook_login(self):
-        response = self.fetch('/facebook/client/login', follow_redirects=False)
-        self.assertEqual(response.code, 302)
-        self.assertTrue('/facebook/server/authorize?' in response.headers['Location'])
-        response = self.fetch('/facebook/client/login?code=1234', follow_redirects=False)
-        self.assertEqual(response.code, 200)
-        user = json_decode(response.body)
-        self.assertEqual(user['access_token'], 'asdf')
-        self.assertEqual(user['session_expires'], '3600')
-
-    def base_twitter_redirect(self, url):
-        # Same as test_oauth10a_redirect
-        response = self.fetch(url, follow_redirects=False)
-        self.assertEqual(response.code, 302)
-        self.assertTrue(response.headers['Location'].endswith(
-            '/oauth1/server/authorize?oauth_token=zxcv'))
-        # the cookie is base64('zxcv')|base64('1234')
-        self.assertTrue(
-            '_oauth_request_token="enhjdg==|MTIzNA=="' in response.headers['Set-Cookie'],
-            response.headers['Set-Cookie'])
-
-    def test_twitter_redirect(self):
-        self.base_twitter_redirect('/twitter/client/login')
-
-    def test_twitter_redirect_gen_engine(self):
-        self.base_twitter_redirect('/twitter/client/login_gen_engine')
-
-    def test_twitter_redirect_gen_coroutine(self):
-        self.base_twitter_redirect('/twitter/client/login_gen_coroutine')
-
-    def test_twitter_get_user(self):
-        response = self.fetch(
-            '/twitter/client/login?oauth_token=zxcv',
-            headers={'Cookie': '_oauth_request_token=enhjdg==|MTIzNA=='})
-        response.rethrow()
-        parsed = json_decode(response.body)
-        self.assertEqual(parsed,
-                         {u'access_token': {u'key': u'hjkl',
-                                            u'screen_name': u'foo',
-                                            u'secret': u'vbnm'},
-                          u'name': u'Foo',
-                          u'screen_name': u'foo',
-                          u'username': u'foo'})
-
-    def test_twitter_show_user(self):
-        response = self.fetch('/twitter/client/show_user?name=somebody')
-        response.rethrow()
-        self.assertEqual(json_decode(response.body),
-                         {'name': 'Somebody', 'screen_name': 'somebody'})
-
-    def test_twitter_show_user_error(self):
-        with ExpectLog(gen_log, 'Error response HTTP 500'):
-            response = self.fetch('/twitter/client/show_user?name=error')
-        self.assertEqual(response.code, 500)
-        self.assertEqual(response.body, b'error from twitter request')
-
-    def test_twitter_show_user_future(self):
-        response = self.fetch('/twitter/client/show_user_future?name=somebody')
-        response.rethrow()
-        self.assertEqual(json_decode(response.body),
-                         {'name': 'Somebody', 'screen_name': 'somebody'})
-
-    def test_twitter_show_user_future_error(self):
-        response = self.fetch('/twitter/client/show_user_future?name=error')
-        self.assertEqual(response.code, 500)
-        self.assertIn(b'Error response HTTP 500', response.body)
-
-
-class GoogleLoginHandler(RequestHandler, GoogleOAuth2Mixin):
-    def initialize(self, test):
-        self.test = test
-        self._OAUTH_REDIRECT_URI = test.get_url('/client/login')
-        self._OAUTH_AUTHORIZE_URL = test.get_url('/google/oauth2/authorize')
-        self._OAUTH_ACCESS_TOKEN_URL = test.get_url('/google/oauth2/token')
-
-    @gen.coroutine
-    def get(self):
-        code = self.get_argument('code', None)
-        if code is not None:
-            # retrieve the authenticated google user
-            access = yield self.get_authenticated_user(self._OAUTH_REDIRECT_URI,
-                                                       code)
-            user = yield self.oauth2_request(
-                self.test.get_url("/google/oauth2/userinfo"),
-                access_token=access["access_token"])
-            # return the user and access token as json
-            user["access_token"] = access["access_token"]
-            self.write(user)
-        else:
-            yield self.authorize_redirect(
-                redirect_uri=self._OAUTH_REDIRECT_URI,
-                client_id=self.settings['google_oauth']['key'],
-                client_secret=self.settings['google_oauth']['secret'],
-                scope=['profile', 'email'],
-                response_type='code',
-                extra_params={'prompt': 'select_account'})
-
-
-class GoogleOAuth2AuthorizeHandler(RequestHandler):
-    def get(self):
-        # issue a fake auth code and redirect to redirect_uri
-        code = 'fake-authorization-code'
-        self.redirect(url_concat(self.get_argument('redirect_uri'),
-                                 dict(code=code)))
-
-
-class GoogleOAuth2TokenHandler(RequestHandler):
-    def post(self):
-        assert self.get_argument('code') == 'fake-authorization-code'
-        # issue a fake token
-        self.finish({
-            'access_token': 'fake-access-token',
-            'expires_in': 'never-expires'
-        })
-
-
-class GoogleOAuth2UserinfoHandler(RequestHandler):
-    def get(self):
-        assert self.get_argument('access_token') == 'fake-access-token'
-        # return a fake user
-        self.finish({
-            'name': 'Foo',
-            'email': 'foo@example.com'
-        })
-
-
-class GoogleOAuth2Test(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application(
-            [
-                # test endpoints
-                ('/client/login', GoogleLoginHandler, dict(test=self)),
-
-                # simulated google authorization server endpoints
-                ('/google/oauth2/authorize', GoogleOAuth2AuthorizeHandler),
-                ('/google/oauth2/token', GoogleOAuth2TokenHandler),
-                ('/google/oauth2/userinfo', GoogleOAuth2UserinfoHandler),
-            ],
-            google_oauth={
-                "key": 'fake_google_client_id',
-                "secret": 'fake_google_client_secret'
-            })
-
-    def test_google_login(self):
-        response = self.fetch('/client/login')
-        self.assertDictEqual({
-            u'name': u'Foo',
-            u'email': u'foo@example.com',
-            u'access_token': u'fake-access-token',
-        }, json_decode(response.body))
diff --git a/lib/tornado/test/concurrent_test.py b/lib/tornado/test/concurrent_test.py
deleted file mode 100644
index 4d89f572375e61f9806dafb94417a1d2384775d3..0000000000000000000000000000000000000000
--- a/lib/tornado/test/concurrent_test.py
+++ /dev/null
@@ -1,435 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2012 Facebook
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from __future__ import absolute_import, division, print_function
-
-import gc
-import logging
-import re
-import socket
-import sys
-import traceback
-
-from tornado.concurrent import Future, return_future, ReturnValueIgnoredError, run_on_executor
-from tornado.escape import utf8, to_unicode
-from tornado import gen
-from tornado.iostream import IOStream
-from tornado.log import app_log
-from tornado import stack_context
-from tornado.tcpserver import TCPServer
-from tornado.testing import AsyncTestCase, ExpectLog, LogTrapTestCase, bind_unused_port, gen_test
-from tornado.test.util import unittest
-
-
-try:
-    from concurrent import futures
-except ImportError:
-    futures = None
-
-
-class ReturnFutureTest(AsyncTestCase):
-    @return_future
-    def sync_future(self, callback):
-        callback(42)
-
-    @return_future
-    def async_future(self, callback):
-        self.io_loop.add_callback(callback, 42)
-
-    @return_future
-    def immediate_failure(self, callback):
-        1 / 0
-
-    @return_future
-    def delayed_failure(self, callback):
-        self.io_loop.add_callback(lambda: 1 / 0)
-
-    @return_future
-    def return_value(self, callback):
-        # Note that the result of both running the callback and returning
-        # a value (or raising an exception) is unspecified; with current
-        # implementations the last event prior to callback resolution wins.
-        return 42
-
-    @return_future
-    def no_result_future(self, callback):
-        callback()
-
-    def test_immediate_failure(self):
-        with self.assertRaises(ZeroDivisionError):
-            # The caller sees the error just like a normal function.
-            self.immediate_failure(callback=self.stop)
-        # The callback is not run because the function failed synchronously.
-        self.io_loop.add_timeout(self.io_loop.time() + 0.05, self.stop)
-        result = self.wait()
-        self.assertIs(result, None)
-
-    def test_return_value(self):
-        with self.assertRaises(ReturnValueIgnoredError):
-            self.return_value(callback=self.stop)
-
-    def test_callback_kw(self):
-        future = self.sync_future(callback=self.stop)
-        result = self.wait()
-        self.assertEqual(result, 42)
-        self.assertEqual(future.result(), 42)
-
-    def test_callback_positional(self):
-        # When the callback is passed in positionally, future_wrap shouldn't
-        # add another callback in the kwargs.
-        future = self.sync_future(self.stop)
-        result = self.wait()
-        self.assertEqual(result, 42)
-        self.assertEqual(future.result(), 42)
-
-    def test_no_callback(self):
-        future = self.sync_future()
-        self.assertEqual(future.result(), 42)
-
-    def test_none_callback_kw(self):
-        # explicitly pass None as callback
-        future = self.sync_future(callback=None)
-        self.assertEqual(future.result(), 42)
-
-    def test_none_callback_pos(self):
-        future = self.sync_future(None)
-        self.assertEqual(future.result(), 42)
-
-    def test_async_future(self):
-        future = self.async_future()
-        self.assertFalse(future.done())
-        self.io_loop.add_future(future, self.stop)
-        future2 = self.wait()
-        self.assertIs(future, future2)
-        self.assertEqual(future.result(), 42)
-
-    @gen_test
-    def test_async_future_gen(self):
-        result = yield self.async_future()
-        self.assertEqual(result, 42)
-
-    def test_delayed_failure(self):
-        future = self.delayed_failure()
-        self.io_loop.add_future(future, self.stop)
-        future2 = self.wait()
-        self.assertIs(future, future2)
-        with self.assertRaises(ZeroDivisionError):
-            future.result()
-
-    def test_kw_only_callback(self):
-        @return_future
-        def f(**kwargs):
-            kwargs['callback'](42)
-        future = f()
-        self.assertEqual(future.result(), 42)
-
-    def test_error_in_callback(self):
-        self.sync_future(callback=lambda future: 1 / 0)
-        # The exception gets caught by our StackContext and will be re-raised
-        # when we wait.
-        self.assertRaises(ZeroDivisionError, self.wait)
-
-    def test_no_result_future(self):
-        future = self.no_result_future(self.stop)
-        result = self.wait()
-        self.assertIs(result, None)
-        # result of this future is undefined, but not an error
-        future.result()
-
-    def test_no_result_future_callback(self):
-        future = self.no_result_future(callback=lambda: self.stop())
-        result = self.wait()
-        self.assertIs(result, None)
-        future.result()
-
-    @gen_test
-    def test_future_traceback(self):
-        @return_future
-        @gen.engine
-        def f(callback):
-            yield gen.Task(self.io_loop.add_callback)
-            try:
-                1 / 0
-            except ZeroDivisionError:
-                self.expected_frame = traceback.extract_tb(
-                    sys.exc_info()[2], limit=1)[0]
-                raise
-        try:
-            yield f()
-            self.fail("didn't get expected exception")
-        except ZeroDivisionError:
-            tb = traceback.extract_tb(sys.exc_info()[2])
-            self.assertIn(self.expected_frame, tb)
-
-    @gen_test
-    def test_uncaught_exception_log(self):
-        @gen.coroutine
-        def f():
-            yield gen.moment
-            1 / 0
-
-        g = f()
-
-        with ExpectLog(app_log,
-                       "(?s)Future.* exception was never retrieved:"
-                       ".*ZeroDivisionError"):
-            yield gen.moment
-            yield gen.moment
-            del g
-            gc.collect()  # for PyPy
-
-
-# The following series of classes demonstrate and test various styles
-# of use, with and without generators and futures.
-
-
-class CapServer(TCPServer):
-    def handle_stream(self, stream, address):
-        logging.info("handle_stream")
-        self.stream = stream
-        self.stream.read_until(b"\n", self.handle_read)
-
-    def handle_read(self, data):
-        logging.info("handle_read")
-        data = to_unicode(data)
-        if data == data.upper():
-            self.stream.write(b"error\talready capitalized\n")
-        else:
-            # data already has \n
-            self.stream.write(utf8("ok\t%s" % data.upper()))
-        self.stream.close()
-
-
-class CapError(Exception):
-    pass
-
-
-class BaseCapClient(object):
-    def __init__(self, port, io_loop):
-        self.port = port
-        self.io_loop = io_loop
-
-    def process_response(self, data):
-        status, message = re.match('(.*)\t(.*)\n', to_unicode(data)).groups()
-        if status == 'ok':
-            return message
-        else:
-            raise CapError(message)
-
-
-class ManualCapClient(BaseCapClient):
-    def capitalize(self, request_data, callback=None):
-        logging.info("capitalize")
-        self.request_data = request_data
-        self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
-        self.stream.connect(('127.0.0.1', self.port),
-                            callback=self.handle_connect)
-        self.future = Future()
-        if callback is not None:
-            self.future.add_done_callback(
-                stack_context.wrap(lambda future: callback(future.result())))
-        return self.future
-
-    def handle_connect(self):
-        logging.info("handle_connect")
-        self.stream.write(utf8(self.request_data + "\n"))
-        self.stream.read_until(b'\n', callback=self.handle_read)
-
-    def handle_read(self, data):
-        logging.info("handle_read")
-        self.stream.close()
-        try:
-            self.future.set_result(self.process_response(data))
-        except CapError as e:
-            self.future.set_exception(e)
-
-
-class DecoratorCapClient(BaseCapClient):
-    @return_future
-    def capitalize(self, request_data, callback):
-        logging.info("capitalize")
-        self.request_data = request_data
-        self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
-        self.stream.connect(('127.0.0.1', self.port),
-                            callback=self.handle_connect)
-        self.callback = callback
-
-    def handle_connect(self):
-        logging.info("handle_connect")
-        self.stream.write(utf8(self.request_data + "\n"))
-        self.stream.read_until(b'\n', callback=self.handle_read)
-
-    def handle_read(self, data):
-        logging.info("handle_read")
-        self.stream.close()
-        self.callback(self.process_response(data))
-
-
-class GeneratorCapClient(BaseCapClient):
-    @return_future
-    @gen.engine
-    def capitalize(self, request_data, callback):
-        logging.info('capitalize')
-        stream = IOStream(socket.socket(), io_loop=self.io_loop)
-        logging.info('connecting')
-        yield gen.Task(stream.connect, ('127.0.0.1', self.port))
-        stream.write(utf8(request_data + '\n'))
-        logging.info('reading')
-        data = yield gen.Task(stream.read_until, b'\n')
-        logging.info('returning')
-        stream.close()
-        callback(self.process_response(data))
-
-
-class ClientTestMixin(object):
-    def setUp(self):
-        super(ClientTestMixin, self).setUp()  # type: ignore
-        self.server = CapServer(io_loop=self.io_loop)
-        sock, port = bind_unused_port()
-        self.server.add_sockets([sock])
-        self.client = self.client_class(io_loop=self.io_loop, port=port)
-
-    def tearDown(self):
-        self.server.stop()
-        super(ClientTestMixin, self).tearDown()  # type: ignore
-
-    def test_callback(self):
-        self.client.capitalize("hello", callback=self.stop)
-        result = self.wait()
-        self.assertEqual(result, "HELLO")
-
-    def test_callback_error(self):
-        self.client.capitalize("HELLO", callback=self.stop)
-        self.assertRaisesRegexp(CapError, "already capitalized", self.wait)
-
-    def test_future(self):
-        future = self.client.capitalize("hello")
-        self.io_loop.add_future(future, self.stop)
-        self.wait()
-        self.assertEqual(future.result(), "HELLO")
-
-    def test_future_error(self):
-        future = self.client.capitalize("HELLO")
-        self.io_loop.add_future(future, self.stop)
-        self.wait()
-        self.assertRaisesRegexp(CapError, "already capitalized", future.result)
-
-    def test_generator(self):
-        @gen.engine
-        def f():
-            result = yield self.client.capitalize("hello")
-            self.assertEqual(result, "HELLO")
-            self.stop()
-        f()
-        self.wait()
-
-    def test_generator_error(self):
-        @gen.engine
-        def f():
-            with self.assertRaisesRegexp(CapError, "already capitalized"):
-                yield self.client.capitalize("HELLO")
-            self.stop()
-        f()
-        self.wait()
-
-
-class ManualClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
-    client_class = ManualCapClient
-
-
-class DecoratorClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
-    client_class = DecoratorCapClient
-
-
-class GeneratorClientTest(ClientTestMixin, AsyncTestCase, LogTrapTestCase):
-    client_class = GeneratorCapClient
-
-
-@unittest.skipIf(futures is None, "concurrent.futures module not present")
-class RunOnExecutorTest(AsyncTestCase):
-    @gen_test
-    def test_no_calling(self):
-        class Object(object):
-            def __init__(self, io_loop):
-                self.io_loop = io_loop
-                self.executor = futures.thread.ThreadPoolExecutor(1)
-
-            @run_on_executor
-            def f(self):
-                return 42
-
-        o = Object(io_loop=self.io_loop)
-        answer = yield o.f()
-        self.assertEqual(answer, 42)
-
-    @gen_test
-    def test_call_with_no_args(self):
-        class Object(object):
-            def __init__(self, io_loop):
-                self.io_loop = io_loop
-                self.executor = futures.thread.ThreadPoolExecutor(1)
-
-            @run_on_executor()
-            def f(self):
-                return 42
-
-        o = Object(io_loop=self.io_loop)
-        answer = yield o.f()
-        self.assertEqual(answer, 42)
-
-    @gen_test
-    def test_call_with_io_loop(self):
-        class Object(object):
-            def __init__(self, io_loop):
-                self._io_loop = io_loop
-                self.executor = futures.thread.ThreadPoolExecutor(1)
-
-            @run_on_executor(io_loop='_io_loop')
-            def f(self):
-                return 42
-
-        o = Object(io_loop=self.io_loop)
-        answer = yield o.f()
-        self.assertEqual(answer, 42)
-
-    @gen_test
-    def test_call_with_executor(self):
-        class Object(object):
-            def __init__(self, io_loop):
-                self.io_loop = io_loop
-                self.__executor = futures.thread.ThreadPoolExecutor(1)
-
-            @run_on_executor(executor='_Object__executor')
-            def f(self):
-                return 42
-
-        o = Object(io_loop=self.io_loop)
-        answer = yield o.f()
-        self.assertEqual(answer, 42)
-
-    @gen_test
-    def test_call_with_both(self):
-        class Object(object):
-            def __init__(self, io_loop):
-                self._io_loop = io_loop
-                self.__executor = futures.thread.ThreadPoolExecutor(1)
-
-            @run_on_executor(io_loop='_io_loop', executor='_Object__executor')
-            def f(self):
-                return 42
-
-        o = Object(io_loop=self.io_loop)
-        answer = yield o.f()
-        self.assertEqual(answer, 42)
diff --git a/lib/tornado/test/csv_translations/fr_FR.csv b/lib/tornado/test/csv_translations/fr_FR.csv
deleted file mode 100644
index 6321b6e7c0864f629137ab7694f8987b98f71fab..0000000000000000000000000000000000000000
--- a/lib/tornado/test/csv_translations/fr_FR.csv
+++ /dev/null
@@ -1 +0,0 @@
-"school","école"
diff --git a/lib/tornado/test/curl_httpclient_test.py b/lib/tornado/test/curl_httpclient_test.py
deleted file mode 100644
index eb6f89d6673bc56659aa477499f28b539234f528..0000000000000000000000000000000000000000
--- a/lib/tornado/test/curl_httpclient_test.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# coding: utf-8
-from __future__ import absolute_import, division, print_function
-
-from hashlib import md5
-
-from tornado.escape import utf8
-from tornado.httpclient import HTTPRequest
-from tornado.stack_context import ExceptionStackContext
-from tornado.testing import AsyncHTTPTestCase
-from tornado.test import httpclient_test
-from tornado.test.util import unittest
-from tornado.web import Application, RequestHandler
-
-
-try:
-    import pycurl  # type: ignore
-except ImportError:
-    pycurl = None
-
-if pycurl is not None:
-    from tornado.curl_httpclient import CurlAsyncHTTPClient
-
-
-@unittest.skipIf(pycurl is None, "pycurl module not present")
-class CurlHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase):
-    def get_http_client(self):
-        client = CurlAsyncHTTPClient(io_loop=self.io_loop,
-                                     defaults=dict(allow_ipv6=False))
-        # make sure AsyncHTTPClient magic doesn't give us the wrong class
-        self.assertTrue(isinstance(client, CurlAsyncHTTPClient))
-        return client
-
-
-class DigestAuthHandler(RequestHandler):
-    def get(self):
-        realm = 'test'
-        opaque = 'asdf'
-        # Real implementations would use a random nonce.
-        nonce = "1234"
-        username = 'foo'
-        password = 'bar'
-
-        auth_header = self.request.headers.get('Authorization', None)
-        if auth_header is not None:
-            auth_mode, params = auth_header.split(' ', 1)
-            assert auth_mode == 'Digest'
-            param_dict = {}
-            for pair in params.split(','):
-                k, v = pair.strip().split('=', 1)
-                if v[0] == '"' and v[-1] == '"':
-                    v = v[1:-1]
-                param_dict[k] = v
-            assert param_dict['realm'] == realm
-            assert param_dict['opaque'] == opaque
-            assert param_dict['nonce'] == nonce
-            assert param_dict['username'] == username
-            assert param_dict['uri'] == self.request.path
-            h1 = md5(utf8('%s:%s:%s' % (username, realm, password))).hexdigest()
-            h2 = md5(utf8('%s:%s' % (self.request.method,
-                                     self.request.path))).hexdigest()
-            digest = md5(utf8('%s:%s:%s' % (h1, nonce, h2))).hexdigest()
-            if digest == param_dict['response']:
-                self.write('ok')
-            else:
-                self.write('fail')
-        else:
-            self.set_status(401)
-            self.set_header('WWW-Authenticate',
-                            'Digest realm="%s", nonce="%s", opaque="%s"' %
-                            (realm, nonce, opaque))
-
-
-class CustomReasonHandler(RequestHandler):
-    def get(self):
-        self.set_status(200, "Custom reason")
-
-
-class CustomFailReasonHandler(RequestHandler):
-    def get(self):
-        self.set_status(400, "Custom reason")
-
-
-@unittest.skipIf(pycurl is None, "pycurl module not present")
-class CurlHTTPClientTestCase(AsyncHTTPTestCase):
-    def setUp(self):
-        super(CurlHTTPClientTestCase, self).setUp()
-        self.http_client = self.create_client()
-
-    def get_app(self):
-        return Application([
-            ('/digest', DigestAuthHandler),
-            ('/custom_reason', CustomReasonHandler),
-            ('/custom_fail_reason', CustomFailReasonHandler),
-        ])
-
-    def create_client(self, **kwargs):
-        return CurlAsyncHTTPClient(self.io_loop, force_instance=True,
-                                   defaults=dict(allow_ipv6=False),
-                                   **kwargs)
-
-    def test_prepare_curl_callback_stack_context(self):
-        exc_info = []
-
-        def error_handler(typ, value, tb):
-            exc_info.append((typ, value, tb))
-            self.stop()
-            return True
-
-        with ExceptionStackContext(error_handler):
-            request = HTTPRequest(self.get_url('/'),
-                                  prepare_curl_callback=lambda curl: 1 / 0)
-        self.http_client.fetch(request, callback=self.stop)
-        self.wait()
-        self.assertEqual(1, len(exc_info))
-        self.assertIs(exc_info[0][0], ZeroDivisionError)
-
-    def test_digest_auth(self):
-        response = self.fetch('/digest', auth_mode='digest',
-                              auth_username='foo', auth_password='bar')
-        self.assertEqual(response.body, b'ok')
-
-    def test_custom_reason(self):
-        response = self.fetch('/custom_reason')
-        self.assertEqual(response.reason, "Custom reason")
-
-    def test_fail_custom_reason(self):
-        response = self.fetch('/custom_fail_reason')
-        self.assertEqual(str(response.error), "HTTP 400: Custom reason")
-
-    def test_failed_setup(self):
-        self.http_client = self.create_client(max_clients=1)
-        for i in range(5):
-            response = self.fetch(u'/ユニコード')
-            self.assertIsNot(response.error, None)
diff --git a/lib/tornado/test/escape_test.py b/lib/tornado/test/escape_test.py
deleted file mode 100644
index 5ae75d002ab142efdb07bfc07ccad7ea4d3368d5..0000000000000000000000000000000000000000
--- a/lib/tornado/test/escape_test.py
+++ /dev/null
@@ -1,245 +0,0 @@
-#!/usr/bin/env python
-
-
-from __future__ import absolute_import, division, print_function
-import tornado.escape
-
-from tornado.escape import utf8, xhtml_escape, xhtml_unescape, url_escape, url_unescape, to_unicode, json_decode, json_encode, squeeze, recursive_unicode
-from tornado.util import unicode_type
-from tornado.test.util import unittest
-
-linkify_tests = [
-    # (input, linkify_kwargs, expected_output)
-
-    ("hello http://world.com/!", {},
-     u'hello <a href="http://world.com/">http://world.com/</a>!'),
-
-    ("hello http://world.com/with?param=true&stuff=yes", {},
-     u'hello <a href="http://world.com/with?param=true&amp;stuff=yes">http://world.com/with?param=true&amp;stuff=yes</a>'),
-
-    # an opened paren followed by many chars killed Gruber's regex
-    ("http://url.com/w(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", {},
-     u'<a href="http://url.com/w">http://url.com/w</a>(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'),
-
-    # as did too many dots at the end
-    ("http://url.com/withmany.......................................", {},
-     u'<a href="http://url.com/withmany">http://url.com/withmany</a>.......................................'),
-
-    ("http://url.com/withmany((((((((((((((((((((((((((((((((((a)", {},
-     u'<a href="http://url.com/withmany">http://url.com/withmany</a>((((((((((((((((((((((((((((((((((a)'),
-
-    # some examples from http://daringfireball.net/2009/11/liberal_regex_for_matching_urls
-    # plus a few extras (such as multiple parentheses).
-    ("http://foo.com/blah_blah", {},
-     u'<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>'),
-
-    ("http://foo.com/blah_blah/", {},
-     u'<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>'),
-
-    ("(Something like http://foo.com/blah_blah)", {},
-     u'(Something like <a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>)'),
-
-    ("http://foo.com/blah_blah_(wikipedia)", {},
-     u'<a href="http://foo.com/blah_blah_(wikipedia)">http://foo.com/blah_blah_(wikipedia)</a>'),
-
-    ("http://foo.com/blah_(blah)_(wikipedia)_blah", {},
-     u'<a href="http://foo.com/blah_(blah)_(wikipedia)_blah">http://foo.com/blah_(blah)_(wikipedia)_blah</a>'),
-
-    ("(Something like http://foo.com/blah_blah_(wikipedia))", {},
-     u'(Something like <a href="http://foo.com/blah_blah_(wikipedia)">http://foo.com/blah_blah_(wikipedia)</a>)'),
-
-    ("http://foo.com/blah_blah.", {},
-     u'<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>.'),
-
-    ("http://foo.com/blah_blah/.", {},
-     u'<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>.'),
-
-    ("<http://foo.com/blah_blah>", {},
-     u'&lt;<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>&gt;'),
-
-    ("<http://foo.com/blah_blah/>", {},
-     u'&lt;<a href="http://foo.com/blah_blah/">http://foo.com/blah_blah/</a>&gt;'),
-
-    ("http://foo.com/blah_blah,", {},
-     u'<a href="http://foo.com/blah_blah">http://foo.com/blah_blah</a>,'),
-
-    ("http://www.example.com/wpstyle/?p=364.", {},
-     u'<a href="http://www.example.com/wpstyle/?p=364">http://www.example.com/wpstyle/?p=364</a>.'),
-
-    ("rdar://1234",
-     {"permitted_protocols": ["http", "rdar"]},
-     u'<a href="rdar://1234">rdar://1234</a>'),
-
-    ("rdar:/1234",
-     {"permitted_protocols": ["rdar"]},
-     u'<a href="rdar:/1234">rdar:/1234</a>'),
-
-    ("http://userid:password@example.com:8080", {},
-     u'<a href="http://userid:password@example.com:8080">http://userid:password@example.com:8080</a>'),
-
-    ("http://userid@example.com", {},
-     u'<a href="http://userid@example.com">http://userid@example.com</a>'),
-
-    ("http://userid@example.com:8080", {},
-     u'<a href="http://userid@example.com:8080">http://userid@example.com:8080</a>'),
-
-    ("http://userid:password@example.com", {},
-     u'<a href="http://userid:password@example.com">http://userid:password@example.com</a>'),
-
-    ("message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e",
-     {"permitted_protocols": ["http", "message"]},
-     u'<a href="message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e">message://%3c330e7f8409726r6a4ba78dkf1fd71420c1bf6ff@mail.gmail.com%3e</a>'),
-
-    (u"http://\u27a1.ws/\u4a39", {},
-     u'<a href="http://\u27a1.ws/\u4a39">http://\u27a1.ws/\u4a39</a>'),
-
-    ("<tag>http://example.com</tag>", {},
-     u'&lt;tag&gt;<a href="http://example.com">http://example.com</a>&lt;/tag&gt;'),
-
-    ("Just a www.example.com link.", {},
-     u'Just a <a href="http://www.example.com">www.example.com</a> link.'),
-
-    ("Just a www.example.com link.",
-     {"require_protocol": True},
-     u'Just a www.example.com link.'),
-
-    ("A http://reallylong.com/link/that/exceedsthelenglimit.html",
-     {"require_protocol": True, "shorten": True},
-     u'A <a href="http://reallylong.com/link/that/exceedsthelenglimit.html" title="http://reallylong.com/link/that/exceedsthelenglimit.html">http://reallylong.com/link...</a>'),
-
-    ("A http://reallylongdomainnamethatwillbetoolong.com/hi!",
-     {"shorten": True},
-     u'A <a href="http://reallylongdomainnamethatwillbetoolong.com/hi" title="http://reallylongdomainnamethatwillbetoolong.com/hi">http://reallylongdomainnametha...</a>!'),
-
-    ("A file:///passwords.txt and http://web.com link", {},
-     u'A file:///passwords.txt and <a href="http://web.com">http://web.com</a> link'),
-
-    ("A file:///passwords.txt and http://web.com link",
-     {"permitted_protocols": ["file"]},
-     u'A <a href="file:///passwords.txt">file:///passwords.txt</a> and http://web.com link'),
-
-    ("www.external-link.com",
-     {"extra_params": 'rel="nofollow" class="external"'},
-     u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>'),
-
-    ("www.external-link.com and www.internal-link.com/blogs extra",
-     {"extra_params": lambda href: 'class="internal"' if href.startswith("http://www.internal-link.com") else 'rel="nofollow" class="external"'},
-     u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a> and <a href="http://www.internal-link.com/blogs" class="internal">www.internal-link.com/blogs</a> extra'),
-
-    ("www.external-link.com",
-     {"extra_params": lambda href: '    rel="nofollow" class="external"  '},
-     u'<a href="http://www.external-link.com" rel="nofollow" class="external">www.external-link.com</a>'),
-]
-
-
-class EscapeTestCase(unittest.TestCase):
-    def test_linkify(self):
-        for text, kwargs, html in linkify_tests:
-            linked = tornado.escape.linkify(text, **kwargs)
-            self.assertEqual(linked, html)
-
-    def test_xhtml_escape(self):
-        tests = [
-            ("<foo>", "&lt;foo&gt;"),
-            (u"<foo>", u"&lt;foo&gt;"),
-            (b"<foo>", b"&lt;foo&gt;"),
-
-            ("<>&\"'", "&lt;&gt;&amp;&quot;&#39;"),
-            ("&amp;", "&amp;amp;"),
-
-            (u"<\u00e9>", u"&lt;\u00e9&gt;"),
-            (b"<\xc3\xa9>", b"&lt;\xc3\xa9&gt;"),
-        ]
-        for unescaped, escaped in tests:
-            self.assertEqual(utf8(xhtml_escape(unescaped)), utf8(escaped))
-            self.assertEqual(utf8(unescaped), utf8(xhtml_unescape(escaped)))
-
-    def test_xhtml_unescape_numeric(self):
-        tests = [
-            ('foo&#32;bar', 'foo bar'),
-            ('foo&#x20;bar', 'foo bar'),
-            ('foo&#X20;bar', 'foo bar'),
-            ('foo&#xabc;bar', u'foo\u0abcbar'),
-            ('foo&#xyz;bar', 'foo&#xyz;bar'),  # invalid encoding
-            ('foo&#;bar', 'foo&#;bar'),        # invalid encoding
-            ('foo&#x;bar', 'foo&#x;bar'),      # invalid encoding
-        ]
-        for escaped, unescaped in tests:
-            self.assertEqual(unescaped, xhtml_unescape(escaped))
-
-    def test_url_escape_unicode(self):
-        tests = [
-            # byte strings are passed through as-is
-            (u'\u00e9'.encode('utf8'), '%C3%A9'),
-            (u'\u00e9'.encode('latin1'), '%E9'),
-
-            # unicode strings become utf8
-            (u'\u00e9', '%C3%A9'),
-        ]
-        for unescaped, escaped in tests:
-            self.assertEqual(url_escape(unescaped), escaped)
-
-    def test_url_unescape_unicode(self):
-        tests = [
-            ('%C3%A9', u'\u00e9', 'utf8'),
-            ('%C3%A9', u'\u00c3\u00a9', 'latin1'),
-            ('%C3%A9', utf8(u'\u00e9'), None),
-        ]
-        for escaped, unescaped, encoding in tests:
-            # input strings to url_unescape should only contain ascii
-            # characters, but make sure the function accepts both byte
-            # and unicode strings.
-            self.assertEqual(url_unescape(to_unicode(escaped), encoding), unescaped)
-            self.assertEqual(url_unescape(utf8(escaped), encoding), unescaped)
-
-    def test_url_escape_quote_plus(self):
-        unescaped = '+ #%'
-        plus_escaped = '%2B+%23%25'
-        escaped = '%2B%20%23%25'
-        self.assertEqual(url_escape(unescaped), plus_escaped)
-        self.assertEqual(url_escape(unescaped, plus=False), escaped)
-        self.assertEqual(url_unescape(plus_escaped), unescaped)
-        self.assertEqual(url_unescape(escaped, plus=False), unescaped)
-        self.assertEqual(url_unescape(plus_escaped, encoding=None),
-                         utf8(unescaped))
-        self.assertEqual(url_unescape(escaped, encoding=None, plus=False),
-                         utf8(unescaped))
-
-    def test_escape_return_types(self):
-        # On python2 the escape methods should generally return the same
-        # type as their argument
-        self.assertEqual(type(xhtml_escape("foo")), str)
-        self.assertEqual(type(xhtml_escape(u"foo")), unicode_type)
-
-    def test_json_decode(self):
-        # json_decode accepts both bytes and unicode, but strings it returns
-        # are always unicode.
-        self.assertEqual(json_decode(b'"foo"'), u"foo")
-        self.assertEqual(json_decode(u'"foo"'), u"foo")
-
-        # Non-ascii bytes are interpreted as utf8
-        self.assertEqual(json_decode(utf8(u'"\u00e9"')), u"\u00e9")
-
-    def test_json_encode(self):
-        # json deals with strings, not bytes.  On python 2 byte strings will
-        # convert automatically if they are utf8; on python 3 byte strings
-        # are not allowed.
-        self.assertEqual(json_decode(json_encode(u"\u00e9")), u"\u00e9")
-        if bytes is str:
-            self.assertEqual(json_decode(json_encode(utf8(u"\u00e9"))), u"\u00e9")
-            self.assertRaises(UnicodeDecodeError, json_encode, b"\xe9")
-
-    def test_squeeze(self):
-        self.assertEqual(squeeze(u'sequences     of    whitespace   chars'), u'sequences of whitespace chars')
-
-    def test_recursive_unicode(self):
-        tests = {
-            'dict': {b"foo": b"bar"},
-            'list': [b"foo", b"bar"],
-            'tuple': (b"foo", b"bar"),
-            'bytes': b"foo"
-        }
-        self.assertEqual(recursive_unicode(tests['dict']), {u"foo": u"bar"})
-        self.assertEqual(recursive_unicode(tests['list']), [u"foo", u"bar"])
-        self.assertEqual(recursive_unicode(tests['tuple']), (u"foo", u"bar"))
-        self.assertEqual(recursive_unicode(tests['bytes']), u"foo")
diff --git a/lib/tornado/test/gen_test.py b/lib/tornado/test/gen_test.py
deleted file mode 100644
index fea4c644978ad5f4fe841ff52c208f42e5b4f4e1..0000000000000000000000000000000000000000
--- a/lib/tornado/test/gen_test.py
+++ /dev/null
@@ -1,1467 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import gc
-import contextlib
-import datetime
-import functools
-import sys
-import textwrap
-import time
-import weakref
-
-from tornado.concurrent import return_future, Future
-from tornado.escape import url_escape
-from tornado.httpclient import AsyncHTTPClient
-from tornado.ioloop import IOLoop
-from tornado.log import app_log
-from tornado import stack_context
-from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
-from tornado.test.util import unittest, skipOnTravis, skipBefore33, skipBefore35, skipNotCPython, exec_test
-from tornado.web import Application, RequestHandler, asynchronous, HTTPError
-
-from tornado import gen
-
-try:
-    from concurrent import futures
-except ImportError:
-    futures = None
-
-
-class GenEngineTest(AsyncTestCase):
-    def setUp(self):
-        super(GenEngineTest, self).setUp()
-        self.named_contexts = []
-
-    def named_context(self, name):
-        @contextlib.contextmanager
-        def context():
-            self.named_contexts.append(name)
-            try:
-                yield
-            finally:
-                self.assertEqual(self.named_contexts.pop(), name)
-        return context
-
-    def run_gen(self, f):
-        f()
-        return self.wait()
-
-    def delay_callback(self, iterations, callback, arg):
-        """Runs callback(arg) after a number of IOLoop iterations."""
-        if iterations == 0:
-            callback(arg)
-        else:
-            self.io_loop.add_callback(functools.partial(
-                self.delay_callback, iterations - 1, callback, arg))
-
-    @return_future
-    def async_future(self, result, callback):
-        self.io_loop.add_callback(callback, result)
-
-    @gen.coroutine
-    def async_exception(self, e):
-        yield gen.moment
-        raise e
-
-    def test_no_yield(self):
-        @gen.engine
-        def f():
-            self.stop()
-        self.run_gen(f)
-
-    def test_inline_cb(self):
-        @gen.engine
-        def f():
-            (yield gen.Callback("k1"))()
-            res = yield gen.Wait("k1")
-            self.assertTrue(res is None)
-            self.stop()
-        self.run_gen(f)
-
-    def test_ioloop_cb(self):
-        @gen.engine
-        def f():
-            self.io_loop.add_callback((yield gen.Callback("k1")))
-            yield gen.Wait("k1")
-            self.stop()
-        self.run_gen(f)
-
-    def test_exception_phase1(self):
-        @gen.engine
-        def f():
-            1 / 0
-        self.assertRaises(ZeroDivisionError, self.run_gen, f)
-
-    def test_exception_phase2(self):
-        @gen.engine
-        def f():
-            self.io_loop.add_callback((yield gen.Callback("k1")))
-            yield gen.Wait("k1")
-            1 / 0
-        self.assertRaises(ZeroDivisionError, self.run_gen, f)
-
-    def test_exception_in_task_phase1(self):
-        def fail_task(callback):
-            1 / 0
-
-        @gen.engine
-        def f():
-            try:
-                yield gen.Task(fail_task)
-                raise Exception("did not get expected exception")
-            except ZeroDivisionError:
-                self.stop()
-        self.run_gen(f)
-
-    def test_exception_in_task_phase2(self):
-        # This is the case that requires the use of stack_context in gen.engine
-        def fail_task(callback):
-            self.io_loop.add_callback(lambda: 1 / 0)
-
-        @gen.engine
-        def f():
-            try:
-                yield gen.Task(fail_task)
-                raise Exception("did not get expected exception")
-            except ZeroDivisionError:
-                self.stop()
-        self.run_gen(f)
-
-    def test_with_arg(self):
-        @gen.engine
-        def f():
-            (yield gen.Callback("k1"))(42)
-            res = yield gen.Wait("k1")
-            self.assertEqual(42, res)
-            self.stop()
-        self.run_gen(f)
-
-    def test_with_arg_tuple(self):
-        @gen.engine
-        def f():
-            (yield gen.Callback((1, 2)))((3, 4))
-            res = yield gen.Wait((1, 2))
-            self.assertEqual((3, 4), res)
-            self.stop()
-        self.run_gen(f)
-
-    def test_key_reuse(self):
-        @gen.engine
-        def f():
-            yield gen.Callback("k1")
-            yield gen.Callback("k1")
-            self.stop()
-        self.assertRaises(gen.KeyReuseError, self.run_gen, f)
-
-    def test_key_reuse_tuple(self):
-        @gen.engine
-        def f():
-            yield gen.Callback((1, 2))
-            yield gen.Callback((1, 2))
-            self.stop()
-        self.assertRaises(gen.KeyReuseError, self.run_gen, f)
-
-    def test_key_mismatch(self):
-        @gen.engine
-        def f():
-            yield gen.Callback("k1")
-            yield gen.Wait("k2")
-            self.stop()
-        self.assertRaises(gen.UnknownKeyError, self.run_gen, f)
-
-    def test_key_mismatch_tuple(self):
-        @gen.engine
-        def f():
-            yield gen.Callback((1, 2))
-            yield gen.Wait((2, 3))
-            self.stop()
-        self.assertRaises(gen.UnknownKeyError, self.run_gen, f)
-
-    def test_leaked_callback(self):
-        @gen.engine
-        def f():
-            yield gen.Callback("k1")
-            self.stop()
-        self.assertRaises(gen.LeakedCallbackError, self.run_gen, f)
-
-    def test_leaked_callback_tuple(self):
-        @gen.engine
-        def f():
-            yield gen.Callback((1, 2))
-            self.stop()
-        self.assertRaises(gen.LeakedCallbackError, self.run_gen, f)
-
-    def test_parallel_callback(self):
-        @gen.engine
-        def f():
-            for k in range(3):
-                self.io_loop.add_callback((yield gen.Callback(k)))
-            yield gen.Wait(1)
-            self.io_loop.add_callback((yield gen.Callback(3)))
-            yield gen.Wait(0)
-            yield gen.Wait(3)
-            yield gen.Wait(2)
-            self.stop()
-        self.run_gen(f)
-
-    def test_bogus_yield(self):
-        @gen.engine
-        def f():
-            yield 42
-        self.assertRaises(gen.BadYieldError, self.run_gen, f)
-
-    def test_bogus_yield_tuple(self):
-        @gen.engine
-        def f():
-            yield (1, 2)
-        self.assertRaises(gen.BadYieldError, self.run_gen, f)
-
-    def test_reuse(self):
-        @gen.engine
-        def f():
-            self.io_loop.add_callback((yield gen.Callback(0)))
-            yield gen.Wait(0)
-            self.stop()
-        self.run_gen(f)
-        self.run_gen(f)
-
-    def test_task(self):
-        @gen.engine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            self.stop()
-        self.run_gen(f)
-
-    def test_wait_all(self):
-        @gen.engine
-        def f():
-            (yield gen.Callback("k1"))("v1")
-            (yield gen.Callback("k2"))("v2")
-            results = yield gen.WaitAll(["k1", "k2"])
-            self.assertEqual(results, ["v1", "v2"])
-            self.stop()
-        self.run_gen(f)
-
-    def test_exception_in_yield(self):
-        @gen.engine
-        def f():
-            try:
-                yield gen.Wait("k1")
-                raise Exception("did not get expected exception")
-            except gen.UnknownKeyError:
-                pass
-            self.stop()
-        self.run_gen(f)
-
-    def test_resume_after_exception_in_yield(self):
-        @gen.engine
-        def f():
-            try:
-                yield gen.Wait("k1")
-                raise Exception("did not get expected exception")
-            except gen.UnknownKeyError:
-                pass
-            (yield gen.Callback("k2"))("v2")
-            self.assertEqual((yield gen.Wait("k2")), "v2")
-            self.stop()
-        self.run_gen(f)
-
-    def test_orphaned_callback(self):
-        @gen.engine
-        def f():
-            self.orphaned_callback = yield gen.Callback(1)
-        try:
-            self.run_gen(f)
-            raise Exception("did not get expected exception")
-        except gen.LeakedCallbackError:
-            pass
-        self.orphaned_callback()
-
-    def test_none(self):
-        @gen.engine
-        def f():
-            yield None
-            self.stop()
-        self.run_gen(f)
-
-    def test_multi(self):
-        @gen.engine
-        def f():
-            (yield gen.Callback("k1"))("v1")
-            (yield gen.Callback("k2"))("v2")
-            results = yield [gen.Wait("k1"), gen.Wait("k2")]
-            self.assertEqual(results, ["v1", "v2"])
-            self.stop()
-        self.run_gen(f)
-
-    def test_multi_dict(self):
-        @gen.engine
-        def f():
-            (yield gen.Callback("k1"))("v1")
-            (yield gen.Callback("k2"))("v2")
-            results = yield dict(foo=gen.Wait("k1"), bar=gen.Wait("k2"))
-            self.assertEqual(results, dict(foo="v1", bar="v2"))
-            self.stop()
-        self.run_gen(f)
-
-    # The following tests explicitly run with both gen.Multi
-    # and gen.multi_future (Task returns a Future, so it can be used
-    # with either).
-    def test_multi_yieldpoint_delayed(self):
-        @gen.engine
-        def f():
-            # callbacks run at different times
-            responses = yield gen.Multi([
-                gen.Task(self.delay_callback, 3, arg="v1"),
-                gen.Task(self.delay_callback, 1, arg="v2"),
-            ])
-            self.assertEqual(responses, ["v1", "v2"])
-            self.stop()
-        self.run_gen(f)
-
-    def test_multi_yieldpoint_dict_delayed(self):
-        @gen.engine
-        def f():
-            # callbacks run at different times
-            responses = yield gen.Multi(dict(
-                foo=gen.Task(self.delay_callback, 3, arg="v1"),
-                bar=gen.Task(self.delay_callback, 1, arg="v2"),
-            ))
-            self.assertEqual(responses, dict(foo="v1", bar="v2"))
-            self.stop()
-        self.run_gen(f)
-
-    def test_multi_future_delayed(self):
-        @gen.engine
-        def f():
-            # callbacks run at different times
-            responses = yield gen.multi_future([
-                gen.Task(self.delay_callback, 3, arg="v1"),
-                gen.Task(self.delay_callback, 1, arg="v2"),
-            ])
-            self.assertEqual(responses, ["v1", "v2"])
-            self.stop()
-        self.run_gen(f)
-
-    def test_multi_future_dict_delayed(self):
-        @gen.engine
-        def f():
-            # callbacks run at different times
-            responses = yield gen.multi_future(dict(
-                foo=gen.Task(self.delay_callback, 3, arg="v1"),
-                bar=gen.Task(self.delay_callback, 1, arg="v2"),
-            ))
-            self.assertEqual(responses, dict(foo="v1", bar="v2"))
-            self.stop()
-        self.run_gen(f)
-
-    @skipOnTravis
-    @gen_test
-    def test_multi_performance(self):
-        # Yielding a list used to have quadratic performance; make
-        # sure a large list stays reasonable.  On my laptop a list of
-        # 2000 used to take 1.8s; now it takes 0.12s.
-        start = time.time()
-        yield [gen.Task(self.io_loop.add_callback) for i in range(2000)]
-        end = time.time()
-        self.assertLess(end - start, 1.0)
-
-    @gen_test
-    def test_multi_empty(self):
-        # Empty lists or dicts should return the same type.
-        x = yield []
-        self.assertTrue(isinstance(x, list))
-        y = yield {}
-        self.assertTrue(isinstance(y, dict))
-
-    @gen_test
-    def test_multi_mixed_types(self):
-        # A YieldPoint (Wait) and Future (Task) can be combined
-        # (and use the YieldPoint codepath)
-        (yield gen.Callback("k1"))("v1")
-        responses = yield [gen.Wait("k1"),
-                           gen.Task(self.delay_callback, 3, arg="v2")]
-        self.assertEqual(responses, ["v1", "v2"])
-
-    @gen_test
-    def test_future(self):
-        result = yield self.async_future(1)
-        self.assertEqual(result, 1)
-
-    @gen_test
-    def test_multi_future(self):
-        results = yield [self.async_future(1), self.async_future(2)]
-        self.assertEqual(results, [1, 2])
-
-    @gen_test
-    def test_multi_future_duplicate(self):
-        f = self.async_future(2)
-        results = yield [self.async_future(1), f, self.async_future(3), f]
-        self.assertEqual(results, [1, 2, 3, 2])
-
-    @gen_test
-    def test_multi_dict_future(self):
-        results = yield dict(foo=self.async_future(1), bar=self.async_future(2))
-        self.assertEqual(results, dict(foo=1, bar=2))
-
-    @gen_test
-    def test_multi_exceptions(self):
-        with ExpectLog(app_log, "Multiple exceptions in yield list"):
-            with self.assertRaises(RuntimeError) as cm:
-                yield gen.Multi([self.async_exception(RuntimeError("error 1")),
-                                 self.async_exception(RuntimeError("error 2"))])
-        self.assertEqual(str(cm.exception), "error 1")
-
-        # With only one exception, no error is logged.
-        with self.assertRaises(RuntimeError):
-            yield gen.Multi([self.async_exception(RuntimeError("error 1")),
-                             self.async_future(2)])
-
-        # Exception logging may be explicitly quieted.
-        with self.assertRaises(RuntimeError):
-            yield gen.Multi([self.async_exception(RuntimeError("error 1")),
-                             self.async_exception(RuntimeError("error 2"))],
-                            quiet_exceptions=RuntimeError)
-
-    @gen_test
-    def test_multi_future_exceptions(self):
-        with ExpectLog(app_log, "Multiple exceptions in yield list"):
-            with self.assertRaises(RuntimeError) as cm:
-                yield [self.async_exception(RuntimeError("error 1")),
-                       self.async_exception(RuntimeError("error 2"))]
-        self.assertEqual(str(cm.exception), "error 1")
-
-        # With only one exception, no error is logged.
-        with self.assertRaises(RuntimeError):
-            yield [self.async_exception(RuntimeError("error 1")),
-                   self.async_future(2)]
-
-        # Exception logging may be explicitly quieted.
-        with self.assertRaises(RuntimeError):
-            yield gen.multi_future(
-                [self.async_exception(RuntimeError("error 1")),
-                 self.async_exception(RuntimeError("error 2"))],
-                quiet_exceptions=RuntimeError)
-
-    def test_arguments(self):
-        @gen.engine
-        def f():
-            (yield gen.Callback("noargs"))()
-            self.assertEqual((yield gen.Wait("noargs")), None)
-            (yield gen.Callback("1arg"))(42)
-            self.assertEqual((yield gen.Wait("1arg")), 42)
-
-            (yield gen.Callback("kwargs"))(value=42)
-            result = yield gen.Wait("kwargs")
-            self.assertTrue(isinstance(result, gen.Arguments))
-            self.assertEqual(((), dict(value=42)), result)
-            self.assertEqual(dict(value=42), result.kwargs)
-
-            (yield gen.Callback("2args"))(42, 43)
-            result = yield gen.Wait("2args")
-            self.assertTrue(isinstance(result, gen.Arguments))
-            self.assertEqual(((42, 43), {}), result)
-            self.assertEqual((42, 43), result.args)
-
-            def task_func(callback):
-                callback(None, error="foo")
-            result = yield gen.Task(task_func)
-            self.assertTrue(isinstance(result, gen.Arguments))
-            self.assertEqual(((None,), dict(error="foo")), result)
-
-            self.stop()
-        self.run_gen(f)
-
-    def test_stack_context_leak(self):
-        # regression test: repeated invocations of a gen-based
-        # function should not result in accumulated stack_contexts
-        def _stack_depth():
-            head = stack_context._state.contexts[1]
-            length = 0
-
-            while head is not None:
-                length += 1
-                head = head.old_contexts[1]
-
-            return length
-
-        @gen.engine
-        def inner(callback):
-            yield gen.Task(self.io_loop.add_callback)
-            callback()
-
-        @gen.engine
-        def outer():
-            for i in range(10):
-                yield gen.Task(inner)
-
-            stack_increase = _stack_depth() - initial_stack_depth
-            self.assertTrue(stack_increase <= 2)
-            self.stop()
-        initial_stack_depth = _stack_depth()
-        self.run_gen(outer)
-
-    def test_stack_context_leak_exception(self):
-        # same as previous, but with a function that exits with an exception
-        @gen.engine
-        def inner(callback):
-            yield gen.Task(self.io_loop.add_callback)
-            1 / 0
-
-        @gen.engine
-        def outer():
-            for i in range(10):
-                try:
-                    yield gen.Task(inner)
-                except ZeroDivisionError:
-                    pass
-            stack_increase = len(stack_context._state.contexts) - initial_stack_depth
-            self.assertTrue(stack_increase <= 2)
-            self.stop()
-        initial_stack_depth = len(stack_context._state.contexts)
-        self.run_gen(outer)
-
-    def function_with_stack_context(self, callback):
-        # Technically this function should stack_context.wrap its callback
-        # upon entry.  However, it is very common for this step to be
-        # omitted.
-        def step2():
-            self.assertEqual(self.named_contexts, ['a'])
-            self.io_loop.add_callback(callback)
-
-        with stack_context.StackContext(self.named_context('a')):
-            self.io_loop.add_callback(step2)
-
-    @gen_test
-    def test_wait_transfer_stack_context(self):
-        # Wait should not pick up contexts from where callback was invoked,
-        # even if that function improperly fails to wrap its callback.
-        cb = yield gen.Callback('k1')
-        self.function_with_stack_context(cb)
-        self.assertEqual(self.named_contexts, [])
-        yield gen.Wait('k1')
-        self.assertEqual(self.named_contexts, [])
-
-    @gen_test
-    def test_task_transfer_stack_context(self):
-        yield gen.Task(self.function_with_stack_context)
-        self.assertEqual(self.named_contexts, [])
-
-    def test_raise_after_stop(self):
-        # This pattern will be used in the following tests so make sure
-        # the exception propagates as expected.
-        @gen.engine
-        def f():
-            self.stop()
-            1 / 0
-
-        with self.assertRaises(ZeroDivisionError):
-            self.run_gen(f)
-
-    def test_sync_raise_return(self):
-        # gen.Return is allowed in @gen.engine, but it may not be used
-        # to return a value.
-        @gen.engine
-        def f():
-            self.stop(42)
-            raise gen.Return()
-
-        result = self.run_gen(f)
-        self.assertEqual(result, 42)
-
-    def test_async_raise_return(self):
-        @gen.engine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            self.stop(42)
-            raise gen.Return()
-
-        result = self.run_gen(f)
-        self.assertEqual(result, 42)
-
-    def test_sync_raise_return_value(self):
-        @gen.engine
-        def f():
-            raise gen.Return(42)
-
-        with self.assertRaises(gen.ReturnValueIgnoredError):
-            self.run_gen(f)
-
-    def test_sync_raise_return_value_tuple(self):
-        @gen.engine
-        def f():
-            raise gen.Return((1, 2))
-
-        with self.assertRaises(gen.ReturnValueIgnoredError):
-            self.run_gen(f)
-
-    def test_async_raise_return_value(self):
-        @gen.engine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            raise gen.Return(42)
-
-        with self.assertRaises(gen.ReturnValueIgnoredError):
-            self.run_gen(f)
-
-    def test_async_raise_return_value_tuple(self):
-        @gen.engine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            raise gen.Return((1, 2))
-
-        with self.assertRaises(gen.ReturnValueIgnoredError):
-            self.run_gen(f)
-
-    def test_return_value(self):
-        # It is an error to apply @gen.engine to a function that returns
-        # a value.
-        @gen.engine
-        def f():
-            return 42
-
-        with self.assertRaises(gen.ReturnValueIgnoredError):
-            self.run_gen(f)
-
-    def test_return_value_tuple(self):
-        # It is an error to apply @gen.engine to a function that returns
-        # a value.
-        @gen.engine
-        def f():
-            return (1, 2)
-
-        with self.assertRaises(gen.ReturnValueIgnoredError):
-            self.run_gen(f)
-
-    @skipNotCPython
-    def test_task_refcounting(self):
-        # On CPython, tasks and their arguments should be released immediately
-        # without waiting for garbage collection.
-        @gen.engine
-        def f():
-            class Foo(object):
-                pass
-            arg = Foo()
-            self.arg_ref = weakref.ref(arg)
-            task = gen.Task(self.io_loop.add_callback, arg=arg)
-            self.task_ref = weakref.ref(task)
-            yield task
-            self.stop()
-
-        self.run_gen(f)
-        self.assertIs(self.arg_ref(), None)
-        self.assertIs(self.task_ref(), None)
-
-
-class GenCoroutineTest(AsyncTestCase):
-    def setUp(self):
-        # Stray StopIteration exceptions can lead to tests exiting prematurely,
-        # so we need explicit checks here to make sure the tests run all
-        # the way through.
-        self.finished = False
-        super(GenCoroutineTest, self).setUp()
-
-    def tearDown(self):
-        super(GenCoroutineTest, self).tearDown()
-        assert self.finished
-
-    def test_attributes(self):
-        self.finished = True
-
-        def f():
-            yield gen.moment
-
-        coro = gen.coroutine(f)
-        self.assertEqual(coro.__name__, f.__name__)
-        self.assertEqual(coro.__module__, f.__module__)
-        self.assertIs(coro.__wrapped__, f)
-
-    def test_is_coroutine_function(self):
-        self.finished = True
-
-        def f():
-            yield gen.moment
-
-        coro = gen.coroutine(f)
-        self.assertFalse(gen.is_coroutine_function(f))
-        self.assertTrue(gen.is_coroutine_function(coro))
-        self.assertFalse(gen.is_coroutine_function(coro()))
-
-    @gen_test
-    def test_sync_gen_return(self):
-        @gen.coroutine
-        def f():
-            raise gen.Return(42)
-        result = yield f()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @gen_test
-    def test_async_gen_return(self):
-        @gen.coroutine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            raise gen.Return(42)
-        result = yield f()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @gen_test
-    def test_sync_return(self):
-        @gen.coroutine
-        def f():
-            return 42
-        result = yield f()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @skipBefore33
-    @gen_test
-    def test_async_return(self):
-        namespace = exec_test(globals(), locals(), """
-        @gen.coroutine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            return 42
-        """)
-        result = yield namespace['f']()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @skipBefore33
-    @gen_test
-    def test_async_early_return(self):
-        # A yield statement exists but is not executed, which means
-        # this function "returns" via an exception.  This exception
-        # doesn't happen before the exception handling is set up.
-        namespace = exec_test(globals(), locals(), """
-        @gen.coroutine
-        def f():
-            if True:
-                return 42
-            yield gen.Task(self.io_loop.add_callback)
-        """)
-        result = yield namespace['f']()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @skipBefore35
-    @gen_test
-    def test_async_await(self):
-        # This test verifies that an async function can await a
-        # yield-based gen.coroutine, and that a gen.coroutine
-        # (the test method itself) can yield an async function.
-        namespace = exec_test(globals(), locals(), """
-        async def f():
-            await gen.Task(self.io_loop.add_callback)
-            return 42
-        """)
-        result = yield namespace['f']()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @skipBefore35
-    @gen_test
-    def test_asyncio_sleep_zero(self):
-        # asyncio.sleep(0) turns into a special case (equivalent to
-        # `yield None`)
-        namespace = exec_test(globals(), locals(), """
-        async def f():
-            import asyncio
-            await asyncio.sleep(0)
-            return 42
-        """)
-        result = yield namespace['f']()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @skipBefore35
-    @gen_test
-    def test_async_await_mixed_multi_native_future(self):
-        namespace = exec_test(globals(), locals(), """
-        async def f1():
-            await gen.Task(self.io_loop.add_callback)
-            return 42
-        """)
-
-        @gen.coroutine
-        def f2():
-            yield gen.Task(self.io_loop.add_callback)
-            raise gen.Return(43)
-
-        results = yield [namespace['f1'](), f2()]
-        self.assertEqual(results, [42, 43])
-        self.finished = True
-
-    @skipBefore35
-    @gen_test
-    def test_async_await_mixed_multi_native_yieldpoint(self):
-        namespace = exec_test(globals(), locals(), """
-        async def f1():
-            await gen.Task(self.io_loop.add_callback)
-            return 42
-        """)
-
-        @gen.coroutine
-        def f2():
-            yield gen.Task(self.io_loop.add_callback)
-            raise gen.Return(43)
-
-        f2(callback=(yield gen.Callback('cb')))
-        results = yield [namespace['f1'](), gen.Wait('cb')]
-        self.assertEqual(results, [42, 43])
-        self.finished = True
-
-    @skipBefore35
-    @gen_test
-    def test_async_with_timeout(self):
-        namespace = exec_test(globals(), locals(), """
-        async def f1():
-            return 42
-        """)
-
-        result = yield gen.with_timeout(datetime.timedelta(hours=1),
-                                        namespace['f1']())
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @gen_test
-    def test_sync_return_no_value(self):
-        @gen.coroutine
-        def f():
-            return
-        result = yield f()
-        self.assertEqual(result, None)
-        self.finished = True
-
-    @gen_test
-    def test_async_return_no_value(self):
-        # Without a return value we don't need python 3.3.
-        @gen.coroutine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            return
-        result = yield f()
-        self.assertEqual(result, None)
-        self.finished = True
-
-    @gen_test
-    def test_sync_raise(self):
-        @gen.coroutine
-        def f():
-            1 / 0
-        # The exception is raised when the future is yielded
-        # (or equivalently when its result method is called),
-        # not when the function itself is called.
-        future = f()
-        with self.assertRaises(ZeroDivisionError):
-            yield future
-        self.finished = True
-
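The comment above is worth restating outside the test harness: calling a function decorated with gen.coroutine never raises synchronously; it returns a Future, and even an exception raised before the first yield surfaces only when that Future is yielded or its result() is read. A minimal sketch of that behavior (the boom helper is illustrative, not part of this suite):

    from tornado import gen

    @gen.coroutine
    def boom():
        1 / 0

    fut = boom()      # no exception here; we just get a Future
    try:
        fut.result()  # the ZeroDivisionError surfaces only now
    except ZeroDivisionError:
        print("raised when the result was retrieved, not at call time")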
-    @gen_test
-    def test_async_raise(self):
-        @gen.coroutine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            1 / 0
-        future = f()
-        with self.assertRaises(ZeroDivisionError):
-            yield future
-        self.finished = True
-
-    @gen_test
-    def test_pass_callback(self):
-        @gen.coroutine
-        def f():
-            raise gen.Return(42)
-        result = yield gen.Task(f)
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @gen_test
-    def test_replace_yieldpoint_exception(self):
-        # Test exception handling: a coroutine can catch one exception
-        # raised by a yield point and raise a different one.
-        @gen.coroutine
-        def f1():
-            1 / 0
-
-        @gen.coroutine
-        def f2():
-            try:
-                yield f1()
-            except ZeroDivisionError:
-                raise KeyError()
-
-        future = f2()
-        with self.assertRaises(KeyError):
-            yield future
-        self.finished = True
-
-    @gen_test
-    def test_swallow_yieldpoint_exception(self):
-        # Test exception handling: a coroutine can catch an exception
-        # raised by a yield point and not raise a different one.
-        @gen.coroutine
-        def f1():
-            1 / 0
-
-        @gen.coroutine
-        def f2():
-            try:
-                yield f1()
-            except ZeroDivisionError:
-                raise gen.Return(42)
-
-        result = yield f2()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @gen_test
-    def test_replace_context_exception(self):
-        # Test exception handling: exceptions thrown into the stack context
-        # can be caught and replaced.
-        # Note that this test and the following cover behavior that is
-        # no longer really supported: coroutines no longer create a
-        # stack context automatically, but one is created after the first
-        # YieldPoint (i.e. not a Future).
-        @gen.coroutine
-        def f2():
-            (yield gen.Callback(1))()
-            yield gen.Wait(1)
-            self.io_loop.add_callback(lambda: 1 / 0)
-            try:
-                yield gen.Task(self.io_loop.add_timeout,
-                               self.io_loop.time() + 10)
-            except ZeroDivisionError:
-                raise KeyError()
-
-        future = f2()
-        with self.assertRaises(KeyError):
-            yield future
-        self.finished = True
-
-    @gen_test
-    def test_swallow_context_exception(self):
-        # Test exception handling: exceptions thrown into the stack context
-        # can be caught and ignored.
-        @gen.coroutine
-        def f2():
-            (yield gen.Callback(1))()
-            yield gen.Wait(1)
-            self.io_loop.add_callback(lambda: 1 / 0)
-            try:
-                yield gen.Task(self.io_loop.add_timeout,
-                               self.io_loop.time() + 10)
-            except ZeroDivisionError:
-                raise gen.Return(42)
-
-        result = yield f2()
-        self.assertEqual(result, 42)
-        self.finished = True
-
-    @gen_test
-    def test_moment(self):
-        calls = []
-
-        @gen.coroutine
-        def f(name, yieldable):
-            for i in range(5):
-                calls.append(name)
-                yield yieldable
-        # First, confirm the behavior without moment: each coroutine
-        # monopolizes the event loop until it finishes.
-        immediate = Future()
-        immediate.set_result(None)
-        yield [f('a', immediate), f('b', immediate)]
-        self.assertEqual(''.join(calls), 'aaaaabbbbb')
-
-        # With moment, they take turns.
-        calls = []
-        yield [f('a', gen.moment), f('b', gen.moment)]
-        self.assertEqual(''.join(calls), 'ababababab')
-        self.finished = True
-
-        calls = []
-        yield [f('a', gen.moment), f('b', immediate)]
-        self.assertEqual(''.join(calls), 'abbbbbaaaa')
-
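The assertions above encode the scheduling rule: yielding gen.moment resolves on the next IOLoop iteration, so coroutines running concurrently take turns rather than one monopolizing the loop until it finishes. A small standalone sketch of the same effect, using only the public gen and IOLoop APIs exercised here (worker/main are illustrative names):

    from tornado import gen
    from tornado.ioloop import IOLoop

    calls = []

    @gen.coroutine
    def worker(name):
        for _ in range(3):
            calls.append(name)
            yield gen.moment   # hand control back to the event loop

    @gen.coroutine
    def main():
        yield [worker('a'), worker('b')]

    IOLoop.current().run_sync(main)
    print(''.join(calls))      # 'ababab' rather than 'aaabbb'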
-    @gen_test
-    def test_sleep(self):
-        yield gen.sleep(0.01)
-        self.finished = True
-
-    @skipBefore33
-    @gen_test
-    def test_py3_leak_exception_context(self):
-        class LeakedException(Exception):
-            pass
-
-        @gen.coroutine
-        def inner(iteration):
-            raise LeakedException(iteration)
-
-        try:
-            yield inner(1)
-        except LeakedException as e:
-            self.assertEqual(str(e), "1")
-            self.assertIsNone(e.__context__)
-
-        try:
-            yield inner(2)
-        except LeakedException as e:
-            self.assertEqual(str(e), "2")
-            self.assertIsNone(e.__context__)
-
-        self.finished = True
-
-    @skipNotCPython
-    def test_coroutine_refcounting(self):
-        # On CPython, tasks and their arguments should be released immediately
-        # without waiting for garbage collection.
-        @gen.coroutine
-        def inner():
-            class Foo(object):
-                pass
-            local_var = Foo()
-            self.local_ref = weakref.ref(local_var)
-            yield gen.coroutine(lambda: None)()
-            raise ValueError('Some error')
-
-        @gen.coroutine
-        def inner2():
-            try:
-                yield inner()
-            except ValueError:
-                pass
-
-        self.io_loop.run_sync(inner2, timeout=3)
-
-        self.assertIs(self.local_ref(), None)
-        self.finished = True
-
-
-class GenSequenceHandler(RequestHandler):
-    @asynchronous
-    @gen.engine
-    def get(self):
-        self.io_loop = self.request.connection.stream.io_loop
-        self.io_loop.add_callback((yield gen.Callback("k1")))
-        yield gen.Wait("k1")
-        self.write("1")
-        self.io_loop.add_callback((yield gen.Callback("k2")))
-        yield gen.Wait("k2")
-        self.write("2")
-        # reuse an old key
-        self.io_loop.add_callback((yield gen.Callback("k1")))
-        yield gen.Wait("k1")
-        self.finish("3")
-
-
-class GenCoroutineSequenceHandler(RequestHandler):
-    @gen.coroutine
-    def get(self):
-        self.io_loop = self.request.connection.stream.io_loop
-        self.io_loop.add_callback((yield gen.Callback("k1")))
-        yield gen.Wait("k1")
-        self.write("1")
-        self.io_loop.add_callback((yield gen.Callback("k2")))
-        yield gen.Wait("k2")
-        self.write("2")
-        # reuse an old key
-        self.io_loop.add_callback((yield gen.Callback("k1")))
-        yield gen.Wait("k1")
-        self.finish("3")
-
-
-class GenCoroutineUnfinishedSequenceHandler(RequestHandler):
-    @asynchronous
-    @gen.coroutine
-    def get(self):
-        self.io_loop = self.request.connection.stream.io_loop
-        self.io_loop.add_callback((yield gen.Callback("k1")))
-        yield gen.Wait("k1")
-        self.write("1")
-        self.io_loop.add_callback((yield gen.Callback("k2")))
-        yield gen.Wait("k2")
-        self.write("2")
-        # reuse an old key
-        self.io_loop.add_callback((yield gen.Callback("k1")))
-        yield gen.Wait("k1")
-        # just write, don't finish
-        self.write("3")
-
-
-class GenTaskHandler(RequestHandler):
-    @asynchronous
-    @gen.engine
-    def get(self):
-        io_loop = self.request.connection.stream.io_loop
-        client = AsyncHTTPClient(io_loop=io_loop)
-        response = yield gen.Task(client.fetch, self.get_argument('url'))
-        response.rethrow()
-        self.finish(b"got response: " + response.body)
-
-
-class GenExceptionHandler(RequestHandler):
-    @asynchronous
-    @gen.engine
-    def get(self):
-        # This test depends on the order of the two decorators.
-        io_loop = self.request.connection.stream.io_loop
-        yield gen.Task(io_loop.add_callback)
-        raise Exception("oops")
-
-
-class GenCoroutineExceptionHandler(RequestHandler):
-    @gen.coroutine
-    def get(self):
-        # Unlike GenExceptionHandler above, this handler needs only @gen.coroutine.
-        io_loop = self.request.connection.stream.io_loop
-        yield gen.Task(io_loop.add_callback)
-        raise Exception("oops")
-
-
-class GenYieldExceptionHandler(RequestHandler):
-    @asynchronous
-    @gen.engine
-    def get(self):
-        io_loop = self.request.connection.stream.io_loop
-        # Test the interaction of the two stack_contexts.
-
-        def fail_task(callback):
-            io_loop.add_callback(lambda: 1 / 0)
-        try:
-            yield gen.Task(fail_task)
-            raise Exception("did not get expected exception")
-        except ZeroDivisionError:
-            self.finish('ok')
-
-
-# "Undecorated" here refers to the absence of @asynchronous.
-class UndecoratedCoroutinesHandler(RequestHandler):
-    @gen.coroutine
-    def prepare(self):
-        self.chunks = []
-        yield gen.Task(IOLoop.current().add_callback)
-        self.chunks.append('1')
-
-    @gen.coroutine
-    def get(self):
-        self.chunks.append('2')
-        yield gen.Task(IOLoop.current().add_callback)
-        self.chunks.append('3')
-        yield gen.Task(IOLoop.current().add_callback)
-        self.write(''.join(self.chunks))
-
-
-class AsyncPrepareErrorHandler(RequestHandler):
-    @gen.coroutine
-    def prepare(self):
-        yield gen.Task(IOLoop.current().add_callback)
-        raise HTTPError(403)
-
-    def get(self):
-        self.finish('ok')
-
-
-class NativeCoroutineHandler(RequestHandler):
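-    # async/await is a syntax error before Python 3.5, so the native-coroutine
-    # handler body is only compiled (via exec) on interpreters that can parse it.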
-    if sys.version_info > (3, 5):
-        exec(textwrap.dedent("""
-        async def get(self):
-            await gen.Task(IOLoop.current().add_callback)
-            self.write("ok")
-        """))
-
-
-class GenWebTest(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application([
-            ('/sequence', GenSequenceHandler),
-            ('/coroutine_sequence', GenCoroutineSequenceHandler),
-            ('/coroutine_unfinished_sequence',
-             GenCoroutineUnfinishedSequenceHandler),
-            ('/task', GenTaskHandler),
-            ('/exception', GenExceptionHandler),
-            ('/coroutine_exception', GenCoroutineExceptionHandler),
-            ('/yield_exception', GenYieldExceptionHandler),
-            ('/undecorated_coroutine', UndecoratedCoroutinesHandler),
-            ('/async_prepare_error', AsyncPrepareErrorHandler),
-            ('/native_coroutine', NativeCoroutineHandler),
-        ])
-
-    def test_sequence_handler(self):
-        response = self.fetch('/sequence')
-        self.assertEqual(response.body, b"123")
-
-    def test_coroutine_sequence_handler(self):
-        response = self.fetch('/coroutine_sequence')
-        self.assertEqual(response.body, b"123")
-
-    def test_coroutine_unfinished_sequence_handler(self):
-        response = self.fetch('/coroutine_unfinished_sequence')
-        self.assertEqual(response.body, b"123")
-
-    def test_task_handler(self):
-        response = self.fetch('/task?url=%s' % url_escape(self.get_url('/sequence')))
-        self.assertEqual(response.body, b"got response: 123")
-
-    def test_exception_handler(self):
-        # Make sure we get an error and not a timeout
-        with ExpectLog(app_log, "Uncaught exception GET /exception"):
-            response = self.fetch('/exception')
-        self.assertEqual(500, response.code)
-
-    def test_coroutine_exception_handler(self):
-        # Make sure we get an error and not a timeout
-        with ExpectLog(app_log, "Uncaught exception GET /coroutine_exception"):
-            response = self.fetch('/coroutine_exception')
-        self.assertEqual(500, response.code)
-
-    def test_yield_exception_handler(self):
-        response = self.fetch('/yield_exception')
-        self.assertEqual(response.body, b'ok')
-
-    def test_undecorated_coroutines(self):
-        response = self.fetch('/undecorated_coroutine')
-        self.assertEqual(response.body, b'123')
-
-    def test_async_prepare_error_handler(self):
-        response = self.fetch('/async_prepare_error')
-        self.assertEqual(response.code, 403)
-
-    @skipBefore35
-    def test_native_coroutine_handler(self):
-        response = self.fetch('/native_coroutine')
-        self.assertEqual(response.code, 200)
-        self.assertEqual(response.body, b'ok')
-
-
-class WithTimeoutTest(AsyncTestCase):
-    @gen_test
-    def test_timeout(self):
-        with self.assertRaises(gen.TimeoutError):
-            yield gen.with_timeout(datetime.timedelta(seconds=0.1),
-                                   Future())
-
-    @gen_test
-    def test_completes_before_timeout(self):
-        future = Future()
-        self.io_loop.add_timeout(datetime.timedelta(seconds=0.1),
-                                 lambda: future.set_result('asdf'))
-        result = yield gen.with_timeout(datetime.timedelta(seconds=3600),
-                                        future, io_loop=self.io_loop)
-        self.assertEqual(result, 'asdf')
-
-    @gen_test
-    def test_fails_before_timeout(self):
-        future = Future()
-        self.io_loop.add_timeout(
-            datetime.timedelta(seconds=0.1),
-            lambda: future.set_exception(ZeroDivisionError()))
-        with self.assertRaises(ZeroDivisionError):
-            yield gen.with_timeout(datetime.timedelta(seconds=3600),
-                                   future, io_loop=self.io_loop)
-
-    @gen_test
-    def test_already_resolved(self):
-        future = Future()
-        future.set_result('asdf')
-        result = yield gen.with_timeout(datetime.timedelta(seconds=3600),
-                                        future, io_loop=self.io_loop)
-        self.assertEqual(result, 'asdf')
-
-    @unittest.skipIf(futures is None, 'futures module not present')
-    @gen_test
-    def test_timeout_concurrent_future(self):
-        with futures.ThreadPoolExecutor(1) as executor:
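-            # Note the timeout here is an absolute IOLoop timestamp rather than a
-            # timedelta; io_loop.time() is effectively already in the past when
-            # with_timeout checks it, so the deadline expires before the worker
-            # thread's 0.1s sleep completes and TimeoutError is raised.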
-            with self.assertRaises(gen.TimeoutError):
-                yield gen.with_timeout(self.io_loop.time(),
-                                       executor.submit(time.sleep, 0.1))
-
-    @unittest.skipIf(futures is None, 'futures module not present')
-    @gen_test
-    def test_completed_concurrent_future(self):
-        with futures.ThreadPoolExecutor(1) as executor:
-            yield gen.with_timeout(datetime.timedelta(seconds=3600),
-                                   executor.submit(lambda: None))
-
-
-class WaitIteratorTest(AsyncTestCase):
-    @gen_test
-    def test_empty_iterator(self):
-        g = gen.WaitIterator()
-        self.assertTrue(g.done(), 'empty generator iterated')
-
-        with self.assertRaises(ValueError):
-            g = gen.WaitIterator(False, bar=False)
-
-        self.assertEqual(g.current_index, None, "bad nil current index")
-        self.assertEqual(g.current_future, None, "bad nil current future")
-
-    @gen_test
-    def test_already_done(self):
-        f1 = Future()
-        f2 = Future()
-        f3 = Future()
-        f1.set_result(24)
-        f2.set_result(42)
-        f3.set_result(84)
-
-        g = gen.WaitIterator(f1, f2, f3)
-        i = 0
-        while not g.done():
-            r = yield g.next()
-            # Order is not guaranteed, but the current implementation
-            # preserves ordering of already-done Futures.
-            if i == 0:
-                self.assertEqual(g.current_index, 0)
-                self.assertIs(g.current_future, f1)
-                self.assertEqual(r, 24)
-            elif i == 1:
-                self.assertEqual(g.current_index, 1)
-                self.assertIs(g.current_future, f2)
-                self.assertEqual(r, 42)
-            elif i == 2:
-                self.assertEqual(g.current_index, 2)
-                self.assertIs(g.current_future, f3)
-                self.assertEqual(r, 84)
-            i += 1
-
-        self.assertEqual(g.current_index, None, "bad nil current index")
-        self.assertEqual(g.current_future, None, "bad nil current future")
-
-        dg = gen.WaitIterator(f1=f1, f2=f2)
-
-        while not dg.done():
-            dr = yield dg.next()
-            if dg.current_index == "f1":
-                self.assertTrue(dg.current_future == f1 and dr == 24,
-                                "WaitIterator dict status incorrect")
-            elif dg.current_index == "f2":
-                self.assertTrue(dg.current_future == f2 and dr == 42,
-                                "WaitIterator dict status incorrect")
-            else:
-                self.fail("got bad WaitIterator index {}".format(
-                    dg.current_index))
-
-            i += 1
-
-        self.assertEqual(dg.current_index, None, "bad nil current index")
-        self.assertEqual(dg.current_future, None, "bad nil current future")
-
-    def finish_coroutines(self, iteration, futures):
-        if iteration == 3:
-            futures[2].set_result(24)
-        elif iteration == 5:
-            futures[0].set_exception(ZeroDivisionError())
-        elif iteration == 8:
-            futures[1].set_result(42)
-            futures[3].set_result(84)
-
-        if iteration < 8:
-            self.io_loop.add_callback(self.finish_coroutines, iteration + 1, futures)
-
-    @gen_test
-    def test_iterator(self):
-        futures = [Future(), Future(), Future(), Future()]
-
-        self.finish_coroutines(0, futures)
-
-        g = gen.WaitIterator(*futures)
-
-        i = 0
-        while not g.done():
-            try:
-                r = yield g.next()
-            except ZeroDivisionError:
-                self.assertIs(g.current_future, futures[0],
-                              'exception future invalid')
-            else:
-                if i == 0:
-                    self.assertEqual(r, 24, 'iterator value incorrect')
-                    self.assertEqual(g.current_index, 2, 'wrong index')
-                elif i == 2:
-                    self.assertEqual(r, 42, 'iterator value incorrect')
-                    self.assertEqual(g.current_index, 1, 'wrong index')
-                elif i == 3:
-                    self.assertEqual(r, 84, 'iterator value incorrect')
-                    self.assertEqual(g.current_index, 3, 'wrong index')
-            i += 1
-
-    @skipBefore35
-    @gen_test
-    def test_iterator_async_await(self):
-        # Recreate the previous test with py35 syntax. It's a little clunky
-        # because of the way the previous test handles an exception on
-        # a single iteration.
-        futures = [Future(), Future(), Future(), Future()]
-        self.finish_coroutines(0, futures)
-        self.finished = False
-
-        namespace = exec_test(globals(), locals(), """
-        async def f():
-            i = 0
-            g = gen.WaitIterator(*futures)
-            try:
-                async for r in g:
-                    if i == 0:
-                        self.assertEqual(r, 24, 'iterator value incorrect')
-                        self.assertEqual(g.current_index, 2, 'wrong index')
-                    else:
-                        raise Exception("expected exception on iteration 1")
-                    i += 1
-            except ZeroDivisionError:
-                i += 1
-            async for r in g:
-                if i == 2:
-                    self.assertEqual(r, 42, 'iterator value incorrect')
-                    self.assertEqual(g.current_index, 1, 'wrong index')
-                elif i == 3:
-                    self.assertEqual(r, 84, 'iterator value incorrect')
-                    self.assertEqual(g.current_index, 3, 'wrong index')
-                else:
-                    raise Exception("didn't expect iteration %d" % i)
-                i += 1
-            self.finished = True
-        """)
-        yield namespace['f']()
-        self.assertTrue(self.finished)
-
-    @gen_test
-    def test_no_ref(self):
-        # In this usage, there is no direct hard reference to the
-        # WaitIterator itself, only the Future it returns. Since
-        # WaitIterator uses weak references internally to improve GC
-        # performance, this used to cause problems.
-        yield gen.with_timeout(datetime.timedelta(seconds=0.1),
-                               gen.WaitIterator(gen.sleep(0)).next())
-
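Taken together, these tests sketch the intended usage pattern: keep a reference to the WaitIterator itself (the comment above notes it relies on weak references internally), call next() until done() is true, and consult current_index/current_future to see which input produced each result or exception. A hedged usage sketch based on that pattern (the consume helper is illustrative):

    from tornado import gen

    @gen.coroutine
    def consume(futures):
        results = []
        it = gen.WaitIterator(*futures)   # hold a hard reference to the iterator
        while not it.done():
            try:
                value = yield it.next()
            except Exception as exc:
                results.append((it.current_index, exc))
            else:
                results.append((it.current_index, value))
        raise gen.Return(results)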
-
-class RunnerGCTest(AsyncTestCase):
-    """Github issue 1769: Runner objects can get GCed unexpectedly"""
-    @gen_test
-    def test_gc(self):
-        """Runners shouldn't GC if future is alive"""
-        # Create the weakref
-        weakref_scope = [None]
-
-        def callback():
-            gc.collect(2)
-            weakref_scope[0]().set_result(123)
-
-        @gen.coroutine
-        def tester():
-            fut = Future()
-            weakref_scope[0] = weakref.ref(fut)
-            self.io_loop.add_callback(callback)
-            yield fut
-
-        yield gen.with_timeout(
-            datetime.timedelta(seconds=0.2),
-            tester()
-        )
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/lib/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.mo b/lib/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.mo
deleted file mode 100644
index a97bf9c57460ecfc27761accf90d712ea5cebb44..0000000000000000000000000000000000000000
Binary files a/lib/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.mo and /dev/null differ
diff --git a/lib/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.po b/lib/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.po
deleted file mode 100644
index 88d72c8623a4275c85cb32e2ec35205b5b907176..0000000000000000000000000000000000000000
--- a/lib/tornado/test/gettext_translations/fr_FR/LC_MESSAGES/tornado_test.po
+++ /dev/null
@@ -1,47 +0,0 @@
-# SOME DESCRIPTIVE TITLE.
-# Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER
-# This file is distributed under the same license as the PACKAGE package.
-# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
-#
-#, fuzzy
-msgid ""
-msgstr ""
-"Project-Id-Version: PACKAGE VERSION\n"
-"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2015-01-27 11:05+0300\n"
-"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
-"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
-"Language-Team: LANGUAGE <LL@li.org>\n"
-"Language: \n"
-"MIME-Version: 1.0\n"
-"Content-Type: text/plain; charset=utf-8\n"
-"Content-Transfer-Encoding: 8bit\n"
-"Plural-Forms: nplurals=2; plural=(n > 1);\n"
-
-#: extract_me.py:11
-msgid "school"
-msgstr "école"
-
-#: extract_me.py:12
-msgctxt "law"
-msgid "right"
-msgstr "le droit"
-
-#: extract_me.py:13
-msgctxt "good"
-msgid "right"
-msgstr "le bien"
-
-#: extract_me.py:14
-msgctxt "organization"
-msgid "club"
-msgid_plural "clubs"
-msgstr[0] "le club"
-msgstr[1] "les clubs"
-
-#: extract_me.py:15
-msgctxt "stick"
-msgid "club"
-msgid_plural "clubs"
-msgstr[0] "le bâton"
-msgstr[1] "les bâtons"
diff --git a/lib/tornado/test/http1connection_test.py b/lib/tornado/test/http1connection_test.py
deleted file mode 100644
index 8aaaaf35b7374e39cdb4c49c402f67d0d44d50d3..0000000000000000000000000000000000000000
--- a/lib/tornado/test/http1connection_test.py
+++ /dev/null
@@ -1,61 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import socket
-
-from tornado.http1connection import HTTP1Connection
-from tornado.httputil import HTTPMessageDelegate
-from tornado.iostream import IOStream
-from tornado.locks import Event
-from tornado.netutil import add_accept_handler
-from tornado.testing import AsyncTestCase, bind_unused_port, gen_test
-
-
-class HTTP1ConnectionTest(AsyncTestCase):
-    def setUp(self):
-        super(HTTP1ConnectionTest, self).setUp()
-        self.asyncSetUp()
-
-    @gen_test
-    def asyncSetUp(self):
-        listener, port = bind_unused_port()
-        event = Event()
-
-        def accept_callback(conn, addr):
-            self.server_stream = IOStream(conn)
-            self.addCleanup(self.server_stream.close)
-            event.set()
-
-        add_accept_handler(listener, accept_callback)
-        self.client_stream = IOStream(socket.socket())
-        self.addCleanup(self.client_stream.close)
-        yield [self.client_stream.connect(('127.0.0.1', port)),
-               event.wait()]
-        self.io_loop.remove_handler(listener)
-        listener.close()
-
-    @gen_test
-    def test_http10_no_content_length(self):
-        # Regression test for a bug in which can_keep_alive would crash
-        # for an HTTP/1.0 (not 1.1) response with no content-length.
-        conn = HTTP1Connection(self.client_stream, True)
-        self.server_stream.write(b"HTTP/1.0 200 Not Modified\r\n\r\nhello")
-        self.server_stream.close()
-
-        event = Event()
-        test = self
-        body = []
-
-        class Delegate(HTTPMessageDelegate):
-            def headers_received(self, start_line, headers):
-                test.code = start_line.code
-
-            def data_received(self, data):
-                body.append(data)
-
-            def finish(self):
-                event.set()
-
-        yield conn.read_response(Delegate())
-        yield event.wait()
-        self.assertEqual(self.code, 200)
-        self.assertEqual(b''.join(body), b'hello')
diff --git a/lib/tornado/test/httpclient_test.py b/lib/tornado/test/httpclient_test.py
deleted file mode 100644
index 320454e41afb6991e5a569bde784649e5c3982c4..0000000000000000000000000000000000000000
--- a/lib/tornado/test/httpclient_test.py
+++ /dev/null
@@ -1,685 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import absolute_import, division, print_function
-
-import base64
-import binascii
-from contextlib import closing
-import copy
-import functools
-import sys
-import threading
-import datetime
-from io import BytesIO
-
-from tornado.escape import utf8, native_str
-from tornado import gen
-from tornado.httpclient import HTTPRequest, HTTPResponse, _RequestProxy, HTTPError, HTTPClient
-from tornado.httpserver import HTTPServer
-from tornado.ioloop import IOLoop
-from tornado.iostream import IOStream
-from tornado.log import gen_log
-from tornado import netutil
-from tornado.stack_context import ExceptionStackContext, NullContext
-from tornado.testing import AsyncHTTPTestCase, bind_unused_port, gen_test, ExpectLog
-from tornado.test.util import unittest, skipOnTravis
-from tornado.web import Application, RequestHandler, url
-from tornado.httputil import format_timestamp, HTTPHeaders
-
-
-class HelloWorldHandler(RequestHandler):
-    def get(self):
-        name = self.get_argument("name", "world")
-        self.set_header("Content-Type", "text/plain")
-        self.finish("Hello %s!" % name)
-
-
-class PostHandler(RequestHandler):
-    def post(self):
-        self.finish("Post arg1: %s, arg2: %s" % (
-            self.get_argument("arg1"), self.get_argument("arg2")))
-
-
-class PutHandler(RequestHandler):
-    def put(self):
-        self.write("Put body: ")
-        self.write(self.request.body)
-
-
-class RedirectHandler(RequestHandler):
-    def prepare(self):
-        self.write('redirects can have bodies too')
-        self.redirect(self.get_argument("url"),
-                      status=int(self.get_argument("status", "302")))
-
-
-class ChunkHandler(RequestHandler):
-    @gen.coroutine
-    def get(self):
-        self.write("asdf")
-        self.flush()
-        # Wait a bit to ensure the chunks are sent and received separately.
-        yield gen.sleep(0.01)
-        self.write("qwer")
-
-
-class AuthHandler(RequestHandler):
-    def get(self):
-        self.finish(self.request.headers["Authorization"])
-
-
-class CountdownHandler(RequestHandler):
-    def get(self, count):
-        count = int(count)
-        if count > 0:
-            self.redirect(self.reverse_url("countdown", count - 1))
-        else:
-            self.write("Zero")
-
-
-class EchoPostHandler(RequestHandler):
-    def post(self):
-        self.write(self.request.body)
-
-
-class UserAgentHandler(RequestHandler):
-    def get(self):
-        self.write(self.request.headers.get('User-Agent', 'User agent not set'))
-
-
-class ContentLength304Handler(RequestHandler):
-    def get(self):
-        self.set_status(304)
-        self.set_header('Content-Length', 42)
-
-    def _clear_headers_for_304(self):
-        # Tornado strips content-length from 304 responses, but here we
-        # want to simulate servers that include the headers anyway.
-        pass
-
-
-class PatchHandler(RequestHandler):
-
-    def patch(self):
-        "Return the request payload - so we can check it is being kept"
-        self.write(self.request.body)
-
-
-class AllMethodsHandler(RequestHandler):
-    SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',)
-
-    def method(self):
-        self.write(self.request.method)
-
-    get = post = put = delete = options = patch = other = method
-
-
-class SetHeaderHandler(RequestHandler):
-    def get(self):
-        # Use get_arguments for keys to get strings, but
-        # request.arguments for values to get bytes.
-        for k, v in zip(self.get_arguments('k'),
-                        self.request.arguments['v']):
-            self.set_header(k, v)
-
-# These tests end up getting run redundantly: once here with the default
-# HTTPClient implementation, and then again in each implementation's own
-# test suite.
-
-
-class HTTPClientCommonTestCase(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application([
-            url("/hello", HelloWorldHandler),
-            url("/post", PostHandler),
-            url("/put", PutHandler),
-            url("/redirect", RedirectHandler),
-            url("/chunk", ChunkHandler),
-            url("/auth", AuthHandler),
-            url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
-            url("/echopost", EchoPostHandler),
-            url("/user_agent", UserAgentHandler),
-            url("/304_with_content_length", ContentLength304Handler),
-            url("/all_methods", AllMethodsHandler),
-            url('/patch', PatchHandler),
-            url('/set_header', SetHeaderHandler),
-        ], gzip=True)
-
-    def test_patch_receives_payload(self):
-        body = b"some patch data"
-        response = self.fetch("/patch", method='PATCH', body=body)
-        self.assertEqual(response.code, 200)
-        self.assertEqual(response.body, body)
-
-    @skipOnTravis
-    def test_hello_world(self):
-        response = self.fetch("/hello")
-        self.assertEqual(response.code, 200)
-        self.assertEqual(response.headers["Content-Type"], "text/plain")
-        self.assertEqual(response.body, b"Hello world!")
-        self.assertEqual(int(response.request_time), 0)
-
-        response = self.fetch("/hello?name=Ben")
-        self.assertEqual(response.body, b"Hello Ben!")
-
-    def test_streaming_callback(self):
-        # streaming_callback is also tested in test_chunked
-        chunks = []
-        response = self.fetch("/hello",
-                              streaming_callback=chunks.append)
-        # with streaming_callback, data goes to the callback and not response.body
-        self.assertEqual(chunks, [b"Hello world!"])
-        self.assertFalse(response.body)
-
-    def test_post(self):
-        response = self.fetch("/post", method="POST",
-                              body="arg1=foo&arg2=bar")
-        self.assertEqual(response.code, 200)
-        self.assertEqual(response.body, b"Post arg1: foo, arg2: bar")
-
-    def test_chunked(self):
-        response = self.fetch("/chunk")
-        self.assertEqual(response.body, b"asdfqwer")
-
-        chunks = []
-        response = self.fetch("/chunk",
-                              streaming_callback=chunks.append)
-        self.assertEqual(chunks, [b"asdf", b"qwer"])
-        self.assertFalse(response.body)
-
-    def test_chunked_close(self):
-        # test case in which chunks spread read-callback processing
-        # over several ioloop iterations, but the connection is already closed.
-        sock, port = bind_unused_port()
-        with closing(sock):
-            def write_response(stream, request_data):
-                if b"HTTP/1." not in request_data:
-                    self.skipTest("requires HTTP/1.x")
-                stream.write(b"""\
-HTTP/1.1 200 OK
-Transfer-Encoding: chunked
-
-1
-1
-1
-2
-0
-
-""".replace(b"\n", b"\r\n"), callback=stream.close)
-
-            def accept_callback(conn, address):
-                # fake an HTTP server using chunked encoding where the final chunks
-                # and connection close all happen at once
-                stream = IOStream(conn, io_loop=self.io_loop)
-                stream.read_until(b"\r\n\r\n",
-                                  functools.partial(write_response, stream))
-            netutil.add_accept_handler(sock, accept_callback, self.io_loop)
-            self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
-            resp = self.wait()
-            resp.rethrow()
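-            # In chunked framing each chunk is "<hex size>\r\n<data>\r\n" and a
-            # zero-length chunk ends the body, so the two one-byte chunks written
-            # above ("1" and "2") should reassemble to b"12".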
-            self.assertEqual(resp.body, b"12")
-            self.io_loop.remove_handler(sock.fileno())
-
-    def test_streaming_stack_context(self):
-        chunks = []
-        exc_info = []
-
-        def error_handler(typ, value, tb):
-            exc_info.append((typ, value, tb))
-            return True
-
-        def streaming_cb(chunk):
-            chunks.append(chunk)
-            if chunk == b'qwer':
-                1 / 0
-
-        with ExceptionStackContext(error_handler):
-            self.fetch('/chunk', streaming_callback=streaming_cb)
-
-        self.assertEqual(chunks, [b'asdf', b'qwer'])
-        self.assertEqual(1, len(exc_info))
-        self.assertIs(exc_info[0][0], ZeroDivisionError)
-
-    def test_basic_auth(self):
-        self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
-                                    auth_password="open sesame").body,
-                         b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
-
-    def test_basic_auth_explicit_mode(self):
-        self.assertEqual(self.fetch("/auth", auth_username="Aladdin",
-                                    auth_password="open sesame",
-                                    auth_mode="basic").body,
-                         b"Basic QWxhZGRpbjpvcGVuIHNlc2FtZQ==")
-
-    def test_unsupported_auth_mode(self):
-        # curl and simple clients handle errors a bit differently; the
-        # important thing is that they don't fall back to basic auth
-        # on an unknown mode.
-        with ExpectLog(gen_log, "uncaught exception", required=False):
-            with self.assertRaises((ValueError, HTTPError)):
-                response = self.fetch("/auth", auth_username="Aladdin",
-                                      auth_password="open sesame",
-                                      auth_mode="asdf")
-                response.rethrow()
-
-    def test_follow_redirect(self):
-        response = self.fetch("/countdown/2", follow_redirects=False)
-        self.assertEqual(302, response.code)
-        self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
-
-        response = self.fetch("/countdown/2")
-        self.assertEqual(200, response.code)
-        self.assertTrue(response.effective_url.endswith("/countdown/0"))
-        self.assertEqual(b"Zero", response.body)
-
-    def test_credentials_in_url(self):
-        url = self.get_url("/auth").replace("http://", "http://me:secret@")
-        self.http_client.fetch(url, self.stop)
-        response = self.wait()
-        self.assertEqual(b"Basic " + base64.b64encode(b"me:secret"),
-                         response.body)
-
-    def test_body_encoding(self):
-        unicode_body = u"\xe9"
-        byte_body = binascii.a2b_hex(b"e9")
-
-        # unicode string in body gets converted to utf8
-        response = self.fetch("/echopost", method="POST", body=unicode_body,
-                              headers={"Content-Type": "application/blah"})
-        self.assertEqual(response.headers["Content-Length"], "2")
-        self.assertEqual(response.body, utf8(unicode_body))
-
-        # byte strings pass through directly
-        response = self.fetch("/echopost", method="POST",
-                              body=byte_body,
-                              headers={"Content-Type": "application/blah"})
-        self.assertEqual(response.headers["Content-Length"], "1")
-        self.assertEqual(response.body, byte_body)
-
-        # Mixing unicode in headers and byte string bodies shouldn't
-        # break anything
-        response = self.fetch("/echopost", method="POST", body=byte_body,
-                              headers={"Content-Type": "application/blah"},
-                              user_agent=u"foo")
-        self.assertEqual(response.headers["Content-Length"], "1")
-        self.assertEqual(response.body, byte_body)
-
-    def test_types(self):
-        response = self.fetch("/hello")
-        self.assertEqual(type(response.body), bytes)
-        self.assertEqual(type(response.headers["Content-Type"]), str)
-        self.assertEqual(type(response.code), int)
-        self.assertEqual(type(response.effective_url), str)
-
-    def test_header_callback(self):
-        first_line = []
-        headers = {}
-        chunks = []
-
-        def header_callback(header_line):
-            if header_line.startswith('HTTP/1.1 101'):
-                # Upgrading to HTTP/2
-                pass
-            elif header_line.startswith('HTTP/'):
-                first_line.append(header_line)
-            elif header_line != '\r\n':
-                k, v = header_line.split(':', 1)
-                headers[k.lower()] = v.strip()
-
-        def streaming_callback(chunk):
-            # All header callbacks are run before any streaming callbacks,
-            # so the header data is available to process the data as it
-            # comes in.
-            self.assertEqual(headers['content-type'], 'text/html; charset=UTF-8')
-            chunks.append(chunk)
-
-        self.fetch('/chunk', header_callback=header_callback,
-                   streaming_callback=streaming_callback)
-        self.assertEqual(len(first_line), 1, first_line)
-        self.assertRegexpMatches(first_line[0], 'HTTP/[0-9]\\.[0-9] 200.*\r\n')
-        self.assertEqual(chunks, [b'asdf', b'qwer'])
-
-    def test_header_callback_stack_context(self):
-        exc_info = []
-
-        def error_handler(typ, value, tb):
-            exc_info.append((typ, value, tb))
-            return True
-
-        def header_callback(header_line):
-            if header_line.lower().startswith('content-type:'):
-                1 / 0
-
-        with ExceptionStackContext(error_handler):
-            self.fetch('/chunk', header_callback=header_callback)
-        self.assertEqual(len(exc_info), 1)
-        self.assertIs(exc_info[0][0], ZeroDivisionError)
-
-    def test_configure_defaults(self):
-        defaults = dict(user_agent='TestDefaultUserAgent', allow_ipv6=False)
-        # Construct a new instance of the configured client class
-        client = self.http_client.__class__(self.io_loop, force_instance=True,
-                                            defaults=defaults)
-        try:
-            client.fetch(self.get_url('/user_agent'), callback=self.stop)
-            response = self.wait()
-            self.assertEqual(response.body, b'TestDefaultUserAgent')
-        finally:
-            client.close()
-
-    def test_header_types(self):
-        # Header values may be passed as character or utf8 byte strings,
-        # in a plain dictionary or an HTTPHeaders object.
-        # Keys must always be the native str type.
-        # All combinations should have the same results on the wire.
-        for value in [u"MyUserAgent", b"MyUserAgent"]:
-            for container in [dict, HTTPHeaders]:
-                headers = container()
-                headers['User-Agent'] = value
-                resp = self.fetch('/user_agent', headers=headers)
-                self.assertEqual(
-                    resp.body, b"MyUserAgent",
-                    "response=%r, value=%r, container=%r" %
-                    (resp.body, value, container))
-
-    def test_multi_line_headers(self):
-        # Multi-line HTTP headers are rare but RFC-allowed
-        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2
-        sock, port = bind_unused_port()
-        with closing(sock):
-            def write_response(stream, request_data):
-                if b"HTTP/1." not in request_data:
-                    self.skipTest("requires HTTP/1.x")
-                stream.write(b"""\
-HTTP/1.1 200 OK
-X-XSS-Protection: 1;
-\tmode=block
-
-""".replace(b"\n", b"\r\n"), callback=stream.close)
-
-            def accept_callback(conn, address):
-                stream = IOStream(conn, io_loop=self.io_loop)
-                stream.read_until(b"\r\n\r\n",
-                                  functools.partial(write_response, stream))
-            netutil.add_accept_handler(sock, accept_callback, self.io_loop)
-            self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
-            resp = self.wait()
-            resp.rethrow()
-            self.assertEqual(resp.headers['X-XSS-Protection'], "1; mode=block")
-            self.io_loop.remove_handler(sock.fileno())
-
-    def test_304_with_content_length(self):
-        # According to the spec 304 responses SHOULD NOT include
-        # Content-Length or other entity headers, but some servers do it
-        # anyway.
-        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.5
-        response = self.fetch('/304_with_content_length')
-        self.assertEqual(response.code, 304)
-        self.assertEqual(response.headers['Content-Length'], '42')
-
-    def test_final_callback_stack_context(self):
-        # The final callback should be run outside of the httpclient's
-        # stack_context.  We want to ensure that there is no stack_context
-        # between the user's callback and the IOLoop, so monkey-patch
-        # IOLoop.handle_callback_exception and disable the test harness's
-        # context with a NullContext.
-        # Note that this does not apply to secondary callbacks (header
-        # and streaming_callback), as errors there must be seen as errors
-        # by the http client so it can clean up the connection.
-        exc_info = []
-
-        def handle_callback_exception(callback):
-            exc_info.append(sys.exc_info())
-            self.stop()
-        self.io_loop.handle_callback_exception = handle_callback_exception
-        with NullContext():
-            self.http_client.fetch(self.get_url('/hello'),
-                                   lambda response: 1 / 0)
-        self.wait()
-        self.assertEqual(exc_info[0][0], ZeroDivisionError)
-
-    @gen_test
-    def test_future_interface(self):
-        response = yield self.http_client.fetch(self.get_url('/hello'))
-        self.assertEqual(response.body, b'Hello world!')
-
-    @gen_test
-    def test_future_http_error(self):
-        with self.assertRaises(HTTPError) as context:
-            yield self.http_client.fetch(self.get_url('/notfound'))
-        self.assertEqual(context.exception.code, 404)
-        self.assertEqual(context.exception.response.code, 404)
-
-    @gen_test
-    def test_future_http_error_no_raise(self):
-        response = yield self.http_client.fetch(self.get_url('/notfound'), raise_error=False)
-        self.assertEqual(response.code, 404)
-
-    @gen_test
-    def test_reuse_request_from_response(self):
-        # The response.request attribute should be an HTTPRequest, not
-        # a _RequestProxy.
-        # This test uses self.http_client.fetch because self.fetch calls
-        # self.get_url on the input unconditionally.
-        url = self.get_url('/hello')
-        response = yield self.http_client.fetch(url)
-        self.assertEqual(response.request.url, url)
-        self.assertTrue(isinstance(response.request, HTTPRequest))
-        response2 = yield self.http_client.fetch(response.request)
-        self.assertEqual(response2.body, b'Hello world!')
-
-    def test_all_methods(self):
-        for method in ['GET', 'DELETE', 'OPTIONS']:
-            response = self.fetch('/all_methods', method=method)
-            self.assertEqual(response.body, utf8(method))
-        for method in ['POST', 'PUT', 'PATCH']:
-            response = self.fetch('/all_methods', method=method, body=b'')
-            self.assertEqual(response.body, utf8(method))
-        response = self.fetch('/all_methods', method='HEAD')
-        self.assertEqual(response.body, b'')
-        response = self.fetch('/all_methods', method='OTHER',
-                              allow_nonstandard_methods=True)
-        self.assertEqual(response.body, b'OTHER')
-
-    def test_body_sanity_checks(self):
-        # These methods require a body.
-        for method in ('POST', 'PUT', 'PATCH'):
-            with self.assertRaises(ValueError) as context:
-                resp = self.fetch('/all_methods', method=method)
-                resp.rethrow()
-            self.assertIn('must not be None', str(context.exception))
-
-            resp = self.fetch('/all_methods', method=method,
-                              allow_nonstandard_methods=True)
-            self.assertEqual(resp.code, 200)
-
-        # These methods don't allow a body.
-        for method in ('GET', 'DELETE', 'OPTIONS'):
-            with self.assertRaises(ValueError) as context:
-                resp = self.fetch('/all_methods', method=method, body=b'asdf')
-                resp.rethrow()
-            self.assertIn('must be None', str(context.exception))
-
-            # In most cases this can be overridden, but curl_httpclient
-            # does not allow body with a GET at all.
-            if method != 'GET':
-                resp = self.fetch('/all_methods', method=method, body=b'asdf',
-                                  allow_nonstandard_methods=True)
-                resp.rethrow()
-                self.assertEqual(resp.code, 200)
-
-    # This test causes odd failures with the combination of
-    # curl_httpclient (at least with the version of libcurl available
-    # on ubuntu 12.04), TwistedIOLoop, and epoll.  For POST (but not PUT),
-    # curl decides the response came back too soon and closes the connection
-    # to start again.  It does this *before* telling the socket callback to
-    # unregister the FD.  Some IOLoop implementations have special kernel
-    # integration to discover this immediately.  Tornado's IOLoops
-    # ignore errors on remove_handler to accommodate this behavior, but
-    # Twisted's reactor does not.  The removeReader call fails and so
-    # do all future removeAll calls (which our tests do at cleanup).
-    #
-    # def test_post_307(self):
-    #    response = self.fetch("/redirect?status=307&url=/post",
-    #                          method="POST", body=b"arg1=foo&arg2=bar")
-    #    self.assertEqual(response.body, b"Post arg1: foo, arg2: bar")
-
-    def test_put_307(self):
-        response = self.fetch("/redirect?status=307&url=/put",
-                              method="PUT", body=b"hello")
-        response.rethrow()
-        self.assertEqual(response.body, b"Put body: hello")
-
-    def test_non_ascii_header(self):
-        # Non-ascii headers are sent as latin1.
-        response = self.fetch("/set_header?k=foo&v=%E9")
-        response.rethrow()
-        self.assertEqual(response.headers["Foo"], native_str(u"\u00e9"))
-
-
-class RequestProxyTest(unittest.TestCase):
-    def test_request_set(self):
-        proxy = _RequestProxy(HTTPRequest('http://example.com/',
-                                          user_agent='foo'),
-                              dict())
-        self.assertEqual(proxy.user_agent, 'foo')
-
-    def test_default_set(self):
-        proxy = _RequestProxy(HTTPRequest('http://example.com/'),
-                              dict(network_interface='foo'))
-        self.assertEqual(proxy.network_interface, 'foo')
-
-    def test_both_set(self):
-        proxy = _RequestProxy(HTTPRequest('http://example.com/',
-                                          proxy_host='foo'),
-                              dict(proxy_host='bar'))
-        self.assertEqual(proxy.proxy_host, 'foo')
-
-    def test_neither_set(self):
-        proxy = _RequestProxy(HTTPRequest('http://example.com/'),
-                              dict())
-        self.assertIs(proxy.auth_username, None)
-
-    def test_bad_attribute(self):
-        proxy = _RequestProxy(HTTPRequest('http://example.com/'),
-                              dict())
-        with self.assertRaises(AttributeError):
-            proxy.foo
-
-    def test_defaults_none(self):
-        proxy = _RequestProxy(HTTPRequest('http://example.com/'), None)
-        self.assertIs(proxy.auth_username, None)
-
-
-class HTTPResponseTestCase(unittest.TestCase):
-    def test_str(self):
-        response = HTTPResponse(HTTPRequest('http://example.com'),
-                                200, headers={}, buffer=BytesIO())
-        s = str(response)
-        self.assertTrue(s.startswith('HTTPResponse('))
-        self.assertIn('code=200', s)
-
-
-class SyncHTTPClientTest(unittest.TestCase):
-    def setUp(self):
-        if IOLoop.configured_class().__name__ in ('TwistedIOLoop',
-                                                  'AsyncIOMainLoop'):
-            # TwistedIOLoop only supports the global reactor, so we can't have
-            # separate IOLoops for client and server threads.
-            # AsyncIOMainLoop doesn't work with the default policy
-            # (although it could with some tweaks to this test and a
-            # policy that created loops for non-main threads).
-            raise unittest.SkipTest(
-                'Sync HTTPClient not compatible with TwistedIOLoop or '
-                'AsyncIOMainLoop')
-        self.server_ioloop = IOLoop()
-
-        sock, self.port = bind_unused_port()
-        app = Application([('/', HelloWorldHandler)])
-        self.server = HTTPServer(app, io_loop=self.server_ioloop)
-        self.server.add_socket(sock)
-
-        self.server_thread = threading.Thread(target=self.server_ioloop.start)
-        self.server_thread.start()
-
-        self.http_client = HTTPClient()
-
-    def tearDown(self):
-        def stop_server():
-            self.server.stop()
-            # Delay the shutdown of the IOLoop by one iteration because
-            # the server may still have some cleanup work left when
-            # the client finishes with the response (this is noticeable
-            # with http/2, which leaves a Future with an unexamined
-            # StreamClosedError on the loop).
-            self.server_ioloop.add_callback(self.server_ioloop.stop)
-        self.server_ioloop.add_callback(stop_server)
-        self.server_thread.join()
-        self.http_client.close()
-        self.server_ioloop.close(all_fds=True)
-
-    def get_url(self, path):
-        return 'http://127.0.0.1:%d%s' % (self.port, path)
-
-    def test_sync_client(self):
-        response = self.http_client.fetch(self.get_url('/'))
-        self.assertEqual(b'Hello world!', response.body)
-
-    def test_sync_client_error(self):
-        # Synchronous HTTPClient raises errors directly; no need for
-        # response.rethrow()
-        with self.assertRaises(HTTPError) as assertion:
-            self.http_client.fetch(self.get_url('/notfound'))
-        self.assertEqual(assertion.exception.code, 404)
-
-
-class HTTPRequestTestCase(unittest.TestCase):
-    def test_headers(self):
-        request = HTTPRequest('http://example.com', headers={'foo': 'bar'})
-        self.assertEqual(request.headers, {'foo': 'bar'})
-
-    def test_headers_setter(self):
-        request = HTTPRequest('http://example.com')
-        request.headers = {'bar': 'baz'}
-        self.assertEqual(request.headers, {'bar': 'baz'})
-
-    def test_null_headers_setter(self):
-        request = HTTPRequest('http://example.com')
-        request.headers = None
-        self.assertEqual(request.headers, {})
-
-    def test_body(self):
-        request = HTTPRequest('http://example.com', body='foo')
-        self.assertEqual(request.body, utf8('foo'))
-
-    def test_body_setter(self):
-        request = HTTPRequest('http://example.com')
-        request.body = 'foo'
-        self.assertEqual(request.body, utf8('foo'))
-
-    def test_if_modified_since(self):
-        http_date = datetime.datetime.utcnow()
-        request = HTTPRequest('http://example.com', if_modified_since=http_date)
-        self.assertEqual(request.headers,
-                         {'If-Modified-Since': format_timestamp(http_date)})
-
-
-class HTTPErrorTestCase(unittest.TestCase):
-    def test_copy(self):
-        e = HTTPError(403)
-        e2 = copy.copy(e)
-        self.assertIsNot(e, e2)
-        self.assertEqual(e.code, e2.code)
-
-    def test_plain_error(self):
-        e = HTTPError(403)
-        self.assertEqual(str(e), "HTTP 403: Forbidden")
-        self.assertEqual(repr(e), "HTTP 403: Forbidden")
-
-    def test_error_with_response(self):
-        resp = HTTPResponse(HTTPRequest('http://example.com/'), 403)
-        with self.assertRaises(HTTPError) as cm:
-            resp.rethrow()
-        e = cm.exception
-        self.assertEqual(str(e), "HTTP 403: Forbidden")
-        self.assertEqual(repr(e), "HTTP 403: Forbidden")
diff --git a/lib/tornado/test/httpserver_test.py b/lib/tornado/test/httpserver_test.py
deleted file mode 100644
index 11cb72313765905e20dc59e9e2bf80e4ce2a43e8..0000000000000000000000000000000000000000
--- a/lib/tornado/test/httpserver_test.py
+++ /dev/null
@@ -1,1131 +0,0 @@
-#!/usr/bin/env python
-
-
-from __future__ import absolute_import, division, print_function
-from tornado import netutil
-from tornado.escape import json_decode, json_encode, utf8, _unicode, recursive_unicode, native_str
-from tornado import gen
-from tornado.http1connection import HTTP1Connection
-from tornado.httpserver import HTTPServer
-from tornado.httputil import HTTPHeaders, HTTPMessageDelegate, HTTPServerConnectionDelegate, ResponseStartLine
-from tornado.iostream import IOStream
-from tornado.log import gen_log
-from tornado.netutil import ssl_options_to_context
-from tornado.simple_httpclient import SimpleAsyncHTTPClient
-from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, ExpectLog, gen_test
-from tornado.test.util import unittest, skipOnTravis
-from tornado.web import Application, RequestHandler, asynchronous, stream_request_body
-from contextlib import closing
-import datetime
-import gzip
-import os
-import shutil
-import socket
-import ssl
-import sys
-import tempfile
-from io import BytesIO
-
-
-def read_stream_body(stream, callback):
-    """Reads an HTTP response from `stream` and runs callback with its
-    headers and body."""
-    chunks = []
-
-    class Delegate(HTTPMessageDelegate):
-        def headers_received(self, start_line, headers):
-            self.headers = headers
-
-        def data_received(self, chunk):
-            chunks.append(chunk)
-
-        def finish(self):
-            callback((self.headers, b''.join(chunks)))
-    conn = HTTP1Connection(stream, True)
-    conn.read_response(Delegate())
-
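-# Usage sketch (assumes an AsyncTestCase so that self.stop and self.wait are
-# available), mirroring how the tests below consume read_stream_body:
-#
-#     read_stream_body(stream, self.stop)
-#     headers, body = self.wait()
-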
-
-class HandlerBaseTestCase(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application([('/', self.__class__.Handler)])
-
-    def fetch_json(self, *args, **kwargs):
-        response = self.fetch(*args, **kwargs)
-        response.rethrow()
-        return json_decode(response.body)
-
-
-class HelloWorldRequestHandler(RequestHandler):
-    def initialize(self, protocol="http"):
-        self.expected_protocol = protocol
-
-    def get(self):
-        if self.request.protocol != self.expected_protocol:
-            raise Exception("unexpected protocol")
-        self.finish("Hello world")
-
-    def post(self):
-        self.finish("Got %d bytes in POST" % len(self.request.body))
-
-
-# In pre-1.0 versions of openssl, SSLv23 clients always send SSLv2
-# ClientHello messages, which are rejected by SSLv3 and TLSv1
-# servers.  Note that while OPENSSL_VERSION_INFO was formally
-# introduced in Python 3.2, it was present but undocumented in
-# Python 2.7.
-skipIfOldSSL = unittest.skipIf(
-    getattr(ssl, 'OPENSSL_VERSION_INFO', (0, 0)) < (1, 0),
-    "old version of ssl module and/or openssl")
-
-
-class BaseSSLTest(AsyncHTTPSTestCase):
-    def get_app(self):
-        return Application([('/', HelloWorldRequestHandler,
-                             dict(protocol="https"))])
-
-
-class SSLTestMixin(object):
-    def get_ssl_options(self):
-        return dict(ssl_version=self.get_ssl_version(),  # type: ignore
-                    **AsyncHTTPSTestCase.get_ssl_options())
-
-    def get_ssl_version(self):
-        raise NotImplementedError()
-
-    def test_ssl(self):
-        response = self.fetch('/')
-        self.assertEqual(response.body, b"Hello world")
-
-    def test_large_post(self):
-        response = self.fetch('/',
-                              method='POST',
-                              body='A' * 5000)
-        self.assertEqual(response.body, b"Got 5000 bytes in POST")
-
-    def test_non_ssl_request(self):
-        # Make sure the server closes the connection when it gets a non-ssl
-        # connection, rather than waiting for a timeout or otherwise
-        # misbehaving.
-        with ExpectLog(gen_log, '(SSL Error|uncaught exception)'):
-            with ExpectLog(gen_log, 'Uncaught exception', required=False):
-                self.http_client.fetch(
-                    self.get_url("/").replace('https:', 'http:'),
-                    self.stop,
-                    request_timeout=3600,
-                    connect_timeout=3600)
-                response = self.wait()
-        self.assertEqual(response.code, 599)
-
-    def test_error_logging(self):
-        # No stack traces are logged for SSL errors.
-        with ExpectLog(gen_log, 'SSL Error') as expect_log:
-            self.http_client.fetch(
-                self.get_url("/").replace("https:", "http:"),
-                self.stop)
-            response = self.wait()
-            self.assertEqual(response.code, 599)
-        self.assertFalse(expect_log.logged_stack)
-
-# Python's SSL implementation differs significantly between versions.
-# For example, SSLv3 and TLSv1 throw an exception if you try to read
-# from the socket before the handshake is complete, but the default
-# of SSLv23 allows it.
-
-
-class SSLv23Test(BaseSSLTest, SSLTestMixin):
-    def get_ssl_version(self):
-        return ssl.PROTOCOL_SSLv23
-
-
-@skipIfOldSSL
-class SSLv3Test(BaseSSLTest, SSLTestMixin):
-    def get_ssl_version(self):
-        return ssl.PROTOCOL_SSLv3
-
-
-@skipIfOldSSL
-class TLSv1Test(BaseSSLTest, SSLTestMixin):
-    def get_ssl_version(self):
-        return ssl.PROTOCOL_TLSv1
-
-
-@unittest.skipIf(not hasattr(ssl, 'SSLContext'), 'ssl.SSLContext not present')
-class SSLContextTest(BaseSSLTest, SSLTestMixin):
-    def get_ssl_options(self):
-        context = ssl_options_to_context(
-            AsyncHTTPSTestCase.get_ssl_options(self))
-        assert isinstance(context, ssl.SSLContext)
-        return context
-
-
-class BadSSLOptionsTest(unittest.TestCase):
-    def test_missing_arguments(self):
-        application = Application()
-        self.assertRaises(KeyError, HTTPServer, application, ssl_options={
-            "keyfile": "/__missing__.crt",
-        })
-
-    def test_missing_key(self):
-        """A missing SSL key should cause an immediate exception."""
-
-        application = Application()
-        module_dir = os.path.dirname(__file__)
-        existing_certificate = os.path.join(module_dir, 'test.crt')
-        existing_key = os.path.join(module_dir, 'test.key')
-
-        self.assertRaises((ValueError, IOError),
-                          HTTPServer, application, ssl_options={
-                              "certfile": "/__missing__.crt",
-        })
-        self.assertRaises((ValueError, IOError),
-                          HTTPServer, application, ssl_options={
-                              "certfile": existing_certificate,
-                              "keyfile": "/__missing__.key"
-        })
-
-        # This actually works because both files exist
-        HTTPServer(application, ssl_options={
-                   "certfile": existing_certificate,
-                   "keyfile": existing_key,
-                   })
-
-
-class MultipartTestHandler(RequestHandler):
-    def post(self):
-        self.finish({"header": self.request.headers["X-Header-Encoding-Test"],
-                     "argument": self.get_argument("argument"),
-                     "filename": self.request.files["files"][0].filename,
-                     "filebody": _unicode(self.request.files["files"][0]["body"]),
-                     })
-
-
-# This test is also called from wsgi_test
-class HTTPConnectionTest(AsyncHTTPTestCase):
-    def get_handlers(self):
-        return [("/multipart", MultipartTestHandler),
-                ("/hello", HelloWorldRequestHandler)]
-
-    def get_app(self):
-        return Application(self.get_handlers())
-
-    def raw_fetch(self, headers, body, newline=b"\r\n"):
-        with closing(IOStream(socket.socket())) as stream:
-            stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
-            self.wait()
-            stream.write(
-                newline.join(headers +
-                             [utf8("Content-Length: %d" % len(body))]) +
-                newline + newline + body)
-            read_stream_body(stream, self.stop)
-            headers, body = self.wait()
-            return body
-
-    def test_multipart_form(self):
-        # Encodings here are tricky:  Headers are latin1, bodies can be
-        # anything (we use utf8 by default).
-        response = self.raw_fetch([
-            b"POST /multipart HTTP/1.0",
-            b"Content-Type: multipart/form-data; boundary=1234567890",
-            b"X-Header-encoding-test: \xe9",
-        ],
-            b"\r\n".join([
-                b"Content-Disposition: form-data; name=argument",
-                b"",
-                u"\u00e1".encode("utf-8"),
-                b"--1234567890",
-                u'Content-Disposition: form-data; name="files"; filename="\u00f3"'.encode("utf8"),
-                b"",
-                u"\u00fa".encode("utf-8"),
-                b"--1234567890--",
-                b"",
-            ]))
-        data = json_decode(response)
-        self.assertEqual(u"\u00e9", data["header"])
-        self.assertEqual(u"\u00e1", data["argument"])
-        self.assertEqual(u"\u00f3", data["filename"])
-        self.assertEqual(u"\u00fa", data["filebody"])
-
-    def test_newlines(self):
-        # We support both CRLF and bare LF as line separators.
-        for newline in (b"\r\n", b"\n"):
-            response = self.raw_fetch([b"GET /hello HTTP/1.0"], b"",
-                                      newline=newline)
-            self.assertEqual(response, b'Hello world')
-
-    def test_100_continue(self):
-        # Run through a 100-continue interaction by hand:
-        # When given Expect: 100-continue, we get a 100 response after the
-        # headers, and then the real response after the body.
-        stream = IOStream(socket.socket(), io_loop=self.io_loop)
-        stream.connect(("127.0.0.1", self.get_http_port()), callback=self.stop)
-        self.wait()
-        stream.write(b"\r\n".join([b"POST /hello HTTP/1.1",
-                                   b"Content-Length: 1024",
-                                   b"Expect: 100-continue",
-                                   b"Connection: close",
-                                   b"\r\n"]), callback=self.stop)
-        self.wait()
-        stream.read_until(b"\r\n\r\n", self.stop)
-        data = self.wait()
-        self.assertTrue(data.startswith(b"HTTP/1.1 100 "), data)
-        stream.write(b"a" * 1024)
-        stream.read_until(b"\r\n", self.stop)
-        first_line = self.wait()
-        self.assertTrue(first_line.startswith(b"HTTP/1.1 200"), first_line)
-        stream.read_until(b"\r\n\r\n", self.stop)
-        header_data = self.wait()
-        headers = HTTPHeaders.parse(native_str(header_data.decode('latin1')))
-        stream.read_bytes(int(headers["Content-Length"]), self.stop)
-        body = self.wait()
-        self.assertEqual(body, b"Got 1024 bytes in POST")
-        stream.close()
-
-
-class EchoHandler(RequestHandler):
-    def get(self):
-        self.write(recursive_unicode(self.request.arguments))
-
-    def post(self):
-        self.write(recursive_unicode(self.request.arguments))
-
-
-class TypeCheckHandler(RequestHandler):
-    def prepare(self):
-        self.errors = {}
-        fields = [
-            ('method', str),
-            ('uri', str),
-            ('version', str),
-            ('remote_ip', str),
-            ('protocol', str),
-            ('host', str),
-            ('path', str),
-            ('query', str),
-        ]
-        for field, expected_type in fields:
-            self.check_type(field, getattr(self.request, field), expected_type)
-
-        self.check_type('header_key', list(self.request.headers.keys())[0], str)
-        self.check_type('header_value', list(self.request.headers.values())[0], str)
-
-        self.check_type('cookie_key', list(self.request.cookies.keys())[0], str)
-        self.check_type('cookie_value', list(self.request.cookies.values())[0].value, str)
-        # secure cookies
-
-        self.check_type('arg_key', list(self.request.arguments.keys())[0], str)
-        self.check_type('arg_value', list(self.request.arguments.values())[0][0], bytes)
-
-    def post(self):
-        self.check_type('body', self.request.body, bytes)
-        self.write(self.errors)
-
-    def get(self):
-        self.write(self.errors)
-
-    def check_type(self, name, obj, expected_type):
-        actual_type = type(obj)
-        if expected_type != actual_type:
-            self.errors[name] = "expected %s, got %s" % (expected_type,
-                                                         actual_type)
-
-
-class HTTPServerTest(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application([("/echo", EchoHandler),
-                            ("/typecheck", TypeCheckHandler),
-                            ("//doubleslash", EchoHandler),
-                            ])
-
-    def test_query_string_encoding(self):
-        response = self.fetch("/echo?foo=%C3%A9")
-        data = json_decode(response.body)
-        self.assertEqual(data, {u"foo": [u"\u00e9"]})
-
-    def test_empty_query_string(self):
-        response = self.fetch("/echo?foo=&foo=")
-        data = json_decode(response.body)
-        self.assertEqual(data, {u"foo": [u"", u""]})
-
-    def test_empty_post_parameters(self):
-        response = self.fetch("/echo", method="POST", body="foo=&bar=")
-        data = json_decode(response.body)
-        self.assertEqual(data, {u"foo": [u""], u"bar": [u""]})
-
-    def test_types(self):
-        headers = {"Cookie": "foo=bar"}
-        response = self.fetch("/typecheck?foo=bar", headers=headers)
-        data = json_decode(response.body)
-        self.assertEqual(data, {})
-
-        response = self.fetch("/typecheck", method="POST", body="foo=bar", headers=headers)
-        data = json_decode(response.body)
-        self.assertEqual(data, {})
-
-    def test_double_slash(self):
-        # urlparse.urlsplit (which tornado.httpserver used to use
-        # incorrectly) would parse paths beginning with "//" as
-        # protocol-relative urls.
-        response = self.fetch("//doubleslash")
-        self.assertEqual(200, response.code)
-        self.assertEqual(json_decode(response.body), {})
-
-    def test_malformed_body(self):
-        # parse_qs is pretty forgiving, but it will fail on python 3
-        # if the data is not utf8.  On python 2 parse_qs will work,
-        # but then the recursive_unicode call in EchoHandler will
-        # fail.
-        if str is bytes:
-            return
-        with ExpectLog(gen_log, 'Invalid x-www-form-urlencoded body'):
-            response = self.fetch(
-                '/echo', method="POST",
-                headers={'Content-Type': 'application/x-www-form-urlencoded'},
-                body=b'\xe9')
-        self.assertEqual(200, response.code)
-        self.assertEqual(b'{}', response.body)
-
-
-class HTTPServerRawTest(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application([
-            ('/echo', EchoHandler),
-        ])
-
-    def setUp(self):
-        super(HTTPServerRawTest, self).setUp()
-        self.stream = IOStream(socket.socket())
-        self.stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
-        self.wait()
-
-    def tearDown(self):
-        self.stream.close()
-        super(HTTPServerRawTest, self).tearDown()
-
-    def test_empty_request(self):
-        self.stream.close()
-        self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop)
-        self.wait()
-
-    def test_malformed_first_line(self):
-        with ExpectLog(gen_log, '.*Malformed HTTP request line'):
-            self.stream.write(b'asdf\r\n\r\n')
-            # TODO: need an async version of ExpectLog so we don't need
-            # hard-coded timeouts here.
-            self.io_loop.add_timeout(datetime.timedelta(seconds=0.05),
-                                     self.stop)
-            self.wait()
-
-    def test_malformed_headers(self):
-        with ExpectLog(gen_log, '.*Malformed HTTP headers'):
-            self.stream.write(b'GET / HTTP/1.0\r\nasdf\r\n\r\n')
-            self.io_loop.add_timeout(datetime.timedelta(seconds=0.05),
-                                     self.stop)
-            self.wait()
-
-    def test_chunked_request_body(self):
-        # Chunked requests are not widely supported and we don't have a way
-        # to generate them in AsyncHTTPClient, but HTTPServer will read them.
-        self.stream.write(b"""\
-POST /echo HTTP/1.1
-Transfer-Encoding: chunked
-Content-Type: application/x-www-form-urlencoded
-
-4
-foo=
-3
-bar
-0
-
-""".replace(b"\n", b"\r\n"))
-        read_stream_body(self.stream, self.stop)
-        headers, response = self.wait()
-        self.assertEqual(json_decode(response), {u'foo': [u'bar']})
-
-    def test_chunked_request_uppercase(self):
-        # As per RFC 2616 section 3.6, the value of the "Transfer-Encoding"
-        # header is case-insensitive.
-        self.stream.write(b"""\
-POST /echo HTTP/1.1
-Transfer-Encoding: Chunked
-Content-Type: application/x-www-form-urlencoded
-
-4
-foo=
-3
-bar
-0
-
-""".replace(b"\n", b"\r\n"))
-        read_stream_body(self.stream, self.stop)
-        headers, response = self.wait()
-        self.assertEqual(json_decode(response), {u'foo': [u'bar']})
-
-    def test_invalid_content_length(self):
-        with ExpectLog(gen_log, '.*Only integer Content-Length is allowed'):
-            self.stream.write(b"""\
-POST /echo HTTP/1.1
-Content-Length: foo
-
-bar
-
-""".replace(b"\n", b"\r\n"))
-            self.stream.read_until_close(self.stop)
-            self.wait()
-
-
-class XHeaderTest(HandlerBaseTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.write(dict(remote_ip=self.request.remote_ip,
-                            remote_protocol=self.request.protocol))
-
-    def get_httpserver_options(self):
-        return dict(xheaders=True, trusted_downstream=['5.5.5.5'])
-
-    def test_ip_headers(self):
-        self.assertEqual(self.fetch_json("/")["remote_ip"], "127.0.0.1")
-
-        valid_ipv4 = {"X-Real-IP": "4.4.4.4"}
-        self.assertEqual(
-            self.fetch_json("/", headers=valid_ipv4)["remote_ip"],
-            "4.4.4.4")
-
-        valid_ipv4_list = {"X-Forwarded-For": "127.0.0.1, 4.4.4.4"}
-        self.assertEqual(
-            self.fetch_json("/", headers=valid_ipv4_list)["remote_ip"],
-            "4.4.4.4")
-
-        valid_ipv6 = {"X-Real-IP": "2620:0:1cfe:face:b00c::3"}
-        self.assertEqual(
-            self.fetch_json("/", headers=valid_ipv6)["remote_ip"],
-            "2620:0:1cfe:face:b00c::3")
-
-        valid_ipv6_list = {"X-Forwarded-For": "::1, 2620:0:1cfe:face:b00c::3"}
-        self.assertEqual(
-            self.fetch_json("/", headers=valid_ipv6_list)["remote_ip"],
-            "2620:0:1cfe:face:b00c::3")
-
-        invalid_chars = {"X-Real-IP": "4.4.4.4<script>"}
-        self.assertEqual(
-            self.fetch_json("/", headers=invalid_chars)["remote_ip"],
-            "127.0.0.1")
-
-        invalid_chars_list = {"X-Forwarded-For": "4.4.4.4, 5.5.5.5<script>"}
-        self.assertEqual(
-            self.fetch_json("/", headers=invalid_chars_list)["remote_ip"],
-            "127.0.0.1")
-
-        invalid_host = {"X-Real-IP": "www.google.com"}
-        self.assertEqual(
-            self.fetch_json("/", headers=invalid_host)["remote_ip"],
-            "127.0.0.1")
-
-    def test_trusted_downstream(self):
-
-        valid_ipv4_list = {"X-Forwarded-For": "127.0.0.1, 4.4.4.4, 5.5.5.5"}
-        self.assertEqual(
-            self.fetch_json("/", headers=valid_ipv4_list)["remote_ip"],
-            "4.4.4.4")
-
-    def test_scheme_headers(self):
-        self.assertEqual(self.fetch_json("/")["remote_protocol"], "http")
-
-        https_scheme = {"X-Scheme": "https"}
-        self.assertEqual(
-            self.fetch_json("/", headers=https_scheme)["remote_protocol"],
-            "https")
-
-        https_forwarded = {"X-Forwarded-Proto": "https"}
-        self.assertEqual(
-            self.fetch_json("/", headers=https_forwarded)["remote_protocol"],
-            "https")
-
-        bad_forwarded = {"X-Forwarded-Proto": "unknown"}
-        self.assertEqual(
-            self.fetch_json("/", headers=bad_forwarded)["remote_protocol"],
-            "http")
-
-
-class SSLXHeaderTest(AsyncHTTPSTestCase, HandlerBaseTestCase):
-    def get_app(self):
-        return Application([('/', XHeaderTest.Handler)])
-
-    def get_httpserver_options(self):
-        output = super(SSLXHeaderTest, self).get_httpserver_options()
-        output['xheaders'] = True
-        return output
-
-    def test_request_without_xprotocol(self):
-        self.assertEqual(self.fetch_json("/")["remote_protocol"], "https")
-
-        http_scheme = {"X-Scheme": "http"}
-        self.assertEqual(
-            self.fetch_json("/", headers=http_scheme)["remote_protocol"], "http")
-
-        bad_scheme = {"X-Scheme": "unknown"}
-        self.assertEqual(
-            self.fetch_json("/", headers=bad_scheme)["remote_protocol"], "https")
-
-
-class ManualProtocolTest(HandlerBaseTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.write(dict(protocol=self.request.protocol))
-
-    def get_httpserver_options(self):
-        return dict(protocol='https')
-
-    def test_manual_protocol(self):
-        self.assertEqual(self.fetch_json('/')['protocol'], 'https')
-
-
-@unittest.skipIf(not hasattr(socket, 'AF_UNIX') or sys.platform == 'cygwin',
-                 "unix sockets not supported on this platform")
-class UnixSocketTest(AsyncTestCase):
-    """HTTPServers can listen on Unix sockets too.
-
-    Why would you want to do this?  Nginx can proxy to backends listening
-    on unix sockets, for one thing (and managing a namespace for unix
-    sockets can be easier than managing a bunch of TCP port numbers).
-
-    Unfortunately, there's no way to specify a unix socket in a url for
-    an HTTP client, so we have to test this by hand.
-    """
-    def setUp(self):
-        super(UnixSocketTest, self).setUp()
-        self.tmpdir = tempfile.mkdtemp()
-        self.sockfile = os.path.join(self.tmpdir, "test.sock")
-        sock = netutil.bind_unix_socket(self.sockfile)
-        app = Application([("/hello", HelloWorldRequestHandler)])
-        self.server = HTTPServer(app, io_loop=self.io_loop)
-        self.server.add_socket(sock)
-        self.stream = IOStream(socket.socket(socket.AF_UNIX), io_loop=self.io_loop)
-        self.stream.connect(self.sockfile, self.stop)
-        self.wait()
-
-    def tearDown(self):
-        self.stream.close()
-        self.server.stop()
-        shutil.rmtree(self.tmpdir)
-        super(UnixSocketTest, self).tearDown()
-
-    def test_unix_socket(self):
-        self.stream.write(b"GET /hello HTTP/1.0\r\n\r\n")
-        self.stream.read_until(b"\r\n", self.stop)
-        response = self.wait()
-        self.assertEqual(response, b"HTTP/1.1 200 OK\r\n")
-        self.stream.read_until(b"\r\n\r\n", self.stop)
-        headers = HTTPHeaders.parse(self.wait().decode('latin1'))
-        self.stream.read_bytes(int(headers["Content-Length"]), self.stop)
-        body = self.wait()
-        self.assertEqual(body, b"Hello world")
-
-    def test_unix_socket_bad_request(self):
-        # Unix sockets don't have remote addresses, so the remote address
-        # is reported as an empty string.
-        with ExpectLog(gen_log, "Malformed HTTP message from"):
-            self.stream.write(b"garbage\r\n\r\n")
-            self.stream.read_until_close(self.stop)
-            response = self.wait()
-        self.assertEqual(response, b"")
-
-
-class KeepAliveTest(AsyncHTTPTestCase):
-    """Tests various scenarios for HTTP 1.1 keep-alive support.
-
-    These tests don't use AsyncHTTPClient because we want to control
-    connection reuse and closing.
-    """
-    def get_app(self):
-        class HelloHandler(RequestHandler):
-            def get(self):
-                self.finish('Hello world')
-
-            def post(self):
-                self.finish('Hello world')
-
-        class LargeHandler(RequestHandler):
-            def get(self):
-                # 512KB should be bigger than the socket buffers so it will
-                # be written out in chunks.
-                self.write(''.join(chr(i % 256) * 1024 for i in range(512)))
-
-        class FinishOnCloseHandler(RequestHandler):
-            @asynchronous
-            def get(self):
-                self.flush()
-
-            def on_connection_close(self):
-                # This is not very realistic, but finishing the request
-                # from the close callback has the right timing to mimic
-                # some errors seen in the wild.
-                self.finish('closed')
-
-        return Application([('/', HelloHandler),
-                            ('/large', LargeHandler),
-                            ('/finish_on_close', FinishOnCloseHandler)])
-
-    def setUp(self):
-        super(KeepAliveTest, self).setUp()
-        self.http_version = b'HTTP/1.1'
-
-    def tearDown(self):
-        # We just closed the client side of the socket; let the IOLoop run
-        # once to make sure the server side got the message.
-        self.io_loop.add_timeout(datetime.timedelta(seconds=0.001), self.stop)
-        self.wait()
-
-        if hasattr(self, 'stream'):
-            self.stream.close()
-        super(KeepAliveTest, self).tearDown()
-
-    # The next few methods are a crude manual http client
-    def connect(self):
-        self.stream = IOStream(socket.socket(), io_loop=self.io_loop)
-        self.stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
-        self.wait()
-
-    def read_headers(self):
-        self.stream.read_until(b'\r\n', self.stop)
-        first_line = self.wait()
-        self.assertTrue(first_line.startswith(b'HTTP/1.1 200'), first_line)
-        self.stream.read_until(b'\r\n\r\n', self.stop)
-        header_bytes = self.wait()
-        headers = HTTPHeaders.parse(header_bytes.decode('latin1'))
-        return headers
-
-    def read_response(self):
-        self.headers = self.read_headers()
-        self.stream.read_bytes(int(self.headers['Content-Length']), self.stop)
-        body = self.wait()
-        self.assertEqual(b'Hello world', body)
-
-    def close(self):
-        self.stream.close()
-        del self.stream
-
-    def test_two_requests(self):
-        self.connect()
-        self.stream.write(b'GET / HTTP/1.1\r\n\r\n')
-        self.read_response()
-        self.stream.write(b'GET / HTTP/1.1\r\n\r\n')
-        self.read_response()
-        self.close()
-
-    def test_request_close(self):
-        self.connect()
-        self.stream.write(b'GET / HTTP/1.1\r\nConnection: close\r\n\r\n')
-        self.read_response()
-        self.stream.read_until_close(callback=self.stop)
-        data = self.wait()
-        self.assertTrue(not data)
-        self.close()
-
-    # Keep-alive is supported for HTTP/1.0 too, but it's opt-in.
-    def test_http10(self):
-        self.http_version = b'HTTP/1.0'
-        self.connect()
-        self.stream.write(b'GET / HTTP/1.0\r\n\r\n')
-        self.read_response()
-        self.stream.read_until_close(callback=self.stop)
-        data = self.wait()
-        self.assertTrue(not data)
-        self.assertTrue('Connection' not in self.headers)
-        self.close()
-
-    def test_http10_keepalive(self):
-        self.http_version = b'HTTP/1.0'
-        self.connect()
-        self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n')
-        self.read_response()
-        self.assertEqual(self.headers['Connection'], 'Keep-Alive')
-        self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n')
-        self.read_response()
-        self.assertEqual(self.headers['Connection'], 'Keep-Alive')
-        self.close()
-
-    def test_http10_keepalive_extra_crlf(self):
-        self.http_version = b'HTTP/1.0'
-        self.connect()
-        self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n\r\n')
-        self.read_response()
-        self.assertEqual(self.headers['Connection'], 'Keep-Alive')
-        self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n')
-        self.read_response()
-        self.assertEqual(self.headers['Connection'], 'Keep-Alive')
-        self.close()
-
-    def test_pipelined_requests(self):
-        self.connect()
-        self.stream.write(b'GET / HTTP/1.1\r\n\r\nGET / HTTP/1.1\r\n\r\n')
-        self.read_response()
-        self.read_response()
-        self.close()
-
-    def test_pipelined_cancel(self):
-        self.connect()
-        self.stream.write(b'GET / HTTP/1.1\r\n\r\nGET / HTTP/1.1\r\n\r\n')
-        # only read once
-        self.read_response()
-        self.close()
-
-    def test_cancel_during_download(self):
-        self.connect()
-        self.stream.write(b'GET /large HTTP/1.1\r\n\r\n')
-        self.read_headers()
-        self.stream.read_bytes(1024, self.stop)
-        self.wait()
-        self.close()
-
-    def test_finish_while_closed(self):
-        self.connect()
-        self.stream.write(b'GET /finish_on_close HTTP/1.1\r\n\r\n')
-        self.read_headers()
-        self.close()
-
-    def test_keepalive_chunked(self):
-        self.http_version = b'HTTP/1.0'
-        self.connect()
-        self.stream.write(b'POST / HTTP/1.0\r\nConnection: keep-alive\r\n'
-                          b'Transfer-Encoding: chunked\r\n'
-                          b'\r\n0\r\n')
-        self.read_response()
-        self.assertEqual(self.headers['Connection'], 'Keep-Alive')
-        self.stream.write(b'GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n')
-        self.read_response()
-        self.assertEqual(self.headers['Connection'], 'Keep-Alive')
-        self.close()
-
-
-class GzipBaseTest(object):
-    def get_app(self):
-        return Application([('/', EchoHandler)])
-
-    def post_gzip(self, body):
-        bytesio = BytesIO()
-        gzip_file = gzip.GzipFile(mode='w', fileobj=bytesio)
-        gzip_file.write(utf8(body))
-        gzip_file.close()
-        compressed_body = bytesio.getvalue()
-        return self.fetch('/', method='POST', body=compressed_body,
-                          headers={'Content-Encoding': 'gzip'})
-
-    def test_uncompressed(self):
-        response = self.fetch('/', method='POST', body='foo=bar')
-        self.assertEqual(json_decode(response.body), {u'foo': [u'bar']})
-
-
-class GzipTest(GzipBaseTest, AsyncHTTPTestCase):
-    def get_httpserver_options(self):
-        return dict(decompress_request=True)
-
-    def test_gzip(self):
-        response = self.post_gzip('foo=bar')
-        self.assertEqual(json_decode(response.body), {u'foo': [u'bar']})
-
-
-class GzipUnsupportedTest(GzipBaseTest, AsyncHTTPTestCase):
-    def test_gzip_unsupported(self):
-        # Gzip support is opt-in; without it the server fails to parse
-        # the body (but a failure to parse a form body is currently just a
-        # log message, not a fatal error).
-        with ExpectLog(gen_log, "Unsupported Content-Encoding"):
-            response = self.post_gzip('foo=bar')
-        self.assertEqual(json_decode(response.body), {})
-
-
-class StreamingChunkSizeTest(AsyncHTTPTestCase):
-    # 50 characters long, and repetitive so it can be compressed.
-    BODY = b'01234567890123456789012345678901234567890123456789'
-    CHUNK_SIZE = 16
-
-    def get_http_client(self):
-        # body_producer doesn't work on curl_httpclient, so override the
-        # configured AsyncHTTPClient implementation.
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop)
-
-    def get_httpserver_options(self):
-        return dict(chunk_size=self.CHUNK_SIZE, decompress_request=True)
-
-    class MessageDelegate(HTTPMessageDelegate):
-        def __init__(self, connection):
-            self.connection = connection
-
-        def headers_received(self, start_line, headers):
-            self.chunk_lengths = []
-
-        def data_received(self, chunk):
-            self.chunk_lengths.append(len(chunk))
-
-        def finish(self):
-            response_body = utf8(json_encode(self.chunk_lengths))
-            self.connection.write_headers(
-                ResponseStartLine('HTTP/1.1', 200, 'OK'),
-                HTTPHeaders({'Content-Length': str(len(response_body))}))
-            self.connection.write(response_body)
-            self.connection.finish()
-
-    def get_app(self):
-        class App(HTTPServerConnectionDelegate):
-            def start_request(self, server_conn, request_conn):
-                return StreamingChunkSizeTest.MessageDelegate(request_conn)
-        return App()
-
-    def fetch_chunk_sizes(self, **kwargs):
-        response = self.fetch('/', method='POST', **kwargs)
-        response.rethrow()
-        chunks = json_decode(response.body)
-        self.assertEqual(len(self.BODY), sum(chunks))
-        for chunk_size in chunks:
-            self.assertLessEqual(chunk_size, self.CHUNK_SIZE,
-                                 'oversized chunk: ' + str(chunks))
-            self.assertGreater(chunk_size, 0,
-                               'empty chunk: ' + str(chunks))
-        return chunks
-
-    def compress(self, body):
-        bytesio = BytesIO()
-        gzfile = gzip.GzipFile(mode='w', fileobj=bytesio)
-        gzfile.write(body)
-        gzfile.close()
-        compressed = bytesio.getvalue()
-        if len(compressed) >= len(body):
-            raise Exception("body did not shrink when compressed")
-        return compressed
-
-    def test_regular_body(self):
-        chunks = self.fetch_chunk_sizes(body=self.BODY)
-        # Without compression we know exactly what to expect.
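-        # (50-byte body split at CHUNK_SIZE=16: 16 + 16 + 16 + 2 = 50.)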
-        self.assertEqual([16, 16, 16, 2], chunks)
-
-    def test_compressed_body(self):
-        self.fetch_chunk_sizes(body=self.compress(self.BODY),
-                               headers={'Content-Encoding': 'gzip'})
-        # Compression creates irregular boundaries so the assertions
-        # in fetch_chunk_sizes are as specific as we can get.
-
-    def test_chunked_body(self):
-        def body_producer(write):
-            write(self.BODY[:20])
-            write(self.BODY[20:])
-        chunks = self.fetch_chunk_sizes(body_producer=body_producer)
-        # HTTP chunk boundaries translate to application-visible breaks
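-        # (body_producer writes 20 then 30 bytes; each write is re-split at
-        # CHUNK_SIZE=16, giving 16 + 4 and then 16 + 14.)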
-        self.assertEqual([16, 4, 16, 14], chunks)
-
-    def test_chunked_compressed(self):
-        compressed = self.compress(self.BODY)
-        self.assertGreater(len(compressed), 20)
-
-        def body_producer(write):
-            write(compressed[:20])
-            write(compressed[20:])
-        self.fetch_chunk_sizes(body_producer=body_producer,
-                               headers={'Content-Encoding': 'gzip'})
-
-
-class MaxHeaderSizeTest(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application([('/', HelloWorldRequestHandler)])
-
-    def get_httpserver_options(self):
-        return dict(max_header_size=1024)
-
-    def test_small_headers(self):
-        response = self.fetch("/", headers={'X-Filler': 'a' * 100})
-        response.rethrow()
-        self.assertEqual(response.body, b"Hello world")
-
-    def test_large_headers(self):
-        with ExpectLog(gen_log, "Unsatisfiable read", required=False):
-            response = self.fetch("/", headers={'X-Filler': 'a' * 1000})
-        # 431 is "Request Header Fields Too Large", defined in RFC
-        # 6585. However, many implementations just close the
-        # connection in this case, resulting in a 599.
-        self.assertIn(response.code, (431, 599))
-
-
-@skipOnTravis
-class IdleTimeoutTest(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application([('/', HelloWorldRequestHandler)])
-
-    def get_httpserver_options(self):
-        return dict(idle_connection_timeout=0.1)
-
-    def setUp(self):
-        super(IdleTimeoutTest, self).setUp()
-        self.streams = []
-
-    def tearDown(self):
-        super(IdleTimeoutTest, self).tearDown()
-        for stream in self.streams:
-            stream.close()
-
-    def connect(self):
-        stream = IOStream(socket.socket())
-        stream.connect(('127.0.0.1', self.get_http_port()), self.stop)
-        self.wait()
-        self.streams.append(stream)
-        return stream
-
-    def test_unused_connection(self):
-        stream = self.connect()
-        stream.set_close_callback(self.stop)
-        self.wait()
-
-    def test_idle_after_use(self):
-        stream = self.connect()
-        stream.set_close_callback(lambda: self.stop("closed"))
-
-        # Use the connection twice to make sure keep-alives are working
-        for i in range(2):
-            stream.write(b"GET / HTTP/1.1\r\n\r\n")
-            stream.read_until(b"\r\n\r\n", self.stop)
-            self.wait()
-            stream.read_bytes(11, self.stop)
-            data = self.wait()
-            self.assertEqual(data, b"Hello world")
-
-        # Now let the timeout trigger and close the connection.
-        data = self.wait()
-        self.assertEqual(data, "closed")
-
-
-class BodyLimitsTest(AsyncHTTPTestCase):
-    def get_app(self):
-        class BufferedHandler(RequestHandler):
-            def put(self):
-                self.write(str(len(self.request.body)))
-
-        @stream_request_body
-        class StreamingHandler(RequestHandler):
-            def initialize(self):
-                self.bytes_read = 0
-
-            def prepare(self):
-                if 'expected_size' in self.request.arguments:
-                    self.request.connection.set_max_body_size(
-                        int(self.get_argument('expected_size')))
-                if 'body_timeout' in self.request.arguments:
-                    self.request.connection.set_body_timeout(
-                        float(self.get_argument('body_timeout')))
-
-            def data_received(self, data):
-                self.bytes_read += len(data)
-
-            def put(self):
-                self.write(str(self.bytes_read))
-
-        return Application([('/buffered', BufferedHandler),
-                            ('/streaming', StreamingHandler)])
-
-    def get_httpserver_options(self):
-        return dict(body_timeout=3600, max_body_size=4096)
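-
-    # Note: max_body_size=4096 is the server-wide cap; StreamingHandler.prepare
-    # above can raise it per request via connection.set_max_body_size(), which
-    # is what the *_override tests below rely on.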
-
-    def get_http_client(self):
-        # body_producer doesn't work on curl_httpclient, so override the
-        # configured AsyncHTTPClient implementation.
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop)
-
-    def test_small_body(self):
-        response = self.fetch('/buffered', method='PUT', body=b'a' * 4096)
-        self.assertEqual(response.body, b'4096')
-        response = self.fetch('/streaming', method='PUT', body=b'a' * 4096)
-        self.assertEqual(response.body, b'4096')
-
-    def test_large_body_buffered(self):
-        with ExpectLog(gen_log, '.*Content-Length too long'):
-            response = self.fetch('/buffered', method='PUT', body=b'a' * 10240)
-        self.assertEqual(response.code, 599)
-
-    def test_large_body_buffered_chunked(self):
-        with ExpectLog(gen_log, '.*chunked body too large'):
-            response = self.fetch('/buffered', method='PUT',
-                                  body_producer=lambda write: write(b'a' * 10240))
-        self.assertEqual(response.code, 599)
-
-    def test_large_body_streaming(self):
-        with ExpectLog(gen_log, '.*Content-Length too long'):
-            response = self.fetch('/streaming', method='PUT', body=b'a' * 10240)
-        self.assertEqual(response.code, 599)
-
-    def test_large_body_streaming_chunked(self):
-        with ExpectLog(gen_log, '.*chunked body too large'):
-            response = self.fetch('/streaming', method='PUT',
-                                  body_producer=lambda write: write(b'a' * 10240))
-        self.assertEqual(response.code, 599)
-
-    def test_large_body_streaming_override(self):
-        response = self.fetch('/streaming?expected_size=10240', method='PUT',
-                              body=b'a' * 10240)
-        self.assertEqual(response.body, b'10240')
-
-    def test_large_body_streaming_chunked_override(self):
-        response = self.fetch('/streaming?expected_size=10240', method='PUT',
-                              body_producer=lambda write: write(b'a' * 10240))
-        self.assertEqual(response.body, b'10240')
-
-    @gen_test
-    def test_timeout(self):
-        stream = IOStream(socket.socket())
-        try:
-            yield stream.connect(('127.0.0.1', self.get_http_port()))
-            # Use a raw stream because AsyncHTTPClient won't let us read a
-            # response without finishing a body.
-            stream.write(b'PUT /streaming?body_timeout=0.1 HTTP/1.0\r\n'
-                         b'Content-Length: 42\r\n\r\n')
-            with ExpectLog(gen_log, 'Timeout reading body'):
-                response = yield stream.read_until_close()
-            self.assertEqual(response, b'')
-        finally:
-            stream.close()
-
-    @gen_test
-    def test_body_size_override_reset(self):
-        # The max_body_size override is reset between requests.
-        stream = IOStream(socket.socket())
-        try:
-            yield stream.connect(('127.0.0.1', self.get_http_port()))
-            # Use a raw stream so we can make sure it's all on one connection.
-            stream.write(b'PUT /streaming?expected_size=10240 HTTP/1.1\r\n'
-                         b'Content-Length: 10240\r\n\r\n')
-            stream.write(b'a' * 10240)
-            headers, response = yield gen.Task(read_stream_body, stream)
-            self.assertEqual(response, b'10240')
-            # Without the ?expected_size parameter, we get the old default value
-            stream.write(b'PUT /streaming HTTP/1.1\r\n'
-                         b'Content-Length: 10240\r\n\r\n')
-            with ExpectLog(gen_log, '.*Content-Length too long'):
-                data = yield stream.read_until_close()
-            self.assertEqual(data, b'')
-        finally:
-            stream.close()
-
-
-class LegacyInterfaceTest(AsyncHTTPTestCase):
-    def get_app(self):
-        # The old request_callback interface does not implement the
-        # delegate interface, and writes its response via request.write
-        # instead of request.connection.write_headers.
-        def handle_request(request):
-            self.http1 = request.version.startswith("HTTP/1.")
-            if not self.http1:
-                # This test will be skipped if we're using HTTP/2,
-                # so just close it out cleanly using the modern interface.
-                request.connection.write_headers(
-                    ResponseStartLine('', 200, 'OK'),
-                    HTTPHeaders())
-                request.connection.finish()
-                return
-            message = b"Hello world"
-            request.write(utf8("HTTP/1.1 200 OK\r\n"
-                               "Content-Length: %d\r\n\r\n" % len(message)))
-            request.write(message)
-            request.finish()
-        return handle_request
-
-    def test_legacy_interface(self):
-        response = self.fetch('/')
-        if not self.http1:
-            self.skipTest("requires HTTP/1.x")
-        self.assertEqual(response.body, b"Hello world")
diff --git a/lib/tornado/test/httputil_test.py b/lib/tornado/test/httputil_test.py
deleted file mode 100644
index d1278567bbcd4c3c770620d8b6dfd20b4069b20f..0000000000000000000000000000000000000000
--- a/lib/tornado/test/httputil_test.py
+++ /dev/null
@@ -1,466 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-from __future__ import absolute_import, division, print_function
-from tornado.httputil import url_concat, parse_multipart_form_data, HTTPHeaders, format_timestamp, HTTPServerRequest, parse_request_start_line, parse_cookie
-from tornado.escape import utf8, native_str
-from tornado.log import gen_log
-from tornado.testing import ExpectLog
-from tornado.test.util import unittest
-
-import copy
-import datetime
-import logging
-import pickle
-import time
-
-
-class TestUrlConcat(unittest.TestCase):
-    def test_url_concat_no_query_params(self):
-        url = url_concat(
-            "https://localhost/path",
-            [('y', 'y'), ('z', 'z')],
-        )
-        self.assertEqual(url, "https://localhost/path?y=y&z=z")
-
-    def test_url_concat_encode_args(self):
-        url = url_concat(
-            "https://localhost/path",
-            [('y', '/y'), ('z', 'z')],
-        )
-        self.assertEqual(url, "https://localhost/path?y=%2Fy&z=z")
-
-    def test_url_concat_trailing_q(self):
-        url = url_concat(
-            "https://localhost/path?",
-            [('y', 'y'), ('z', 'z')],
-        )
-        self.assertEqual(url, "https://localhost/path?y=y&z=z")
-
-    def test_url_concat_q_with_no_trailing_amp(self):
-        url = url_concat(
-            "https://localhost/path?x",
-            [('y', 'y'), ('z', 'z')],
-        )
-        self.assertEqual(url, "https://localhost/path?x=&y=y&z=z")
-
-    def test_url_concat_trailing_amp(self):
-        url = url_concat(
-            "https://localhost/path?x&",
-            [('y', 'y'), ('z', 'z')],
-        )
-        self.assertEqual(url, "https://localhost/path?x=&y=y&z=z")
-
-    def test_url_concat_mult_params(self):
-        url = url_concat(
-            "https://localhost/path?a=1&b=2",
-            [('y', 'y'), ('z', 'z')],
-        )
-        self.assertEqual(url, "https://localhost/path?a=1&b=2&y=y&z=z")
-
-    def test_url_concat_no_params(self):
-        url = url_concat(
-            "https://localhost/path?r=1&t=2",
-            [],
-        )
-        self.assertEqual(url, "https://localhost/path?r=1&t=2")
-
-    def test_url_concat_none_params(self):
-        url = url_concat(
-            "https://localhost/path?r=1&t=2",
-            None,
-        )
-        self.assertEqual(url, "https://localhost/path?r=1&t=2")
-
-    def test_url_concat_with_frag(self):
-        url = url_concat(
-            "https://localhost/path#tab",
-            [('y', 'y')],
-        )
-        self.assertEqual(url, "https://localhost/path?y=y#tab")
-
-    def test_url_concat_multi_same_params(self):
-        url = url_concat(
-            "https://localhost/path",
-            [('y', 'y1'), ('y', 'y2')],
-        )
-        self.assertEqual(url, "https://localhost/path?y=y1&y=y2")
-
-    def test_url_concat_multi_same_query_params(self):
-        url = url_concat(
-            "https://localhost/path?r=1&r=2",
-            [('y', 'y')],
-        )
-        self.assertEqual(url, "https://localhost/path?r=1&r=2&y=y")
-
-    def test_url_concat_dict_params(self):
-        url = url_concat(
-            "https://localhost/path",
-            dict(y='y'),
-        )
-        self.assertEqual(url, "https://localhost/path?y=y")
-
-
-class MultipartFormDataTest(unittest.TestCase):
-    def test_file_upload(self):
-        data = b"""\
---1234
-Content-Disposition: form-data; name="files"; filename="ab.txt"
-
-Foo
---1234--""".replace(b"\n", b"\r\n")
-        args = {}
-        files = {}
-        parse_multipart_form_data(b"1234", data, args, files)
-        file = files["files"][0]
-        self.assertEqual(file["filename"], "ab.txt")
-        self.assertEqual(file["body"], b"Foo")
-
-    def test_unquoted_names(self):
-        # quotes are optional unless special characters are present
-        data = b"""\
---1234
-Content-Disposition: form-data; name=files; filename=ab.txt
-
-Foo
---1234--""".replace(b"\n", b"\r\n")
-        args = {}
-        files = {}
-        parse_multipart_form_data(b"1234", data, args, files)
-        file = files["files"][0]
-        self.assertEqual(file["filename"], "ab.txt")
-        self.assertEqual(file["body"], b"Foo")
-
-    def test_special_filenames(self):
-        filenames = ['a;b.txt',
-                     'a"b.txt',
-                     'a";b.txt',
-                     'a;"b.txt',
-                     'a";";.txt',
-                     'a\\"b.txt',
-                     'a\\b.txt',
-                     ]
-        for filename in filenames:
-            logging.debug("trying filename %r", filename)
-            data = """\
---1234
-Content-Disposition: form-data; name="files"; filename="%s"
-
-Foo
---1234--""" % filename.replace('\\', '\\\\').replace('"', '\\"')
-            data = utf8(data.replace("\n", "\r\n"))
-            args = {}
-            files = {}
-            parse_multipart_form_data(b"1234", data, args, files)
-            file = files["files"][0]
-            self.assertEqual(file["filename"], filename)
-            self.assertEqual(file["body"], b"Foo")
-
-    def test_boundary_starts_and_ends_with_quotes(self):
-        data = b'''\
---1234
-Content-Disposition: form-data; name="files"; filename="ab.txt"
-
-Foo
---1234--'''.replace(b"\n", b"\r\n")
-        args = {}
-        files = {}
-        parse_multipart_form_data(b'"1234"', data, args, files)
-        file = files["files"][0]
-        self.assertEqual(file["filename"], "ab.txt")
-        self.assertEqual(file["body"], b"Foo")
-
-    def test_missing_headers(self):
-        data = b'''\
---1234
-
-Foo
---1234--'''.replace(b"\n", b"\r\n")
-        args = {}
-        files = {}
-        with ExpectLog(gen_log, "multipart/form-data missing headers"):
-            parse_multipart_form_data(b"1234", data, args, files)
-        self.assertEqual(files, {})
-
-    def test_invalid_content_disposition(self):
-        data = b'''\
---1234
-Content-Disposition: invalid; name="files"; filename="ab.txt"
-
-Foo
---1234--'''.replace(b"\n", b"\r\n")
-        args = {}
-        files = {}
-        with ExpectLog(gen_log, "Invalid multipart/form-data"):
-            parse_multipart_form_data(b"1234", data, args, files)
-        self.assertEqual(files, {})
-
-    def test_line_does_not_end_with_correct_line_break(self):
-        data = b'''\
---1234
-Content-Disposition: form-data; name="files"; filename="ab.txt"
-
-Foo--1234--'''.replace(b"\n", b"\r\n")
-        args = {}
-        files = {}
-        with ExpectLog(gen_log, "Invalid multipart/form-data"):
-            parse_multipart_form_data(b"1234", data, args, files)
-        self.assertEqual(files, {})
-
-    def test_content_disposition_header_without_name_parameter(self):
-        data = b"""\
---1234
-Content-Disposition: form-data; filename="ab.txt"
-
-Foo
---1234--""".replace(b"\n", b"\r\n")
-        args = {}
-        files = {}
-        with ExpectLog(gen_log, "multipart/form-data value missing name"):
-            parse_multipart_form_data(b"1234", data, args, files)
-        self.assertEqual(files, {})
-
-    def test_data_after_final_boundary(self):
-        # The spec requires that data after the final boundary be ignored.
-        # http://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
-        # In practice, some libraries include an extra CRLF after the boundary.
-        data = b"""\
---1234
-Content-Disposition: form-data; name="files"; filename="ab.txt"
-
-Foo
---1234--
-""".replace(b"\n", b"\r\n")
-        args = {}
-        files = {}
-        parse_multipart_form_data(b"1234", data, args, files)
-        file = files["files"][0]
-        self.assertEqual(file["filename"], "ab.txt")
-        self.assertEqual(file["body"], b"Foo")
-
-
-class HTTPHeadersTest(unittest.TestCase):
-    def test_multi_line(self):
-        # Lines beginning with whitespace are appended to the previous line
-        # with any leading whitespace replaced by a single space.
-        # Note that while multi-line headers are a part of the HTTP spec,
-        # their use is strongly discouraged.
-        data = """\
-Foo: bar
- baz
-Asdf: qwer
-\tzxcv
-Foo: even
-     more
-     lines
-""".replace("\n", "\r\n")
-        headers = HTTPHeaders.parse(data)
-        self.assertEqual(headers["asdf"], "qwer zxcv")
-        self.assertEqual(headers.get_list("asdf"), ["qwer zxcv"])
-        self.assertEqual(headers["Foo"], "bar baz,even more lines")
-        self.assertEqual(headers.get_list("foo"), ["bar baz", "even more lines"])
-        self.assertEqual(sorted(list(headers.get_all())),
-                         [("Asdf", "qwer zxcv"),
-                          ("Foo", "bar baz"),
-                          ("Foo", "even more lines")])
-
-    def test_unicode_newlines(self):
-        # Ensure that only \r\n is recognized as a header separator, and not
-        # the other newline-like unicode characters.
-        # Characters that are likely to be problematic can be found in
-        # http://unicode.org/standard/reports/tr13/tr13-5.html
-        # and cpython's unicodeobject.c (which defines the implementation
-        # of unicode_type.splitlines(), and uses a different list than TR13).
-        newlines = [
-            u'\u001b',  # VERTICAL TAB
-            u'\u001c',  # FILE SEPARATOR
-            u'\u001d',  # GROUP SEPARATOR
-            u'\u001e',  # RECORD SEPARATOR
-            u'\u0085',  # NEXT LINE
-            u'\u2028',  # LINE SEPARATOR
-            u'\u2029',  # PARAGRAPH SEPARATOR
-        ]
-        for newline in newlines:
-            # Try the utf8 and latin1 representations of each newline
-            for encoding in ['utf8', 'latin1']:
-                try:
-                    try:
-                        encoded = newline.encode(encoding)
-                    except UnicodeEncodeError:
-                        # Some chars cannot be represented in latin1
-                        continue
-                    data = b'Cookie: foo=' + encoded + b'bar'
-                    # parse() wants a native_str, so decode through latin1
-                    # in the same way the real parser does.
-                    headers = HTTPHeaders.parse(
-                        native_str(data.decode('latin1')))
-                    expected = [('Cookie', 'foo=' +
-                                 native_str(encoded.decode('latin1')) + 'bar')]
-                    self.assertEqual(
-                        expected, list(headers.get_all()))
-                except Exception:
-                    gen_log.warning("failed while trying %r in %s",
-                                    newline, encoding)
-                    raise
-
-    def test_optional_cr(self):
-        # Both CRLF and LF should be accepted as separators. CR should not be
-        # part of the data when followed by LF, but it is a normal char
-        # otherwise (or should bare CR be an error?)
-        headers = HTTPHeaders.parse(
-            'CRLF: crlf\r\nLF: lf\nCR: cr\rMore: more\r\n')
-        self.assertEqual(sorted(headers.get_all()),
-                         [('Cr', 'cr\rMore: more'),
-                          ('Crlf', 'crlf'),
-                          ('Lf', 'lf'),
-                          ])
-
-    def test_copy(self):
-        all_pairs = [('A', '1'), ('A', '2'), ('B', 'c')]
-        h1 = HTTPHeaders()
-        for k, v in all_pairs:
-            h1.add(k, v)
-        h2 = h1.copy()
-        h3 = copy.copy(h1)
-        h4 = copy.deepcopy(h1)
-        for headers in [h1, h2, h3, h4]:
-            # All the copies are identical, no matter how they were
-            # constructed.
-            self.assertEqual(list(sorted(headers.get_all())), all_pairs)
-        for headers in [h2, h3, h4]:
-            # Neither the dict nor its member lists are reused.
-            self.assertIsNot(headers, h1)
-            self.assertIsNot(headers.get_list('A'), h1.get_list('A'))
-
-    def test_pickle_roundtrip(self):
-        headers = HTTPHeaders()
-        headers.add('Set-Cookie', 'a=b')
-        headers.add('Set-Cookie', 'c=d')
-        headers.add('Content-Type', 'text/html')
-        pickled = pickle.dumps(headers)
-        unpickled = pickle.loads(pickled)
-        self.assertEqual(sorted(headers.get_all()), sorted(unpickled.get_all()))
-        self.assertEqual(sorted(headers.items()), sorted(unpickled.items()))
-
-    def test_setdefault(self):
-        headers = HTTPHeaders()
-        headers['foo'] = 'bar'
-        # If a value is present, setdefault returns it without changes.
-        self.assertEqual(headers.setdefault('foo', 'baz'), 'bar')
-        self.assertEqual(headers['foo'], 'bar')
-        # If a value is not present, setdefault sets it for future use.
-        self.assertEqual(headers.setdefault('quux', 'xyzzy'), 'xyzzy')
-        self.assertEqual(headers['quux'], 'xyzzy')
-        self.assertEqual(sorted(headers.get_all()), [('Foo', 'bar'), ('Quux', 'xyzzy')])
-
-    def test_string(self):
-        headers = HTTPHeaders()
-        headers.add("Foo", "1")
-        headers.add("Foo", "2")
-        headers.add("Foo", "3")
-        headers2 = HTTPHeaders.parse(str(headers))
-        self.assertEqual(headers, headers2)
-
-
-class FormatTimestampTest(unittest.TestCase):
-    # Make sure that all the input types are supported.
-    TIMESTAMP = 1359312200.503611
-    EXPECTED = 'Sun, 27 Jan 2013 18:43:20 GMT'
-
-    def check(self, value):
-        self.assertEqual(format_timestamp(value), self.EXPECTED)
-
-    def test_unix_time_float(self):
-        self.check(self.TIMESTAMP)
-
-    def test_unix_time_int(self):
-        self.check(int(self.TIMESTAMP))
-
-    def test_struct_time(self):
-        self.check(time.gmtime(self.TIMESTAMP))
-
-    def test_time_tuple(self):
-        tup = tuple(time.gmtime(self.TIMESTAMP))
-        self.assertEqual(9, len(tup))
-        self.check(tup)
-
-    def test_datetime(self):
-        self.check(datetime.datetime.utcfromtimestamp(self.TIMESTAMP))
-
-
-# HTTPServerRequest is mainly tested incidentally to the server itself,
-# but this tests the parts of the class that can be tested in isolation.
-class HTTPServerRequestTest(unittest.TestCase):
-    def test_default_constructor(self):
-        # All parameters are formally optional, but uri is required
-        # (and has been for some time).  This test ensures that no
-        # more required parameters slip in.
-        HTTPServerRequest(uri='/')
-
-    def test_body_is_a_byte_string(self):
-        request = HTTPServerRequest(uri='/')
-        self.assertIsInstance(request.body, bytes)
-
-
-class ParseRequestStartLineTest(unittest.TestCase):
-    METHOD = "GET"
-    PATH = "/foo"
-    VERSION = "HTTP/1.1"
-
-    def test_parse_request_start_line(self):
-        start_line = " ".join([self.METHOD, self.PATH, self.VERSION])
-        parsed_start_line = parse_request_start_line(start_line)
-        self.assertEqual(parsed_start_line.method, self.METHOD)
-        self.assertEqual(parsed_start_line.path, self.PATH)
-        self.assertEqual(parsed_start_line.version, self.VERSION)
-
-
-class ParseCookieTest(unittest.TestCase):
-    # These tests copied from Django:
-    # https://github.com/django/django/pull/6277/commits/da810901ada1cae9fc1f018f879f11a7fb467b28
-    def test_python_cookies(self):
-        """
-        Test cases copied from Python's Lib/test/test_http_cookies.py
-        """
-        self.assertEqual(parse_cookie('chips=ahoy; vienna=finger'), {'chips': 'ahoy', 'vienna': 'finger'})
-        # Here parse_cookie() differs from Python's cookie parsing in that it
-        # treats all semicolons as delimiters, even within quotes.
-        self.assertEqual(
-            parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'),
-            {'keebler': '"E=mc2', 'L': '\\"Loves\\"', 'fudge': '\\012', '': '"'}
-        )
-        # Illegal cookies that have an '=' char in an unquoted value.
-        self.assertEqual(parse_cookie('keebler=E=mc2'), {'keebler': 'E=mc2'})
-        # Cookies with ':' character in their name.
-        self.assertEqual(parse_cookie('key:term=value:term'), {'key:term': 'value:term'})
-        # Cookies with '[' and ']'.
-        self.assertEqual(parse_cookie('a=b; c=[; d=r; f=h'), {'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'})
-
-    def test_cookie_edgecases(self):
-        # Cookies that RFC6265 allows.
-        self.assertEqual(parse_cookie('a=b; Domain=example.com'), {'a': 'b', 'Domain': 'example.com'})
-        # parse_cookie() has historically kept only the last cookie with the
-        # same name.
-        self.assertEqual(parse_cookie('a=b; h=i; a=c'), {'a': 'c', 'h': 'i'})
-
-    def test_invalid_cookies(self):
-        """
-        Cookie strings that go against RFC6265 but browsers will send if set
-        via document.cookie.
-        """
-        # Chunks without an equals sign appear as unnamed values per
-        # https://bugzilla.mozilla.org/show_bug.cgi?id=169091
-        self.assertIn('django_language', parse_cookie('abc=def; unnamed; django_language=en').keys())
-        # Even a double quote may be an unnamed value.
-        self.assertEqual(parse_cookie('a=b; "; c=d'), {'a': 'b', '': '"', 'c': 'd'})
-        # Spaces in names and values, and an equals sign in values.
-        self.assertEqual(parse_cookie('a b c=d e = f; gh=i'), {'a b c': 'd e = f', 'gh': 'i'})
-        # More characters the spec forbids.
-        self.assertEqual(parse_cookie('a   b,c<>@:/[]?{}=d  "  =e,f g'), {'a   b,c<>@:/[]?{}': 'd  "  =e,f g'})
-        # Unicode characters. The spec only allows ASCII.
-        self.assertEqual(parse_cookie('saint=André Bessette'), {'saint': native_str('André Bessette')})
-        # Browsers don't send extra whitespace or semicolons in Cookie headers,
-        # but parse_cookie() should parse whitespace the same way
-        # document.cookie parses whitespace.
-        self.assertEqual(parse_cookie('  =  b  ;  ;  =  ;   c  =  ;  '), {'': 'b', 'c': ''})
diff --git a/lib/tornado/test/import_test.py b/lib/tornado/test/import_test.py
deleted file mode 100644
index 88d02e027039dfcbe32813dbf7e9e2fcb3614a11..0000000000000000000000000000000000000000
--- a/lib/tornado/test/import_test.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# flake8: noqa
-from __future__ import absolute_import, division, print_function
-from tornado.test.util import unittest
-
-
-class ImportTest(unittest.TestCase):
-    def test_import_everything(self):
-        # Some of our modules are not otherwise tested.  Import them
-        # all (unless they have external dependencies) here to at
-        # least ensure that there are no syntax errors.
-        import tornado.auth
-        import tornado.autoreload
-        import tornado.concurrent
-        import tornado.escape
-        import tornado.gen
-        import tornado.http1connection
-        import tornado.httpclient
-        import tornado.httpserver
-        import tornado.httputil
-        import tornado.ioloop
-        import tornado.iostream
-        import tornado.locale
-        import tornado.log
-        import tornado.netutil
-        import tornado.options
-        import tornado.process
-        import tornado.simple_httpclient
-        import tornado.stack_context
-        import tornado.tcpserver
-        import tornado.tcpclient
-        import tornado.template
-        import tornado.testing
-        import tornado.util
-        import tornado.web
-        import tornado.websocket
-        import tornado.wsgi
-
-    # for modules with dependencies, if those dependencies can be loaded,
-    # load them too.
-
-    def test_import_pycurl(self):
-        try:
-            import pycurl  # type: ignore
-        except ImportError:
-            pass
-        else:
-            import tornado.curl_httpclient
diff --git a/lib/tornado/test/ioloop_test.py b/lib/tornado/test/ioloop_test.py
deleted file mode 100644
index 1601813f44c5285fc66af16aeb05b440cbc6d935..0000000000000000000000000000000000000000
--- a/lib/tornado/test/ioloop_test.py
+++ /dev/null
@@ -1,681 +0,0 @@
-#!/usr/bin/env python
-
-
-from __future__ import absolute_import, division, print_function
-import contextlib
-import datetime
-import functools
-import socket
-import sys
-import threading
-import time
-import types
-
-from tornado import gen
-from tornado.ioloop import IOLoop, TimeoutError, PollIOLoop, PeriodicCallback
-from tornado.log import app_log
-from tornado.platform.select import _Select
-from tornado.stack_context import ExceptionStackContext, StackContext, wrap, NullContext
-from tornado.testing import AsyncTestCase, bind_unused_port, ExpectLog
-from tornado.test.util import unittest, skipIfNonUnix, skipOnTravis, skipBefore35, exec_test
-
-try:
-    from concurrent import futures
-except ImportError:
-    futures = None
-
-
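-# A select()-based poller driven by a fake clock: poll() never blocks.  It
-# returns any ready events immediately, and otherwise advances the simulated
-# time by the requested timeout so pending timeouts fire without real delays.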
-class FakeTimeSelect(_Select):
-    def __init__(self):
-        self._time = 1000
-        super(FakeTimeSelect, self).__init__()
-
-    def time(self):
-        return self._time
-
-    def sleep(self, t):
-        self._time += t
-
-    def poll(self, timeout):
-        events = super(FakeTimeSelect, self).poll(0)
-        if events:
-            return events
-        self._time += timeout
-        return []
-
-
-class FakeTimeIOLoop(PollIOLoop):
-    """IOLoop implementation with a fake and deterministic clock.
-
-    The clock advances as needed to trigger timeouts immediately.
-    For use when testing code that involves the passage of time
-    and no external dependencies.
-    """
-    def initialize(self):
-        self.fts = FakeTimeSelect()
-        super(FakeTimeIOLoop, self).initialize(impl=self.fts,
-                                               time_func=self.fts.time)
-
-    def sleep(self, t):
-        """Simulate a blocking sleep by advancing the clock."""
-        self.fts.sleep(t)
-
-
-class TestIOLoop(AsyncTestCase):
-    def test_add_callback_return_sequence(self):
-        # A callback returning {} or [] shouldn't spin the CPU; see Issue #1803.
-        self.calls = 0
-
-        loop = self.io_loop
-        test = self
-        old_add_callback = loop.add_callback
-
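-        # Count every add_callback call by wrapping the bound method; a loop
-        # that kept re-scheduling itself for the {}/[] return values would
-        # rack up far more than the ten calls allowed below.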
-        def add_callback(self, callback, *args, **kwargs):
-            test.calls += 1
-            old_add_callback(callback, *args, **kwargs)
-
-        loop.add_callback = types.MethodType(add_callback, loop)
-        loop.add_callback(lambda: {})
-        loop.add_callback(lambda: [])
-        loop.add_timeout(datetime.timedelta(milliseconds=50), loop.stop)
-        loop.start()
-        self.assertLess(self.calls, 10)
-
-    @skipOnTravis
-    def test_add_callback_wakeup(self):
-        # Make sure that add_callback from inside a running IOLoop
-        # wakes up the IOLoop immediately instead of waiting for a timeout.
-        def callback():
-            self.called = True
-            self.stop()
-
-        def schedule_callback():
-            self.called = False
-            self.io_loop.add_callback(callback)
-            # Store away the time so we can check if we woke up immediately
-            self.start_time = time.time()
-        self.io_loop.add_timeout(self.io_loop.time(), schedule_callback)
-        self.wait()
-        self.assertAlmostEqual(time.time(), self.start_time, places=2)
-        self.assertTrue(self.called)
-
-    @skipOnTravis
-    def test_add_callback_wakeup_other_thread(self):
-        def target():
-            # sleep a bit to let the ioloop go into its poll loop
-            time.sleep(0.01)
-            self.stop_time = time.time()
-            self.io_loop.add_callback(self.stop)
-        thread = threading.Thread(target=target)
-        self.io_loop.add_callback(thread.start)
-        self.wait()
-        delta = time.time() - self.stop_time
-        self.assertLess(delta, 0.1)
-        thread.join()
-
-    def test_add_timeout_timedelta(self):
-        self.io_loop.add_timeout(datetime.timedelta(microseconds=1), self.stop)
-        self.wait()
-
-    def test_multiple_add(self):
-        sock, port = bind_unused_port()
-        try:
-            self.io_loop.add_handler(sock.fileno(), lambda fd, events: None,
-                                     IOLoop.READ)
-            # Attempting to add the same handler twice fails
-            # (with a platform-dependent exception)
-            self.assertRaises(Exception, self.io_loop.add_handler,
-                              sock.fileno(), lambda fd, events: None,
-                              IOLoop.READ)
-        finally:
-            self.io_loop.remove_handler(sock.fileno())
-            sock.close()
-
-    def test_remove_without_add(self):
-        # remove_handler should not throw an exception if called on an fd
-        # that was never added.
-        sock, port = bind_unused_port()
-        try:
-            self.io_loop.remove_handler(sock.fileno())
-        finally:
-            sock.close()
-
-    def test_add_callback_from_signal(self):
-        # cheat a little bit and just run this normally, since we can't
-        # easily simulate the races that happen with real signal handlers
-        self.io_loop.add_callback_from_signal(self.stop)
-        self.wait()
-
-    def test_add_callback_from_signal_other_thread(self):
-        # Very crude test, just to make sure that we cover this case.
-        # This also happens to be the first test where we run an IOLoop in
-        # a non-main thread.
-        other_ioloop = IOLoop()
-        thread = threading.Thread(target=other_ioloop.start)
-        thread.start()
-        other_ioloop.add_callback_from_signal(other_ioloop.stop)
-        thread.join()
-        other_ioloop.close()
-
-    def test_add_callback_while_closing(self):
-        # Issue #635: add_callback() should raise a clean exception
-        # if called while another thread is closing the IOLoop.
-        if IOLoop.configured_class().__name__.endswith('AsyncIOLoop'):
-            raise unittest.SkipTest("AsyncIOMainLoop shutdown not thread safe")
-        closing = threading.Event()
-
-        def target():
-            other_ioloop.add_callback(other_ioloop.stop)
-            other_ioloop.start()
-            closing.set()
-            other_ioloop.close(all_fds=True)
-        other_ioloop = IOLoop()
-        thread = threading.Thread(target=target)
-        thread.start()
-        closing.wait()
-        for i in range(1000):
-            try:
-                other_ioloop.add_callback(lambda: None)
-            except RuntimeError as e:
-                self.assertEqual("IOLoop is closing", str(e))
-                break
-
-    def test_handle_callback_exception(self):
-        # IOLoop.handle_callback_exception can be overridden to catch
-        # exceptions in callbacks.
-        def handle_callback_exception(callback):
-            self.assertIs(sys.exc_info()[0], ZeroDivisionError)
-            self.stop()
-        self.io_loop.handle_callback_exception = handle_callback_exception
-        with NullContext():
-            # remove the test StackContext that would see this uncaught
-            # exception as a test failure.
-            self.io_loop.add_callback(lambda: 1 / 0)
-        self.wait()
-
-    @skipIfNonUnix  # just because socketpair is so convenient
-    def test_read_while_writeable(self):
-        # Ensure that write events don't come in while we're waiting for
-        # a read and haven't asked for writeability. (the reverse is
-        # difficult to test for)
-        client, server = socket.socketpair()
-        try:
-            def handler(fd, events):
-                self.assertEqual(events, IOLoop.READ)
-                self.stop()
-            self.io_loop.add_handler(client.fileno(), handler, IOLoop.READ)
-            self.io_loop.add_timeout(self.io_loop.time() + 0.01,
-                                     functools.partial(server.send, b'asdf'))
-            self.wait()
-            self.io_loop.remove_handler(client.fileno())
-        finally:
-            client.close()
-            server.close()
-
-    def test_remove_timeout_after_fire(self):
-        # It is not an error to call remove_timeout after it has run.
-        handle = self.io_loop.add_timeout(self.io_loop.time(), self.stop)
-        self.wait()
-        self.io_loop.remove_timeout(handle)
-
-    def test_remove_timeout_cleanup(self):
-        # Add and remove enough callbacks to trigger cleanup.
-        # Not a very thorough test, but it ensures that the cleanup code
-        # gets executed and doesn't blow up.  This test is only really useful
-        # on PollIOLoop subclasses, but it should run silently on any
-        # implementation.
-        for i in range(2000):
-            timeout = self.io_loop.add_timeout(self.io_loop.time() + 3600,
-                                               lambda: None)
-            self.io_loop.remove_timeout(timeout)
-        # HACK: wait two IOLoop iterations for the GC to happen.
-        self.io_loop.add_callback(lambda: self.io_loop.add_callback(self.stop))
-        self.wait()
-
-    def test_remove_timeout_from_timeout(self):
-        calls = [False, False]
-
-        # Schedule several callbacks and wait for them all to come due at once.
-        # t2 should be cancelled by t1, even though it is already scheduled to
-        # be run before the ioloop even looks at it.
-        now = self.io_loop.time()
-
-        def t1():
-            calls[0] = True
-            self.io_loop.remove_timeout(t2_handle)
-        self.io_loop.add_timeout(now + 0.01, t1)
-
-        def t2():
-            calls[1] = True
-        t2_handle = self.io_loop.add_timeout(now + 0.02, t2)
-        self.io_loop.add_timeout(now + 0.03, self.stop)
-        time.sleep(0.03)
-        self.wait()
-        self.assertEqual(calls, [True, False])
-
-    def test_timeout_with_arguments(self):
-        # This tests that all the timeout methods pass through *args correctly.
-        results = []
-        self.io_loop.add_timeout(self.io_loop.time(), results.append, 1)
-        self.io_loop.add_timeout(datetime.timedelta(seconds=0),
-                                 results.append, 2)
-        self.io_loop.call_at(self.io_loop.time(), results.append, 3)
-        self.io_loop.call_later(0, results.append, 4)
-        self.io_loop.call_later(0, self.stop)
-        self.wait()
-        self.assertEqual(results, [1, 2, 3, 4])
-
-    def test_add_timeout_return(self):
-        # All the timeout methods return non-None handles that can be
-        # passed to remove_timeout.
-        handle = self.io_loop.add_timeout(self.io_loop.time(), lambda: None)
-        self.assertFalse(handle is None)
-        self.io_loop.remove_timeout(handle)
-
-    def test_call_at_return(self):
-        handle = self.io_loop.call_at(self.io_loop.time(), lambda: None)
-        self.assertFalse(handle is None)
-        self.io_loop.remove_timeout(handle)
-
-    def test_call_later_return(self):
-        handle = self.io_loop.call_later(0, lambda: None)
-        self.assertFalse(handle is None)
-        self.io_loop.remove_timeout(handle)
-
-    def test_close_file_object(self):
-        """When a file object is used instead of a numeric file descriptor,
-        the object should be closed (by IOLoop.close(all_fds=True)),
-        not just the fd.
-        """
-        # Use a socket since they are supported by IOLoop on all platforms.
-        # Unfortunately, sockets don't support the .closed attribute for
-        # inspecting their close status, so we must use a wrapper.
-        class SocketWrapper(object):
-            def __init__(self, sockobj):
-                self.sockobj = sockobj
-                self.closed = False
-
-            def fileno(self):
-                return self.sockobj.fileno()
-
-            def close(self):
-                self.closed = True
-                self.sockobj.close()
-        sockobj, port = bind_unused_port()
-        socket_wrapper = SocketWrapper(sockobj)
-        io_loop = IOLoop()
-        io_loop.add_handler(socket_wrapper, lambda fd, events: None,
-                            IOLoop.READ)
-        io_loop.close(all_fds=True)
-        self.assertTrue(socket_wrapper.closed)
-
-    def test_handler_callback_file_object(self):
-        """The handler callback receives the same fd object it passed in."""
-        server_sock, port = bind_unused_port()
-        fds = []
-
-        def handle_connection(fd, events):
-            fds.append(fd)
-            conn, addr = server_sock.accept()
-            conn.close()
-            self.stop()
-        self.io_loop.add_handler(server_sock, handle_connection, IOLoop.READ)
-        with contextlib.closing(socket.socket()) as client_sock:
-            client_sock.connect(('127.0.0.1', port))
-            self.wait()
-        self.io_loop.remove_handler(server_sock)
-        self.io_loop.add_handler(server_sock.fileno(), handle_connection,
-                                 IOLoop.READ)
-        with contextlib.closing(socket.socket()) as client_sock:
-            client_sock.connect(('127.0.0.1', port))
-            self.wait()
-        self.assertIs(fds[0], server_sock)
-        self.assertEqual(fds[1], server_sock.fileno())
-        self.io_loop.remove_handler(server_sock.fileno())
-        server_sock.close()
-
-    def test_mixed_fd_fileobj(self):
-        server_sock, port = bind_unused_port()
-
-        def f(fd, events):
-            pass
-        self.io_loop.add_handler(server_sock, f, IOLoop.READ)
-        with self.assertRaises(Exception):
-            # The exact error is unspecified - some implementations use
-            # IOError, others use ValueError.
-            self.io_loop.add_handler(server_sock.fileno(), f, IOLoop.READ)
-        self.io_loop.remove_handler(server_sock.fileno())
-        server_sock.close()
-
-    def test_reentrant(self):
-        """Calling start() twice should raise an error, not deadlock."""
-        returned_from_start = [False]
-        got_exception = [False]
-
-        def callback():
-            try:
-                self.io_loop.start()
-                returned_from_start[0] = True
-            except Exception:
-                got_exception[0] = True
-            self.stop()
-        self.io_loop.add_callback(callback)
-        self.wait()
-        self.assertTrue(got_exception[0])
-        self.assertFalse(returned_from_start[0])
-
-    def test_exception_logging(self):
-        """Uncaught exceptions get logged by the IOLoop."""
-        # Use a NullContext to keep the exception from being caught by
-        # AsyncTestCase.
-        with NullContext():
-            self.io_loop.add_callback(lambda: 1 / 0)
-            self.io_loop.add_callback(self.stop)
-            with ExpectLog(app_log, "Exception in callback"):
-                self.wait()
-
-    def test_exception_logging_future(self):
-        """The IOLoop examines exceptions from Futures and logs them."""
-        with NullContext():
-            @gen.coroutine
-            def callback():
-                self.io_loop.add_callback(self.stop)
-                1 / 0
-            self.io_loop.add_callback(callback)
-            with ExpectLog(app_log, "Exception in callback"):
-                self.wait()
-
-    @skipBefore35
-    def test_exception_logging_native_coro(self):
-        """The IOLoop examines exceptions from awaitables and logs them."""
-        namespace = exec_test(globals(), locals(), """
-        async def callback():
-            self.io_loop.add_callback(self.stop)
-            1 / 0
-        """)
-        with NullContext():
-            self.io_loop.add_callback(namespace["callback"])
-            with ExpectLog(app_log, "Exception in callback"):
-                self.wait()
-
-    def test_spawn_callback(self):
-        # An added callback runs in the test's stack_context, so will be
-        # re-raised in wait().
-        self.io_loop.add_callback(lambda: 1 / 0)
-        with self.assertRaises(ZeroDivisionError):
-            self.wait()
-        # A spawned callback is run directly on the IOLoop, so it will be
-        # logged without stopping the test.
-        self.io_loop.spawn_callback(lambda: 1 / 0)
-        self.io_loop.add_callback(self.stop)
-        with ExpectLog(app_log, "Exception in callback"):
-            self.wait()
-
-    @skipIfNonUnix
-    def test_remove_handler_from_handler(self):
-        # Create two sockets with simultaneous read events.
-        client, server = socket.socketpair()
-        try:
-            client.send(b'abc')
-            server.send(b'abc')
-
-            # After reading from one fd, remove the other from the IOLoop.
-            chunks = []
-
-            def handle_read(fd, events):
-                chunks.append(fd.recv(1024))
-                if fd is client:
-                    self.io_loop.remove_handler(server)
-                else:
-                    self.io_loop.remove_handler(client)
-            self.io_loop.add_handler(client, handle_read, self.io_loop.READ)
-            self.io_loop.add_handler(server, handle_read, self.io_loop.READ)
-            self.io_loop.call_later(0.1, self.stop)
-            self.wait()
-
-            # Only one fd was read; the other was cleanly removed.
-            self.assertEqual(chunks, [b'abc'])
-        finally:
-            client.close()
-            server.close()
-
-
-# Deliberately not a subclass of AsyncTestCase so the IOLoop isn't
-# automatically set as current.
-class TestIOLoopCurrent(unittest.TestCase):
-    def setUp(self):
-        self.io_loop = None
-        IOLoop.clear_current()
-
-    def tearDown(self):
-        if self.io_loop is not None:
-            self.io_loop.close()
-
-    def test_default_current(self):
-        self.io_loop = IOLoop()
-        # The first IOLoop with default arguments is made current.
-        self.assertIs(self.io_loop, IOLoop.current())
-        # A second IOLoop can be created but is not made current.
-        io_loop2 = IOLoop()
-        self.assertIs(self.io_loop, IOLoop.current())
-        io_loop2.close()
-
-    def test_non_current(self):
-        self.io_loop = IOLoop(make_current=False)
-        # The new IOLoop is not initially made current.
-        self.assertIsNone(IOLoop.current(instance=False))
-        # Starting the IOLoop makes it current, and stopping the loop
-        # makes it non-current. This process is repeatable.
-        for i in range(3):
-            def f():
-                self.current_io_loop = IOLoop.current()
-                self.io_loop.stop()
-            self.io_loop.add_callback(f)
-            self.io_loop.start()
-            self.assertIs(self.current_io_loop, self.io_loop)
-            # Now that the loop is stopped, it is no longer current.
-            self.assertIsNone(IOLoop.current(instance=False))
-
-    def test_force_current(self):
-        self.io_loop = IOLoop(make_current=True)
-        self.assertIs(self.io_loop, IOLoop.current())
-        with self.assertRaises(RuntimeError):
-            # A second make_current=True construction cannot succeed.
-            IOLoop(make_current=True)
-        # current() was not affected by the failed construction.
-        self.assertIs(self.io_loop, IOLoop.current())
-
-
-class TestIOLoopAddCallback(AsyncTestCase):
-    def setUp(self):
-        super(TestIOLoopAddCallback, self).setUp()
-        self.active_contexts = []
-
-    def add_callback(self, callback, *args, **kwargs):
-        self.io_loop.add_callback(callback, *args, **kwargs)
-
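-    # Record a named context while it is active and assert LIFO exit order,
-    # so the callbacks below can check which StackContexts were in effect
-    # when they ran.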
-    @contextlib.contextmanager
-    def context(self, name):
-        self.active_contexts.append(name)
-        yield
-        self.assertEqual(self.active_contexts.pop(), name)
-
-    def test_pre_wrap(self):
-        # A pre-wrapped callback is run in the context in which it was
-        # wrapped, not when it was added to the IOLoop.
-        def f1():
-            self.assertIn('c1', self.active_contexts)
-            self.assertNotIn('c2', self.active_contexts)
-            self.stop()
-
-        with StackContext(functools.partial(self.context, 'c1')):
-            wrapped = wrap(f1)
-
-        with StackContext(functools.partial(self.context, 'c2')):
-            self.add_callback(wrapped)
-
-        self.wait()
-
-    def test_pre_wrap_with_args(self):
-        # Same as test_pre_wrap, but the function takes arguments.
-        # Implementation note: The function must not be wrapped in a
-        # functools.partial until after it has been passed through
-        # stack_context.wrap
-        def f1(foo, bar):
-            self.assertIn('c1', self.active_contexts)
-            self.assertNotIn('c2', self.active_contexts)
-            self.stop((foo, bar))
-
-        with StackContext(functools.partial(self.context, 'c1')):
-            wrapped = wrap(f1)
-
-        with StackContext(functools.partial(self.context, 'c2')):
-            self.add_callback(wrapped, 1, bar=2)
-
-        result = self.wait()
-        self.assertEqual(result, (1, 2))
-
-
-class TestIOLoopAddCallbackFromSignal(TestIOLoopAddCallback):
-    # Repeat the add_callback tests using add_callback_from_signal
-    def add_callback(self, callback, *args, **kwargs):
-        self.io_loop.add_callback_from_signal(callback, *args, **kwargs)
-
-
-@unittest.skipIf(futures is None, "futures module not present")
-class TestIOLoopFutures(AsyncTestCase):
-    def test_add_future_threads(self):
-        with futures.ThreadPoolExecutor(1) as pool:
-            self.io_loop.add_future(pool.submit(lambda: None),
-                                    lambda future: self.stop(future))
-            future = self.wait()
-            self.assertTrue(future.done())
-            self.assertTrue(future.result() is None)
-
-    def test_add_future_stack_context(self):
-        ready = threading.Event()
-
-        def task():
-            # we must wait for the ioloop callback to be scheduled before
-            # the task completes to ensure that add_future adds the callback
-            # asynchronously (which is the scenario in which capturing
-            # the stack_context matters)
-            ready.wait(1)
-            assert ready.isSet(), "timed out"
-            raise Exception("worker")
-
-        def callback(future):
-            self.future = future
-            raise Exception("callback")
-
-        def handle_exception(typ, value, traceback):
-            self.exception = value
-            self.stop()
-            return True
-
-        # stack_context propagates to the ioloop callback, but the worker
-        # task just has its exceptions caught and saved in the Future.
-        with futures.ThreadPoolExecutor(1) as pool:
-            with ExceptionStackContext(handle_exception):
-                self.io_loop.add_future(pool.submit(task), callback)
-            ready.set()
-        self.wait()
-
-        self.assertEqual(self.exception.args[0], "callback")
-        self.assertEqual(self.future.exception().args[0], "worker")
-
-
-class TestIOLoopRunSync(unittest.TestCase):
-    def setUp(self):
-        self.io_loop = IOLoop()
-
-    def tearDown(self):
-        self.io_loop.close()
-
-    def test_sync_result(self):
-        with self.assertRaises(gen.BadYieldError):
-            self.io_loop.run_sync(lambda: 42)
-
-    def test_sync_exception(self):
-        with self.assertRaises(ZeroDivisionError):
-            self.io_loop.run_sync(lambda: 1 / 0)
-
-    def test_async_result(self):
-        @gen.coroutine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            raise gen.Return(42)
-        self.assertEqual(self.io_loop.run_sync(f), 42)
-
-    def test_async_exception(self):
-        @gen.coroutine
-        def f():
-            yield gen.Task(self.io_loop.add_callback)
-            1 / 0
-        with self.assertRaises(ZeroDivisionError):
-            self.io_loop.run_sync(f)
-
-    def test_current(self):
-        def f():
-            self.assertIs(IOLoop.current(), self.io_loop)
-        self.io_loop.run_sync(f)
-
-    def test_timeout(self):
-        @gen.coroutine
-        def f():
-            yield gen.Task(self.io_loop.add_timeout, self.io_loop.time() + 1)
-        self.assertRaises(TimeoutError, self.io_loop.run_sync, f, timeout=0.01)
-
-    @skipBefore35
-    def test_native_coroutine(self):
-        namespace = exec_test(globals(), locals(), """
-        async def f():
-            await gen.Task(self.io_loop.add_callback)
-        """)
-        self.io_loop.run_sync(namespace['f'])
-
-
-class TestPeriodicCallback(unittest.TestCase):
-    def setUp(self):
-        self.io_loop = FakeTimeIOLoop()
-        self.io_loop.make_current()
-
-    def tearDown(self):
-        self.io_loop.close()
-
-    def test_basic(self):
-        calls = []
-
-        def cb():
-            calls.append(self.io_loop.time())
-        pc = PeriodicCallback(cb, 10000)
-        pc.start()
-        self.io_loop.call_later(50, self.io_loop.stop)
-        self.io_loop.start()
-        self.assertEqual(calls, [1010, 1020, 1030, 1040, 1050])
-
-    def test_overrun(self):
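-        # Each callback "sleeps" by advancing the fake clock, simulating an
-        # invocation that overruns its 10-second period.  The expected
-        # timestamps below show that late invocations are pushed to the next
-        # free slot instead of firing back-to-back.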
-        sleep_durations = [9, 9, 10, 11, 20, 20, 35, 35, 0, 0]
-        expected = [
-            1010, 1020, 1030,  # first 3 calls on schedule
-            1050, 1070,  # next 2 delayed one cycle
-            1100, 1130,  # next 2 delayed 2 cycles
-            1170, 1210,  # next 2 delayed 3 cycles
-            1220, 1230,  # then back on schedule.
-        ]
-        calls = []
-
-        def cb():
-            calls.append(self.io_loop.time())
-            if not sleep_durations:
-                self.io_loop.stop()
-                return
-            self.io_loop.sleep(sleep_durations.pop(0))
-        pc = PeriodicCallback(cb, 10000)
-        pc.start()
-        self.io_loop.start()
-        self.assertEqual(calls, expected)
-
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/lib/tornado/test/iostream_test.py b/lib/tornado/test/iostream_test.py
deleted file mode 100644
index 91bc7bf6add6bcd948834a86053ca225ed918e97..0000000000000000000000000000000000000000
--- a/lib/tornado/test/iostream_test.py
+++ /dev/null
@@ -1,1141 +0,0 @@
-from __future__ import absolute_import, division, print_function
-from tornado.concurrent import Future
-from tornado import gen
-from tornado import netutil
-from tornado.iostream import IOStream, SSLIOStream, PipeIOStream, StreamClosedError
-from tornado.httputil import HTTPHeaders
-from tornado.log import gen_log, app_log
-from tornado.netutil import ssl_wrap_socket
-from tornado.stack_context import NullContext
-from tornado.tcpserver import TCPServer
-from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, bind_unused_port, ExpectLog, gen_test
-from tornado.test.util import unittest, skipIfNonUnix, refusing_port
-from tornado.web import RequestHandler, Application
-import errno
-import logging
-import os
-import platform
-import socket
-import ssl
-import sys
-
-try:
-    from unittest import mock  # type: ignore
-except ImportError:
-    try:
-        import mock  # type: ignore
-    except ImportError:
-        mock = None
-
-
-def _server_ssl_options():
-    return dict(
-        certfile=os.path.join(os.path.dirname(__file__), 'test.crt'),
-        keyfile=os.path.join(os.path.dirname(__file__), 'test.key'),
-    )
-
-
-class HelloHandler(RequestHandler):
-    def get(self):
-        self.write("Hello")
-
-
-class TestIOStreamWebMixin(object):
-    def _make_client_iostream(self):
-        raise NotImplementedError()
-
-    def get_app(self):
-        return Application([('/', HelloHandler)])
-
-    def test_connection_closed(self):
-        # When a server sends a response and then closes the connection,
-        # the client must be allowed to read the data before the IOStream
-        # closes itself.  Epoll reports closed connections with a separate
-        # EPOLLRDHUP event delivered at the same time as the read event,
-        # while kqueue reports them as a second read/write event with an EOF
-        # flag.
-        response = self.fetch("/", headers={"Connection": "close"})
-        response.rethrow()
-
-    def test_read_until_close(self):
-        stream = self._make_client_iostream()
-        stream.connect(('127.0.0.1', self.get_http_port()), callback=self.stop)
-        self.wait()
-        stream.write(b"GET / HTTP/1.0\r\n\r\n")
-
-        stream.read_until_close(self.stop)
-        data = self.wait()
-        self.assertTrue(data.startswith(b"HTTP/1.1 200"))
-        self.assertTrue(data.endswith(b"Hello"))
-
-    def test_read_zero_bytes(self):
-        self.stream = self._make_client_iostream()
-        self.stream.connect(("127.0.0.1", self.get_http_port()),
-                            callback=self.stop)
-        self.wait()
-        self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
-
-        # normal read
-        self.stream.read_bytes(9, self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"HTTP/1.1 ")
-
-        # zero bytes
-        self.stream.read_bytes(0, self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"")
-
-        # another normal read
-        self.stream.read_bytes(3, self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"200")
-
-        self.stream.close()
-
-    def test_write_while_connecting(self):
-        stream = self._make_client_iostream()
-        connected = [False]
-
-        def connected_callback():
-            connected[0] = True
-            self.stop()
-        stream.connect(("127.0.0.1", self.get_http_port()),
-                       callback=connected_callback)
-        # unlike the previous tests, try to write before the connection
-        # is complete.
-        written = [False]
-
-        def write_callback():
-            written[0] = True
-            self.stop()
-        stream.write(b"GET / HTTP/1.0\r\nConnection: close\r\n\r\n",
-                     callback=write_callback)
-        self.assertTrue(not connected[0])
-        # by the time the write has flushed, the connection callback has
-        # also run
-        try:
-            self.wait(lambda: connected[0] and written[0])
-        finally:
-            logging.debug((connected, written))
-
-        stream.read_until_close(self.stop)
-        data = self.wait()
-        self.assertTrue(data.endswith(b"Hello"))
-
-        stream.close()
-
-    @gen_test
-    def test_future_interface(self):
-        """Basic test of IOStream's ability to return Futures."""
-        stream = self._make_client_iostream()
-        connect_result = yield stream.connect(
-            ("127.0.0.1", self.get_http_port()))
-        self.assertIs(connect_result, stream)
-        yield stream.write(b"GET / HTTP/1.0\r\n\r\n")
-        first_line = yield stream.read_until(b"\r\n")
-        self.assertEqual(first_line, b"HTTP/1.1 200 OK\r\n")
-        # callback=None is equivalent to no callback.
-        header_data = yield stream.read_until(b"\r\n\r\n", callback=None)
-        headers = HTTPHeaders.parse(header_data.decode('latin1'))
-        content_length = int(headers['Content-Length'])
-        body = yield stream.read_bytes(content_length)
-        self.assertEqual(body, b'Hello')
-        stream.close()
-
-    @gen_test
-    def test_future_close_while_reading(self):
-        stream = self._make_client_iostream()
-        yield stream.connect(("127.0.0.1", self.get_http_port()))
-        yield stream.write(b"GET / HTTP/1.0\r\n\r\n")
-        with self.assertRaises(StreamClosedError):
-            yield stream.read_bytes(1024 * 1024)
-        stream.close()
-
-    @gen_test
-    def test_future_read_until_close(self):
-        # Ensure that the data comes through before the StreamClosedError.
-        stream = self._make_client_iostream()
-        yield stream.connect(("127.0.0.1", self.get_http_port()))
-        yield stream.write(b"GET / HTTP/1.0\r\nConnection: close\r\n\r\n")
-        yield stream.read_until(b"\r\n\r\n")
-        body = yield stream.read_until_close()
-        self.assertEqual(body, b"Hello")
-
-        # Nothing else to read; the error comes immediately without waiting
-        # for yield.
-        with self.assertRaises(StreamClosedError):
-            stream.read_bytes(1)
-
-
-class TestIOStreamMixin(object):
-    def _make_server_iostream(self, connection, **kwargs):
-        raise NotImplementedError()
-
-    def _make_client_iostream(self, connection, **kwargs):
-        raise NotImplementedError()
-
-    def make_iostream_pair(self, **kwargs):
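-        # Bind a throwaway listening socket, connect a client IOStream to it,
-        # and wrap the accepted connection in a server IOStream.  Returns the
-        # [server, client] pair once both ends are connected.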
-        listener, port = bind_unused_port()
-        streams = [None, None]
-
-        def accept_callback(connection, address):
-            streams[0] = self._make_server_iostream(connection, **kwargs)
-            self.stop()
-
-        def connect_callback():
-            streams[1] = client_stream
-            self.stop()
-        netutil.add_accept_handler(listener, accept_callback,
-                                   io_loop=self.io_loop)
-        client_stream = self._make_client_iostream(socket.socket(), **kwargs)
-        client_stream.connect(('127.0.0.1', port),
-                              callback=connect_callback)
-        self.wait(condition=lambda: all(streams))
-        self.io_loop.remove_handler(listener.fileno())
-        listener.close()
-        return streams
-
-    def test_streaming_callback_with_data_in_buffer(self):
-        server, client = self.make_iostream_pair()
-        client.write(b"abcd\r\nefgh")
-        server.read_until(b"\r\n", self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"abcd\r\n")
-
-        def closed_callback(chunk):
-            self.fail()
-        server.read_until_close(callback=closed_callback,
-                                streaming_callback=self.stop)
-        # self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"efgh")
-        server.close()
-        client.close()
-
-    def test_write_zero_bytes(self):
-        # Attempting to write zero bytes should run the callback without
-        # going into an infinite loop.
-        server, client = self.make_iostream_pair()
-        server.write(b'', callback=self.stop)
-        self.wait()
-        server.close()
-        client.close()
-
-    def test_connection_refused(self):
-        # When a connection is refused, the connect callback should not
-        # be run.  (The kqueue IOLoop used to behave differently from the
-        # epoll IOLoop in this respect)
-        cleanup_func, port = refusing_port()
-        self.addCleanup(cleanup_func)
-        stream = IOStream(socket.socket(), self.io_loop)
-        self.connect_called = False
-
-        def connect_callback():
-            self.connect_called = True
-            self.stop()
-        stream.set_close_callback(self.stop)
-        # log messages vary by platform and ioloop implementation
-        with ExpectLog(gen_log, ".*", required=False):
-            stream.connect(("127.0.0.1", port), connect_callback)
-            self.wait()
-        self.assertFalse(self.connect_called)
-        self.assertTrue(isinstance(stream.error, socket.error), stream.error)
-        if sys.platform != 'cygwin':
-            _ERRNO_CONNREFUSED = (errno.ECONNREFUSED,)
-            if hasattr(errno, "WSAECONNREFUSED"):
-                _ERRNO_CONNREFUSED += (errno.WSAECONNREFUSED,)
-            # cygwin's errnos don't match those used on native windows python
-            self.assertTrue(stream.error.args[0] in _ERRNO_CONNREFUSED)
-
-    @unittest.skipIf(mock is None, 'mock package not present')
-    def test_gaierror(self):
-        # Test that IOStream sets its exc_info on getaddrinfo error.
-        # It's difficult to reliably trigger a getaddrinfo error;
-        # some resolvers won't even return errors for malformed names,
-        # so we mock it instead. If IOStream changes to call a Resolver
-        # before sock.connect, the mock target will need to change too.
-        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
-        stream = IOStream(s, io_loop=self.io_loop)
-        stream.set_close_callback(self.stop)
-        with mock.patch('socket.socket.connect',
-                        side_effect=socket.gaierror(errno.EIO, 'boom')):
-            with ExpectLog(gen_log, "Connect error"):
-                stream.connect(('localhost', 80), callback=self.stop)
-                self.wait()
-                self.assertIsInstance(stream.error, socket.gaierror)
-
-    def test_read_callback_error(self):
-        # Test that IOStream sets its exc_info when a read callback throws
-        server, client = self.make_iostream_pair()
-        try:
-            server.set_close_callback(self.stop)
-            with ExpectLog(
-                app_log, "(Uncaught exception|Exception in callback)"
-            ):
-                # Clear ExceptionStackContext so IOStream catches error
-                with NullContext():
-                    server.read_bytes(1, callback=lambda data: 1 / 0)
-                client.write(b"1")
-                self.wait()
-            self.assertTrue(isinstance(server.error, ZeroDivisionError))
-        finally:
-            server.close()
-            client.close()
-
-    def test_streaming_callback(self):
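-        # read_bytes(6) with a streaming_callback delivers data to the
-        # streaming callback as it arrives ("1234", then the "56" that
-        # completes the request); the final callback then fires with no
-        # remaining data.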
-        server, client = self.make_iostream_pair()
-        try:
-            chunks = []
-            final_called = []
-
-            def streaming_callback(data):
-                chunks.append(data)
-                self.stop()
-
-            def final_callback(data):
-                self.assertFalse(data)
-                final_called.append(True)
-                self.stop()
-            server.read_bytes(6, callback=final_callback,
-                              streaming_callback=streaming_callback)
-            client.write(b"1234")
-            self.wait(condition=lambda: chunks)
-            client.write(b"5678")
-            self.wait(condition=lambda: final_called)
-            self.assertEqual(chunks, [b"1234", b"56"])
-
-            # the rest of the last chunk is still in the buffer
-            server.read_bytes(2, callback=self.stop)
-            data = self.wait()
-            self.assertEqual(data, b"78")
-        finally:
-            server.close()
-            client.close()
-
-    def test_streaming_until_close(self):
-        server, client = self.make_iostream_pair()
-        try:
-            chunks = []
-            closed = [False]
-
-            def streaming_callback(data):
-                chunks.append(data)
-                self.stop()
-
-            def close_callback(data):
-                assert not data, data
-                closed[0] = True
-                self.stop()
-            client.read_until_close(callback=close_callback,
-                                    streaming_callback=streaming_callback)
-            server.write(b"1234")
-            self.wait(condition=lambda: len(chunks) == 1)
-            server.write(b"5678", self.stop)
-            self.wait()
-            server.close()
-            self.wait(condition=lambda: closed[0])
-            self.assertEqual(chunks, [b"1234", b"5678"])
-        finally:
-            server.close()
-            client.close()
-
-    def test_streaming_until_close_future(self):
-        server, client = self.make_iostream_pair()
-        try:
-            chunks = []
-
-            @gen.coroutine
-            def client_task():
-                yield client.read_until_close(streaming_callback=chunks.append)
-
-            @gen.coroutine
-            def server_task():
-                yield server.write(b"1234")
-                yield gen.sleep(0.01)
-                yield server.write(b"5678")
-                server.close()
-
-            @gen.coroutine
-            def f():
-                yield [client_task(), server_task()]
-            self.io_loop.run_sync(f)
-            self.assertEqual(chunks, [b"1234", b"5678"])
-        finally:
-            server.close()
-            client.close()
-
-    def test_delayed_close_callback(self):
-        # The scenario:  Server closes the connection while there is a pending
-        # read that can be served out of buffered data.  The client does not
-        # run the close_callback as soon as it detects the close, but rather
-        # defers it until after the buffered read has finished.
-        server, client = self.make_iostream_pair()
-        try:
-            client.set_close_callback(self.stop)
-            server.write(b"12")
-            chunks = []
-
-            def callback1(data):
-                chunks.append(data)
-                client.read_bytes(1, callback2)
-                server.close()
-
-            def callback2(data):
-                chunks.append(data)
-            client.read_bytes(1, callback1)
-            self.wait()  # stopped by close_callback
-            self.assertEqual(chunks, [b"1", b"2"])
-        finally:
-            server.close()
-            client.close()
-
-    def test_future_delayed_close_callback(self):
-        # Same as test_delayed_close_callback, but with the future interface.
-        server, client = self.make_iostream_pair()
-
-        # We can't call make_iostream_pair inside a gen_test function
-        # because the ioloop is not reentrant.
-        @gen_test
-        def f(self):
-            server.write(b"12")
-            chunks = []
-            chunks.append((yield client.read_bytes(1)))
-            server.close()
-            chunks.append((yield client.read_bytes(1)))
-            self.assertEqual(chunks, [b"1", b"2"])
-        try:
-            f(self)
-        finally:
-            server.close()
-            client.close()
-
-    def test_close_buffered_data(self):
-        # Similar to the previous test, but with data stored in the OS's
-        # socket buffers instead of the IOStream's read buffer.  Out-of-band
-        # close notifications must be delayed until all data has been
-        # drained into the IOStream buffer. (epoll used to use out-of-band
-        # close events with EPOLLRDHUP, but no longer)
-        #
-        # This depends on the read_chunk_size being smaller than the
-        # OS socket buffer, so make it small.
-        server, client = self.make_iostream_pair(read_chunk_size=256)
-        try:
-            server.write(b"A" * 512)
-            client.read_bytes(256, self.stop)
-            data = self.wait()
-            self.assertEqual(b"A" * 256, data)
-            server.close()
-            # Allow the close to propagate to the client side of the
-            # connection.  Using add_callback instead of add_timeout
-            # doesn't seem to work, even with multiple iterations
-            self.io_loop.add_timeout(self.io_loop.time() + 0.01, self.stop)
-            self.wait()
-            client.read_bytes(256, self.stop)
-            data = self.wait()
-            self.assertEqual(b"A" * 256, data)
-        finally:
-            server.close()
-            client.close()
-
-    def test_read_until_close_after_close(self):
-        # Similar to test_delayed_close_callback, but read_until_close takes
-        # a separate code path so test it separately.
-        server, client = self.make_iostream_pair()
-        try:
-            server.write(b"1234")
-            server.close()
-            # Read one byte to make sure the client has received the data.
-            # It won't run the close callback as long as there is more buffered
-            # data that could satisfy a later read.
-            client.read_bytes(1, self.stop)
-            data = self.wait()
-            self.assertEqual(data, b"1")
-            client.read_until_close(self.stop)
-            data = self.wait()
-            self.assertEqual(data, b"234")
-        finally:
-            server.close()
-            client.close()
-
-    @unittest.skipIf(mock is None, 'mock package not present')
-    def test_read_until_close_with_error(self):
-        server, client = self.make_iostream_pair()
-        try:
-            with mock.patch('tornado.iostream.BaseIOStream._try_inline_read',
-                            side_effect=IOError('boom')):
-                with self.assertRaisesRegexp(IOError, 'boom'):
-                    client.read_until_close(self.stop)
-        finally:
-            server.close()
-            client.close()
-
-    def test_streaming_read_until_close_after_close(self):
-        # Same as the preceding test but with a streaming_callback.
-        # All data should go through the streaming callback,
-        # and the final read callback just gets an empty string.
-        server, client = self.make_iostream_pair()
-        try:
-            server.write(b"1234")
-            server.close()
-            client.read_bytes(1, self.stop)
-            data = self.wait()
-            self.assertEqual(data, b"1")
-            streaming_data = []
-            client.read_until_close(self.stop,
-                                    streaming_callback=streaming_data.append)
-            data = self.wait()
-            self.assertEqual(b'', data)
-            self.assertEqual(b''.join(streaming_data), b"234")
-        finally:
-            server.close()
-            client.close()
-
-    def test_large_read_until(self):
-        # Performance test: read_until used to have a quadratic component
-        # so a read_until of 4MB would take 8 seconds; now it takes 0.25
-        # seconds.
-        server, client = self.make_iostream_pair()
-        try:
-            # This test fails on pypy with ssl.  I think it's because
-            # pypy's gc moves objects, breaking the
-            # "frozen write buffer" assumption.
-            if (isinstance(server, SSLIOStream) and
-                    platform.python_implementation() == 'PyPy'):
-                raise unittest.SkipTest(
-                    "pypy gc causes problems with openssl")
-            NUM_KB = 4096
-            for i in range(NUM_KB):
-                client.write(b"A" * 1024)
-            client.write(b"\r\n")
-            server.read_until(b"\r\n", self.stop)
-            data = self.wait()
-            self.assertEqual(len(data), NUM_KB * 1024 + 2)
-        finally:
-            server.close()
-            client.close()
-
-    def test_close_callback_with_pending_read(self):
-        # Regression test for a bug that was introduced in 2.3
-        # where the IOStream._close_callback would never be called
-        # if there were pending reads.
-        OK = b"OK\r\n"
-        server, client = self.make_iostream_pair()
-        client.set_close_callback(self.stop)
-        try:
-            server.write(OK)
-            client.read_until(b"\r\n", self.stop)
-            res = self.wait()
-            self.assertEqual(res, OK)
-
-            server.close()
-            client.read_until(b"\r\n", lambda x: x)
-            # If _close_callback (self.stop) is not called,
-            # an AssertionError: Async operation timed out after 5 seconds
-            # will be raised.
-            res = self.wait()
-            self.assertTrue(res is None)
-        finally:
-            server.close()
-            client.close()
-
-    @skipIfNonUnix
-    def test_inline_read_error(self):
-        # An error on an inline read is raised without logging (on the
-        # assumption that it will eventually be noticed or logged further
-        # up the stack).
-        #
-        # This test is posix-only because windows os.close() doesn't work
-        # on socket FDs, but we can't close the socket object normally
-        # because we won't get the error we want if the socket knows
-        # it's closed.
-        server, client = self.make_iostream_pair()
-        try:
-            os.close(server.socket.fileno())
-            with self.assertRaises(socket.error):
-                server.read_bytes(1, lambda data: None)
-        finally:
-            server.close()
-            client.close()
-
-    def test_async_read_error_logging(self):
-        # Socket errors on asynchronous reads should be logged (but only
-        # once).
-        server, client = self.make_iostream_pair()
-        server.set_close_callback(self.stop)
-        try:
-            # Start a read that will be fulfilled asynchronously.
-            server.read_bytes(1, lambda data: None)
-            client.write(b'a')
-            # Stub out read_from_fd to make it fail.
-
-            def fake_read_from_fd():
-                os.close(server.socket.fileno())
-                server.__class__.read_from_fd(server)
-            server.read_from_fd = fake_read_from_fd
-            # This log message is from _handle_read (not read_from_fd).
-            with ExpectLog(gen_log, "error on read"):
-                self.wait()
-        finally:
-            server.close()
-            client.close()
-
-    def test_future_close_callback(self):
-        # Regression test for interaction between the Future read interfaces
-        # and IOStream._maybe_add_error_listener.
-        server, client = self.make_iostream_pair()
-        closed = [False]
-
-        def close_callback():
-            closed[0] = True
-            self.stop()
-        server.set_close_callback(close_callback)
-        try:
-            client.write(b'a')
-            future = server.read_bytes(1)
-            self.io_loop.add_future(future, self.stop)
-            self.assertEqual(self.wait().result(), b'a')
-            self.assertFalse(closed[0])
-            client.close()
-            self.wait()
-            self.assertTrue(closed[0])
-        finally:
-            server.close()
-            client.close()
-
-    def test_write_memoryview(self):
-        server, client = self.make_iostream_pair()
-        try:
-            client.read_bytes(4, self.stop)
-            server.write(memoryview(b"hello"))
-            data = self.wait()
-            self.assertEqual(data, b"hell")
-        finally:
-            server.close()
-            client.close()
-
-    def test_read_bytes_partial(self):
-        server, client = self.make_iostream_pair()
-        try:
-            # Ask for more than is available with partial=True
-            client.read_bytes(50, self.stop, partial=True)
-            server.write(b"hello")
-            data = self.wait()
-            self.assertEqual(data, b"hello")
-
-            # Ask for less than what is available; num_bytes is still
-            # respected.
-            client.read_bytes(3, self.stop, partial=True)
-            server.write(b"world")
-            data = self.wait()
-            self.assertEqual(data, b"wor")
-
-            # Partial reads won't return an empty string, but read_bytes(0)
-            # will.
-            client.read_bytes(0, self.stop, partial=True)
-            data = self.wait()
-            self.assertEqual(data, b'')
-        finally:
-            server.close()
-            client.close()
-
-    def test_read_until_max_bytes(self):
-        server, client = self.make_iostream_pair()
-        client.set_close_callback(lambda: self.stop("closed"))
-        try:
-            # Extra room under the limit
-            client.read_until(b"def", self.stop, max_bytes=50)
-            server.write(b"abcdef")
-            data = self.wait()
-            self.assertEqual(data, b"abcdef")
-
-            # Just enough space
-            client.read_until(b"def", self.stop, max_bytes=6)
-            server.write(b"abcdef")
-            data = self.wait()
-            self.assertEqual(data, b"abcdef")
-
-            # Not enough space; by the time we discover this, all we can
-            # do is log a warning and close the connection.
-            with ExpectLog(gen_log, "Unsatisfiable read"):
-                client.read_until(b"def", self.stop, max_bytes=5)
-                server.write(b"123456")
-                data = self.wait()
-            self.assertEqual(data, "closed")
-        finally:
-            server.close()
-            client.close()
-
-    def test_read_until_max_bytes_inline(self):
-        server, client = self.make_iostream_pair()
-        client.set_close_callback(lambda: self.stop("closed"))
-        try:
-            # Similar to the error case in the previous test, but the
-            # server writes first so client reads are satisfied
-            # inline.  For consistency with the out-of-line case, we
-            # do not raise the error synchronously.
-            server.write(b"123456")
-            with ExpectLog(gen_log, "Unsatisfiable read"):
-                client.read_until(b"def", self.stop, max_bytes=5)
-                data = self.wait()
-            self.assertEqual(data, "closed")
-        finally:
-            server.close()
-            client.close()
-
-    def test_read_until_max_bytes_ignores_extra(self):
-        server, client = self.make_iostream_pair()
-        client.set_close_callback(lambda: self.stop("closed"))
-        try:
-            # Even though data that matches arrives in the same packet that
-            # puts us over the limit, we fail the request because it was not
-            # found within the limit.
-            server.write(b"abcdef")
-            with ExpectLog(gen_log, "Unsatisfiable read"):
-                client.read_until(b"def", self.stop, max_bytes=5)
-                data = self.wait()
-            self.assertEqual(data, "closed")
-        finally:
-            server.close()
-            client.close()
-
-    def test_read_until_regex_max_bytes(self):
-        server, client = self.make_iostream_pair()
-        client.set_close_callback(lambda: self.stop("closed"))
-        try:
-            # Extra room under the limit
-            client.read_until_regex(b"def", self.stop, max_bytes=50)
-            server.write(b"abcdef")
-            data = self.wait()
-            self.assertEqual(data, b"abcdef")
-
-            # Just enough space
-            client.read_until_regex(b"def", self.stop, max_bytes=6)
-            server.write(b"abcdef")
-            data = self.wait()
-            self.assertEqual(data, b"abcdef")
-
-            # Not enough space; by the time we discover this, all we can
-            # do is log a warning and close the connection.
-            with ExpectLog(gen_log, "Unsatisfiable read"):
-                client.read_until_regex(b"def", self.stop, max_bytes=5)
-                server.write(b"123456")
-                data = self.wait()
-            self.assertEqual(data, "closed")
-        finally:
-            server.close()
-            client.close()
-
-    def test_read_until_regex_max_bytes_inline(self):
-        server, client = self.make_iostream_pair()
-        client.set_close_callback(lambda: self.stop("closed"))
-        try:
-            # Similar to the error case in the previous test, but the
-            # server writes first so client reads are satisfied
-            # inline.  For consistency with the out-of-line case, we
-            # do not raise the error synchronously.
-            server.write(b"123456")
-            with ExpectLog(gen_log, "Unsatisfiable read"):
-                client.read_until_regex(b"def", self.stop, max_bytes=5)
-                data = self.wait()
-            self.assertEqual(data, "closed")
-        finally:
-            server.close()
-            client.close()
-
-    def test_read_until_regex_max_bytes_ignores_extra(self):
-        server, client = self.make_iostream_pair()
-        client.set_close_callback(lambda: self.stop("closed"))
-        try:
-            # Even though data that matches arrives in the same packet that
-            # puts us over the limit, we fail the request because it was not
-            # found within the limit.
-            server.write(b"abcdef")
-            with ExpectLog(gen_log, "Unsatisfiable read"):
-                client.read_until_regex(b"def", self.stop, max_bytes=5)
-                data = self.wait()
-            self.assertEqual(data, "closed")
-        finally:
-            server.close()
-            client.close()
-
-    def test_small_reads_from_large_buffer(self):
-        # 10KB buffer size, 100KB available to read.
-        # Read 1KB at a time and make sure that the buffer is not eagerly
-        # filled.
-        server, client = self.make_iostream_pair(max_buffer_size=10 * 1024)
-        try:
-            server.write(b"a" * 1024 * 100)
-            for i in range(100):
-                client.read_bytes(1024, self.stop)
-                data = self.wait()
-                self.assertEqual(data, b"a" * 1024)
-        finally:
-            server.close()
-            client.close()
-
-    def test_small_read_untils_from_large_buffer(self):
-        # 10KB buffer size, 100KB available to read.
-        # Read 1KB at a time and make sure that the buffer is not eagerly
-        # filled.
-        server, client = self.make_iostream_pair(max_buffer_size=10 * 1024)
-        try:
-            server.write((b"a" * 1023 + b"\n") * 100)
-            for i in range(100):
-                client.read_until(b"\n", self.stop, max_bytes=4096)
-                data = self.wait()
-                self.assertEqual(data, b"a" * 1023 + b"\n")
-        finally:
-            server.close()
-            client.close()
-
-    def test_flow_control(self):
-        MB = 1024 * 1024
-        server, client = self.make_iostream_pair(max_buffer_size=5 * MB)
-        try:
-            # Client writes more than the server will accept.
-            client.write(b"a" * 10 * MB)
-            # The server pauses while reading.
-            server.read_bytes(MB, self.stop)
-            self.wait()
-            self.io_loop.call_later(0.1, self.stop)
-            self.wait()
-            # The client's writes have been blocked; the server can
-            # continue to read gradually.
-            for i in range(9):
-                server.read_bytes(MB, self.stop)
-                self.wait()
-        finally:
-            server.close()
-            client.close()
-
-    def test_future_write(self):
-        """
-        Test that write() Futures are never orphaned.
-        """
-        # Run concurrent writers that will write enough bytes so as to
-        # clog the socket buffer and accumulate bytes in our write buffer.
-        m, n = 10000, 1000
-        nproducers = 10
-        total_bytes = m * n * nproducers
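-        # 10 producers x 1,000 writes x 10,000 bytes = 100,000,000 bytes total.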
-        server, client = self.make_iostream_pair(max_buffer_size=total_bytes)
-
-        @gen.coroutine
-        def produce():
-            data = b'x' * m
-            for i in range(n):
-                yield server.write(data)
-
-        @gen.coroutine
-        def consume():
-            nread = 0
-            while nread < total_bytes:
-                res = yield client.read_bytes(m)
-                nread += len(res)
-
-        @gen.coroutine
-        def main():
-            yield [produce() for i in range(nproducers)] + [consume()]
-
-        try:
-            self.io_loop.run_sync(main)
-        finally:
-            server.close()
-            client.close()
-
-
-class TestIOStreamWebHTTP(TestIOStreamWebMixin, AsyncHTTPTestCase):
-    def _make_client_iostream(self):
-        return IOStream(socket.socket(), io_loop=self.io_loop)
-
-
-class TestIOStreamWebHTTPS(TestIOStreamWebMixin, AsyncHTTPSTestCase):
-    def _make_client_iostream(self):
-        return SSLIOStream(socket.socket(), io_loop=self.io_loop,
-                           ssl_options=dict(cert_reqs=ssl.CERT_NONE))
-
-
-class TestIOStream(TestIOStreamMixin, AsyncTestCase):
-    def _make_server_iostream(self, connection, **kwargs):
-        return IOStream(connection, **kwargs)
-
-    def _make_client_iostream(self, connection, **kwargs):
-        return IOStream(connection, **kwargs)
-
-
-class TestIOStreamSSL(TestIOStreamMixin, AsyncTestCase):
-    def _make_server_iostream(self, connection, **kwargs):
-        connection = ssl.wrap_socket(connection,
-                                     server_side=True,
-                                     do_handshake_on_connect=False,
-                                     **_server_ssl_options())
-        return SSLIOStream(connection, io_loop=self.io_loop, **kwargs)
-
-    def _make_client_iostream(self, connection, **kwargs):
-        return SSLIOStream(connection, io_loop=self.io_loop,
-                           ssl_options=dict(cert_reqs=ssl.CERT_NONE),
-                           **kwargs)
-
-
-# This will run some tests that are basically redundant but it's the
-# simplest way to make sure that it works to pass an SSLContext
-# instead of an ssl_options dict to the SSLIOStream constructor.
-@unittest.skipIf(not hasattr(ssl, 'SSLContext'), 'ssl.SSLContext not present')
-class TestIOStreamSSLContext(TestIOStreamMixin, AsyncTestCase):
-    def _make_server_iostream(self, connection, **kwargs):
-        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
-        context.load_cert_chain(
-            os.path.join(os.path.dirname(__file__), 'test.crt'),
-            os.path.join(os.path.dirname(__file__), 'test.key'))
-        connection = ssl_wrap_socket(connection, context,
-                                     server_side=True,
-                                     do_handshake_on_connect=False)
-        return SSLIOStream(connection, io_loop=self.io_loop, **kwargs)
-
-    def _make_client_iostream(self, connection, **kwargs):
-        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
-        return SSLIOStream(connection, io_loop=self.io_loop,
-                           ssl_options=context, **kwargs)
-
-
-class TestIOStreamStartTLS(AsyncTestCase):
-    def setUp(self):
-        try:
-            super(TestIOStreamStartTLS, self).setUp()
-            self.listener, self.port = bind_unused_port()
-            self.server_stream = None
-            self.server_accepted = Future()
-            netutil.add_accept_handler(self.listener, self.accept)
-            self.client_stream = IOStream(socket.socket())
-            self.io_loop.add_future(self.client_stream.connect(
-                ('127.0.0.1', self.port)), self.stop)
-            self.wait()
-            self.io_loop.add_future(self.server_accepted, self.stop)
-            self.wait()
-        except Exception as e:
-            print(e)
-            raise
-
-    def tearDown(self):
-        if self.server_stream is not None:
-            self.server_stream.close()
-        if self.client_stream is not None:
-            self.client_stream.close()
-        self.listener.close()
-        super(TestIOStreamStartTLS, self).tearDown()
-
-    def accept(self, connection, address):
-        if self.server_stream is not None:
-            self.fail("should only get one connection")
-        self.server_stream = IOStream(connection)
-        self.server_accepted.set_result(None)
-
-    @gen.coroutine
-    def client_send_line(self, line):
-        self.client_stream.write(line)
-        recv_line = yield self.server_stream.read_until(b"\r\n")
-        self.assertEqual(line, recv_line)
-
-    @gen.coroutine
-    def server_send_line(self, line):
-        self.server_stream.write(line)
-        recv_line = yield self.client_stream.read_until(b"\r\n")
-        self.assertEqual(line, recv_line)
-
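-    # start_tls consumes the stream it is called on: afterwards the original
-    # IOStream must not be used, so these helpers drop their reference and
-    # return the Future that resolves to the new SSLIOStream.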
-    def client_start_tls(self, ssl_options=None, server_hostname=None):
-        client_stream = self.client_stream
-        self.client_stream = None
-        return client_stream.start_tls(False, ssl_options, server_hostname)
-
-    def server_start_tls(self, ssl_options=None):
-        server_stream = self.server_stream
-        self.server_stream = None
-        return server_stream.start_tls(True, ssl_options)
-
-    @gen_test
-    def test_start_tls_smtp(self):
-        # This flow is simplified from RFC 3207 section 5.
-        # We don't really need all of this, but it helps to make sure
-        # that after realistic back-and-forth traffic the buffers end up
-        # in a sane state.
-        yield self.server_send_line(b"220 mail.example.com ready\r\n")
-        yield self.client_send_line(b"EHLO mail.example.com\r\n")
-        yield self.server_send_line(b"250-mail.example.com welcome\r\n")
-        yield self.server_send_line(b"250 STARTTLS\r\n")
-        yield self.client_send_line(b"STARTTLS\r\n")
-        yield self.server_send_line(b"220 Go ahead\r\n")
-        client_future = self.client_start_tls(dict(cert_reqs=ssl.CERT_NONE))
-        server_future = self.server_start_tls(_server_ssl_options())
-        self.client_stream = yield client_future
-        self.server_stream = yield server_future
-        self.assertTrue(isinstance(self.client_stream, SSLIOStream))
-        self.assertTrue(isinstance(self.server_stream, SSLIOStream))
-        yield self.client_send_line(b"EHLO mail.example.com\r\n")
-        yield self.server_send_line(b"250 mail.example.com welcome\r\n")
-
-    @gen_test
-    def test_handshake_fail(self):
-        server_future = self.server_start_tls(_server_ssl_options())
-        # Certificates are verified with the default configuration.
-        client_future = self.client_start_tls(server_hostname="localhost")
-        with ExpectLog(gen_log, "SSL Error"):
-            with self.assertRaises(ssl.SSLError):
-                yield client_future
-        with self.assertRaises((ssl.SSLError, socket.error)):
-            yield server_future
-
-    @unittest.skipIf(not hasattr(ssl, 'create_default_context'),
-                     'ssl.create_default_context not present')
-    @gen_test
-    def test_check_hostname(self):
-        # Test that server_hostname parameter to start_tls is being used.
-        # The check_hostname functionality is only available in python 2.7 and
-        # up and in python 3.4 and up.
-        server_future = self.server_start_tls(_server_ssl_options())
-        client_future = self.client_start_tls(
-            ssl.create_default_context(),
-            server_hostname=b'127.0.0.1')
-        with ExpectLog(gen_log, "SSL Error"):
-            with self.assertRaises(ssl.SSLError):
-                # The client fails to connect with an SSL error.
-                yield client_future
-        with self.assertRaises(Exception):
-            # The server fails to connect, but the exact error is unspecified.
-            yield server_future
-
-
-class WaitForHandshakeTest(AsyncTestCase):
-    @gen.coroutine
-    def connect_to_server(self, server_cls):
-        server = client = None
-        try:
-            sock, port = bind_unused_port()
-            server = server_cls(ssl_options=_server_ssl_options())
-            server.add_socket(sock)
-
-            client = SSLIOStream(socket.socket(),
-                                 ssl_options=dict(cert_reqs=ssl.CERT_NONE))
-            yield client.connect(('127.0.0.1', port))
-            self.assertIsNotNone(client.socket.cipher())
-        finally:
-            if server is not None:
-                server.stop()
-            if client is not None:
-                client.close()
-
-    @gen_test
-    def test_wait_for_handshake_callback(self):
-        test = self
-        handshake_future = Future()
-
-        class TestServer(TCPServer):
-            def handle_stream(self, stream, address):
-                # The handshake has not yet completed.
-                test.assertIsNone(stream.socket.cipher())
-                self.stream = stream
-                stream.wait_for_handshake(self.handshake_done)
-
-            def handshake_done(self):
-                # Now the handshake is done and ssl information is available.
-                test.assertIsNotNone(self.stream.socket.cipher())
-                handshake_future.set_result(None)
-
-        yield self.connect_to_server(TestServer)
-        yield handshake_future
-
-    @gen_test
-    def test_wait_for_handshake_future(self):
-        test = self
-        handshake_future = Future()
-
-        class TestServer(TCPServer):
-            def handle_stream(self, stream, address):
-                test.assertIsNone(stream.socket.cipher())
-                test.io_loop.spawn_callback(self.handle_connection, stream)
-
-            @gen.coroutine
-            def handle_connection(self, stream):
-                yield stream.wait_for_handshake()
-                handshake_future.set_result(None)
-
-        yield self.connect_to_server(TestServer)
-        yield handshake_future
-
-    @gen_test
-    def test_wait_for_handshake_already_waiting_error(self):
-        test = self
-        handshake_future = Future()
-
-        class TestServer(TCPServer):
-            def handle_stream(self, stream, address):
-                stream.wait_for_handshake(self.handshake_done)
-                test.assertRaises(RuntimeError, stream.wait_for_handshake)
-
-            def handshake_done(self):
-                handshake_future.set_result(None)
-
-        yield self.connect_to_server(TestServer)
-        yield handshake_future
-
-    @gen_test
-    def test_wait_for_handshake_already_connected(self):
-        handshake_future = Future()
-
-        class TestServer(TCPServer):
-            def handle_stream(self, stream, address):
-                self.stream = stream
-                stream.wait_for_handshake(self.handshake_done)
-
-            def handshake_done(self):
-                self.stream.wait_for_handshake(self.handshake2_done)
-
-            def handshake2_done(self):
-                handshake_future.set_result(None)
-
-        yield self.connect_to_server(TestServer)
-        yield handshake_future
-
-
-@skipIfNonUnix
-class TestPipeIOStream(AsyncTestCase):
-    def test_pipe_iostream(self):
-        r, w = os.pipe()
-
-        rs = PipeIOStream(r, io_loop=self.io_loop)
-        ws = PipeIOStream(w, io_loop=self.io_loop)
-
-        ws.write(b"hel")
-        ws.write(b"lo world")
-
-        rs.read_until(b' ', callback=self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"hello ")
-
-        rs.read_bytes(3, self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"wor")
-
-        ws.close()
-
-        rs.read_until_close(self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"ld")
-
-        rs.close()
-
-    def test_pipe_iostream_big_write(self):
-        r, w = os.pipe()
-
-        rs = PipeIOStream(r, io_loop=self.io_loop)
-        ws = PipeIOStream(w, io_loop=self.io_loop)
-
-        NUM_BYTES = 1048576
-
-        # Write 1MB of data, which should fill the buffer
-        ws.write(b"1" * NUM_BYTES)
-
-        rs.read_bytes(NUM_BYTES, self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"1" * NUM_BYTES)
-
-        ws.close()
-        rs.close()
diff --git a/lib/tornado/test/locale_test.py b/lib/tornado/test/locale_test.py
deleted file mode 100644
index d548ffb861b2978319b5b476b1a701da5a35fc37..0000000000000000000000000000000000000000
--- a/lib/tornado/test/locale_test.py
+++ /dev/null
@@ -1,130 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import datetime
-import os
-import shutil
-import tempfile
-
-import tornado.locale
-from tornado.escape import utf8, to_unicode
-from tornado.test.util import unittest, skipOnAppEngine
-from tornado.util import unicode_type
-
-
-class TranslationLoaderTest(unittest.TestCase):
-    # TODO: less hacky way to get isolated tests
-    SAVE_VARS = ['_translations', '_supported_locales', '_use_gettext']
-
-    def clear_locale_cache(self):
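-        # Locale instances are cached on the Locale class; drop the cache
-        # so each test sees freshly loaded translations.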
-        if hasattr(tornado.locale.Locale, '_cache'):
-            del tornado.locale.Locale._cache
-
-    def setUp(self):
-        self.saved = {}
-        for var in TranslationLoaderTest.SAVE_VARS:
-            self.saved[var] = getattr(tornado.locale, var)
-        self.clear_locale_cache()
-
-    def tearDown(self):
-        for k, v in self.saved.items():
-            setattr(tornado.locale, k, v)
-        self.clear_locale_cache()
-
-    def test_csv(self):
-        tornado.locale.load_translations(
-            os.path.join(os.path.dirname(__file__), 'csv_translations'))
-        locale = tornado.locale.get("fr_FR")
-        self.assertTrue(isinstance(locale, tornado.locale.CSVLocale))
-        self.assertEqual(locale.translate("school"), u"\u00e9cole")
-
-    # tempfile.mkdtemp is not available on app engine.
-    @skipOnAppEngine
-    def test_csv_bom(self):
-        with open(os.path.join(os.path.dirname(__file__), 'csv_translations',
-                               'fr_FR.csv'), 'rb') as f:
-            char_data = to_unicode(f.read())
-        # Re-encode our input data (which is utf-8 without BOM) in
-        # encodings that use the BOM and ensure that we can still load
-        # it. Note that utf-16-le and utf-16-be do not write a BOM,
-        # so we only test whichever variant is native to our platform.
-        for encoding in ['utf-8-sig', 'utf-16']:
-            tmpdir = tempfile.mkdtemp()
-            try:
-                with open(os.path.join(tmpdir, 'fr_FR.csv'), 'wb') as f:
-                    f.write(char_data.encode(encoding))
-                tornado.locale.load_translations(tmpdir)
-                locale = tornado.locale.get('fr_FR')
-                self.assertIsInstance(locale, tornado.locale.CSVLocale)
-                self.assertEqual(locale.translate("school"), u"\u00e9cole")
-            finally:
-                shutil.rmtree(tmpdir)
-
-    def test_gettext(self):
-        tornado.locale.load_gettext_translations(
-            os.path.join(os.path.dirname(__file__), 'gettext_translations'),
-            "tornado_test")
-        locale = tornado.locale.get("fr_FR")
-        self.assertTrue(isinstance(locale, tornado.locale.GettextLocale))
-        self.assertEqual(locale.translate("school"), u"\u00e9cole")
-        self.assertEqual(locale.pgettext("law", "right"), u"le droit")
-        self.assertEqual(locale.pgettext("good", "right"), u"le bien")
-        self.assertEqual(locale.pgettext("organization", "club", "clubs", 1), u"le club")
-        self.assertEqual(locale.pgettext("organization", "club", "clubs", 2), u"les clubs")
-        self.assertEqual(locale.pgettext("stick", "club", "clubs", 1), u"le b\xe2ton")
-        self.assertEqual(locale.pgettext("stick", "club", "clubs", 2), u"les b\xe2tons")
-
-
-class LocaleDataTest(unittest.TestCase):
-    def test_non_ascii_name(self):
-        name = tornado.locale.LOCALE_NAMES['es_LA']['name']
-        self.assertTrue(isinstance(name, unicode_type))
-        self.assertEqual(name, u'Espa\u00f1ol')
-        self.assertEqual(utf8(name), b'Espa\xc3\xb1ol')
-
-
-class EnglishTest(unittest.TestCase):
-    def test_format_date(self):
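-        # full_format=True produces an absolute date string; with
-        # full_format=False the result is relative ('2 seconds ago',
-        # 'yesterday'), falling back to the weekday, then month and day,
-        # then month, day and year as the date gets older.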
-        locale = tornado.locale.get('en_US')
-        date = datetime.datetime(2013, 4, 28, 18, 35)
-        self.assertEqual(locale.format_date(date, full_format=True),
-                         'April 28, 2013 at 6:35 pm')
-
-        self.assertEqual(locale.format_date(datetime.datetime.utcnow() - datetime.timedelta(seconds=2), full_format=False),
-                         '2 seconds ago')
-        self.assertEqual(locale.format_date(datetime.datetime.utcnow() - datetime.timedelta(minutes=2), full_format=False),
-                         '2 minutes ago')
-        self.assertEqual(locale.format_date(datetime.datetime.utcnow() - datetime.timedelta(hours=2), full_format=False),
-                         '2 hours ago')
-
-        now = datetime.datetime.utcnow()
-        self.assertEqual(locale.format_date(now - datetime.timedelta(days=1), full_format=False, shorter=True),
-                         'yesterday')
-
-        date = now - datetime.timedelta(days=2)
-        self.assertEqual(locale.format_date(date, full_format=False, shorter=True),
-                         locale._weekdays[date.weekday()])
-
-        date = now - datetime.timedelta(days=300)
-        self.assertEqual(locale.format_date(date, full_format=False, shorter=True),
-                         '%s %d' % (locale._months[date.month - 1], date.day))
-
-        date = now - datetime.timedelta(days=500)
-        self.assertEqual(locale.format_date(date, full_format=False, shorter=True),
-                         '%s %d, %d' % (locale._months[date.month - 1], date.day, date.year))
-
-    def test_friendly_number(self):
-        locale = tornado.locale.get('en_US')
-        self.assertEqual(locale.friendly_number(1000000), '1,000,000')
-
-    def test_list(self):
-        locale = tornado.locale.get('en_US')
-        self.assertEqual(locale.list([]), '')
-        self.assertEqual(locale.list(['A']), 'A')
-        self.assertEqual(locale.list(['A', 'B']), 'A and B')
-        self.assertEqual(locale.list(['A', 'B', 'C']), 'A, B and C')
-
-    def test_format_day(self):
-        locale = tornado.locale.get('en_US')
-        date = datetime.datetime(2013, 4, 28, 18, 35)
-        self.assertEqual(locale.format_day(date=date, dow=True), 'Sunday, April 28')
-        self.assertEqual(locale.format_day(date=date, dow=False), 'April 28')
diff --git a/lib/tornado/test/locks_test.py b/lib/tornado/test/locks_test.py
deleted file mode 100644
index 844d4fb0ff42921cd2b41d737c502bd373a1ab14..0000000000000000000000000000000000000000
--- a/lib/tornado/test/locks_test.py
+++ /dev/null
@@ -1,518 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from __future__ import absolute_import, division, print_function
-from datetime import timedelta
-
-from tornado import gen, locks
-from tornado.gen import TimeoutError
-from tornado.testing import gen_test, AsyncTestCase
-from tornado.test.util import unittest, skipBefore35, exec_test
-
-
-class ConditionTest(AsyncTestCase):
-    def setUp(self):
-        super(ConditionTest, self).setUp()
-        self.history = []
-
-    def record_done(self, future, key):
-        """Record the resolution of a Future returned by Condition.wait."""
-        def callback(_):
-            if not future.result():
-                # wait() resolved to False, meaning it timed out.
-                self.history.append('timeout')
-            else:
-                self.history.append(key)
-        future.add_done_callback(callback)
-
-    def test_repr(self):
-        c = locks.Condition()
-        self.assertIn('Condition', repr(c))
-        self.assertNotIn('waiters', repr(c))
-        c.wait()
-        self.assertIn('waiters', repr(c))
-
-    @gen_test
-    def test_notify(self):
-        c = locks.Condition()
-        self.io_loop.call_later(0.01, c.notify)
-        yield c.wait()
-
-    def test_notify_1(self):
-        c = locks.Condition()
-        self.record_done(c.wait(), 'wait1')
-        self.record_done(c.wait(), 'wait2')
-        c.notify(1)
-        self.history.append('notify1')
-        c.notify(1)
-        self.history.append('notify2')
-        self.assertEqual(['wait1', 'notify1', 'wait2', 'notify2'],
-                         self.history)
-
-    def test_notify_n(self):
-        c = locks.Condition()
-        for i in range(6):
-            self.record_done(c.wait(), i)
-
-        c.notify(3)
-
-        # Callbacks execute in the order they were registered.
-        self.assertEqual(list(range(3)), self.history)
-        c.notify(1)
-        self.assertEqual(list(range(4)), self.history)
-        c.notify(2)
-        self.assertEqual(list(range(6)), self.history)
-
-    def test_notify_all(self):
-        c = locks.Condition()
-        for i in range(4):
-            self.record_done(c.wait(), i)
-
-        c.notify_all()
-        self.history.append('notify_all')
-
-        # Callbacks execute in the order they were registered.
-        self.assertEqual(
-            list(range(4)) + ['notify_all'],
-            self.history)
-
-    @gen_test
-    def test_wait_timeout(self):
-        c = locks.Condition()
-        wait = c.wait(timedelta(seconds=0.01))
-        self.io_loop.call_later(0.02, c.notify)  # Too late.
-        yield gen.sleep(0.03)
-        self.assertFalse((yield wait))
-
-    @gen_test
-    def test_wait_timeout_preempted(self):
-        c = locks.Condition()
-
-        # This fires before the wait times out.
-        self.io_loop.call_later(0.01, c.notify)
-        wait = c.wait(timedelta(seconds=0.02))
-        yield gen.sleep(0.03)
-        yield wait  # No TimeoutError.
-
-    @gen_test
-    def test_notify_n_with_timeout(self):
-        # Register callbacks 0, 1, 2, and 3. Callback 1 has a timeout.
-        # Wait for that timeout to expire, then do notify(2) and make
-        # sure everyone runs. Verifies that a timed-out callback does
-        # not count against the 'n' argument to notify().
-        c = locks.Condition()
-        self.record_done(c.wait(), 0)
-        self.record_done(c.wait(timedelta(seconds=0.01)), 1)
-        self.record_done(c.wait(), 2)
-        self.record_done(c.wait(), 3)
-
-        # Wait for callback 1 to time out.
-        yield gen.sleep(0.02)
-        self.assertEqual(['timeout'], self.history)
-
-        c.notify(2)
-        yield gen.sleep(0.01)
-        self.assertEqual(['timeout', 0, 2], self.history)
-        c.notify()
-        self.assertEqual(['timeout', 0, 2, 3], self.history)
-
-    @gen_test
-    def test_notify_all_with_timeout(self):
-        c = locks.Condition()
-        self.record_done(c.wait(), 0)
-        self.record_done(c.wait(timedelta(seconds=0.01)), 1)
-        self.record_done(c.wait(), 2)
-
-        # Wait for callback 1 to time out.
-        yield gen.sleep(0.02)
-        self.assertEqual(['timeout'], self.history)
-
-        c.notify_all()
-        self.assertEqual(['timeout', 0, 2], self.history)
-
-    @gen_test
-    def test_nested_notify(self):
-        # Ensure no notifications lost, even if notify() is reentered by a
-        # waiter calling notify().
-        c = locks.Condition()
-
-        # Three waiters.
-        futures = [c.wait() for _ in range(3)]
-
-        # First and second futures resolved. Second future reenters notify(),
-        # resolving third future.
-        futures[1].add_done_callback(lambda _: c.notify())
-        c.notify(2)
-        self.assertTrue(all(f.done() for f in futures))
-
-    @gen_test
-    def test_garbage_collection(self):
-        # Test that timed-out waiters are occasionally cleaned from the queue.
-        c = locks.Condition()
-        for _ in range(101):
-            c.wait(timedelta(seconds=0.01))
-
-        future = c.wait()
-        self.assertEqual(102, len(c._waiters))
-
-        # Let first 101 waiters time out, triggering a collection.
-        yield gen.sleep(0.02)
-        self.assertEqual(1, len(c._waiters))
-
-        # Final waiter is still active.
-        self.assertFalse(future.done())
-        c.notify()
-        self.assertTrue(future.done())
-
-
-class EventTest(AsyncTestCase):
-    def test_repr(self):
-        event = locks.Event()
-        self.assertTrue('clear' in str(event))
-        self.assertFalse('set' in str(event))
-        event.set()
-        self.assertFalse('clear' in str(event))
-        self.assertTrue('set' in str(event))
-
-    def test_event(self):
-        e = locks.Event()
-        future_0 = e.wait()
-        e.set()
-        future_1 = e.wait()
-        e.clear()
-        future_2 = e.wait()
-
-        self.assertTrue(future_0.done())
-        self.assertTrue(future_1.done())
-        self.assertFalse(future_2.done())
-
-    @gen_test
-    def test_event_timeout(self):
-        e = locks.Event()
-        with self.assertRaises(TimeoutError):
-            yield e.wait(timedelta(seconds=0.01))
-
-        # After a timed-out waiter, normal operation works.
-        self.io_loop.add_timeout(timedelta(seconds=0.01), e.set)
-        yield e.wait(timedelta(seconds=1))
-
-    def test_event_set_multiple(self):
-        e = locks.Event()
-        e.set()
-        e.set()
-        self.assertTrue(e.is_set())
-
-    def test_event_wait_clear(self):
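-        # A waiter registered before clear() is still woken by the
-        # following set().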
-        e = locks.Event()
-        f0 = e.wait()
-        e.clear()
-        f1 = e.wait()
-        e.set()
-        self.assertTrue(f0.done())
-        self.assertTrue(f1.done())
-
-
-class SemaphoreTest(AsyncTestCase):
-    def test_negative_value(self):
-        self.assertRaises(ValueError, locks.Semaphore, value=-1)
-
-    def test_repr(self):
-        sem = locks.Semaphore()
-        self.assertIn('Semaphore', repr(sem))
-        self.assertIn('unlocked,value:1', repr(sem))
-        sem.acquire()
-        self.assertIn('locked', repr(sem))
-        self.assertNotIn('waiters', repr(sem))
-        sem.acquire()
-        self.assertIn('waiters', repr(sem))
-
-    def test_acquire(self):
-        sem = locks.Semaphore()
-        f0 = sem.acquire()
-        self.assertTrue(f0.done())
-
-        # Wait for release().
-        f1 = sem.acquire()
-        self.assertFalse(f1.done())
-        f2 = sem.acquire()
-        sem.release()
-        self.assertTrue(f1.done())
-        self.assertFalse(f2.done())
-        sem.release()
-        self.assertTrue(f2.done())
-
-        sem.release()
-        # Now acquire() is instant.
-        self.assertTrue(sem.acquire().done())
-        self.assertEqual(0, len(sem._waiters))
-
-    @gen_test
-    def test_acquire_timeout(self):
-        sem = locks.Semaphore(2)
-        yield sem.acquire()
-        yield sem.acquire()
-        acquire = sem.acquire(timedelta(seconds=0.01))
-        self.io_loop.call_later(0.02, sem.release)  # Too late.
-        yield gen.sleep(0.3)
-        with self.assertRaises(gen.TimeoutError):
-            yield acquire
-
-        sem.acquire()
-        f = sem.acquire()
-        self.assertFalse(f.done())
-        sem.release()
-        self.assertTrue(f.done())
-
-    @gen_test
-    def test_acquire_timeout_preempted(self):
-        sem = locks.Semaphore(1)
-        yield sem.acquire()
-
-        # This fires before the wait times out.
-        self.io_loop.call_later(0.01, sem.release)
-        acquire = sem.acquire(timedelta(seconds=0.02))
-        yield gen.sleep(0.03)
-        yield acquire  # No TimeoutError.
-
-    def test_release_unacquired(self):
-        # Unbounded releases are allowed, and increment the semaphore's value.
-        sem = locks.Semaphore()
-        sem.release()
-        sem.release()
-
-        # Now the counter is 3. We can acquire three times before blocking.
-        self.assertTrue(sem.acquire().done())
-        self.assertTrue(sem.acquire().done())
-        self.assertTrue(sem.acquire().done())
-        self.assertFalse(sem.acquire().done())
-
-    @gen_test
-    def test_garbage_collection(self):
-        # Test that timed-out waiters are occasionally cleaned from the queue.
-        sem = locks.Semaphore(value=0)
-        futures = [sem.acquire(timedelta(seconds=0.01)) for _ in range(101)]
-
-        future = sem.acquire()
-        self.assertEqual(102, len(sem._waiters))
-
-        # Let first 101 waiters time out, triggering a collection.
-        yield gen.sleep(0.02)
-        self.assertEqual(1, len(sem._waiters))
-
-        # Final waiter is still active.
-        self.assertFalse(future.done())
-        sem.release()
-        self.assertTrue(future.done())
-
-        # Prevent "Future exception was never retrieved" messages.
-        for future in futures:
-            self.assertRaises(TimeoutError, future.result)
-
-
-class SemaphoreContextManagerTest(AsyncTestCase):
-    @gen_test
-    def test_context_manager(self):
-        sem = locks.Semaphore()
-        with (yield sem.acquire()) as yielded:
-            self.assertTrue(yielded is None)
-
-        # Semaphore was released and can be acquired again.
-        self.assertTrue(sem.acquire().done())
-
-    @skipBefore35
-    @gen_test
-    def test_context_manager_async_await(self):
-        # Repeat the above test using 'async with'.
-        sem = locks.Semaphore()
-
-        namespace = exec_test(globals(), locals(), """
-        async def f():
-            async with sem as yielded:
-                self.assertTrue(yielded is None)
-        """)
-        yield namespace['f']()
-
-        # Semaphore was released and can be acquired again.
-        self.assertTrue(sem.acquire().done())
-
-    @gen_test
-    def test_context_manager_exception(self):
-        sem = locks.Semaphore()
-        with self.assertRaises(ZeroDivisionError):
-            with (yield sem.acquire()):
-                1 / 0
-
-        # Semaphore was released and can be acquired again.
-        self.assertTrue(sem.acquire().done())
-
-    @gen_test
-    def test_context_manager_timeout(self):
-        sem = locks.Semaphore()
-        with (yield sem.acquire(timedelta(seconds=0.01))):
-            pass
-
-        # Semaphore was released and can be acquired again.
-        self.assertTrue(sem.acquire().done())
-
-    @gen_test
-    def test_context_manager_timeout_error(self):
-        sem = locks.Semaphore(value=0)
-        with self.assertRaises(gen.TimeoutError):
-            with (yield sem.acquire(timedelta(seconds=0.01))):
-                pass
-
-        # Counter is still 0.
-        self.assertFalse(sem.acquire().done())
-
-    @gen_test
-    def test_context_manager_contended(self):
-        sem = locks.Semaphore()
-        history = []
-
-        @gen.coroutine
-        def f(index):
-            with (yield sem.acquire()):
-                history.append('acquired %d' % index)
-                yield gen.sleep(0.01)
-                history.append('release %d' % index)
-
-        yield [f(i) for i in range(2)]
-
-        expected_history = []
-        for i in range(2):
-            expected_history.extend(['acquired %d' % i, 'release %d' % i])
-
-        self.assertEqual(expected_history, history)
-
-    @gen_test
-    def test_yield_sem(self):
-        # Ensure we catch a "with (yield sem)", which should be
-        # "with (yield sem.acquire())".
-        with self.assertRaises(gen.BadYieldError):
-            with (yield locks.Semaphore()):
-                pass
-
-    def test_context_manager_misuse(self):
-        # Ensure we catch a "with sem", which should be
-        # "with (yield sem.acquire())".
-        with self.assertRaises(RuntimeError):
-            with locks.Semaphore():
-                pass
-
-
-class BoundedSemaphoreTest(AsyncTestCase):
-    def test_release_unacquired(self):
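-        # Unlike a plain Semaphore, releasing above the initial value
-        # raises ValueError instead of growing the counter.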
-        sem = locks.BoundedSemaphore()
-        self.assertRaises(ValueError, sem.release)
-        # Acquire to bring the value to 0.
-        sem.acquire()
-        # Block on acquire().
-        future = sem.acquire()
-        self.assertFalse(future.done())
-        sem.release()
-        self.assertTrue(future.done())
-        # Release brings the value back to 1; one more release would exceed the bound.
-        sem.release()
-        self.assertRaises(ValueError, sem.release)
-
-
-class LockTests(AsyncTestCase):
-    def test_repr(self):
-        lock = locks.Lock()
-        # No errors.
-        repr(lock)
-        lock.acquire()
-        repr(lock)
-
-    def test_acquire_release(self):
-        lock = locks.Lock()
-        self.assertTrue(lock.acquire().done())
-        future = lock.acquire()
-        self.assertFalse(future.done())
-        lock.release()
-        self.assertTrue(future.done())
-
-    @gen_test
-    def test_acquire_fifo(self):
-        lock = locks.Lock()
-        self.assertTrue(lock.acquire().done())
-        N = 5
-        history = []
-
-        @gen.coroutine
-        def f(idx):
-            with (yield lock.acquire()):
-                history.append(idx)
-
-        futures = [f(i) for i in range(N)]
-        self.assertFalse(any(future.done() for future in futures))
-        lock.release()
-        yield futures
-        self.assertEqual(list(range(N)), history)
-
-    @skipBefore35
-    @gen_test
-    def test_acquire_fifo_async_with(self):
-        # Repeat the above test using `async with lock:`
-        # instead of `with (yield lock.acquire()):`.
-        lock = locks.Lock()
-        self.assertTrue(lock.acquire().done())
-        N = 5
-        history = []
-
-        namespace = exec_test(globals(), locals(), """
-        async def f(idx):
-            async with lock:
-                history.append(idx)
-        """)
-        futures = [namespace['f'](i) for i in range(N)]
-        lock.release()
-        yield futures
-        self.assertEqual(list(range(N)), history)
-
-    @gen_test
-    def test_acquire_timeout(self):
-        lock = locks.Lock()
-        lock.acquire()
-        with self.assertRaises(gen.TimeoutError):
-            yield lock.acquire(timeout=timedelta(seconds=0.01))
-
-        # Still locked.
-        self.assertFalse(lock.acquire().done())
-
-    def test_multi_release(self):
-        lock = locks.Lock()
-        self.assertRaises(RuntimeError, lock.release)
-        lock.acquire()
-        lock.release()
-        self.assertRaises(RuntimeError, lock.release)
-
-    @gen_test
-    def test_yield_lock(self):
-        # Ensure we catch a "with (yield lock)", which should be
-        # "with (yield lock.acquire())".
-        with self.assertRaises(gen.BadYieldError):
-            with (yield locks.Lock()):
-                pass
-
-    def test_context_manager_misuse(self):
-        # Ensure we catch a "with lock", which should be
-        # "with (yield lock.acquire())".
-        with self.assertRaises(RuntimeError):
-            with locks.Lock():
-                pass
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/lib/tornado/test/log_test.py b/lib/tornado/test/log_test.py
deleted file mode 100644
index 888964e7b018c1e617a787a75b73d49b432518f5..0000000000000000000000000000000000000000
--- a/lib/tornado/test/log_test.py
+++ /dev/null
@@ -1,241 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2012 Facebook
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-from __future__ import absolute_import, division, print_function
-
-import contextlib
-import glob
-import logging
-import os
-import re
-import subprocess
-import sys
-import tempfile
-import warnings
-
-from tornado.escape import utf8
-from tornado.log import LogFormatter, define_logging_options, enable_pretty_logging
-from tornado.options import OptionParser
-from tornado.test.util import unittest
-from tornado.util import basestring_type
-
-
-@contextlib.contextmanager
-def ignore_bytes_warning():
-    with warnings.catch_warnings():
-        warnings.simplefilter('ignore', category=BytesWarning)
-        yield
-
-
-class LogFormatterTest(unittest.TestCase):
-    # Matches the output of a single logging call (which may be multiple lines
-    # if a traceback was included, so we use the DOTALL option)
-    LINE_RE = re.compile(b"(?s)\x01\\[E [0-9]{6} [0-9]{2}:[0-9]{2}:[0-9]{2} log_test:[0-9]+\\]\x02 (.*)")
-
-    def setUp(self):
-        self.formatter = LogFormatter(color=False)
-        # Fake color support.  We can't guarantee anything about the $TERM
-        # variable when the tests are run, so just patch in some values
-        # for testing.  (testing with color off fails to expose some potential
-        # encoding issues from the control characters)
-        self.formatter._colors = {
-            logging.ERROR: u"\u0001",
-        }
-        self.formatter._normal = u"\u0002"
-        # construct a Logger directly to bypass getLogger's caching
-        self.logger = logging.Logger('LogFormatterTest')
-        self.logger.propagate = False
-        self.tempdir = tempfile.mkdtemp()
-        self.filename = os.path.join(self.tempdir, 'log.out')
-        self.handler = self.make_handler(self.filename)
-        self.handler.setFormatter(self.formatter)
-        self.logger.addHandler(self.handler)
-
-    def tearDown(self):
-        self.handler.close()
-        os.unlink(self.filename)
-        os.rmdir(self.tempdir)
-
-    def make_handler(self, filename):
-        # Base case: default setup without explicit encoding.
-        # In python 2, supports arbitrary byte strings and unicode objects
-        # that contain only ascii.  In python 3, supports ascii-only unicode
-        # strings (but byte strings will be repr'd automatically).
-        return logging.FileHandler(filename)
-
-    def get_output(self):
-        with open(self.filename, "rb") as f:
-            line = f.read().strip()
-            m = LogFormatterTest.LINE_RE.match(line)
-            if m:
-                return m.group(1)
-            else:
-                raise Exception("output didn't match regex: %r" % line)
-
-    def test_basic_logging(self):
-        self.logger.error("foo")
-        self.assertEqual(self.get_output(), b"foo")
-
-    def test_bytes_logging(self):
-        with ignore_bytes_warning():
-            # This will be "\xe9" on python 2 or "b'\xe9'" on python 3
-            self.logger.error(b"\xe9")
-            self.assertEqual(self.get_output(), utf8(repr(b"\xe9")))
-
-    def test_utf8_logging(self):
-        with ignore_bytes_warning():
-            self.logger.error(u"\u00e9".encode("utf8"))
-        if issubclass(bytes, basestring_type):
-            # on python 2, utf8 byte strings (and by extension ascii byte
-            # strings) are passed through as-is.
-            self.assertEqual(self.get_output(), utf8(u"\u00e9"))
-        else:
-            # on python 3, byte strings always get repr'd even if
-            # they're ascii-only, so this degenerates into another
-            # copy of test_bytes_logging.
-            self.assertEqual(self.get_output(), utf8(repr(utf8(u"\u00e9"))))
-
-    def test_bytes_exception_logging(self):
-        try:
-            raise Exception(b'\xe9')
-        except Exception:
-            self.logger.exception('caught exception')
-        # This will be "Exception: \xe9" on python 2 or
-        # "Exception: b'\xe9'" on python 3.
-        output = self.get_output()
-        self.assertRegexpMatches(output, br'Exception.*\\xe9')
-        # The traceback contains newlines, which should not have been escaped.
-        self.assertNotIn(br'\n', output)
-
-
-class UnicodeLogFormatterTest(LogFormatterTest):
-    def make_handler(self, filename):
-        # Adding an explicit encoding configuration allows non-ascii unicode
-        # strings in both python 2 and 3, without changing the behavior
-        # for byte strings.
-        return logging.FileHandler(filename, encoding="utf8")
-
-    def test_unicode_logging(self):
-        self.logger.error(u"\u00e9")
-        self.assertEqual(self.get_output(), utf8(u"\u00e9"))
-
-
-class EnablePrettyLoggingTest(unittest.TestCase):
-    def setUp(self):
-        super(EnablePrettyLoggingTest, self).setUp()
-        self.options = OptionParser()
-        define_logging_options(self.options)
-        self.logger = logging.Logger('tornado.test.log_test.EnablePrettyLoggingTest')
-        self.logger.propagate = False
-
-    def test_log_file(self):
-        tmpdir = tempfile.mkdtemp()
-        try:
-            self.options.log_file_prefix = tmpdir + '/test_log'
-            enable_pretty_logging(options=self.options, logger=self.logger)
-            self.assertEqual(1, len(self.logger.handlers))
-            self.logger.error('hello')
-            self.logger.handlers[0].flush()
-            filenames = glob.glob(tmpdir + '/test_log*')
-            self.assertEqual(1, len(filenames))
-            with open(filenames[0]) as f:
-                self.assertRegexpMatches(f.read(), r'^\[E [^]]*\] hello$')
-        finally:
-            for handler in self.logger.handlers:
-                handler.flush()
-                handler.close()
-            for filename in glob.glob(tmpdir + '/test_log*'):
-                os.unlink(filename)
-            os.rmdir(tmpdir)
-
-    def test_log_file_with_timed_rotating(self):
-        tmpdir = tempfile.mkdtemp()
-        try:
-            self.options.log_file_prefix = tmpdir + '/test_log'
-            self.options.log_rotate_mode = 'time'
-            enable_pretty_logging(options=self.options, logger=self.logger)
-            self.logger.error('hello')
-            self.logger.handlers[0].flush()
-            filenames = glob.glob(tmpdir + '/test_log*')
-            self.assertEqual(1, len(filenames))
-            with open(filenames[0]) as f:
-                self.assertRegexpMatches(
-                    f.read(),
-                    r'^\[E [^]]*\] hello$')
-        finally:
-            for handler in self.logger.handlers:
-                handler.flush()
-                handler.close()
-            for filename in glob.glob(tmpdir + '/test_log*'):
-                os.unlink(filename)
-            os.rmdir(tmpdir)
-
-    def test_wrong_rotate_mode_value(self):
-        try:
-            self.options.log_file_prefix = 'some_path'
-            self.options.log_rotate_mode = 'wrong_mode'
-            self.assertRaises(ValueError, enable_pretty_logging,
-                              options=self.options, logger=self.logger)
-        finally:
-            for handler in self.logger.handlers:
-                handler.flush()
-                handler.close()
-
-
-class LoggingOptionTest(unittest.TestCase):
-    """Test the ability to enable and disable Tornado's logging hooks."""
-    def logs_present(self, statement, args=None):
-        # Each test may manipulate and/or parse the options and then log
-        # a line at the 'info' level.  This level is ignored by the
-        # logging module by default, but Tornado's hooks turn it on, so it
-        # is the easiest way to tell whether those hooks ran.
-        IMPORT = 'from tornado.options import options, parse_command_line'
-        LOG_INFO = 'import logging; logging.info("hello")'
-        program = ';'.join([IMPORT, statement, LOG_INFO])
-        proc = subprocess.Popen(
-            [sys.executable, '-c', program] + (args or []),
-            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        stdout, stderr = proc.communicate()
-        self.assertEqual(proc.returncode, 0, 'process failed: %r' % stdout)
-        return b'hello' in stdout
-
-    def test_default(self):
-        self.assertFalse(self.logs_present('pass'))
-
-    def test_tornado_default(self):
-        self.assertTrue(self.logs_present('parse_command_line()'))
-
-    def test_disable_command_line(self):
-        self.assertFalse(self.logs_present('parse_command_line()',
-                                           ['--logging=none']))
-
-    def test_disable_command_line_case_insensitive(self):
-        self.assertFalse(self.logs_present('parse_command_line()',
-                                           ['--logging=None']))
-
-    def test_disable_code_string(self):
-        self.assertFalse(self.logs_present(
-            'options.logging = "none"; parse_command_line()'))
-
-    def test_disable_code_none(self):
-        self.assertFalse(self.logs_present(
-            'options.logging = None; parse_command_line()'))
-
-    def test_disable_override(self):
-        # command line trumps code defaults
-        self.assertTrue(self.logs_present(
-            'options.logging = None; parse_command_line()',
-            ['--logging=info']))
diff --git a/lib/tornado/test/netutil_test.py b/lib/tornado/test/netutil_test.py
deleted file mode 100644
index 9564290abd2300ae4a85c97380d944add70b0d5f..0000000000000000000000000000000000000000
--- a/lib/tornado/test/netutil_test.py
+++ /dev/null
@@ -1,215 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import errno
-import os
-import signal
-import socket
-from subprocess import Popen
-import sys
-import time
-
-from tornado.netutil import BlockingResolver, ThreadedResolver, is_valid_ip, bind_sockets
-from tornado.stack_context import ExceptionStackContext
-from tornado.testing import AsyncTestCase, gen_test, bind_unused_port
-from tornado.test.util import unittest, skipIfNoNetwork
-
-try:
-    from concurrent import futures
-except ImportError:
-    futures = None
-
-try:
-    import pycares  # type: ignore
-except ImportError:
-    pycares = None
-else:
-    from tornado.platform.caresresolver import CaresResolver
-
-try:
-    import twisted  # type: ignore
-    import twisted.names  # type: ignore
-except ImportError:
-    twisted = None
-else:
-    from tornado.platform.twisted import TwistedResolver
-
-
-class _ResolverTestMixin(object):
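-    # Shared checks for both the callback and Future interfaces of a
-    # resolver; mixed into the concrete resolver test cases below.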
-    def test_localhost(self):
-        self.resolver.resolve('localhost', 80, callback=self.stop)
-        result = self.wait()
-        self.assertIn((socket.AF_INET, ('127.0.0.1', 80)), result)
-
-    @gen_test
-    def test_future_interface(self):
-        addrinfo = yield self.resolver.resolve('localhost', 80,
-                                               socket.AF_UNSPEC)
-        self.assertIn((socket.AF_INET, ('127.0.0.1', 80)),
-                      addrinfo)
-
-
-# It is impossible to quickly and consistently generate an error in name
-# resolution, so test this case separately, using mocks as needed.
-class _ResolverErrorTestMixin(object):
-    def test_bad_host(self):
-        def handler(exc_typ, exc_val, exc_tb):
-            self.stop(exc_val)
-            return True  # Halt propagation.
-
-        with ExceptionStackContext(handler):
-            self.resolver.resolve('an invalid domain', 80, callback=self.stop)
-
-        result = self.wait()
-        self.assertIsInstance(result, Exception)
-
-    @gen_test
-    def test_future_interface_bad_host(self):
-        with self.assertRaises(IOError):
-            yield self.resolver.resolve('an invalid domain', 80,
-                                        socket.AF_UNSPEC)
-
-
-def _failing_getaddrinfo(*args):
-    """Dummy implementation of getaddrinfo for use in mocks"""
-    raise socket.gaierror(errno.EIO, "mock: lookup failed")
-
-
-@skipIfNoNetwork
-class BlockingResolverTest(AsyncTestCase, _ResolverTestMixin):
-    def setUp(self):
-        super(BlockingResolverTest, self).setUp()
-        self.resolver = BlockingResolver(io_loop=self.io_loop)
-
-
-# getaddrinfo-based tests need mocking to reliably generate errors;
-# some configurations are slow to produce errors and take longer than
-# our default timeout.
-class BlockingResolverErrorTest(AsyncTestCase, _ResolverErrorTestMixin):
-    def setUp(self):
-        super(BlockingResolverErrorTest, self).setUp()
-        self.resolver = BlockingResolver(io_loop=self.io_loop)
-        self.real_getaddrinfo = socket.getaddrinfo
-        socket.getaddrinfo = _failing_getaddrinfo
-
-    def tearDown(self):
-        socket.getaddrinfo = self.real_getaddrinfo
-        super(BlockingResolverErrorTest, self).tearDown()
-
-
-@skipIfNoNetwork
-@unittest.skipIf(futures is None, "futures module not present")
-class ThreadedResolverTest(AsyncTestCase, _ResolverTestMixin):
-    def setUp(self):
-        super(ThreadedResolverTest, self).setUp()
-        self.resolver = ThreadedResolver(io_loop=self.io_loop)
-
-    def tearDown(self):
-        self.resolver.close()
-        super(ThreadedResolverTest, self).tearDown()
-
-
-class ThreadedResolverErrorTest(AsyncTestCase, _ResolverErrorTestMixin):
-    def setUp(self):
-        super(ThreadedResolverErrorTest, self).setUp()
-        self.resolver = BlockingResolver(io_loop=self.io_loop)
-        self.real_getaddrinfo = socket.getaddrinfo
-        socket.getaddrinfo = _failing_getaddrinfo
-
-    def tearDown(self):
-        socket.getaddrinfo = self.real_getaddrinfo
-        super(ThreadedResolverErrorTest, self).tearDown()
-
-
-@skipIfNoNetwork
-@unittest.skipIf(futures is None, "futures module not present")
-@unittest.skipIf(sys.platform == 'win32', "preexec_fn not available on win32")
-class ThreadedResolverImportTest(unittest.TestCase):
-    def test_import(self):
-        TIMEOUT = 5
-
-        # Test for a deadlock when importing a module that runs the
-        # ThreadedResolver at import-time. See resolve_test_helper.py for
-        # full explanation.
-        command = [
-            sys.executable,
-            '-c',
-            'import tornado.test.resolve_test_helper']
-
-        start = time.time()
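-        # preexec_fn arms an alarm in the child so a deadlocked import is
-        # killed rather than hanging the test run indefinitely.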
-        popen = Popen(command, preexec_fn=lambda: signal.alarm(TIMEOUT))
-        while time.time() - start < TIMEOUT:
-            return_code = popen.poll()
-            if return_code is not None:
-                self.assertEqual(0, return_code)
-                return  # Success.
-            time.sleep(0.05)
-
-        self.fail("import timed out")
-
-
-# We do not test errors with CaresResolver:
-# Some DNS-hijacking ISPs (e.g. Time Warner) return non-empty results
-# with an NXDOMAIN status code.  Most resolvers treat this as an error;
-# C-ares returns the results, making the "bad_host" tests unreliable.
-# C-ares will try to resolve even malformed names, such as the
-# name with spaces used in this test.
-@skipIfNoNetwork
-@unittest.skipIf(pycares is None, "pycares module not present")
-class CaresResolverTest(AsyncTestCase, _ResolverTestMixin):
-    def setUp(self):
-        super(CaresResolverTest, self).setUp()
-        self.resolver = CaresResolver(io_loop=self.io_loop)
-
-
-# TwistedResolver produces consistent errors in our test cases so we
-# can test the regular and error cases in the same class.
-@skipIfNoNetwork
-@unittest.skipIf(twisted is None, "twisted module not present")
-@unittest.skipIf(getattr(twisted, '__version__', '0.0') < "12.1", "old version of twisted")
-class TwistedResolverTest(AsyncTestCase, _ResolverTestMixin,
-                          _ResolverErrorTestMixin):
-    def setUp(self):
-        super(TwistedResolverTest, self).setUp()
-        self.resolver = TwistedResolver(io_loop=self.io_loop)
-
-
-class IsValidIPTest(unittest.TestCase):
-    def test_is_valid_ip(self):
-        self.assertTrue(is_valid_ip('127.0.0.1'))
-        self.assertTrue(is_valid_ip('4.4.4.4'))
-        self.assertTrue(is_valid_ip('::1'))
-        self.assertTrue(is_valid_ip('2620:0:1cfe:face:b00c::3'))
-        self.assertTrue(not is_valid_ip('www.google.com'))
-        self.assertTrue(not is_valid_ip('localhost'))
-        self.assertTrue(not is_valid_ip('4.4.4.4<'))
-        self.assertTrue(not is_valid_ip(' 127.0.0.1'))
-        self.assertTrue(not is_valid_ip(''))
-        self.assertTrue(not is_valid_ip(' '))
-        self.assertTrue(not is_valid_ip('\n'))
-        self.assertTrue(not is_valid_ip('\x00'))
-
-
-class TestPortAllocation(unittest.TestCase):
-    def test_same_port_allocation(self):
-        if 'TRAVIS' in os.environ:
-            self.skipTest("dual-stack servers often have port conflicts on travis")
-        sockets = bind_sockets(None, 'localhost')
-        try:
-            port = sockets[0].getsockname()[1]
-            self.assertTrue(all(s.getsockname()[1] == port
-                                for s in sockets[1:]))
-        finally:
-            for sock in sockets:
-                sock.close()
-
-    @unittest.skipIf(not hasattr(socket, "SO_REUSEPORT"), "SO_REUSEPORT is not supported")
-    def test_reuse_port(self):
-        sockets = []
-        socket, port = bind_unused_port(reuse_port=True)
-        try:
-            sockets = bind_sockets(port, '127.0.0.1', reuse_port=True)
-            self.assertTrue(all(s.getsockname()[1] == port for s in sockets))
-        finally:
-            socket.close()
-            for sock in sockets:
-                sock.close()
diff --git a/lib/tornado/test/options_test.cfg b/lib/tornado/test/options_test.cfg
deleted file mode 100644
index 4ead46a49a44ba4b51c60649343c1e8fd6e429c0..0000000000000000000000000000000000000000
--- a/lib/tornado/test/options_test.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-port=443
-port=443
-username='李康'
-
-foo_bar='a'
-
-my_path = __file__
diff --git a/lib/tornado/test/options_test.py b/lib/tornado/test/options_test.py
deleted file mode 100644
index bafeea6fd35f4897a094dc6df0e7a1b58a8f519a..0000000000000000000000000000000000000000
--- a/lib/tornado/test/options_test.py
+++ /dev/null
@@ -1,275 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import absolute_import, division, print_function
-
-import datetime
-import os
-import sys
-
-from tornado.options import OptionParser, Error
-from tornado.util import basestring_type, PY3
-from tornado.test.util import unittest
-
-if PY3:
-    from io import StringIO
-else:
-    from cStringIO import StringIO
-
-try:
-    # py33+
-    from unittest import mock  # type: ignore
-except ImportError:
-    try:
-        import mock  # type: ignore
-    except ImportError:
-        mock = None
-
-
-class OptionsTest(unittest.TestCase):
-    def test_parse_command_line(self):
-        options = OptionParser()
-        options.define("port", default=80)
-        options.parse_command_line(["main.py", "--port=443"])
-        self.assertEqual(options.port, 443)
-
-    def test_parse_config_file(self):
-        options = OptionParser()
-        options.define("port", default=80)
-        options.define("username", default='foo')
-        options.define("my_path")
-        config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
-                                   "options_test.cfg")
-        options.parse_config_file(config_path)
-        self.assertEqual(options.port, 443)
-        self.assertEqual(options.username, "李康")
-        self.assertEqual(options.my_path, config_path)
-
-    def test_parse_callbacks(self):
-        options = OptionParser()
-        self.called = False
-
-        def callback():
-            self.called = True
-        options.add_parse_callback(callback)
-
-        # non-final parse doesn't run callbacks
-        options.parse_command_line(["main.py"], final=False)
-        self.assertFalse(self.called)
-
-        # final parse does
-        options.parse_command_line(["main.py"])
-        self.assertTrue(self.called)
-
-        # callbacks can be run more than once on the same options
-        # object if there are multiple final parses
-        self.called = False
-        options.parse_command_line(["main.py"])
-        self.assertTrue(self.called)
-
-    def test_help(self):
-        options = OptionParser()
-        try:
-            orig_stderr = sys.stderr
-            sys.stderr = StringIO()
-            with self.assertRaises(SystemExit):
-                options.parse_command_line(["main.py", "--help"])
-            usage = sys.stderr.getvalue()
-        finally:
-            sys.stderr = orig_stderr
-        self.assertIn("Usage:", usage)
-
-    def test_subcommand(self):
-        base_options = OptionParser()
-        base_options.define("verbose", default=False)
-        sub_options = OptionParser()
-        sub_options.define("foo", type=str)
-        rest = base_options.parse_command_line(
-            ["main.py", "--verbose", "subcommand", "--foo=bar"])
-        self.assertEqual(rest, ["subcommand", "--foo=bar"])
-        self.assertTrue(base_options.verbose)
-        rest2 = sub_options.parse_command_line(rest)
-        self.assertEqual(rest2, [])
-        self.assertEqual(sub_options.foo, "bar")
-
-        # the two option sets are distinct
-        try:
-            orig_stderr = sys.stderr
-            sys.stderr = StringIO()
-            with self.assertRaises(Error):
-                sub_options.parse_command_line(["subcommand", "--verbose"])
-        finally:
-            sys.stderr = orig_stderr
-
-    def test_setattr(self):
-        options = OptionParser()
-        options.define('foo', default=1, type=int)
-        options.foo = 2
-        self.assertEqual(options.foo, 2)
-
-    def test_setattr_type_check(self):
-        # setattr requires that options be the right type and doesn't
-        # parse from string formats.
-        options = OptionParser()
-        options.define('foo', default=1, type=int)
-        with self.assertRaises(Error):
-            options.foo = '2'
-
-    def test_setattr_with_callback(self):
-        values = []
-        options = OptionParser()
-        options.define('foo', default=1, type=int, callback=values.append)
-        options.foo = 2
-        self.assertEqual(values, [2])
-
-    def _sample_options(self):
-        options = OptionParser()
-        options.define('a', default=1)
-        options.define('b', default=2)
-        return options
-
-    def test_iter(self):
-        options = self._sample_options()
-        # OptionParsers always define 'help'.
-        self.assertEqual(set(['a', 'b', 'help']), set(iter(options)))
-
-    def test_getitem(self):
-        options = self._sample_options()
-        self.assertEqual(1, options['a'])
-
-    def test_setitem(self):
-        options = OptionParser()
-        options.define('foo', default=1, type=int)
-        options['foo'] = 2
-        self.assertEqual(options['foo'], 2)
-
-    def test_items(self):
-        options = self._sample_options()
-        # OptionParsers always define 'help'.
-        expected = [('a', 1), ('b', 2), ('help', options.help)]
-        actual = sorted(options.items())
-        self.assertEqual(expected, actual)
-
-    def test_as_dict(self):
-        options = self._sample_options()
-        expected = {'a': 1, 'b': 2, 'help': options.help}
-        self.assertEqual(expected, options.as_dict())
-
-    def test_group_dict(self):
-        options = OptionParser()
-        options.define('a', default=1)
-        options.define('b', group='b_group', default=2)
-
-        frame = sys._getframe(0)
-        this_file = frame.f_code.co_filename
-        self.assertEqual(set(['b_group', '', this_file]), options.groups())
-
-        b_group_dict = options.group_dict('b_group')
-        self.assertEqual({'b': 2}, b_group_dict)
-
-        self.assertEqual({}, options.group_dict('nonexistent'))
-
-    @unittest.skipIf(mock is None, 'mock package not present')
-    def test_mock_patch(self):
-        # ensure that our setattr hooks don't interfere with mock.patch
-        options = OptionParser()
-        options.define('foo', default=1)
-        options.parse_command_line(['main.py', '--foo=2'])
-        self.assertEqual(options.foo, 2)
-
-        with mock.patch.object(options.mockable(), 'foo', 3):
-            self.assertEqual(options.foo, 3)
-        self.assertEqual(options.foo, 2)
-
-        # Try nested patches mixed with explicit sets
-        with mock.patch.object(options.mockable(), 'foo', 4):
-            self.assertEqual(options.foo, 4)
-            options.foo = 5
-            self.assertEqual(options.foo, 5)
-            with mock.patch.object(options.mockable(), 'foo', 6):
-                self.assertEqual(options.foo, 6)
-            self.assertEqual(options.foo, 5)
-        self.assertEqual(options.foo, 2)
-
-    def test_types(self):
-        options = OptionParser()
-        options.define('str', type=str)
-        options.define('basestring', type=basestring_type)
-        options.define('int', type=int)
-        options.define('float', type=float)
-        options.define('datetime', type=datetime.datetime)
-        options.define('timedelta', type=datetime.timedelta)
-        options.parse_command_line(['main.py',
-                                    '--str=asdf',
-                                    '--basestring=qwer',
-                                    '--int=42',
-                                    '--float=1.5',
-                                    '--datetime=2013-04-28 05:16',
-                                    '--timedelta=45s'])
-        self.assertEqual(options.str, 'asdf')
-        self.assertEqual(options.basestring, 'qwer')
-        self.assertEqual(options.int, 42)
-        self.assertEqual(options.float, 1.5)
-        self.assertEqual(options.datetime,
-                         datetime.datetime(2013, 4, 28, 5, 16))
-        self.assertEqual(options.timedelta, datetime.timedelta(seconds=45))
-
-    def test_multiple_string(self):
-        options = OptionParser()
-        options.define('foo', type=str, multiple=True)
-        options.parse_command_line(['main.py', '--foo=a,b,c'])
-        self.assertEqual(options.foo, ['a', 'b', 'c'])
-
-    def test_multiple_int(self):
-        options = OptionParser()
-        options.define('foo', type=int, multiple=True)
-        options.parse_command_line(['main.py', '--foo=1,3,5:7'])
-        self.assertEqual(options.foo, [1, 3, 5, 6, 7])
-
-    def test_error_redefine(self):
-        options = OptionParser()
-        options.define('foo')
-        with self.assertRaises(Error) as cm:
-            options.define('foo')
-        self.assertRegexpMatches(str(cm.exception),
-                                 'Option.*foo.*already defined')
-
-    def test_dash_underscore_cli(self):
-        # Dashes and underscores should be interchangeable.
-        for defined_name in ['foo-bar', 'foo_bar']:
-            for flag in ['--foo-bar=a', '--foo_bar=a']:
-                options = OptionParser()
-                options.define(defined_name)
-                options.parse_command_line(['main.py', flag])
-                # Attr-style access always uses underscores.
-                self.assertEqual(options.foo_bar, 'a')
-                # Dict-style access allows both.
-                self.assertEqual(options['foo-bar'], 'a')
-                self.assertEqual(options['foo_bar'], 'a')
-
-    def test_dash_underscore_file(self):
-        # No matter how an option was defined, it can be set with underscores
-        # in a config file.
-        for defined_name in ['foo-bar', 'foo_bar']:
-            options = OptionParser()
-            options.define(defined_name)
-            options.parse_config_file(os.path.join(os.path.dirname(__file__),
-                                                   "options_test.cfg"))
-            self.assertEqual(options.foo_bar, 'a')
-
-    def test_dash_underscore_introspection(self):
-        # Original names are preserved in introspection APIs.
-        options = OptionParser()
-        options.define('with-dash', group='g')
-        options.define('with_underscore', group='g')
-        all_options = ['help', 'with-dash', 'with_underscore']
-        self.assertEqual(sorted(options), all_options)
-        self.assertEqual(sorted(k for (k, v) in options.items()), all_options)
-        self.assertEqual(sorted(options.as_dict().keys()), all_options)
-
-        self.assertEqual(sorted(options.group_dict('g')),
-                         ['with-dash', 'with_underscore'])
-
-        # --help shows CLI-style names with dashes.
-        buf = StringIO()
-        options.print_help(buf)
-        self.assertIn('--with-dash', buf.getvalue())
-        self.assertIn('--with-underscore', buf.getvalue())
diff --git a/lib/tornado/test/process_test.py b/lib/tornado/test/process_test.py
deleted file mode 100644
index 74c10abf19dfb002df301711167faa6cc36b7a1a..0000000000000000000000000000000000000000
--- a/lib/tornado/test/process_test.py
+++ /dev/null
@@ -1,263 +0,0 @@
-#!/usr/bin/env python
-
-
-from __future__ import absolute_import, division, print_function
-import logging
-import os
-import signal
-import subprocess
-import sys
-from tornado.httpclient import HTTPClient, HTTPError
-from tornado.httpserver import HTTPServer
-from tornado.ioloop import IOLoop
-from tornado.log import gen_log
-from tornado.process import fork_processes, task_id, Subprocess
-from tornado.simple_httpclient import SimpleAsyncHTTPClient
-from tornado.testing import bind_unused_port, ExpectLog, AsyncTestCase, gen_test
-from tornado.test.util import unittest, skipIfNonUnix
-from tornado.web import RequestHandler, Application
-
-
-def skip_if_twisted():
-    if IOLoop.configured_class().__name__.endswith(('TwistedIOLoop',
-                                                    'AsyncIOMainLoop')):
-        raise unittest.SkipTest("Process tests not compatible with "
-                                "TwistedIOLoop or AsyncIOMainLoop")
-
-# Not using AsyncHTTPTestCase because we need control over the IOLoop.
-
-
-@skipIfNonUnix
-class ProcessTest(unittest.TestCase):
-    def get_app(self):
-        class ProcessHandler(RequestHandler):
-            def get(self):
-                if self.get_argument("exit", None):
-                    # must use os._exit instead of sys.exit so unittest's
-                    # exception handler doesn't catch it
-                    os._exit(int(self.get_argument("exit")))
-                if self.get_argument("signal", None):
-                    os.kill(os.getpid(),
-                            int(self.get_argument("signal")))
-                self.write(str(os.getpid()))
-        return Application([("/", ProcessHandler)])
-
-    def tearDown(self):
-        if task_id() is not None:
-            # We're in a child process, and probably got to this point
-            # via an uncaught exception.  If we return now, both
-            # processes will continue with the rest of the test suite.
-            # Exit now so the parent process will restart the child
-            # (since we don't have a clean way to signal failure to
-            # the parent that won't restart)
-            logging.error("aborting child process from tearDown")
-            logging.shutdown()
-            os._exit(1)
-        # In the surviving process, clear the alarm we set earlier
-        signal.alarm(0)
-        super(ProcessTest, self).tearDown()
-
-    def test_multi_process(self):
-        # This test can't work on twisted because we use the global reactor
-        # and have no way to get it back into a sane state after the fork.
-        skip_if_twisted()
-        with ExpectLog(gen_log, "(Starting .* processes|child .* exited|uncaught exception)"):
-            self.assertFalse(IOLoop.initialized())
-            sock, port = bind_unused_port()
-
-            def get_url(path):
-                return "http://127.0.0.1:%d%s" % (port, path)
-            # ensure that none of these processes live too long
-            signal.alarm(5)  # master process
-            try:
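-                # In each child, fork_processes returns that child's task id
-                # (0 to num_processes - 1).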
-                id = fork_processes(3, max_restarts=3)
-                self.assertTrue(id is not None)
-                signal.alarm(5)  # child processes
-            except SystemExit as e:
-                # if we exit cleanly from fork_processes, all the child processes
-                # finished with status 0
-                self.assertEqual(e.code, 0)
-                self.assertTrue(task_id() is None)
-                sock.close()
-                return
-            try:
-                if id in (0, 1):
-                    self.assertEqual(id, task_id())
-                    server = HTTPServer(self.get_app())
-                    server.add_sockets([sock])
-                    IOLoop.current().start()
-                elif id == 2:
-                    self.assertEqual(id, task_id())
-                    sock.close()
-                    # Always use SimpleAsyncHTTPClient here; the curl
-                    # version appears to get confused sometimes if the
-                    # connection gets closed before it's had a chance to
-                    # switch from writing mode to reading mode.
-                    client = HTTPClient(SimpleAsyncHTTPClient)
-
-                    def fetch(url, fail_ok=False):
-                        try:
-                            return client.fetch(get_url(url))
-                        except HTTPError as e:
-                            if not (fail_ok and e.code == 599):
-                                raise
-
-                    # Make two processes exit abnormally
-                    fetch("/?exit=2", fail_ok=True)
-                    fetch("/?exit=3", fail_ok=True)
-
-                    # They've been restarted, so a new fetch will work
-                    int(fetch("/").body)
-
-                    # Now the same with signals
-                    # Disabled because on the mac a process dying with a signal
-                    # can trigger an "Application exited abnormally; send error
-                    # report to Apple?" prompt.
-                    # fetch("/?signal=%d" % signal.SIGTERM, fail_ok=True)
-                    # fetch("/?signal=%d" % signal.SIGABRT, fail_ok=True)
-                    # int(fetch("/").body)
-
-                    # Now kill them normally so they won't be restarted
-                    fetch("/?exit=0", fail_ok=True)
-                    # One process left; watch its pid change
-                    pid = int(fetch("/").body)
-                    fetch("/?exit=4", fail_ok=True)
-                    pid2 = int(fetch("/").body)
-                    self.assertNotEqual(pid, pid2)
-
-                    # Kill the last one so we shut down cleanly
-                    fetch("/?exit=0", fail_ok=True)
-
-                    os._exit(0)
-            except Exception:
-                logging.error("exception in child process %d", id, exc_info=True)
-                raise
-
-
-@skipIfNonUnix
-class SubprocessTest(AsyncTestCase):
-    def test_subprocess(self):
-        if IOLoop.configured_class().__name__.endswith('LayeredTwistedIOLoop'):
-            # This test fails non-deterministically with LayeredTwistedIOLoop.
-            # (the read_until('\n') returns '\n' instead of 'hello\n')
-            # This probably indicates a problem with either TornadoReactor
-            # or TwistedIOLoop, but I haven't been able to track it down
-            # and for now this is just causing spurious travis-ci failures.
-            raise unittest.SkipTest("Subprocess tests not compatible with "
-                                    "LayeredTwistedIOLoop")
-        subproc = Subprocess([sys.executable, '-u', '-i'],
-                             stdin=Subprocess.STREAM,
-                             stdout=Subprocess.STREAM, stderr=subprocess.STDOUT,
-                             io_loop=self.io_loop)
-        self.addCleanup(lambda: (subproc.proc.terminate(), subproc.proc.wait()))
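-        # Wait for the interactive interpreter's ">>> " prompt before writing to stdin.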
-        subproc.stdout.read_until(b'>>> ', self.stop)
-        self.wait()
-        subproc.stdin.write(b"print('hello')\n")
-        subproc.stdout.read_until(b'\n', self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"hello\n")
-
-        subproc.stdout.read_until(b">>> ", self.stop)
-        self.wait()
-        subproc.stdin.write(b"raise SystemExit\n")
-        subproc.stdout.read_until_close(self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"")
-
-    def test_close_stdin(self):
-        # Close the parent's stdin handle and see that the child recognizes it.
-        subproc = Subprocess([sys.executable, '-u', '-i'],
-                             stdin=Subprocess.STREAM,
-                             stdout=Subprocess.STREAM, stderr=subprocess.STDOUT,
-                             io_loop=self.io_loop)
-        self.addCleanup(lambda: (subproc.proc.terminate(), subproc.proc.wait()))
-        subproc.stdout.read_until(b'>>> ', self.stop)
-        self.wait()
-        subproc.stdin.close()
-        subproc.stdout.read_until_close(self.stop)
-        data = self.wait()
-        self.assertEqual(data, b"\n")
-
-    def test_stderr(self):
-        # This test is mysteriously flaky on twisted: it succeeds, but logs
-        # an error of EBADF on closing a file descriptor.
-        skip_if_twisted()
-        subproc = Subprocess([sys.executable, '-u', '-c',
-                              r"import sys; sys.stderr.write('hello\n')"],
-                             stderr=Subprocess.STREAM,
-                             io_loop=self.io_loop)
-        self.addCleanup(lambda: (subproc.proc.terminate(), subproc.proc.wait()))
-        subproc.stderr.read_until(b'\n', self.stop)
-        data = self.wait()
-        self.assertEqual(data, b'hello\n')
-
-    def test_sigchild(self):
-        # Twisted's SIGCHLD handler and Subprocess's conflict with each other.
-        skip_if_twisted()
-        Subprocess.initialize(io_loop=self.io_loop)
-        self.addCleanup(Subprocess.uninitialize)
-        subproc = Subprocess([sys.executable, '-c', 'pass'],
-                             io_loop=self.io_loop)
-        subproc.set_exit_callback(self.stop)
-        ret = self.wait()
-        self.assertEqual(ret, 0)
-        self.assertEqual(subproc.returncode, ret)
-
-    @gen_test
-    def test_sigchild_future(self):
-        skip_if_twisted()
-        Subprocess.initialize()
-        self.addCleanup(Subprocess.uninitialize)
-        subproc = Subprocess([sys.executable, '-c', 'pass'])
-        ret = yield subproc.wait_for_exit()
-        self.assertEqual(ret, 0)
-        self.assertEqual(subproc.returncode, ret)
-
-    def test_sigchild_signal(self):
-        skip_if_twisted()
-        Subprocess.initialize(io_loop=self.io_loop)
-        self.addCleanup(Subprocess.uninitialize)
-        subproc = Subprocess([sys.executable, '-c',
-                              'import time; time.sleep(30)'],
-                             stdout=Subprocess.STREAM,
-                             io_loop=self.io_loop)
-        subproc.set_exit_callback(self.stop)
-        os.kill(subproc.pid, signal.SIGTERM)
-        try:
-            ret = self.wait(timeout=1.0)
-        except AssertionError:
-            # We failed to get the termination signal. This test is
-            # occasionally flaky on pypy, so try to get a little more
-            # information: did the process close its stdout
-            # (indicating that the problem is in the parent process's
-            # signal handling) or did the child process somehow fail
-            # to terminate?
-            subproc.stdout.read_until_close(callback=self.stop)
-            try:
-                self.wait(timeout=1.0)
-            except AssertionError:
-                raise AssertionError("subprocess failed to terminate")
-            else:
-                raise AssertionError("subprocess closed stdout but failed to "
-                                     "get termination signal")
-        self.assertEqual(subproc.returncode, ret)
-        self.assertEqual(ret, -signal.SIGTERM)
-
-    @gen_test
-    def test_wait_for_exit_raise(self):
-        skip_if_twisted()
-        Subprocess.initialize()
-        self.addCleanup(Subprocess.uninitialize)
-        subproc = Subprocess([sys.executable, '-c', 'import sys; sys.exit(1)'])
-        with self.assertRaises(subprocess.CalledProcessError) as cm:
-            yield subproc.wait_for_exit()
-        self.assertEqual(cm.exception.returncode, 1)
-
-    @gen_test
-    def test_wait_for_exit_raise_disabled(self):
-        skip_if_twisted()
-        Subprocess.initialize()
-        self.addCleanup(Subprocess.uninitialize)
-        subproc = Subprocess([sys.executable, '-c', 'import sys; sys.exit(1)'])
-        ret = yield subproc.wait_for_exit(raise_error=False)
-        self.assertEqual(ret, 1)
diff --git a/lib/tornado/test/queues_test.py b/lib/tornado/test/queues_test.py
deleted file mode 100644
index 48ed5e20617462ee76e0ed5c940757cf5a34faaa..0000000000000000000000000000000000000000
--- a/lib/tornado/test/queues_test.py
+++ /dev/null
@@ -1,423 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from __future__ import absolute_import, division, print_function
-from datetime import timedelta
-from random import random
-
-from tornado import gen, queues
-from tornado.gen import TimeoutError
-from tornado.testing import gen_test, AsyncTestCase
-from tornado.test.util import unittest, skipBefore35, exec_test
-
-
-class QueueBasicTest(AsyncTestCase):
-    def test_repr_and_str(self):
-        q = queues.Queue(maxsize=1)
-        self.assertIn(hex(id(q)), repr(q))
-        self.assertNotIn(hex(id(q)), str(q))
-        q.get()
-
-        for q_str in repr(q), str(q):
-            self.assertTrue(q_str.startswith('<Queue'))
-            self.assertIn('maxsize=1', q_str)
-            self.assertIn('getters[1]', q_str)
-            self.assertNotIn('putters', q_str)
-            self.assertNotIn('tasks', q_str)
-
-        q.put(None)
-        q.put(None)
-        # Now the queue is full, this putter blocks.
-        q.put(None)
-
-        for q_str in repr(q), str(q):
-            self.assertNotIn('getters', q_str)
-            self.assertIn('putters[1]', q_str)
-            self.assertIn('tasks=2', q_str)
-
-    def test_order(self):
-        q = queues.Queue()
-        for i in [1, 3, 2]:
-            q.put_nowait(i)
-
-        items = [q.get_nowait() for _ in range(3)]
-        self.assertEqual([1, 3, 2], items)
-
-    @gen_test
-    def test_maxsize(self):
-        self.assertRaises(TypeError, queues.Queue, maxsize=None)
-        self.assertRaises(ValueError, queues.Queue, maxsize=-1)
-
-        q = queues.Queue(maxsize=2)
-        self.assertTrue(q.empty())
-        self.assertFalse(q.full())
-        self.assertEqual(2, q.maxsize)
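-        # put() returns a Future; it is already resolved while the queue has room.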
-        self.assertTrue(q.put(0).done())
-        self.assertTrue(q.put(1).done())
-        self.assertFalse(q.empty())
-        self.assertTrue(q.full())
-        put2 = q.put(2)
-        self.assertFalse(put2.done())
-        self.assertEqual(0, (yield q.get()))  # Make room.
-        self.assertTrue(put2.done())
-        self.assertFalse(q.empty())
-        self.assertTrue(q.full())
-
-
-class QueueGetTest(AsyncTestCase):
-    @gen_test
-    def test_blocking_get(self):
-        q = queues.Queue()
-        q.put_nowait(0)
-        self.assertEqual(0, (yield q.get()))
-
-    def test_nonblocking_get(self):
-        q = queues.Queue()
-        q.put_nowait(0)
-        self.assertEqual(0, q.get_nowait())
-
-    def test_nonblocking_get_exception(self):
-        q = queues.Queue()
-        self.assertRaises(queues.QueueEmpty, q.get_nowait)
-
-    @gen_test
-    def test_get_with_putters(self):
-        q = queues.Queue(1)
-        q.put_nowait(0)
-        put = q.put(1)
-        self.assertEqual(0, (yield q.get()))
-        self.assertIsNone((yield put))
-
-    @gen_test
-    def test_blocking_get_wait(self):
-        q = queues.Queue()
-        q.put(0)
-        self.io_loop.call_later(0.01, q.put, 1)
-        self.io_loop.call_later(0.02, q.put, 2)
-        self.assertEqual(0, (yield q.get(timeout=timedelta(seconds=1))))
-        self.assertEqual(1, (yield q.get(timeout=timedelta(seconds=1))))
-
-    @gen_test
-    def test_get_timeout(self):
-        q = queues.Queue()
-        get_timeout = q.get(timeout=timedelta(seconds=0.01))
-        get = q.get()
-        with self.assertRaises(TimeoutError):
-            yield get_timeout
-
-        q.put_nowait(0)
-        self.assertEqual(0, (yield get))
-
-    @gen_test
-    def test_get_timeout_preempted(self):
-        q = queues.Queue()
-        get = q.get(timeout=timedelta(seconds=0.01))
-        q.put(0)
-        yield gen.sleep(0.02)
-        self.assertEqual(0, (yield get))
-
-    @gen_test
-    def test_get_clears_timed_out_putters(self):
-        q = queues.Queue(1)
-        # First putter succeeds, remainder block.
-        putters = [q.put(i, timedelta(seconds=0.01)) for i in range(10)]
-        put = q.put(10)
-        self.assertEqual(10, len(q._putters))
-        yield gen.sleep(0.02)
-        self.assertEqual(10, len(q._putters))
-        self.assertFalse(put.done())  # Final waiter is still active.
-        q.put(11)
-        self.assertEqual(0, (yield q.get()))  # get() clears the waiters.
-        self.assertEqual(1, len(q._putters))
-        for putter in putters[1:]:
-            self.assertRaises(TimeoutError, putter.result)
-
-    @gen_test
-    def test_get_clears_timed_out_getters(self):
-        q = queues.Queue()
-        getters = [q.get(timedelta(seconds=0.01)) for _ in range(10)]
-        get = q.get()
-        self.assertEqual(11, len(q._getters))
-        yield gen.sleep(0.02)
-        self.assertEqual(11, len(q._getters))
-        self.assertFalse(get.done())  # Final waiter is still active.
-        q.get()  # get() clears the waiters.
-        self.assertEqual(2, len(q._getters))
-        for getter in getters:
-            self.assertRaises(TimeoutError, getter.result)
-
-    @skipBefore35
-    @gen_test
-    def test_async_for(self):
-        q = queues.Queue()
-        for i in range(5):
-            q.put(i)
-
-        namespace = exec_test(globals(), locals(), """
-        async def f():
-            results = []
-            async for i in q:
-                results.append(i)
-                if i == 4:
-                    return results
-        """)
-        results = yield namespace['f']()
-        self.assertEqual(results, list(range(5)))
-
-
-class QueuePutTest(AsyncTestCase):
-    @gen_test
-    def test_blocking_put(self):
-        q = queues.Queue()
-        q.put(0)
-        self.assertEqual(0, q.get_nowait())
-
-    def test_nonblocking_put_exception(self):
-        q = queues.Queue(1)
-        q.put(0)
-        self.assertRaises(queues.QueueFull, q.put_nowait, 1)
-
-    @gen_test
-    def test_put_with_getters(self):
-        q = queues.Queue()
-        get0 = q.get()
-        get1 = q.get()
-        yield q.put(0)
-        self.assertEqual(0, (yield get0))
-        yield q.put(1)
-        self.assertEqual(1, (yield get1))
-
-    @gen_test
-    def test_nonblocking_put_with_getters(self):
-        q = queues.Queue()
-        get0 = q.get()
-        get1 = q.get()
-        q.put_nowait(0)
-        # put_nowait does *not* immediately unblock getters.
-        yield gen.moment
-        self.assertEqual(0, (yield get0))
-        q.put_nowait(1)
-        yield gen.moment
-        self.assertEqual(1, (yield get1))
-
-    @gen_test
-    def test_blocking_put_wait(self):
-        q = queues.Queue(1)
-        q.put_nowait(0)
-        self.io_loop.call_later(0.01, q.get)
-        self.io_loop.call_later(0.02, q.get)
-        futures = [q.put(0), q.put(1)]
-        self.assertFalse(any(f.done() for f in futures))
-        yield futures
-
-    @gen_test
-    def test_put_timeout(self):
-        q = queues.Queue(1)
-        q.put_nowait(0)  # Now it's full.
-        put_timeout = q.put(1, timeout=timedelta(seconds=0.01))
-        put = q.put(2)
-        with self.assertRaises(TimeoutError):
-            yield put_timeout
-
-        self.assertEqual(0, q.get_nowait())
-        # 1 was never put in the queue.
-        self.assertEqual(2, (yield q.get()))
-
-        # Final get() unblocked this putter.
-        yield put
-
-    @gen_test
-    def test_put_timeout_preempted(self):
-        q = queues.Queue(1)
-        q.put_nowait(0)
-        put = q.put(1, timeout=timedelta(seconds=0.01))
-        q.get()
-        yield gen.sleep(0.02)
-        yield put  # No TimeoutError.
-
-    @gen_test
-    def test_put_clears_timed_out_putters(self):
-        q = queues.Queue(1)
-        # First putter succeeds, remainder block.
-        putters = [q.put(i, timedelta(seconds=0.01)) for i in range(10)]
-        put = q.put(10)
-        self.assertEqual(10, len(q._putters))
-        yield gen.sleep(0.02)
-        self.assertEqual(10, len(q._putters))
-        self.assertFalse(put.done())  # Final waiter is still active.
-        q.put(11)  # put() clears the waiters.
-        self.assertEqual(2, len(q._putters))
-        for putter in putters[1:]:
-            self.assertRaises(TimeoutError, putter.result)
-
-    @gen_test
-    def test_put_clears_timed_out_getters(self):
-        q = queues.Queue()
-        getters = [q.get(timedelta(seconds=0.01)) for _ in range(10)]
-        get = q.get()
-        q.get()
-        self.assertEqual(12, len(q._getters))
-        yield gen.sleep(0.02)
-        self.assertEqual(12, len(q._getters))
-        self.assertFalse(get.done())  # Final waiters still active.
-        q.put(0)  # put() clears the waiters.
-        self.assertEqual(1, len(q._getters))
-        self.assertEqual(0, (yield get))
-        for getter in getters:
-            self.assertRaises(TimeoutError, getter.result)
-
-    @gen_test
-    def test_float_maxsize(self):
-        # Non-int maxsize must round down: http://bugs.python.org/issue21723
-        q = queues.Queue(maxsize=1.3)
-        self.assertTrue(q.empty())
-        self.assertFalse(q.full())
-        q.put_nowait(0)
-        q.put_nowait(1)
-        self.assertFalse(q.empty())
-        self.assertTrue(q.full())
-        self.assertRaises(queues.QueueFull, q.put_nowait, 2)
-        self.assertEqual(0, q.get_nowait())
-        self.assertFalse(q.empty())
-        self.assertFalse(q.full())
-
-        yield q.put(2)
-        put = q.put(3)
-        self.assertFalse(put.done())
-        self.assertEqual(1, (yield q.get()))
-        yield put
-        self.assertTrue(q.full())
-
-
-class QueueJoinTest(AsyncTestCase):
-    queue_class = queues.Queue
-
-    def test_task_done_underflow(self):
-        q = self.queue_class()
-        self.assertRaises(ValueError, q.task_done)
-
-    @gen_test
-    def test_task_done(self):
-        q = self.queue_class()
-        for i in range(100):
-            q.put_nowait(i)
-
-        self.accumulator = 0
-
-        @gen.coroutine
-        def worker():
-            while True:
-                item = yield q.get()
-                self.accumulator += item
-                q.task_done()
-                yield gen.sleep(random() * 0.01)
-
-        # Two coroutines share work.
-        worker()
-        worker()
-        yield q.join()
-        self.assertEqual(sum(range(100)), self.accumulator)
-
-    @gen_test
-    def test_task_done_delay(self):
-        # Verify it is task_done(), not get(), that unblocks join().
-        q = self.queue_class()
-        q.put_nowait(0)
-        join = q.join()
-        self.assertFalse(join.done())
-        yield q.get()
-        self.assertFalse(join.done())
-        yield gen.moment
-        self.assertFalse(join.done())
-        q.task_done()
-        self.assertTrue(join.done())
-
-    @gen_test
-    def test_join_empty_queue(self):
-        q = self.queue_class()
-        yield q.join()
-        yield q.join()
-
-    @gen_test
-    def test_join_timeout(self):
-        q = self.queue_class()
-        q.put(0)
-        with self.assertRaises(TimeoutError):
-            yield q.join(timeout=timedelta(seconds=0.01))
-
-
-class PriorityQueueJoinTest(QueueJoinTest):
-    queue_class = queues.PriorityQueue
-
-    @gen_test
-    def test_order(self):
-        q = self.queue_class(maxsize=2)
-        q.put_nowait((1, 'a'))
-        q.put_nowait((0, 'b'))
-        self.assertTrue(q.full())
-        q.put((3, 'c'))
-        q.put((2, 'd'))
-        self.assertEqual((0, 'b'), q.get_nowait())
-        self.assertEqual((1, 'a'), (yield q.get()))
-        self.assertEqual((2, 'd'), q.get_nowait())
-        self.assertEqual((3, 'c'), (yield q.get()))
-        self.assertTrue(q.empty())
-
-
-class LifoQueueJoinTest(QueueJoinTest):
-    queue_class = queues.LifoQueue
-
-    @gen_test
-    def test_order(self):
-        q = self.queue_class(maxsize=2)
-        q.put_nowait(1)
-        q.put_nowait(0)
-        self.assertTrue(q.full())
-        q.put(3)
-        q.put(2)
-        self.assertEqual(3, q.get_nowait())
-        self.assertEqual(2, (yield q.get()))
-        self.assertEqual(0, q.get_nowait())
-        self.assertEqual(1, (yield q.get()))
-        self.assertTrue(q.empty())
-
-
-class ProducerConsumerTest(AsyncTestCase):
-    @gen_test
-    def test_producer_consumer(self):
-        q = queues.Queue(maxsize=3)
-        history = []
-
-        # We don't yield between get() and task_done(), so get() must wait for
-        # the next tick. Otherwise we'd immediately call task_done and unblock
-        # join() before q.put() resumes, and we'd only process the first four
-        # items.
-        @gen.coroutine
-        def consumer():
-            while True:
-                history.append((yield q.get()))
-                q.task_done()
-
-        @gen.coroutine
-        def producer():
-            for item in range(10):
-                yield q.put(item)
-
-        consumer()
-        yield producer()
-        yield q.join()
-        self.assertEqual(list(range(10)), history)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/lib/tornado/test/resolve_test_helper.py b/lib/tornado/test/resolve_test_helper.py
deleted file mode 100644
index 429671962f27c07721be163d614161340a773b79..0000000000000000000000000000000000000000
--- a/lib/tornado/test/resolve_test_helper.py
+++ /dev/null
@@ -1,11 +0,0 @@
-from __future__ import absolute_import, division, print_function
-from tornado.ioloop import IOLoop
-from tornado.netutil import ThreadedResolver
-
-# When this module is imported, it runs getaddrinfo on a thread. Since
-# the hostname is unicode, getaddrinfo attempts to import encodings.idna
-# but blocks on the import lock. Verify that ThreadedResolver avoids
-# this deadlock.
-
-resolver = ThreadedResolver()
-IOLoop.current().run_sync(lambda: resolver.resolve(u'localhost', 80))
diff --git a/lib/tornado/test/routing_test.py b/lib/tornado/test/routing_test.py
deleted file mode 100644
index a1040df32b1572a9a8e9ca013ab16279941e160f..0000000000000000000000000000000000000000
--- a/lib/tornado/test/routing_test.py
+++ /dev/null
@@ -1,224 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-
-from __future__ import absolute_import, division, print_function
-
-from tornado.httputil import HTTPHeaders, HTTPMessageDelegate, HTTPServerConnectionDelegate, ResponseStartLine
-from tornado.routing import HostMatches, PathMatches, ReversibleRouter, Router, Rule, RuleRouter
-from tornado.testing import AsyncHTTPTestCase
-from tornado.web import Application, HTTPError, RequestHandler
-from tornado.wsgi import WSGIContainer
-
-
-class BasicRouter(Router):
-    def find_handler(self, request, **kwargs):
-
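-        # A Router's find_handler may return any HTTPMessageDelegate; this one
-        # answers every request with a fixed 200 "OK" response.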
-        class MessageDelegate(HTTPMessageDelegate):
-            def __init__(self, connection):
-                self.connection = connection
-
-            def finish(self):
-                self.connection.write_headers(
-                    ResponseStartLine("HTTP/1.1", 200, "OK"), HTTPHeaders({"Content-Length": "2"}), b"OK"
-                )
-                self.connection.finish()
-
-        return MessageDelegate(request.connection)
-
-
-class BasicRouterTestCase(AsyncHTTPTestCase):
-    def get_app(self):
-        return BasicRouter()
-
-    def test_basic_router(self):
-        response = self.fetch("/any_request")
-        self.assertEqual(response.body, b"OK")
-
-
-resources = {}
-
-
-class GetResource(RequestHandler):
-    def get(self, path):
-        if path not in resources:
-            raise HTTPError(404)
-
-        self.finish(resources[path])
-
-
-class PostResource(RequestHandler):
-    def post(self, path):
-        resources[path] = self.request.body
-
-
-class HTTPMethodRouter(Router):
-    def __init__(self, app):
-        self.app = app
-
-    def find_handler(self, request, **kwargs):
-        handler = GetResource if request.method == "GET" else PostResource
-        return self.app.get_handler_delegate(request, handler, path_args=[request.path])
-
-
-class HTTPMethodRouterTestCase(AsyncHTTPTestCase):
-    def get_app(self):
-        return HTTPMethodRouter(Application())
-
-    def test_http_method_router(self):
-        response = self.fetch("/post_resource", method="POST", body="data")
-        self.assertEqual(response.code, 200)
-
-        response = self.fetch("/get_resource")
-        self.assertEqual(response.code, 404)
-
-        response = self.fetch("/post_resource")
-        self.assertEqual(response.code, 200)
-        self.assertEqual(response.body, b"data")
-
-
-def _get_named_handler(handler_name):
-    class Handler(RequestHandler):
-        def get(self, *args, **kwargs):
-            if self.application.settings.get("app_name") is not None:
-                self.write(self.application.settings["app_name"] + ": ")
-
-            self.finish(handler_name + ": " + self.reverse_url(handler_name))
-
-    return Handler
-
-
-FirstHandler = _get_named_handler("first_handler")
-SecondHandler = _get_named_handler("second_handler")
-
-
-class CustomRouter(ReversibleRouter):
-    def __init__(self):
-        super(CustomRouter, self).__init__()
-        self.routes = {}
-
-    def add_routes(self, routes):
-        self.routes.update(routes)
-
-    def find_handler(self, request, **kwargs):
-        if request.path in self.routes:
-            app, handler = self.routes[request.path]
-            return app.get_handler_delegate(request, handler)
-
-    def reverse_url(self, name, *args):
-        handler_path = '/' + name
-        return handler_path if handler_path in self.routes else None
-
-
-class CustomRouterTestCase(AsyncHTTPTestCase):
-    def get_app(self):
-        class CustomApplication(Application):
-            def reverse_url(self, name, *args):
-                return router.reverse_url(name, *args)
-
-        router = CustomRouter()
-        app1 = CustomApplication(app_name="app1")
-        app2 = CustomApplication(app_name="app2")
-
-        router.add_routes({
-            "/first_handler": (app1, FirstHandler),
-            "/second_handler": (app2, SecondHandler),
-            "/first_handler_second_app": (app2, FirstHandler),
-        })
-
-        return router
-
-    def test_custom_router(self):
-        response = self.fetch("/first_handler")
-        self.assertEqual(response.body, b"app1: first_handler: /first_handler")
-        response = self.fetch("/second_handler")
-        self.assertEqual(response.body, b"app2: second_handler: /second_handler")
-        response = self.fetch("/first_handler_second_app")
-        self.assertEqual(response.body, b"app2: first_handler: /first_handler")
-
-
-class ConnectionDelegate(HTTPServerConnectionDelegate):
-    def start_request(self, server_conn, request_conn):
-
-        class MessageDelegate(HTTPMessageDelegate):
-            def __init__(self, connection):
-                self.connection = connection
-
-            def finish(self):
-                response_body = b"OK"
-                self.connection.write_headers(
-                    ResponseStartLine("HTTP/1.1", 200, "OK"),
-                    HTTPHeaders({"Content-Length": str(len(response_body))}))
-                self.connection.write(response_body)
-                self.connection.finish()
-
-        return MessageDelegate(request_conn)
-
-
-class RuleRouterTest(AsyncHTTPTestCase):
-    def get_app(self):
-        app = Application()
-
-        def request_callable(request):
-            request.write(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nOK")
-            request.finish()
-
-        app.add_handlers(".*", [
-            (HostMatches("www.example.com"), [
-                (PathMatches("/first_handler"), "tornado.test.routing_test.SecondHandler", {}, "second_handler")
-            ]),
-            Rule(PathMatches("/first_handler"), FirstHandler, name="first_handler"),
-            Rule(PathMatches("/request_callable"), request_callable),
-            ("/connection_delegate", ConnectionDelegate())
-        ])
-
-        return app
-
-    def test_rule_based_router(self):
-        response = self.fetch("/first_handler")
-        self.assertEqual(response.body, b"first_handler: /first_handler")
-        response = self.fetch("/first_handler", headers={'Host': 'www.example.com'})
-        self.assertEqual(response.body, b"second_handler: /first_handler")
-
-        response = self.fetch("/connection_delegate")
-        self.assertEqual(response.body, b"OK")
-
-        response = self.fetch("/request_callable")
-        self.assertEqual(response.body, b"OK")
-
-        response = self.fetch("/404")
-        self.assertEqual(response.code, 404)
-
-
-class WSGIContainerTestCase(AsyncHTTPTestCase):
-    def get_app(self):
-        wsgi_app = WSGIContainer(self.wsgi_app)
-
-        class Handler(RequestHandler):
-            def get(self, *args, **kwargs):
-                self.finish(self.reverse_url("tornado"))
-
-        return RuleRouter([
-            (PathMatches("/tornado.*"), Application([(r"/tornado/test", Handler, {}, "tornado")])),
-            (PathMatches("/wsgi"), wsgi_app),
-        ])
-
-    def wsgi_app(self, environ, start_response):
-        start_response("200 OK", [])
-        return [b"WSGI"]
-
-    def test_wsgi_container(self):
-        response = self.fetch("/tornado/test")
-        self.assertEqual(response.body, b"/tornado/test")
-
-        response = self.fetch("/wsgi")
-        self.assertEqual(response.body, b"WSGI")
diff --git a/lib/tornado/test/runtests.py b/lib/tornado/test/runtests.py
deleted file mode 100644
index b81c5f225ebdbcbbe6aa888c75611c663880c533..0000000000000000000000000000000000000000
--- a/lib/tornado/test/runtests.py
+++ /dev/null
@@ -1,190 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import absolute_import, division, print_function
-import gc
-import locale  # system locale module, not tornado.locale
-import logging
-import operator
-import textwrap
-import sys
-from tornado.httpclient import AsyncHTTPClient
-from tornado.httpserver import HTTPServer
-from tornado.ioloop import IOLoop
-from tornado.netutil import Resolver
-from tornado.options import define, options, add_parse_callback
-from tornado.test.util import unittest
-
-try:
-    reduce  # py2
-except NameError:
-    from functools import reduce  # py3
-
-TEST_MODULES = [
-    'tornado.httputil.doctests',
-    'tornado.iostream.doctests',
-    'tornado.util.doctests',
-    'tornado.test.asyncio_test',
-    'tornado.test.auth_test',
-    'tornado.test.concurrent_test',
-    'tornado.test.curl_httpclient_test',
-    'tornado.test.escape_test',
-    'tornado.test.gen_test',
-    'tornado.test.http1connection_test',
-    'tornado.test.httpclient_test',
-    'tornado.test.httpserver_test',
-    'tornado.test.httputil_test',
-    'tornado.test.import_test',
-    'tornado.test.ioloop_test',
-    'tornado.test.iostream_test',
-    'tornado.test.locale_test',
-    'tornado.test.locks_test',
-    'tornado.test.netutil_test',
-    'tornado.test.log_test',
-    'tornado.test.options_test',
-    'tornado.test.process_test',
-    'tornado.test.queues_test',
-    'tornado.test.routing_test',
-    'tornado.test.simple_httpclient_test',
-    'tornado.test.stack_context_test',
-    'tornado.test.tcpclient_test',
-    'tornado.test.tcpserver_test',
-    'tornado.test.template_test',
-    'tornado.test.testing_test',
-    'tornado.test.twisted_test',
-    'tornado.test.util_test',
-    'tornado.test.web_test',
-    'tornado.test.websocket_test',
-    'tornado.test.windows_test',
-    'tornado.test.wsgi_test',
-]
-
-
-def all():
-    return unittest.defaultTestLoader.loadTestsFromNames(TEST_MODULES)
-
-
-class TornadoTextTestRunner(unittest.TextTestRunner):
-    def run(self, test):
-        result = super(TornadoTextTestRunner, self).run(test)
-        if result.skipped:
-            skip_reasons = set(reason for (test, reason) in result.skipped)
-            self.stream.write(textwrap.fill(
-                "Some tests were skipped because: %s" %
-                ", ".join(sorted(skip_reasons))))
-            self.stream.write("\n")
-        return result
-
-
-class LogCounter(logging.Filter):
-    """Counts the number of WARNING or higher log records."""
-    def __init__(self, *args, **kwargs):
-        # Can't use super() because logging.Filter is an old-style class in py26
-        logging.Filter.__init__(self, *args, **kwargs)
-        self.warning_count = self.error_count = 0
-
-    def filter(self, record):
-        if record.levelno >= logging.ERROR:
-            self.error_count += 1
-        elif record.levelno >= logging.WARNING:
-            self.warning_count += 1
-        return True
-
-
-def main():
-    # The -W command-line option does not work in a virtualenv with
-    # python 3 (as of virtualenv 1.7), so configure warnings
-    # programmatically instead.
-    import warnings
-    # Be strict about most warnings.  This also turns on warnings that are
-    # ignored by default, including DeprecationWarnings and
-    # python 3.2's ResourceWarnings.
-    warnings.filterwarnings("error")
-    # setuptools sometimes gives ImportWarnings about things that are on
-    # sys.path even if they're not being used.
-    warnings.filterwarnings("ignore", category=ImportWarning)
-    # Tornado generally shouldn't use anything deprecated, but some of
-    # our dependencies do (last match wins).
-    warnings.filterwarnings("ignore", category=DeprecationWarning)
-    warnings.filterwarnings("error", category=DeprecationWarning,
-                            module=r"tornado\..*")
-    warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
-    warnings.filterwarnings("error", category=PendingDeprecationWarning,
-                            module=r"tornado\..*")
-    # The unittest module is aggressive about deprecating redundant methods,
-    # leaving some without non-deprecated spellings that work on both
-    # 2.7 and 3.2
-    warnings.filterwarnings("ignore", category=DeprecationWarning,
-                            message="Please use assert.* instead")
-    # unittest2 0.6 on py26 reports these as PendingDeprecationWarnings
-    # instead of DeprecationWarnings.
-    warnings.filterwarnings("ignore", category=PendingDeprecationWarning,
-                            message="Please use assert.* instead")
-    # Twisted 15.0.0 triggers some warnings on py3 with -bb.
-    warnings.filterwarnings("ignore", category=BytesWarning,
-                            module=r"twisted\..*")
-    # The __aiter__ protocol changed in python 3.5.2.
-    # Silence the warning until we can drop 3.5.[01].
-    warnings.filterwarnings("ignore", category=PendingDeprecationWarning,
-                            message=".*legacy __aiter__ protocol")
-    # 3.5.2's PendingDeprecationWarning became a DeprecationWarning in 3.6.
-    warnings.filterwarnings("ignore", category=DeprecationWarning,
-                            message=".*legacy __aiter__ protocol")
-
-    logging.getLogger("tornado.access").setLevel(logging.CRITICAL)
-
-    define('httpclient', type=str, default=None,
-           callback=lambda s: AsyncHTTPClient.configure(
-               s, defaults=dict(allow_ipv6=False)))
-    define('httpserver', type=str, default=None,
-           callback=HTTPServer.configure)
-    define('ioloop', type=str, default=None)
-    define('ioloop_time_monotonic', default=False)
-    define('resolver', type=str, default=None,
-           callback=Resolver.configure)
-    define('debug_gc', type=str, multiple=True,
-           help="A comma-separated list of gc module debug constants, "
-           "e.g. DEBUG_STATS or DEBUG_COLLECTABLE,DEBUG_OBJECTS",
-           callback=lambda values: gc.set_debug(
-               reduce(operator.or_, (getattr(gc, v) for v in values))))
-    define('locale', type=str, default=None,
-           callback=lambda x: locale.setlocale(locale.LC_ALL, x))
-
-    def configure_ioloop():
-        kwargs = {}
-        if options.ioloop_time_monotonic:
-            from tornado.platform.auto import monotonic_time
-            if monotonic_time is None:
-                raise RuntimeError("monotonic clock not found")
-            kwargs['time_func'] = monotonic_time
-        if options.ioloop or kwargs:
-            IOLoop.configure(options.ioloop, **kwargs)
-    add_parse_callback(configure_ioloop)
-
-    log_counter = LogCounter()
-    add_parse_callback(
-        lambda: logging.getLogger().handlers[0].addFilter(log_counter))
-
-    import tornado.testing
-    kwargs = {}
-    if sys.version_info >= (3, 2):
-        # HACK:  unittest.main will make its own changes to the warning
-        # configuration, which may conflict with the settings above
-        # or command-line flags like -bb.  Passing warnings=False
-        # suppresses this behavior, although this looks like an implementation
-        # detail.  http://bugs.python.org/issue15626
-        kwargs['warnings'] = False
-    kwargs['testRunner'] = TornadoTextTestRunner
-    try:
-        tornado.testing.main(**kwargs)
-    finally:
-        # The tests should run clean; consider it a failure if they logged
-        # any warnings or errors. We'd like to ban info logs too, but
-        # we can't count them cleanly due to interactions with LogTrapTestCase.
-        if log_counter.warning_count > 0 or log_counter.error_count > 0:
-            logging.error("logged %d warnings and %d errors",
-                          log_counter.warning_count, log_counter.error_count)
-            sys.exit(1)
-
-
-if __name__ == '__main__':
-    main()
diff --git a/lib/tornado/test/simple_httpclient_test.py b/lib/tornado/test/simple_httpclient_test.py
deleted file mode 100644
index 02d57c5fb0d944409149ca7088bcc0061cd27a3b..0000000000000000000000000000000000000000
--- a/lib/tornado/test/simple_httpclient_test.py
+++ /dev/null
@@ -1,784 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import collections
-from contextlib import closing
-import errno
-import gzip
-import logging
-import os
-import re
-import socket
-import ssl
-import sys
-
-from tornado.escape import to_unicode
-from tornado import gen
-from tornado.httpclient import AsyncHTTPClient
-from tornado.httputil import HTTPHeaders, ResponseStartLine
-from tornado.ioloop import IOLoop
-from tornado.log import gen_log
-from tornado.concurrent import Future
-from tornado.netutil import Resolver, bind_sockets
-from tornado.simple_httpclient import SimpleAsyncHTTPClient
-from tornado.test.httpclient_test import ChunkHandler, CountdownHandler, HelloWorldHandler, RedirectHandler
-from tornado.test import httpclient_test
-from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, ExpectLog
-from tornado.test.util import skipOnTravis, skipIfNoIPv6, refusing_port, unittest, skipBefore35, exec_test
-from tornado.web import RequestHandler, Application, asynchronous, url, stream_request_body
-
-
-class SimpleHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase):
-    def get_http_client(self):
-        client = SimpleAsyncHTTPClient(io_loop=self.io_loop,
-                                       force_instance=True)
-        self.assertTrue(isinstance(client, SimpleAsyncHTTPClient))
-        return client
-
-
-class TriggerHandler(RequestHandler):
-    def initialize(self, queue, wake_callback):
-        self.queue = queue
-        self.wake_callback = wake_callback
-
-    @asynchronous
-    def get(self):
-        logging.debug("queuing trigger")
-        self.queue.append(self.finish)
-        if self.get_argument("wake", "true") == "true":
-            self.wake_callback()
-
-
-class HangHandler(RequestHandler):
-    @asynchronous
-    def get(self):
-        pass
-
-
-class ContentLengthHandler(RequestHandler):
-    def get(self):
-        self.set_header("Content-Length", self.get_argument("value"))
-        self.write("ok")
-
-
-class HeadHandler(RequestHandler):
-    def head(self):
-        self.set_header("Content-Length", "7")
-
-
-class OptionsHandler(RequestHandler):
-    def options(self):
-        self.set_header("Access-Control-Allow-Origin", "*")
-        self.write("ok")
-
-
-class NoContentHandler(RequestHandler):
-    def get(self):
-        self.set_status(204)
-        self.finish()
-
-
-class SeeOtherPostHandler(RequestHandler):
-    def post(self):
-        redirect_code = int(self.request.body)
-        assert redirect_code in (302, 303), "unexpected body %r" % self.request.body
-        self.set_header("Location", "/see_other_get")
-        self.set_status(redirect_code)
-
-
-class SeeOtherGetHandler(RequestHandler):
-    def get(self):
-        if self.request.body:
-            raise Exception("unexpected body %r" % self.request.body)
-        self.write("ok")
-
-
-class HostEchoHandler(RequestHandler):
-    def get(self):
-        self.write(self.request.headers["Host"])
-
-
-class NoContentLengthHandler(RequestHandler):
-    @asynchronous
-    def get(self):
-        if self.request.version.startswith('HTTP/1'):
-            # Emulate the old HTTP/1.0 behavior of returning a body with no
-            # content-length.  Tornado handles content-length at the framework
-            # level so we have to go around it.
-            stream = self.request.connection.detach()
-            stream.write(b"HTTP/1.0 200 OK\r\n\r\n"
-                         b"hello")
-            stream.close()
-        else:
-            self.finish('HTTP/1 required')
-
-
-class EchoPostHandler(RequestHandler):
-    def post(self):
-        self.write(self.request.body)
-
-
-@stream_request_body
-class RespondInPrepareHandler(RequestHandler):
-    def prepare(self):
-        self.set_status(403)
-        self.finish("forbidden")
-
-
-class SimpleHTTPClientTestMixin(object):
-    def get_app(self):
-        # callable objects to finish pending /trigger requests
-        self.triggers = collections.deque()
-        return Application([
-            url("/trigger", TriggerHandler, dict(queue=self.triggers,
-                                                 wake_callback=self.stop)),
-            url("/chunk", ChunkHandler),
-            url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
-            url("/hang", HangHandler),
-            url("/hello", HelloWorldHandler),
-            url("/content_length", ContentLengthHandler),
-            url("/head", HeadHandler),
-            url("/options", OptionsHandler),
-            url("/no_content", NoContentHandler),
-            url("/see_other_post", SeeOtherPostHandler),
-            url("/see_other_get", SeeOtherGetHandler),
-            url("/host_echo", HostEchoHandler),
-            url("/no_content_length", NoContentLengthHandler),
-            url("/echo_post", EchoPostHandler),
-            url("/respond_in_prepare", RespondInPrepareHandler),
-            url("/redirect", RedirectHandler),
-        ], gzip=True)
-
-    def test_singleton(self):
-        # Class "constructor" reuses objects on the same IOLoop
-        self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is
-                        SimpleAsyncHTTPClient(self.io_loop))
-        # unless force_instance is used
-        self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
-                        SimpleAsyncHTTPClient(self.io_loop,
-                                              force_instance=True))
-        # different IOLoops use different objects
-        with closing(IOLoop()) as io_loop2:
-            self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
-                            SimpleAsyncHTTPClient(io_loop2))
-
-    def test_connection_limit(self):
-        with closing(self.create_client(max_clients=2)) as client:
-            self.assertEqual(client.max_clients, 2)
-            seen = []
-            # Send 4 requests.  Two can be sent immediately, while the others
-            # will be queued
-            for i in range(4):
-                client.fetch(self.get_url("/trigger"),
-                             lambda response, i=i: (seen.append(i), self.stop()))
-            self.wait(condition=lambda: len(self.triggers) == 2)
-            self.assertEqual(len(client.queue), 2)
-
-            # Finish the first two requests and let the next two through
-            self.triggers.popleft()()
-            self.triggers.popleft()()
-            self.wait(condition=lambda: (len(self.triggers) == 2 and
-                                         len(seen) == 2))
-            self.assertEqual(set(seen), set([0, 1]))
-            self.assertEqual(len(client.queue), 0)
-
-            # Finish all the pending requests
-            self.triggers.popleft()()
-            self.triggers.popleft()()
-            self.wait(condition=lambda: len(seen) == 4)
-            self.assertEqual(set(seen), set([0, 1, 2, 3]))
-            self.assertEqual(len(self.triggers), 0)
-
-    def test_redirect_connection_limit(self):
-        # following redirects should not consume additional connections
-        with closing(self.create_client(max_clients=1)) as client:
-            client.fetch(self.get_url('/countdown/3'), self.stop,
-                         max_redirects=3)
-            response = self.wait()
-            response.rethrow()
-
-    def test_gzip(self):
-        # All the tests in this file should be using gzip, but this test
-        # ensures that it is in fact getting compressed.
-        # Setting Accept-Encoding manually bypasses the client's
-        # decompression so we can see the raw data.
-        response = self.fetch("/chunk", use_gzip=False,
-                              headers={"Accept-Encoding": "gzip"})
-        self.assertEqual(response.headers["Content-Encoding"], "gzip")
-        self.assertNotEqual(response.body, b"asdfqwer")
-        # Our test data gets bigger when gzipped.  Oops.  :)
-        # Chunked encoding bypasses the MIN_LENGTH check.
-        self.assertEqual(len(response.body), 34)
-        f = gzip.GzipFile(mode="r", fileobj=response.buffer)
-        self.assertEqual(f.read(), b"asdfqwer")
-
-    def test_max_redirects(self):
-        response = self.fetch("/countdown/5", max_redirects=3)
-        self.assertEqual(302, response.code)
-        # We requested 5, followed three redirects for 4, 3, 2, then the last
-        # unfollowed redirect is to 1.
-        self.assertTrue(response.request.url.endswith("/countdown/5"))
-        self.assertTrue(response.effective_url.endswith("/countdown/2"))
-        self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
-
-    def test_header_reuse(self):
-        # Apps may reuse a headers object if they are only passing in constant
-        # headers like user-agent.  The header object should not be modified.
-        headers = HTTPHeaders({'User-Agent': 'Foo'})
-        self.fetch("/hello", headers=headers)
-        self.assertEqual(list(headers.get_all()), [('User-Agent', 'Foo')])
-
-    def test_see_other_redirect(self):
-        for code in (302, 303):
-            response = self.fetch("/see_other_post", method="POST", body="%d" % code)
-            self.assertEqual(200, response.code)
-            self.assertTrue(response.request.url.endswith("/see_other_post"))
-            self.assertTrue(response.effective_url.endswith("/see_other_get"))
-            # request is the original request and is still a POST
-            self.assertEqual("POST", response.request.method)
-
-    @skipOnTravis
-    def test_connect_timeout(self):
-        timeout = 0.1
-        timeout_min, timeout_max = 0.099, 1.0
-
-        class TimeoutResolver(Resolver):
-            def resolve(self, *args, **kwargs):
-                return Future()  # never completes
-
-        with closing(self.create_client(resolver=TimeoutResolver())) as client:
-            client.fetch(self.get_url('/hello'), self.stop,
-                         connect_timeout=timeout)
-            response = self.wait()
-            self.assertEqual(response.code, 599)
-            self.assertTrue(timeout_min < response.request_time < timeout_max,
-                            response.request_time)
-            self.assertEqual(str(response.error), "HTTP 599: Timeout while connecting")
-
-    @skipOnTravis
-    def test_request_timeout(self):
-        timeout = 0.1
-        timeout_min, timeout_max = 0.099, 0.15
-        if os.name == 'nt':
-            timeout = 0.5
-            timeout_min, timeout_max = 0.4, 0.6
-
-        response = self.fetch('/trigger?wake=false', request_timeout=timeout)
-        self.assertEqual(response.code, 599)
-        self.assertTrue(timeout_min < response.request_time < timeout_max,
-                        response.request_time)
-        self.assertEqual(str(response.error), "HTTP 599: Timeout during request")
-        # trigger the hanging request to let it clean up after itself
-        self.triggers.popleft()()
-
-    @skipIfNoIPv6
-    def test_ipv6(self):
-        try:
-            [sock] = bind_sockets(None, '::1', family=socket.AF_INET6)
-            port = sock.getsockname()[1]
-            self.http_server.add_socket(sock)
-        except socket.gaierror as e:
-            if e.args[0] == socket.EAI_ADDRFAMILY:
-                # python supports ipv6, but it's not configured on the network
-                # interface, so skip this test.
-                return
-            raise
-        url = '%s://[::1]:%d/hello' % (self.get_protocol(), port)
-
-        # ipv6 is currently enabled by default but can be disabled
-        self.http_client.fetch(url, self.stop, allow_ipv6=False)
-        response = self.wait()
-        self.assertEqual(response.code, 599)
-
-        self.http_client.fetch(url, self.stop)
-        response = self.wait()
-        self.assertEqual(response.body, b"Hello world!")
-
-    def xtest_multiple_content_length_accepted(self):
-        response = self.fetch("/content_length?value=2,2")
-        self.assertEqual(response.body, b"ok")
-        response = self.fetch("/content_length?value=2,%202,2")
-        self.assertEqual(response.body, b"ok")
-
-        response = self.fetch("/content_length?value=2,4")
-        self.assertEqual(response.code, 599)
-        response = self.fetch("/content_length?value=2,%202,3")
-        self.assertEqual(response.code, 599)
-
-    def test_head_request(self):
-        response = self.fetch("/head", method="HEAD")
-        self.assertEqual(response.code, 200)
-        self.assertEqual(response.headers["content-length"], "7")
-        self.assertFalse(response.body)
-
-    def test_options_request(self):
-        response = self.fetch("/options", method="OPTIONS")
-        self.assertEqual(response.code, 200)
-        self.assertEqual(response.headers["content-length"], "2")
-        self.assertEqual(response.headers["access-control-allow-origin"], "*")
-        self.assertEqual(response.body, b"ok")
-
-    def test_no_content(self):
-        response = self.fetch("/no_content")
-        self.assertEqual(response.code, 204)
-        # 204 status shouldn't have a content-length
-        #
-        # Tests with a content-length header are included below
-        # in HTTP204NoContentTestCase.
-        self.assertNotIn("Content-Length", response.headers)
-
-    def test_host_header(self):
-        host_re = re.compile(b"^localhost:[0-9]+$")
-        response = self.fetch("/host_echo")
-        self.assertTrue(host_re.match(response.body))
-
-        url = self.get_url("/host_echo").replace("http://", "http://me:secret@")
-        self.http_client.fetch(url, self.stop)
-        response = self.wait()
-        self.assertTrue(host_re.match(response.body), response.body)
-
-    def test_connection_refused(self):
-        cleanup_func, port = refusing_port()
-        self.addCleanup(cleanup_func)
-        with ExpectLog(gen_log, ".*", required=False):
-            self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
-            response = self.wait()
-        self.assertEqual(599, response.code)
-
-        if sys.platform != 'cygwin':
-            # cygwin returns EPERM instead of ECONNREFUSED here
-            contains_errno = str(errno.ECONNREFUSED) in str(response.error)
-            if not contains_errno and hasattr(errno, "WSAECONNREFUSED"):
-                contains_errno = str(errno.WSAECONNREFUSED) in str(response.error)
-            self.assertTrue(contains_errno, response.error)
-            # This is usually "Connection refused".
-            # On windows, strerror is broken and returns "Unknown error".
-            expected_message = os.strerror(errno.ECONNREFUSED)
-            self.assertTrue(expected_message in str(response.error),
-                            response.error)
-
-    def test_queue_timeout(self):
-        with closing(self.create_client(max_clients=1)) as client:
-            client.fetch(self.get_url('/trigger'), self.stop,
-                         request_timeout=10)
-            # Wait for the trigger request to block, not complete.
-            self.wait()
-            client.fetch(self.get_url('/hello'), self.stop,
-                         connect_timeout=0.1)
-            response = self.wait()
-
-            self.assertEqual(response.code, 599)
-            self.assertTrue(response.request_time < 1, response.request_time)
-            self.assertEqual(str(response.error), "HTTP 599: Timeout in request queue")
-            self.triggers.popleft()()
-            self.wait()
-
-    def test_no_content_length(self):
-        response = self.fetch("/no_content_length")
-        if response.body == b"HTTP/1 required":
-            self.skipTest("requires HTTP/1.x")
-        else:
-            self.assertEquals(b"hello", response.body)
-
-    def sync_body_producer(self, write):
-        write(b'1234')
-        write(b'5678')
-
-    @gen.coroutine
-    def async_body_producer(self, write):
-        yield write(b'1234')
-        yield gen.Task(IOLoop.current().add_callback)
-        yield write(b'5678')
-
-    def test_sync_body_producer_chunked(self):
-        response = self.fetch("/echo_post", method="POST",
-                              body_producer=self.sync_body_producer)
-        response.rethrow()
-        self.assertEqual(response.body, b"12345678")
-
-    def test_sync_body_producer_content_length(self):
-        response = self.fetch("/echo_post", method="POST",
-                              body_producer=self.sync_body_producer,
-                              headers={'Content-Length': '8'})
-        response.rethrow()
-        self.assertEqual(response.body, b"12345678")
-
-    def test_async_body_producer_chunked(self):
-        response = self.fetch("/echo_post", method="POST",
-                              body_producer=self.async_body_producer)
-        response.rethrow()
-        self.assertEqual(response.body, b"12345678")
-
-    def test_async_body_producer_content_length(self):
-        response = self.fetch("/echo_post", method="POST",
-                              body_producer=self.async_body_producer,
-                              headers={'Content-Length': '8'})
-        response.rethrow()
-        self.assertEqual(response.body, b"12345678")
-
-    @skipBefore35
-    def test_native_body_producer_chunked(self):
-        namespace = exec_test(globals(), locals(), """
-        async def body_producer(write):
-            await write(b'1234')
-            await gen.Task(IOLoop.current().add_callback)
-            await write(b'5678')
-        """)
-        response = self.fetch("/echo_post", method="POST",
-                              body_producer=namespace["body_producer"])
-        response.rethrow()
-        self.assertEqual(response.body, b"12345678")
-
-    @skipBefore35
-    def test_native_body_producer_content_length(self):
-        namespace = exec_test(globals(), locals(), """
-        async def body_producer(write):
-            await write(b'1234')
-            await gen.Task(IOLoop.current().add_callback)
-            await write(b'5678')
-        """)
-        response = self.fetch("/echo_post", method="POST",
-                              body_producer=namespace["body_producer"],
-                              headers={'Content-Length': '8'})
-        response.rethrow()
-        self.assertEqual(response.body, b"12345678")
-
-    def test_100_continue(self):
-        response = self.fetch("/echo_post", method="POST",
-                              body=b"1234",
-                              expect_100_continue=True)
-        self.assertEqual(response.body, b"1234")
-
-    def test_100_continue_early_response(self):
-        def body_producer(write):
-            raise Exception("should not be called")
-        response = self.fetch("/respond_in_prepare", method="POST",
-                              body_producer=body_producer,
-                              expect_100_continue=True)
-        self.assertEqual(response.code, 403)
-
-    def test_streaming_follow_redirects(self):
-        # When following redirects, header and streaming callbacks
-        # should only be called for the final result.
-        # TODO(bdarnell): this test belongs in httpclient_test instead of
-        # simple_httpclient_test, but it fails with the version of libcurl
-        # available on travis-ci. Move it when that has been upgraded
-        # or we have a better framework to skip tests based on curl version.
-        headers = []
-        chunks = []
-        self.fetch("/redirect?url=/hello",
-                   header_callback=headers.append,
-                   streaming_callback=chunks.append)
-        chunks = list(map(to_unicode, chunks))
-        self.assertEqual(chunks, ['Hello world!'])
-        # Make sure we only got one set of headers.
-        num_start_lines = len([h for h in headers if h.startswith("HTTP/")])
-        self.assertEqual(num_start_lines, 1)
-
-
-class SimpleHTTPClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPTestCase):
-    def setUp(self):
-        super(SimpleHTTPClientTestCase, self).setUp()
-        self.http_client = self.create_client()
-
-    def create_client(self, **kwargs):
-        return SimpleAsyncHTTPClient(self.io_loop, force_instance=True,
-                                     **kwargs)
-
-
-class SimpleHTTPSClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPSTestCase):
-    def setUp(self):
-        super(SimpleHTTPSClientTestCase, self).setUp()
-        self.http_client = self.create_client()
-
-    def create_client(self, **kwargs):
-        return SimpleAsyncHTTPClient(self.io_loop, force_instance=True,
-                                     defaults=dict(validate_cert=False),
-                                     **kwargs)
-
-    def test_ssl_options(self):
-        resp = self.fetch("/hello", ssl_options={})
-        self.assertEqual(resp.body, b"Hello world!")
-
-    @unittest.skipIf(not hasattr(ssl, 'SSLContext'),
-                     'ssl.SSLContext not present')
-    def test_ssl_context(self):
-        resp = self.fetch("/hello",
-                          ssl_options=ssl.SSLContext(ssl.PROTOCOL_SSLv23))
-        self.assertEqual(resp.body, b"Hello world!")
-
-    def test_ssl_options_handshake_fail(self):
-        with ExpectLog(gen_log, "SSL Error|Uncaught exception",
-                       required=False):
-            resp = self.fetch(
-                "/hello", ssl_options=dict(cert_reqs=ssl.CERT_REQUIRED))
-        self.assertRaises(ssl.SSLError, resp.rethrow)
-
-    @unittest.skipIf(not hasattr(ssl, 'SSLContext'),
-                     'ssl.SSLContext not present')
-    def test_ssl_context_handshake_fail(self):
-        with ExpectLog(gen_log, "SSL Error|Uncaught exception"):
-            ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
-            ctx.verify_mode = ssl.CERT_REQUIRED
-            resp = self.fetch("/hello", ssl_options=ctx)
-        self.assertRaises(ssl.SSLError, resp.rethrow)
-
-    def test_error_logging(self):
-        # No stack traces are logged for SSL errors (in this case,
-        # failure to validate the testing self-signed cert).
-        # The SSLError is exposed through ssl.SSLError.
-        with ExpectLog(gen_log, '.*') as expect_log:
-            response = self.fetch("/", validate_cert=True)
-            self.assertEqual(response.code, 599)
-            self.assertIsInstance(response.error, ssl.SSLError)
-        self.assertFalse(expect_log.logged_stack)
-
-
-class CreateAsyncHTTPClientTestCase(AsyncTestCase):
-    def setUp(self):
-        super(CreateAsyncHTTPClientTestCase, self).setUp()
-        self.saved = AsyncHTTPClient._save_configuration()
-
-    def tearDown(self):
-        AsyncHTTPClient._restore_configuration(self.saved)
-        super(CreateAsyncHTTPClientTestCase, self).tearDown()
-
-    def test_max_clients(self):
-        AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
-        with closing(AsyncHTTPClient(
-                self.io_loop, force_instance=True)) as client:
-            self.assertEqual(client.max_clients, 10)
-        with closing(AsyncHTTPClient(
-                self.io_loop, max_clients=11, force_instance=True)) as client:
-            self.assertEqual(client.max_clients, 11)
-
-        # Now configure max_clients statically and try overriding it
-        # with each way max_clients can be passed
-        AsyncHTTPClient.configure(SimpleAsyncHTTPClient, max_clients=12)
-        with closing(AsyncHTTPClient(
-                self.io_loop, force_instance=True)) as client:
-            self.assertEqual(client.max_clients, 12)
-        with closing(AsyncHTTPClient(
-                self.io_loop, max_clients=13, force_instance=True)) as client:
-            self.assertEqual(client.max_clients, 13)
-        with closing(AsyncHTTPClient(
-                self.io_loop, max_clients=14, force_instance=True)) as client:
-            self.assertEqual(client.max_clients, 14)
-
-
-class HTTP100ContinueTestCase(AsyncHTTPTestCase):
-    def respond_100(self, request):
-        self.http1 = request.version.startswith('HTTP/1.')
-        if not self.http1:
-            request.connection.write_headers(ResponseStartLine('', 200, 'OK'),
-                                             HTTPHeaders())
-            request.connection.finish()
-            return
-        self.request = request
-        self.request.connection.stream.write(
-            b"HTTP/1.1 100 CONTINUE\r\n\r\n",
-            self.respond_200)
-
-    def respond_200(self):
-        self.request.connection.stream.write(
-            b"HTTP/1.1 200 OK\r\nContent-Length: 1\r\n\r\nA",
-            self.request.connection.stream.close)
-
-    def get_app(self):
-        # Not a full Application, but works as an HTTPServer callback
-        return self.respond_100
-
-    def test_100_continue(self):
-        res = self.fetch('/')
-        if not self.http1:
-            self.skipTest("requires HTTP/1.x")
-        self.assertEqual(res.body, b'A')
-
-
-class HTTP204NoContentTestCase(AsyncHTTPTestCase):
-    def respond_204(self, request):
-        self.http1 = request.version.startswith('HTTP/1.')
-        if not self.http1:
-            # Close the request cleanly in HTTP/2; it will be skipped anyway.
-            request.connection.write_headers(ResponseStartLine('', 200, 'OK'),
-                                             HTTPHeaders())
-            request.connection.finish()
-            return
-
-        # A 204 response never has a body, even if it doesn't have a content-length
-        # (which would otherwise mean read-until-close).  We simulate here a
-        # server that sends no content length and does not close the connection.
-        #
-        # Tests of a 204 response with no Content-Length header are included
-        # in SimpleHTTPClientTestMixin.
-        stream = request.connection.detach()
-        stream.write(b"HTTP/1.1 204 No content\r\n")
-        if request.arguments.get("error", [False])[-1]:
-            stream.write(b"Content-Length: 5\r\n")
-        else:
-            stream.write(b"Content-Length: 0\r\n")
-        stream.write(b"\r\n")
-        stream.close()
-
-    def get_app(self):
-        return self.respond_204
-
-    def test_204_no_content(self):
-        resp = self.fetch('/')
-        if not self.http1:
-            self.skipTest("requires HTTP/1.x")
-        self.assertEqual(resp.code, 204)
-        self.assertEqual(resp.body, b'')
-
-    def test_204_invalid_content_length(self):
-        # 204 status with non-zero content length is malformed
-        with ExpectLog(gen_log, ".*Response with code 204 should not have body"):
-            response = self.fetch("/?error=1")
-            if not self.http1:
-                self.skipTest("requires HTTP/1.x")
-            if self.http_client.configured_class != SimpleAsyncHTTPClient:
-                self.skipTest("curl client accepts invalid headers")
-            self.assertEqual(response.code, 599)
-
-
-class HostnameMappingTestCase(AsyncHTTPTestCase):
-    def setUp(self):
-        super(HostnameMappingTestCase, self).setUp()
-        self.http_client = SimpleAsyncHTTPClient(
-            self.io_loop,
-            hostname_mapping={
-                'www.example.com': '127.0.0.1',
-                ('foo.example.com', 8000): ('127.0.0.1', self.get_http_port()),
-            })
-
-    def get_app(self):
-        return Application([url("/hello", HelloWorldHandler), ])
-
-    def test_hostname_mapping(self):
-        self.http_client.fetch(
-            'http://www.example.com:%d/hello' % self.get_http_port(), self.stop)
-        response = self.wait()
-        response.rethrow()
-        self.assertEqual(response.body, b'Hello world!')
-
-    def test_port_mapping(self):
-        self.http_client.fetch('http://foo.example.com:8000/hello', self.stop)
-        response = self.wait()
-        response.rethrow()
-        self.assertEqual(response.body, b'Hello world!')
-
-
-class ResolveTimeoutTestCase(AsyncHTTPTestCase):
-    def setUp(self):
-        # Dummy Resolver subclass that never invokes its callback.
-        class BadResolver(Resolver):
-            def resolve(self, *args, **kwargs):
-                pass
-
-        super(ResolveTimeoutTestCase, self).setUp()
-        self.http_client = SimpleAsyncHTTPClient(
-            self.io_loop,
-            resolver=BadResolver())
-
-    def get_app(self):
-        return Application([url("/hello", HelloWorldHandler), ])
-
-    def test_resolve_timeout(self):
-        response = self.fetch('/hello', connect_timeout=0.1)
-        self.assertEqual(response.code, 599)
-
-
-class MaxHeaderSizeTest(AsyncHTTPTestCase):
-    def get_app(self):
-        class SmallHeaders(RequestHandler):
-            def get(self):
-                self.set_header("X-Filler", "a" * 100)
-                self.write("ok")
-
-        class LargeHeaders(RequestHandler):
-            def get(self):
-                self.set_header("X-Filler", "a" * 1000)
-                self.write("ok")
-
-        return Application([('/small', SmallHeaders),
-                            ('/large', LargeHeaders)])
-
-    def get_http_client(self):
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_header_size=1024)
-
-    def test_small_headers(self):
-        response = self.fetch('/small')
-        response.rethrow()
-        self.assertEqual(response.body, b'ok')
-
-    def test_large_headers(self):
-        with ExpectLog(gen_log, "Unsatisfiable read"):
-            response = self.fetch('/large')
-        self.assertEqual(response.code, 599)
-
-
-class MaxBodySizeTest(AsyncHTTPTestCase):
-    def get_app(self):
-        class SmallBody(RequestHandler):
-            def get(self):
-                self.write("a" * 1024 * 64)
-
-        class LargeBody(RequestHandler):
-            def get(self):
-                self.write("a" * 1024 * 100)
-
-        return Application([('/small', SmallBody),
-                            ('/large', LargeBody)])
-
-    def get_http_client(self):
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_body_size=1024 * 64)
-
-    def test_small_body(self):
-        response = self.fetch('/small')
-        response.rethrow()
-        self.assertEqual(response.body, b'a' * 1024 * 64)
-
-    def test_large_body(self):
-        with ExpectLog(gen_log, "Malformed HTTP message from None: Content-Length too long"):
-            response = self.fetch('/large')
-        self.assertEqual(response.code, 599)
-
-
-class MaxBufferSizeTest(AsyncHTTPTestCase):
-    def get_app(self):
-
-        class LargeBody(RequestHandler):
-            def get(self):
-                self.write("a" * 1024 * 100)
-
-        return Application([('/large', LargeBody)])
-
-    def get_http_client(self):
-        # 100KB body with 64KB buffer
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_body_size=1024 * 100, max_buffer_size=1024 * 64)
-
-    def test_large_body(self):
-        response = self.fetch('/large')
-        response.rethrow()
-        self.assertEqual(response.body, b'a' * 1024 * 100)
-
-
-class ChunkedWithContentLengthTest(AsyncHTTPTestCase):
-    def get_app(self):
-
-        class ChunkedWithContentLength(RequestHandler):
-            def get(self):
-                # Add an invalid Transfer-Encoding to the response
-                self.set_header('Transfer-Encoding', 'chunked')
-                self.write("Hello world")
-
-        return Application([('/chunkwithcl', ChunkedWithContentLength)])
-
-    def get_http_client(self):
-        return SimpleAsyncHTTPClient()
-
-    def test_chunked_with_content_length(self):
-        # Make sure the invalid headers are detected
-        with ExpectLog(gen_log, ("Malformed HTTP message from None: Response "
-                                 "with both Transfer-Encoding and Content-Length")):
-            response = self.fetch('/chunkwithcl')
-        self.assertEqual(response.code, 599)
diff --git a/lib/tornado/test/stack_context_test.py b/lib/tornado/test/stack_context_test.py
deleted file mode 100644
index 59d25474c3acdbaf699e83a70601df5b24d2b92e..0000000000000000000000000000000000000000
--- a/lib/tornado/test/stack_context_test.py
+++ /dev/null
@@ -1,289 +0,0 @@
-#!/usr/bin/env python
-from __future__ import absolute_import, division, print_function
-
-from tornado import gen
-from tornado.log import app_log
-from tornado.stack_context import (StackContext, wrap, NullContext, StackContextInconsistentError,
-                                   ExceptionStackContext, run_with_stack_context, _state)
-from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
-from tornado.test.util import unittest
-from tornado.web import asynchronous, Application, RequestHandler
-import contextlib
-import functools
-import logging
-
-
-class TestRequestHandler(RequestHandler):
-    def __init__(self, app, request, io_loop):
-        super(TestRequestHandler, self).__init__(app, request)
-        self.io_loop = io_loop
-
-    @asynchronous
-    def get(self):
-        logging.debug('in get()')
-        # call self.part2 without a self.async_callback wrapper.  Its
-        # exception should still get thrown
-        self.io_loop.add_callback(self.part2)
-
-    def part2(self):
-        logging.debug('in part2()')
-        # Go through a third layer to make sure that contexts once restored
-        # are again passed on to future callbacks
-        self.io_loop.add_callback(self.part3)
-
-    def part3(self):
-        logging.debug('in part3()')
-        raise Exception('test exception')
-
-    def write_error(self, status_code, **kwargs):
-        if 'exc_info' in kwargs and str(kwargs['exc_info'][1]) == 'test exception':
-            self.write('got expected exception')
-        else:
-            self.write('unexpected failure')
-
-
-class HTTPStackContextTest(AsyncHTTPTestCase):
-    def get_app(self):
-        return Application([('/', TestRequestHandler,
-                             dict(io_loop=self.io_loop))])
-
-    def test_stack_context(self):
-        with ExpectLog(app_log, "Uncaught exception GET /"):
-            self.http_client.fetch(self.get_url('/'), self.handle_response)
-            self.wait()
-        self.assertEqual(self.response.code, 500)
-        self.assertTrue(b'got expected exception' in self.response.body)
-
-    def handle_response(self, response):
-        self.response = response
-        self.stop()
-
-
-class StackContextTest(AsyncTestCase):
-    def setUp(self):
-        super(StackContextTest, self).setUp()
-        self.active_contexts = []
-
-    @contextlib.contextmanager
-    def context(self, name):
-        self.active_contexts.append(name)
-        yield
-        self.assertEqual(self.active_contexts.pop(), name)
-
-    # Simulates the effect of an asynchronous library that uses its own
-    # StackContext internally and then returns control to the application.
-    def test_exit_library_context(self):
-        def library_function(callback):
-            # capture the caller's context before introducing our own
-            callback = wrap(callback)
-            with StackContext(functools.partial(self.context, 'library')):
-                self.io_loop.add_callback(
-                    functools.partial(library_inner_callback, callback))
-
-        def library_inner_callback(callback):
-            self.assertEqual(self.active_contexts[-2:],
-                             ['application', 'library'])
-            callback()
-
-        def final_callback():
-            # implementation detail:  the full context stack at this point
-            # is ['application', 'library', 'application'].  The 'library'
-            # context was not removed, but is no longer innermost so
-            # the application context takes precedence.
-            self.assertEqual(self.active_contexts[-1], 'application')
-            self.stop()
-        with StackContext(functools.partial(self.context, 'application')):
-            library_function(final_callback)
-        self.wait()
-
-    def test_deactivate(self):
-        deactivate_callbacks = []
-
-        def f1():
-            with StackContext(functools.partial(self.context, 'c1')) as c1:
-                deactivate_callbacks.append(c1)
-                self.io_loop.add_callback(f2)
-
-        def f2():
-            with StackContext(functools.partial(self.context, 'c2')) as c2:
-                deactivate_callbacks.append(c2)
-                self.io_loop.add_callback(f3)
-
-        def f3():
-            with StackContext(functools.partial(self.context, 'c3')) as c3:
-                deactivate_callbacks.append(c3)
-                self.io_loop.add_callback(f4)
-
-        def f4():
-            self.assertEqual(self.active_contexts, ['c1', 'c2', 'c3'])
-            deactivate_callbacks[1]()
-            # deactivating a context doesn't remove it immediately,
-            # but it will be missing from the next iteration
-            self.assertEqual(self.active_contexts, ['c1', 'c2', 'c3'])
-            self.io_loop.add_callback(f5)
-
-        def f5():
-            self.assertEqual(self.active_contexts, ['c1', 'c3'])
-            self.stop()
-        self.io_loop.add_callback(f1)
-        self.wait()
-
-    def test_deactivate_order(self):
-        # Stack context deactivation has separate logic for deactivation at
-        # the head and tail of the stack, so make sure it works in any order.
-        def check_contexts():
-            # Make sure that the full-context array and the exception-context
-            # linked lists are consistent with each other.
-            full_contexts, chain = _state.contexts
-            exception_contexts = []
-            while chain is not None:
-                exception_contexts.append(chain)
-                chain = chain.old_contexts[1]
-            self.assertEqual(list(reversed(full_contexts)), exception_contexts)
-            return list(self.active_contexts)
-
-        def make_wrapped_function():
-            """Wraps a function in three stack contexts, and returns
-            the function along with the deactivation functions.
-            """
-            # Remove the test's stack context to make sure we can cover
-            # the case where the last context is deactivated.
-            with NullContext():
-                partial = functools.partial
-                with StackContext(partial(self.context, 'c0')) as c0:
-                    with StackContext(partial(self.context, 'c1')) as c1:
-                        with StackContext(partial(self.context, 'c2')) as c2:
-                            return (wrap(check_contexts), [c0, c1, c2])
-
-        # First make sure the test mechanism works without any deactivations
-        func, deactivate_callbacks = make_wrapped_function()
-        self.assertEqual(func(), ['c0', 'c1', 'c2'])
-
-        # Deactivate the tail
-        func, deactivate_callbacks = make_wrapped_function()
-        deactivate_callbacks[0]()
-        self.assertEqual(func(), ['c1', 'c2'])
-
-        # Deactivate the middle
-        func, deactivate_callbacks = make_wrapped_function()
-        deactivate_callbacks[1]()
-        self.assertEqual(func(), ['c0', 'c2'])
-
-        # Deactivate the head
-        func, deactivate_callbacks = make_wrapped_function()
-        deactivate_callbacks[2]()
-        self.assertEqual(func(), ['c0', 'c1'])
-
-    def test_isolation_nonempty(self):
-        # f2 and f3 are a chain of operations started in context c1.
-        # f2 is incidentally run under context c2, but that context should
-        # not be passed along to f3.
-        def f1():
-            with StackContext(functools.partial(self.context, 'c1')):
-                wrapped = wrap(f2)
-            with StackContext(functools.partial(self.context, 'c2')):
-                wrapped()
-
-        def f2():
-            self.assertIn('c1', self.active_contexts)
-            self.io_loop.add_callback(f3)
-
-        def f3():
-            self.assertIn('c1', self.active_contexts)
-            self.assertNotIn('c2', self.active_contexts)
-            self.stop()
-
-        self.io_loop.add_callback(f1)
-        self.wait()
-
-    def test_isolation_empty(self):
-        # Similar to test_isolation_nonempty, but here the f2/f3 chain
-        # is started without any context.  Behavior should be equivalent
-        # to the nonempty case (although historically it was not)
-        def f1():
-            with NullContext():
-                wrapped = wrap(f2)
-            with StackContext(functools.partial(self.context, 'c2')):
-                wrapped()
-
-        def f2():
-            self.io_loop.add_callback(f3)
-
-        def f3():
-            self.assertNotIn('c2', self.active_contexts)
-            self.stop()
-
-        self.io_loop.add_callback(f1)
-        self.wait()
-
-    def test_yield_in_with(self):
-        @gen.engine
-        def f():
-            self.callback = yield gen.Callback('a')
-            with StackContext(functools.partial(self.context, 'c1')):
-                # This yield is a problem: the generator will be suspended
-                # and the StackContext's __exit__ is not called yet, so
-                # the context will be left on _state.contexts for anything
-                # that runs before the yield resolves.
-                yield gen.Wait('a')
-
-        with self.assertRaises(StackContextInconsistentError):
-            f()
-            self.wait()
-        # Cleanup: to avoid GC warnings (which for some reason only seem
-        # to show up on py33-asyncio), invoke the callback (which will do
-        # nothing since the gen.Runner is already finished) and delete it.
-        self.callback()
-        del self.callback
-
-    @gen_test
-    def test_yield_outside_with(self):
-        # This pattern avoids the problem in the previous test.
-        cb = yield gen.Callback('k1')
-        with StackContext(functools.partial(self.context, 'c1')):
-            self.io_loop.add_callback(cb)
-        yield gen.Wait('k1')
-
-    def test_yield_in_with_exception_stack_context(self):
-        # As above, but with ExceptionStackContext instead of StackContext.
-        @gen.engine
-        def f():
-            with ExceptionStackContext(lambda t, v, tb: False):
-                yield gen.Task(self.io_loop.add_callback)
-
-        with self.assertRaises(StackContextInconsistentError):
-            f()
-            self.wait()
-
-    @gen_test
-    def test_yield_outside_with_exception_stack_context(self):
-        cb = yield gen.Callback('k1')
-        with ExceptionStackContext(lambda t, v, tb: False):
-            self.io_loop.add_callback(cb)
-        yield gen.Wait('k1')
-
-    @gen_test
-    def test_run_with_stack_context(self):
-        @gen.coroutine
-        def f1():
-            self.assertEqual(self.active_contexts, ['c1'])
-            yield run_with_stack_context(
-                StackContext(functools.partial(self.context, 'c2')),
-                f2)
-            self.assertEqual(self.active_contexts, ['c1'])
-
-        @gen.coroutine
-        def f2():
-            self.assertEqual(self.active_contexts, ['c1', 'c2'])
-            yield gen.Task(self.io_loop.add_callback)
-            self.assertEqual(self.active_contexts, ['c1', 'c2'])
-
-        self.assertEqual(self.active_contexts, [])
-        yield run_with_stack_context(
-            StackContext(functools.partial(self.context, 'c1')),
-            f1)
-        self.assertEqual(self.active_contexts, [])
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/lib/tornado/test/static/dir/index.html b/lib/tornado/test/static/dir/index.html
deleted file mode 100644
index e1cd9d8aad8d7cff7f1bc7cbf632e6822bcf3299..0000000000000000000000000000000000000000
--- a/lib/tornado/test/static/dir/index.html
+++ /dev/null
@@ -1 +0,0 @@
-this is the index
diff --git a/lib/tornado/test/static/robots.txt b/lib/tornado/test/static/robots.txt
deleted file mode 100644
index 1f53798bb4fe33c86020be7f10c44f29486fd190..0000000000000000000000000000000000000000
--- a/lib/tornado/test/static/robots.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-User-agent: *
-Disallow: /
diff --git a/lib/tornado/test/static/sample.xml b/lib/tornado/test/static/sample.xml
deleted file mode 100644
index 35ea0e29daf901f053bb3d6e6bb8cd565989dd48..0000000000000000000000000000000000000000
--- a/lib/tornado/test/static/sample.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0"?>
-<data>
-    <country name="Liechtenstein">
-        <rank>1</rank>
-        <year>2008</year>
-        <gdppc>141100</gdppc>
-        <neighbor name="Austria" direction="E"/>
-        <neighbor name="Switzerland" direction="W"/>
-    </country>
-    <country name="Singapore">
-        <rank>4</rank>
-        <year>2011</year>
-        <gdppc>59900</gdppc>
-        <neighbor name="Malaysia" direction="N"/>
-    </country>
-    <country name="Panama">
-        <rank>68</rank>
-        <year>2011</year>
-        <gdppc>13600</gdppc>
-        <neighbor name="Costa Rica" direction="W"/>
-        <neighbor name="Colombia" direction="E"/>
-    </country>
-</data>
diff --git a/lib/tornado/test/static/sample.xml.bz2 b/lib/tornado/test/static/sample.xml.bz2
deleted file mode 100644
index 44dc6633324307e6834e0346eefe7874f1061b3c..0000000000000000000000000000000000000000
Binary files a/lib/tornado/test/static/sample.xml.bz2 and /dev/null differ
diff --git a/lib/tornado/test/static/sample.xml.gz b/lib/tornado/test/static/sample.xml.gz
deleted file mode 100644
index c0fd5e6fd3c3a42dd2c9ec81e9a3ca424ef35b24..0000000000000000000000000000000000000000
Binary files a/lib/tornado/test/static/sample.xml.gz and /dev/null differ
diff --git a/lib/tornado/test/static_foo.txt b/lib/tornado/test/static_foo.txt
deleted file mode 100644
index bdb44f39184e5d5f85a73eb2405e7c307da03ec3..0000000000000000000000000000000000000000
--- a/lib/tornado/test/static_foo.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-This file should not be served by StaticFileHandler even though
-its name starts with "static".
diff --git a/lib/tornado/test/tcpclient_test.py b/lib/tornado/test/tcpclient_test.py
deleted file mode 100644
index 76206e85ea3735ad94103e209c6fabe7819ae86e..0000000000000000000000000000000000000000
--- a/lib/tornado/test/tcpclient_test.py
+++ /dev/null
@@ -1,313 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2014 Facebook
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-from __future__ import absolute_import, division, print_function
-
-from contextlib import closing
-import os
-import socket
-
-from tornado.concurrent import Future
-from tornado.netutil import bind_sockets, Resolver
-from tornado.queues import Queue
-from tornado.tcpclient import TCPClient, _Connector
-from tornado.tcpserver import TCPServer
-from tornado.testing import AsyncTestCase, gen_test
-from tornado.test.util import skipIfNoIPv6, unittest, refusing_port, skipIfNonUnix
-
-# Fake address families for testing.  Used in place of AF_INET
-# and AF_INET6 because some installations do not have AF_INET6.
-AF1, AF2 = 1, 2
-
-
-class TestTCPServer(TCPServer):
-    def __init__(self, family):
-        super(TestTCPServer, self).__init__()
-        self.streams = []
-        self.queue = Queue()
-        sockets = bind_sockets(None, 'localhost', family)
-        self.add_sockets(sockets)
-        self.port = sockets[0].getsockname()[1]
-
-    def handle_stream(self, stream, address):
-        self.streams.append(stream)
-        self.queue.put(stream)
-
-    def stop(self):
-        super(TestTCPServer, self).stop()
-        for stream in self.streams:
-            stream.close()
-
-
-class TCPClientTest(AsyncTestCase):
-    def setUp(self):
-        super(TCPClientTest, self).setUp()
-        self.server = None
-        self.client = TCPClient()
-
-    def start_server(self, family):
-        if family == socket.AF_UNSPEC and 'TRAVIS' in os.environ:
-            self.skipTest("dual-stack servers often have port conflicts on travis")
-        self.server = TestTCPServer(family)
-        return self.server.port
-
-    def stop_server(self):
-        if self.server is not None:
-            self.server.stop()
-            self.server = None
-
-    def tearDown(self):
-        self.client.close()
-        self.stop_server()
-        super(TCPClientTest, self).tearDown()
-
-    def skipIfLocalhostV4(self):
-        # The port used here doesn't matter, but some systems require it
-        # to be non-zero if we do not also pass AI_PASSIVE.
-        Resolver().resolve('localhost', 80, callback=self.stop)
-        addrinfo = self.wait()
-        families = set(addr[0] for addr in addrinfo)
-        if socket.AF_INET6 not in families:
-            self.skipTest("localhost does not resolve to ipv6")
-
-    @gen_test
-    def do_test_connect(self, family, host, source_ip=None, source_port=None):
-        port = self.start_server(family)
-        stream = yield self.client.connect(host, port,
-                                           source_ip=source_ip,
-                                           source_port=source_port)
-        server_stream = yield self.server.queue.get()
-        with closing(stream):
-            stream.write(b"hello")
-            data = yield server_stream.read_bytes(5)
-            self.assertEqual(data, b"hello")
-
-    def test_connect_ipv4_ipv4(self):
-        self.do_test_connect(socket.AF_INET, '127.0.0.1')
-
-    def test_connect_ipv4_dual(self):
-        self.do_test_connect(socket.AF_INET, 'localhost')
-
-    @skipIfNoIPv6
-    def test_connect_ipv6_ipv6(self):
-        self.skipIfLocalhostV4()
-        self.do_test_connect(socket.AF_INET6, '::1')
-
-    @skipIfNoIPv6
-    def test_connect_ipv6_dual(self):
-        self.skipIfLocalhostV4()
-        if Resolver.configured_class().__name__.endswith('TwistedResolver'):
-            self.skipTest('TwistedResolver does not support multiple addresses')
-        self.do_test_connect(socket.AF_INET6, 'localhost')
-
-    def test_connect_unspec_ipv4(self):
-        self.do_test_connect(socket.AF_UNSPEC, '127.0.0.1')
-
-    @skipIfNoIPv6
-    def test_connect_unspec_ipv6(self):
-        self.skipIfLocalhostV4()
-        self.do_test_connect(socket.AF_UNSPEC, '::1')
-
-    def test_connect_unspec_dual(self):
-        self.do_test_connect(socket.AF_UNSPEC, 'localhost')
-
-    @gen_test
-    def test_refused_ipv4(self):
-        cleanup_func, port = refusing_port()
-        self.addCleanup(cleanup_func)
-        with self.assertRaises(IOError):
-            yield self.client.connect('127.0.0.1', port)
-
-    def test_source_ip_fail(self):
-        '''
-        Fail when trying to use the source IP Address '8.8.8.8'.
-        '''
-        self.assertRaises(socket.error,
-                          self.do_test_connect,
-                          socket.AF_INET,
-                          '127.0.0.1',
-                          source_ip='8.8.8.8')
-
-    def test_source_ip_success(self):
-        '''
-        Succeed when trying to use the source IP Address '127.0.0.1'.
-        '''
-        self.do_test_connect(socket.AF_INET, '127.0.0.1', source_ip='127.0.0.1')
-
-    @skipIfNonUnix
-    def test_source_port_fail(self):
-        '''
-        Fail when trying to use source port 1.
-        '''
-        self.assertRaises(socket.error,
-                          self.do_test_connect,
-                          socket.AF_INET,
-                          '127.0.0.1',
-                          source_port=1)
-
-
-class TestConnectorSplit(unittest.TestCase):
-    def test_one_family(self):
-        # These addresses aren't in the right format, but split doesn't care.
-        primary, secondary = _Connector.split(
-            [(AF1, 'a'),
-             (AF1, 'b')])
-        self.assertEqual(primary, [(AF1, 'a'),
-                                   (AF1, 'b')])
-        self.assertEqual(secondary, [])
-
-    def test_mixed(self):
-        primary, secondary = _Connector.split(
-            [(AF1, 'a'),
-             (AF2, 'b'),
-             (AF1, 'c'),
-             (AF2, 'd')])
-        self.assertEqual(primary, [(AF1, 'a'), (AF1, 'c')])
-        self.assertEqual(secondary, [(AF2, 'b'), (AF2, 'd')])
-
-
-class ConnectorTest(AsyncTestCase):
-    class FakeStream(object):
-        def __init__(self):
-            self.closed = False
-
-        def close(self):
-            self.closed = True
-
-    def setUp(self):
-        super(ConnectorTest, self).setUp()
-        self.connect_futures = {}
-        self.streams = {}
-        self.addrinfo = [(AF1, 'a'), (AF1, 'b'),
-                         (AF2, 'c'), (AF2, 'd')]
-
-    def tearDown(self):
-        # Unless explicitly checked (and popped) in the test, we shouldn't
-        # be closing any streams
-        for stream in self.streams.values():
-            self.assertFalse(stream.closed)
-        super(ConnectorTest, self).tearDown()
-
-    def create_stream(self, af, addr):
-        future = Future()
-        self.connect_futures[(af, addr)] = future
-        return future
-
-    def assert_pending(self, *keys):
-        self.assertEqual(sorted(self.connect_futures.keys()), sorted(keys))
-
-    def resolve_connect(self, af, addr, success):
-        future = self.connect_futures.pop((af, addr))
-        if success:
-            self.streams[addr] = ConnectorTest.FakeStream()
-            future.set_result(self.streams[addr])
-        else:
-            future.set_exception(IOError())
-
-    def start_connect(self, addrinfo):
-        conn = _Connector(addrinfo, self.io_loop, self.create_stream)
-        # Give it a huge timeout; we'll trigger timeouts manually.
-        future = conn.start(3600)
-        return conn, future
-
-    def test_immediate_success(self):
-        conn, future = self.start_connect(self.addrinfo)
-        self.assertEqual(list(self.connect_futures.keys()),
-                         [(AF1, 'a')])
-        self.resolve_connect(AF1, 'a', True)
-        self.assertEqual(future.result(), (AF1, 'a', self.streams['a']))
-
-    def test_immediate_failure(self):
-        # Fail with just one address.
-        conn, future = self.start_connect([(AF1, 'a')])
-        self.assert_pending((AF1, 'a'))
-        self.resolve_connect(AF1, 'a', False)
-        self.assertRaises(IOError, future.result)
-
-    def test_one_family_second_try(self):
-        conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
-        self.assert_pending((AF1, 'a'))
-        self.resolve_connect(AF1, 'a', False)
-        self.assert_pending((AF1, 'b'))
-        self.resolve_connect(AF1, 'b', True)
-        self.assertEqual(future.result(), (AF1, 'b', self.streams['b']))
-
-    def test_one_family_second_try_failure(self):
-        conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
-        self.assert_pending((AF1, 'a'))
-        self.resolve_connect(AF1, 'a', False)
-        self.assert_pending((AF1, 'b'))
-        self.resolve_connect(AF1, 'b', False)
-        self.assertRaises(IOError, future.result)
-
-    def test_one_family_second_try_timeout(self):
-        conn, future = self.start_connect([(AF1, 'a'), (AF1, 'b')])
-        self.assert_pending((AF1, 'a'))
-        # trigger the timeout while the first lookup is pending;
-        # nothing happens.
-        conn.on_timeout()
-        self.assert_pending((AF1, 'a'))
-        self.resolve_connect(AF1, 'a', False)
-        self.assert_pending((AF1, 'b'))
-        self.resolve_connect(AF1, 'b', True)
-        self.assertEqual(future.result(), (AF1, 'b', self.streams['b']))
-
-    def test_two_families_immediate_failure(self):
-        conn, future = self.start_connect(self.addrinfo)
-        self.assert_pending((AF1, 'a'))
-        self.resolve_connect(AF1, 'a', False)
-        self.assert_pending((AF1, 'b'), (AF2, 'c'))
-        self.resolve_connect(AF1, 'b', False)
-        self.resolve_connect(AF2, 'c', True)
-        self.assertEqual(future.result(), (AF2, 'c', self.streams['c']))
-
-    def test_two_families_timeout(self):
-        conn, future = self.start_connect(self.addrinfo)
-        self.assert_pending((AF1, 'a'))
-        conn.on_timeout()
-        self.assert_pending((AF1, 'a'), (AF2, 'c'))
-        self.resolve_connect(AF2, 'c', True)
-        self.assertEqual(future.result(), (AF2, 'c', self.streams['c']))
-        # resolving 'a' after the connection has completed doesn't start 'b'
-        self.resolve_connect(AF1, 'a', False)
-        self.assert_pending()
-
-    def test_success_after_timeout(self):
-        conn, future = self.start_connect(self.addrinfo)
-        self.assert_pending((AF1, 'a'))
-        conn.on_timeout()
-        self.assert_pending((AF1, 'a'), (AF2, 'c'))
-        self.resolve_connect(AF1, 'a', True)
-        self.assertEqual(future.result(), (AF1, 'a', self.streams['a']))
-        # resolving 'c' after completion closes the connection.
-        self.resolve_connect(AF2, 'c', True)
-        self.assertTrue(self.streams.pop('c').closed)
-
-    def test_all_fail(self):
-        conn, future = self.start_connect(self.addrinfo)
-        self.assert_pending((AF1, 'a'))
-        conn.on_timeout()
-        self.assert_pending((AF1, 'a'), (AF2, 'c'))
-        self.resolve_connect(AF2, 'c', False)
-        self.assert_pending((AF1, 'a'), (AF2, 'd'))
-        self.resolve_connect(AF2, 'd', False)
-        # one queue is now empty
-        self.assert_pending((AF1, 'a'))
-        self.resolve_connect(AF1, 'a', False)
-        self.assert_pending((AF1, 'b'))
-        self.assertFalse(future.done())
-        self.resolve_connect(AF1, 'b', False)
-        self.assertRaises(IOError, future.result)
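The TestConnectorSplit and ConnectorTest cases above exercised Tornado's private _Connector helper, which groups resolved addresses by family and tries the primary family first, falling back to the secondary on failure or timeout ("happy eyeballs"). As a readability aid, here is a minimal, hypothetical sketch of the split behavior those tests assert; it is an illustration, not Tornado's actual implementation.

# Hypothetical stand-in for _Connector.split(): addresses sharing the first
# entry's family form the primary list; everything else is secondary.
def split_by_family(addrinfo):
    primary, secondary = [], []
    primary_af = addrinfo[0][0] if addrinfo else None
    for af, addr in addrinfo:
        (primary if af == primary_af else secondary).append((af, addr))
    return primary, secondary

# Mirrors test_mixed above.
AF1, AF2 = "AF1", "AF2"
assert split_by_family([(AF1, 'a'), (AF2, 'b'), (AF1, 'c'), (AF2, 'd')]) == \
    ([(AF1, 'a'), (AF1, 'c')], [(AF2, 'b'), (AF2, 'd')])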
diff --git a/lib/tornado/test/tcpserver_test.py b/lib/tornado/test/tcpserver_test.py
deleted file mode 100644
index 9afb54202a572718cfee924a01111e3f6608bcc1..0000000000000000000000000000000000000000
--- a/lib/tornado/test/tcpserver_test.py
+++ /dev/null
@@ -1,70 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import socket
-
-from tornado import gen
-from tornado.iostream import IOStream
-from tornado.log import app_log
-from tornado.stack_context import NullContext
-from tornado.tcpserver import TCPServer
-from tornado.test.util import skipBefore35, exec_test
-from tornado.testing import AsyncTestCase, ExpectLog, bind_unused_port, gen_test
-
-
-class TCPServerTest(AsyncTestCase):
-    @gen_test
-    def test_handle_stream_coroutine_logging(self):
-        # handle_stream may be a coroutine and any exception in its
-        # Future will be logged.
-        class TestServer(TCPServer):
-            @gen.coroutine
-            def handle_stream(self, stream, address):
-                yield gen.moment
-                stream.close()
-                1 / 0
-
-        server = client = None
-        try:
-            sock, port = bind_unused_port()
-            with NullContext():
-                server = TestServer()
-                server.add_socket(sock)
-            client = IOStream(socket.socket())
-            with ExpectLog(app_log, "Exception in callback"):
-                yield client.connect(('localhost', port))
-                yield client.read_until_close()
-                yield gen.moment
-        finally:
-            if server is not None:
-                server.stop()
-            if client is not None:
-                client.close()
-
-    @skipBefore35
-    @gen_test
-    def test_handle_stream_native_coroutine(self):
-        # handle_stream may be a native coroutine.
-
-        namespace = exec_test(globals(), locals(), """
-        class TestServer(TCPServer):
-            async def handle_stream(self, stream, address):
-                stream.write(b'data')
-                stream.close()
-        """)
-
-        sock, port = bind_unused_port()
-        server = namespace['TestServer']()
-        server.add_socket(sock)
-        client = IOStream(socket.socket())
-        yield client.connect(('localhost', port))
-        result = yield client.read_until_close()
-        self.assertEqual(result, b'data')
-        server.stop()
-        client.close()
-
-    def test_stop_twice(self):
-        sock, port = bind_unused_port()
-        server = TCPServer()
-        server.add_socket(sock)
-        server.stop()
-        server.stop()
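The tcpserver tests above depend on handle_stream being allowed to be a coroutine, with any exception escaping its Future logged by the server rather than silently dropped. A minimal sketch of that pattern, assuming the Tornado 4.x-era APIs used in these tests:

from tornado import gen
from tornado.iostream import StreamClosedError
from tornado.tcpserver import TCPServer

class EchoServer(TCPServer):
    @gen.coroutine
    def handle_stream(self, stream, address):
        # Any exception escaping this coroutine is logged by TCPServer.
        try:
            data = yield stream.read_until(b"\n")  # read one line
            yield stream.write(data)               # echo it back
        except StreamClosedError:
            pass                                   # client went away
        finally:
            stream.close()

# EchoServer().listen(8888) plus a running IOLoop would serve connections;
# the deleted test instead used add_socket() with a bound unused port.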
diff --git a/lib/tornado/test/template_test.py b/lib/tornado/test/template_test.py
deleted file mode 100644
index 2f1e88c1d15f8dbc2fe88bfc56ceaf481e444cee..0000000000000000000000000000000000000000
--- a/lib/tornado/test/template_test.py
+++ /dev/null
@@ -1,496 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import os
-import sys
-import traceback
-
-from tornado.escape import utf8, native_str, to_unicode
-from tornado.template import Template, DictLoader, ParseError, Loader
-from tornado.test.util import unittest, is_coverage_running
-from tornado.util import ObjectDict, unicode_type, PY3
-
-
-class TemplateTest(unittest.TestCase):
-    def test_simple(self):
-        template = Template("Hello {{ name }}!")
-        self.assertEqual(template.generate(name="Ben"),
-                         b"Hello Ben!")
-
-    def test_bytes(self):
-        template = Template("Hello {{ name }}!")
-        self.assertEqual(template.generate(name=utf8("Ben")),
-                         b"Hello Ben!")
-
-    def test_expressions(self):
-        template = Template("2 + 2 = {{ 2 + 2 }}")
-        self.assertEqual(template.generate(), b"2 + 2 = 4")
-
-    def test_comment(self):
-        template = Template("Hello{# TODO i18n #} {{ name }}!")
-        self.assertEqual(template.generate(name=utf8("Ben")),
-                         b"Hello Ben!")
-
-    def test_include(self):
-        loader = DictLoader({
-            "index.html": '{% include "header.html" %}\nbody text',
-            "header.html": "header text",
-        })
-        self.assertEqual(loader.load("index.html").generate(),
-                         b"header text\nbody text")
-
-    def test_extends(self):
-        loader = DictLoader({
-            "base.html": """\
-<title>{% block title %}default title{% end %}</title>
-<body>{% block body %}default body{% end %}</body>
-""",
-            "page.html": """\
-{% extends "base.html" %}
-{% block title %}page title{% end %}
-{% block body %}page body{% end %}
-""",
-        })
-        self.assertEqual(loader.load("page.html").generate(),
-                         b"<title>page title</title>\n<body>page body</body>\n")
-
-    def test_relative_load(self):
-        loader = DictLoader({
-            "a/1.html": "{% include '2.html' %}",
-            "a/2.html": "{% include '../b/3.html' %}",
-            "b/3.html": "ok",
-        })
-        self.assertEqual(loader.load("a/1.html").generate(),
-                         b"ok")
-
-    def test_escaping(self):
-        self.assertRaises(ParseError, lambda: Template("{{"))
-        self.assertRaises(ParseError, lambda: Template("{%"))
-        self.assertEqual(Template("{{!").generate(), b"{{")
-        self.assertEqual(Template("{%!").generate(), b"{%")
-        self.assertEqual(Template("{#!").generate(), b"{#")
-        self.assertEqual(Template("{{ 'expr' }} {{!jquery expr}}").generate(),
-                         b"expr {{jquery expr}}")
-
-    def test_unicode_template(self):
-        template = Template(utf8(u"\u00e9"))
-        self.assertEqual(template.generate(), utf8(u"\u00e9"))
-
-    def test_unicode_literal_expression(self):
-        # Unicode literals should be usable in templates.  Note that this
-        # test simulates unicode characters appearing directly in the
-        # template file (with utf8 encoding), i.e. \u escapes would not
-        # be used in the template file itself.
-        if str is unicode_type:
-            # python 3 needs a different version of this test since
-            # 2to3 doesn't run on template internals
-            template = Template(utf8(u'{{ "\u00e9" }}'))
-        else:
-            template = Template(utf8(u'{{ u"\u00e9" }}'))
-        self.assertEqual(template.generate(), utf8(u"\u00e9"))
-
-    def test_custom_namespace(self):
-        loader = DictLoader({"test.html": "{{ inc(5) }}"}, namespace={"inc": lambda x: x + 1})
-        self.assertEqual(loader.load("test.html").generate(), b"6")
-
-    def test_apply(self):
-        def upper(s):
-            return s.upper()
-        template = Template(utf8("{% apply upper %}foo{% end %}"))
-        self.assertEqual(template.generate(upper=upper), b"FOO")
-
-    def test_unicode_apply(self):
-        def upper(s):
-            return to_unicode(s).upper()
-        template = Template(utf8(u"{% apply upper %}foo \u00e9{% end %}"))
-        self.assertEqual(template.generate(upper=upper), utf8(u"FOO \u00c9"))
-
-    def test_bytes_apply(self):
-        def upper(s):
-            return utf8(to_unicode(s).upper())
-        template = Template(utf8(u"{% apply upper %}foo \u00e9{% end %}"))
-        self.assertEqual(template.generate(upper=upper), utf8(u"FOO \u00c9"))
-
-    def test_if(self):
-        template = Template(utf8("{% if x > 4 %}yes{% else %}no{% end %}"))
-        self.assertEqual(template.generate(x=5), b"yes")
-        self.assertEqual(template.generate(x=3), b"no")
-
-    def test_if_empty_body(self):
-        template = Template(utf8("{% if True %}{% else %}{% end %}"))
-        self.assertEqual(template.generate(), b"")
-
-    def test_try(self):
-        template = Template(utf8("""{% try %}
-try{% set y = 1/x %}
-{% except %}-except
-{% else %}-else
-{% finally %}-finally
-{% end %}"""))
-        self.assertEqual(template.generate(x=1), b"\ntry\n-else\n-finally\n")
-        self.assertEqual(template.generate(x=0), b"\ntry-except\n-finally\n")
-
-    def test_comment_directive(self):
-        template = Template(utf8("{% comment blah blah %}foo"))
-        self.assertEqual(template.generate(), b"foo")
-
-    def test_break_continue(self):
-        template = Template(utf8("""\
-{% for i in range(10) %}
-    {% if i == 2 %}
-        {% continue %}
-    {% end %}
-    {{ i }}
-    {% if i == 6 %}
-        {% break %}
-    {% end %}
-{% end %}"""))
-        result = template.generate()
-        # remove extraneous whitespace
-        result = b''.join(result.split())
-        self.assertEqual(result, b"013456")
-
-    def test_break_outside_loop(self):
-        try:
-            Template(utf8("{% break %}"))
-            raise Exception("Did not get expected exception")
-        except ParseError:
-            pass
-
-    def test_break_in_apply(self):
-        # This test verifies current behavior, although of course it would
-        # be nice if apply didn't cause seemingly unrelated breakage
-        try:
-            Template(utf8("{% for i in [] %}{% apply foo %}{% break %}{% end %}{% end %}"))
-            raise Exception("Did not get expected exception")
-        except ParseError:
-            pass
-
-    @unittest.skipIf(sys.version_info >= division.getMandatoryRelease(),
-                     'no testable future imports')
-    def test_no_inherit_future(self):
-        # This file has from __future__ import division...
-        self.assertEqual(1 / 2, 0.5)
-        # ...but the template doesn't
-        template = Template('{{ 1 / 2 }}')
-        self.assertEqual(template.generate(), '0')
-
-    def test_non_ascii_name(self):
-        if PY3 and is_coverage_running():
-            try:
-                os.fsencode(u"t\u00e9st.html")
-            except UnicodeEncodeError:
-                self.skipTest("coverage tries to access unencodable filename")
-        loader = DictLoader({u"t\u00e9st.html": "hello"})
-        self.assertEqual(loader.load(u"t\u00e9st.html").generate(), b"hello")
-
-
-class StackTraceTest(unittest.TestCase):
-    def test_error_line_number_expression(self):
-        loader = DictLoader({"test.html": """one
-two{{1/0}}
-three
-        """})
-        try:
-            loader.load("test.html").generate()
-            self.fail("did not get expected exception")
-        except ZeroDivisionError:
-            self.assertTrue("# test.html:2" in traceback.format_exc())
-
-    def test_error_line_number_directive(self):
-        loader = DictLoader({"test.html": """one
-two{%if 1/0%}
-three{%end%}
-        """})
-        try:
-            loader.load("test.html").generate()
-            self.fail("did not get expected exception")
-        except ZeroDivisionError:
-            self.assertTrue("# test.html:2" in traceback.format_exc())
-
-    def test_error_line_number_module(self):
-        loader = DictLoader({
-            "base.html": "{% module Template('sub.html') %}",
-            "sub.html": "{{1/0}}",
-        }, namespace={"_tt_modules": ObjectDict(Template=lambda path, **kwargs: loader.load(path).generate(**kwargs))})
-        try:
-            loader.load("base.html").generate()
-            self.fail("did not get expected exception")
-        except ZeroDivisionError:
-            exc_stack = traceback.format_exc()
-            self.assertTrue('# base.html:1' in exc_stack)
-            self.assertTrue('# sub.html:1' in exc_stack)
-
-    def test_error_line_number_include(self):
-        loader = DictLoader({
-            "base.html": "{% include 'sub.html' %}",
-            "sub.html": "{{1/0}}",
-        })
-        try:
-            loader.load("base.html").generate()
-            self.fail("did not get expected exception")
-        except ZeroDivisionError:
-            self.assertTrue("# sub.html:1 (via base.html:1)" in
-                            traceback.format_exc())
-
-    def test_error_line_number_extends_base_error(self):
-        loader = DictLoader({
-            "base.html": "{{1/0}}",
-            "sub.html": "{% extends 'base.html' %}",
-        })
-        try:
-            loader.load("sub.html").generate()
-            self.fail("did not get expected exception")
-        except ZeroDivisionError:
-            exc_stack = traceback.format_exc()
-        self.assertTrue("# base.html:1" in exc_stack)
-
-    def test_error_line_number_extends_sub_error(self):
-        loader = DictLoader({
-            "base.html": "{% block 'block' %}{% end %}",
-            "sub.html": """
-{% extends 'base.html' %}
-{% block 'block' %}
-{{1/0}}
-{% end %}
-            """})
-        try:
-            loader.load("sub.html").generate()
-            self.fail("did not get expected exception")
-        except ZeroDivisionError:
-            self.assertTrue("# sub.html:4 (via base.html:1)" in
-                            traceback.format_exc())
-
-    def test_multi_includes(self):
-        loader = DictLoader({
-            "a.html": "{% include 'b.html' %}",
-            "b.html": "{% include 'c.html' %}",
-            "c.html": "{{1/0}}",
-        })
-        try:
-            loader.load("a.html").generate()
-            self.fail("did not get expected exception")
-        except ZeroDivisionError:
-            self.assertTrue("# c.html:1 (via b.html:1, a.html:1)" in
-                            traceback.format_exc())
-
-
-class ParseErrorDetailTest(unittest.TestCase):
-    def test_details(self):
-        loader = DictLoader({
-            "foo.html": "\n\n{{",
-        })
-        with self.assertRaises(ParseError) as cm:
-            loader.load("foo.html")
-        self.assertEqual("Missing end expression }} at foo.html:3",
-                         str(cm.exception))
-        self.assertEqual("foo.html", cm.exception.filename)
-        self.assertEqual(3, cm.exception.lineno)
-
-    def test_custom_parse_error(self):
-        # Make sure that ParseErrors remain compatible with their
-        # pre-4.3 signature.
-        self.assertEqual("asdf at None:0", str(ParseError("asdf")))
-
-
-class AutoEscapeTest(unittest.TestCase):
-    def setUp(self):
-        self.templates = {
-            "escaped.html": "{% autoescape xhtml_escape %}{{ name }}",
-            "unescaped.html": "{% autoescape None %}{{ name }}",
-            "default.html": "{{ name }}",
-
-            "include.html": """\
-escaped: {% include 'escaped.html' %}
-unescaped: {% include 'unescaped.html' %}
-default: {% include 'default.html' %}
-""",
-
-            "escaped_block.html": """\
-{% autoescape xhtml_escape %}\
-{% block name %}base: {{ name }}{% end %}""",
-            "unescaped_block.html": """\
-{% autoescape None %}\
-{% block name %}base: {{ name }}{% end %}""",
-
-            # Extend a base template with different autoescape policy,
-            # with and without overriding the base's blocks
-            "escaped_extends_unescaped.html": """\
-{% autoescape xhtml_escape %}\
-{% extends "unescaped_block.html" %}""",
-            "escaped_overrides_unescaped.html": """\
-{% autoescape xhtml_escape %}\
-{% extends "unescaped_block.html" %}\
-{% block name %}extended: {{ name }}{% end %}""",
-            "unescaped_extends_escaped.html": """\
-{% autoescape None %}\
-{% extends "escaped_block.html" %}""",
-            "unescaped_overrides_escaped.html": """\
-{% autoescape None %}\
-{% extends "escaped_block.html" %}\
-{% block name %}extended: {{ name }}{% end %}""",
-
-            "raw_expression.html": """\
-{% autoescape xhtml_escape %}\
-expr: {{ name }}
-raw: {% raw name %}""",
-        }
-
-    def test_default_off(self):
-        loader = DictLoader(self.templates, autoescape=None)
-        name = "Bobby <table>s"
-        self.assertEqual(loader.load("escaped.html").generate(name=name),
-                         b"Bobby &lt;table&gt;s")
-        self.assertEqual(loader.load("unescaped.html").generate(name=name),
-                         b"Bobby <table>s")
-        self.assertEqual(loader.load("default.html").generate(name=name),
-                         b"Bobby <table>s")
-
-        self.assertEqual(loader.load("include.html").generate(name=name),
-                         b"escaped: Bobby &lt;table&gt;s\n"
-                         b"unescaped: Bobby <table>s\n"
-                         b"default: Bobby <table>s\n")
-
-    def test_default_on(self):
-        loader = DictLoader(self.templates, autoescape="xhtml_escape")
-        name = "Bobby <table>s"
-        self.assertEqual(loader.load("escaped.html").generate(name=name),
-                         b"Bobby &lt;table&gt;s")
-        self.assertEqual(loader.load("unescaped.html").generate(name=name),
-                         b"Bobby <table>s")
-        self.assertEqual(loader.load("default.html").generate(name=name),
-                         b"Bobby &lt;table&gt;s")
-
-        self.assertEqual(loader.load("include.html").generate(name=name),
-                         b"escaped: Bobby &lt;table&gt;s\n"
-                         b"unescaped: Bobby <table>s\n"
-                         b"default: Bobby &lt;table&gt;s\n")
-
-    def test_unextended_block(self):
-        loader = DictLoader(self.templates)
-        name = "<script>"
-        self.assertEqual(loader.load("escaped_block.html").generate(name=name),
-                         b"base: &lt;script&gt;")
-        self.assertEqual(loader.load("unescaped_block.html").generate(name=name),
-                         b"base: <script>")
-
-    def test_extended_block(self):
-        loader = DictLoader(self.templates)
-
-        def render(name):
-            return loader.load(name).generate(name="<script>")
-        self.assertEqual(render("escaped_extends_unescaped.html"),
-                         b"base: <script>")
-        self.assertEqual(render("escaped_overrides_unescaped.html"),
-                         b"extended: &lt;script&gt;")
-
-        self.assertEqual(render("unescaped_extends_escaped.html"),
-                         b"base: &lt;script&gt;")
-        self.assertEqual(render("unescaped_overrides_escaped.html"),
-                         b"extended: <script>")
-
-    def test_raw_expression(self):
-        loader = DictLoader(self.templates)
-
-        def render(name):
-            return loader.load(name).generate(name='<>&"')
-        self.assertEqual(render("raw_expression.html"),
-                         b"expr: &lt;&gt;&amp;&quot;\n"
-                         b"raw: <>&\"")
-
-    def test_custom_escape(self):
-        loader = DictLoader({"foo.py":
-                             "{% autoescape py_escape %}s = {{ name }}\n"})
-
-        def py_escape(s):
-            self.assertEqual(type(s), bytes)
-            return repr(native_str(s))
-
-        def render(template, name):
-            return loader.load(template).generate(py_escape=py_escape,
-                                                  name=name)
-        self.assertEqual(render("foo.py", "<html>"),
-                         b"s = '<html>'\n")
-        self.assertEqual(render("foo.py", "';sys.exit()"),
-                         b"""s = "';sys.exit()"\n""")
-        self.assertEqual(render("foo.py", ["not a string"]),
-                         b"""s = "['not a string']"\n""")
-
-    def test_manual_minimize_whitespace(self):
-        # Whitespace including newlines is allowed within template tags
-        # and directives, and this is one way to avoid long lines while
-        # keeping extra whitespace out of the rendered output.
-        loader = DictLoader({'foo.txt': """\
-{% for i in items
-  %}{% if i > 0 %}, {% end %}{#
-  #}{{i
-  }}{% end
-%}""",
-                             })
-        self.assertEqual(loader.load("foo.txt").generate(items=range(5)),
-                         b"0, 1, 2, 3, 4")
-
-    def test_whitespace_by_filename(self):
-        # Default whitespace handling depends on the template filename.
-        loader = DictLoader({
-            "foo.html": "   \n\t\n asdf\t   ",
-            "bar.js": " \n\n\n\t qwer     ",
-            "baz.txt": "\t    zxcv\n\n",
-            "include.html": "  {% include baz.txt %} \n ",
-            "include.txt": "\t\t{% include foo.html %}    ",
-        })
-
-        # HTML and JS files have whitespace compressed by default.
-        self.assertEqual(loader.load("foo.html").generate(),
-                         b"\nasdf ")
-        self.assertEqual(loader.load("bar.js").generate(),
-                         b"\nqwer ")
-        # TXT files do not.
-        self.assertEqual(loader.load("baz.txt").generate(),
-                         b"\t    zxcv\n\n")
-
-        # Each file maintains its own status even when included in
-        # a file of the other type.
-        self.assertEqual(loader.load("include.html").generate(),
-                         b" \t    zxcv\n\n\n")
-        self.assertEqual(loader.load("include.txt").generate(),
-                         b"\t\t\nasdf     ")
-
-    def test_whitespace_by_loader(self):
-        templates = {
-            "foo.html": "\t\tfoo\n\n",
-            "bar.txt": "\t\tbar\n\n",
-        }
-        loader = DictLoader(templates, whitespace='all')
-        self.assertEqual(loader.load("foo.html").generate(), b"\t\tfoo\n\n")
-        self.assertEqual(loader.load("bar.txt").generate(), b"\t\tbar\n\n")
-
-        loader = DictLoader(templates, whitespace='single')
-        self.assertEqual(loader.load("foo.html").generate(), b" foo\n")
-        self.assertEqual(loader.load("bar.txt").generate(), b" bar\n")
-
-        loader = DictLoader(templates, whitespace='oneline')
-        self.assertEqual(loader.load("foo.html").generate(), b" foo ")
-        self.assertEqual(loader.load("bar.txt").generate(), b" bar ")
-
-    def test_whitespace_directive(self):
-        loader = DictLoader({
-            "foo.html": """\
-{% whitespace oneline %}
-    {% for i in range(3) %}
-        {{ i }}
-    {% end %}
-{% whitespace all %}
-    pre\tformatted
-"""})
-        self.assertEqual(loader.load("foo.html").generate(),
-                         b"  0  1  2  \n    pre\tformatted\n")
-
-
-class TemplateLoaderTest(unittest.TestCase):
-    def setUp(self):
-        self.loader = Loader(os.path.join(os.path.dirname(__file__), "templates"))
-
-    def test_utf8_in_file(self):
-        tmpl = self.loader.load("utf8.html")
-        result = tmpl.generate()
-        self.assertEqual(to_unicode(result).strip(), u"H\u00e9llo")
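The template tests above cover expression substitution, inheritance via {% extends %}/{% block %}, and the autoescape policy. A minimal sketch of those tornado.template APIs as exercised here (generate() returns bytes):

from tornado.template import Template, DictLoader

# Simple expression substitution.
print(Template("2 + 2 = {{ 2 + 2 }}").generate())         # b'2 + 2 = 4'

# Autoescaping applies xhtml_escape to {{ ... }} expressions.
loader = DictLoader({"greet.html": "Hello {{ name }}!"},
                    autoescape="xhtml_escape")
print(loader.load("greet.html").generate(name="<b>Ben</b>"))
# b'Hello &lt;b&gt;Ben&lt;/b&gt;!'

# Inheritance: the child's blocks replace the parent's.
loader = DictLoader({
    "base.html": "<title>{% block title %}default{% end %}</title>",
    "page.html": '{% extends "base.html" %}{% block title %}page{% end %}',
})
print(loader.load("page.html").generate())                 # b'<title>page</title>'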
diff --git a/lib/tornado/test/templates/utf8.html b/lib/tornado/test/templates/utf8.html
deleted file mode 100644
index c5253dfa8647f2fdae07833f4b8db903f2454c19..0000000000000000000000000000000000000000
--- a/lib/tornado/test/templates/utf8.html
+++ /dev/null
@@ -1 +0,0 @@
-Héllo
diff --git a/lib/tornado/test/test.crt b/lib/tornado/test/test.crt
deleted file mode 100644
index 25538c88ab59d51f6dae5969853e02174cefa753..0000000000000000000000000000000000000000
--- a/lib/tornado/test/test.crt
+++ /dev/null
@@ -1,15 +0,0 @@
------BEGIN CERTIFICATE-----
-MIICSDCCAbGgAwIBAgIJAN1oTowzMbkzMA0GCSqGSIb3DQEBBQUAMD0xCzAJBgNV
-BAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRkwFwYDVQQKDBBUb3JuYWRvIFdl
-YiBUZXN0MB4XDTEwMDgyNTE4MjQ0NFoXDTIwMDgyMjE4MjQ0NFowPTELMAkGA1UE
-BhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExGTAXBgNVBAoMEFRvcm5hZG8gV2Vi
-IFRlc3QwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALirW3mX4jbdFse2aZwW
-zszCJ1IsRDrzALpbvMYLLbIZqo+Z8v5aERKTRQpXFqGaZyY+tdwYy7X7YXcLtKqv
-jnw/MSeIaqkw5pROKz5aR0nkPLvcTmhJVLVPCLc8dFnIlu8aC9TrDhr90P+PzU39
-UG7zLweA9zXKBuW3Tjo5dMP3AgMBAAGjUDBOMB0GA1UdDgQWBBRhJjMBYrzddCFr
-/0vvPyHMeqgo0TAfBgNVHSMEGDAWgBRhJjMBYrzddCFr/0vvPyHMeqgo0TAMBgNV
-HRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GBAGP6GaxSfb21bikcqaK3ZKCC1sRJ
-tiCuvJZbBUFUCAzl05dYUfJZim/oWK+GqyUkUB8ciYivUNnn9OtS7DnlTgT2ws2e
-lNgn5cuFXoAGcHXzVlHG3yoywYBf3y0Dn20uzrlLXUWJAzoSLOt2LTaXvwlgm7hF
-W1q8SQ6UBshRw2X0
------END CERTIFICATE-----
diff --git a/lib/tornado/test/test.key b/lib/tornado/test/test.key
deleted file mode 100644
index 577d518e502f491ee935162db952dbe9084bc7ad..0000000000000000000000000000000000000000
--- a/lib/tornado/test/test.key
+++ /dev/null
@@ -1,16 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIICeAIBADANBgkqhkiG9w0BAQEFAASCAmIwggJeAgEAAoGBALirW3mX4jbdFse2
-aZwWzszCJ1IsRDrzALpbvMYLLbIZqo+Z8v5aERKTRQpXFqGaZyY+tdwYy7X7YXcL
-tKqvjnw/MSeIaqkw5pROKz5aR0nkPLvcTmhJVLVPCLc8dFnIlu8aC9TrDhr90P+P
-zU39UG7zLweA9zXKBuW3Tjo5dMP3AgMBAAECgYEAiygNaWYrf95AcUQi9w00zpUr
-nj9fNvCwxr2kVbRMvd2balS/CC4EmXPCXdVcZ3B7dBVjYzSIJV0Fh/iZLtnVysD9
-fcNMZ+Cz71b/T0ItsNYOsJk0qUVyP52uqsqkNppIPJsD19C+ZeMLZj6iEiylZyl8
-2U16c/kVIjER63mUEGkCQQDayQOTGPJrKHqPAkUqzeJkfvHH2yCf+cySU+w6ezyr
-j9yxcq8aZoLusCebDVT+kz7RqnD5JePFvB38cMuepYBLAkEA2BTFdZx30f4moPNv
-JlXlPNJMUTUzsXG7n4vNc+18O5ous0NGQII8jZWrIcTrP8wiP9fF3JwUsKrJhcBn
-xRs3hQJBAIDUgz1YIE+HW3vgi1gkOh6RPdBAsVpiXtr/fggFz3j60qrO7FswaAMj
-SX8c/6KUlBYkNjgP3qruFf4zcUNvEzcCQQCaioCPFVE9ByBpjLG6IUTKsz2R9xL5
-nfYqrbpLZ1aq6iLsYvkjugHE4X57sHLwNfdo4dHJbnf9wqhO2MVe25BhAkBdKYpY
-7OKc/2mmMbJDhVBgoixz/muN/5VjdfbvVY48naZkJF1p1tmogqPC5F1jPCS4rM+S
-FfPJIHRNEn2oktw5
------END PRIVATE KEY-----
diff --git a/lib/tornado/test/testing_test.py b/lib/tornado/test/testing_test.py
deleted file mode 100644
index b3d6d8c5bb7db45930ef88fdefe67f1765c1bac8..0000000000000000000000000000000000000000
--- a/lib/tornado/test/testing_test.py
+++ /dev/null
@@ -1,278 +0,0 @@
-#!/usr/bin/env python
-
-from __future__ import absolute_import, division, print_function
-
-from tornado import gen, ioloop
-from tornado.log import app_log
-from tornado.testing import AsyncTestCase, gen_test, ExpectLog
-from tornado.test.util import unittest, skipBefore35, exec_test
-import contextlib
-import os
-import traceback
-import warnings
-
-
-@contextlib.contextmanager
-def set_environ(name, value):
-    old_value = os.environ.get(name)
-    os.environ[name] = value
-
-    try:
-        yield
-    finally:
-        if old_value is None:
-            del os.environ[name]
-        else:
-            os.environ[name] = old_value
-
-
-class AsyncTestCaseTest(AsyncTestCase):
-    def test_exception_in_callback(self):
-        self.io_loop.add_callback(lambda: 1 / 0)
-        try:
-            self.wait()
-            self.fail("did not get expected exception")
-        except ZeroDivisionError:
-            pass
-
-    def test_wait_timeout(self):
-        time = self.io_loop.time
-
-        # Accept default 5-second timeout, no error
-        self.io_loop.add_timeout(time() + 0.01, self.stop)
-        self.wait()
-
-        # Timeout passed to wait()
-        self.io_loop.add_timeout(time() + 1, self.stop)
-        with self.assertRaises(self.failureException):
-            self.wait(timeout=0.01)
-
-        # Timeout set with environment variable
-        self.io_loop.add_timeout(time() + 1, self.stop)
-        with set_environ('ASYNC_TEST_TIMEOUT', '0.01'):
-            with self.assertRaises(self.failureException):
-                self.wait()
-
-    def test_subsequent_wait_calls(self):
-        """
-        This test makes sure that a second call to wait()
-        clears the first timeout.
-        """
-        self.io_loop.add_timeout(self.io_loop.time() + 0.00, self.stop)
-        self.wait(timeout=0.02)
-        self.io_loop.add_timeout(self.io_loop.time() + 0.03, self.stop)
-        self.wait(timeout=0.15)
-
-    def test_multiple_errors(self):
-        def fail(message):
-            raise Exception(message)
-        self.io_loop.add_callback(lambda: fail("error one"))
-        self.io_loop.add_callback(lambda: fail("error two"))
-        # The first error gets raised; the second gets logged.
-        with ExpectLog(app_log, "multiple unhandled exceptions"):
-            with self.assertRaises(Exception) as cm:
-                self.wait()
-        self.assertEqual(str(cm.exception), "error one")
-
-
-class AsyncTestCaseWrapperTest(unittest.TestCase):
-    def test_undecorated_generator(self):
-        class Test(AsyncTestCase):
-            def test_gen(self):
-                yield
-        test = Test('test_gen')
-        result = unittest.TestResult()
-        test.run(result)
-        self.assertEqual(len(result.errors), 1)
-        self.assertIn("should be decorated", result.errors[0][1])
-
-    @skipBefore35
-    def test_undecorated_coroutine(self):
-        namespace = exec_test(globals(), locals(), """
-        class Test(AsyncTestCase):
-            async def test_coro(self):
-                pass
-        """)
-
-        test_class = namespace['Test']
-        test = test_class('test_coro')
-        result = unittest.TestResult()
-
-        # Silence "RuntimeWarning: coroutine 'test_coro' was never awaited".
-        with warnings.catch_warnings():
-            warnings.simplefilter('ignore')
-            test.run(result)
-
-        self.assertEqual(len(result.errors), 1)
-        self.assertIn("should be decorated", result.errors[0][1])
-
-    def test_undecorated_generator_with_skip(self):
-        class Test(AsyncTestCase):
-            @unittest.skip("don't run this")
-            def test_gen(self):
-                yield
-        test = Test('test_gen')
-        result = unittest.TestResult()
-        test.run(result)
-        self.assertEqual(len(result.errors), 0)
-        self.assertEqual(len(result.skipped), 1)
-
-    def test_other_return(self):
-        class Test(AsyncTestCase):
-            def test_other_return(self):
-                return 42
-        test = Test('test_other_return')
-        result = unittest.TestResult()
-        test.run(result)
-        self.assertEqual(len(result.errors), 1)
-        self.assertIn("Return value from test method ignored", result.errors[0][1])
-
-
-class SetUpTearDownTest(unittest.TestCase):
-    def test_set_up_tear_down(self):
-        """
-        This test makes sure that AsyncTestCase calls super methods for
-        setUp and tearDown.
-
-        InheritBoth is a subclass of both AsyncTestCase and
-        SetUpTearDown, with the ordering so that the super of
-        AsyncTestCase will be SetUpTearDown.
-        """
-        events = []
-        result = unittest.TestResult()
-
-        class SetUpTearDown(unittest.TestCase):
-            def setUp(self):
-                events.append('setUp')
-
-            def tearDown(self):
-                events.append('tearDown')
-
-        class InheritBoth(AsyncTestCase, SetUpTearDown):
-            def test(self):
-                events.append('test')
-
-        InheritBoth('test').run(result)
-        expected = ['setUp', 'test', 'tearDown']
-        self.assertEqual(expected, events)
-
-
-class GenTest(AsyncTestCase):
-    def setUp(self):
-        super(GenTest, self).setUp()
-        self.finished = False
-
-    def tearDown(self):
-        self.assertTrue(self.finished)
-        super(GenTest, self).tearDown()
-
-    @gen_test
-    def test_sync(self):
-        self.finished = True
-
-    @gen_test
-    def test_async(self):
-        yield gen.Task(self.io_loop.add_callback)
-        self.finished = True
-
-    def test_timeout(self):
-        # Set a short timeout and exceed it.
-        @gen_test(timeout=0.1)
-        def test(self):
-            yield gen.Task(self.io_loop.add_timeout, self.io_loop.time() + 1)
-
-        # This can't use assertRaises because we need to inspect the
-        # exc_info triple (and not just the exception object)
-        try:
-            test(self)
-            self.fail("did not get expected exception")
-        except ioloop.TimeoutError:
-            # The stack trace should blame the add_timeout line, not just
-            # unrelated IOLoop/testing internals.
-            self.assertIn(
-                "gen.Task(self.io_loop.add_timeout, self.io_loop.time() + 1)",
-                traceback.format_exc())
-
-        self.finished = True
-
-    def test_no_timeout(self):
-        # A test that does not exceed its timeout should succeed.
-        @gen_test(timeout=1)
-        def test(self):
-            time = self.io_loop.time
-            yield gen.Task(self.io_loop.add_timeout, time() + 0.1)
-
-        test(self)
-        self.finished = True
-
-    def test_timeout_environment_variable(self):
-        @gen_test(timeout=0.5)
-        def test_long_timeout(self):
-            time = self.io_loop.time
-            yield gen.Task(self.io_loop.add_timeout, time() + 0.25)
-
-        # Uses provided timeout of 0.5 seconds, doesn't time out.
-        with set_environ('ASYNC_TEST_TIMEOUT', '0.1'):
-            test_long_timeout(self)
-
-        self.finished = True
-
-    def test_no_timeout_environment_variable(self):
-        @gen_test(timeout=0.01)
-        def test_short_timeout(self):
-            time = self.io_loop.time
-            yield gen.Task(self.io_loop.add_timeout, time() + 1)
-
-        # Uses environment-variable timeout of 0.1, times out.
-        with set_environ('ASYNC_TEST_TIMEOUT', '0.1'):
-            with self.assertRaises(ioloop.TimeoutError):
-                test_short_timeout(self)
-
-        self.finished = True
-
-    def test_with_method_args(self):
-        @gen_test
-        def test_with_args(self, *args):
-            self.assertEqual(args, ('test',))
-            yield gen.Task(self.io_loop.add_callback)
-
-        test_with_args(self, 'test')
-        self.finished = True
-
-    def test_with_method_kwargs(self):
-        @gen_test
-        def test_with_kwargs(self, **kwargs):
-            self.assertDictEqual(kwargs, {'test': 'test'})
-            yield gen.Task(self.io_loop.add_callback)
-
-        test_with_kwargs(self, test='test')
-        self.finished = True
-
-    @skipBefore35
-    def test_native_coroutine(self):
-        namespace = exec_test(globals(), locals(), """
-        @gen_test
-        async def test(self):
-            self.finished = True
-        """)
-
-        namespace['test'](self)
-
-    @skipBefore35
-    def test_native_coroutine_timeout(self):
-        # Set a short timeout and exceed it.
-        namespace = exec_test(globals(), locals(), """
-        @gen_test(timeout=0.1)
-        async def test(self):
-            await gen.sleep(1)
-        """)
-
-        try:
-            namespace['test'](self)
-            self.fail("did not get expected exception")
-        except ioloop.TimeoutError:
-            self.finished = True
-
-
-if __name__ == '__main__':
-    unittest.main()
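The testing_test cases above revolve around AsyncTestCase and the gen_test decorator, which runs a generator test method on the case's IOLoop under a timeout (5 seconds by default, overridable per decorator or via the ASYNC_TEST_TIMEOUT environment variable). A minimal sketch of that pattern:

from tornado import gen
from tornado.testing import AsyncTestCase, gen_test

class SleepTest(AsyncTestCase):
    @gen_test(timeout=1)
    def test_short_sleep(self):
        # Finishes well inside the 1-second budget; exceeding it would
        # raise ioloop.TimeoutError, as test_timeout above demonstrates.
        yield gen.sleep(0.01)
        self.assertTrue(True)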
diff --git a/lib/tornado/test/twisted_test.py b/lib/tornado/test/twisted_test.py
deleted file mode 100644
index 1604ce52f46b0b8defffe77d470db275bd52a870..0000000000000000000000000000000000000000
--- a/lib/tornado/test/twisted_test.py
+++ /dev/null
@@ -1,731 +0,0 @@
-# Author: Ovidiu Predescu
-# Date: July 2011
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Unittest for the twisted-style reactor.
-"""
-
-from __future__ import absolute_import, division, print_function
-
-import logging
-import os
-import shutil
-import signal
-import sys
-import tempfile
-import threading
-import warnings
-
-from tornado.escape import utf8
-from tornado import gen
-from tornado.httpclient import AsyncHTTPClient
-from tornado.httpserver import HTTPServer
-from tornado.ioloop import IOLoop
-from tornado.platform.auto import set_close_exec
-from tornado.platform.select import SelectIOLoop
-from tornado.testing import bind_unused_port
-from tornado.test.util import unittest
-from tornado.util import import_object, PY3
-from tornado.web import RequestHandler, Application
-
-try:
-    import fcntl
-    from twisted.internet.defer import Deferred, inlineCallbacks, returnValue  # type: ignore
-    from twisted.internet.interfaces import IReadDescriptor, IWriteDescriptor  # type: ignore
-    from twisted.internet.protocol import Protocol  # type: ignore
-    from twisted.python import log  # type: ignore
-    from tornado.platform.twisted import TornadoReactor, TwistedIOLoop
-    from zope.interface import implementer  # type: ignore
-    have_twisted = True
-except ImportError:
-    have_twisted = False
-
-# The core of Twisted 12.3.0 is available on python 3, but twisted.web is not
-# so test for it separately.
-try:
-    from twisted.web.client import Agent, readBody  # type: ignore
-    from twisted.web.resource import Resource  # type: ignore
-    from twisted.web.server import Site  # type: ignore
-    # As of Twisted 15.0.0, twisted.web is present but fails our
-    # tests due to internal str/bytes errors.
-    have_twisted_web = sys.version_info < (3,)
-except ImportError:
-    have_twisted_web = False
-
-if PY3:
-    import _thread as thread
-else:
-    import thread
-
-
-skipIfNoTwisted = unittest.skipUnless(have_twisted,
-                                      "twisted module not present")
-
-skipIfPy26 = unittest.skipIf(sys.version_info < (2, 7),
-                             "twisted incompatible with singledispatch in py26")
-
-
-def save_signal_handlers():
-    saved = {}
-    for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGCHLD]:
-        saved[sig] = signal.getsignal(sig)
-    if "twisted" in repr(saved):
-        if not issubclass(IOLoop.configured_class(), TwistedIOLoop):
-            # when the global ioloop is twisted, we expect the signal
-            # handlers to be installed.  Otherwise, it means we're not
-            # cleaning up after twisted properly.
-            raise Exception("twisted signal handlers already installed")
-    return saved
-
-
-def restore_signal_handlers(saved):
-    for sig, handler in saved.items():
-        signal.signal(sig, handler)
-
-
-class ReactorTestCase(unittest.TestCase):
-    def setUp(self):
-        self._saved_signals = save_signal_handlers()
-        self._io_loop = IOLoop()
-        self._reactor = TornadoReactor(self._io_loop)
-
-    def tearDown(self):
-        self._io_loop.close(all_fds=True)
-        restore_signal_handlers(self._saved_signals)
-
-
-@skipIfNoTwisted
-class ReactorWhenRunningTest(ReactorTestCase):
-    def test_whenRunning(self):
-        self._whenRunningCalled = False
-        self._anotherWhenRunningCalled = False
-        self._reactor.callWhenRunning(self.whenRunningCallback)
-        self._reactor.run()
-        self.assertTrue(self._whenRunningCalled)
-        self.assertTrue(self._anotherWhenRunningCalled)
-
-    def whenRunningCallback(self):
-        self._whenRunningCalled = True
-        self._reactor.callWhenRunning(self.anotherWhenRunningCallback)
-        self._reactor.stop()
-
-    def anotherWhenRunningCallback(self):
-        self._anotherWhenRunningCalled = True
-
-
-@skipIfNoTwisted
-class ReactorCallLaterTest(ReactorTestCase):
-    def test_callLater(self):
-        self._laterCalled = False
-        self._now = self._reactor.seconds()
-        self._timeout = 0.001
-        dc = self._reactor.callLater(self._timeout, self.callLaterCallback)
-        self.assertEqual(self._reactor.getDelayedCalls(), [dc])
-        self._reactor.run()
-        self.assertTrue(self._laterCalled)
-        self.assertTrue(self._called - self._now > self._timeout)
-        self.assertEqual(self._reactor.getDelayedCalls(), [])
-
-    def callLaterCallback(self):
-        self._laterCalled = True
-        self._called = self._reactor.seconds()
-        self._reactor.stop()
-
-
-@skipIfNoTwisted
-class ReactorTwoCallLaterTest(ReactorTestCase):
-    def test_callLater(self):
-        self._later1Called = False
-        self._later2Called = False
-        self._now = self._reactor.seconds()
-        self._timeout1 = 0.0005
-        dc1 = self._reactor.callLater(self._timeout1, self.callLaterCallback1)
-        self._timeout2 = 0.001
-        dc2 = self._reactor.callLater(self._timeout2, self.callLaterCallback2)
-        self.assertTrue(self._reactor.getDelayedCalls() == [dc1, dc2] or
-                        self._reactor.getDelayedCalls() == [dc2, dc1])
-        self._reactor.run()
-        self.assertTrue(self._later1Called)
-        self.assertTrue(self._later2Called)
-        self.assertTrue(self._called1 - self._now > self._timeout1)
-        self.assertTrue(self._called2 - self._now > self._timeout2)
-        self.assertEqual(self._reactor.getDelayedCalls(), [])
-
-    def callLaterCallback1(self):
-        self._later1Called = True
-        self._called1 = self._reactor.seconds()
-
-    def callLaterCallback2(self):
-        self._later2Called = True
-        self._called2 = self._reactor.seconds()
-        self._reactor.stop()
-
-
-@skipIfNoTwisted
-class ReactorCallFromThreadTest(ReactorTestCase):
-    def setUp(self):
-        super(ReactorCallFromThreadTest, self).setUp()
-        self._mainThread = thread.get_ident()
-
-    def tearDown(self):
-        self._thread.join()
-        super(ReactorCallFromThreadTest, self).tearDown()
-
-    def _newThreadRun(self):
-        self.assertNotEqual(self._mainThread, thread.get_ident())
-        if hasattr(self._thread, 'ident'):  # new in python 2.6
-            self.assertEqual(self._thread.ident, thread.get_ident())
-        self._reactor.callFromThread(self._fnCalledFromThread)
-
-    def _fnCalledFromThread(self):
-        self.assertEqual(self._mainThread, thread.get_ident())
-        self._reactor.stop()
-
-    def _whenRunningCallback(self):
-        self._thread = threading.Thread(target=self._newThreadRun)
-        self._thread.start()
-
-    def testCallFromThread(self):
-        self._reactor.callWhenRunning(self._whenRunningCallback)
-        self._reactor.run()
-
-
-@skipIfNoTwisted
-class ReactorCallInThread(ReactorTestCase):
-    def setUp(self):
-        super(ReactorCallInThread, self).setUp()
-        self._mainThread = thread.get_ident()
-
-    def _fnCalledInThread(self, *args, **kwargs):
-        self.assertNotEqual(thread.get_ident(), self._mainThread)
-        self._reactor.callFromThread(lambda: self._reactor.stop())
-
-    def _whenRunningCallback(self):
-        self._reactor.callInThread(self._fnCalledInThread)
-
-    def testCallInThread(self):
-        self._reactor.callWhenRunning(self._whenRunningCallback)
-        self._reactor.run()
-
-
-if have_twisted:
-    @implementer(IReadDescriptor)
-    class Reader(object):
-        def __init__(self, fd, callback):
-            self._fd = fd
-            self._callback = callback
-
-        def logPrefix(self):
-            return "Reader"
-
-        def close(self):
-            self._fd.close()
-
-        def fileno(self):
-            return self._fd.fileno()
-
-        def readConnectionLost(self, reason):
-            self.close()
-
-        def connectionLost(self, reason):
-            self.close()
-
-        def doRead(self):
-            self._callback(self._fd)
-
-    @implementer(IWriteDescriptor)
-    class Writer(object):
-        def __init__(self, fd, callback):
-            self._fd = fd
-            self._callback = callback
-
-        def logPrefix(self):
-            return "Writer"
-
-        def close(self):
-            self._fd.close()
-
-        def fileno(self):
-            return self._fd.fileno()
-
-        def connectionLost(self, reason):
-            self.close()
-
-        def doWrite(self):
-            self._callback(self._fd)
-
-
-@skipIfNoTwisted
-class ReactorReaderWriterTest(ReactorTestCase):
-    def _set_nonblocking(self, fd):
-        flags = fcntl.fcntl(fd, fcntl.F_GETFL)
-        fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
-    def setUp(self):
-        super(ReactorReaderWriterTest, self).setUp()
-        r, w = os.pipe()
-        self._set_nonblocking(r)
-        self._set_nonblocking(w)
-        set_close_exec(r)
-        set_close_exec(w)
-        self._p1 = os.fdopen(r, "rb", 0)
-        self._p2 = os.fdopen(w, "wb", 0)
-
-    def tearDown(self):
-        super(ReactorReaderWriterTest, self).tearDown()
-        self._p1.close()
-        self._p2.close()
-
-    def _testReadWrite(self):
-        """
-        In this test the writer writes an 'x' to its fd. The reader
-        reads it, checks the value, and ends the test.
-        """
-        self.shouldWrite = True
-
-        def checkReadInput(fd):
-            self.assertEquals(fd.read(1), b'x')
-            self._reactor.stop()
-
-        def writeOnce(fd):
-            if self.shouldWrite:
-                self.shouldWrite = False
-                fd.write(b'x')
-        self._reader = Reader(self._p1, checkReadInput)
-        self._writer = Writer(self._p2, writeOnce)
-
-        self._reactor.addWriter(self._writer)
-
-        # Test that adding the reader twice adds it only once to
-        # IOLoop.
-        self._reactor.addReader(self._reader)
-        self._reactor.addReader(self._reader)
-
-    def testReadWrite(self):
-        self._reactor.callWhenRunning(self._testReadWrite)
-        self._reactor.run()
-
-    def _testNoWriter(self):
-        """
-        In this test we have no writer. Make sure the reader doesn't
-        read anything.
-        """
-        def checkReadInput(fd):
-            self.fail("Must not be called.")
-
-        def stopTest():
-            # Close the writer here since the IOLoop doesn't know
-            # about it.
-            self._writer.close()
-            self._reactor.stop()
-        self._reader = Reader(self._p1, checkReadInput)
-
-        # We create a writer, but it should never be invoked.
-        self._writer = Writer(self._p2, lambda fd: fd.write('x'))
-
-        # Test that adding and removing the writer leaves us with no writer.
-        self._reactor.addWriter(self._writer)
-        self._reactor.removeWriter(self._writer)
-
-        # Test that adding and removing the reader doesn't cause
-        # unintended effects.
-        self._reactor.addReader(self._reader)
-
-        # Wake up after a moment and stop the test
-        self._reactor.callLater(0.001, stopTest)
-
-    def testNoWriter(self):
-        self._reactor.callWhenRunning(self._testNoWriter)
-        self._reactor.run()
-
-# Test various combinations of twisted and tornado http servers,
-# http clients, and event loop interfaces.
-
-
-@skipIfNoTwisted
-@unittest.skipIf(not have_twisted_web, 'twisted web not present')
-class CompatibilityTests(unittest.TestCase):
-    def setUp(self):
-        self.saved_signals = save_signal_handlers()
-        self.io_loop = IOLoop()
-        self.io_loop.make_current()
-        self.reactor = TornadoReactor(self.io_loop)
-
-    def tearDown(self):
-        self.reactor.disconnectAll()
-        self.io_loop.clear_current()
-        self.io_loop.close(all_fds=True)
-        restore_signal_handlers(self.saved_signals)
-
-    def start_twisted_server(self):
-        class HelloResource(Resource):
-            isLeaf = True
-
-            def render_GET(self, request):
-                return "Hello from twisted!"
-        site = Site(HelloResource())
-        port = self.reactor.listenTCP(0, site, interface='127.0.0.1')
-        self.twisted_port = port.getHost().port
-
-    def start_tornado_server(self):
-        class HelloHandler(RequestHandler):
-            def get(self):
-                self.write("Hello from tornado!")
-        app = Application([('/', HelloHandler)],
-                          log_function=lambda x: None)
-        server = HTTPServer(app, io_loop=self.io_loop)
-        sock, self.tornado_port = bind_unused_port()
-        server.add_sockets([sock])
-
-    def run_ioloop(self):
-        self.stop_loop = self.io_loop.stop
-        self.io_loop.start()
-        self.reactor.fireSystemEvent('shutdown')
-
-    def run_reactor(self):
-        self.stop_loop = self.reactor.stop
-        self.stop = self.reactor.stop
-        self.reactor.run()
-
-    def tornado_fetch(self, url, runner):
-        responses = []
-        client = AsyncHTTPClient(self.io_loop)
-
-        def callback(response):
-            responses.append(response)
-            self.stop_loop()
-        client.fetch(url, callback=callback)
-        runner()
-        self.assertEqual(len(responses), 1)
-        responses[0].rethrow()
-        return responses[0]
-
-    def twisted_fetch(self, url, runner):
-        # http://twistedmatrix.com/documents/current/web/howto/client.html
-        chunks = []
-        client = Agent(self.reactor)
-        d = client.request(b'GET', utf8(url))
-
-        class Accumulator(Protocol):
-            def __init__(self, finished):
-                self.finished = finished
-
-            def dataReceived(self, data):
-                chunks.append(data)
-
-            def connectionLost(self, reason):
-                self.finished.callback(None)
-
-        def callback(response):
-            finished = Deferred()
-            response.deliverBody(Accumulator(finished))
-            return finished
-        d.addCallback(callback)
-
-        def shutdown(failure):
-            if hasattr(self, 'stop_loop'):
-                self.stop_loop()
-            elif failure is not None:
-                # loop hasn't been initialized yet; try our best to
-                # get an error message out. (the runner() interaction
-                # should probably be refactored).
-                try:
-                    failure.raiseException()
-                except:
-                    logging.error('exception before starting loop', exc_info=True)
-        d.addBoth(shutdown)
-        runner()
-        self.assertTrue(chunks)
-        return ''.join(chunks)
-
-    def twisted_coroutine_fetch(self, url, runner):
-        body = [None]
-
-        @gen.coroutine
-        def f():
-            # This is simpler than the non-coroutine version, but it cheats
-            # by reading the body in one blob instead of streaming it with
-            # a Protocol.
-            client = Agent(self.reactor)
-            response = yield client.request(b'GET', utf8(url))
-            with warnings.catch_warnings():
-                # readBody has a buggy DeprecationWarning in Twisted 15.0:
-                # https://twistedmatrix.com/trac/changeset/43379
-                warnings.simplefilter('ignore', category=DeprecationWarning)
-                body[0] = yield readBody(response)
-            self.stop_loop()
-        self.io_loop.add_callback(f)
-        runner()
-        return body[0]
-
-    def testTwistedServerTornadoClientIOLoop(self):
-        self.start_twisted_server()
-        response = self.tornado_fetch(
-            'http://127.0.0.1:%d' % self.twisted_port, self.run_ioloop)
-        self.assertEqual(response.body, 'Hello from twisted!')
-
-    def testTwistedServerTornadoClientReactor(self):
-        self.start_twisted_server()
-        response = self.tornado_fetch(
-            'http://127.0.0.1:%d' % self.twisted_port, self.run_reactor)
-        self.assertEqual(response.body, 'Hello from twisted!')
-
-    def testTornadoServerTwistedClientIOLoop(self):
-        self.start_tornado_server()
-        response = self.twisted_fetch(
-            'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop)
-        self.assertEqual(response, 'Hello from tornado!')
-
-    def testTornadoServerTwistedClientReactor(self):
-        self.start_tornado_server()
-        response = self.twisted_fetch(
-            'http://127.0.0.1:%d' % self.tornado_port, self.run_reactor)
-        self.assertEqual(response, 'Hello from tornado!')
-
-    @skipIfPy26
-    def testTornadoServerTwistedCoroutineClientIOLoop(self):
-        self.start_tornado_server()
-        response = self.twisted_coroutine_fetch(
-            'http://127.0.0.1:%d' % self.tornado_port, self.run_ioloop)
-        self.assertEqual(response, 'Hello from tornado!')
-
-
-@skipIfNoTwisted
-@skipIfPy26
-class ConvertDeferredTest(unittest.TestCase):
-    def test_success(self):
-        @inlineCallbacks
-        def fn():
-            if False:
-                # inlineCallbacks doesn't work with regular functions;
-                # must have a yield even if it's unreachable.
-                yield
-            returnValue(42)
-        f = gen.convert_yielded(fn())
-        self.assertEqual(f.result(), 42)
-
-    def test_failure(self):
-        @inlineCallbacks
-        def fn():
-            if False:
-                yield
-            1 / 0
-        f = gen.convert_yielded(fn())
-        with self.assertRaises(ZeroDivisionError):
-            f.result()
-
-
-if have_twisted:
-    # Import and run as much of twisted's test suite as possible.
-    # This is unfortunately rather dependent on implementation details,
-    # but there doesn't appear to be a clean all-in-one conformance test
-    # suite for reactors.
-    #
-    # This is a list of all test suites using the ReactorBuilder
-    # available in Twisted 11.0.0 and 11.1.0 (and a blacklist of
-    # specific test methods to be disabled).
-    twisted_tests = {
-        'twisted.internet.test.test_core.ObjectModelIntegrationTest': [],
-        'twisted.internet.test.test_core.SystemEventTestsBuilder': [
-            'test_iterate',  # deliberately not supported
-            # Fails on TwistedIOLoop and AsyncIOLoop.
-            'test_runAfterCrash',
-        ],
-        'twisted.internet.test.test_fdset.ReactorFDSetTestsBuilder': [
-            "test_lostFileDescriptor",  # incompatible with epoll and kqueue
-        ],
-        'twisted.internet.test.test_process.ProcessTestsBuilder': [
-            # Only work as root.  Twisted's "skip" functionality works
-            # with py27+, but not unittest2 on py26.
-            'test_changeGID',
-            'test_changeUID',
-            # This test sometimes fails with EPIPE on a call to
-            # kqueue.control. Happens consistently for me with
-            # trollius but not asyncio or other IOLoops.
-            'test_childConnectionLost',
-        ],
-        # Process tests appear to work on OSX 10.7, but not 10.6
-        # 'twisted.internet.test.test_process.PTYProcessTestsBuilder': [
-        #    'test_systemCallUninterruptedByChildExit',
-        #    ],
-        'twisted.internet.test.test_tcp.TCPClientTestsBuilder': [
-            'test_badContext',  # ssl-related; see also SSLClientTestsMixin
-        ],
-        'twisted.internet.test.test_tcp.TCPPortTestsBuilder': [
-            # These use link-local addresses and cause firewall prompts on mac
-            'test_buildProtocolIPv6AddressScopeID',
-            'test_portGetHostOnIPv6ScopeID',
-            'test_serverGetHostOnIPv6ScopeID',
-            'test_serverGetPeerOnIPv6ScopeID',
-        ],
-        'twisted.internet.test.test_tcp.TCPConnectionTestsBuilder': [],
-        'twisted.internet.test.test_tcp.WriteSequenceTests': [],
-        'twisted.internet.test.test_tcp.AbortConnectionTestCase': [],
-        'twisted.internet.test.test_threads.ThreadTestsBuilder': [],
-        'twisted.internet.test.test_time.TimeTestsBuilder': [],
-        # Extra third-party dependencies (pyOpenSSL)
-        # 'twisted.internet.test.test_tls.SSLClientTestsMixin': [],
-        'twisted.internet.test.test_udp.UDPServerTestsBuilder': [],
-        'twisted.internet.test.test_unix.UNIXTestsBuilder': [
-            # Platform-specific.  These tests would be skipped automatically
-            # if we were running twisted's own test runner.
-            'test_connectToLinuxAbstractNamespace',
-            'test_listenOnLinuxAbstractNamespace',
-            # These tests use twisted's sendmsg.c extension and sometimes
-            # fail with what looks like uninitialized memory errors
-            # (more common on pypy than cpython, but I've seen it on both)
-            'test_sendFileDescriptor',
-            'test_sendFileDescriptorTriggersPauseProducing',
-            'test_descriptorDeliveredBeforeBytes',
-            'test_avoidLeakingFileDescriptors',
-        ],
-        'twisted.internet.test.test_unix.UNIXDatagramTestsBuilder': [
-            'test_listenOnLinuxAbstractNamespace',
-        ],
-        'twisted.internet.test.test_unix.UNIXPortTestsBuilder': [],
-    }
-    if sys.version_info >= (3,):
-        # In Twisted 15.2.0 on Python 3.4, the process tests will try to run
-        # but fail, due in part to interactions between Tornado's strict
-        # warnings-as-errors policy and Twisted's own warning handling
-        # (it was not obvious how to configure the warnings module to
-        # reconcile the two), and partly due to what looks like a packaging
-        # error (process_cli.py missing). For now, just skip it.
-        del twisted_tests['twisted.internet.test.test_process.ProcessTestsBuilder']
-    for test_name, blacklist in twisted_tests.items():
-        try:
-            test_class = import_object(test_name)
-        except (ImportError, AttributeError):
-            continue
-        for test_func in blacklist:  # type: ignore
-            if hasattr(test_class, test_func):
-                # The test_func may be defined in a mixin, so clobber
-                # it instead of delattr()
-                setattr(test_class, test_func, lambda self: None)
-
-        def make_test_subclass(test_class):
-            class TornadoTest(test_class):  # type: ignore
-                _reactors = ["tornado.platform.twisted._TestReactor"]
-
-                def setUp(self):
-                    # Twisted's tests expect to be run from a temporary
-                    # directory; they create files in their working directory
-                    # and don't always clean up after themselves.
-                    self.__curdir = os.getcwd()
-                    self.__tempdir = tempfile.mkdtemp()
-                    os.chdir(self.__tempdir)
-                    super(TornadoTest, self).setUp()  # type: ignore
-
-                def tearDown(self):
-                    super(TornadoTest, self).tearDown()  # type: ignore
-                    os.chdir(self.__curdir)
-                    shutil.rmtree(self.__tempdir)
-
-                def flushWarnings(self, *args, **kwargs):
-                    # This is a hack because Twisted and Tornado have
-                    # differing approaches to warnings in tests.
-                    # Tornado sets up a global set of warnings filters
-                    # in runtests.py, while Twisted patches the filter
-                    # list in each test. The net effect is that
-                    # Twisted's tests run with Tornado's increased
-                    # strictness (BytesWarning and ResourceWarning are
-                    # enabled) but without our filter rules to ignore those
-                    # warnings from Twisted code.
-                    filtered = []
-                    for w in super(TornadoTest, self).flushWarnings(  # type: ignore
-                            *args, **kwargs):
-                        if w['category'] in (BytesWarning, ResourceWarning):
-                            continue
-                        filtered.append(w)
-                    return filtered
-
-                def buildReactor(self):
-                    self.__saved_signals = save_signal_handlers()
-                    return test_class.buildReactor(self)
-
-                def unbuildReactor(self, reactor):
-                    test_class.unbuildReactor(self, reactor)
-                    # Clean up file descriptors (especially epoll/kqueue
-                    # objects) eagerly instead of leaving them for the
-                    # GC.  Unfortunately we can't do this in reactor.stop
-                    # since twisted expects to be able to unregister
-                    # connections in a post-shutdown hook.
-                    reactor._io_loop.close(all_fds=True)
-                    restore_signal_handlers(self.__saved_signals)
-
-            TornadoTest.__name__ = test_class.__name__
-            return TornadoTest
-        test_subclass = make_test_subclass(test_class)
-        globals().update(test_subclass.makeTestCaseClasses())
-
-    # Since we're not using twisted's test runner, it's tricky to get
-    # logging set up well.  Most of the time it's easiest to just
-    # leave it turned off, but while working on these tests you may want
-    # to uncomment one of the other lines instead.
-    log.defaultObserver.stop()
-    # import sys; log.startLogging(sys.stderr, setStdout=0)
-    # log.startLoggingWithObserver(log.PythonLoggingObserver().emit, setStdout=0)
-    # import logging; logging.getLogger('twisted').setLevel(logging.WARNING)
-
-    # Twisted recently introduced a new logger; disable that one too.
-    try:
-        from twisted.logger import globalLogBeginner  # type: ignore
-    except ImportError:
-        pass
-    else:
-        globalLogBeginner.beginLoggingTo([], redirectStandardIO=False)
-
-if have_twisted:
-    class LayeredTwistedIOLoop(TwistedIOLoop):
-        """Layers a TwistedIOLoop on top of a TornadoReactor on a SelectIOLoop.
-
-        This is of course silly, but is useful for testing purposes to make
-        sure we're implementing both sides of the various interfaces
-        correctly.  In some tests another TornadoReactor is layered on top
-        of the whole stack.
-        """
-        def initialize(self, **kwargs):
-            # When configured to use LayeredTwistedIOLoop we can't easily
-            # get the next-best IOLoop implementation, so use the lowest common
-            # denominator.
-            self.real_io_loop = SelectIOLoop(make_current=False)  # type: ignore
-            reactor = TornadoReactor(io_loop=self.real_io_loop)
-            super(LayeredTwistedIOLoop, self).initialize(reactor=reactor, **kwargs)
-            self.add_callback(self.make_current)
-
-        def close(self, all_fds=False):
-            super(LayeredTwistedIOLoop, self).close(all_fds=all_fds)
-            # HACK: This is the same thing that test_class.unbuildReactor does.
-            for reader in self.reactor._internalReaders:
-                self.reactor.removeReader(reader)
-                reader.connectionLost(None)
-            self.real_io_loop.close(all_fds=all_fds)
-
-        def stop(self):
-            # One of Twisted's tests fails unless crash() is delayed until
-            # the reactor has started, but moving that delay into
-            # TwistedIOLoop makes other tests fail when we are *not* running
-            # tornado-on-twisted-on-tornado.  The startup/crash semantics are
-            # not fully understood here, but since stop and crash are only
-            # used in tests this is acceptable.
-            def f():
-                self.reactor.crash()
-                # Become current again on restart. This is needed to
-                # override real_io_loop's claim to being the current loop.
-                self.add_callback(self.make_current)
-            self.reactor.callWhenRunning(f)
-
-if __name__ == "__main__":
-    unittest.main()
diff --git a/lib/tornado/test/util.py b/lib/tornado/test/util.py
deleted file mode 100644
index 6c032da63f85b28a7298ddb8015b8c2e851a1d8b..0000000000000000000000000000000000000000
--- a/lib/tornado/test/util.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import os
-import platform
-import socket
-import sys
-import textwrap
-
-from tornado.testing import bind_unused_port
-
-# Encapsulate the choice of unittest or unittest2 here.
-# To be used as 'from tornado.test.util import unittest'.
-if sys.version_info < (2, 7):
-    # In py26, we must always use unittest2.
-    import unittest2 as unittest  # type: ignore
-else:
-    # Otherwise, use whichever version of unittest was imported in
-    # tornado.testing.
-    from tornado.testing import unittest
-
-skipIfNonUnix = unittest.skipIf(os.name != 'posix' or sys.platform == 'cygwin',
-                                "non-unix platform")
-
-# travis-ci.org runs our tests in an overworked virtual machine, which makes
-# timing-related tests unreliable.
-skipOnTravis = unittest.skipIf('TRAVIS' in os.environ,
-                               'timing tests unreliable on travis')
-
-skipOnAppEngine = unittest.skipIf('APPENGINE_RUNTIME' in os.environ,
-                                  'not available on Google App Engine')
-
-# Set the environment variable NO_NETWORK=1 to disable any tests that
-# depend on an external network.
-skipIfNoNetwork = unittest.skipIf('NO_NETWORK' in os.environ,
-                                  'network access disabled')
-
-skipIfNoIPv6 = unittest.skipIf(not socket.has_ipv6, 'ipv6 support not present')
-
-
-skipBefore33 = unittest.skipIf(sys.version_info < (3, 3), 'PEP 380 (yield from) not available')
-skipBefore35 = unittest.skipIf(sys.version_info < (3, 5), 'PEP 492 (async/await) not available')
-skipNotCPython = unittest.skipIf(platform.python_implementation() != 'CPython',
-                                 'Not CPython implementation')
-
-
-def refusing_port():
-    """Returns a local port number that will refuse all connections.
-
-    Return value is (cleanup_func, port); the cleanup function
-    must be called to free the port to be reused.
-    """
-    # On travis-ci, port numbers are reassigned frequently. To avoid
-    # collisions with other tests, we use an open client-side socket's
-    # ephemeral port number to ensure that nothing can listen on that
-    # port.
-    server_socket, port = bind_unused_port()
-    server_socket.setblocking(1)
-    client_socket = socket.socket()
-    client_socket.connect(("127.0.0.1", port))
-    conn, client_addr = server_socket.accept()
-    conn.close()
-    server_socket.close()
-    return (client_socket.close, client_addr[1])
-
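A minimal usage sketch (hypothetical test code, not part of this module): callers are expected to attempt a connection, observe the failure, and then invoke the cleanup function so the port can be reused.

cleanup_func, port = refusing_port()
try:
    s = socket.socket()
    s.settimeout(1)
    try:
        s.connect(("127.0.0.1", port))
    except socket.error:
        pass  # expected: nothing is listening on this port
    finally:
        s.close()
finally:
    cleanup_func()  # closes the held client socket, freeing the port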
-
-def exec_test(caller_globals, caller_locals, s):
-    """Execute ``s`` in a given context and return the result namespace.
-
-    Used to define functions for tests in particular python
-    versions that would be syntax errors in older versions.
-    """
-    # Flatten the real global and local namespace into our fake
-    # globals: it's all global from the perspective of code defined
-    # in s.
-    global_namespace = dict(caller_globals, **caller_locals)  # type: ignore
-    local_namespace = {}
-    exec(textwrap.dedent(s), global_namespace, local_namespace)
-    return local_namespace
-
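A hedged usage sketch (illustrative only; the `answer` coroutine is hypothetical): the source string may use syntax that older interpreters cannot parse, here async/await, without breaking module import on those interpreters.

namespace = exec_test(globals(), locals(), """
    async def answer():
        return 42
""")
answer = namespace['answer']  # only defined when the syntax is supported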
-
-def is_coverage_running():
-    """Return whether coverage is currently running.
-    """
-    if 'coverage' not in sys.modules:
-        return False
-    tracer = sys.gettrace()
-    if tracer is None:
-        return False
-    try:
-        mod = tracer.__module__
-    except AttributeError:
-        try:
-            mod = tracer.__class__.__module__
-        except AttributeError:
-            return False
-    return mod.startswith('coverage')
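A hedged sketch of how such a check might be consumed (hypothetical decorator, not defined in this module):

skipIfCoverage = unittest.skipIf(is_coverage_running(),
                                 'timing-sensitive test unreliable under coverage')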
diff --git a/lib/tornado/test/util_test.py b/lib/tornado/test/util_test.py
deleted file mode 100644
index 459cb9c327164f20ad7e8b81436fc7b984aefe32..0000000000000000000000000000000000000000
--- a/lib/tornado/test/util_test.py
+++ /dev/null
@@ -1,227 +0,0 @@
-# coding: utf-8
-from __future__ import absolute_import, division, print_function
-import re
-import sys
-import datetime
-
-import tornado.escape
-from tornado.escape import utf8
-from tornado.util import raise_exc_info, Configurable, exec_in, ArgReplacer, timedelta_to_seconds, import_object, re_unescape, is_finalizing, PY3
-from tornado.test.util import unittest
-
-if PY3:
-    from io import StringIO
-else:
-    from cStringIO import StringIO
-
-
-class RaiseExcInfoTest(unittest.TestCase):
-    def test_two_arg_exception(self):
-        # This test would fail on python 3 if raise_exc_info were simply
-        # a three-argument raise statement, because TwoArgException
-        # doesn't have a "copy constructor"
-        class TwoArgException(Exception):
-            def __init__(self, a, b):
-                super(TwoArgException, self).__init__()
-                self.a, self.b = a, b
-
-        try:
-            raise TwoArgException(1, 2)
-        except TwoArgException:
-            exc_info = sys.exc_info()
-        try:
-            raise_exc_info(exc_info)
-            self.fail("didn't get expected exception")
-        except TwoArgException as e:
-            self.assertIs(e, exc_info[1])
-
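A hedged sketch of the capture-now, re-raise-later pattern the helper supports (illustrative only):

try:
    1 / 0
except ZeroDivisionError:
    saved = sys.exc_info()
# ... later, possibly from a different stack frame:
raise_exc_info(saved)  # re-raises with the original traceback attached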
-
-class TestConfigurable(Configurable):
-    @classmethod
-    def configurable_base(cls):
-        return TestConfigurable
-
-    @classmethod
-    def configurable_default(cls):
-        return TestConfig1
-
-
-class TestConfig1(TestConfigurable):
-    def initialize(self, pos_arg=None, a=None):
-        self.a = a
-        self.pos_arg = pos_arg
-
-
-class TestConfig2(TestConfigurable):
-    def initialize(self, pos_arg=None, b=None):
-        self.b = b
-        self.pos_arg = pos_arg
-
-
-class ConfigurableTest(unittest.TestCase):
-    def setUp(self):
-        self.saved = TestConfigurable._save_configuration()
-
-    def tearDown(self):
-        TestConfigurable._restore_configuration(self.saved)
-
-    def checkSubclasses(self):
-        # no matter how the class is configured, it should always be
-        # possible to instantiate the subclasses directly
-        self.assertIsInstance(TestConfig1(), TestConfig1)
-        self.assertIsInstance(TestConfig2(), TestConfig2)
-
-        obj = TestConfig1(a=1)
-        self.assertEqual(obj.a, 1)
-        obj = TestConfig2(b=2)
-        self.assertEqual(obj.b, 2)
-
-    def test_default(self):
-        obj = TestConfigurable()
-        self.assertIsInstance(obj, TestConfig1)
-        self.assertIs(obj.a, None)
-
-        obj = TestConfigurable(a=1)
-        self.assertIsInstance(obj, TestConfig1)
-        self.assertEqual(obj.a, 1)
-
-        self.checkSubclasses()
-
-    def test_config_class(self):
-        TestConfigurable.configure(TestConfig2)
-        obj = TestConfigurable()
-        self.assertIsInstance(obj, TestConfig2)
-        self.assertIs(obj.b, None)
-
-        obj = TestConfigurable(b=2)
-        self.assertIsInstance(obj, TestConfig2)
-        self.assertEqual(obj.b, 2)
-
-        self.checkSubclasses()
-
-    def test_config_args(self):
-        TestConfigurable.configure(None, a=3)
-        obj = TestConfigurable()
-        self.assertIsInstance(obj, TestConfig1)
-        self.assertEqual(obj.a, 3)
-
-        obj = TestConfigurable(42, a=4)
-        self.assertIsInstance(obj, TestConfig1)
-        self.assertEqual(obj.a, 4)
-        self.assertEqual(obj.pos_arg, 42)
-
-        self.checkSubclasses()
-        # args bound in configure don't apply when using the subclass directly
-        obj = TestConfig1()
-        self.assertIs(obj.a, None)
-
-    def test_config_class_args(self):
-        TestConfigurable.configure(TestConfig2, b=5)
-        obj = TestConfigurable()
-        self.assertIsInstance(obj, TestConfig2)
-        self.assertEqual(obj.b, 5)
-
-        obj = TestConfigurable(42, b=6)
-        self.assertIsInstance(obj, TestConfig2)
-        self.assertEqual(obj.b, 6)
-        self.assertEqual(obj.pos_arg, 42)
-
-        self.checkSubclasses()
-        # args bound in configure don't apply when using the subclass directly
-        obj = TestConfig2()
-        self.assertIs(obj.b, None)
-
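For context, a hedged sketch of the same Configurable machinery as used by a real Tornado class (assumes pycurl is installed; not part of the deleted tests):

from tornado.httpclient import AsyncHTTPClient

AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient",
                          max_clients=10)
client = AsyncHTTPClient()  # instantiates the configured subclass with max_clients=10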
-
-class UnicodeLiteralTest(unittest.TestCase):
-    def test_unicode_escapes(self):
-        self.assertEqual(utf8(u'\u00e9'), b'\xc3\xa9')
-
-
-class ExecInTest(unittest.TestCase):
-    # This test is python 2 only because there are no new future imports
-    # defined in python 3 yet.
-    @unittest.skipIf(sys.version_info >= print_function.getMandatoryRelease(),
-                     'no testable future imports')
-    def test_no_inherit_future(self):
-        # This file has from __future__ import print_function...
-        f = StringIO()
-        print('hello', file=f)
-        # ...but the template doesn't
-        exec_in('print >> f, "world"', dict(f=f))
-        self.assertEqual(f.getvalue(), 'hello\nworld\n')
-
-
-class ArgReplacerTest(unittest.TestCase):
-    def setUp(self):
-        def function(x, y, callback=None, z=None):
-            pass
-        self.replacer = ArgReplacer(function, 'callback')
-
-    def test_omitted(self):
-        args = (1, 2)
-        kwargs = dict()
-        self.assertIs(self.replacer.get_old_value(args, kwargs), None)
-        self.assertEqual(self.replacer.replace('new', args, kwargs),
-                         (None, (1, 2), dict(callback='new')))
-
-    def test_position(self):
-        args = (1, 2, 'old', 3)
-        kwargs = dict()
-        self.assertEqual(self.replacer.get_old_value(args, kwargs), 'old')
-        self.assertEqual(self.replacer.replace('new', args, kwargs),
-                         ('old', [1, 2, 'new', 3], dict()))
-
-    def test_keyword(self):
-        args = (1,)
-        kwargs = dict(y=2, callback='old', z=3)
-        self.assertEqual(self.replacer.get_old_value(args, kwargs), 'old')
-        self.assertEqual(self.replacer.replace('new', args, kwargs),
-                         ('old', (1,), dict(y=2, callback='new', z=3)))
-
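A hedged sketch of the callback-wrapping pattern ArgReplacer enables (hypothetical `traced` decorator, not part of the original file):

import logging

def traced(fn):
    replacer = ArgReplacer(fn, 'callback')

    def wrapper(*args, **kwargs):
        old_callback = replacer.get_old_value(args, kwargs)

        def new_callback(*cb_args, **cb_kwargs):
            logging.debug('callback for %r fired', fn)
            if old_callback is not None:
                old_callback(*cb_args, **cb_kwargs)

        # replace() works whether 'callback' was passed positionally, by
        # keyword, or omitted entirely (in which case one is injected).
        _, args, kwargs = replacer.replace(new_callback, args, kwargs)
        return fn(*args, **kwargs)

    return wrapper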
-
-class TimedeltaToSecondsTest(unittest.TestCase):
-    def test_timedelta_to_seconds(self):
-        time_delta = datetime.timedelta(hours=1)
-        self.assertEqual(timedelta_to_seconds(time_delta), 3600.0)
-
-
-class ImportObjectTest(unittest.TestCase):
-    def test_import_member(self):
-        self.assertIs(import_object('tornado.escape.utf8'), utf8)
-
-    def test_import_member_unicode(self):
-        self.assertIs(import_object(u'tornado.escape.utf8'), utf8)
-
-    def test_import_module(self):
-        self.assertIs(import_object('tornado.escape'), tornado.escape)
-
-    def test_import_module_unicode(self):
-        # The internal implementation of __import__ differs depending on
-        # whether the thing being imported is a module or not.
-        # This variant requires a byte string in python 2.
-        self.assertIs(import_object(u'tornado.escape'), tornado.escape)
-
-
-class ReUnescapeTest(unittest.TestCase):
-    def test_re_unescape(self):
-        test_strings = (
-            '/favicon.ico',
-            'index.html',
-            'Hello, World!',
-            '!$@#%;',
-        )
-        for string in test_strings:
-            self.assertEqual(string, re_unescape(re.escape(string)))
-
-    def test_re_unescape_raises_error_on_invalid_input(self):
-        with self.assertRaises(ValueError):
-            re_unescape('\\d')
-        with self.assertRaises(ValueError):
-            re_unescape('\\b')
-        with self.assertRaises(ValueError):
-            re_unescape('\\Z')
-
-
-class IsFinalizingTest(unittest.TestCase):
-    def test_basic(self):
-        self.assertFalse(is_finalizing())
diff --git a/lib/tornado/test/web_test.py b/lib/tornado/test/web_test.py
deleted file mode 100644
index d79ea52c1be27111e387f1a23bbd8c5954ae2258..0000000000000000000000000000000000000000
--- a/lib/tornado/test/web_test.py
+++ /dev/null
@@ -1,2889 +0,0 @@
-from __future__ import absolute_import, division, print_function
-from tornado.concurrent import Future
-from tornado import gen
-from tornado.escape import json_decode, utf8, to_unicode, recursive_unicode, native_str, to_basestring
-from tornado.httputil import format_timestamp
-from tornado.ioloop import IOLoop
-from tornado.iostream import IOStream
-from tornado import locale
-from tornado.log import app_log, gen_log
-from tornado.simple_httpclient import SimpleAsyncHTTPClient
-from tornado.template import DictLoader
-from tornado.testing import AsyncHTTPTestCase, AsyncTestCase, ExpectLog, gen_test
-from tornado.test.util import unittest, skipBefore35, exec_test
-from tornado.util import ObjectDict, unicode_type, timedelta_to_seconds, PY3
-from tornado.web import RequestHandler, authenticated, Application, asynchronous, url, HTTPError, StaticFileHandler, _create_signature_v1, create_signed_value, decode_signed_value, ErrorHandler, UIModule, MissingArgumentError, stream_request_body, Finish, removeslash, addslash, RedirectHandler as WebRedirectHandler, get_signature_key_version, GZipContentEncoding
-
-import binascii
-import contextlib
-import copy
-import datetime
-import email.utils
-import gzip
-from io import BytesIO
-import itertools
-import logging
-import os
-import re
-import socket
-
-if PY3:
-    import urllib.parse as urllib_parse  # py3
-else:
-    import urllib as urllib_parse  # py2
-
-wsgi_safe_tests = []
-
-
-def relpath(*a):
-    return os.path.join(os.path.dirname(__file__), *a)
-
-
-def wsgi_safe(cls):
-    wsgi_safe_tests.append(cls)
-    return cls
-
-
-class WebTestCase(AsyncHTTPTestCase):
-    """Base class for web tests that also supports WSGI mode.
-
-    Override get_handlers and get_app_kwargs instead of get_app.
-    Append to wsgi_safe to have it run in wsgi_test as well.
-    """
-    def get_app(self):
-        self.app = Application(self.get_handlers(), **self.get_app_kwargs())
-        return self.app
-
-    def get_handlers(self):
-        raise NotImplementedError()
-
-    def get_app_kwargs(self):
-        return {}
-
-
-class SimpleHandlerTestCase(WebTestCase):
-    """Simplified base class for tests that work with a single handler class.
-
-    To use, define a nested class named ``Handler``.
-    """
-    def get_handlers(self):
-        return [('/', self.Handler)]
-
-
-class HelloHandler(RequestHandler):
-    def get(self):
-        self.write('hello')
-
-
-class CookieTestRequestHandler(RequestHandler):
-    # stub out enough methods to make the secure_cookie functions work
-    def __init__(self, cookie_secret='0123456789', key_version=None):
-        # don't call super.__init__
-        self._cookies = {}
-        if key_version is None:
-            self.application = ObjectDict(settings=dict(cookie_secret=cookie_secret))
-        else:
-            self.application = ObjectDict(settings=dict(cookie_secret=cookie_secret,
-                                                        key_version=key_version))
-
-    def get_cookie(self, name):
-        return self._cookies.get(name)
-
-    def set_cookie(self, name, value, expires_days=None):
-        self._cookies[name] = value
-
-
-# See SignedValueTest below for more.
-class SecureCookieV1Test(unittest.TestCase):
-    def test_round_trip(self):
-        handler = CookieTestRequestHandler()
-        handler.set_secure_cookie('foo', b'bar', version=1)
-        self.assertEqual(handler.get_secure_cookie('foo', min_version=1),
-                         b'bar')
-
-    def test_cookie_tampering_future_timestamp(self):
-        handler = CookieTestRequestHandler()
-        # this string base64-encodes to '12345678'
-        handler.set_secure_cookie('foo', binascii.a2b_hex(b'd76df8e7aefc'),
-                                  version=1)
-        cookie = handler._cookies['foo']
-        match = re.match(br'12345678\|([0-9]+)\|([0-9a-f]+)', cookie)
-        self.assertTrue(match)
-        timestamp = match.group(1)
-        sig = match.group(2)
-        self.assertEqual(
-            _create_signature_v1(handler.application.settings["cookie_secret"],
-                                 'foo', '12345678', timestamp),
-            sig)
-        # shifting digits from payload to timestamp doesn't alter signature
-        # (this is not desirable behavior, just confirming that that's how it
-        # works)
-        self.assertEqual(
-            _create_signature_v1(handler.application.settings["cookie_secret"],
-                                 'foo', '1234', b'5678' + timestamp),
-            sig)
-        # tamper with the cookie
-        handler._cookies['foo'] = utf8('1234|5678%s|%s' % (
-            to_basestring(timestamp), to_basestring(sig)))
-        # it gets rejected
-        with ExpectLog(gen_log, "Cookie timestamp in future"):
-            self.assertTrue(
-                handler.get_secure_cookie('foo', min_version=1) is None)
-
-    def test_arbitrary_bytes(self):
-        # Secure cookies accept arbitrary data (which is base64 encoded).
-        # Note that normal cookies accept only a subset of ascii.
-        handler = CookieTestRequestHandler()
-        handler.set_secure_cookie('foo', b'\xe9', version=1)
-        self.assertEqual(handler.get_secure_cookie('foo', min_version=1), b'\xe9')
-
-
-# See SignedValueTest below for more.
-class SecureCookieV2Test(unittest.TestCase):
-    KEY_VERSIONS = {
-        0: 'ajklasdf0ojaisdf',
-        1: 'aslkjasaolwkjsdf'
-    }
-
-    def test_round_trip(self):
-        handler = CookieTestRequestHandler()
-        handler.set_secure_cookie('foo', b'bar', version=2)
-        self.assertEqual(handler.get_secure_cookie('foo', min_version=2), b'bar')
-
-    def test_key_version_roundtrip(self):
-        handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
-                                           key_version=0)
-        handler.set_secure_cookie('foo', b'bar')
-        self.assertEqual(handler.get_secure_cookie('foo'), b'bar')
-
-    def test_key_version_roundtrip_differing_version(self):
-        handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
-                                           key_version=1)
-        handler.set_secure_cookie('foo', b'bar')
-        self.assertEqual(handler.get_secure_cookie('foo'), b'bar')
-
-    def test_key_version_increment_version(self):
-        handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
-                                           key_version=0)
-        handler.set_secure_cookie('foo', b'bar')
-        new_handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
-                                               key_version=1)
-        new_handler._cookies = handler._cookies
-        self.assertEqual(new_handler.get_secure_cookie('foo'), b'bar')
-
-    def test_key_version_invalidate_version(self):
-        handler = CookieTestRequestHandler(cookie_secret=self.KEY_VERSIONS,
-                                           key_version=0)
-        handler.set_secure_cookie('foo', b'bar')
-        new_key_versions = self.KEY_VERSIONS.copy()
-        new_key_versions.pop(0)
-        new_handler = CookieTestRequestHandler(cookie_secret=new_key_versions,
-                                               key_version=1)
-        new_handler._cookies = handler._cookies
-        self.assertEqual(new_handler.get_secure_cookie('foo'), None)
-
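A hedged sketch of the application-level settings these key-version tests exercise (illustrative secrets; `HelloHandler` as defined above):

app = Application(
    [(r'/', HelloHandler)],
    # cookie_secret maps key_version -> signing key; new cookies are signed
    # with the key selected by key_version, and keys still present in the
    # dict remain valid for reading older cookies.
    cookie_secret={0: 'ajklasdf0ojaisdf', 1: 'aslkjasaolwkjsdf'},
    key_version=1,
)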
-
-class CookieTest(WebTestCase):
-    def get_handlers(self):
-        class SetCookieHandler(RequestHandler):
-            def get(self):
-                # Try setting cookies with different argument types
-                # to ensure that everything gets encoded correctly
-                self.set_cookie("str", "asdf")
-                self.set_cookie("unicode", u"qwer")
-                self.set_cookie("bytes", b"zxcv")
-
-        class GetCookieHandler(RequestHandler):
-            def get(self):
-                self.write(self.get_cookie("foo", "default"))
-
-        class SetCookieDomainHandler(RequestHandler):
-            def get(self):
-                # unicode domain and path arguments shouldn't break things
-                # either (see bug #285)
-                self.set_cookie("unicode_args", "blah", domain=u"foo.com",
-                                path=u"/foo")
-
-        class SetCookieSpecialCharHandler(RequestHandler):
-            def get(self):
-                self.set_cookie("equals", "a=b")
-                self.set_cookie("semicolon", "a;b")
-                self.set_cookie("quote", 'a"b')
-
-        class SetCookieOverwriteHandler(RequestHandler):
-            def get(self):
-                self.set_cookie("a", "b", domain="example.com")
-                self.set_cookie("c", "d", domain="example.com")
-                # A second call with the same name clobbers the first.
-                # Attributes from the first call are not carried over.
-                self.set_cookie("a", "e")
-
-        class SetCookieMaxAgeHandler(RequestHandler):
-            def get(self):
-                self.set_cookie("foo", "bar", max_age=10)
-
-        class SetCookieExpiresDaysHandler(RequestHandler):
-            def get(self):
-                self.set_cookie("foo", "bar", expires_days=10)
-
-        class SetCookieFalsyFlags(RequestHandler):
-            def get(self):
-                self.set_cookie("a", "1", secure=True)
-                self.set_cookie("b", "1", secure=False)
-                self.set_cookie("c", "1", httponly=True)
-                self.set_cookie("d", "1", httponly=False)
-
-        return [("/set", SetCookieHandler),
-                ("/get", GetCookieHandler),
-                ("/set_domain", SetCookieDomainHandler),
-                ("/special_char", SetCookieSpecialCharHandler),
-                ("/set_overwrite", SetCookieOverwriteHandler),
-                ("/set_max_age", SetCookieMaxAgeHandler),
-                ("/set_expires_days", SetCookieExpiresDaysHandler),
-                ("/set_falsy_flags", SetCookieFalsyFlags)
-                ]
-
-    def test_set_cookie(self):
-        response = self.fetch("/set")
-        self.assertEqual(sorted(response.headers.get_list("Set-Cookie")),
-                         ["bytes=zxcv; Path=/",
-                          "str=asdf; Path=/",
-                          "unicode=qwer; Path=/",
-                          ])
-
-    def test_get_cookie(self):
-        response = self.fetch("/get", headers={"Cookie": "foo=bar"})
-        self.assertEqual(response.body, b"bar")
-
-        response = self.fetch("/get", headers={"Cookie": 'foo="bar"'})
-        self.assertEqual(response.body, b"bar")
-
-        response = self.fetch("/get", headers={"Cookie": "/=exception;"})
-        self.assertEqual(response.body, b"default")
-
-    def test_set_cookie_domain(self):
-        response = self.fetch("/set_domain")
-        self.assertEqual(response.headers.get_list("Set-Cookie"),
-                         ["unicode_args=blah; Domain=foo.com; Path=/foo"])
-
-    def test_cookie_special_char(self):
-        response = self.fetch("/special_char")
-        headers = sorted(response.headers.get_list("Set-Cookie"))
-        self.assertEqual(len(headers), 3)
-        self.assertEqual(headers[0], 'equals="a=b"; Path=/')
-        self.assertEqual(headers[1], 'quote="a\\"b"; Path=/')
-        # python 2.7 octal-escapes the semicolon; older versions leave it alone
-        self.assertTrue(headers[2] in ('semicolon="a;b"; Path=/',
-                                       'semicolon="a\\073b"; Path=/'),
-                        headers[2])
-
-        data = [('foo=a=b', 'a=b'),
-                ('foo="a=b"', 'a=b'),
-                ('foo="a;b"', '"a'),  # even quoted, ";" is a delimiter
-                ('foo=a\\073b', 'a\\073b'),  # escapes only decoded in quotes
-                ('foo="a\\073b"', 'a;b'),
-                ('foo="a\\"b"', 'a"b'),
-                ]
-        for header, expected in data:
-            logging.debug("trying %r", header)
-            response = self.fetch("/get", headers={"Cookie": header})
-            self.assertEqual(response.body, utf8(expected))
-
-    def test_set_cookie_overwrite(self):
-        response = self.fetch("/set_overwrite")
-        headers = response.headers.get_list("Set-Cookie")
-        self.assertEqual(sorted(headers),
-                         ["a=e; Path=/", "c=d; Domain=example.com; Path=/"])
-
-    def test_set_cookie_max_age(self):
-        response = self.fetch("/set_max_age")
-        headers = response.headers.get_list("Set-Cookie")
-        self.assertEqual(sorted(headers),
-                         ["foo=bar; Max-Age=10; Path=/"])
-
-    def test_set_cookie_expires_days(self):
-        response = self.fetch("/set_expires_days")
-        header = response.headers.get("Set-Cookie")
-        match = re.match("foo=bar; expires=(?P<expires>.+); Path=/", header)
-        self.assertIsNotNone(match)
-
-        expires = datetime.datetime.utcnow() + datetime.timedelta(days=10)
-        header_expires = datetime.datetime(
-            *email.utils.parsedate(match.groupdict()["expires"])[:6])
-        self.assertTrue(abs(timedelta_to_seconds(expires - header_expires)) < 10)
-
-    def test_set_cookie_false_flags(self):
-        response = self.fetch("/set_falsy_flags")
-        headers = sorted(response.headers.get_list("Set-Cookie"))
-        # The secure and httponly headers are capitalized in py35 and
-        # lowercase in older versions.
-        self.assertEqual(headers[0].lower(), 'a=1; path=/; secure')
-        self.assertEqual(headers[1].lower(), 'b=1; path=/')
-        self.assertEqual(headers[2].lower(), 'c=1; httponly; path=/')
-        self.assertEqual(headers[3].lower(), 'd=1; path=/')
-
-
-class AuthRedirectRequestHandler(RequestHandler):
-    def initialize(self, login_url):
-        self.login_url = login_url
-
-    def get_login_url(self):
-        return self.login_url
-
-    @authenticated
-    def get(self):
-        # we'll never actually get here because the test doesn't follow redirects
-        self.send_error(500)
-
-
-class AuthRedirectTest(WebTestCase):
-    def get_handlers(self):
-        return [('/relative', AuthRedirectRequestHandler,
-                 dict(login_url='/login')),
-                ('/absolute', AuthRedirectRequestHandler,
-                 dict(login_url='http://example.com/login'))]
-
-    def test_relative_auth_redirect(self):
-        self.http_client.fetch(self.get_url('/relative'), self.stop,
-                               follow_redirects=False)
-        response = self.wait()
-        self.assertEqual(response.code, 302)
-        self.assertEqual(response.headers['Location'], '/login?next=%2Frelative')
-
-    def test_absolute_auth_redirect(self):
-        self.http_client.fetch(self.get_url('/absolute'), self.stop,
-                               follow_redirects=False)
-        response = self.wait()
-        self.assertEqual(response.code, 302)
-        self.assertTrue(re.match(
-            r'http://example.com/login\?next=http%3A%2F%2Flocalhost%3A[0-9]+%2Fabsolute',
-            response.headers['Location']), response.headers['Location'])
-
-
-class ConnectionCloseHandler(RequestHandler):
-    def initialize(self, test):
-        self.test = test
-
-    @asynchronous
-    def get(self):
-        self.test.on_handler_waiting()
-
-    def on_connection_close(self):
-        self.test.on_connection_close()
-
-
-class ConnectionCloseTest(WebTestCase):
-    def get_handlers(self):
-        return [('/', ConnectionCloseHandler, dict(test=self))]
-
-    def test_connection_close(self):
-        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
-        s.connect(("127.0.0.1", self.get_http_port()))
-        self.stream = IOStream(s, io_loop=self.io_loop)
-        self.stream.write(b"GET / HTTP/1.0\r\n\r\n")
-        self.wait()
-
-    def on_handler_waiting(self):
-        logging.debug('handler waiting')
-        self.stream.close()
-
-    def on_connection_close(self):
-        logging.debug('connection closed')
-        self.stop()
-
-
-class EchoHandler(RequestHandler):
-    def get(self, *path_args):
-        # Type checks: web.py interfaces convert argument values to
-        # unicode strings (by default, but see also decode_argument).
-        # In httpserver.py (i.e. self.request.arguments), they're left
-        # as bytes.  Keys are always native strings.
-        for key in self.request.arguments:
-            if type(key) != str:
-                raise Exception("incorrect type for key: %r" % type(key))
-            for value in self.request.arguments[key]:
-                if type(value) != bytes:
-                    raise Exception("incorrect type for value: %r" %
-                                    type(value))
-            for value in self.get_arguments(key):
-                if type(value) != unicode_type:
-                    raise Exception("incorrect type for value: %r" %
-                                    type(value))
-        for arg in path_args:
-            if type(arg) != unicode_type:
-                raise Exception("incorrect type for path arg: %r" % type(arg))
-        self.write(dict(path=self.request.path,
-                        path_args=path_args,
-                        args=recursive_unicode(self.request.arguments)))
-
-
-class RequestEncodingTest(WebTestCase):
-    def get_handlers(self):
-        return [("/group/(.*)", EchoHandler),
-                ("/slashes/([^/]*)/([^/]*)", EchoHandler),
-                ]
-
-    def fetch_json(self, path):
-        return json_decode(self.fetch(path).body)
-
-    def test_group_question_mark(self):
-        # Ensure that url-encoded question marks are handled properly
-        self.assertEqual(self.fetch_json('/group/%3F'),
-                         dict(path='/group/%3F', path_args=['?'], args={}))
-        self.assertEqual(self.fetch_json('/group/%3F?%3F=%3F'),
-                         dict(path='/group/%3F', path_args=['?'], args={'?': ['?']}))
-
-    def test_group_encoding(self):
-        # Path components and query arguments should be decoded the same way
-        self.assertEqual(self.fetch_json('/group/%C3%A9?arg=%C3%A9'),
-                         {u"path": u"/group/%C3%A9",
-                          u"path_args": [u"\u00e9"],
-                          u"args": {u"arg": [u"\u00e9"]}})
-
-    def test_slashes(self):
-        # Slashes may be escaped to appear as a single "directory" in the path,
-        # but they are then unescaped when passed to the get() method.
-        self.assertEqual(self.fetch_json('/slashes/foo/bar'),
-                         dict(path="/slashes/foo/bar",
-                              path_args=["foo", "bar"],
-                              args={}))
-        self.assertEqual(self.fetch_json('/slashes/a%2Fb/c%2Fd'),
-                         dict(path="/slashes/a%2Fb/c%2Fd",
-                              path_args=["a/b", "c/d"],
-                              args={}))
-
-    def test_error(self):
-        # Percent signs (encoded as %25) should not mess up printf-style
-        # messages in logs
-        with ExpectLog(gen_log, ".*Invalid unicode"):
-            self.fetch("/group/?arg=%25%e9")
-
-
-class TypeCheckHandler(RequestHandler):
-    def prepare(self):
-        self.errors = {}
-
-        self.check_type('status', self.get_status(), int)
-
-        # get_argument is an exception to the general rule of using
-        # type str for non-body data, mainly for historical reasons.
-        self.check_type('argument', self.get_argument('foo'), unicode_type)
-        self.check_type('cookie_key', list(self.cookies.keys())[0], str)
-        self.check_type('cookie_value', list(self.cookies.values())[0].value, str)
-
-        # Secure cookies return bytes because they can contain arbitrary
-        # data, but regular cookies are native strings.
-        if list(self.cookies.keys()) != ['asdf']:
-            raise Exception("unexpected values for cookie keys: %r" %
-                            self.cookies.keys())
-        self.check_type('get_secure_cookie', self.get_secure_cookie('asdf'), bytes)
-        self.check_type('get_cookie', self.get_cookie('asdf'), str)
-
-        self.check_type('xsrf_token', self.xsrf_token, bytes)
-        self.check_type('xsrf_form_html', self.xsrf_form_html(), str)
-
-        self.check_type('reverse_url', self.reverse_url('typecheck', 'foo'), str)
-
-        self.check_type('request_summary', self._request_summary(), str)
-
-    def get(self, path_component):
-        # path_component uses type unicode instead of str for consistency
-        # with get_argument()
-        self.check_type('path_component', path_component, unicode_type)
-        self.write(self.errors)
-
-    def post(self, path_component):
-        self.check_type('path_component', path_component, unicode_type)
-        self.write(self.errors)
-
-    def check_type(self, name, obj, expected_type):
-        actual_type = type(obj)
-        if expected_type != actual_type:
-            self.errors[name] = "expected %s, got %s" % (expected_type,
-                                                         actual_type)
-
-
-class DecodeArgHandler(RequestHandler):
-    def decode_argument(self, value, name=None):
-        if type(value) != bytes:
-            raise Exception("unexpected type for value: %r" % type(value))
-        # use self.request.arguments directly to avoid recursion
-        if 'encoding' in self.request.arguments:
-            return value.decode(to_unicode(self.request.arguments['encoding'][0]))
-        else:
-            return value
-
-    def get(self, arg):
-        def describe(s):
-            if type(s) == bytes:
-                return ["bytes", native_str(binascii.b2a_hex(s))]
-            elif type(s) == unicode_type:
-                return ["unicode", s]
-            raise Exception("unknown type")
-        self.write({'path': describe(arg),
-                    'query': describe(self.get_argument("foo")),
-                    })
-
-
-class LinkifyHandler(RequestHandler):
-    def get(self):
-        self.render("linkify.html", message="http://example.com")
-
-
-class UIModuleResourceHandler(RequestHandler):
-    def get(self):
-        self.render("page.html", entries=[1, 2])
-
-
-class OptionalPathHandler(RequestHandler):
-    def get(self, path):
-        self.write({"path": path})
-
-
-class FlowControlHandler(RequestHandler):
-    # These writes are too small to demonstrate real flow control,
-    # but they at least show that the callbacks get run.
-    @asynchronous
-    def get(self):
-        self.write("1")
-        self.flush(callback=self.step2)
-
-    def step2(self):
-        self.write("2")
-        self.flush(callback=self.step3)
-
-    def step3(self):
-        self.write("3")
-        self.finish()
-
-
-class MultiHeaderHandler(RequestHandler):
-    def get(self):
-        self.set_header("x-overwrite", "1")
-        self.set_header("X-Overwrite", 2)
-        self.add_header("x-multi", 3)
-        self.add_header("X-Multi", "4")
-
-
-class RedirectHandler(RequestHandler):
-    def get(self):
-        if self.get_argument('permanent', None) is not None:
-            self.redirect('/', permanent=int(self.get_argument('permanent')))
-        elif self.get_argument('status', None) is not None:
-            self.redirect('/', status=int(self.get_argument('status')))
-        else:
-            raise Exception("didn't get permanent or status arguments")
-
-
-class EmptyFlushCallbackHandler(RequestHandler):
-    @asynchronous
-    @gen.engine
-    def get(self):
-        # Ensure that the flush callback is run whether or not there
-        # was any output.  The gen.Task and direct yield forms are
-        # equivalent.
-        yield gen.Task(self.flush)  # "empty" flush, but writes headers
-        yield gen.Task(self.flush)  # empty flush
-        self.write("o")
-        yield self.flush()  # flushes the "o"
-        yield self.flush()  # empty flush
-        self.finish("k")
-
-
-class HeaderInjectionHandler(RequestHandler):
-    def get(self):
-        try:
-            self.set_header("X-Foo", "foo\r\nX-Bar: baz")
-            raise Exception("Didn't get expected exception")
-        except ValueError as e:
-            if "Unsafe header value" in str(e):
-                self.finish(b"ok")
-            else:
-                raise
-
-
-class GetArgumentHandler(RequestHandler):
-    def prepare(self):
-        if self.get_argument('source', None) == 'query':
-            method = self.get_query_argument
-        elif self.get_argument('source', None) == 'body':
-            method = self.get_body_argument
-        else:
-            method = self.get_argument
-        self.finish(method("foo", "default"))
-
-
-class GetArgumentsHandler(RequestHandler):
-    def prepare(self):
-        self.finish(dict(default=self.get_arguments("foo"),
-                         query=self.get_query_arguments("foo"),
-                         body=self.get_body_arguments("foo")))
-
-
-# This test is shared with wsgi_test.py
-@wsgi_safe
-class WSGISafeWebTest(WebTestCase):
-    COOKIE_SECRET = "WebTest.COOKIE_SECRET"
-
-    def get_app_kwargs(self):
-        loader = DictLoader({
-            "linkify.html": "{% module linkify(message) %}",
-            "page.html": """\
-<html><head></head><body>
-{% for e in entries %}
-{% module Template("entry.html", entry=e) %}
-{% end %}
-</body></html>""",
-            "entry.html": """\
-{{ set_resources(embedded_css=".entry { margin-bottom: 1em; }", embedded_javascript="js_embed()", css_files=["/base.css", "/foo.css"], javascript_files="/common.js", html_head="<meta>", html_body='<script src="/analytics.js"/>') }}
-<div class="entry">...</div>""",
-        })
-        return dict(template_loader=loader,
-                    autoescape="xhtml_escape",
-                    cookie_secret=self.COOKIE_SECRET)
-
-    def tearDown(self):
-        super(WSGISafeWebTest, self).tearDown()
-        RequestHandler._template_loaders.clear()
-
-    def get_handlers(self):
-        urls = [
-            url("/typecheck/(.*)", TypeCheckHandler, name='typecheck'),
-            url("/decode_arg/(.*)", DecodeArgHandler, name='decode_arg'),
-            url("/decode_arg_kw/(?P<arg>.*)", DecodeArgHandler),
-            url("/linkify", LinkifyHandler),
-            url("/uimodule_resources", UIModuleResourceHandler),
-            url("/optional_path/(.+)?", OptionalPathHandler),
-            url("/multi_header", MultiHeaderHandler),
-            url("/redirect", RedirectHandler),
-            url("/web_redirect_permanent", WebRedirectHandler, {"url": "/web_redirect_newpath"}),
-            url("/web_redirect", WebRedirectHandler, {"url": "/web_redirect_newpath", "permanent": False}),
-            url("//web_redirect_double_slash", WebRedirectHandler, {"url": '/web_redirect_newpath'}),
-            url("/header_injection", HeaderInjectionHandler),
-            url("/get_argument", GetArgumentHandler),
-            url("/get_arguments", GetArgumentsHandler),
-        ]
-        return urls
-
-    def fetch_json(self, *args, **kwargs):
-        response = self.fetch(*args, **kwargs)
-        response.rethrow()
-        return json_decode(response.body)
-
-    def test_types(self):
-        cookie_value = to_unicode(create_signed_value(self.COOKIE_SECRET,
-                                                      "asdf", "qwer"))
-        response = self.fetch("/typecheck/asdf?foo=bar",
-                              headers={"Cookie": "asdf=" + cookie_value})
-        data = json_decode(response.body)
-        self.assertEqual(data, {})
-
-        response = self.fetch("/typecheck/asdf?foo=bar", method="POST",
-                              headers={"Cookie": "asdf=" + cookie_value},
-                              body="foo=bar")
-
-    def test_decode_argument(self):
-        # These urls all decode to the same thing
-        urls = ["/decode_arg/%C3%A9?foo=%C3%A9&encoding=utf-8",
-                "/decode_arg/%E9?foo=%E9&encoding=latin1",
-                "/decode_arg_kw/%E9?foo=%E9&encoding=latin1",
-                ]
-        for req_url in urls:
-            response = self.fetch(req_url)
-            response.rethrow()
-            data = json_decode(response.body)
-            self.assertEqual(data, {u'path': [u'unicode', u'\u00e9'],
-                                    u'query': [u'unicode', u'\u00e9'],
-                                    })
-
-        response = self.fetch("/decode_arg/%C3%A9?foo=%C3%A9")
-        response.rethrow()
-        data = json_decode(response.body)
-        self.assertEqual(data, {u'path': [u'bytes', u'c3a9'],
-                                u'query': [u'bytes', u'c3a9'],
-                                })
-
-    def test_decode_argument_invalid_unicode(self):
-        # test that invalid unicode in URLs causes 400, not 500
-        with ExpectLog(gen_log, ".*Invalid unicode.*"):
-            response = self.fetch("/typecheck/invalid%FF")
-            self.assertEqual(response.code, 400)
-            response = self.fetch("/typecheck/invalid?foo=%FF")
-            self.assertEqual(response.code, 400)
-
-    def test_decode_argument_plus(self):
-        # These urls are all equivalent.
-        urls = ["/decode_arg/1%20%2B%201?foo=1%20%2B%201&encoding=utf-8",
-                "/decode_arg/1%20+%201?foo=1+%2B+1&encoding=utf-8"]
-        for req_url in urls:
-            response = self.fetch(req_url)
-            response.rethrow()
-            data = json_decode(response.body)
-            self.assertEqual(data, {u'path': [u'unicode', u'1 + 1'],
-                                    u'query': [u'unicode', u'1 + 1'],
-                                    })
-
-    def test_reverse_url(self):
-        self.assertEqual(self.app.reverse_url('decode_arg', 'foo'),
-                         '/decode_arg/foo')
-        self.assertEqual(self.app.reverse_url('decode_arg', 42),
-                         '/decode_arg/42')
-        self.assertEqual(self.app.reverse_url('decode_arg', b'\xe9'),
-                         '/decode_arg/%E9')
-        self.assertEqual(self.app.reverse_url('decode_arg', u'\u00e9'),
-                         '/decode_arg/%C3%A9')
-        self.assertEqual(self.app.reverse_url('decode_arg', '1 + 1'),
-                         '/decode_arg/1%20%2B%201')
-
-    def test_uimodule_unescaped(self):
-        response = self.fetch("/linkify")
-        self.assertEqual(response.body,
-                         b"<a href=\"http://example.com\">http://example.com</a>")
-
-    def test_uimodule_resources(self):
-        response = self.fetch("/uimodule_resources")
-        self.assertEqual(response.body, b"""\
-<html><head><link href="/base.css" type="text/css" rel="stylesheet"/><link href="/foo.css" type="text/css" rel="stylesheet"/>
-<style type="text/css">
-.entry { margin-bottom: 1em; }
-</style>
-<meta>
-</head><body>
-
-
-<div class="entry">...</div>
-
-
-<div class="entry">...</div>
-
-<script src="/common.js" type="text/javascript"></script>
-<script type="text/javascript">
-//<![CDATA[
-js_embed()
-//]]>
-</script>
-<script src="/analytics.js"/>
-</body></html>""")
-
-    def test_optional_path(self):
-        self.assertEqual(self.fetch_json("/optional_path/foo"),
-                         {u"path": u"foo"})
-        self.assertEqual(self.fetch_json("/optional_path/"),
-                         {u"path": None})
-
-    def test_multi_header(self):
-        response = self.fetch("/multi_header")
-        self.assertEqual(response.headers["x-overwrite"], "2")
-        self.assertEqual(response.headers.get_list("x-multi"), ["3", "4"])
-
-    def test_redirect(self):
-        response = self.fetch("/redirect?permanent=1", follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        response = self.fetch("/redirect?permanent=0", follow_redirects=False)
-        self.assertEqual(response.code, 302)
-        response = self.fetch("/redirect?status=307", follow_redirects=False)
-        self.assertEqual(response.code, 307)
-
-    def test_web_redirect(self):
-        response = self.fetch("/web_redirect_permanent", follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
-        response = self.fetch("/web_redirect", follow_redirects=False)
-        self.assertEqual(response.code, 302)
-        self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
-
-    def test_web_redirect_double_slash(self):
-        response = self.fetch("//web_redirect_double_slash", follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertEqual(response.headers['Location'], '/web_redirect_newpath')
-
-    def test_header_injection(self):
-        response = self.fetch("/header_injection")
-        self.assertEqual(response.body, b"ok")
-
-    def test_get_argument(self):
-        response = self.fetch("/get_argument?foo=bar")
-        self.assertEqual(response.body, b"bar")
-        response = self.fetch("/get_argument?foo=")
-        self.assertEqual(response.body, b"")
-        response = self.fetch("/get_argument")
-        self.assertEqual(response.body, b"default")
-
-        # Test merging of query and body arguments.
-        # In singular form, body arguments take precedence over query arguments.
-        body = urllib_parse.urlencode(dict(foo="hello"))
-        response = self.fetch("/get_argument?foo=bar", method="POST", body=body)
-        self.assertEqual(response.body, b"hello")
-        # In plural methods they are merged.
-        response = self.fetch("/get_arguments?foo=bar",
-                              method="POST", body=body)
-        self.assertEqual(json_decode(response.body),
-                         dict(default=['bar', 'hello'],
-                              query=['bar'],
-                              body=['hello']))
-
-    def test_get_query_arguments(self):
-        # send as a post so we can ensure the separation between query
-        # string and body arguments.
-        body = urllib_parse.urlencode(dict(foo="hello"))
-        response = self.fetch("/get_argument?source=query&foo=bar",
-                              method="POST", body=body)
-        self.assertEqual(response.body, b"bar")
-        response = self.fetch("/get_argument?source=query&foo=",
-                              method="POST", body=body)
-        self.assertEqual(response.body, b"")
-        response = self.fetch("/get_argument?source=query",
-                              method="POST", body=body)
-        self.assertEqual(response.body, b"default")
-
-    def test_get_body_arguments(self):
-        body = urllib_parse.urlencode(dict(foo="bar"))
-        response = self.fetch("/get_argument?source=body&foo=hello",
-                              method="POST", body=body)
-        self.assertEqual(response.body, b"bar")
-
-        body = urllib_parse.urlencode(dict(foo=""))
-        response = self.fetch("/get_argument?source=body&foo=hello",
-                              method="POST", body=body)
-        self.assertEqual(response.body, b"")
-
-        body = urllib_parse.urlencode(dict())
-        response = self.fetch("/get_argument?source=body&foo=hello",
-                              method="POST", body=body)
-        self.assertEqual(response.body, b"default")
-
-    def test_no_gzip(self):
-        response = self.fetch('/get_argument')
-        self.assertNotIn('Accept-Encoding', response.headers.get('Vary', ''))
-        self.assertNotIn('gzip', response.headers.get('Content-Encoding', ''))
-
-
-class NonWSGIWebTests(WebTestCase):
-    def get_handlers(self):
-        return [("/flow_control", FlowControlHandler),
-                ("/empty_flush", EmptyFlushCallbackHandler),
-                ]
-
-    def test_flow_control(self):
-        self.assertEqual(self.fetch("/flow_control").body, b"123")
-
-    def test_empty_flush(self):
-        response = self.fetch("/empty_flush")
-        self.assertEqual(response.body, b"ok")
-
-
-@wsgi_safe
-class ErrorResponseTest(WebTestCase):
-    def get_handlers(self):
-        class DefaultHandler(RequestHandler):
-            def get(self):
-                if self.get_argument("status", None):
-                    raise HTTPError(int(self.get_argument("status")))
-                1 / 0
-
-        class WriteErrorHandler(RequestHandler):
-            def get(self):
-                if self.get_argument("status", None):
-                    self.send_error(int(self.get_argument("status")))
-                else:
-                    1 / 0
-
-            def write_error(self, status_code, **kwargs):
-                self.set_header("Content-Type", "text/plain")
-                if "exc_info" in kwargs:
-                    self.write("Exception: %s" % kwargs["exc_info"][0].__name__)
-                else:
-                    self.write("Status: %d" % status_code)
-
-        class FailedWriteErrorHandler(RequestHandler):
-            def get(self):
-                1 / 0
-
-            def write_error(self, status_code, **kwargs):
-                raise Exception("exception in write_error")
-
-        return [url("/default", DefaultHandler),
-                url("/write_error", WriteErrorHandler),
-                url("/failed_write_error", FailedWriteErrorHandler),
-                ]
-
-    def test_default(self):
-        with ExpectLog(app_log, "Uncaught exception"):
-            response = self.fetch("/default")
-            self.assertEqual(response.code, 500)
-            self.assertTrue(b"500: Internal Server Error" in response.body)
-
-            response = self.fetch("/default?status=503")
-            self.assertEqual(response.code, 503)
-            self.assertTrue(b"503: Service Unavailable" in response.body)
-
-    def test_write_error(self):
-        with ExpectLog(app_log, "Uncaught exception"):
-            response = self.fetch("/write_error")
-            self.assertEqual(response.code, 500)
-            self.assertEqual(b"Exception: ZeroDivisionError", response.body)
-
-            response = self.fetch("/write_error?status=503")
-            self.assertEqual(response.code, 503)
-            self.assertEqual(b"Status: 503", response.body)
-
-    def test_failed_write_error(self):
-        with ExpectLog(app_log, "Uncaught exception"):
-            response = self.fetch("/failed_write_error")
-            self.assertEqual(response.code, 500)
-            self.assertEqual(b"", response.body)
-
-
-@wsgi_safe
-class StaticFileTest(WebTestCase):
-    # The expected MD5 hash of robots.txt, used in tests that call
-    # StaticFileHandler.get_version
-    robots_txt_hash = b"f71d20196d4caf35b6a670db8c70b03d"
-    static_dir = os.path.join(os.path.dirname(__file__), 'static')
-
-    def get_handlers(self):
-        class StaticUrlHandler(RequestHandler):
-            def get(self, path):
-                with_v = int(self.get_argument('include_version', 1))
-                self.write(self.static_url(path, include_version=with_v))
-
-        class AbsoluteStaticUrlHandler(StaticUrlHandler):
-            include_host = True
-
-        class OverrideStaticUrlHandler(RequestHandler):
-            def get(self, path):
-                do_include = bool(self.get_argument("include_host"))
-                self.include_host = not do_include
-
-                regular_url = self.static_url(path)
-                override_url = self.static_url(path, include_host=do_include)
-                if override_url == regular_url:
-                    return self.write(str(False))
-
-                protocol = self.request.protocol + "://"
-                protocol_length = len(protocol)
-                check_regular = regular_url.find(protocol, 0, protocol_length)
-                check_override = override_url.find(protocol, 0, protocol_length)
-
-                if do_include:
-                    result = (check_override == 0 and check_regular == -1)
-                else:
-                    result = (check_override == -1 and check_regular == 0)
-                self.write(str(result))
-
-        return [('/static_url/(.*)', StaticUrlHandler),
-                ('/abs_static_url/(.*)', AbsoluteStaticUrlHandler),
-                ('/override_static_url/(.*)', OverrideStaticUrlHandler),
-                ('/root_static/(.*)', StaticFileHandler, dict(path='/'))]
-
-    def get_app_kwargs(self):
-        return dict(static_path=relpath('static'))
-
-    def test_static_files(self):
-        response = self.fetch('/robots.txt')
-        self.assertTrue(b"Disallow: /" in response.body)
-
-        response = self.fetch('/static/robots.txt')
-        self.assertTrue(b"Disallow: /" in response.body)
-        self.assertEqual(response.headers.get("Content-Type"), "text/plain")
-
-    def test_static_compressed_files(self):
-        response = self.fetch("/static/sample.xml.gz")
-        self.assertEqual(response.headers.get("Content-Type"),
-                         "application/gzip")
-        response = self.fetch("/static/sample.xml.bz2")
-        self.assertEqual(response.headers.get("Content-Type"),
-                         "application/octet-stream")
-        # make sure the uncompressed file still has the correct type
-        response = self.fetch("/static/sample.xml")
-        self.assertTrue(response.headers.get("Content-Type")
-                        in set(("text/xml", "application/xml")))
-
-    def test_static_url(self):
-        response = self.fetch("/static_url/robots.txt")
-        self.assertEqual(response.body,
-                         b"/static/robots.txt?v=" + self.robots_txt_hash)
-
-    def test_absolute_static_url(self):
-        response = self.fetch("/abs_static_url/robots.txt")
-        self.assertEqual(response.body, (
-            utf8(self.get_url("/")) +
-            b"static/robots.txt?v=" +
-            self.robots_txt_hash
-        ))
-
-    def test_relative_version_exclusion(self):
-        response = self.fetch("/static_url/robots.txt?include_version=0")
-        self.assertEqual(response.body, b"/static/robots.txt")
-
-    def test_absolute_version_exclusion(self):
-        response = self.fetch("/abs_static_url/robots.txt?include_version=0")
-        self.assertEqual(response.body,
-                         utf8(self.get_url("/") + "static/robots.txt"))
-
-    def test_include_host_override(self):
-        self._trigger_include_host_check(False)
-        self._trigger_include_host_check(True)
-
-    def _trigger_include_host_check(self, include_host):
-        path = "/override_static_url/robots.txt?include_host=%s"
-        response = self.fetch(path % int(include_host))
-        self.assertEqual(response.body, utf8(str(True)))
-
-    def get_and_head(self, *args, **kwargs):
-        """Performs a GET and HEAD request and returns the GET response.
-
-        Fails if any ``Content-*`` headers returned by the two requests
-        differ.
-        """
-        head_response = self.fetch(*args, method="HEAD", **kwargs)
-        get_response = self.fetch(*args, method="GET", **kwargs)
-        content_headers = set()
-        for h in itertools.chain(head_response.headers, get_response.headers):
-            if h.startswith('Content-'):
-                content_headers.add(h)
-        for h in content_headers:
-            self.assertEqual(head_response.headers.get(h),
-                             get_response.headers.get(h),
-                             "%s differs between GET (%s) and HEAD (%s)" %
-                             (h, head_response.headers.get(h),
-                              get_response.headers.get(h)))
-        return get_response
-
-    def test_static_304_if_modified_since(self):
-        response1 = self.get_and_head("/static/robots.txt")
-        response2 = self.get_and_head("/static/robots.txt", headers={
-            'If-Modified-Since': response1.headers['Last-Modified']})
-        self.assertEqual(response2.code, 304)
-        self.assertTrue('Content-Length' not in response2.headers)
-        self.assertTrue('Last-Modified' not in response2.headers)
-
-    def test_static_304_if_none_match(self):
-        response1 = self.get_and_head("/static/robots.txt")
-        response2 = self.get_and_head("/static/robots.txt", headers={
-            'If-None-Match': response1.headers['Etag']})
-        self.assertEqual(response2.code, 304)
-
-    def test_static_if_modified_since_pre_epoch(self):
-        # On Windows, the functions that work with time_t do not accept
-        # negative values, and at least one client (processing.js) seems
-        # to use if-modified-since 1/1/1960 as a cache-busting technique.
-        response = self.get_and_head("/static/robots.txt", headers={
-            'If-Modified-Since': 'Fri, 01 Jan 1960 00:00:00 GMT'})
-        self.assertEqual(response.code, 200)
-
-    def test_static_if_modified_since_time_zone(self):
-        # Instead of the value from Last-Modified, make requests with times
-        # chosen just before and after the known modification time
-        # of the file to ensure that the right time zone is being used
-        # when parsing If-Modified-Since.
-        stat = os.stat(relpath('static/robots.txt'))
-
-        response = self.get_and_head('/static/robots.txt', headers={
-            'If-Modified-Since': format_timestamp(stat.st_mtime - 1)})
-        self.assertEqual(response.code, 200)
-        response = self.get_and_head('/static/robots.txt', headers={
-            'If-Modified-Since': format_timestamp(stat.st_mtime + 1)})
-        self.assertEqual(response.code, 304)
-
-    def test_static_etag(self):
-        response = self.get_and_head('/static/robots.txt')
-        self.assertEqual(utf8(response.headers.get("Etag")),
-                         b'"' + self.robots_txt_hash + b'"')
-
-    def test_static_with_range(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=0-9'})
-        self.assertEqual(response.code, 206)
-        self.assertEqual(response.body, b"User-agent")
-        self.assertEqual(utf8(response.headers.get("Etag")),
-                         b'"' + self.robots_txt_hash + b'"')
-        self.assertEqual(response.headers.get("Content-Length"), "10")
-        self.assertEqual(response.headers.get("Content-Range"),
-                         "bytes 0-9/26")
-
-    def test_static_with_range_full_file(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=0-'})
-        # Note: Chrome refuses to play audio if it gets an HTTP 206 in response
-        # to ``Range: bytes=0-`` :(
-        self.assertEqual(response.code, 200)
-        robots_file_path = os.path.join(self.static_dir, "robots.txt")
-        with open(robots_file_path) as f:
-            self.assertEqual(response.body, utf8(f.read()))
-        self.assertEqual(response.headers.get("Content-Length"), "26")
-        self.assertEqual(response.headers.get("Content-Range"), None)
-
-    def test_static_with_range_full_past_end(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=0-10000000'})
-        self.assertEqual(response.code, 200)
-        robots_file_path = os.path.join(self.static_dir, "robots.txt")
-        with open(robots_file_path) as f:
-            self.assertEqual(response.body, utf8(f.read()))
-        self.assertEqual(response.headers.get("Content-Length"), "26")
-        self.assertEqual(response.headers.get("Content-Range"), None)
-
-    def test_static_with_range_partial_past_end(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=1-10000000'})
-        self.assertEqual(response.code, 206)
-        robots_file_path = os.path.join(self.static_dir, "robots.txt")
-        with open(robots_file_path) as f:
-            self.assertEqual(response.body, utf8(f.read()[1:]))
-        self.assertEqual(response.headers.get("Content-Length"), "25")
-        self.assertEqual(response.headers.get("Content-Range"), "bytes 1-25/26")
-
-    def test_static_with_range_end_edge(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=22-'})
-        self.assertEqual(response.body, b": /\n")
-        self.assertEqual(response.headers.get("Content-Length"), "4")
-        self.assertEqual(response.headers.get("Content-Range"),
-                         "bytes 22-25/26")
-
-    def test_static_with_range_neg_end(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=-4'})
-        self.assertEqual(response.body, b": /\n")
-        self.assertEqual(response.headers.get("Content-Length"), "4")
-        self.assertEqual(response.headers.get("Content-Range"),
-                         "bytes 22-25/26")
-
-    def test_static_invalid_range(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'asdf'})
-        self.assertEqual(response.code, 200)
-
-    def test_static_unsatisfiable_range_zero_suffix(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=-0'})
-        self.assertEqual(response.headers.get("Content-Range"),
-                         "bytes */26")
-        self.assertEqual(response.code, 416)
-
-    def test_static_unsatisfiable_range_invalid_start(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=26'})
-        self.assertEqual(response.code, 416)
-        self.assertEqual(response.headers.get("Content-Range"),
-                         "bytes */26")
-
-    def test_static_head(self):
-        response = self.fetch('/static/robots.txt', method='HEAD')
-        self.assertEqual(response.code, 200)
-        # No body was returned, but we did get the right content length.
-        self.assertEqual(response.body, b'')
-        self.assertEqual(response.headers['Content-Length'], '26')
-        self.assertEqual(utf8(response.headers['Etag']),
-                         b'"' + self.robots_txt_hash + b'"')
-
-    def test_static_head_range(self):
-        response = self.fetch('/static/robots.txt', method='HEAD',
-                              headers={'Range': 'bytes=1-4'})
-        self.assertEqual(response.code, 206)
-        self.assertEqual(response.body, b'')
-        self.assertEqual(response.headers['Content-Length'], '4')
-        self.assertEqual(utf8(response.headers['Etag']),
-                         b'"' + self.robots_txt_hash + b'"')
-
-    def test_static_range_if_none_match(self):
-        response = self.get_and_head('/static/robots.txt', headers={
-            'Range': 'bytes=1-4',
-            'If-None-Match': b'"' + self.robots_txt_hash + b'"'})
-        self.assertEqual(response.code, 304)
-        self.assertEqual(response.body, b'')
-        self.assertTrue('Content-Length' not in response.headers)
-        self.assertEqual(utf8(response.headers['Etag']),
-                         b'"' + self.robots_txt_hash + b'"')
-
-    def test_static_404(self):
-        response = self.get_and_head('/static/blarg')
-        self.assertEqual(response.code, 404)
-
-    def test_path_traversal_protection(self):
-        # curl_httpclient processes ".." on the client side, so we
-        # must test this with simple_httpclient.
-        self.http_client.close()
-        self.http_client = SimpleAsyncHTTPClient()
-        with ExpectLog(gen_log, ".*not in root static directory"):
-            response = self.get_and_head('/static/../static_foo.txt')
-        # Attempted path traversal should result in 403, not 200
-        # (which means the check failed and the file was served)
-        # or 404 (which means that the file didn't exist and
-        # is probably a packaging error).
-        self.assertEqual(response.code, 403)
-
-    @unittest.skipIf(os.name != 'posix', 'non-posix OS')
-    def test_root_static_path(self):
-        # Sometimes people set the StaticFileHandler's path to '/'
-        # to disable Tornado's path validation (in conjunction with
-        # their own validation in get_absolute_path). Make sure
-        # that the stricter validation in 4.2.1 doesn't break them.
-        path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
-                            'static/robots.txt')
-        response = self.get_and_head('/root_static' + urllib_parse.quote(path))
-        self.assertEqual(response.code, 200)
-
-
-@wsgi_safe
-class StaticDefaultFilenameTest(WebTestCase):
-    def get_app_kwargs(self):
-        return dict(static_path=relpath('static'),
-                    static_handler_args=dict(default_filename='index.html'))
-
-    def get_handlers(self):
-        return []
-
-    def test_static_default_filename(self):
-        response = self.fetch('/static/dir/', follow_redirects=False)
-        self.assertEqual(response.code, 200)
-        self.assertEqual(b'this is the index\n', response.body)
-
-    def test_static_default_redirect(self):
-        response = self.fetch('/static/dir', follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertTrue(response.headers['Location'].endswith('/static/dir/'))
-
-
-@wsgi_safe
-class StaticFileWithPathTest(WebTestCase):
-    def get_app_kwargs(self):
-        return dict(static_path=relpath('static'),
-                    static_handler_args=dict(default_filename='index.html'))
-
-    def get_handlers(self):
-        return [("/foo/(.*)", StaticFileHandler, {
-            "path": relpath("templates/"),
-        })]
-
-    def test_serve(self):
-        response = self.fetch("/foo/utf8.html")
-        self.assertEqual(response.body, b"H\xc3\xa9llo\n")
-
-
-@wsgi_safe
-class CustomStaticFileTest(WebTestCase):
-    def get_handlers(self):
-        class MyStaticFileHandler(StaticFileHandler):
-            @classmethod
-            def make_static_url(cls, settings, path):
-                version_hash = cls.get_version(settings, path)
-                extension_index = path.rindex('.')
-                before_version = path[:extension_index]
-                after_version = path[(extension_index + 1):]
-                return '/static/%s.%s.%s' % (before_version, version_hash,
-                                             after_version)
-
-            def parse_url_path(self, url_path):
-                extension_index = url_path.rindex('.')
-                version_index = url_path.rindex('.', 0, extension_index)
-                return '%s%s' % (url_path[:version_index],
-                                 url_path[extension_index:])
-
-            @classmethod
-            def get_absolute_path(cls, settings, path):
-                return 'CustomStaticFileTest:' + path
-
-            def validate_absolute_path(self, root, absolute_path):
-                return absolute_path
-
-            @classmethod
-            def get_content(cls, path, start=None, end=None):
-                assert start is None and end is None
-                if path == 'CustomStaticFileTest:foo.txt':
-                    return b'bar'
-                raise Exception("unexpected path %r" % path)
-
-            def get_content_size(self):
-                if self.absolute_path == 'CustomStaticFileTest:foo.txt':
-                    return 3
-                raise Exception("unexpected path %r" % self.absolute_path)
-
-            def get_modified_time(self):
-                return None
-
-            @classmethod
-            def get_version(cls, settings, path):
-                return "42"
-
-        class StaticUrlHandler(RequestHandler):
-            def get(self, path):
-                self.write(self.static_url(path))
-
-        self.static_handler_class = MyStaticFileHandler
-
-        return [("/static_url/(.*)", StaticUrlHandler)]
-
-    def get_app_kwargs(self):
-        return dict(static_path="dummy",
-                    static_handler_class=self.static_handler_class)
-
-    def test_serve(self):
-        response = self.fetch("/static/foo.42.txt")
-        self.assertEqual(response.body, b"bar")
-
-    def test_static_url(self):
-        with ExpectLog(gen_log, "Could not open static file", required=False):
-            response = self.fetch("/static_url/foo.txt")
-            self.assertEqual(response.body, b"/static/foo.42.txt")
-
-
-@wsgi_safe
-class HostMatchingTest(WebTestCase):
-    class Handler(RequestHandler):
-        def initialize(self, reply):
-            self.reply = reply
-
-        def get(self):
-            self.write(self.reply)
-
-    def get_handlers(self):
-        return [("/foo", HostMatchingTest.Handler, {"reply": "wildcard"})]
-
-    def test_host_matching(self):
-        self.app.add_handlers("www.example.com",
-                              [("/foo", HostMatchingTest.Handler, {"reply": "[0]"})])
-        self.app.add_handlers(r"www\.example\.com",
-                              [("/bar", HostMatchingTest.Handler, {"reply": "[1]"})])
-        self.app.add_handlers("www.example.com",
-                              [("/baz", HostMatchingTest.Handler, {"reply": "[2]"})])
-        self.app.add_handlers("www.e.*e.com",
-                              [("/baz", HostMatchingTest.Handler, {"reply": "[3]"})])
-
-        response = self.fetch("/foo")
-        self.assertEqual(response.body, b"wildcard")
-        response = self.fetch("/bar")
-        self.assertEqual(response.code, 404)
-        response = self.fetch("/baz")
-        self.assertEqual(response.code, 404)
-
-        response = self.fetch("/foo", headers={'Host': 'www.example.com'})
-        self.assertEqual(response.body, b"[0]")
-        response = self.fetch("/bar", headers={'Host': 'www.example.com'})
-        self.assertEqual(response.body, b"[1]")
-        response = self.fetch("/baz", headers={'Host': 'www.example.com'})
-        self.assertEqual(response.body, b"[2]")
-        response = self.fetch("/baz", headers={'Host': 'www.exe.com'})
-        self.assertEqual(response.body, b"[3]")
-
-
-@wsgi_safe
-class DefaultHostMatchingTest(WebTestCase):
-    def get_handlers(self):
-        return []
-
-    def get_app_kwargs(self):
-        return {'default_host': "www.example.com"}
-
-    def test_default_host_matching(self):
-        self.app.add_handlers("www.example.com",
-                              [("/foo", HostMatchingTest.Handler, {"reply": "[0]"})])
-        self.app.add_handlers(r"www\.example\.com",
-                              [("/bar", HostMatchingTest.Handler, {"reply": "[1]"})])
-        self.app.add_handlers("www.test.com",
-                              [("/baz", HostMatchingTest.Handler, {"reply": "[2]"})])
-
-        response = self.fetch("/foo")
-        self.assertEqual(response.body, b"[0]")
-        response = self.fetch("/bar")
-        self.assertEqual(response.body, b"[1]")
-        response = self.fetch("/baz")
-        self.assertEqual(response.code, 404)
-
-        response = self.fetch("/foo", headers={"X-Real-Ip": "127.0.0.1"})
-        self.assertEqual(response.code, 404)
-
-        self.app.default_host = "www.test.com"
-
-        response = self.fetch("/baz")
-        self.assertEqual(response.body, b"[2]")
-
-
-@wsgi_safe
-class NamedURLSpecGroupsTest(WebTestCase):
-    def get_handlers(self):
-        class EchoHandler(RequestHandler):
-            def get(self, path):
-                self.write(path)
-
-        return [("/str/(?P<path>.*)", EchoHandler),
-                (u"/unicode/(?P<path>.*)", EchoHandler)]
-
-    def test_named_urlspec_groups(self):
-        response = self.fetch("/str/foo")
-        self.assertEqual(response.body, b"foo")
-
-        response = self.fetch("/unicode/bar")
-        self.assertEqual(response.body, b"bar")
-
-
-@wsgi_safe
-class ClearHeaderTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.set_header("h1", "foo")
-            self.set_header("h2", "bar")
-            self.clear_header("h1")
-            self.clear_header("nonexistent")
-
-    def test_clear_header(self):
-        response = self.fetch("/")
-        self.assertTrue("h1" not in response.headers)
-        self.assertEqual(response.headers["h2"], "bar")
-
-
-class Header204Test(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.set_status(204)
-            self.finish()
-
-    def test_204_headers(self):
-        response = self.fetch('/')
-        self.assertEqual(response.code, 204)
-        self.assertNotIn("Content-Length", response.headers)
-        self.assertNotIn("Transfer-Encoding", response.headers)
-
-
-@wsgi_safe
-class Header304Test(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.set_header("Content-Language", "en_US")
-            self.write("hello")
-
-    def test_304_headers(self):
-        response1 = self.fetch('/')
-        self.assertEqual(response1.headers["Content-Length"], "5")
-        self.assertEqual(response1.headers["Content-Language"], "en_US")
-
-        response2 = self.fetch('/', headers={
-            'If-None-Match': response1.headers["Etag"]})
-        self.assertEqual(response2.code, 304)
-        self.assertTrue("Content-Length" not in response2.headers)
-        self.assertTrue("Content-Language" not in response2.headers)
-        # Not an entity header, but should not be added to 304s by chunking
-        self.assertTrue("Transfer-Encoding" not in response2.headers)
-
-
-@wsgi_safe
-class StatusReasonTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            reason = self.request.arguments.get('reason', [])
-            self.set_status(int(self.get_argument('code')),
-                            reason=reason[0] if reason else None)
-
-    def get_http_client(self):
-        # simple_httpclient only: curl doesn't expose the reason string
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop)
-
-    def test_status(self):
-        response = self.fetch("/?code=304")
-        self.assertEqual(response.code, 304)
-        self.assertEqual(response.reason, "Not Modified")
-        response = self.fetch("/?code=304&reason=Foo")
-        self.assertEqual(response.code, 304)
-        self.assertEqual(response.reason, "Foo")
-        response = self.fetch("/?code=682&reason=Bar")
-        self.assertEqual(response.code, 682)
-        self.assertEqual(response.reason, "Bar")
-        with ExpectLog(app_log, 'Uncaught exception'):
-            response = self.fetch("/?code=682")
-        self.assertEqual(response.code, 500)
-
-
-@wsgi_safe
-class DateHeaderTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.write("hello")
-
-    def test_date_header(self):
-        response = self.fetch('/')
-        header_date = datetime.datetime(
-            *email.utils.parsedate(response.headers['Date'])[:6])
-        self.assertTrue(header_date - datetime.datetime.utcnow() <
-                        datetime.timedelta(seconds=2))
-
-
-@wsgi_safe
-class RaiseWithReasonTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            raise HTTPError(682, reason="Foo")
-
-    def get_http_client(self):
-        # simple_httpclient only: curl doesn't expose the reason string
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop)
-
-    def test_raise_with_reason(self):
-        response = self.fetch("/")
-        self.assertEqual(response.code, 682)
-        self.assertEqual(response.reason, "Foo")
-        self.assertIn(b'682: Foo', response.body)
-
-    def test_httperror_str(self):
-        self.assertEqual(str(HTTPError(682, reason="Foo")), "HTTP 682: Foo")
-
-    def test_httperror_str_from_httputil(self):
-        self.assertEqual(str(HTTPError(682)), "HTTP 682: Unknown")
-
-
-@wsgi_safe
-class ErrorHandlerXSRFTest(WebTestCase):
-    def get_handlers(self):
-        # note that if the handlers list is empty we get the default_host
-        # redirect fallback instead of a 404, so test with both an
-        # explicitly defined error handler and an implicit 404.
-        return [('/error', ErrorHandler, dict(status_code=417))]
-
-    def get_app_kwargs(self):
-        return dict(xsrf_cookies=True)
-
-    def test_error_xsrf(self):
-        response = self.fetch('/error', method='POST', body='')
-        self.assertEqual(response.code, 417)
-
-    def test_404_xsrf(self):
-        response = self.fetch('/404', method='POST', body='')
-        self.assertEqual(response.code, 404)
-
-
-@wsgi_safe
-class GzipTestCase(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            for v in self.get_arguments('vary'):
-                self.add_header('Vary', v)
-            # Must write at least MIN_LENGTH bytes to activate compression.
-            self.write('hello world' + ('!' * GZipContentEncoding.MIN_LENGTH))
-
-    def get_app_kwargs(self):
-        return dict(
-            gzip=True,
-            static_path=os.path.join(os.path.dirname(__file__), 'static'))
-
-    def assert_compressed(self, response):
-        # simple_httpclient renames the content-encoding header;
-        # curl_httpclient doesn't.
-        self.assertEqual(
-            response.headers.get(
-                'Content-Encoding',
-                response.headers.get('X-Consumed-Content-Encoding')),
-            'gzip')
-
-    def test_gzip(self):
-        response = self.fetch('/')
-        self.assert_compressed(response)
-        self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
-
-    def test_gzip_static(self):
-        # The streaming responses in StaticFileHandler have subtle
-        # interactions with the gzip output so test this case separately.
-        response = self.fetch('/robots.txt')
-        self.assert_compressed(response)
-        self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
-
-    def test_gzip_not_requested(self):
-        response = self.fetch('/', use_gzip=False)
-        self.assertNotIn('Content-Encoding', response.headers)
-        self.assertEqual(response.headers['Vary'], 'Accept-Encoding')
-
-    def test_vary_already_present(self):
-        response = self.fetch('/?vary=Accept-Language')
-        self.assert_compressed(response)
-        self.assertEqual([s.strip() for s in response.headers['Vary'].split(',')],
-                         ['Accept-Language', 'Accept-Encoding'])
-
-    def test_vary_already_present_multiple(self):
-        # Regression test for https://github.com/tornadoweb/tornado/issues/1670
-        response = self.fetch('/?vary=Accept-Language&vary=Cookie')
-        self.assert_compressed(response)
-        self.assertEqual([s.strip() for s in response.headers['Vary'].split(',')],
-                         ['Accept-Language', 'Cookie', 'Accept-Encoding'])
-
-
-@wsgi_safe
-class PathArgsInPrepareTest(WebTestCase):
-    class Handler(RequestHandler):
-        def prepare(self):
-            self.write(dict(args=self.path_args, kwargs=self.path_kwargs))
-
-        def get(self, path):
-            assert path == 'foo'
-            self.finish()
-
-    def get_handlers(self):
-        return [('/pos/(.*)', self.Handler),
-                ('/kw/(?P<path>.*)', self.Handler)]
-
-    def test_pos(self):
-        response = self.fetch('/pos/foo')
-        response.rethrow()
-        data = json_decode(response.body)
-        self.assertEqual(data, {'args': ['foo'], 'kwargs': {}})
-
-    def test_kw(self):
-        response = self.fetch('/kw/foo')
-        response.rethrow()
-        data = json_decode(response.body)
-        self.assertEqual(data, {'args': [], 'kwargs': {'path': 'foo'}})
-
-
-@wsgi_safe
-class ClearAllCookiesTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.clear_all_cookies()
-            self.write('ok')
-
-    def test_clear_all_cookies(self):
-        response = self.fetch('/', headers={'Cookie': 'foo=bar; baz=xyzzy'})
-        set_cookies = sorted(response.headers.get_list('Set-Cookie'))
-        # Python 3.5 sends 'baz="";'; older versions use 'baz=;'
-        self.assertTrue(set_cookies[0].startswith('baz=;') or
-                        set_cookies[0].startswith('baz="";'))
-        self.assertTrue(set_cookies[1].startswith('foo=;') or
-                        set_cookies[1].startswith('foo="";'))
-
-
-class PermissionError(Exception):
-    pass
-
-
-@wsgi_safe
-class ExceptionHandlerTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            exc = self.get_argument('exc')
-            if exc == 'http':
-                raise HTTPError(410, "no longer here")
-            elif exc == 'zero':
-                1 / 0
-            elif exc == 'permission':
-                raise PermissionError('not allowed')
-
-        def write_error(self, status_code, **kwargs):
-            if 'exc_info' in kwargs:
-                typ, value, tb = kwargs['exc_info']
-                if isinstance(value, PermissionError):
-                    self.set_status(403)
-                    self.write('PermissionError')
-                    return
-            RequestHandler.write_error(self, status_code, **kwargs)
-
-        def log_exception(self, typ, value, tb):
-            if isinstance(value, PermissionError):
-                app_log.warning('custom logging for PermissionError: %s',
-                                value.args[0])
-            else:
-                RequestHandler.log_exception(self, typ, value, tb)
-
-    def test_http_error(self):
-        # HTTPErrors are logged as warnings with no stack trace.
-        # TODO: extend ExpectLog to test this more precisely
-        with ExpectLog(gen_log, '.*no longer here'):
-            response = self.fetch('/?exc=http')
-            self.assertEqual(response.code, 410)
-
-    def test_unknown_error(self):
-        # Unknown errors are logged as errors with a stack trace.
-        with ExpectLog(app_log, 'Uncaught exception'):
-            response = self.fetch('/?exc=zero')
-            self.assertEqual(response.code, 500)
-
-    def test_known_error(self):
-        # log_exception can override logging behavior, and write_error
-        # can override the response.
-        with ExpectLog(app_log,
-                       'custom logging for PermissionError: not allowed'):
-            response = self.fetch('/?exc=permission')
-            self.assertEqual(response.code, 403)
-
-
-@wsgi_safe
-class BuggyLoggingTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            1 / 0
-
-        def log_exception(self, typ, value, tb):
-            1 / 0
-
-    def test_buggy_log_exception(self):
-        # Something gets logged even though the application's
-        # logger is broken.
-        with ExpectLog(app_log, '.*'):
-            self.fetch('/')
-
-
-@wsgi_safe
-class UIMethodUIModuleTest(SimpleHandlerTestCase):
-    """Test that UI methods and modules are created correctly and
-    associated with the handler.
-    """
-    class Handler(RequestHandler):
-        def get(self):
-            self.render('foo.html')
-
-        def value(self):
-            return self.get_argument("value")
-
-    def get_app_kwargs(self):
-        def my_ui_method(handler, x):
-            return "In my_ui_method(%s) with handler value %s." % (
-                x, handler.value())
-
-        class MyModule(UIModule):
-            def render(self, x):
-                return "In MyModule(%s) with handler value %s." % (
-                    x, self.handler.value())
-
-        loader = DictLoader({
-            'foo.html': '{{ my_ui_method(42) }} {% module MyModule(123) %}',
-        })
-        return dict(template_loader=loader,
-                    ui_methods={'my_ui_method': my_ui_method},
-                    ui_modules={'MyModule': MyModule})
-
-    def tearDown(self):
-        super(UIMethodUIModuleTest, self).tearDown()
-        # TODO: fix template loader caching so this isn't necessary.
-        RequestHandler._template_loaders.clear()
-
-    def test_ui_method(self):
-        response = self.fetch('/?value=asdf')
-        self.assertEqual(response.body,
-                         b'In my_ui_method(42) with handler value asdf. '
-                         b'In MyModule(123) with handler value asdf.')
-
-
-@wsgi_safe
-class GetArgumentErrorTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            try:
-                self.get_argument('foo')
-                self.write({})
-            except MissingArgumentError as e:
-                self.write({'arg_name': e.arg_name,
-                            'log_message': e.log_message})
-
-    def test_catch_error(self):
-        response = self.fetch('/')
-        self.assertEqual(json_decode(response.body),
-                         {'arg_name': 'foo',
-                          'log_message': 'Missing argument foo'})
-
-
-class MultipleExceptionTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        exc_count = 0
-
-        @asynchronous
-        def get(self):
-            from tornado.ioloop import IOLoop
-            IOLoop.current().add_callback(lambda: 1 / 0)
-            IOLoop.current().add_callback(lambda: 1 / 0)
-
-        def log_exception(self, typ, value, tb):
-            MultipleExceptionTest.Handler.exc_count += 1
-
-    def test_multi_exception(self):
-        # This test verifies that multiple exceptions raised into the same
-        # ExceptionStackContext do not generate extraneous log entries
-        # due to "Cannot send error response after headers written".
-        # log_exception is called, but it does not proceed to send_error.
-        response = self.fetch('/')
-        self.assertEqual(response.code, 500)
-        response = self.fetch('/')
-        self.assertEqual(response.code, 500)
-        # Each of our two requests generated two exceptions, we should have
-        # seen at least three of them by now (the fourth may still be
-        # in the queue).
-        self.assertGreater(MultipleExceptionTest.Handler.exc_count, 2)
-
-
-@wsgi_safe
-class SetLazyPropertiesTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def prepare(self):
-            self.current_user = 'Ben'
-            self.locale = locale.get('en_US')
-
-        def get_user_locale(self):
-            raise NotImplementedError()
-
-        def get_current_user(self):
-            raise NotImplementedError()
-
-        def get(self):
-            self.write('Hello %s (%s)' % (self.current_user, self.locale.code))
-
-    def test_set_properties(self):
-        # Ensure that current_user can be assigned to normally for apps
-        # that want to forgo the lazy get_current_user property
-        response = self.fetch('/')
-        self.assertEqual(response.body, b'Hello Ben (en_US)')
-
-
-@wsgi_safe
-class GetCurrentUserTest(WebTestCase):
-    def get_app_kwargs(self):
-        class WithoutUserModule(UIModule):
-            def render(self):
-                return ''
-
-        class WithUserModule(UIModule):
-            def render(self):
-                return str(self.current_user)
-
-        loader = DictLoader({
-            'without_user.html': '',
-            'with_user.html': '{{ current_user }}',
-            'without_user_module.html': '{% module WithoutUserModule() %}',
-            'with_user_module.html': '{% module WithUserModule() %}',
-        })
-        return dict(template_loader=loader,
-                    ui_modules={'WithUserModule': WithUserModule,
-                                'WithoutUserModule': WithoutUserModule})
-
-    def tearDown(self):
-        super(GetCurrentUserTest, self).tearDown()
-        RequestHandler._template_loaders.clear()
-
-    def get_handlers(self):
-        class CurrentUserHandler(RequestHandler):
-            def prepare(self):
-                self.has_loaded_current_user = False
-
-            def get_current_user(self):
-                self.has_loaded_current_user = True
-                return ''
-
-        class WithoutUserHandler(CurrentUserHandler):
-            def get(self):
-                self.render_string('without_user.html')
-                self.finish(str(self.has_loaded_current_user))
-
-        class WithUserHandler(CurrentUserHandler):
-            def get(self):
-                self.render_string('with_user.html')
-                self.finish(str(self.has_loaded_current_user))
-
-        class CurrentUserModuleHandler(CurrentUserHandler):
-            def get_template_namespace(self):
-                # If RequestHandler.get_template_namespace is called, then
-                # get_current_user is evaluated. Until #820 is fixed, this
-                # is a small hack to circumvent the issue.
-                return self.ui
-
-        class WithoutUserModuleHandler(CurrentUserModuleHandler):
-            def get(self):
-                self.render_string('without_user_module.html')
-                self.finish(str(self.has_loaded_current_user))
-
-        class WithUserModuleHandler(CurrentUserModuleHandler):
-            def get(self):
-                self.render_string('with_user_module.html')
-                self.finish(str(self.has_loaded_current_user))
-
-        return [('/without_user', WithoutUserHandler),
-                ('/with_user', WithUserHandler),
-                ('/without_user_module', WithoutUserModuleHandler),
-                ('/with_user_module', WithUserModuleHandler)]
-
-    @unittest.skip('needs fix')
-    def test_get_current_user_is_lazy(self):
-        # TODO: Make this test pass. See #820.
-        response = self.fetch('/without_user')
-        self.assertEqual(response.body, b'False')
-
-    def test_get_current_user_works(self):
-        response = self.fetch('/with_user')
-        self.assertEqual(response.body, b'True')
-
-    def test_get_current_user_from_ui_module_is_lazy(self):
-        response = self.fetch('/without_user_module')
-        self.assertEqual(response.body, b'False')
-
-    def test_get_current_user_from_ui_module_works(self):
-        response = self.fetch('/with_user_module')
-        self.assertEqual(response.body, b'True')
-
-
-@wsgi_safe
-class UnimplementedHTTPMethodsTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        pass
-
-    def test_unimplemented_standard_methods(self):
-        for method in ['HEAD', 'GET', 'DELETE', 'OPTIONS']:
-            response = self.fetch('/', method=method)
-            self.assertEqual(response.code, 405)
-        for method in ['POST', 'PUT']:
-            response = self.fetch('/', method=method, body=b'')
-            self.assertEqual(response.code, 405)
-
-
-class UnimplementedNonStandardMethodsTest(SimpleHandlerTestCase):
-    # wsgiref.validate complains about unknown methods in a way that makes
-    # this test not wsgi_safe.
-    class Handler(RequestHandler):
-        def other(self):
-            # Even though this method exists, it won't get called automatically
-            # because it is not in SUPPORTED_METHODS.
-            self.write('other')
-
-    def test_unimplemented_patch(self):
-        # PATCH is recently standardized; Tornado supports it by default
-        # but wsgiref.validate doesn't like it.
-        response = self.fetch('/', method='PATCH', body=b'')
-        self.assertEqual(response.code, 405)
-
-    def test_unimplemented_other(self):
-        response = self.fetch('/', method='OTHER',
-                              allow_nonstandard_methods=True)
-        self.assertEqual(response.code, 405)
-
-
-@wsgi_safe
-class AllHTTPMethodsTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def method(self):
-            self.write(self.request.method)
-
-        get = delete = options = post = put = method
-
-    def test_standard_methods(self):
-        response = self.fetch('/', method='HEAD')
-        self.assertEqual(response.body, b'')
-        for method in ['GET', 'DELETE', 'OPTIONS']:
-            response = self.fetch('/', method=method)
-            self.assertEqual(response.body, utf8(method))
-        for method in ['POST', 'PUT']:
-            response = self.fetch('/', method=method, body=b'')
-            self.assertEqual(response.body, utf8(method))
-
-
-class PatchMethodTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        SUPPORTED_METHODS = RequestHandler.SUPPORTED_METHODS + ('OTHER',)
-
-        def patch(self):
-            self.write('patch')
-
-        def other(self):
-            self.write('other')
-
-    def test_patch(self):
-        response = self.fetch('/', method='PATCH', body=b'')
-        self.assertEqual(response.body, b'patch')
-
-    def test_other(self):
-        response = self.fetch('/', method='OTHER',
-                              allow_nonstandard_methods=True)
-        self.assertEqual(response.body, b'other')
-
-
-@wsgi_safe
-class FinishInPrepareTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def prepare(self):
-            self.finish('done')
-
-        def get(self):
-            # It's difficult to assert for certain that a method did not
-            # or will not be called in an asynchronous context, but this
-            # will be logged noisily if it is reached.
-            raise Exception('should not reach this method')
-
-    def test_finish_in_prepare(self):
-        response = self.fetch('/')
-        self.assertEqual(response.body, b'done')
-
-
-@wsgi_safe
-class Default404Test(WebTestCase):
-    def get_handlers(self):
-        # If there are no handlers at all a default redirect handler gets added.
-        return [('/foo', RequestHandler)]
-
-    def test_404(self):
-        response = self.fetch('/')
-        self.assertEqual(response.code, 404)
-        self.assertEqual(response.body,
-                         b'<html><title>404: Not Found</title>'
-                         b'<body>404: Not Found</body></html>')
-
-
-@wsgi_safe
-class Custom404Test(WebTestCase):
-    def get_handlers(self):
-        return [('/foo', RequestHandler)]
-
-    def get_app_kwargs(self):
-        class Custom404Handler(RequestHandler):
-            def get(self):
-                self.set_status(404)
-                self.write('custom 404 response')
-
-        return dict(default_handler_class=Custom404Handler)
-
-    def test_404(self):
-        response = self.fetch('/')
-        self.assertEqual(response.code, 404)
-        self.assertEqual(response.body, b'custom 404 response')
-
-
-@wsgi_safe
-class DefaultHandlerArgumentsTest(WebTestCase):
-    def get_handlers(self):
-        return [('/foo', RequestHandler)]
-
-    def get_app_kwargs(self):
-        return dict(default_handler_class=ErrorHandler,
-                    default_handler_args=dict(status_code=403))
-
-    def test_403(self):
-        response = self.fetch('/')
-        self.assertEqual(response.code, 403)
-
-
-@wsgi_safe
-class HandlerByNameTest(WebTestCase):
-    def get_handlers(self):
-        # All three are equivalent.
-        return [('/hello1', HelloHandler),
-                ('/hello2', 'tornado.test.web_test.HelloHandler'),
-                url('/hello3', 'tornado.test.web_test.HelloHandler'),
-                ]
-
-    def test_handler_by_name(self):
-        resp = self.fetch('/hello1')
-        self.assertEqual(resp.body, b'hello')
-        resp = self.fetch('/hello2')
-        self.assertEqual(resp.body, b'hello')
-        resp = self.fetch('/hello3')
-        self.assertEqual(resp.body, b'hello')
-
-
-class StreamingRequestBodyTest(WebTestCase):
-    def get_handlers(self):
-        @stream_request_body
-        class StreamingBodyHandler(RequestHandler):
-            def initialize(self, test):
-                self.test = test
-
-            def prepare(self):
-                self.test.prepared.set_result(None)
-
-            def data_received(self, data):
-                self.test.data.set_result(data)
-
-            def get(self):
-                self.test.finished.set_result(None)
-                self.write({})
-
-        @stream_request_body
-        class EarlyReturnHandler(RequestHandler):
-            def prepare(self):
-                # If we finish the response in prepare, it won't continue to
-                # the (non-existent) data_received.
-                raise HTTPError(401)
-
-        @stream_request_body
-        class CloseDetectionHandler(RequestHandler):
-            def initialize(self, test):
-                self.test = test
-
-            def on_connection_close(self):
-                super(CloseDetectionHandler, self).on_connection_close()
-                self.test.close_future.set_result(None)
-
-        return [('/stream_body', StreamingBodyHandler, dict(test=self)),
-                ('/early_return', EarlyReturnHandler),
-                ('/close_detection', CloseDetectionHandler, dict(test=self))]
-
-    def connect(self, url, connection_close):
-        # Use a raw connection so we can control the sending of data.
-        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
-        s.connect(("127.0.0.1", self.get_http_port()))
-        stream = IOStream(s, io_loop=self.io_loop)
-        stream.write(b"GET " + url + b" HTTP/1.1\r\n")
-        if connection_close:
-            stream.write(b"Connection: close\r\n")
-        stream.write(b"Transfer-Encoding: chunked\r\n\r\n")
-        return stream
-
-    @gen_test
-    def test_streaming_body(self):
-        self.prepared = Future()
-        self.data = Future()
-        self.finished = Future()
-
-        stream = self.connect(b"/stream_body", connection_close=True)
-        yield self.prepared
-        stream.write(b"4\r\nasdf\r\n")
-        # Ensure the first chunk is received before we send the second.
-        data = yield self.data
-        self.assertEqual(data, b"asdf")
-        self.data = Future()
-        stream.write(b"4\r\nqwer\r\n")
-        data = yield self.data
-        self.assertEqual(data, b"qwer")
-        stream.write(b"0\r\n")
-        yield self.finished
-        data = yield gen.Task(stream.read_until_close)
-        # This would ideally use an HTTP1Connection to read the response.
-        self.assertTrue(data.endswith(b"{}"))
-        stream.close()
-
-    @gen_test
-    def test_early_return(self):
-        stream = self.connect(b"/early_return", connection_close=False)
-        data = yield gen.Task(stream.read_until_close)
-        self.assertTrue(data.startswith(b"HTTP/1.1 401"))
-
-    @gen_test
-    def test_early_return_with_data(self):
-        stream = self.connect(b"/early_return", connection_close=False)
-        stream.write(b"4\r\nasdf\r\n")
-        data = yield gen.Task(stream.read_until_close)
-        self.assertTrue(data.startswith(b"HTTP/1.1 401"))
-
-    @gen_test
-    def test_close_during_upload(self):
-        self.close_future = Future()
-        stream = self.connect(b"/close_detection", connection_close=False)
-        stream.close()
-        yield self.close_future
-
-
-# Each method in this handler returns a yieldable object and yields to the
-# IOLoop so the future is not immediately ready.  Ensure that the
-# yieldables are respected and no method is called before the previous
-# one has completed.
-@stream_request_body
-class BaseFlowControlHandler(RequestHandler):
-    def initialize(self, test):
-        self.test = test
-        self.method = None
-        self.methods = []
-
-    @contextlib.contextmanager
-    def in_method(self, method):
-        if self.method is not None:
-            self.test.fail("entered method %s while in %s" %
-                           (method, self.method))
-        self.method = method
-        self.methods.append(method)
-        try:
-            yield
-        finally:
-            self.method = None
-
-    @gen.coroutine
-    def prepare(self):
-        # Note that asynchronous prepare() does not block data_received,
-        # so we don't use in_method here.
-        self.methods.append('prepare')
-        yield gen.Task(IOLoop.current().add_callback)
-
-    @gen.coroutine
-    def post(self):
-        with self.in_method('post'):
-            yield gen.Task(IOLoop.current().add_callback)
-        self.write(dict(methods=self.methods))
-
-
-class BaseStreamingRequestFlowControlTest(object):
-    def get_httpserver_options(self):
-        # Use a small chunk size so flow control is relevant even though
-        # all the data arrives at once.
-        return dict(chunk_size=10, decompress_request=True)
-
-    def get_http_client(self):
-        # simple_httpclient only: curl doesn't support body_producer.
-        return SimpleAsyncHTTPClient(io_loop=self.io_loop)
-
-    # Test all the slightly different code paths for fixed, chunked, etc bodies.
-    def test_flow_control_fixed_body(self):
-        response = self.fetch('/', body='abcdefghijklmnopqrstuvwxyz',
-                              method='POST')
-        response.rethrow()
-        self.assertEqual(json_decode(response.body),
-                         dict(methods=['prepare', 'data_received',
-                                       'data_received', 'data_received',
-                                       'post']))
-
-    def test_flow_control_chunked_body(self):
-        chunks = [b'abcd', b'efgh', b'ijkl']
-
-        @gen.coroutine
-        def body_producer(write):
-            for i in chunks:
-                yield write(i)
-        response = self.fetch('/', body_producer=body_producer, method='POST')
-        response.rethrow()
-        self.assertEqual(json_decode(response.body),
-                         dict(methods=['prepare', 'data_received',
-                                       'data_received', 'data_received',
-                                       'post']))
-
-    def test_flow_control_compressed_body(self):
-        bytesio = BytesIO()
-        gzip_file = gzip.GzipFile(mode='w', fileobj=bytesio)
-        gzip_file.write(b'abcdefghijklmnopqrstuvwxyz')
-        gzip_file.close()
-        compressed_body = bytesio.getvalue()
-        response = self.fetch('/', body=compressed_body, method='POST',
-                              headers={'Content-Encoding': 'gzip'})
-        response.rethrow()
-        self.assertEqual(json_decode(response.body),
-                         dict(methods=['prepare', 'data_received',
-                                       'data_received', 'data_received',
-                                       'post']))
-
-
-class DecoratedStreamingRequestFlowControlTest(
-        BaseStreamingRequestFlowControlTest,
-        WebTestCase):
-    def get_handlers(self):
-        class DecoratedFlowControlHandler(BaseFlowControlHandler):
-            @gen.coroutine
-            def data_received(self, data):
-                with self.in_method('data_received'):
-                    yield gen.Task(IOLoop.current().add_callback)
-        return [('/', DecoratedFlowControlHandler, dict(test=self))]
-
-
-@skipBefore35
-class NativeStreamingRequestFlowControlTest(
-        BaseStreamingRequestFlowControlTest,
-        WebTestCase):
-    def get_handlers(self):
-        class NativeFlowControlHandler(BaseFlowControlHandler):
-            data_received = exec_test(globals(), locals(), """
-            async def data_received(self, data):
-                with self.in_method('data_received'):
-                    await gen.Task(IOLoop.current().add_callback)
-            """)["data_received"]
-        return [('/', NativeFlowControlHandler, dict(test=self))]
-
-
-@wsgi_safe
-class IncorrectContentLengthTest(SimpleHandlerTestCase):
-    def get_handlers(self):
-        test = self
-        self.server_error = None
-
-        # Manually set a content-length that doesn't match the actual content.
-        class TooHigh(RequestHandler):
-            def get(self):
-                self.set_header("Content-Length", "42")
-                try:
-                    self.finish("ok")
-                except Exception as e:
-                    test.server_error = e
-                    raise
-
-        class TooLow(RequestHandler):
-            def get(self):
-                self.set_header("Content-Length", "2")
-                try:
-                    self.finish("hello")
-                except Exception as e:
-                    test.server_error = e
-                    raise
-
-        return [('/high', TooHigh),
-                ('/low', TooLow)]
-
-    def test_content_length_too_high(self):
-        # When the content-length is too high, the connection is simply
-        # closed without completing the response.  An error is logged on
-        # the server.
-        with ExpectLog(app_log, "(Uncaught exception|Exception in callback)"):
-            with ExpectLog(gen_log,
-                           "(Cannot send error response after headers written"
-                           "|Failed to flush partial response)"):
-                response = self.fetch("/high")
-        self.assertEqual(response.code, 599)
-        self.assertEqual(str(self.server_error),
-                         "Tried to write 40 bytes less than Content-Length")
-
-    def test_content_length_too_low(self):
-        # When the content-length is too low, the connection is closed
-        # without writing the last chunk, so the client never sees the request
-        # complete (which would be a framing error).
-        with ExpectLog(app_log, "(Uncaught exception|Exception in callback)"):
-            with ExpectLog(gen_log,
-                           "(Cannot send error response after headers written"
-                           "|Failed to flush partial response)"):
-                response = self.fetch("/low")
-        self.assertEqual(response.code, 599)
-        self.assertEqual(str(self.server_error),
-                         "Tried to write more data than Content-Length")
-
-
-class ClientCloseTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            if self.request.version.startswith('HTTP/1'):
-                # Simulate a connection closed by the client during
-                # request processing.  The client will see an error, but the
-                # server should respond gracefully (without logging errors
-                # because we were unable to write out as many bytes as
-                # Content-Length said we would)
-                self.request.connection.stream.close()
-                self.write('hello')
-            else:
-                # TODO: add a HTTP2-compatible version of this test.
-                self.write('requires HTTP/1.x')
-
-    def test_client_close(self):
-        response = self.fetch('/')
-        if response.body == b'requires HTTP/1.x':
-            self.skipTest('requires HTTP/1.x')
-        self.assertEqual(response.code, 599)
-
-
-class SignedValueTest(unittest.TestCase):
-    SECRET = "It's a secret to everybody"
-    SECRET_DICT = {0: "asdfbasdf", 1: "12312312", 2: "2342342"}
-
-    def past(self):
-        return self.present() - 86400 * 32
-
-    def present(self):
-        return 1300000000
-
-    def test_known_values(self):
-        signed_v1 = create_signed_value(SignedValueTest.SECRET, "key", "value",
-                                        version=1, clock=self.present)
-        self.assertEqual(
-            signed_v1,
-            b"dmFsdWU=|1300000000|31c934969f53e48164c50768b40cbd7e2daaaa4f")
-
-        signed_v2 = create_signed_value(SignedValueTest.SECRET, "key", "value",
-                                        version=2, clock=self.present)
-        self.assertEqual(
-            signed_v2,
-            b"2|1:0|10:1300000000|3:key|8:dmFsdWU=|"
-            b"3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152")
-
-        signed_default = create_signed_value(SignedValueTest.SECRET,
-                                             "key", "value", clock=self.present)
-        self.assertEqual(signed_default, signed_v2)
-
-        decoded_v1 = decode_signed_value(SignedValueTest.SECRET, "key",
-                                         signed_v1, min_version=1,
-                                         clock=self.present)
-        self.assertEqual(decoded_v1, b"value")
-
-        decoded_v2 = decode_signed_value(SignedValueTest.SECRET, "key",
-                                         signed_v2, min_version=2,
-                                         clock=self.present)
-        self.assertEqual(decoded_v2, b"value")
-
-    def test_name_swap(self):
-        signed1 = create_signed_value(SignedValueTest.SECRET, "key1", "value",
-                                      clock=self.present)
-        signed2 = create_signed_value(SignedValueTest.SECRET, "key2", "value",
-                                      clock=self.present)
-        # Try decoding each string with the other's "name"
-        decoded1 = decode_signed_value(SignedValueTest.SECRET, "key2", signed1,
-                                       clock=self.present)
-        self.assertIs(decoded1, None)
-        decoded2 = decode_signed_value(SignedValueTest.SECRET, "key1", signed2,
-                                       clock=self.present)
-        self.assertIs(decoded2, None)
-
-    def test_expired(self):
-        signed = create_signed_value(SignedValueTest.SECRET, "key1", "value",
-                                     clock=self.past)
-        decoded_past = decode_signed_value(SignedValueTest.SECRET, "key1",
-                                           signed, clock=self.past)
-        self.assertEqual(decoded_past, b"value")
-        decoded_present = decode_signed_value(SignedValueTest.SECRET, "key1",
-                                              signed, clock=self.present)
-        self.assertIs(decoded_present, None)
-
-    def test_payload_tampering(self):
-        # These cookies are variants of the one in test_known_values.
-        sig = "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"
-
-        def validate(prefix):
-            return (b'value' ==
-                    decode_signed_value(SignedValueTest.SECRET, "key",
-                                        prefix + sig, clock=self.present))
-        self.assertTrue(validate("2|1:0|10:1300000000|3:key|8:dmFsdWU=|"))
-        # Change key version
-        self.assertFalse(validate("2|1:1|10:1300000000|3:key|8:dmFsdWU=|"))
-        # length mismatch (field too short)
-        self.assertFalse(validate("2|1:0|10:130000000|3:key|8:dmFsdWU=|"))
-        # length mismatch (field too long)
-        self.assertFalse(validate("2|1:0|10:1300000000|3:keey|8:dmFsdWU=|"))
-
-    def test_signature_tampering(self):
-        prefix = "2|1:0|10:1300000000|3:key|8:dmFsdWU=|"
-
-        def validate(sig):
-            return (b'value' ==
-                    decode_signed_value(SignedValueTest.SECRET, "key",
-                                        prefix + sig, clock=self.present))
-        self.assertTrue(validate(
-            "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"))
-        # All zeros
-        self.assertFalse(validate("0" * 32))
-        # Change one character
-        self.assertFalse(validate(
-            "4d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e152"))
-        # Change another character
-        self.assertFalse(validate(
-            "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e153"))
-        # Truncate
-        self.assertFalse(validate(
-            "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e15"))
-        # Lengthen
-        self.assertFalse(validate(
-            "3d4e60b996ff9c5d5788e333a0cba6f238a22c6c0f94788870e1a9ecd482e1538"))
-
-    def test_non_ascii(self):
-        value = b"\xe9"
-        signed = create_signed_value(SignedValueTest.SECRET, "key", value,
-                                     clock=self.present)
-        decoded = decode_signed_value(SignedValueTest.SECRET, "key", signed,
-                                      clock=self.present)
-        self.assertEqual(value, decoded)
-
-    def test_key_versioning_read_write_default_key(self):
-        value = b"\xe9"
-        signed = create_signed_value(SignedValueTest.SECRET_DICT,
-                                     "key", value, clock=self.present,
-                                     key_version=0)
-        decoded = decode_signed_value(SignedValueTest.SECRET_DICT,
-                                      "key", signed, clock=self.present)
-        self.assertEqual(value, decoded)
-
-    def test_key_versioning_read_write_non_default_key(self):
-        value = b"\xe9"
-        signed = create_signed_value(SignedValueTest.SECRET_DICT,
-                                     "key", value, clock=self.present,
-                                     key_version=1)
-        decoded = decode_signed_value(SignedValueTest.SECRET_DICT,
-                                      "key", signed, clock=self.present)
-        self.assertEqual(value, decoded)
-
-    def test_key_versioning_invalid_key(self):
-        value = b"\xe9"
-        signed = create_signed_value(SignedValueTest.SECRET_DICT,
-                                     "key", value, clock=self.present,
-                                     key_version=0)
-        newkeys = SignedValueTest.SECRET_DICT.copy()
-        newkeys.pop(0)
-        decoded = decode_signed_value(newkeys,
-                                      "key", signed, clock=self.present)
-        self.assertEqual(None, decoded)
-
-    def test_key_version_retrieval(self):
-        value = b"\xe9"
-        signed = create_signed_value(SignedValueTest.SECRET_DICT,
-                                     "key", value, clock=self.present,
-                                     key_version=1)
-        key_version = get_signature_key_version(signed)
-        self.assertEqual(1, key_version)
-
-
-@wsgi_safe
-class XSRFTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            version = int(self.get_argument("version", "2"))
-            # This would be a bad idea in a real app, but in this test
-            # it's fine.
-            self.settings["xsrf_cookie_version"] = version
-            self.write(self.xsrf_token)
-
-        def post(self):
-            self.write("ok")
-
-    def get_app_kwargs(self):
-        return dict(xsrf_cookies=True)
-
-    def setUp(self):
-        super(XSRFTest, self).setUp()
-        self.xsrf_token = self.get_token()
-
-    def get_token(self, old_token=None, version=None):
-        if old_token is not None:
-            headers = self.cookie_headers(old_token)
-        else:
-            headers = None
-        response = self.fetch(
-            "/" if version is None else ("/?version=%d" % version),
-            headers=headers)
-        response.rethrow()
-        return native_str(response.body)
-
-    def cookie_headers(self, token=None):
-        if token is None:
-            token = self.xsrf_token
-        return {"Cookie": "_xsrf=" + token}
-
-    def test_xsrf_fail_no_token(self):
-        with ExpectLog(gen_log, ".*'_xsrf' argument missing"):
-            response = self.fetch("/", method="POST", body=b"")
-        self.assertEqual(response.code, 403)
-
-    def test_xsrf_fail_body_no_cookie(self):
-        with ExpectLog(gen_log, ".*XSRF cookie does not match POST"):
-            response = self.fetch(
-                "/", method="POST",
-                body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)))
-        self.assertEqual(response.code, 403)
-
-    def test_xsrf_fail_argument_invalid_format(self):
-        with ExpectLog(gen_log, ".*'_xsrf' argument has invalid format"):
-            response = self.fetch(
-                "/", method="POST",
-                headers=self.cookie_headers(),
-                body=urllib_parse.urlencode(dict(_xsrf='3|')))
-        self.assertEqual(response.code, 403)
-
-    def test_xsrf_fail_cookie_invalid_format(self):
-        with ExpectLog(gen_log, ".*XSRF cookie does not match POST"):
-            response = self.fetch(
-                "/", method="POST",
-                headers=self.cookie_headers(token='3|'),
-                body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)))
-        self.assertEqual(response.code, 403)
-
-    def test_xsrf_fail_cookie_no_body(self):
-        with ExpectLog(gen_log, ".*'_xsrf' argument missing"):
-            response = self.fetch(
-                "/", method="POST", body=b"",
-                headers=self.cookie_headers())
-        self.assertEqual(response.code, 403)
-
-    def test_xsrf_success_short_token(self):
-        response = self.fetch(
-            "/", method="POST",
-            body=urllib_parse.urlencode(dict(_xsrf='deadbeef')),
-            headers=self.cookie_headers(token='deadbeef'))
-        self.assertEqual(response.code, 200)
-
-    def test_xsrf_success_non_hex_token(self):
-        response = self.fetch(
-            "/", method="POST",
-            body=urllib_parse.urlencode(dict(_xsrf='xoxo')),
-            headers=self.cookie_headers(token='xoxo'))
-        self.assertEqual(response.code, 200)
-
-    def test_xsrf_success_post_body(self):
-        response = self.fetch(
-            "/", method="POST",
-            body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
-            headers=self.cookie_headers())
-        self.assertEqual(response.code, 200)
-
-    def test_xsrf_success_query_string(self):
-        response = self.fetch(
-            "/?" + urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
-            method="POST", body=b"",
-            headers=self.cookie_headers())
-        self.assertEqual(response.code, 200)
-
-    def test_xsrf_success_header(self):
-        response = self.fetch("/", method="POST", body=b"",
-                              headers=dict({"X-Xsrftoken": self.xsrf_token},  # type: ignore
-                                           **self.cookie_headers()))
-        self.assertEqual(response.code, 200)
-
-    def test_distinct_tokens(self):
-        # Every request gets a distinct token.
-        NUM_TOKENS = 10
-        tokens = set()
-        for i in range(NUM_TOKENS):
-            tokens.add(self.get_token())
-        self.assertEqual(len(tokens), NUM_TOKENS)
-
-    def test_cross_user(self):
-        token2 = self.get_token()
-        # Each token can be used to authenticate its own request.
-        for token in (self.xsrf_token, token2):
-            response = self.fetch(
-                "/", method="POST",
-                body=urllib_parse.urlencode(dict(_xsrf=token)),
-                headers=self.cookie_headers(token))
-            self.assertEqual(response.code, 200)
-        # Sending one in the cookie and the other in the body is not allowed.
-        for cookie_token, body_token in ((self.xsrf_token, token2),
-                                         (token2, self.xsrf_token)):
-            with ExpectLog(gen_log, '.*XSRF cookie does not match POST'):
-                response = self.fetch(
-                    "/", method="POST",
-                    body=urllib_parse.urlencode(dict(_xsrf=body_token)),
-                    headers=self.cookie_headers(cookie_token))
-            self.assertEqual(response.code, 403)
-
-    def test_refresh_token(self):
-        token = self.xsrf_token
-        tokens_seen = set([token])
-        # A user's token is stable over time.  Refreshing the page in one tab
-        # might update the cookie while an older tab still has the old cookie
-        # in its DOM.  Simulate this scenario by passing a constant token
-        # in the body and re-querying for the token.
-        for i in range(5):
-            token = self.get_token(token)
-            # Tokens are encoded uniquely each time
-            tokens_seen.add(token)
-            response = self.fetch(
-                "/", method="POST",
-                body=urllib_parse.urlencode(dict(_xsrf=self.xsrf_token)),
-                headers=self.cookie_headers(token))
-            self.assertEqual(response.code, 200)
-        self.assertEqual(len(tokens_seen), 6)
-
-    def test_versioning(self):
-        # Version 1 still produces distinct tokens per request.
-        self.assertNotEqual(self.get_token(version=1),
-                            self.get_token(version=1))
-
-        # Refreshed v1 tokens are all identical.
-        v1_token = self.get_token(version=1)
-        for i in range(5):
-            self.assertEqual(self.get_token(v1_token, version=1), v1_token)
-
-        # Upgrade to a v2 version of the same token
-        v2_token = self.get_token(v1_token)
-        self.assertNotEqual(v1_token, v2_token)
-        # Each v1 token can map to many v2 tokens.
-        self.assertNotEqual(v2_token, self.get_token(v1_token))
-
-        # The tokens are cross-compatible.
-        for cookie_token, body_token in ((v1_token, v2_token),
-                                         (v2_token, v1_token)):
-            response = self.fetch(
-                "/", method="POST",
-                body=urllib_parse.urlencode(dict(_xsrf=body_token)),
-                headers=self.cookie_headers(cookie_token))
-            self.assertEqual(response.code, 200)
-
-
-@wsgi_safe
-class XSRFCookieKwargsTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.write(self.xsrf_token)
-
-    def get_app_kwargs(self):
-        return dict(xsrf_cookies=True,
-                    xsrf_cookie_kwargs=dict(httponly=True))
-
-    def test_xsrf_httponly(self):
-        response = self.fetch("/")
-        self.assertIn('httponly;', response.headers['Set-Cookie'].lower())
-
-
-@wsgi_safe
-class FinishExceptionTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            self.set_status(401)
-            self.set_header('WWW-Authenticate', 'Basic realm="something"')
-            if self.get_argument('finish_value', ''):
-                raise Finish('authentication required')
-            else:
-                self.write('authentication required')
-                raise Finish()
-
-    def test_finish_exception(self):
-        for u in ['/', '/?finish_value=1']:
-            response = self.fetch(u)
-            self.assertEqual(response.code, 401)
-            self.assertEqual('Basic realm="something"',
-                             response.headers.get('WWW-Authenticate'))
-            self.assertEqual(b'authentication required', response.body)
-
-
-@wsgi_safe
-class DecoratorTest(WebTestCase):
-    def get_handlers(self):
-        class RemoveSlashHandler(RequestHandler):
-            @removeslash
-            def get(self):
-                pass
-
-        class AddSlashHandler(RequestHandler):
-            @addslash
-            def get(self):
-                pass
-
-        return [("/removeslash/", RemoveSlashHandler),
-                ("/addslash", AddSlashHandler),
-                ]
-
-    def test_removeslash(self):
-        response = self.fetch("/removeslash/", follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertEqual(response.headers['Location'], "/removeslash")
-
-        response = self.fetch("/removeslash/?foo=bar", follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertEqual(response.headers['Location'], "/removeslash?foo=bar")
-
-    def test_addslash(self):
-        response = self.fetch("/addslash", follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertEqual(response.headers['Location'], "/addslash/")
-
-        response = self.fetch("/addslash?foo=bar", follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertEqual(response.headers['Location'], "/addslash/?foo=bar")
-
-
-@wsgi_safe
-class CacheTest(WebTestCase):
-    def get_handlers(self):
-        class EtagHandler(RequestHandler):
-            def get(self, computed_etag):
-                self.write(computed_etag)
-
-            def compute_etag(self):
-                return self._write_buffer[0]
-
-        return [
-            ('/etag/(.*)', EtagHandler)
-        ]
-
-    def test_wildcard_etag(self):
-        computed_etag = '"xyzzy"'
-        etags = '*'
-        self._test_etag(computed_etag, etags, 304)
-
-    def test_strong_etag_match(self):
-        computed_etag = '"xyzzy"'
-        etags = '"xyzzy"'
-        self._test_etag(computed_etag, etags, 304)
-
-    def test_multiple_strong_etag_match(self):
-        computed_etag = '"xyzzy1"'
-        etags = '"xyzzy1", "xyzzy2"'
-        self._test_etag(computed_etag, etags, 304)
-
-    def test_strong_etag_not_match(self):
-        computed_etag = '"xyzzy"'
-        etags = '"xyzzy1"'
-        self._test_etag(computed_etag, etags, 200)
-
-    def test_multiple_strong_etag_not_match(self):
-        computed_etag = '"xyzzy"'
-        etags = '"xyzzy1", "xyzzy2"'
-        self._test_etag(computed_etag, etags, 200)
-
-    def test_weak_etag_match(self):
-        computed_etag = '"xyzzy1"'
-        etags = 'W/"xyzzy1"'
-        self._test_etag(computed_etag, etags, 304)
-
-    def test_multiple_weak_etag_match(self):
-        computed_etag = '"xyzzy2"'
-        etags = 'W/"xyzzy1", W/"xyzzy2"'
-        self._test_etag(computed_etag, etags, 304)
-
-    def test_weak_etag_not_match(self):
-        computed_etag = '"xyzzy2"'
-        etags = 'W/"xyzzy1"'
-        self._test_etag(computed_etag, etags, 200)
-
-    def test_multiple_weak_etag_not_match(self):
-        computed_etag = '"xyzzy3"'
-        etags = 'W/"xyzzy1", W/"xyzzy2"'
-        self._test_etag(computed_etag, etags, 200)
-
-    def _test_etag(self, computed_etag, etags, status_code):
-        response = self.fetch(
-            '/etag/' + computed_etag,
-            headers={'If-None-Match': etags}
-        )
-        self.assertEqual(response.code, status_code)
-
-
-@wsgi_safe
-class RequestSummaryTest(SimpleHandlerTestCase):
-    class Handler(RequestHandler):
-        def get(self):
-            # remote_ip is optional, although it's set by
-            # both HTTPServer and WSGIAdapter.
-            # Clobber it to make sure it doesn't break logging.
-            self.request.remote_ip = None
-            self.finish(self._request_summary())
-
-    def test_missing_remote_ip(self):
-        resp = self.fetch("/")
-        self.assertEqual(resp.body, b"GET / (None)")
-
-
-class HTTPErrorTest(unittest.TestCase):
-    def test_copy(self):
-        e = HTTPError(403, reason="Go away")
-        e2 = copy.copy(e)
-        self.assertIsNot(e, e2)
-        self.assertEqual(e.status_code, e2.status_code)
-        self.assertEqual(e.reason, e2.reason)
-
-
-class ApplicationTest(AsyncTestCase):
-    def test_listen(self):
-        app = Application([])
-        server = app.listen(0, address='127.0.0.1')
-        server.stop()
-
-
-class URLSpecReverseTest(unittest.TestCase):
-    def test_reverse(self):
-        self.assertEqual('/favicon.ico', url(r'/favicon\.ico', None).reverse())
-        self.assertEqual('/favicon.ico', url(r'^/favicon\.ico$', None).reverse())
-
-    def test_non_reversible(self):
-        # URLSpecs are non-reversible if they include non-constant
-        # regex features outside capturing groups. Currently, this is
-        # only strictly enforced for backslash-escaped character
-        # classes.
-        paths = [
-            r'^/api/v\d+/foo/(\w+)$',
-        ]
-        for path in paths:
-            # A URLSpec can still be created even if it cannot be reversed.
-            url_spec = url(path, None)
-            try:
-                result = url_spec.reverse()
-                self.fail("did not get expected exception when reversing %s. "
-                          "result: %s" % (path, result))
-            except ValueError:
-                pass
-
-    def test_reverse_arguments(self):
-        self.assertEqual('/api/v1/foo/bar',
-                         url(r'^/api/v1/foo/(\w+)$', None).reverse('bar'))
-
-
-class RedirectHandlerTest(WebTestCase):
-    def get_handlers(self):
-        return [
-            ('/src', WebRedirectHandler, {'url': '/dst'}),
-            (r'/(.*?)/(.*?)/(.*)', WebRedirectHandler, {'url': '/{1}/{0}/{2}'})]
-
-    def test_basic_redirect(self):
-        response = self.fetch('/src', follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertEqual(response.headers['Location'], '/dst')
-
-    def test_redirect_pattern(self):
-        response = self.fetch('/a/b/c', follow_redirects=False)
-        self.assertEqual(response.code, 301)
-        self.assertEqual(response.headers['Location'], '/b/a/c')
diff --git a/lib/tornado/test/websocket_test.py b/lib/tornado/test/websocket_test.py
deleted file mode 100644
index d47a74e651e86ab8ecd8ab76d6936c23edd06e05..0000000000000000000000000000000000000000
--- a/lib/tornado/test/websocket_test.py
+++ /dev/null
@@ -1,631 +0,0 @@
-from __future__ import absolute_import, division, print_function
-
-import functools
-import sys
-import traceback
-
-from tornado.concurrent import Future
-from tornado import gen
-from tornado.httpclient import HTTPError, HTTPRequest
-from tornado.log import gen_log, app_log
-from tornado.template import DictLoader
-from tornado.testing import AsyncHTTPTestCase, gen_test, bind_unused_port, ExpectLog
-from tornado.test.util import unittest, skipBefore35, exec_test
-from tornado.web import Application, RequestHandler
-
-try:
-    import tornado.websocket  # noqa
-    from tornado.util import _websocket_mask_python
-except ImportError:
-    # The unittest module presents misleading errors on ImportError
-    # (it acts as if websocket_test could not be found, hiding the underlying
-    # error).  If we get an ImportError here (which could happen due to
-    # TORNADO_EXTENSION=1), print some extra information before failing.
-    traceback.print_exc()
-    raise
-
-from tornado.websocket import WebSocketHandler, websocket_connect, WebSocketError
-
-try:
-    from tornado import speedups
-except ImportError:
-    speedups = None
-
-
-class TestWebSocketHandler(WebSocketHandler):
-    """Base class for testing handlers that exposes the on_close event.
-
-    This allows for deterministic cleanup of the associated socket.
-    """
-    def initialize(self, close_future, compression_options=None):
-        self.close_future = close_future
-        self.compression_options = compression_options
-
-    def get_compression_options(self):
-        return self.compression_options
-
-    def on_close(self):
-        self.close_future.set_result((self.close_code, self.close_reason))
-
-
-class EchoHandler(TestWebSocketHandler):
-    def on_message(self, message):
-        self.write_message(message, isinstance(message, bytes))
-
-
-class ErrorInOnMessageHandler(TestWebSocketHandler):
-    def on_message(self, message):
-        1 / 0
-
-
-class HeaderHandler(TestWebSocketHandler):
-    def open(self):
-        methods_to_test = [
-            functools.partial(self.write, 'This should not work'),
-            functools.partial(self.redirect, 'http://localhost/elsewhere'),
-            functools.partial(self.set_header, 'X-Test', ''),
-            functools.partial(self.set_cookie, 'Chocolate', 'Chip'),
-            functools.partial(self.set_status, 503),
-            self.flush,
-            self.finish,
-        ]
-        for method in methods_to_test:
-            try:
-                # In a websocket context, many RequestHandler methods
-                # raise RuntimeErrors.
-                method()
-                raise Exception("did not get expected exception")
-            except RuntimeError:
-                pass
-        self.write_message(self.request.headers.get('X-Test', ''))
-
-
-class HeaderEchoHandler(TestWebSocketHandler):
-    def set_default_headers(self):
-        self.set_header("X-Extra-Response-Header", "Extra-Response-Value")
-
-    def prepare(self):
-        for k, v in self.request.headers.get_all():
-            if k.lower().startswith('x-test'):
-                self.set_header(k, v)
-
-
-class NonWebSocketHandler(RequestHandler):
-    def get(self):
-        self.write('ok')
-
-
-class CloseReasonHandler(TestWebSocketHandler):
-    def open(self):
-        self.on_close_called = False
-        self.close(1001, "goodbye")
-
-
-class AsyncPrepareHandler(TestWebSocketHandler):
-    @gen.coroutine
-    def prepare(self):
-        yield gen.moment
-
-    def on_message(self, message):
-        self.write_message(message)
-
-
-class PathArgsHandler(TestWebSocketHandler):
-    def open(self, arg):
-        self.write_message(arg)
-
-
-class CoroutineOnMessageHandler(TestWebSocketHandler):
-    def initialize(self, close_future, compression_options=None):
-        super(CoroutineOnMessageHandler, self).initialize(close_future,
-                                                          compression_options)
-        self.sleeping = 0
-
-    @gen.coroutine
-    def on_message(self, message):
-        if self.sleeping > 0:
-            self.write_message('another coroutine is already sleeping')
-        self.sleeping += 1
-        yield gen.sleep(0.01)
-        self.sleeping -= 1
-        self.write_message(message)
-
-
-class RenderMessageHandler(TestWebSocketHandler):
-    def on_message(self, message):
-        self.write_message(self.render_string('message.html', message=message))
-
-
-class WebSocketBaseTestCase(AsyncHTTPTestCase):
-    @gen.coroutine
-    def ws_connect(self, path, **kwargs):
-        ws = yield websocket_connect(
-            'ws://127.0.0.1:%d%s' % (self.get_http_port(), path),
-            **kwargs)
-        raise gen.Return(ws)
-
-    @gen.coroutine
-    def close(self, ws):
-        """Close a websocket connection and wait for the server side.
-
-        If we don't wait here, there are sometimes leak warnings in the
-        tests.
-        """
-        ws.close()
-        yield self.close_future
-
-
-class WebSocketTest(WebSocketBaseTestCase):
-    def get_app(self):
-        self.close_future = Future()
-        return Application([
-            ('/echo', EchoHandler, dict(close_future=self.close_future)),
-            ('/non_ws', NonWebSocketHandler),
-            ('/header', HeaderHandler, dict(close_future=self.close_future)),
-            ('/header_echo', HeaderEchoHandler,
-             dict(close_future=self.close_future)),
-            ('/close_reason', CloseReasonHandler,
-             dict(close_future=self.close_future)),
-            ('/error_in_on_message', ErrorInOnMessageHandler,
-             dict(close_future=self.close_future)),
-            ('/async_prepare', AsyncPrepareHandler,
-             dict(close_future=self.close_future)),
-            ('/path_args/(.*)', PathArgsHandler,
-             dict(close_future=self.close_future)),
-            ('/coroutine', CoroutineOnMessageHandler,
-             dict(close_future=self.close_future)),
-            ('/render', RenderMessageHandler,
-             dict(close_future=self.close_future)),
-        ], template_loader=DictLoader({
-            'message.html': '<b>{{ message }}</b>',
-        }))
-
-    def tearDown(self):
-        super(WebSocketTest, self).tearDown()
-        RequestHandler._template_loaders.clear()
-
-    def test_http_request(self):
-        # WS server, HTTP client.
-        response = self.fetch('/echo')
-        self.assertEqual(response.code, 400)
-
-    def test_bad_websocket_version(self):
-        response = self.fetch('/echo',
-                              headers={'Connection': 'Upgrade',
-                                       'Upgrade': 'WebSocket',
-                                       'Sec-WebSocket-Version': '12'})
-        self.assertEqual(response.code, 426)
-
-    @gen_test
-    def test_websocket_gen(self):
-        ws = yield self.ws_connect('/echo')
-        yield ws.write_message('hello')
-        response = yield ws.read_message()
-        self.assertEqual(response, 'hello')
-        yield self.close(ws)
-
-    def test_websocket_callbacks(self):
-        websocket_connect(
-            'ws://127.0.0.1:%d/echo' % self.get_http_port(),
-            io_loop=self.io_loop, callback=self.stop)
-        ws = self.wait().result()
-        ws.write_message('hello')
-        ws.read_message(self.stop)
-        response = self.wait().result()
-        self.assertEqual(response, 'hello')
-        self.close_future.add_done_callback(lambda f: self.stop())
-        ws.close()
-        self.wait()
-
-    @gen_test
-    def test_binary_message(self):
-        ws = yield self.ws_connect('/echo')
-        ws.write_message(b'hello \xe9', binary=True)
-        response = yield ws.read_message()
-        self.assertEqual(response, b'hello \xe9')
-        yield self.close(ws)
-
-    @gen_test
-    def test_unicode_message(self):
-        ws = yield self.ws_connect('/echo')
-        ws.write_message(u'hello \u00e9')
-        response = yield ws.read_message()
-        self.assertEqual(response, u'hello \u00e9')
-        yield self.close(ws)
-
-    @gen_test
-    def test_render_message(self):
-        ws = yield self.ws_connect('/render')
-        ws.write_message('hello')
-        response = yield ws.read_message()
-        self.assertEqual(response, '<b>hello</b>')
-        yield self.close(ws)
-
-    @gen_test
-    def test_error_in_on_message(self):
-        ws = yield self.ws_connect('/error_in_on_message')
-        ws.write_message('hello')
-        with ExpectLog(app_log, "Uncaught exception"):
-            response = yield ws.read_message()
-        self.assertIs(response, None)
-        yield self.close(ws)
-
-    @gen_test
-    def test_websocket_http_fail(self):
-        with self.assertRaises(HTTPError) as cm:
-            yield self.ws_connect('/notfound')
-        self.assertEqual(cm.exception.code, 404)
-
-    @gen_test
-    def test_websocket_http_success(self):
-        with self.assertRaises(WebSocketError):
-            yield self.ws_connect('/non_ws')
-
-    @gen_test
-    def test_websocket_network_fail(self):
-        sock, port = bind_unused_port()
-        sock.close()
-        with self.assertRaises(IOError):
-            with ExpectLog(gen_log, ".*"):
-                yield websocket_connect(
-                    'ws://127.0.0.1:%d/' % port,
-                    io_loop=self.io_loop,
-                    connect_timeout=3600)
-
-    @gen_test
-    def test_websocket_close_buffered_data(self):
-        ws = yield websocket_connect(
-            'ws://127.0.0.1:%d/echo' % self.get_http_port())
-        ws.write_message('hello')
-        ws.write_message('world')
-        # Close the underlying stream.
-        ws.stream.close()
-        yield self.close_future
-
-    @gen_test
-    def test_websocket_headers(self):
-        # Ensure that arbitrary headers can be passed through websocket_connect.
-        ws = yield websocket_connect(
-            HTTPRequest('ws://127.0.0.1:%d/header' % self.get_http_port(),
-                        headers={'X-Test': 'hello'}))
-        response = yield ws.read_message()
-        self.assertEqual(response, 'hello')
-        yield self.close(ws)
-
-    @gen_test
-    def test_websocket_header_echo(self):
-        # Ensure that headers can be returned in the response.
-        # Specifically, that arbitrary headers passed through websocket_connect
-        # can be returned.
-        ws = yield websocket_connect(
-            HTTPRequest('ws://127.0.0.1:%d/header_echo' % self.get_http_port(),
-                        headers={'X-Test-Hello': 'hello'}))
-        self.assertEqual(ws.headers.get('X-Test-Hello'), 'hello')
-        self.assertEqual(ws.headers.get('X-Extra-Response-Header'), 'Extra-Response-Value')
-        yield self.close(ws)
-
-    @gen_test
-    def test_server_close_reason(self):
-        ws = yield self.ws_connect('/close_reason')
-        msg = yield ws.read_message()
-        # A message of None means the other side closed the connection.
-        self.assertIs(msg, None)
-        self.assertEqual(ws.close_code, 1001)
-        self.assertEqual(ws.close_reason, "goodbye")
-        # The on_close callback is called no matter which side closed.
-        code, reason = yield self.close_future
-        # The client echoed the close code it received to the server,
-        # so the server's close code (returned via close_future) is
-        # the same.
-        self.assertEqual(code, 1001)
-
-    @gen_test
-    def test_client_close_reason(self):
-        ws = yield self.ws_connect('/echo')
-        ws.close(1001, 'goodbye')
-        code, reason = yield self.close_future
-        self.assertEqual(code, 1001)
-        self.assertEqual(reason, 'goodbye')
-
-    @gen_test
-    def test_async_prepare(self):
-        # Previously, an async prepare method triggered a bug that would
-        # result in a timeout on test shutdown (and a memory leak).
-        ws = yield self.ws_connect('/async_prepare')
-        ws.write_message('hello')
-        res = yield ws.read_message()
-        self.assertEqual(res, 'hello')
-
-    @gen_test
-    def test_path_args(self):
-        ws = yield self.ws_connect('/path_args/hello')
-        res = yield ws.read_message()
-        self.assertEqual(res, 'hello')
-
-    @gen_test
-    def test_coroutine(self):
-        ws = yield self.ws_connect('/coroutine')
-        # Send both messages immediately; the coroutine must process them one at a time.
-        yield ws.write_message('hello1')
-        yield ws.write_message('hello2')
-        res = yield ws.read_message()
-        self.assertEqual(res, 'hello1')
-        res = yield ws.read_message()
-        self.assertEqual(res, 'hello2')
-
-    @gen_test
-    def test_check_origin_valid_no_path(self):
-        port = self.get_http_port()
-
-        url = 'ws://127.0.0.1:%d/echo' % port
-        headers = {'Origin': 'http://127.0.0.1:%d' % port}
-
-        ws = yield websocket_connect(HTTPRequest(url, headers=headers),
-                                     io_loop=self.io_loop)
-        ws.write_message('hello')
-        response = yield ws.read_message()
-        self.assertEqual(response, 'hello')
-        yield self.close(ws)
-
-    @gen_test
-    def test_check_origin_valid_with_path(self):
-        port = self.get_http_port()
-
-        url = 'ws://127.0.0.1:%d/echo' % port
-        headers = {'Origin': 'http://127.0.0.1:%d/something' % port}
-
-        ws = yield websocket_connect(HTTPRequest(url, headers=headers),
-                                     io_loop=self.io_loop)
-        ws.write_message('hello')
-        response = yield ws.read_message()
-        self.assertEqual(response, 'hello')
-        yield self.close(ws)
-
-    @gen_test
-    def test_check_origin_invalid_partial_url(self):
-        port = self.get_http_port()
-
-        url = 'ws://127.0.0.1:%d/echo' % port
-        headers = {'Origin': '127.0.0.1:%d' % port}
-
-        with self.assertRaises(HTTPError) as cm:
-            yield websocket_connect(HTTPRequest(url, headers=headers),
-                                    io_loop=self.io_loop)
-        self.assertEqual(cm.exception.code, 403)
-
-    @gen_test
-    def test_check_origin_invalid(self):
-        port = self.get_http_port()
-
-        url = 'ws://127.0.0.1:%d/echo' % port
-        # Host is 127.0.0.1, which should not be accessible from some other
-        # domain
-        headers = {'Origin': 'http://somewhereelse.com'}
-
-        with self.assertRaises(HTTPError) as cm:
-            yield websocket_connect(HTTPRequest(url, headers=headers),
-                                    io_loop=self.io_loop)
-
-        self.assertEqual(cm.exception.code, 403)
-
-    @gen_test
-    def test_check_origin_invalid_subdomains(self):
-        port = self.get_http_port()
-
-        url = 'ws://localhost:%d/echo' % port
-        # Subdomains should be disallowed by default.  If we could pass a
-        # resolver to websocket_connect we could test sibling domains as well.
-        headers = {'Origin': 'http://subtenant.localhost'}
-
-        with self.assertRaises(HTTPError) as cm:
-            yield websocket_connect(HTTPRequest(url, headers=headers),
-                                    io_loop=self.io_loop)
-
-        self.assertEqual(cm.exception.code, 403)
-
-
-if sys.version_info >= (3, 5):
-    NativeCoroutineOnMessageHandler = exec_test(globals(), locals(), """
-class NativeCoroutineOnMessageHandler(TestWebSocketHandler):
-    def initialize(self, close_future, compression_options=None):
-        super().initialize(close_future, compression_options)
-        self.sleeping = 0
-
-    async def on_message(self, message):
-        if self.sleeping > 0:
-            self.write_message('another coroutine is already sleeping')
-        self.sleeping += 1
-        await gen.sleep(0.01)
-        self.sleeping -= 1
-        self.write_message(message)""")['NativeCoroutineOnMessageHandler']
-
-
-class WebSocketNativeCoroutineTest(WebSocketBaseTestCase):
-    def get_app(self):
-        self.close_future = Future()
-        return Application([
-            ('/native', NativeCoroutineOnMessageHandler,
-             dict(close_future=self.close_future))])
-
-    @skipBefore35
-    @gen_test
-    def test_native_coroutine(self):
-        ws = yield self.ws_connect('/native')
-        # Send both messages immediately; the coroutine must process them one at a time.
-        yield ws.write_message('hello1')
-        yield ws.write_message('hello2')
-        res = yield ws.read_message()
-        self.assertEqual(res, 'hello1')
-        res = yield ws.read_message()
-        self.assertEqual(res, 'hello2')
-
-
-class CompressionTestMixin(object):
-    MESSAGE = 'Hello world. Testing 123 123'
-
-    def get_app(self):
-        self.close_future = Future()
-        return Application([
-            ('/echo', EchoHandler, dict(
-                close_future=self.close_future,
-                compression_options=self.get_server_compression_options())),
-        ])
-
-    def get_server_compression_options(self):
-        return None
-
-    def get_client_compression_options(self):
-        return None
-
-    @gen_test
-    def test_message_sizes(self):
-        ws = yield self.ws_connect(
-            '/echo',
-            compression_options=self.get_client_compression_options())
-        # Send the same message three times so we can measure the
-        # effect of the context_takeover options.
-        for i in range(3):
-            ws.write_message(self.MESSAGE)
-            response = yield ws.read_message()
-            self.assertEqual(response, self.MESSAGE)
-        self.assertEqual(ws.protocol._message_bytes_out, len(self.MESSAGE) * 3)
-        self.assertEqual(ws.protocol._message_bytes_in, len(self.MESSAGE) * 3)
-        self.verify_wire_bytes(ws.protocol._wire_bytes_in,
-                               ws.protocol._wire_bytes_out)
-        yield self.close(ws)
-
-
-class UncompressedTestMixin(CompressionTestMixin):
-    """Specialization of CompressionTestMixin when we expect no compression."""
-    def verify_wire_bytes(self, bytes_in, bytes_out):
-        # Bytes out includes the 4-byte mask key per message.
-        self.assertEqual(bytes_out, 3 * (len(self.MESSAGE) + 6))
-        self.assertEqual(bytes_in, 3 * (len(self.MESSAGE) + 2))
-
-
-class NoCompressionTest(UncompressedTestMixin, WebSocketBaseTestCase):
-    pass
-
-
-# If only one side tries to compress, the extension is not negotiated.
-class ServerOnlyCompressionTest(UncompressedTestMixin, WebSocketBaseTestCase):
-    def get_server_compression_options(self):
-        return {}
-
-
-class ClientOnlyCompressionTest(UncompressedTestMixin, WebSocketBaseTestCase):
-    def get_client_compression_options(self):
-        return {}
-
-
-class DefaultCompressionTest(CompressionTestMixin, WebSocketBaseTestCase):
-    def get_server_compression_options(self):
-        return {}
-
-    def get_client_compression_options(self):
-        return {}
-
-    def verify_wire_bytes(self, bytes_in, bytes_out):
-        self.assertLess(bytes_out, 3 * (len(self.MESSAGE) + 6))
-        self.assertLess(bytes_in, 3 * (len(self.MESSAGE) + 2))
-        # Bytes out includes the 4-byte mask key per message.
-        self.assertEqual(bytes_out, bytes_in + 12)
-
-
-class MaskFunctionMixin(object):
-    # Subclasses should define self.mask(mask, data)
-    def test_mask(self):
-        self.assertEqual(self.mask(b'abcd', b''), b'')
-        self.assertEqual(self.mask(b'abcd', b'b'), b'\x03')
-        self.assertEqual(self.mask(b'abcd', b'54321'), b'TVPVP')
-        self.assertEqual(self.mask(b'ZXCV', b'98765432'), b'c`t`olpd')
-        # Include test cases with \x00 bytes (to ensure that the C
-        # extension isn't depending on null-terminated strings) and
-        # bytes with the high bit set (to smoke out signedness issues).
-        self.assertEqual(self.mask(b'\x00\x01\x02\x03',
-                                   b'\xff\xfb\xfd\xfc\xfe\xfa'),
-                         b'\xff\xfa\xff\xff\xfe\xfb')
-        self.assertEqual(self.mask(b'\xff\xfb\xfd\xfc',
-                                   b'\x00\x01\x02\x03\x04\x05'),
-                         b'\xff\xfa\xff\xff\xfb\xfe')
-
-
-class PythonMaskFunctionTest(MaskFunctionMixin, unittest.TestCase):
-    def mask(self, mask, data):
-        return _websocket_mask_python(mask, data)
-
-
-@unittest.skipIf(speedups is None, "tornado.speedups module not present")
-class CythonMaskFunctionTest(MaskFunctionMixin, unittest.TestCase):
-    def mask(self, mask, data):
-        return speedups.websocket_mask(mask, data)
-
-
-class ServerPeriodicPingTest(WebSocketBaseTestCase):
-    def get_app(self):
-        class PingHandler(TestWebSocketHandler):
-            def on_pong(self, data):
-                self.write_message("got pong")
-
-        self.close_future = Future()
-        return Application([
-            ('/', PingHandler, dict(close_future=self.close_future)),
-        ], websocket_ping_interval=0.01)
-
-    @gen_test
-    def test_server_ping(self):
-        ws = yield self.ws_connect('/')
-        for i in range(3):
-            response = yield ws.read_message()
-            self.assertEqual(response, "got pong")
-        yield self.close(ws)
-        # TODO: test that the connection gets closed if ping responses stop.
-
-
-class ClientPeriodicPingTest(WebSocketBaseTestCase):
-    def get_app(self):
-        class PingHandler(TestWebSocketHandler):
-            def on_ping(self, data):
-                self.write_message("got ping")
-
-        self.close_future = Future()
-        return Application([
-            ('/', PingHandler, dict(close_future=self.close_future)),
-        ])
-
-    @gen_test
-    def test_client_ping(self):
-        ws = yield self.ws_connect('/', ping_interval=0.01)
-        for i in range(3):
-            response = yield ws.read_message()
-            self.assertEqual(response, "got ping")
-        yield self.close(ws)
-        # TODO: test that the connection gets closed if ping responses stop.
-
-
-class MaxMessageSizeTest(WebSocketBaseTestCase):
-    def get_app(self):
-        self.close_future = Future()
-        return Application([
-            ('/', EchoHandler, dict(close_future=self.close_future)),
-        ], websocket_max_message_size=1024)
-
-    @gen_test
-    def test_large_message(self):
-        ws = yield self.ws_connect('/')
-
-        # Write a message that is allowed.
-        msg = 'a' * 1024
-        ws.write_message(msg)
-        resp = yield ws.read_message()
-        self.assertEqual(resp, msg)
-
-        # Write a message that is too large.
-        ws.write_message(msg + 'b')
-        resp = yield ws.read_message()
-        # A message of None means the other side closed the connection.
-        self.assertIs(resp, None)
-        self.assertEqual(ws.close_code, 1009)
-        self.assertEqual(ws.close_reason, "message too big")
-        # TODO: Needs tests of messages split over multiple
-        # continuation frames.
diff --git a/lib/tornado/test/windows_test.py b/lib/tornado/test/windows_test.py
deleted file mode 100644
index e5cb33813909def00d43621ffed025a8022be884..0000000000000000000000000000000000000000
--- a/lib/tornado/test/windows_test.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from __future__ import absolute_import, division, print_function
-import functools
-import os
-import socket
-import unittest
-
-from tornado.platform.auto import set_close_exec
-
-skipIfNonWindows = unittest.skipIf(os.name != 'nt', 'non-windows platform')
-
-
-@skipIfNonWindows
-class WindowsTest(unittest.TestCase):
-    def test_set_close_exec(self):
-        # set_close_exec works with sockets.
-        s = socket.socket()
-        self.addCleanup(s.close)
-        set_close_exec(s.fileno())
-
-        # But it doesn't work with pipes.
-        r, w = os.pipe()
-        self.addCleanup(functools.partial(os.close, r))
-        self.addCleanup(functools.partial(os.close, w))
-        with self.assertRaises(WindowsError) as cm:
-            set_close_exec(r)
-        ERROR_INVALID_HANDLE = 6
-        self.assertEqual(cm.exception.winerror, ERROR_INVALID_HANDLE)
diff --git a/lib/tornado/test/wsgi_test.py b/lib/tornado/test/wsgi_test.py
deleted file mode 100644
index e6ccc82ae0c8c63d4c04d8966f7375f87e5d362c..0000000000000000000000000000000000000000
--- a/lib/tornado/test/wsgi_test.py
+++ /dev/null
@@ -1,103 +0,0 @@
-from __future__ import absolute_import, division, print_function
-from wsgiref.validate import validator
-
-from tornado.escape import json_decode
-from tornado.test.httpserver_test import TypeCheckHandler
-from tornado.testing import AsyncHTTPTestCase
-from tornado.web import RequestHandler, Application
-from tornado.wsgi import WSGIApplication, WSGIContainer, WSGIAdapter
-
-from tornado.test import httpserver_test
-from tornado.test import web_test
-
-
-class WSGIContainerTest(AsyncHTTPTestCase):
-    def wsgi_app(self, environ, start_response):
-        status = "200 OK"
-        response_headers = [("Content-Type", "text/plain")]
-        start_response(status, response_headers)
-        return [b"Hello world!"]
-
-    def get_app(self):
-        return WSGIContainer(validator(self.wsgi_app))
-
-    def test_simple(self):
-        response = self.fetch("/")
-        self.assertEqual(response.body, b"Hello world!")
-
-
-class WSGIApplicationTest(AsyncHTTPTestCase):
-    def get_app(self):
-        class HelloHandler(RequestHandler):
-            def get(self):
-                self.write("Hello world!")
-
-        class PathQuotingHandler(RequestHandler):
-            def get(self, path):
-                self.write(path)
-
-        # It would be better to run the wsgiref server implementation in
-        # another thread instead of using our own WSGIContainer, but this
-        # fits better in our async testing framework and the wsgiref
-        # validator should keep us honest
-        return WSGIContainer(validator(WSGIApplication([
-            ("/", HelloHandler),
-            ("/path/(.*)", PathQuotingHandler),
-            ("/typecheck", TypeCheckHandler),
-        ])))
-
-    def test_simple(self):
-        response = self.fetch("/")
-        self.assertEqual(response.body, b"Hello world!")
-
-    def test_path_quoting(self):
-        response = self.fetch("/path/foo%20bar%C3%A9")
-        self.assertEqual(response.body, u"foo bar\u00e9".encode("utf-8"))
-
-    def test_types(self):
-        headers = {"Cookie": "foo=bar"}
-        response = self.fetch("/typecheck?foo=bar", headers=headers)
-        data = json_decode(response.body)
-        self.assertEqual(data, {})
-
-        response = self.fetch("/typecheck", method="POST", body="foo=bar", headers=headers)
-        data = json_decode(response.body)
-        self.assertEqual(data, {})
-
-
-# This is kind of hacky, but run some of the HTTPServer and web tests
-# through WSGIContainer and WSGIApplication to make sure everything
-# survives repeated disassembly and reassembly.
-class WSGIConnectionTest(httpserver_test.HTTPConnectionTest):
-    def get_app(self):
-        return WSGIContainer(validator(WSGIApplication(self.get_handlers())))
-
-
-def wrap_web_tests_application():
-    result = {}
-    for cls in web_test.wsgi_safe_tests:
-        class WSGIApplicationWrappedTest(cls):  # type: ignore
-            def get_app(self):
-                self.app = WSGIApplication(self.get_handlers(),
-                                           **self.get_app_kwargs())
-                return WSGIContainer(validator(self.app))
-        result["WSGIApplication_" + cls.__name__] = WSGIApplicationWrappedTest
-    return result
-
-
-globals().update(wrap_web_tests_application())
-
-
-def wrap_web_tests_adapter():
-    result = {}
-    for cls in web_test.wsgi_safe_tests:
-        class WSGIAdapterWrappedTest(cls):  # type: ignore
-            def get_app(self):
-                self.app = Application(self.get_handlers(),
-                                       **self.get_app_kwargs())
-                return WSGIContainer(validator(WSGIAdapter(self.app)))
-        result["WSGIAdapter_" + cls.__name__] = WSGIAdapterWrappedTest
-    return result
-
-
-globals().update(wrap_web_tests_adapter())
diff --git a/lib/twilio/version.py b/lib/twilio/version.py
index fe171cc661466c82d206ffda37e0982ce68e3cc9..ade110049bfb98df1f1a2d69fc27a03690c67020 100644
--- a/lib/twilio/version.py
+++ b/lib/twilio/version.py
@@ -1,2 +1,2 @@
-__version_info__ = ('5', '6', '0')
+__version_info__ = ('5', '7', '0')
 __version__ = '.'.join(__version_info__)
diff --git a/lib/unidecode/x005.py b/lib/unidecode/x005.py
index 2913ffff0bca1f2d153433ded888b1e5e24cec44..85d6abbca0598c09cfdab4c76d5218f213f80189 100644
--- a/lib/unidecode/x005.py
+++ b/lib/unidecode/x005.py
@@ -189,7 +189,7 @@ data = (
 'u',    # 0xbb
 '\'',    # 0xbc
 '',    # 0xbd
-'',    # 0xbe
+'-',    # 0xbe
 '',    # 0xbf
 '|',    # 0xc0
 '',    # 0xc1
diff --git a/lib/unidecode/x021.py b/lib/unidecode/x021.py
index cc74bc65fae7b6fc77a1d8868cf4b8adb9ececa0..29f05fd4b9516802f4a65bc16c9c56a1c063fe34 100644
--- a/lib/unidecode/x021.py
+++ b/lib/unidecode/x021.py
@@ -21,7 +21,7 @@ data = (
 'l',    # 0x13
 '',    # 0x14
 'N',    # 0x15
-'',    # 0x16
+'No. ',    # 0x16
 '',    # 0x17
 '',    # 0x18
 'P',    # 0x19
diff --git a/requirements/readme.md b/requirements/readme.md
index 9d2dcb448a8252d9ba52c5db75a17b98d64ad86a..a2cf5cfa6cfe7885412cac45b2e7428553471f40 100644
--- a/requirements/readme.md
+++ b/requirements/readme.md
@@ -5,28 +5,28 @@ List of dependencies [![Requirements Status](https://requires.io/github/SickRage
 :------: | :-------: | :----------------: | -----
 :exclamation: | adba | ??? | **Modified**<br>not on PYPI - [GH:lad1337/adba](https://github.com/lad1337/adba)
 :ok: | babelfish | 0.5.5 | Resolved by [#3877](https://github.com/SickRage/SickRage/pull/3877)
-:ok: | backports_abc | 0.5 | 
-:ok: | backports.ssl-match-hostname | 3.5.0.1 | 
-:ok: | beautifulsoup4 | 4.5.3 | 
+:ok: | backports_abc | 0.5
+:ok: | backports.ssl-match-hostname | 3.5.0.1
+:ok: | beautifulsoup4 | 4.5.3
 :ok: | bencode | 1.0 | Resolved by [#3858](https://github.com/SickRage/SickRage/pull/3858) + [#3871](https://github.com/SickRage/SickRage/pull/3871)<br>A newer version (fork) is available: [GH:fuzeman/bencode.py](https://github.com/fuzeman/bencode.py)
-:ok: | cachecontrol | 0.11.5 | 
+:ok: | cachecontrol | 0.11.5
 :warning: | certgen.py | [d52975c](https://github.com/pyca/pyopenssl/blob/d52975cef3a36e18552aeb23de7c06aa73d76454/examples/certgen.py) | Source: [GH:pyca/pyopenssl](https://github.com/pyca/pyopenssl/blob/master/examples/certgen.py)
 :ok: | certifi | 2017.4.17
 :ok: | cfscrape | 1.7.1 | Note: Can't upgrade to latest version<br>because Node.js is now required.
 :ok: | chardet | 3.0.4 | Resolved by [#3870](https://github.com/SickRage/SickRage/pull/3870)
 :ok: | configobj | 4.6.0
 :ok: | decorator | 4.0.10
-:warning: | dogpile.cache | [229615b](https://bitbucket.org/zzzeek/dogpile.cache/src/229615be466d00c9c135a90d8965679ab3e4edaa/dogpile/)  | Bitbucket
+:warning: | dogpile.cache | [229615b](https://bitbucket.org/zzzeek/dogpile.cache/src/229615be466d00c9c135a90d8965679ab3e4edaa/dogpile/) | Bitbucket
 :ok: | dogpile.core | 0.4.1
 :ok: | enum34 | 1.0.4
-:warning: | enzyme | [9572bea](https://github.com/Diaoul/enzyme/tree/9572bea606a6145dad153cd712653d6cf10ef18e)
-:ok: | fake-useragent | 0.1.2  | Note: There's a `ua.json` file that's used by `sickbeard.common`,<br>should be moved to a better location.
+:ok: | enzyme | 0.4.1
+:ok: | fake-useragent | 0.1.2 | Note: There's a `ua.json` file that's used by `sickbeard.common`,<br>should be moved to a better location.
 :warning: | feedparser | [f1dd1bb](https://github.com/kurtmckee/feedparser/tree/f1dd1bb923ebfe6482fc2521c1f150b4032289ec) | Vanilla'd by [#3877](https://github.com/SickRage/SickRage/pull/3877)
-:warning: | futures | [43bfc41](https://github.com/agronholm/pythonfutures/tree/43bfc41626208d78f4db1839e2808772defdfdca)
+:ok: | futures | 3.1.1
 :warning: | guessit | [a4fb286](https://github.com/guessit-io/guessit/tree/a4fb2865d4b697397aa976388bbd0edf558a24fb)
-:warning: | hachoir_core | [708fdf6](https://bitbucket.org/haypo/hachoir/src/708fdf64a982ba2e638aa59d94f143112066b8ce/hachoir-core/hachoir_core/)  | Bitbucket
-:warning: | hachoir_metadata | [708fdf6](https://bitbucket.org/haypo/hachoir/src/708fdf64a982ba2e638aa59d94f143112066b8ce/hachoir-metadata/hachoir_metadata/)  | Bitbucket
-:warning: | hachoir_parser | [708fdf6](https://bitbucket.org/haypo/hachoir/src/708fdf64a982ba2e638aa59d94f143112066b8ce/hachoir-parser/hachoir_parser/)  | Bitbucket
+:warning: | hachoir_core | [708fdf6](https://bitbucket.org/haypo/hachoir/src/708fdf64a982ba2e638aa59d94f143112066b8ce/hachoir-core/hachoir_core/) | Bitbucket
+:warning: | hachoir_metadata | [708fdf6](https://bitbucket.org/haypo/hachoir/src/708fdf64a982ba2e638aa59d94f143112066b8ce/hachoir-metadata/hachoir_metadata/) | Bitbucket
+:warning: | hachoir_parser | [708fdf6](https://bitbucket.org/haypo/hachoir/src/708fdf64a982ba2e638aa59d94f143112066b8ce/hachoir-parser/hachoir_parser/) | Bitbucket
 :ok: | html5lib | 1.0b10
 :ok: | httplib2 | 0.9.2 | + tests folder from [cf631a7](https://github.com/httplib2/httplib2/tree/cf631a73e2f3f43897b65206127ced82382d35f5)
 :ok: | idna | 2.5 | Added in [#3870](https://github.com/SickRage/SickRage/pull/3870)
@@ -41,40 +41,40 @@ List of dependencies [![Requirements Status](https://requires.io/github/SickRage
 :ok: | MarkupSafe | 1.0
 :ok: | ndg-httpsclient | 0.3.3
 :ok: | oauthlib | 2.0.2 | Added in [#3870](https://github.com/SickRage/SickRage/pull/3870)
-:warning: | pgi | [38f8349](https://github.com/pygobject/pgi/tree/38f834902247a5851cb4c72ba018f160ae26d612) | 
-:exclamation: | pkg_resources.py | - | Copied from setuptools and looks to be modified.<br>Maybe we don't really need this?<br>Used to load the egg files for `pymediainfo` and `pytz`.
+:warning: | pgi | [38f8349](https://github.com/pygobject/pgi/tree/38f834902247a5851cb4c72ba018f160ae26d612)
+:exclamation: | pkg_resources.py | - | Copied from setuptools and looks to be modified.<br>Used to load `pymediainfo*.egg` and `pytz*.egg` as packages.<br>Also explicitly used by: `babelfish`, `dogpile.cache`/`dogpile.util`, `enzyme`, `guessit`, `Mako`, `pymediainfo`, `pytz`, `stevedore`, `subliminal`.
 :ok: | profilehooks | 1.5
 :ok: | putio.py | 6.1.0
 :ok: | pyasn1 | 0.1.7 | + LICENSE
 :ok: | PyGithub | 1.34 | Resolved by [#3877](https://github.com/SickRage/SickRage/pull/3877)
 :ok: | PyJWT | 1.5.0 | Added in [#3877](https://github.com/SickRage/SickRage/pull/3877)
-:ok: | pymediainfo | 2.0  | as an `.egg` file, loaded by `pkg_resources`
+:ok: | pymediainfo | 2.0 | as an `.egg` file, loaded by `pkg_resources`
 :ok: | pynma | 1.0
 :ok: | PySocks | 1.6.7 | Added in [#3877](https://github.com/SickRage/SickRage/pull/3877)
-:warning: | pysrt | [47aaa59](https://github.com/byroot/pysrt/tree/47aaa592c3bc185cd2bc1d58d1451bf98be3c1ef)
+:ok: | pysrt | 1.1.1
 :ok: | python-dateutil | 2.6.0 | Resolved by [#3877](https://github.com/SickRage/SickRage/pull/3877)
 :exclamation: | python-fanart | 1.4.0 | **Modified**<br>API url was updated. No newer version.
 :ok: | python-twitter | 3.3 | Updated in [#3870](https://github.com/SickRage/SickRage/pull/3870)
-:ok: | pytz | 2016.4  | as an `.egg` file, loaded by `pkg_resources`
+:ok: | pytz | 2016.4 | as an `.egg` file, loaded by `pkg_resources`
 :exclamation: | rarfile | [3e54b22](https://github.com/markokr/rarfile/tree/3e54b222c8703eea64cd07102df7bb9408b582b3) | *v3.0 Github release*<br>**Modified**<br>See [`059dd933#diff-c1f4e96`](https://github.com/SickRage/SickRage/commit/059dd933b9da3a0f83c6cbb4f47c198e5a957fc6#diff-c1f4e968aa545d42d2e462672169da4a)
 :warning: | rebulk | [42d0a58](https://github.com/Toilal/rebulk/tree/42d0a58af9d793334616a6582f2a83b0fae0dd5f)
 :ok: | requests | 2.18.1 | Updated in [#3870](https://github.com/SickRage/SickRage/pull/3870)
 :ok: | requests-oauthlib | 0.8.0 | Added in [#3870](https://github.com/SickRage/SickRage/pull/3870)
-:exclamation: | rtorrent-python | 0.2.9  | **Modified**<br>See [commits log for `lib/rtorrent`](https://github.com/SickRage/SickRage/commits/master/lib/rtorrent)
-:exclamation: | send2trash | 1.3.0  | **Modified**<br>See [`9ad8114`](https://github.com/SickRage/SickRage/commit/9ad811432ab0ca3292410d29464ce2532361eb55)
+:exclamation: | rtorrent-python | 0.2.9 | **Modified**<br>See [commits log for `lib/rtorrent`](https://github.com/SickRage/SickRage/commits/master/lib/rtorrent)
+:exclamation: | send2trash | 1.3.0 | **Modified**<br>See [`9ad8114`](https://github.com/SickRage/SickRage/commit/9ad811432ab0ca3292410d29464ce2532361eb55)
 :ok: | singledispatch | 3.4.0.3
 :ok: | six | 1.10.0
 :warning: | sqlalchemy | [ccc0c44](https://github.com/zzzeek/sqlalchemy/tree/ccc0c44c3a60fc4906e5e3b26cc6d2b7a69d33bf)
 :ok: | stevedore | 1.10.0
 :warning: | subliminal | [7eb7a53](https://github.com/Diaoul/subliminal/tree/7eb7a53fe6bcaf3e01a6b44c8366faf7c96f7f1b) | **Modified**<br>Subscenter provider disabled until fixed upstream, [#3825 `diff-ab7eb9b`](https://github.com/SickRage/SickRage/pull/3825/files#diff-ab7eb9ba0a2d4c74c16795ff40f2bd62)
 :warning: | synchronous-deluge | - | **Custom: by Christian Dale**
-:ok: | tmdbsimple | 0.3.0  | Note: Package naming is modified.
+:ok: | tmdbsimple | 0.3.0 | Note: Package naming is modified.
 :ok: | tornado | 4.5.1 | Note: Contains a `routes.py` file,<br>which is not a part of the original package
 :ok: | tus.py | 1.2.0
-:exclamation: | tvdb_api | 1.9  | **Heavily Modified**<br>Deprecated API, will be disabled by October 1st, 2017
-:warning: | twilio | [f91e1a9](https://github.com/twilio/twilio-python/tree/f91e1a9e6f4e0a60589b2b90cb66b89b879b9c3e)
+:exclamation: | tvdb_api | 1.9 | **Heavily Modified**<br>Deprecated API, will be disabled by October 1st, 2017
+:ok: | twilio | 5.7.0 | Next version is a major release (6.0.0)
 :ok: | tzlocal | 1.4 | Resolved by [#3877](https://github.com/SickRage/SickRage/pull/3877)
-:ok: | Unidecode | 0.04.20 | Updated in [#3877](https://github.com/SickRage/SickRage/pull/3877)
+:ok: | Unidecode | 0.04.21 | Updated in [#3877](https://github.com/SickRage/SickRage/pull/3877)
 :ok: | urllib3 | 1.21.1 | Added in [#3870](https://github.com/SickRage/SickRage/pull/3870)
 :ok: | validators | 0.10
 :ok: | webencodings | 0.5.1
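
The pkg_resources.py row above notes that the bundled `pymediainfo*.egg` and `pytz*.egg` are loaded through pkg_resources rather than installed normally. A rough illustration of that pattern (not SickRage's actual bootstrap code; the lib/ path and egg discovery below are assumptions):

```python
# Rough illustration only -- not SickRage's bootstrap code. One common way to
# make bundled .egg archives importable and visible to pkg_resources; the
# lib/ location here is hypothetical.
import glob
import os
import sys

import pkg_resources

lib_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'lib')
for egg in glob.glob(os.path.join(lib_dir, '*.egg')):
    sys.path.insert(1, egg)                   # zipimport can import from .egg archives
    pkg_resources.working_set.add_entry(egg)  # register the egg's metadata as well

import pytz  # resolved from the bundled egg (assuming one is present in lib/)
```
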
diff --git a/requirements/requirements.txt b/requirements/requirements.txt
index 01ebbfd5894fd0ed0c8fa34332d52931635c4e92..4399505bac758b62647283ef66a7808767519f37 100644
--- a/requirements/requirements.txt
+++ b/requirements/requirements.txt
@@ -1,77 +1,77 @@
-#! adba==???  # <Modified + not on PYPI - https://github.com/lad1337/adba>
-babelfish==0.5.5
-backports_abc==0.5
-backports.ssl-match-hostname==3.5.0.1
-beautifulsoup4==4.5.3
-bencode==1.0  # Made vanilla with https://github.com/SickRage/SickRage/commit/8c4278a52bf30a02914aa85c9b9ba5ad61021bea. A newer version (fork) is available: https://github.com/fuzeman/bencode.py
-cachecontrol==0.11.5
-# certgen.py==d52975cef3a36e18552aeb23de7c06aa73d76454  # Source: https://github.com/pyca/pyopenssl/blob/master/examples/certgen.py
-certifi==2017.4.17
-cfscrape==1.7.1  # rq.filter: <1.8.0
-chardet==3.0.4
-configobj==4.6.0
-decorator==4.0.10
+#! adba == ???  # <Modified + not on PYPI - https://github.com/lad1337/adba>
+babelfish == 0.5.5
+backports_abc == 0.5
+backports.ssl-match-hostname == 3.5.0.1
+beautifulsoup4 == 4.5.3
+bencode == 1.0  # Made vanilla with https://github.com/SickRage/SickRage/commit/8c4278a52bf30a02914aa85c9b9ba5ad61021bea. A newer version (fork) is available: https://github.com/fuzeman/bencode.py
+cachecontrol == 0.11.5
+# certgen.py == d52975cef3a36e18552aeb23de7c06aa73d76454  # Source: https://github.com/pyca/pyopenssl/blob/master/examples/certgen.py
+certifi == 2017.4.17
+cfscrape == 1.7.1  # rq.filter: <1.8.0
+chardet == 3.0.4
+configobj == 4.6.0
+decorator == 4.0.10
 git+https://bitbucket.org/zzzeek/dogpile.cache.git@229615be466d00c9c135a90d8965679ab3e4edaa#egg=dogpile.cache
-dogpile.core==0.4.1
-enum34==1.0.4
-git+https://github.com/Diaoul/enzyme.git@9572bea606a6145dad153cd712653d6cf10ef18e#egg=enzyme
-fake-useragent==0.1.2  # [NOTE] there's a `ua.json` file that's used by sickbeard.common, should be moved to a better location.
+dogpile.core == 0.4.1
+enum34 == 1.0.4
+enzyme == 0.4.1
+fake-useragent == 0.1.2  # [NOTE] there's a `ua.json` file that's used by sickbeard.common, should be moved to a better location.
 git+https://github.com/kurtmckee/feedparser.git@f1dd1bb923ebfe6482fc2521c1f150b4032289ec#egg=feedparser
-git+https://github.com/agronholm/pythonfutures.git@43bfc41626208d78f4db1839e2808772defdfdca#egg=futures
+futures == 3.1.1
 git+https://github.com/guessit-io/guessit.git@a4fb2865d4b697397aa976388bbd0edf558a24fb#egg=guessit
 hg+https://bitbucket.org/haypo/hachoir@708fdf64a982ba2e638aa59d94f143112066b8ce#egg=hachoir-core&subdirectory=hachoir-core
 # hg+https://bitbucket.org/haypo/hachoir@708fdf64a982ba2e638aa59d94f143112066b8ce#egg=hachoir-metadata&subdirectory=hachoir-metadata  # Unable to install
 # hg+https://bitbucket.org/haypo/hachoir@708fdf64a982ba2e638aa59d94f143112066b8ce#egg=hachoir-parser&subdirectory=hachoir-parser  # Unable to install
-html5lib==1.0b10
-httplib2==0.9.2  # + tests folder from cf631a73e2f3f43897b65206127ced82382d35f5
-idna==2.5
-# IMDbPY==5.1.1 --no-deps --global-option="--without-sqlobject" --global-option="--without-sqlalchemy"  # doesn't work because --no-deps isn't supported in reqs file context
+html5lib == 1.0b10
+httplib2 == 0.9.2  # + tests folder from cf631a73e2f3f43897b65206127ced82382d35f5
+idna == 2.5
+# IMDbPY == 5.1.1 --no-deps --global-option="--without-sqlobject" --global-option="--without-sqlalchemy"  # doesn't work because --no-deps isn't supported in reqs file context
 git+https://github.com/PiotrDabkowski/Js2Py.git@05e77f0d4ffe91ef418a93860e666962cfd193b8#egg=js2py
 git+https://github.com/joshmarshall/jsonrpclib.git@e3a3cdedc9577b25b91274815b38ba7f3bc43c68#egg=jsonrpclib
 # libgrowl  # <Custom: by Sick-Beard's midgetspy. Some of the code is from https://github.com/kfdm/gntp>
 # libtrakt  # <Custom> Just a small note - https://github.com/fuzeman/trakt.py is a great implementation of Trakt.tv's API, if needed
-lockfile==0.11.0
-Mako==1.0.6
-markdown2==2.3.4
-MarkupSafe==1.0
-ndg-httpsclient==0.3.3
-oauthlib==2.0.2
-git+https://github.com/pygobject/pgi.git@38f834902247a5851cb4c72ba018f160ae26d612#egg=pgi; platform_system!="Windows"
-#! pkg_resources.py  # Copied from setuptools and looks to be modified. Maybe we don't really need this? Used to load the egg files for pymediainfo and pytz.
-profilehooks==1.5
-putio.py==6.1.0
-pyasn1==0.1.7  # + LICENSE
-PyGithub==1.34
-PyJWT==1.5.0
-pymediainfo==2.0  # as an .egg file, loaded by pkg_resources
-pynma==1.0
-PySocks==1.6.7
-git+https://github.com/byroot/pysrt.git@47aaa592c3bc185cd2bc1d58d1451bf98be3c1ef#egg=pysrt
-python-dateutil==2.6.0
-#! python-fanart==1.4.0  # <Modified: API url was updated. No newer version>
-python-twitter==3.3
-pytz==2016.4  # as an .egg file, loaded by pkg_resources
+lockfile == 0.11.0
+Mako == 1.0.6
+markdown2 == 2.3.4
+MarkupSafe == 1.0
+ndg-httpsclient == 0.3.3
+oauthlib == 2.0.2
+git+https://github.com/pygobject/pgi.git@38f834902247a5851cb4c72ba018f160ae26d612#egg=pgi ; os.name != 'nt'
+#! pkg_resources.py  # Copied from setuptools and looks to be modified. See readme for more info.
+profilehooks == 1.5
+putio.py == 6.1.0
+pyasn1 == 0.1.7  # + LICENSE
+PyGithub == 1.34
+PyJWT == 1.5.0
+pymediainfo == 2.0  # as an .egg file, loaded by pkg_resources
+pynma == 1.0
+PySocks == 1.6.7
+pysrt == 1.1.1
+python-dateutil == 2.6.0
+#! python-fanart == 1.4.0  # <Modified: API url was updated. No newer version>
+python-twitter == 3.3
+pytz == 2016.4  # as an .egg file, loaded by pkg_resources
 #! git+https://github.com/markokr/rarfile.git@3e54b222c8703eea64cd07102df7bb9408b582b3#egg=rarfile  # v3.0 Github release <Modified: See https://github.com/SickRage/SickRage/commit/059dd933b9da3a0f83c6cbb4f47c198e5a957fc6#diff-c1f4e968aa545d42d2e462672169da4a>
 git+https://github.com/Toilal/rebulk.git@42d0a58af9d793334616a6582f2a83b0fae0dd5f#egg=rebulk
-requests==2.18.1
-requests-oauthlib==0.8.0
-#! rtorrent-python==0.2.9  # <Modified: See https://github.com/SickRage/SickRage/commits/master/lib/rtorrent>
-#! send2trash==1.3.0  # <Modified: See https://github.com/SickRage/SickRage/commit/9ad811432ab0ca3292410d29464ce2532361eb55>
-singledispatch==3.4.0.3
-six==1.10.0
+requests == 2.18.1
+requests-oauthlib == 0.8.0
+#! rtorrent-python == 0.2.9  # <Modified: See https://github.com/SickRage/SickRage/commits/master/lib/rtorrent>
+#! send2trash == 1.3.0  # <Modified: See https://github.com/SickRage/SickRage/commit/9ad811432ab0ca3292410d29464ce2532361eb55>
+singledispatch == 3.4.0.3
+six == 1.10.0
 git+https://github.com/zzzeek/sqlalchemy.git@ccc0c44c3a60fc4906e5e3b26cc6d2b7a69d33bf#egg=sqlalchemy
-stevedore==1.10.0
+stevedore == 1.10.0
 #! git+https://github.com/Diaoul/subliminal.git@7eb7a53fe6bcaf3e01a6b44c8366faf7c96f7f1b#egg=subliminal  # <Modified: Subscenter provider disabled until fixed upstream, https://github.com/SickRage/SickRage/pull/3825/files#diff-ab7eb9ba0a2d4c74c16795ff40f2bd62>
 # synchronous-deluge  # <Custom: by Christian Dale>
-tmdbsimple==0.3.0  # [NOTE] Package naming is modified.
-tornado==4.5.1  # [NOTE] Contains a `routes.py` file, which is not a part of the original package
-tus.py==1.2.0
-#! tvdb_api==1.9  # <Heavily Modified> Deprecated API, will be disabled by October 1st, 2017
-git+https://github.com/twilio/twilio-python.git@f91e1a9e6f4e0a60589b2b90cb66b89b879b9c3e#egg=twilio
-tzlocal==1.4
-Unidecode==0.04.20
-urllib3==1.21.1
-validators==0.10
-webencodings==0.5.1
-win-inet-pton==1.0.1  # Required on Windows systems
-xmltodict==0.11.0
+tmdbsimple == 0.3.0  # [NOTE] Package naming is modified.
+tornado == 4.5.1  # [NOTE] Contains a `routes.py` file, which is not a part of the original package
+tus.py == 1.2.0
+#! tvdb_api == 1.9  # <Heavily Modified> Deprecated API, will be disabled by October 1st, 2017
+twilio == 5.7.0
+tzlocal == 1.4
+Unidecode == 0.04.21
+urllib3 == 1.21.1
+validators == 0.10
+webencodings == 0.5.1
+win-inet-pton == 1.0.1 ; os.name == 'nt' and python_version < '3.0'
+xmltodict == 0.11.0
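
Two of the rewritten lines above carry environment markers (the part after `;`): pgi is skipped on Windows, and win-inet-pton is installed only on Windows under Python 2. The dotted `os.name` spelling is the older setuptools-style marker name; PEP 508 standardizes it as `os_name`. A plain-Python illustration of what the win-inet-pton marker evaluates to (this is not how pip evaluates markers internally):

```python
# Plain-Python equivalent of the marker on the win-inet-pton line:
#   os.name == 'nt' and python_version < '3.0'
import os
import sys

needs_win_inet_pton = (os.name == 'nt') and (sys.version_info[0] < 3)
print(needs_win_inet_pton)  # True only on Windows running Python 2
```
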
diff --git a/requirements/sort.py b/requirements/sort.py
index 995e4e5ab57599eda1171934c41a19d1be7e6103..29eeecdb6eedb11e4d58cf91854dd7648292ba05 100644
--- a/requirements/sort.py
+++ b/requirements/sort.py
@@ -2,26 +2,34 @@ import argparse
 import json
 import re
 
-LINE_REGEX = re.compile(r'^(?P<disabled>[#!]* *)?'
-                        r'(?P<install>(?P<name>[\w\-.\[\]]+)'
-                        r'(?:[=]{1,2}(?P<version>[\da-z.?\-]+))?)'
-                        r'(?:\s*#+\s*(?P<notes>.*))*?$',
-                        re.I)
+# package == version
+NORMAL_REGEX = re.compile(r'^(?P<disabled>[#!]* *)?'
+                          r'(?P<install>(?P<name>[\w\-.\[\]]+)'
+                          r'(?:\s*[=]{1,2}\s*(?P<version>[\da-z.?\-]+))?'
+                          r'(?:\s(?P<options>--[\s\w\d\-\'"=]+))?'
+                          r'(?:\s*;\s*(?P<markers>[\s\w\d\-\.\'"!=<>,]+))?)'
+                          r'(?:\s+#+\s*(?P<notes>.*))*?$',
+                          re.I)
+# git+https://github.com/org/package.git@tree-ish#egg=package
 VCS_REGEX = re.compile(r'^(?P<disabled>[#!]* *)?'
                        r'(?P<install>(?P<vcs>git|hg)\+(?P<repo>.*?)(?:\.git)?'
                        r'@(?P<version>[\da-z]+)'
                        r'#egg=(?P<name>[\w\-.\[\]]+)'
-                       r'(?:&subdirectory=(?P<repo_subdir>.*?))?)'
-                       r'(?:\s*#+\s*(?P<notes>.*))*?$',
+                       r'(?:&subdirectory=(?P<repo_subdir>.*?))?'
+                       r'(?:\s(?P<options>--[\s\w\d\-\'"=]+))?'
+                       r'(?:\s*;\s*(?P<markers>[\s\w\d\-\.\'"!=<>,]+))?)'
+                       r'(?:\s+#+\s*(?P<notes>.*))*?$',
                        re.I)
 
 
 def _readlines(file_path):
+    # TODO: Use io.open
     with open(file_path, 'r') as fh:
         return fh.readlines()
 
 
 def _write(file_path, string):
+    # TODO: Use io.open
     with open(file_path, 'wb') as fh:  # use 'wb' to avoid CR-LF
         fh.write(string)
 
@@ -53,7 +61,7 @@ def file_to_dict(file_path):
             continue
 
         pkg_obj = dict()
-        pkg_match = re.match(LINE_REGEX, pkg)
+        pkg_match = re.match(NORMAL_REGEX, pkg)
         pkg_vcs_match = re.match(VCS_REGEX, pkg)
 
         if not (pkg_match or pkg_vcs_match):
@@ -80,7 +88,8 @@ def file_to_dict(file_path):
                 'version': version,
                 'notes': pkg_match.group('notes') or version_warning,
                 'url': pypi_url,
-                'install': pkg_match.group('install') if not version_warning else None,
+                'install': pkg_match.group('install').strip() if not version_warning else None,
+                'markers': pkg_match.group('markers'),
             }
         elif pkg_vcs_match:
             vcs = pkg_vcs_match.group('vcs')
@@ -106,7 +115,8 @@ def file_to_dict(file_path):
                 'version': version,
                 'notes': pkg_vcs_match.group('notes'),
                 'url': repo_url,
-                'install': pkg_vcs_match.group('install'),
+                'install': pkg_vcs_match.group('install').strip(),
+                'markers': pkg_vcs_match.group('markers'),
             }
 
         if pkg_obj:
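
The reworked NORMAL_REGEX above is what allows the spaced `package == version` style, optional `--options`, and trailing `; markers` segments used in the new requirements.txt. A quick sanity check against one of those lines; the regex is copied verbatim from the patch, the sample line from requirements.txt:

```python
# Sanity check of the patched NORMAL_REGEX (copied from requirements/sort.py)
# against a marker-carrying line from requirements/requirements.txt.
import re

NORMAL_REGEX = re.compile(r'^(?P<disabled>[#!]* *)?'
                          r'(?P<install>(?P<name>[\w\-.\[\]]+)'
                          r'(?:\s*[=]{1,2}\s*(?P<version>[\da-z.?\-]+))?'
                          r'(?:\s(?P<options>--[\s\w\d\-\'"=]+))?'
                          r'(?:\s*;\s*(?P<markers>[\s\w\d\-\.\'"!=<>,]+))?)'
                          r'(?:\s+#+\s*(?P<notes>.*))*?$',
                          re.I)

line = "win-inet-pton == 1.0.1 ; os.name == 'nt' and python_version < '3.0'"
match = NORMAL_REGEX.match(line)
print(match.group('name'))     # win-inet-pton
print(match.group('version'))  # 1.0.1
print(match.group('markers'))  # os.name == 'nt' and python_version < '3.0'
```
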
diff --git a/setup.py b/setup.py
index 6dcf2f42fcbec4c981a677e5db6d6de201536da4..e54cb231b3ca2ce546c338f31c169216c3c783c5 100644
--- a/setup.py
+++ b/setup.py
@@ -4,10 +4,14 @@ Use setup tools to install sickrage
 """
 import os
 
-from babel.messages import frontend as babel
 from setuptools import find_packages, setup
 from requirements.sort import file_to_dict
 
+try:
+    from babel.messages import frontend as babel
+except ImportError:
+    babel = None
+
 ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__)))
 
 with open(os.path.join(ROOT, 'readme.md'), 'r') as r:
@@ -16,13 +20,22 @@ with open(os.path.join(ROOT, 'readme.md'), 'r') as r:
 
 def get_requirements(rel_file_path):
     file_path = os.path.join(ROOT, rel_file_path)
-    data = [pkg['install'] for pkg in file_to_dict(file_path) if pkg['active'] and pkg['install']]
-    return data
-
+    data = file_to_dict(file_path)
+    if data is False:
+        print('get_requirements failed')
+        return []
+    return [pkg['install'] for pkg in data
+            if pkg['active'] and pkg['install']]
 
 requirements = get_requirements('requirements/requirements.txt')
-if not requirements:
-    raise AssertionError('get_requirements failed')
+commands = {}
+if babel:
+    commands.update({
+        'compile_catalog': babel.compile_catalog,
+        'extract_messages': babel.extract_messages,
+        'init_catalog': babel.init_catalog,
+        'update_catalog': babel.update_catalog
+    })
 
 setup(
     name="sickrage",
@@ -67,12 +80,7 @@ setup(
         'Topic :: Multimedia :: Video',
     ],
 
-    cmdclass={
-        'compile_catalog': babel.compile_catalog,
-        'extract_messages': babel.extract_messages,
-        'init_catalog': babel.init_catalog,
-        'update_catalog': babel.update_catalog
-    },
+    cmdclass=commands,
 
     message_extractors={
         'gui': [