diff --git a/libs/tornado/__init__.py b/libs/tornado/__init__.py
index 2d1bba88399932bc7ebdc141427f4578a649b072..609e2c0539ba4b7cad6d1713dd89663551bba6be 100755
--- a/libs/tornado/__init__.py
+++ b/libs/tornado/__init__.py
@@ -16,7 +16,7 @@
 
 """The Tornado web server and tools."""
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 # version is a human-readable version number.
 
@@ -25,5 +25,5 @@ from __future__ import absolute_import, division, with_statement
 # is zero for an official release, positive for a development branch,
 # or negative for a release candidate (after the base version number
 # has been incremented)
-version = "2.4.post2"
-version_info = (2, 4, 0, 2)
+version = "2.4.post3"
+version_info = (2, 4, 0, 3)
diff --git a/libs/tornado/auth.py b/libs/tornado/auth.py
index 964534fa8b535f745444719e421af5e86f132c88..0ff32cb2258f02e8e23dcec5563797c7c521b23c 100755
--- a/libs/tornado/auth.py
+++ b/libs/tornado/auth.py
@@ -44,23 +44,63 @@ Example usage for Google OpenID::
             # Save the user with, e.g., set_secure_cookie()
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import base64
 import binascii
+import functools
 import hashlib
 import hmac
 import time
-import urllib
-import urlparse
 import uuid
 
+from tornado.concurrent import Future, chain_future, return_future
+from tornado import gen
 from tornado import httpclient
 from tornado import escape
 from tornado.httputil import url_concat
 from tornado.log import gen_log
-from tornado.util import bytes_type, b
-
+from tornado.util import bytes_type, u, unicode_type, ArgReplacer
+
+try:
+    import urlparse  # py2
+except ImportError:
+    import urllib.parse as urlparse  # py3
+
+try:
+    import urllib.parse as urllib_parse  # py3
+except ImportError:
+    import urllib as urllib_parse  # py2
+
+class AuthError(Exception):
+    pass
+
+def _auth_future_to_callback(callback, future):
+    try:
+        result = future.result()
+    except AuthError as e:
+        gen_log.warning(str(e))
+        result = None
+    callback(result)
+
+def _auth_return_future(f):
+    """Similar to tornado.concurrent.return_future, but uses the auth
+    module's legacy callback interface.
+
+    Note that when using this decorator the ``callback`` parameter
+    inside the function will actually be a future.
+    """
+    replacer = ArgReplacer(f, 'callback')
+    @functools.wraps(f)
+    def wrapper(*args, **kwargs):
+        future = Future()
+        callback, args, kwargs = replacer.replace(future, args, kwargs)
+        if callback is not None:
+            future.add_done_callback(
+                functools.partial(_auth_future_to_callback, callback))
+        f(*args, **kwargs)
+        return future
+    return wrapper
 
 class OpenIdMixin(object):
     """Abstract implementation of OpenID and Attribute Exchange.
@@ -81,8 +121,9 @@ class OpenIdMixin(object):
         """
         callback_uri = callback_uri or self.request.uri
         args = self._openid_args(callback_uri, ax_attrs=ax_attrs)
-        self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
+        self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args))
 
+    @_auth_return_future
     def get_authenticated_user(self, callback, http_client=None):
         """Fetches the authenticated user data upon redirect.
 
@@ -91,23 +132,23 @@ class OpenIdMixin(object):
         methods.
         """
         # Verify the OpenID response via direct request to the OP
-        args = dict((k, v[-1]) for k, v in self.request.arguments.iteritems())
-        args["openid.mode"] = u"check_authentication"
+        args = dict((k, v[-1]) for k, v in self.request.arguments.items())
+        args["openid.mode"] = u("check_authentication")
         url = self._OPENID_ENDPOINT
         if http_client is None:
             http_client = self.get_auth_http_client()
         http_client.fetch(url, self.async_callback(
             self._on_authentication_verified, callback),
-            method="POST", body=urllib.urlencode(args))
+            method="POST", body=urllib_parse.urlencode(args))
 
     def _openid_args(self, callback_uri, ax_attrs=[], oauth_scope=None):
         url = urlparse.urljoin(self.request.full_url(), callback_uri)
         args = {
             "openid.ns": "http://specs.openid.net/auth/2.0",
             "openid.claimed_id":
-                "http://specs.openid.net/auth/2.0/identifier_select",
+            "http://specs.openid.net/auth/2.0/identifier_select",
             "openid.identity":
-                "http://specs.openid.net/auth/2.0/identifier_select",
+            "http://specs.openid.net/auth/2.0/identifier_select",
             "openid.return_to": url,
             "openid.realm": urlparse.urljoin(url, '/'),
             "openid.mode": "checkid_setup",
@@ -124,11 +165,11 @@ class OpenIdMixin(object):
                 required += ["firstname", "fullname", "lastname"]
                 args.update({
                     "openid.ax.type.firstname":
-                        "http://axschema.org/namePerson/first",
+                    "http://axschema.org/namePerson/first",
                     "openid.ax.type.fullname":
-                        "http://axschema.org/namePerson",
+                    "http://axschema.org/namePerson",
                     "openid.ax.type.lastname":
-                        "http://axschema.org/namePerson/last",
+                    "http://axschema.org/namePerson/last",
                 })
             known_attrs = {
                 "email": "http://axschema.org/contact/email",
@@ -142,40 +183,40 @@ class OpenIdMixin(object):
         if oauth_scope:
             args.update({
                 "openid.ns.oauth":
-                    "http://specs.openid.net/extensions/oauth/1.0",
+                "http://specs.openid.net/extensions/oauth/1.0",
                 "openid.oauth.consumer": self.request.host.split(":")[0],
                 "openid.oauth.scope": oauth_scope,
             })
         return args
 
-    def _on_authentication_verified(self, callback, response):
-        if response.error or b("is_valid:true") not in response.body:
-            gen_log.warning("Invalid OpenID response: %s", response.error or
-                            response.body)
-            callback(None)
+    def _on_authentication_verified(self, future, response):
+        if response.error or b"is_valid:true" not in response.body:
+            future.set_exception(AuthError(
+                    "Invalid OpenID response: %s" % (response.error or
+                                                     response.body)))
             return
 
         # Make sure we got back at least an email from attribute exchange
         ax_ns = None
-        for name in self.request.arguments.iterkeys():
+        for name in self.request.arguments:
             if name.startswith("openid.ns.") and \
-               self.get_argument(name) == u"http://openid.net/srv/ax/1.0":
+                    self.get_argument(name) == u("http://openid.net/srv/ax/1.0"):
                 ax_ns = name[10:]
                 break
 
         def get_ax_arg(uri):
             if not ax_ns:
-                return u""
+                return u("")
             prefix = "openid." + ax_ns + ".type."
             ax_name = None
-            for name in self.request.arguments.iterkeys():
+            for name in self.request.arguments.keys():
                 if self.get_argument(name) == uri and name.startswith(prefix):
                     part = name[len(prefix):]
                     ax_name = "openid." + ax_ns + ".value." + part
                     break
             if not ax_name:
-                return u""
-            return self.get_argument(ax_name, u"")
+                return u("")
+            return self.get_argument(ax_name, u(""))
 
         email = get_ax_arg("http://axschema.org/contact/email")
         name = get_ax_arg("http://axschema.org/namePerson")
@@ -194,7 +235,7 @@ class OpenIdMixin(object):
         if name:
             user["name"] = name
         elif name_parts:
-            user["name"] = u" ".join(name_parts)
+            user["name"] = u(" ").join(name_parts)
         elif email:
             user["name"] = email.split("@")[0]
         if email:
@@ -206,7 +247,7 @@ class OpenIdMixin(object):
         claimed_id = self.get_argument("openid.claimed_id", None)
         if claimed_id:
             user["claimed_id"] = claimed_id
-        callback(user)
+        future.set_result(user)
 
     def get_auth_http_client(self):
         """Returns the AsyncHTTPClient instance to be used for auth requests.
@@ -248,7 +289,7 @@ class OAuthMixin(object):
                 self.async_callback(
                     self._on_request_token,
                     self._OAUTH_AUTHORIZE_URL,
-                callback_uri))
+                    callback_uri))
         else:
             http_client.fetch(
                 self._oauth_request_token_url(),
@@ -256,6 +297,7 @@ class OAuthMixin(object):
                     self._on_request_token, self._OAUTH_AUTHORIZE_URL,
                     callback_uri))
 
+    @_auth_return_future
     def get_authenticated_user(self, callback, http_client=None):
         """Gets the OAuth authorized user and access token on callback.
 
@@ -267,19 +309,19 @@ class OAuthMixin(object):
         to this service on behalf of the user.
 
         """
+        future = callback
         request_key = escape.utf8(self.get_argument("oauth_token"))
         oauth_verifier = self.get_argument("oauth_verifier", None)
         request_cookie = self.get_cookie("_oauth_request_token")
         if not request_cookie:
-            gen_log.warning("Missing OAuth request token cookie")
-            callback(None)
+            future.set_exception(AuthError(
+                    "Missing OAuth request token cookie"))
             return
         self.clear_cookie("_oauth_request_token")
         cookie_key, cookie_secret = [base64.b64decode(escape.utf8(i)) for i in request_cookie.split("|")]
         if cookie_key != request_key:
-            gen_log.info((cookie_key, request_key, request_cookie))
-            gen_log.warning("Request token does not match cookie")
-            callback(None)
+            future.set_exception(AuthError(
+                    "Request token does not match cookie"))
             return
         token = dict(key=cookie_key, secret=cookie_secret)
         if oauth_verifier:
@@ -312,23 +354,23 @@ class OAuthMixin(object):
             signature = _oauth_signature(consumer_token, "GET", url, args)
 
         args["oauth_signature"] = signature
-        return url + "?" + urllib.urlencode(args)
+        return url + "?" + urllib_parse.urlencode(args)
 
     def _on_request_token(self, authorize_url, callback_uri, response):
         if response.error:
             raise Exception("Could not get request token")
         request_token = _oauth_parse_response(response.body)
-        data = (base64.b64encode(request_token["key"]) + b("|") +
+        data = (base64.b64encode(request_token["key"]) + b"|" +
                 base64.b64encode(request_token["secret"]))
         self.set_cookie("_oauth_request_token", data)
         args = dict(oauth_token=request_token["key"])
         if callback_uri == "oob":
-            self.finish(authorize_url + "?" + urllib.urlencode(args))
+            self.finish(authorize_url + "?" + urllib_parse.urlencode(args))
             return
         elif callback_uri:
             args["oauth_callback"] = urlparse.urljoin(
                 self.request.full_url(), callback_uri)
-        self.redirect(authorize_url + "?" + urllib.urlencode(args))
+        self.redirect(authorize_url + "?" + urllib_parse.urlencode(args))
 
     def _oauth_access_token_url(self, request_token):
         consumer_token = self._oauth_consumer_token()
@@ -352,27 +394,36 @@ class OAuthMixin(object):
                                          request_token)
 
         args["oauth_signature"] = signature
-        return url + "?" + urllib.urlencode(args)
+        return url + "?" + urllib_parse.urlencode(args)
 
-    def _on_access_token(self, callback, response):
+    def _on_access_token(self, future, response):
         if response.error:
-            gen_log.warning("Could not fetch access token")
-            callback(None)
+            future.set_exception(AuthError("Could not fetch access token"))
             return
 
         access_token = _oauth_parse_response(response.body)
-        self._oauth_get_user(access_token, self.async_callback(
-             self._on_oauth_get_user, access_token, callback))
+        self._oauth_get_user_future(access_token).add_done_callback(
+            self.async_callback(self._on_oauth_get_user, access_token, future))
+
+    @return_future
+    def _oauth_get_user_future(self, access_token, callback):
+        # By default, call the old-style _oauth_get_user, but new code
+        # should override this method instead.
+        self._oauth_get_user(access_token, callback)
 
     def _oauth_get_user(self, access_token, callback):
         raise NotImplementedError()
 
-    def _on_oauth_get_user(self, access_token, callback, user):
+    def _on_oauth_get_user(self, access_token, future, user_future):
+        if user_future.exception() is not None:
+            future.set_exception(user_future.exception())
+            return
+        user = user_future.result()
         if not user:
-            callback(None)
+            future.set_exception(AuthError("Error getting user"))
             return
         user["access_token"] = access_token
-        callback(user)
+        future.set_result(user)
 
     def _oauth_request_parameters(self, url, access_token, parameters={},
                                   method="GET"):
@@ -395,7 +446,7 @@ class OAuthMixin(object):
         args.update(parameters)
         if getattr(self, "_OAUTH_VERSION", "1.0a") == "1.0a":
             signature = _oauth10a_signature(consumer_token, method, url, args,
-                                         access_token)
+                                            access_token)
         else:
             signature = _oauth_signature(consumer_token, method, url, args,
                                          access_token)
@@ -425,13 +476,13 @@ class OAuth2Mixin(object):
         process.
         """
         args = {
-          "redirect_uri": redirect_uri,
-          "client_id": client_id
+            "redirect_uri": redirect_uri,
+            "client_id": client_id
         }
         if extra_params:
             args.update(extra_params)
         self.redirect(
-                url_concat(self._OAUTH_AUTHORIZE_URL, args))
+            url_concat(self._OAUTH_AUTHORIZE_URL, args))
 
     def _oauth_request_token_url(self, redirect_uri=None, client_id=None,
                                  client_secret=None, code=None,
@@ -442,7 +493,7 @@ class OAuth2Mixin(object):
             code=code,
             client_id=client_id,
             client_secret=client_secret,
-            )
+        )
         if extra_params:
             args.update(extra_params)
         return url_concat(url, args)
@@ -499,8 +550,9 @@ class TwitterMixin(OAuthMixin):
         http.fetch(self._oauth_request_token_url(callback_uri=callback_uri), self.async_callback(
             self._on_request_token, self._OAUTH_AUTHENTICATE_URL, None))
 
-    def twitter_request(self, path, callback, access_token=None,
-                           post_args=None, **args):
+    @_auth_return_future
+    def twitter_request(self, path, callback=None, access_token=None,
+                        post_args=None, **args):
         """Fetches the given API path, e.g., "/statuses/user_timeline/btaylor"
 
         The path should not include the format (we automatically append
@@ -553,22 +605,22 @@ class TwitterMixin(OAuthMixin):
                 url, access_token, all_args, method=method)
             args.update(oauth)
         if args:
-            url += "?" + urllib.urlencode(args)
-        callback = self.async_callback(self._on_twitter_request, callback)
+            url += "?" + urllib_parse.urlencode(args)
         http = self.get_auth_http_client()
+        http_callback = self.async_callback(self._on_twitter_request, callback)
         if post_args is not None:
-            http.fetch(url, method="POST", body=urllib.urlencode(post_args),
-                       callback=callback)
+            http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
+                       callback=http_callback)
         else:
-            http.fetch(url, callback=callback)
+            http.fetch(url, callback=http_callback)
 
-    def _on_twitter_request(self, callback, response):
+    def _on_twitter_request(self, future, response):
         if response.error:
-            gen_log.warning("Error response %s fetching %s", response.error,
-                            response.request.url)
-            callback(None)
+            future.set_exception(AuthError(
+                    "Error response %s fetching %s" % (response.error,
+                                                       response.request.url)))
             return
-        callback(escape.json_decode(response.body))
+        future.set_result(escape.json_decode(response.body))
 
     def _oauth_consumer_token(self):
         self.require_setting("twitter_consumer_key", "Twitter OAuth")
@@ -577,13 +629,12 @@ class TwitterMixin(OAuthMixin):
             key=self.settings["twitter_consumer_key"],
             secret=self.settings["twitter_consumer_secret"])
 
-    def _oauth_get_user(self, access_token, callback):
-        callback = self.async_callback(self._parse_user_response, callback)
-        self.twitter_request(
-            "/users/show/" + escape.native_str(access_token[b("screen_name")]),
-            access_token=access_token, callback=callback)
-
-    def _parse_user_response(self, callback, user):
+    @return_future
+    @gen.engine
+    def _oauth_get_user_future(self, access_token, callback):
+        user = yield self.twitter_request(
+            "/users/show/" + escape.native_str(access_token[b"screen_name"]),
+            access_token=access_token)
         if user:
             user["username"] = user["screen_name"]
         callback(user)
@@ -629,6 +680,7 @@ class FriendFeedMixin(OAuthMixin):
     _OAUTH_NO_CALLBACKS = True
     _OAUTH_VERSION = "1.0"
 
+    @_auth_return_future
     def friendfeed_request(self, path, callback, access_token=None,
                            post_args=None, **args):
         """Fetches the given relative API path, e.g., "/bret/friends"
@@ -675,22 +727,22 @@ class FriendFeedMixin(OAuthMixin):
                 url, access_token, all_args, method=method)
             args.update(oauth)
         if args:
-            url += "?" + urllib.urlencode(args)
+            url += "?" + urllib_parse.urlencode(args)
         callback = self.async_callback(self._on_friendfeed_request, callback)
         http = self.get_auth_http_client()
         if post_args is not None:
-            http.fetch(url, method="POST", body=urllib.urlencode(post_args),
+            http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
                        callback=callback)
         else:
             http.fetch(url, callback=callback)
 
-    def _on_friendfeed_request(self, callback, response):
+    def _on_friendfeed_request(self, future, response):
         if response.error:
-            gen_log.warning("Error response %s fetching %s", response.error,
-                            response.request.url)
-            callback(None)
+            future.set_exception(AuthError(
+                    "Error response %s fetching %s" % (response.error,
+                                                       response.request.url)))
             return
-        callback(escape.json_decode(response.body))
+        future.set_result(escape.json_decode(response.body))
 
     def _oauth_consumer_token(self):
         self.require_setting("friendfeed_consumer_key", "FriendFeed OAuth")
@@ -699,12 +751,15 @@ class FriendFeedMixin(OAuthMixin):
             key=self.settings["friendfeed_consumer_key"],
             secret=self.settings["friendfeed_consumer_secret"])
 
+    @return_future
+    @gen.engine
     def _oauth_get_user(self, access_token, callback):
-        callback = self.async_callback(self._parse_user_response, callback)
-        self.friendfeed_request(
+        user = yield self.friendfeed_request(
             "/feedinfo/" + access_token["username"],
-            include="id,name,description", access_token=access_token,
-            callback=callback)
+            include="id,name,description", access_token=access_token)
+        if user:
+            user["username"] = user["id"]
+        callback(user)
 
     def _parse_user_response(self, callback, user):
         if user:
@@ -755,15 +810,16 @@ class GoogleMixin(OpenIdMixin, OAuthMixin):
         callback_uri = callback_uri or self.request.uri
         args = self._openid_args(callback_uri, ax_attrs=ax_attrs,
                                  oauth_scope=oauth_scope)
-        self.redirect(self._OPENID_ENDPOINT + "?" + urllib.urlencode(args))
+        self.redirect(self._OPENID_ENDPOINT + "?" + urllib_parse.urlencode(args))
 
+    @_auth_return_future
     def get_authenticated_user(self, callback):
         """Fetches the authenticated user data upon redirect."""
         # Look to see if we are doing combined OpenID/OAuth
         oauth_ns = ""
-        for name, values in self.request.arguments.iteritems():
+        for name, values in self.request.arguments.items():
             if name.startswith("openid.ns.") and \
-               values[-1] == u"http://specs.openid.net/extensions/oauth/1.0":
+                    values[-1] == b"http://specs.openid.net/extensions/oauth/1.0":
                 oauth_ns = name[10:]
                 break
         token = self.get_argument("openid." + oauth_ns + ".request_token", "")
@@ -773,7 +829,8 @@ class GoogleMixin(OpenIdMixin, OAuthMixin):
             http.fetch(self._oauth_access_token_url(token),
                        self.async_callback(self._on_access_token, callback))
         else:
-            OpenIdMixin.get_authenticated_user(self, callback)
+            chain_future(OpenIdMixin.get_authenticated_user(self),
+                         callback)
 
     def _oauth_consumer_token(self):
         self.require_setting("google_consumer_key", "Google OAuth")
@@ -782,15 +839,16 @@ class GoogleMixin(OpenIdMixin, OAuthMixin):
             key=self.settings["google_consumer_key"],
             secret=self.settings["google_consumer_secret"])
 
-    def _oauth_get_user(self, access_token, callback):
-        OpenIdMixin.get_authenticated_user(self, callback)
+    def _oauth_get_user_future(self, access_token, callback):
+        return OpenIdMixin.get_authenticated_user(self)
 
 
 class FacebookMixin(object):
     """Facebook Connect authentication.
 
-    New applications should consider using `FacebookGraphMixin` below instead
-    of this class.
+    *Deprecated:* New applications should use `FacebookGraphMixin`
+    below instead of this class.  This class does not support the
+    Future-based interface seen on other classes in this module.
 
     To authenticate with Facebook, register your application with
     Facebook at http://www.facebook.com/developers/apps.php. Then
@@ -837,11 +895,11 @@ class FacebookMixin(object):
             args["cancel_url"] = urlparse.urljoin(
                 self.request.full_url(), cancel_uri)
         if extended_permissions:
-            if isinstance(extended_permissions, (unicode, bytes_type)):
+            if isinstance(extended_permissions, (unicode_type, bytes_type)):
                 extended_permissions = [extended_permissions]
             args["req_perms"] = ",".join(extended_permissions)
         self.redirect("http://www.facebook.com/login.php?" +
-                      urllib.urlencode(args))
+                      urllib_parse.urlencode(args))
 
     def authorize_redirect(self, extended_permissions, callback_uri=None,
                            cancel_uri=None):
@@ -923,7 +981,7 @@ class FacebookMixin(object):
         args["format"] = "json"
         args["sig"] = self._signature(args)
         url = "http://api.facebook.com/restserver.php?" + \
-            urllib.urlencode(args)
+            urllib_parse.urlencode(args)
         http = self.get_auth_http_client()
         http.fetch(url, callback=self.async_callback(
             self._parse_response, callback))
@@ -966,7 +1024,7 @@ class FacebookMixin(object):
     def _signature(self, args):
         parts = ["%s=%s" % (n, args[n]) for n in sorted(args.keys())]
         body = "".join(parts) + self.settings["facebook_secret"]
-        if isinstance(body, unicode):
+        if isinstance(body, unicode_type):
             body = body.encode("utf-8")
         return hashlib.md5(body).hexdigest()
 
@@ -986,7 +1044,7 @@ class FacebookGraphMixin(OAuth2Mixin):
     _OAUTH_NO_CALLBACKS = False
 
     def get_authenticated_user(self, redirect_uri, client_id, client_secret,
-                              code, callback, extra_fields=None):
+                               code, callback, extra_fields=None):
         """Handles the login for the Facebook user, returning a user object.
 
         Example usage::
@@ -1014,10 +1072,10 @@ class FacebookGraphMixin(OAuth2Mixin):
         """
         http = self.get_auth_http_client()
         args = {
-          "redirect_uri": redirect_uri,
-          "code": code,
-          "client_id": client_id,
-          "client_secret": client_secret,
+            "redirect_uri": redirect_uri,
+            "code": code,
+            "client_id": client_id,
+            "client_secret": client_secret,
         }
 
         fields = set(['id', 'name', 'first_name', 'last_name',
@@ -1026,11 +1084,11 @@ class FacebookGraphMixin(OAuth2Mixin):
             fields.update(extra_fields)
 
         http.fetch(self._oauth_request_token_url(**args),
-            self.async_callback(self._on_access_token, redirect_uri, client_id,
-                                client_secret, callback, fields))
+                   self.async_callback(self._on_access_token, redirect_uri, client_id,
+                                       client_secret, callback, fields))
 
     def _on_access_token(self, redirect_uri, client_id, client_secret,
-                        callback, fields, response):
+                         callback, fields, response):
         if response.error:
             gen_log.warning('Facebook auth error: %s' % str(response))
             callback(None)
@@ -1048,7 +1106,7 @@ class FacebookGraphMixin(OAuth2Mixin):
                 self._on_get_user_info, callback, session, fields),
             access_token=session["access_token"],
             fields=",".join(fields)
-            )
+        )
 
     def _on_get_user_info(self, callback, session, fields, user):
         if user is None:
@@ -1063,7 +1121,7 @@ class FacebookGraphMixin(OAuth2Mixin):
         callback(fieldmap)
 
     def facebook_request(self, path, callback, access_token=None,
-                           post_args=None, **args):
+                         post_args=None, **args):
         """Fetches the given relative API path, e.g., "/btaylor/picture"
 
         If the request is a POST, post_args should be provided. Query
@@ -1104,11 +1162,11 @@ class FacebookGraphMixin(OAuth2Mixin):
             all_args.update(args)
 
         if all_args:
-            url += "?" + urllib.urlencode(all_args)
+            url += "?" + urllib_parse.urlencode(all_args)
         callback = self.async_callback(self._on_facebook_request, callback)
         http = self.get_auth_http_client()
         if post_args is not None:
-            http.fetch(url, method="POST", body=urllib.urlencode(post_args),
+            http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args),
                        callback=callback)
         else:
             http.fetch(url, callback=callback)
@@ -1148,7 +1206,7 @@ def _oauth_signature(consumer_token, method, url, parameters={}, token=None):
 
     key_elems = [escape.utf8(consumer_token["secret"])]
     key_elems.append(escape.utf8(token["secret"] if token else ""))
-    key = b("&").join(key_elems)
+    key = b"&".join(key_elems)
 
     hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
     return binascii.b2a_base64(hash.digest())[:-1]
@@ -1170,25 +1228,25 @@ def _oauth10a_signature(consumer_token, method, url, parameters={}, token=None):
                                for k, v in sorted(parameters.items())))
 
     base_string = "&".join(_oauth_escape(e) for e in base_elems)
-    key_elems = [escape.utf8(urllib.quote(consumer_token["secret"], safe='~'))]
-    key_elems.append(escape.utf8(urllib.quote(token["secret"], safe='~') if token else ""))
-    key = b("&").join(key_elems)
+    key_elems = [escape.utf8(urllib_parse.quote(consumer_token["secret"], safe='~'))]
+    key_elems.append(escape.utf8(urllib_parse.quote(token["secret"], safe='~') if token else ""))
+    key = b"&".join(key_elems)
 
     hash = hmac.new(key, escape.utf8(base_string), hashlib.sha1)
     return binascii.b2a_base64(hash.digest())[:-1]
 
 
 def _oauth_escape(val):
-    if isinstance(val, unicode):
+    if isinstance(val, unicode_type):
         val = val.encode("utf-8")
-    return urllib.quote(val, safe="~")
+    return urllib_parse.quote(val, safe="~")
 
 
 def _oauth_parse_response(body):
     p = escape.parse_qs(body, keep_blank_values=False)
-    token = dict(key=p[b("oauth_token")][0], secret=p[b("oauth_token_secret")][0])
+    token = dict(key=p[b"oauth_token"][0], secret=p[b"oauth_token_secret"][0])
 
     # Add the extra parameters the Provider included to the token
-    special = (b("oauth_token"), b("oauth_token_secret"))
+    special = (b"oauth_token", b"oauth_token_secret")
     token.update((k, p[k][0]) for k in p if k not in special)
     return token
diff --git a/libs/tornado/autoreload.py b/libs/tornado/autoreload.py
index 62af0f3b05f32fae0be05318d11f43db47279e05..4e424878f1f5994a35afd3b5dc87e23c57d2a908 100755
--- a/libs/tornado/autoreload.py
+++ b/libs/tornado/autoreload.py
@@ -31,7 +31,7 @@ Additionally, modifying these variables will cause reloading to behave
 incorrectly.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import os
 import sys
@@ -79,6 +79,7 @@ import weakref
 from tornado import ioloop
 from tornado.log import gen_log
 from tornado import process
+from tornado.util import exec_in
 
 try:
     import signal
@@ -91,6 +92,7 @@ _reload_hooks = []
 _reload_attempted = False
 _io_loops = weakref.WeakKeyDictionary()
 
+
 def start(io_loop=None, check_time=500):
     """Restarts the process automatically when a module is modified.
 
@@ -103,7 +105,7 @@ def start(io_loop=None, check_time=500):
     _io_loops[io_loop] = True
     if len(_io_loops) > 1:
         gen_log.warning("tornado.autoreload started more than once in the same process")
-    add_reload_hook(functools.partial(_close_all_fds, io_loop))
+    add_reload_hook(functools.partial(io_loop.close, all_fds=True))
     modify_times = {}
     callback = functools.partial(_reload_on_update, modify_times)
     scheduler = ioloop.PeriodicCallback(callback, check_time, io_loop=io_loop)
@@ -141,14 +143,6 @@ def add_reload_hook(fn):
     _reload_hooks.append(fn)
 
 
-def _close_all_fds(io_loop):
-    for fd in io_loop._handlers.keys():
-        try:
-            os.close(fd)
-        except Exception:
-            pass
-
-
 def _reload_on_update(modify_times):
     if _reload_attempted:
         # We already tried to reload and it didn't work, so don't try again.
@@ -204,7 +198,7 @@ def _reload():
     # to ensure that the new process sees the same path we did.
     path_prefix = '.' + os.pathsep
     if (sys.path[0] == '' and
-        not os.environ.get("PYTHONPATH", "").startswith(path_prefix)):
+            not os.environ.get("PYTHONPATH", "").startswith(path_prefix)):
         os.environ["PYTHONPATH"] = (path_prefix +
                                     os.environ.get("PYTHONPATH", ""))
     if sys.platform == 'win32':
@@ -263,7 +257,7 @@ def main():
         script = sys.argv[1]
         sys.argv = sys.argv[1:]
     else:
-        print >>sys.stderr, _USAGE
+        print(_USAGE, file=sys.stderr)
         sys.exit(1)
 
     try:
@@ -277,11 +271,11 @@ def main():
                 # Use globals as our "locals" dictionary so that
                 # something that tries to import __main__ (e.g. the unittest
                 # module) will see the right things.
-                exec f.read() in globals(), globals()
-    except SystemExit, e:
+                exec_in(f.read(), globals(), globals())
+    except SystemExit as e:
         logging.basicConfig()
         gen_log.info("Script exited with status %s", e.code)
-    except Exception, e:
+    except Exception as e:
         logging.basicConfig()
         gen_log.warning("Script exited with uncaught exception", exc_info=True)
         # If an exception occurred at import time, the file with the error
diff --git a/libs/tornado/concurrent.py b/libs/tornado/concurrent.py
index 80596844db3161ce00f28706bc6d7c2c2b22b330..59075a3a4cf2deb932b058779a2293b83ca7fb4d 100755
--- a/libs/tornado/concurrent.py
+++ b/libs/tornado/concurrent.py
@@ -13,19 +13,21 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import functools
 import sys
 
 from tornado.stack_context import ExceptionStackContext
-from tornado.util import raise_exc_info
+from tornado.util import raise_exc_info, ArgReplacer
 
 try:
     from concurrent import futures
 except ImportError:
     futures = None
 
+class ReturnValueIgnoredError(Exception):
+    pass
 
 class DummyFuture(object):
     def __init__(self):
@@ -89,39 +91,99 @@ if futures is None:
 else:
     Future = futures.Future
 
+
 class DummyExecutor(object):
     def submit(self, fn, *args, **kwargs):
         future = Future()
         try:
             future.set_result(fn(*args, **kwargs))
-        except Exception, e:
+        except Exception as e:
             future.set_exception(e)
         return future
 
 dummy_executor = DummyExecutor()
 
+
 def run_on_executor(fn):
     @functools.wraps(fn)
     def wrapper(self, *args, **kwargs):
-        callback = kwargs.pop("callback")
+        callback = kwargs.pop("callback", None)
         future = self.executor.submit(fn, self, *args, **kwargs)
         if callback:
             self.io_loop.add_future(future, callback)
         return future
     return wrapper
 
-# TODO: this needs a better name
-def future_wrap(f):
+
+def return_future(f):
+    """Decorator to make a function that returns via callback return a `Future`.
+
+    The wrapped function should take a ``callback`` keyword argument
+    and invoke it with one argument when it has finished.  To signal failure,
+    the function can simply raise an exception (which will be
+    captured by the `stack_context` and passed along to the `Future`).
+
+    From the caller's perspective, the callback argument is optional.
+    If one is given, it will be invoked when the function is complete
+    with the `Future` as an argument.  If no callback is given, the caller
+    should use the `Future` to wait for the function to complete
+    (perhaps by yielding it in a `gen.engine` function, or passing it
+    to `IOLoop.add_future`).
+
+    Usage::
+        @return_future
+        def future_func(arg1, arg2, callback):
+            # Do stuff (possibly asynchronous)
+            callback(result)
+
+        @gen.engine
+        def caller(callback):
+            yield future_func(arg1, arg2)
+            callback()
+
+    Note that ``@return_future`` and ``@gen.engine`` can be applied to the
+    same function, provided ``@return_future`` appears first.
+    """
+    replacer = ArgReplacer(f, 'callback')
     @functools.wraps(f)
     def wrapper(*args, **kwargs):
         future = Future()
-        if kwargs.get('callback') is not None:
-            future.add_done_callback(kwargs.pop('callback'))
-        kwargs['callback'] = future.set_result
+        callback, args, kwargs = replacer.replace(future.set_result,
+                                                  args, kwargs)
+        if callback is not None:
+            future.add_done_callback(callback)
+
         def handle_error(typ, value, tb):
             future.set_exception(value)
             return True
+        exc_info = None
         with ExceptionStackContext(handle_error):
-            f(*args, **kwargs)
+            try:
+                result = f(*args, **kwargs)
+                if result is not None:
+                    raise ReturnValueIgnoredError(
+                        "@return_future should not be used with functions "
+                        "that return values")
+            except:
+                exc_info = sys.exc_info()
+                raise
+        if exc_info is not None:
+            # If the initial synchronous part of f() raised an exception,
+            # go ahead and raise it to the caller directly without waiting
+            # for them to inspect the Future.
+            raise_exc_info(exc_info)
         return future
     return wrapper
+
+def chain_future(a, b):
+    """Chain two futures together so that when one completes, so does the other.
+
+    The result (success or failure) of ``a`` will be copied to ``b``.
+    """
+    def copy(future):
+        assert future is a
+        if a.exception() is not None:
+            b.set_exception(a.exception())
+        else:
+            b.set_result(a.result())
+    a.add_done_callback(copy)
diff --git a/libs/tornado/curl_httpclient.py b/libs/tornado/curl_httpclient.py
index 52350d246625d0fe0438bb3767979344d28c57bc..f46ea7b870afeac3a69ad42a435c1228da290a49 100755
--- a/libs/tornado/curl_httpclient.py
+++ b/libs/tornado/curl_httpclient.py
@@ -16,9 +16,8 @@
 
 """Blocking and non-blocking HTTP client implementations using pycurl."""
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-import cStringIO
 import collections
 import logging
 import pycurl
@@ -30,20 +29,22 @@ from tornado import ioloop
 from tornado.log import gen_log
 from tornado import stack_context
 
-from tornado.escape import utf8
+from tornado.escape import utf8, native_str
 from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy
 
+try:
+    from io import BytesIO  # py3
+except ImportError:
+    from cStringIO import StringIO as BytesIO  # py2
+
 
 class CurlAsyncHTTPClient(AsyncHTTPClient):
-    def initialize(self, io_loop=None, max_clients=10, defaults=None):
-        self.io_loop = io_loop
-        self.defaults = dict(HTTPRequest._DEFAULTS)
-        if defaults is not None:
-            self.defaults.update(defaults)
+    def initialize(self, io_loop, max_clients=10, defaults=None):
+        super(CurlAsyncHTTPClient, self).initialize(io_loop, defaults=defaults)
         self._multi = pycurl.CurlMulti()
         self._multi.setopt(pycurl.M_TIMERFUNCTION, self._set_timeout)
         self._multi.setopt(pycurl.M_SOCKETFUNCTION, self._handle_socket)
-        self._curls = [_curl_create() for i in xrange(max_clients)]
+        self._curls = [_curl_create() for i in range(max_clients)]
         self._free_list = self._curls[:]
         self._requests = collections.deque()
         self._fds = {}
@@ -69,19 +70,27 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             self._handle_force_timeout, 1000, io_loop=io_loop)
         self._force_timeout_callback.start()
 
+        # Work around a bug in libcurl 7.29.0: Some fields in the curl
+        # multi object are initialized lazily, and its destructor will
+        # segfault if it is destroyed without having been used.  Add
+        # and remove a dummy handle to make sure everything is
+        # initialized.
+        dummy_curl_handle = pycurl.Curl()
+        self._multi.add_handle(dummy_curl_handle)
+        self._multi.remove_handle(dummy_curl_handle)
+
     def close(self):
         self._force_timeout_callback.stop()
+        if self._timeout is not None:
+            self.io_loop.remove_timeout(self._timeout)
         for curl in self._curls:
             curl.close()
         self._multi.close()
         self._closed = True
         super(CurlAsyncHTTPClient, self).close()
 
-    def fetch(self, request, callback, **kwargs):
-        if not isinstance(request, HTTPRequest):
-            request = HTTPRequest(url=request, **kwargs)
-        request = _RequestProxy(request, self.defaults)
-        self._requests.append((request, stack_context.wrap(callback)))
+    def fetch_impl(self, request, callback):
+        self._requests.append((request, callback))
         self._process_queue()
         self._set_timeout(0)
 
@@ -128,7 +137,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
         while True:
             try:
                 ret, num_handles = self._socket_action(fd, action)
-            except pycurl.error, e:
+            except pycurl.error as e:
                 ret = e.args[0]
             if ret != pycurl.E_CALL_MULTI_PERFORM:
                 break
@@ -142,7 +151,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                 try:
                     ret, num_handles = self._socket_action(
                         pycurl.SOCKET_TIMEOUT, 0)
-                except pycurl.error, e:
+                except pycurl.error as e:
                     ret = e.args[0]
                 if ret != pycurl.E_CALL_MULTI_PERFORM:
                     break
@@ -173,7 +182,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             while True:
                 try:
                     ret, num_handles = self._multi.socket_all()
-                except pycurl.error, e:
+                except pycurl.error as e:
                     ret = e.args[0]
                 if ret != pycurl.E_CALL_MULTI_PERFORM:
                     break
@@ -203,7 +212,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
                     (request, callback) = self._requests.popleft()
                     curl.info = {
                         "headers": httputil.HTTPHeaders(),
-                        "buffer": cStringIO.StringIO(),
+                        "buffer": BytesIO(),
                         "request": request,
                         "callback": callback,
                         "curl_start_time": time.time(),
@@ -247,7 +256,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
             starttransfer=curl.getinfo(pycurl.STARTTRANSFER_TIME),
             total=curl.getinfo(pycurl.TOTAL_TIME),
             redirect=curl.getinfo(pycurl.REDIRECT_TIME),
-            )
+        )
         try:
             info["callback"](HTTPResponse(
                 request=info["request"], code=code, headers=info["headers"],
@@ -276,7 +285,7 @@ def _curl_create():
 
 
 def _curl_setup_request(curl, request, buffer, headers):
-    curl.setopt(pycurl.URL, utf8(request.url))
+    curl.setopt(pycurl.URL, native_str(request.url))
 
     # libcurl's magic "Expect: 100-continue" behavior causes delays
     # with servers that don't support it (which include, among others,
@@ -296,10 +305,10 @@ def _curl_setup_request(curl, request, buffer, headers):
     # Request headers may be either a regular dict or HTTPHeaders object
     if isinstance(request.headers, httputil.HTTPHeaders):
         curl.setopt(pycurl.HTTPHEADER,
-                    [utf8("%s: %s" % i) for i in request.headers.get_all()])
+                    [native_str("%s: %s" % i) for i in request.headers.get_all()])
     else:
         curl.setopt(pycurl.HTTPHEADER,
-                    [utf8("%s: %s" % i) for i in request.headers.iteritems()])
+                    [native_str("%s: %s" % i) for i in request.headers.items()])
 
     if request.header_callback:
         curl.setopt(pycurl.HEADERFUNCTION, request.header_callback)
@@ -307,15 +316,26 @@ def _curl_setup_request(curl, request, buffer, headers):
         curl.setopt(pycurl.HEADERFUNCTION,
                     lambda line: _curl_header_callback(headers, line))
     if request.streaming_callback:
-        curl.setopt(pycurl.WRITEFUNCTION, request.streaming_callback)
+        write_function = request.streaming_callback
     else:
-        curl.setopt(pycurl.WRITEFUNCTION, buffer.write)
+        write_function = buffer.write
+    if type(b'') is type(''):  # py2
+        curl.setopt(pycurl.WRITEFUNCTION, write_function)
+    else:  # py3
+        # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
+        # a fork/port.  That version has a bug in which it passes unicode
+        # strings instead of bytes to the WRITEFUNCTION.  This means that
+        # if you use a WRITEFUNCTION (which tornado always does), you cannot
+        # download arbitrary binary data.  This needs to be fixed in the
+        # ported pycurl package, but in the meantime this lambda will
+        # make it work for downloading (utf8) text.
+        curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
     curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
     curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
     curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
     curl.setopt(pycurl.TIMEOUT_MS, int(1000 * request.request_timeout))
     if request.user_agent:
-        curl.setopt(pycurl.USERAGENT, utf8(request.user_agent))
+        curl.setopt(pycurl.USERAGENT, native_str(request.user_agent))
     else:
         curl.setopt(pycurl.USERAGENT, "Mozilla/5.0 (compatible; pycurl)")
     if request.network_interface:
@@ -329,7 +349,7 @@ def _curl_setup_request(curl, request, buffer, headers):
         curl.setopt(pycurl.PROXYPORT, request.proxy_port)
         if request.proxy_username:
             credentials = '%s:%s' % (request.proxy_username,
-                    request.proxy_password)
+                                     request.proxy_password)
             curl.setopt(pycurl.PROXYUSERPWD, credentials)
     else:
         curl.setopt(pycurl.PROXY, '')
@@ -377,7 +397,7 @@ def _curl_setup_request(curl, request, buffer, headers):
 
     # Handle curl's cryptic options for every individual HTTP method
     if request.method in ("POST", "PUT"):
-        request_buffer = cStringIO.StringIO(utf8(request.body))
+        request_buffer = BytesIO(utf8(request.body))
         curl.setopt(pycurl.READFUNCTION, request_buffer.read)
         if request.method == "POST":
             def ioctl(cmd):
@@ -391,7 +411,7 @@ def _curl_setup_request(curl, request, buffer, headers):
     if request.auth_username is not None:
         userpwd = "%s:%s" % (request.auth_username, request.auth_password or '')
         curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)
-        curl.setopt(pycurl.USERPWD, utf8(userpwd))
+        curl.setopt(pycurl.USERPWD, native_str(userpwd))
         gen_log.debug("%s %s (username: %r)", request.method, request.url,
                       request.auth_username)
     else:
diff --git a/libs/tornado/epoll.c b/libs/tornado/epoll.c
deleted file mode 100755
index 9a2e3a3747f7405d82eb9e74e91c9b8f240b4d6a..0000000000000000000000000000000000000000
--- a/libs/tornado/epoll.c
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Copyright 2009 Facebook
- *
- * Licensed under the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License. You may obtain
- * a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations
- * under the License.
- */
-
-#include "Python.h"
-#include <string.h>
-#include <sys/epoll.h>
-
-#define MAX_EVENTS 24
-
-/*
- * Simple wrapper around epoll_create.
- */
-static PyObject* _epoll_create(void) {
-    int fd = epoll_create(MAX_EVENTS);
-    if (fd == -1) {
-        PyErr_SetFromErrno(PyExc_Exception);
-        return NULL;
-    }
-
-    return PyInt_FromLong(fd);
-}
-
-/*
- * Simple wrapper around epoll_ctl. We throw an exception if the call fails
- * rather than returning the error code since it is an infrequent (and likely
- * catastrophic) event when it does happen.
- */
-static PyObject* _epoll_ctl(PyObject* self, PyObject* args) {
-    int epfd, op, fd, events;
-    struct epoll_event event;
-
-    if (!PyArg_ParseTuple(args, "iiiI", &epfd, &op, &fd, &events)) {
-        return NULL;
-    }
-
-    memset(&event, 0, sizeof(event));
-    event.events = events;
-    event.data.fd = fd;
-    if (epoll_ctl(epfd, op, fd, &event) == -1) {
-        PyErr_SetFromErrno(PyExc_OSError);
-        return NULL;
-    }
-
-    Py_INCREF(Py_None);
-    return Py_None;
-}
-
-/*
- * Simple wrapper around epoll_wait. We return None if the call times out and
- * throw an exception if an error occurs. Otherwise, we return a list of
- * (fd, event) tuples.
- */
-static PyObject* _epoll_wait(PyObject* self, PyObject* args) {
-    struct epoll_event events[MAX_EVENTS];
-    int epfd, timeout, num_events, i;
-    PyObject* list;
-    PyObject* tuple;
-
-    if (!PyArg_ParseTuple(args, "ii", &epfd, &timeout)) {
-        return NULL;
-    }
-
-    Py_BEGIN_ALLOW_THREADS
-    num_events = epoll_wait(epfd, events, MAX_EVENTS, timeout);
-    Py_END_ALLOW_THREADS
-    if (num_events == -1) {
-        PyErr_SetFromErrno(PyExc_Exception);
-        return NULL;
-    }
-
-    list = PyList_New(num_events);
-    for (i = 0; i < num_events; i++) {
-        tuple = PyTuple_New(2);
-        PyTuple_SET_ITEM(tuple, 0, PyInt_FromLong(events[i].data.fd));
-        PyTuple_SET_ITEM(tuple, 1, PyInt_FromLong(events[i].events));
-        PyList_SET_ITEM(list, i, tuple);
-    }
-    return list;
-}
-
-/*
- * Our method declararations
- */
-static PyMethodDef kEpollMethods[] = {
-  {"epoll_create", (PyCFunction)_epoll_create, METH_NOARGS,
-   "Create an epoll file descriptor"},
-  {"epoll_ctl", _epoll_ctl, METH_VARARGS,
-   "Control an epoll file descriptor"},
-  {"epoll_wait", _epoll_wait, METH_VARARGS,
-   "Wait for events on an epoll file descriptor"},
-  {NULL, NULL, 0, NULL}
-};
-
-/*
- * Module initialization
- */
-PyMODINIT_FUNC initepoll(void) {
-    Py_InitModule("epoll", kEpollMethods);
-}
diff --git a/libs/tornado/escape.py b/libs/tornado/escape.py
index ed07c53d186c715c257559b35a7e01b22f1f3742..6d72532d2f6562f5de4721e77cd7f0c0dfcf86d2 100755
--- a/libs/tornado/escape.py
+++ b/libs/tornado/escape.py
@@ -20,49 +20,34 @@ Also includes a few other miscellaneous string manipulation functions that
 have crept in over time.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-import htmlentitydefs
 import re
 import sys
-import urllib
 
-# Python3 compatibility:  On python2.5, introduce the bytes alias from 2.6
-try:
-    bytes
-except Exception:
-    bytes = str
+from tornado.util import bytes_type, unicode_type, basestring_type, u
 
 try:
+    from urllib.parse import parse_qs  # py3
+except ImportError:
     from urlparse import parse_qs  # Python 2.6+
+
+try:
+    import htmlentitydefs  # py2
 except ImportError:
-    from cgi import parse_qs
+    import html.entities as htmlentitydefs  # py3
 
-# json module is in the standard library as of python 2.6; fall back to
-# simplejson if present for older versions.
 try:
-    import json
-    assert hasattr(json, "loads") and hasattr(json, "dumps")
-    _json_decode = json.loads
-    _json_encode = json.dumps
-except Exception:
-    try:
-        import simplejson
-        _json_decode = lambda s: simplejson.loads(_unicode(s))
-        _json_encode = lambda v: simplejson.dumps(v)
-    except ImportError:
-        try:
-            # For Google AppEngine
-            from django.utils import simplejson
-            _json_decode = lambda s: simplejson.loads(_unicode(s))
-            _json_encode = lambda v: simplejson.dumps(v)
-        except ImportError:
-            def _json_decode(s):
-                raise NotImplementedError(
-                    "A JSON parser is required, e.g., simplejson at "
-                    "http://pypi.python.org/pypi/simplejson/")
-            _json_encode = _json_decode
+    import urllib.parse as urllib_parse  # py3
+except ImportError:
+    import urllib as urllib_parse  # py2
 
+import json
+
+try:
+    unichr
+except NameError:
+    unichr = chr
 
 _XHTML_ESCAPE_RE = re.compile('[&<>"]')
 _XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;'}
@@ -87,12 +72,12 @@ def json_encode(value):
     # the javscript.  Some json libraries do this escaping by default,
     # although python's standard library does not, so we do it here.
     # http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
-    return _json_encode(recursive_unicode(value)).replace("</", "<\\/")
+    return json.dumps(recursive_unicode(value)).replace("</", "<\\/")
 
 
 def json_decode(value):
     """Returns Python objects for the given JSON string."""
-    return _json_decode(to_basestring(value))
+    return json.loads(to_basestring(value))
 
 
 def squeeze(value):
@@ -102,7 +87,7 @@ def squeeze(value):
 
 def url_escape(value):
     """Returns a valid URL-encoded version of the given value."""
-    return urllib.quote_plus(utf8(value))
+    return urllib_parse.quote_plus(utf8(value))
 
 # python 3 changed things around enough that we need two separate
 # implementations of url_unescape.  We also need our own implementation
@@ -117,9 +102,9 @@ if sys.version_info[0] < 3:
         the result is a unicode string in the specified encoding.
         """
         if encoding is None:
-            return urllib.unquote_plus(utf8(value))
+            return urllib_parse.unquote_plus(utf8(value))
         else:
-            return unicode(urllib.unquote_plus(utf8(value)), encoding)
+            return unicode_type(urllib_parse.unquote_plus(utf8(value)), encoding)
 
     parse_qs_bytes = parse_qs
 else:
@@ -132,9 +117,9 @@ else:
         the result is a unicode string in the specified encoding.
         """
         if encoding is None:
-            return urllib.parse.unquote_to_bytes(value)
+            return urllib_parse.unquote_to_bytes(value)
         else:
-            return urllib.unquote_plus(to_basestring(value), encoding=encoding)
+            return urllib_parse.unquote_plus(to_basestring(value), encoding=encoding)
 
     def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False):
         """Parses a query string like urlparse.parse_qs, but returns the
@@ -149,12 +134,12 @@ else:
         result = parse_qs(qs, keep_blank_values, strict_parsing,
                           encoding='latin1', errors='strict')
         encoded = {}
-        for k, v in result.iteritems():
+        for k, v in result.items():
             encoded[k] = [i.encode('latin1') for i in v]
         return encoded
 
 
-_UTF8_TYPES = (bytes, type(None))
+_UTF8_TYPES = (bytes_type, type(None))
 
 
 def utf8(value):
@@ -165,10 +150,10 @@ def utf8(value):
     """
     if isinstance(value, _UTF8_TYPES):
         return value
-    assert isinstance(value, unicode)
+    assert isinstance(value, unicode_type)
     return value.encode("utf-8")
 
-_TO_UNICODE_TYPES = (unicode, type(None))
+_TO_UNICODE_TYPES = (unicode_type, type(None))
 
 
 def to_unicode(value):
@@ -179,7 +164,7 @@ def to_unicode(value):
     """
     if isinstance(value, _TO_UNICODE_TYPES):
         return value
-    assert isinstance(value, bytes)
+    assert isinstance(value, bytes_type)
     return value.decode("utf-8")
 
 # to_unicode was previously named _unicode not because it was private,
@@ -188,12 +173,12 @@ _unicode = to_unicode
 
 # When dealing with the standard library across python 2 and 3 it is
 # sometimes useful to have a direct conversion to the native string type
-if str is unicode:
+if str is unicode_type:
     native_str = to_unicode
 else:
     native_str = utf8
 
-_BASESTRING_TYPES = (basestring, type(None))
+_BASESTRING_TYPES = (basestring_type, type(None))
 
 
 def to_basestring(value):
@@ -207,7 +192,7 @@ def to_basestring(value):
     """
     if isinstance(value, _BASESTRING_TYPES):
         return value
-    assert isinstance(value, bytes)
+    assert isinstance(value, bytes_type)
     return value.decode("utf-8")
 
 
@@ -217,12 +202,12 @@ def recursive_unicode(obj):
     Supports lists, tuples, and dictionaries.
     """
     if isinstance(obj, dict):
-        return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.iteritems())
+        return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items())
     elif isinstance(obj, list):
         return list(recursive_unicode(i) for i in obj)
     elif isinstance(obj, tuple):
         return tuple(recursive_unicode(i) for i in obj)
-    elif isinstance(obj, bytes):
+    elif isinstance(obj, bytes_type):
         return to_unicode(obj)
     else:
         return obj
@@ -232,7 +217,9 @@ def recursive_unicode(obj):
 # but it gets all exponential on certain patterns (such as too many trailing
 # dots), causing the regex matcher to never return.
 # This regex should avoid those problems.
-_URL_RE = re.compile(ur"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)""")
+# Use to_unicode instead of tornado.util.u - we don't want backslashes getting
+# processed as escapes.
+_URL_RE = re.compile(to_unicode(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""))
 
 
 def linkify(text, shorten=False, extra_params="",
@@ -302,7 +289,7 @@ def linkify(text, shorten=False, extra_params="",
                 # (no more slug, etc), so it really just provides a little
                 # extra indication of shortening.
                 url = url[:proto_len] + parts[0] + "/" + \
-                        parts[1][:8].split('?')[0].split('.')[0]
+                    parts[1][:8].split('?')[0].split('.')[0]
 
             if len(url) > max_len * 1.5:  # still too long
                 url = url[:max_len]
@@ -321,7 +308,7 @@ def linkify(text, shorten=False, extra_params="",
                     # have a status bar, such as Safari by default)
                     params += ' title="%s"' % href
 
-        return u'<a href="%s"%s>%s</a>' % (href, params, url)
+        return u('<a href="%s"%s>%s</a>') % (href, params, url)
 
     # First HTML-escape so that our strings are all safe.
     # The regex is modified to avoid character entites other than &amp; so
@@ -344,7 +331,7 @@ def _convert_entity(m):
 
 def _build_unicode_map():
     unicode_map = {}
-    for name, value in htmlentitydefs.name2codepoint.iteritems():
+    for name, value in htmlentitydefs.name2codepoint.items():
         unicode_map[name] = unichr(value)
     return unicode_map
 
diff --git a/libs/tornado/gen.py b/libs/tornado/gen.py
index 88ded2effbede315f03d886c55330af14a5e734d..8d9d8935cbe99e974c03bfcfc60b80f868d8abfa 100755
--- a/libs/tornado/gen.py
+++ b/libs/tornado/gen.py
@@ -62,10 +62,11 @@ it was called with one argument, the result is that argument.  If it was
 called with more than one argument or any keyword arguments, the result
 is an `Arguments` object, which is a named tuple ``(args, kwargs)``.
 """
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
+import collections
 import functools
-import operator
+import itertools
 import sys
 import types
 
@@ -276,15 +277,23 @@ class Multi(YieldPoint):
     a list of ``YieldPoints``.
     """
     def __init__(self, children):
-        assert all(isinstance(i, YieldPoint) for i in children)
-        self.children = children
+        self.children = []
+        for i in children:
+            if isinstance(i, Future):
+                i = YieldFuture(i)
+            self.children.append(i)
+        assert all(isinstance(i, YieldPoint) for i in self.children)
+        self.unfinished_children = set(self.children)
 
     def start(self, runner):
         for i in self.children:
             i.start(runner)
 
     def is_ready(self):
-        return all(i.is_ready() for i in self.children)
+        finished = list(itertools.takewhile(
+                lambda i: i.is_ready(), self.unfinished_children))
+        self.unfinished_children.difference_update(finished)
+        return not self.unfinished_children
 
     def get_result(self):
         return [i.get_result() for i in self.children]
@@ -305,10 +314,12 @@ class Runner(object):
     """Internal implementation of `tornado.gen.engine`.
 
     Maintains information about pending callbacks and their results.
+
+    ``final_callback`` is run after the generator exits.
     """
-    def __init__(self, gen, deactivate_stack_context):
+    def __init__(self, gen, final_callback):
         self.gen = gen
-        self.deactivate_stack_context = deactivate_stack_context
+        self.final_callback = final_callback
         self.yield_point = _NullYieldPoint()
         self.pending_callbacks = set()
         self.results = {}
@@ -373,16 +384,15 @@ class Runner(object):
                         raise LeakedCallbackError(
                             "finished without waiting for callbacks %r" %
                             self.pending_callbacks)
-                    self.deactivate_stack_context()
-                    self.deactivate_stack_context = None
+                    self.final_callback()
+                    self.final_callback = None
                     return
                 except Exception:
                     self.finished = True
                     raise
                 if isinstance(yielded, list):
                     yielded = Multi(yielded)
-                if isinstance(yielded, Future):
-                    # TODO: lists of futures
+                elif isinstance(yielded, Future):
                     yielded = YieldFuture(yielded)
                 if isinstance(yielded, YieldPoint):
                     self.yield_point = yielded
@@ -414,20 +424,4 @@ class Runner(object):
         else:
             return False
 
-# in python 2.6+ this could be a collections.namedtuple
-
-
-class Arguments(tuple):
-    """The result of a yield expression whose callback had more than one
-    argument (or keyword arguments).
-
-    The `Arguments` object can be used as a tuple ``(args, kwargs)``
-    or an object with attributes ``args`` and ``kwargs``.
-    """
-    __slots__ = ()
-
-    def __new__(cls, args, kwargs):
-        return tuple.__new__(cls, (args, kwargs))
-
-    args = property(operator.itemgetter(0))
-    kwargs = property(operator.itemgetter(1))
+Arguments = collections.namedtuple('Arguments', ['args', 'kwargs'])
diff --git a/libs/tornado/httpclient.py b/libs/tornado/httpclient.py
index 7359a76cefb05591ace17beadf3abe0a7ba207bf..1b64550493d5359e2fedb8f3bde3d170710f5870 100755
--- a/libs/tornado/httpclient.py
+++ b/libs/tornado/httpclient.py
@@ -29,14 +29,12 @@ you use a recent version of ``libcurl`` and ``pycurl``.  Currently the minimum
 supported version is 7.18.2, and the recommended version is 7.21.1 or newer.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-import calendar
-import email.utils
-import httplib
 import time
 import weakref
 
+from tornado.concurrent import Future
 from tornado.escape import utf8
 from tornado import httputil, stack_context
 from tornado.ioloop import IOLoop
@@ -134,7 +132,7 @@ class AsyncHTTPClient(Configurable):
     def _async_clients(cls):
         attr_name = '_async_client_dict_' + cls.__name__
         if not hasattr(cls, attr_name):
-            setattr(cls, attr_name,  weakref.WeakKeyDictionary())
+            setattr(cls, attr_name, weakref.WeakKeyDictionary())
         return getattr(cls, attr_name)
 
     def __new__(cls, io_loop=None, force_instance=False, **kwargs):
@@ -147,6 +145,12 @@ class AsyncHTTPClient(Configurable):
             cls._async_clients()[io_loop] = instance
         return instance
 
+    def initialize(self, io_loop, defaults=None):
+        self.io_loop = io_loop
+        self.defaults = dict(HTTPRequest._DEFAULTS)
+        if defaults is not None:
+            self.defaults.update(defaults)
+
     def close(self):
         """Destroys this http client, freeing any file descriptors used.
         Not needed in normal use, but may be helpful in unittests that
@@ -156,7 +160,7 @@ class AsyncHTTPClient(Configurable):
         if self._async_clients().get(self.io_loop) is self:
             del self._async_clients()[self.io_loop]
 
-    def fetch(self, request, callback, **kwargs):
+    def fetch(self, request, callback=None, **kwargs):
         """Executes a request, calling callback with an `HTTPResponse`.
 
         The request may be either a string URL or an `HTTPRequest` object.
@@ -168,6 +172,37 @@ class AsyncHTTPClient(Configurable):
         encountered during the request. You can call response.rethrow() to
         throw the exception (if any) in the callback.
         """
+        if not isinstance(request, HTTPRequest):
+            request = HTTPRequest(url=request, **kwargs)
+        # We may modify this (to add Host, Accept-Encoding, etc),
+        # so make sure we don't modify the caller's object.  This is also
+        # where normal dicts get converted to HTTPHeaders objects.
+        request.headers = httputil.HTTPHeaders(request.headers)
+        request = _RequestProxy(request, self.defaults)
+        future = Future()
+        if callback is not None:
+            callback = stack_context.wrap(callback)
+            def handle_future(future):
+                exc = future.exception()
+                if isinstance(exc, HTTPError) and exc.response is not None:
+                    response = exc.response
+                elif exc is not None:
+                    response = HTTPResponse(
+                        request, 599, error=exc,
+                        request_time=time.time() - request.start_time)
+                else:
+                    response = future.result()
+                self.io_loop.add_callback(callback, response)
+            future.add_done_callback(handle_future)
+        def handle_response(response):
+            if response.error:
+                future.set_exception(response.error)
+            else:
+                future.set_result(response)
+        self.fetch_impl(request, handle_response)
+        return future
+
+    def fetch_impl(self, request, callback):
         raise NotImplementedError()
 
     @classmethod
@@ -280,9 +315,8 @@ class HTTPRequest(object):
         if headers is None:
             headers = httputil.HTTPHeaders()
         if if_modified_since:
-            timestamp = calendar.timegm(if_modified_since.utctimetuple())
-            headers["If-Modified-Since"] = email.utils.formatdate(
-                timestamp, localtime=False, usegmt=True)
+            headers["If-Modified-Since"] = httputil.format_timestamp(
+                if_modified_since)
         self.proxy_host = proxy_host
         self.proxy_port = proxy_port
         self.proxy_username = proxy_username
@@ -346,7 +380,7 @@ class HTTPResponse(object):
                  time_info=None, reason=None):
         self.request = request
         self.code = code
-        self.reason = reason or httplib.responses.get(code, "Unknown")
+        self.reason = reason or httputil.responses.get(code, "Unknown")
         if headers is not None:
             self.headers = headers
         else:
@@ -383,7 +417,7 @@ class HTTPResponse(object):
             raise self.error
 
     def __repr__(self):
-        args = ",".join("%s=%r" % i for i in self.__dict__.iteritems())
+        args = ",".join("%s=%r" % i for i in sorted(self.__dict__.items()))
         return "%s(%s)" % (self.__class__.__name__, args)
 
 
@@ -403,10 +437,11 @@ class HTTPError(Exception):
     """
     def __init__(self, code, message=None, response=None):
         self.code = code
-        message = message or httplib.responses.get(code, "Unknown")
+        message = message or httputil.responses.get(code, "Unknown")
         self.response = response
         Exception.__init__(self, "HTTP %d: %s" % (self.code, message))
 
+
 class _RequestProxy(object):
     """Combines an object with a dictionary of defaults.
 
@@ -440,15 +475,15 @@ def main():
                                     follow_redirects=options.follow_redirects,
                                     validate_cert=options.validate_cert,
                                     )
-        except HTTPError, e:
+        except HTTPError as e:
             if e.response is not None:
                 response = e.response
             else:
                 raise
         if options.print_headers:
-            print response.headers
+            print(response.headers)
         if options.print_body:
-            print response.body
+            print(response.body)
     client.close()
 
 if __name__ == "__main__":
diff --git a/libs/tornado/httpserver.py b/libs/tornado/httpserver.py
index af441d93061235c6d8a813793990036525bdcf6c..24d5e6b3d0bcaf659ba9b053d508815589c5cf93 100755
--- a/libs/tornado/httpserver.py
+++ b/libs/tornado/httpserver.py
@@ -24,24 +24,24 @@ This module also defines the `HTTPRequest` class which is exposed via
 `tornado.web.RequestHandler.request`.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-import Cookie
 import socket
+import ssl
 import time
 
 from tornado.escape import native_str, parse_qs_bytes
 from tornado import httputil
 from tornado import iostream
 from tornado.log import gen_log
-from tornado.netutil import TCPServer
+from tornado.tcpserver import TCPServer
 from tornado import stack_context
-from tornado.util import b, bytes_type
+from tornado.util import bytes_type
 
 try:
-    import ssl  # Python 2.6+
+    import Cookie  # py2
 except ImportError:
-    ssl = None
+    import http.cookies as Cookie  # py3
 
 
 class HTTPServer(TCPServer):
@@ -95,7 +95,8 @@ class HTTPServer(TCPServer):
     `HTTPServer` can serve SSL traffic with Python 2.6+ and OpenSSL.
     To make this server serve SSL traffic, send the ssl_options dictionary
     argument with the arguments required for the `ssl.wrap_socket` method,
-    including "certfile" and "keyfile"::
+    including "certfile" and "keyfile".  In Python 3.2+ you can pass
+    an `ssl.SSLContext` object instead of a dict::
 
        HTTPServer(applicaton, ssl_options={
            "certfile": os.path.join(data_dir, "mydomain.crt"),
@@ -103,9 +104,9 @@ class HTTPServer(TCPServer):
        })
 
     `HTTPServer` initialization follows one of three patterns (the
-    initialization methods are defined on `tornado.netutil.TCPServer`):
+    initialization methods are defined on `tornado.tcpserver.TCPServer`):
 
-    1. `~tornado.netutil.TCPServer.listen`: simple single-process::
+    1. `~tornado.tcpserver.TCPServer.listen`: simple single-process::
 
             server = HTTPServer(app)
             server.listen(8888)
@@ -114,7 +115,7 @@ class HTTPServer(TCPServer):
        In many cases, `tornado.web.Application.listen` can be used to avoid
        the need to explicitly create the `HTTPServer`.
 
-    2. `~tornado.netutil.TCPServer.bind`/`~tornado.netutil.TCPServer.start`:
+    2. `~tornado.tcpserver.TCPServer.bind`/`~tornado.tcpserver.TCPServer.start`:
        simple multi-process::
 
             server = HTTPServer(app)
@@ -126,7 +127,7 @@ class HTTPServer(TCPServer):
        to the `HTTPServer` constructor.  `start` will always start
        the server on the default singleton `IOLoop`.
 
-    3. `~tornado.netutil.TCPServer.add_sockets`: advanced multi-process::
+    3. `~tornado.tcpserver.TCPServer.add_sockets`: advanced multi-process::
 
             sockets = tornado.netutil.bind_sockets(8888)
             tornado.process.fork_processes(0)
@@ -171,6 +172,10 @@ class HTTPConnection(object):
                  xheaders=False, protocol=None):
         self.stream = stream
         self.address = address
+        # Save the socket's address family now so we know how to
+        # interpret self.address even after the stream is closed
+        # and its socket attribute replaced with None.
+        self.address_family = stream.socket.family
         self.request_callback = request_callback
         self.no_keep_alive = no_keep_alive
         self.xheaders = xheaders
@@ -180,7 +185,19 @@ class HTTPConnection(object):
         # Save stack context here, outside of any request.  This keeps
         # contexts from one request from leaking into the next.
         self._header_callback = stack_context.wrap(self._on_headers)
-        self.stream.read_until(b("\r\n\r\n"), self._header_callback)
+        self.stream.read_until(b"\r\n\r\n", self._header_callback)
+        self._write_callback = None
+        self._close_callback = None
+
+    def set_close_callback(self, callback):
+        self._close_callback = stack_context.wrap(callback)
+        self.stream.set_close_callback(self._on_connection_close)
+
+    def _on_connection_close(self):
+        callback = self._close_callback
+        self._close_callback = None
+        callback()
+        # Delete any unfinished callbacks to break up reference cycles.
         self._write_callback = None
 
     def close(self):
@@ -241,7 +258,7 @@ class HTTPConnection(object):
             # Use a try/except instead of checking stream.closed()
             # directly, because in some cases the stream doesn't discover
             # that it's closed until you try to read from it.
-            self.stream.read_until(b("\r\n\r\n"), self._header_callback)
+            self.stream.read_until(b"\r\n\r\n", self._header_callback)
         except iostream.StreamClosedError:
             self.close()
 
@@ -259,10 +276,7 @@ class HTTPConnection(object):
             headers = httputil.HTTPHeaders.parse(data[eol:])
 
             # HTTPRequest wants an IP, not a full socket address
-            if getattr(self.stream.socket, 'family', socket.AF_INET) in (
-                socket.AF_INET, socket.AF_INET6):
-                # Jython 2.5.2 doesn't have the socket.family attribute,
-                # so just assume IP in that case.
+            if self.address_family in (socket.AF_INET, socket.AF_INET6):
                 remote_ip = self.address[0]
             else:
                 # Unix (or other) socket; fake the remote address
@@ -278,12 +292,12 @@ class HTTPConnection(object):
                 if content_length > self.stream.max_buffer_size:
                     raise _BadRequestException("Content-Length too long")
                 if headers.get("Expect") == "100-continue":
-                    self.stream.write(b("HTTP/1.1 100 (Continue)\r\n\r\n"))
+                    self.stream.write(b"HTTP/1.1 100 (Continue)\r\n\r\n")
                 self.stream.read_bytes(content_length, self._on_request_body)
                 return
 
             self.request_callback(self._request)
-        except _BadRequestException, e:
+        except _BadRequestException as e:
             gen_log.info("Malformed HTTP request from %s: %s",
                          self.address[0], e)
             self.close()
@@ -484,7 +498,7 @@ class HTTPRequest(object):
                                      socket.SOCK_STREAM,
                                      0, socket.AI_NUMERICHOST)
             return bool(res)
-        except socket.gaierror, e:
+        except socket.gaierror as e:
             if e.args[0] == socket.EAI_NONAME:
                 return False
             raise
diff --git a/libs/tornado/httputil.py b/libs/tornado/httputil.py
index a7d543cc23571ae45f11c2902bd3f9d87a4879d2..94b8ba4f28987470c9443bcef36044aecd6edbe1 100755
--- a/libs/tornado/httputil.py
+++ b/libs/tornado/httputil.py
@@ -16,14 +16,26 @@
 
 """HTTP utility code shared by clients and servers."""
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-import urllib
+import datetime
+import numbers
 import re
+import time
 
 from tornado.escape import native_str, parse_qs_bytes, utf8
 from tornado.log import gen_log
-from tornado.util import b, ObjectDict
+from tornado.util import ObjectDict
+
+try:
+    from httplib import responses  # py2
+except ImportError:
+    from http.client import responses  # py3
+
+try:
+    from urllib import urlencode  # py2
+except ImportError:
+    from urllib.parse import urlencode  # py3
 
 
 class HTTPHeaders(dict):
@@ -34,7 +46,7 @@ class HTTPHeaders(dict):
     value per key, with multiple values joined by a comma.
 
     >>> h = HTTPHeaders({"content-type": "text/html"})
-    >>> h.keys()
+    >>> list(h.keys())
     ['Content-Type']
     >>> h["Content-Type"]
     'text/html'
@@ -47,7 +59,7 @@ class HTTPHeaders(dict):
     ['A=B', 'C=D']
 
     >>> for (k,v) in sorted(h.get_all()):
-    ...    print '%s: %s' % (k,v)
+    ...    print('%s: %s' % (k,v))
     ...
     Content-Type: text/html
     Set-Cookie: A=B
@@ -60,7 +72,7 @@ class HTTPHeaders(dict):
         self._as_list = {}
         self._last_key = None
         if (len(args) == 1 and len(kwargs) == 0 and
-            isinstance(args[0], HTTPHeaders)):
+                isinstance(args[0], HTTPHeaders)):
             # Copy constructor
             for k, v in args[0].get_all():
                 self.add(k, v)
@@ -76,7 +88,9 @@ class HTTPHeaders(dict):
         self._last_key = norm_name
         if norm_name in self:
             # bypass our override of __setitem__ since it modifies _as_list
-            dict.__setitem__(self, norm_name, self[norm_name] + ',' + value)
+            dict.__setitem__(self, norm_name,
+                             native_str(self[norm_name]) + ',' +
+                             native_str(value))
             self._as_list[norm_name].append(value)
         else:
             self[norm_name] = value
@@ -92,8 +106,8 @@ class HTTPHeaders(dict):
         If a header has multiple values, multiple pairs will be
         returned with the same name.
         """
-        for name, list in self._as_list.iteritems():
-            for value in list:
+        for name, values in self._as_list.items():
+            for value in values:
                 yield (name, value)
 
     def parse_line(self, line):
@@ -119,7 +133,7 @@ class HTTPHeaders(dict):
         """Returns a dictionary from HTTP header text.
 
         >>> h = HTTPHeaders.parse("Content-Type: text/html\\r\\nContent-Length: 42\\r\\n")
-        >>> sorted(h.iteritems())
+        >>> sorted(h.items())
         [('Content-Length', '42'), ('Content-Type', 'text/html')]
         """
         h = cls()
@@ -152,7 +166,7 @@ class HTTPHeaders(dict):
 
     def update(self, *args, **kwargs):
         # dict.update bypasses our __setitem__
-        for k, v in dict(*args, **kwargs).iteritems():
+        for k, v in dict(*args, **kwargs).items():
             self[k] = v
 
     def copy(self):
@@ -191,7 +205,7 @@ def url_concat(url, args):
         return url
     if url[-1] not in ('?', '&'):
         url += '&' if ('?' in url) else '?'
-    return url + urllib.urlencode(args)
+    return url + urlencode(args)
 
 
 class HTTPFile(ObjectDict):
@@ -216,7 +230,7 @@ def parse_body_arguments(content_type, body, arguments, files):
     """
     if content_type.startswith("application/x-www-form-urlencoded"):
         uri_arguments = parse_qs_bytes(native_str(body))
-        for name, values in uri_arguments.iteritems():
+        for name, values in uri_arguments.items():
             values = [v for v in values if v]
             if values:
                 arguments.setdefault(name, []).extend(values)
@@ -243,24 +257,24 @@ def parse_multipart_form_data(boundary, data, arguments, files):
     # xmpp).  I think we're also supposed to handle backslash-escapes
     # here but I'll save that until we see a client that uses them
     # in the wild.
-    if boundary.startswith(b('"')) and boundary.endswith(b('"')):
+    if boundary.startswith(b'"') and boundary.endswith(b'"'):
         boundary = boundary[1:-1]
-    final_boundary_index = data.rfind(b("--") + boundary + b("--"))
+    final_boundary_index = data.rfind(b"--" + boundary + b"--")
     if final_boundary_index == -1:
         gen_log.warning("Invalid multipart/form-data: no final boundary")
         return
-    parts = data[:final_boundary_index].split(b("--") + boundary + b("\r\n"))
+    parts = data[:final_boundary_index].split(b"--" + boundary + b"\r\n")
     for part in parts:
         if not part:
             continue
-        eoh = part.find(b("\r\n\r\n"))
+        eoh = part.find(b"\r\n\r\n")
         if eoh == -1:
             gen_log.warning("multipart/form-data missing headers")
             continue
         headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
         disp_header = headers.get("Content-Disposition", "")
         disposition, disp_params = _parse_header(disp_header)
-        if disposition != "form-data" or not part.endswith(b("\r\n")):
+        if disposition != "form-data" or not part.endswith(b"\r\n"):
             gen_log.warning("Invalid multipart/form-data")
             continue
         value = part[eoh + 4:-2]
@@ -277,6 +291,26 @@ def parse_multipart_form_data(boundary, data, arguments, files):
             arguments.setdefault(name, []).append(value)
 
 
+def format_timestamp(ts):
+    """Formats a timestamp in the format used by HTTP.
+
+    The argument may be a numeric timestamp as returned by `time.time()`,
+    a time tuple as returned by `time.gmtime()`, or a `datetime.datetime`
+    object.
+
+    >>> format_timestamp(1359312200)
+    'Sun, 27 Jan 2013 18:43:20 GMT'
+    """
+    if isinstance(ts, (tuple, time.struct_time)):
+        pass
+    elif isinstance(ts, datetime.datetime):
+        ts = ts.utctimetuple()
+    elif isinstance(ts, numbers.Real):
+        ts = time.gmtime(ts)
+    else:
+        raise TypeError("unknown timestamp type: %r" % ts)
+    return time.strftime("%a, %d %b %Y %H:%M:%S GMT", ts)
+
 # _parseparam and _parse_header are copied and modified from python2.7's cgi.py
 # The original 2.7 version of this code did not correctly support some
 # combinations of semicolons and double quotes.
@@ -300,7 +334,7 @@ def _parse_header(line):
 
     """
     parts = _parseparam(';' + line)
-    key = parts.next()
+    key = next(parts)
     pdict = {}
     for p in parts:
         i = p.find('=')
diff --git a/libs/tornado/ioloop.py b/libs/tornado/ioloop.py
index 7b320e59968edf02f3f4407e8c7cb92b3c2e446e..4062661b2ab550c68625fe8611134398247fd089 100755
--- a/libs/tornado/ioloop.py
+++ b/libs/tornado/ioloop.py
@@ -26,17 +26,16 @@ In addition to I/O events, the `IOLoop` can also schedule time-based events.
 `IOLoop.add_timeout` is a non-blocking alternative to `time.sleep`.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import datetime
 import errno
 import functools
 import heapq
 import logging
+import numbers
 import os
 import select
-import sys
-import thread
 import threading
 import time
 import traceback
@@ -56,6 +55,11 @@ try:
 except ImportError:
     futures = None
 
+try:
+    import thread  # py2
+except ImportError:
+    import _thread as thread  # py3
+
 from tornado.platform.auto import set_close_exec, Waker
 
 
@@ -66,7 +70,7 @@ class IOLoop(Configurable):
     2.6+) if they are available, or else we fall back on select(). If
     you are implementing a system that needs to handle thousands of
     simultaneous connections, you should use a system that supports either
-    epoll or queue.
+    epoll or kqueue.
 
     Example usage for a simple TCP server::
 
@@ -177,13 +181,9 @@ class IOLoop(Configurable):
 
     @classmethod
     def configurable_default(cls):
-        if hasattr(select, "epoll") or sys.platform.startswith('linux'):
-            try:
-                from tornado.platform.epoll import EPollIOLoop
-                return EPollIOLoop
-            except ImportError:
-                gen_log.warning("unable to import EPollIOLoop, falling back to SelectIOLoop")
-                pass
+        if hasattr(select, "epoll"):
+            from tornado.platform.epoll import EPollIOLoop
+            return EPollIOLoop
         if hasattr(select, "kqueue"):
             # Python 2.6+ on BSD or Mac
             from tornado.platform.kqueue import KQueueIOLoop
@@ -194,7 +194,7 @@ class IOLoop(Configurable):
     def initialize(self):
         pass
 
-    def close(self, all_fds = False):
+    def close(self, all_fds=False):
         """Closes the IOLoop, freeing any resources used.
 
         If ``all_fds`` is true, all file descriptors registered on the
@@ -252,8 +252,8 @@ class IOLoop(Configurable):
         For use with set_blocking_signal_threshold.
         """
         gen_log.warning('IOLoop blocked for %f seconds in\n%s',
-                         self._blocking_signal_threshold,
-                         ''.join(traceback.format_stack(frame)))
+                        self._blocking_signal_threshold,
+                        ''.join(traceback.format_stack(frame)))
 
     def start(self):
         """Starts the I/O loop.
@@ -264,7 +264,8 @@ class IOLoop(Configurable):
         raise NotImplementedError()
 
     def stop(self):
-        """Stop the loop after the current event loop iteration is complete.
+        """Stop the I/O loop.
+
         If the event loop is not currently running, the next call to start()
         will return immediately.
 
@@ -280,6 +281,8 @@ class IOLoop(Configurable):
 
         Note that even after `stop` has been called, the IOLoop is not
         completely stopped until `IOLoop.start` has also returned.
+        Some work that was scheduled before the call to `stop` may still
+        be run before the IOLoop shuts down.
         """
         raise NotImplementedError()
 
@@ -316,7 +319,9 @@ class IOLoop(Configurable):
     def remove_timeout(self, timeout):
         """Cancels a pending timeout.
 
-        The argument is a handle as returned by add_timeout.
+        The argument is a handle as returned by add_timeout.  It is
+        safe to call `remove_timeout` even if the callback has already
+        been run.
         """
         raise NotImplementedError()
 
@@ -351,6 +356,7 @@ class IOLoop(Configurable):
         _FUTURE_TYPES = (futures.Future, DummyFuture)
     else:
         _FUTURE_TYPES = DummyFuture
+
     def add_future(self, future, callback):
         """Schedules a callback on the IOLoop when the given future is finished.
 
@@ -381,8 +387,7 @@ class IOLoop(Configurable):
         The exception itself is not passed explicitly, but is available
         in sys.exc_info.
         """
-        app_log.error("Exception in callback %r", callback, exc_info = True)
-
+        app_log.error("Exception in callback %r", callback, exc_info=True)
 
 
 class PollIOLoop(IOLoop):
@@ -392,7 +397,7 @@ class PollIOLoop(IOLoop):
     (Linux), `tornado.platform.kqueue.KQueueIOLoop` (BSD and Mac), or
     `tornado.platform.select.SelectIOLoop` (all platforms).
     """
-    def initialize(self, impl, time_func = None):
+    def initialize(self, impl, time_func=None):
         super(PollIOLoop, self).initialize()
         self._impl = impl
         if hasattr(self._impl, 'fileno'):
@@ -416,16 +421,16 @@ class PollIOLoop(IOLoop):
                          lambda fd, events: self._waker.consume(),
                          self.READ)
 
-    def close(self, all_fds = False):
+    def close(self, all_fds=False):
         with self._callback_lock:
             self._closing = True
         self.remove_handler(self._waker.fileno())
         if all_fds:
-            for fd in self._handlers.keys()[:]:
+            for fd in self._handlers.keys():
                 try:
                     os.close(fd)
                 except Exception:
-                    gen_log.debug("error closing fd %s", fd, exc_info = True)
+                    gen_log.debug("error closing fd %s", fd, exc_info=True)
         self._waker.close()
         self._impl.close()
 
@@ -442,12 +447,12 @@ class PollIOLoop(IOLoop):
         try:
             self._impl.unregister(fd)
         except Exception:
-            gen_log.debug("Error deleting fd from IOLoop", exc_info = True)
+            gen_log.debug("Error deleting fd from IOLoop", exc_info=True)
 
     def set_blocking_signal_threshold(self, seconds, action):
         if not hasattr(signal, "setitimer"):
             gen_log.error("set_blocking_signal_threshold requires a signal module "
-                           "with the setitimer method")
+                          "with the setitimer method")
             return
         self._blocking_signal_threshold = seconds
         if seconds is not None:
@@ -500,7 +505,7 @@ class PollIOLoop(IOLoop):
                     # IOLoop is just started once at the beginning.
                     signal.set_wakeup_fd(old_wakeup_fd)
                     old_wakeup_fd = None
-            except ValueError: # non-main thread
+            except ValueError:  # non-main thread
                 pass
 
         while True:
@@ -543,7 +548,7 @@ class PollIOLoop(IOLoop):
 
             try:
                 event_pairs = self._impl.poll(poll_timeout)
-            except Exception, e:
+            except Exception as e:
                 # Depending on python version and IOLoop implementation,
                 # different exception types may be thrown and there are
                 # two ways EINTR might be signaled:
@@ -568,18 +573,17 @@ class PollIOLoop(IOLoop):
             while self._events:
                 fd, events = self._events.popitem()
                 try:
-                    hdlr = self._handlers.get(fd)
-                    if hdlr: hdlr(fd, events)
-                except (OSError, IOError), e:
+                    self._handlers[fd](fd, events)
+                except (OSError, IOError) as e:
                     if e.args[0] == errno.EPIPE:
                         # Happens when the client closes the connection
                         pass
                     else:
                         app_log.error("Exception in I/O handler for fd %s",
-                                      fd, exc_info = True)
+                                      fd, exc_info=True)
                 except Exception:
                     app_log.error("Exception in I/O handler for fd %s",
-                                  fd, exc_info = True)
+                                  fd, exc_info=True)
         # reset the stopped flag so another start/stop pair can be issued
         self._stopped = False
         if self._blocking_signal_threshold is not None:
@@ -615,7 +619,7 @@ class PollIOLoop(IOLoop):
                 raise RuntimeError("IOLoop is closing")
             list_empty = not self._callbacks
             self._callbacks.append(functools.partial(
-                    stack_context.wrap(callback), *args, **kwargs))
+                stack_context.wrap(callback), *args, **kwargs))
         if list_empty and thread.get_ident() != self._thread_ident:
             # If we're in the IOLoop's thread, we know it's not currently
             # polling.  If we're not, and we added the first callback to an
@@ -641,7 +645,7 @@ class PollIOLoop(IOLoop):
                 # either the old or new version of self._callbacks,
                 # but either way will work.
                 self._callbacks.append(functools.partial(
-                        stack_context.wrap(callback), *args, **kwargs))
+                    stack_context.wrap(callback), *args, **kwargs))
 
 
 class _Timeout(object):
@@ -651,7 +655,7 @@ class _Timeout(object):
     __slots__ = ['deadline', 'callback']
 
     def __init__(self, deadline, callback, io_loop):
-        if isinstance(deadline, (int, long, float)):
+        if isinstance(deadline, numbers.Real):
             self.deadline = deadline
         elif isinstance(deadline, datetime.timedelta):
             self.deadline = io_loop.time() + _Timeout.timedelta_to_seconds(deadline)
@@ -684,7 +688,7 @@ class PeriodicCallback(object):
 
     `start` must be called after the PeriodicCallback is created.
     """
-    def __init__(self, callback, callback_time, io_loop = None):
+    def __init__(self, callback, callback_time, io_loop=None):
         self.callback = callback
         if callback_time <= 0:
             raise ValueError("Periodic callback must have a positive callback_time")
@@ -712,7 +716,7 @@ class PeriodicCallback(object):
         try:
             self.callback()
         except Exception:
-            app_log.error("Error in periodic callback", exc_info = True)
+            app_log.error("Error in periodic callback", exc_info=True)
         self._schedule_next()
 
     def _schedule_next(self):
diff --git a/libs/tornado/iostream.py b/libs/tornado/iostream.py
index 6eec2a357429fe536aeb44977c67ae1da508c31c..86cd68a89c59ad1eff2a88ee27a6c6068b4fef03 100755
--- a/libs/tornado/iostream.py
+++ b/libs/tornado/iostream.py
@@ -24,33 +24,33 @@ Contents:
 * `PipeIOStream`: Pipe-based IOStream implementation.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import collections
 import errno
+import numbers
 import os
 import socket
+import ssl
 import sys
 import re
 
 from tornado import ioloop
 from tornado.log import gen_log, app_log
+from tornado.netutil import ssl_wrap_socket, ssl_match_hostname, SSLCertificateError
 from tornado import stack_context
-from tornado.util import b, bytes_type
-
-try:
-    import ssl  # Python 2.6+
-except ImportError:
-    ssl = None
+from tornado.util import bytes_type
 
 try:
     from tornado.platform.posix import _set_nonblocking
 except ImportError:
     _set_nonblocking = None
 
+
 class StreamClosedError(IOError):
     pass
 
+
 class BaseIOStream(object):
     """A utility class to write to and read from a non-blocking file or socket.
 
@@ -146,7 +146,7 @@ class BaseIOStream(object):
         ``callback`` will be empty.
         """
         self._set_read_callback(callback)
-        assert isinstance(num_bytes, (int, long))
+        assert isinstance(num_bytes, numbers.Integral)
         self._read_bytes = num_bytes
         self._streaming_callback = stack_context.wrap(streaming_callback)
         self._try_inline_read()
@@ -237,12 +237,14 @@ class BaseIOStream(object):
 
     def _maybe_run_close_callback(self):
         if (self.closed() and self._close_callback and
-            self._pending_callbacks == 0):
+                self._pending_callbacks == 0):
             # if there are pending callbacks, don't run the close callback
             # until they're done (see _maybe_add_error_handler)
             cb = self._close_callback
             self._close_callback = None
             self._run_callback(cb)
+            # Delete any unfinished callbacks to break up reference cycles.
+            self._read_callback = self._write_callback = None
 
     def reading(self):
         """Returns true if we are currently reading from the stream."""
@@ -399,7 +401,7 @@ class BaseIOStream(object):
         """
         try:
             chunk = self.read_from_fd()
-        except (socket.error, IOError, OSError), e:
+        except (socket.error, IOError, OSError) as e:
             # ssl.SSLError is a subclass of socket.error
             if e.args[0] == errno.ECONNRESET:
                 # Treat ECONNRESET as a connection close rather than
@@ -504,7 +506,7 @@ class BaseIOStream(object):
                 self._write_buffer_frozen = False
                 _merge_prefix(self._write_buffer, num_bytes)
                 self._write_buffer.popleft()
-            except socket.error, e:
+            except socket.error as e:
                 if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
                     self._write_buffer_frozen = True
                     break
@@ -520,7 +522,7 @@ class BaseIOStream(object):
 
     def _consume(self, loc):
         if loc == 0:
-            return b("")
+            return b""
         _merge_prefix(self._read_buffer, loc)
         self._read_buffer_size -= loc
         return self._read_buffer.popleft()
@@ -630,7 +632,7 @@ class IOStream(BaseIOStream):
     def read_from_fd(self):
         try:
             chunk = self.socket.recv(self.read_chunk_size)
-        except socket.error, e:
+        except socket.error as e:
             if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
                 return None
             else:
@@ -643,7 +645,7 @@ class IOStream(BaseIOStream):
     def write_to_fd(self, data):
         return self.socket.send(data)
 
-    def connect(self, address, callback=None):
+    def connect(self, address, callback=None, server_hostname=None):
         """Connects the socket to a remote address without blocking.
 
         May only be called if the socket passed to the constructor was
@@ -652,6 +654,11 @@ class IOStream(BaseIOStream):
         If callback is specified, it will be called when the
         connection is completed.
 
+        If specified, the ``server_hostname`` parameter will be used
+        in SSL connections for certificate validation (if requested in
+        the ``ssl_options``) and SNI (if supported; requires
+        Python 3.2+).
+
         Note that it is safe to call IOStream.write while the
         connection is pending, in which case the data will be written
         as soon as the connection is ready.  Calling IOStream read
@@ -661,7 +668,7 @@ class IOStream(BaseIOStream):
         self._connecting = True
         try:
             self.socket.connect(address)
-        except socket.error, e:
+        except socket.error as e:
             # In non-blocking mode we expect connect() to raise an
             # exception with EINPROGRESS or EWOULDBLOCK.
             #
@@ -710,8 +717,9 @@ class SSLIOStream(IOStream):
     def __init__(self, *args, **kwargs):
         """Creates an SSLIOStream.
 
-        If a dictionary is provided as keyword argument ssl_options,
-        it will be used as additional keyword arguments to ssl.wrap_socket.
+        The ``ssl_options`` keyword argument may either be a dictionary
+        of keyword arguments for `ssl.wrap_socket`, or an `ssl.SSLContext`
+        object.
         """
         self._ssl_options = kwargs.pop('ssl_options', {})
         super(SSLIOStream, self).__init__(*args, **kwargs)
@@ -719,6 +727,7 @@ class SSLIOStream(IOStream):
         self._handshake_reading = False
         self._handshake_writing = False
         self._ssl_connect_callback = None
+        self._server_hostname = None
 
     def reading(self):
         return self._handshake_reading or super(SSLIOStream, self).reading()
@@ -732,7 +741,7 @@ class SSLIOStream(IOStream):
             self._handshake_reading = False
             self._handshake_writing = False
             self.socket.do_handshake()
-        except ssl.SSLError, err:
+        except ssl.SSLError as err:
             if err.args[0] == ssl.SSL_ERROR_WANT_READ:
                 self._handshake_reading = True
                 return
@@ -751,16 +760,46 @@ class SSLIOStream(IOStream):
                                 self.socket.fileno(), peer, err)
                 return self.close(exc_info=True)
             raise
-        except socket.error, err:
+        except socket.error as err:
             if err.args[0] in (errno.ECONNABORTED, errno.ECONNRESET):
                 return self.close(exc_info=True)
         else:
             self._ssl_accepting = False
+            if not self._verify_cert(self.socket.getpeercert()):
+                self.close()
+                return
             if self._ssl_connect_callback is not None:
                 callback = self._ssl_connect_callback
                 self._ssl_connect_callback = None
                 self._run_callback(callback)
 
+    def _verify_cert(self, peercert):
+        """Returns True if peercert is valid according to the configured
+        validation mode and hostname.
+
+        The ssl handshake already tested the certificate for a valid
+        CA signature; the only thing that remains is to check
+        the hostname.
+        """
+        if isinstance(self._ssl_options, dict):
+            verify_mode = self._ssl_options.get('cert_reqs', ssl.CERT_NONE)
+        elif isinstance(self._ssl_options, ssl.SSLContext):
+            verify_mode = self._ssl_options.verify_mode
+        assert verify_mode in (ssl.CERT_NONE, ssl.CERT_REQUIRED, ssl.CERT_OPTIONAL)
+        if verify_mode == ssl.CERT_NONE or self._server_hostname is None:
+            return True
+        cert = self.socket.getpeercert()
+        if cert is None and verify_mode == ssl.CERT_REQUIRED:
+            gen_log.warning("No SSL certificate given")
+            return False
+        try:
+            ssl_match_hostname(peercert, self._server_hostname)
+        except SSLCertificateError:
+            gen_log.warning("Invalid SSL certificate", exc_info=True)
+            return False
+        else:
+            return True
+
     def _handle_read(self):
         if self._ssl_accepting:
             self._do_ssl_handshake()
@@ -773,10 +812,11 @@ class SSLIOStream(IOStream):
             return
         super(SSLIOStream, self)._handle_write()
 
-    def connect(self, address, callback=None):
+    def connect(self, address, callback=None, server_hostname=None):
         # Save the user's callback and run it after the ssl handshake
         # has completed.
         self._ssl_connect_callback = callback
+        self._server_hostname = server_hostname
         super(SSLIOStream, self).connect(address, callback=None)
 
     def _handle_connect(self):
@@ -786,9 +826,9 @@ class SSLIOStream(IOStream):
         # user callbacks are enqueued asynchronously on the IOLoop,
         # but since _handle_events calls _handle_connect immediately
         # followed by _handle_write we need this to be synchronous.
-        self.socket = ssl.wrap_socket(self.socket,
-                                      do_handshake_on_connect=False,
-                                      **self._ssl_options)
+        self.socket = ssl_wrap_socket(self.socket, self._ssl_options,
+                                      server_hostname=self._server_hostname,
+                                      do_handshake_on_connect=False)
         super(SSLIOStream, self)._handle_connect()
 
     def read_from_fd(self):
@@ -804,14 +844,14 @@ class SSLIOStream(IOStream):
             # called when there is nothing to read, so we have to use
             # read() instead.
             chunk = self.socket.read(self.read_chunk_size)
-        except ssl.SSLError, e:
+        except ssl.SSLError as e:
             # SSLError is a subclass of socket.error, so this except
             # block must come first.
             if e.args[0] == ssl.SSL_ERROR_WANT_READ:
                 return None
             else:
                 raise
-        except socket.error, e:
+        except socket.error as e:
             if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
                 return None
             else:
@@ -821,6 +861,7 @@ class SSLIOStream(IOStream):
             return None
         return chunk
 
+
 class PipeIOStream(BaseIOStream):
     """Pipe-based IOStream implementation.
 
@@ -844,7 +885,7 @@ class PipeIOStream(BaseIOStream):
     def read_from_fd(self):
         try:
             chunk = os.read(self.fd, self.read_chunk_size)
-        except (IOError, OSError), e:
+        except (IOError, OSError) as e:
             if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
                 return None
             elif e.args[0] == errno.EBADF:
@@ -874,17 +915,17 @@ def _merge_prefix(deque, size):
     string of up to size bytes.
 
     >>> d = collections.deque(['abc', 'de', 'fghi', 'j'])
-    >>> _merge_prefix(d, 5); print d
+    >>> _merge_prefix(d, 5); print(d)
     deque(['abcde', 'fghi', 'j'])
 
     Strings will be split as necessary to reach the desired size.
-    >>> _merge_prefix(d, 7); print d
+    >>> _merge_prefix(d, 7); print(d)
     deque(['abcdefg', 'hi', 'j'])
 
-    >>> _merge_prefix(d, 3); print d
+    >>> _merge_prefix(d, 3); print(d)
     deque(['abc', 'defg', 'hi', 'j'])
 
-    >>> _merge_prefix(d, 100); print d
+    >>> _merge_prefix(d, 100); print(d)
     deque(['abcdefghij'])
     """
     if len(deque) == 1 and len(deque[0]) <= size:
@@ -904,7 +945,7 @@ def _merge_prefix(deque, size):
     if prefix:
         deque.appendleft(type(prefix[0])().join(prefix))
     if not deque:
-        deque.appendleft(b(""))
+        deque.appendleft(b"")
 
 
 def doctests():
diff --git a/libs/tornado/locale.py b/libs/tornado/locale.py
index 918f0c415f7aa8a5454d3cc1ea19a62e04410bed..e4e1a154453c37e192413be57fc01bf2dd7f93f1 100755
--- a/libs/tornado/locale.py
+++ b/libs/tornado/locale.py
@@ -39,7 +39,7 @@ supported by gettext and related tools).  If neither method is called,
 the locale.translate method will simply return the original string.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import csv
 import datetime
@@ -48,6 +48,7 @@ import re
 
 from tornado import escape
 from tornado.log import gen_log
+from tornado.util import u
 
 _default_locale = "en_US"
 _translations = {}
@@ -80,11 +81,11 @@ def set_default_locale(code):
     global _default_locale
     global _supported_locales
     _default_locale = code
-    _supported_locales = frozenset(_translations.keys() + [_default_locale])
+    _supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
 
 
 def load_translations(directory):
-    u"""Loads translations from CSV files in a directory.
+    u("""Loads translations from CSV files in a directory.
 
     Translations are strings with optional Python-style named placeholders
     (e.g., "My name is %(name)s") and their associated translations.
@@ -109,7 +110,7 @@ def load_translations(directory):
         "%(name)s liked this","A %(name)s les gust\u00f3 esto","plural"
         "%(name)s liked this","A %(name)s le gust\u00f3 esto","singular"
 
-    """
+    """)
     global _translations
     global _supported_locales
     _translations = {}
@@ -128,8 +129,6 @@ def load_translations(directory):
             f = open(full_path, "r", encoding="utf-8")
         except TypeError:
             # python 2: files return byte strings, which are decoded below.
-            # Once we drop python 2.5, this could use io.open instead
-            # on both 2 and 3.
             f = open(full_path, "r")
         _translations[locale] = {}
         for i, row in enumerate(csv.reader(f)):
@@ -147,7 +146,7 @@ def load_translations(directory):
                 continue
             _translations[locale].setdefault(plural, {})[english] = translation
         f.close()
-    _supported_locales = frozenset(_translations.keys() + [_default_locale])
+    _supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
     gen_log.debug("Supported locales: %s", sorted(_supported_locales))
 
 
@@ -183,10 +182,10 @@ def load_gettext_translations(directory, domain):
             os.stat(os.path.join(directory, lang, "LC_MESSAGES", domain + ".mo"))
             _translations[lang] = gettext.translation(domain, directory,
                                                       languages=[lang])
-        except Exception, e:
+        except Exception as e:
             gen_log.error("Cannot load translation for '%s': %s", lang, str(e))
             continue
-    _supported_locales = frozenset(_translations.keys() + [_default_locale])
+    _supported_locales = frozenset(list(_translations.keys()) + [_default_locale])
     _use_gettext = True
     gen_log.debug("Supported locales: %s", sorted(_supported_locales))
 
@@ -242,7 +241,7 @@ class Locale(object):
 
     def __init__(self, code, translations):
         self.code = code
-        self.name = LOCALE_NAMES.get(code, {}).get("name", u"Unknown")
+        self.name = LOCALE_NAMES.get(code, {}).get("name", u("Unknown"))
         self.rtl = False
         for prefix in ["fa", "ar", "he"]:
             if self.code.startswith(prefix):
@@ -323,26 +322,26 @@ class Locale(object):
             if days == 0:
                 format = _("%(time)s")
             elif days == 1 and local_date.day == local_yesterday.day and \
-                 relative:
+                    relative:
                 format = _("yesterday") if shorter else \
-                         _("yesterday at %(time)s")
+                    _("yesterday at %(time)s")
             elif days < 5:
                 format = _("%(weekday)s") if shorter else \
-                         _("%(weekday)s at %(time)s")
+                    _("%(weekday)s at %(time)s")
             elif days < 334:  # 11mo, since confusing for same month last year
                 format = _("%(month_name)s %(day)s") if shorter else \
-                         _("%(month_name)s %(day)s at %(time)s")
+                    _("%(month_name)s %(day)s at %(time)s")
 
         if format is None:
             format = _("%(month_name)s %(day)s, %(year)s") if shorter else \
-                     _("%(month_name)s %(day)s, %(year)s at %(time)s")
+                _("%(month_name)s %(day)s, %(year)s at %(time)s")
 
         tfhour_clock = self.code not in ("en", "en_US", "zh_CN")
         if tfhour_clock:
             str_time = "%d:%02d" % (local_date.hour, local_date.minute)
         elif self.code == "zh_CN":
             str_time = "%s%d:%02d" % (
-                (u'\u4e0a\u5348', u'\u4e0b\u5348')[local_date.hour >= 12],
+                (u('\u4e0a\u5348'), u('\u4e0b\u5348'))[local_date.hour >= 12],
                 local_date.hour % 12 or 12, local_date.minute)
         else:
             str_time = "%d:%02d %s" % (
@@ -388,7 +387,7 @@ class Locale(object):
             return ""
         if len(parts) == 1:
             return parts[0]
-        comma = u' \u0648 ' if self.code.startswith("fa") else u", "
+        comma = u(' \u0648 ') if self.code.startswith("fa") else u(", ")
         return _("%(commas)s and %(last)s") % {
             "commas": comma.join(parts[:-1]),
             "last": parts[len(parts) - 1],
@@ -444,66 +443,66 @@ class GettextLocale(Locale):
             return self.gettext(message)
 
 LOCALE_NAMES = {
-    "af_ZA": {"name_en": u"Afrikaans", "name": u"Afrikaans"},
-    "am_ET": {"name_en": u"Amharic", "name": u'\u12a0\u121b\u122d\u129b'},
-    "ar_AR": {"name_en": u"Arabic", "name": u"\u0627\u0644\u0639\u0631\u0628\u064a\u0629"},
-    "bg_BG": {"name_en": u"Bulgarian", "name": u"\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438"},
-    "bn_IN": {"name_en": u"Bengali", "name": u"\u09ac\u09be\u0982\u09b2\u09be"},
-    "bs_BA": {"name_en": u"Bosnian", "name": u"Bosanski"},
-    "ca_ES": {"name_en": u"Catalan", "name": u"Catal\xe0"},
-    "cs_CZ": {"name_en": u"Czech", "name": u"\u010ce\u0161tina"},
-    "cy_GB": {"name_en": u"Welsh", "name": u"Cymraeg"},
-    "da_DK": {"name_en": u"Danish", "name": u"Dansk"},
-    "de_DE": {"name_en": u"German", "name": u"Deutsch"},
-    "el_GR": {"name_en": u"Greek", "name": u"\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac"},
-    "en_GB": {"name_en": u"English (UK)", "name": u"English (UK)"},
-    "en_US": {"name_en": u"English (US)", "name": u"English (US)"},
-    "es_ES": {"name_en": u"Spanish (Spain)", "name": u"Espa\xf1ol (Espa\xf1a)"},
-    "es_LA": {"name_en": u"Spanish", "name": u"Espa\xf1ol"},
-    "et_EE": {"name_en": u"Estonian", "name": u"Eesti"},
-    "eu_ES": {"name_en": u"Basque", "name": u"Euskara"},
-    "fa_IR": {"name_en": u"Persian", "name": u"\u0641\u0627\u0631\u0633\u06cc"},
-    "fi_FI": {"name_en": u"Finnish", "name": u"Suomi"},
-    "fr_CA": {"name_en": u"French (Canada)", "name": u"Fran\xe7ais (Canada)"},
-    "fr_FR": {"name_en": u"French", "name": u"Fran\xe7ais"},
-    "ga_IE": {"name_en": u"Irish", "name": u"Gaeilge"},
-    "gl_ES": {"name_en": u"Galician", "name": u"Galego"},
-    "he_IL": {"name_en": u"Hebrew", "name": u"\u05e2\u05d1\u05e8\u05d9\u05ea"},
-    "hi_IN": {"name_en": u"Hindi", "name": u"\u0939\u093f\u0928\u094d\u0926\u0940"},
-    "hr_HR": {"name_en": u"Croatian", "name": u"Hrvatski"},
-    "hu_HU": {"name_en": u"Hungarian", "name": u"Magyar"},
-    "id_ID": {"name_en": u"Indonesian", "name": u"Bahasa Indonesia"},
-    "is_IS": {"name_en": u"Icelandic", "name": u"\xcdslenska"},
-    "it_IT": {"name_en": u"Italian", "name": u"Italiano"},
-    "ja_JP": {"name_en": u"Japanese", "name": u"\u65e5\u672c\u8a9e"},
-    "ko_KR": {"name_en": u"Korean", "name": u"\ud55c\uad6d\uc5b4"},
-    "lt_LT": {"name_en": u"Lithuanian", "name": u"Lietuvi\u0173"},
-    "lv_LV": {"name_en": u"Latvian", "name": u"Latvie\u0161u"},
-    "mk_MK": {"name_en": u"Macedonian", "name": u"\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0441\u043a\u0438"},
-    "ml_IN": {"name_en": u"Malayalam", "name": u"\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02"},
-    "ms_MY": {"name_en": u"Malay", "name": u"Bahasa Melayu"},
-    "nb_NO": {"name_en": u"Norwegian (bokmal)", "name": u"Norsk (bokm\xe5l)"},
-    "nl_NL": {"name_en": u"Dutch", "name": u"Nederlands"},
-    "nn_NO": {"name_en": u"Norwegian (nynorsk)", "name": u"Norsk (nynorsk)"},
-    "pa_IN": {"name_en": u"Punjabi", "name": u"\u0a2a\u0a70\u0a1c\u0a3e\u0a2c\u0a40"},
-    "pl_PL": {"name_en": u"Polish", "name": u"Polski"},
-    "pt_BR": {"name_en": u"Portuguese (Brazil)", "name": u"Portugu\xeas (Brasil)"},
-    "pt_PT": {"name_en": u"Portuguese (Portugal)", "name": u"Portugu\xeas (Portugal)"},
-    "ro_RO": {"name_en": u"Romanian", "name": u"Rom\xe2n\u0103"},
-    "ru_RU": {"name_en": u"Russian", "name": u"\u0420\u0443\u0441\u0441\u043a\u0438\u0439"},
-    "sk_SK": {"name_en": u"Slovak", "name": u"Sloven\u010dina"},
-    "sl_SI": {"name_en": u"Slovenian", "name": u"Sloven\u0161\u010dina"},
-    "sq_AL": {"name_en": u"Albanian", "name": u"Shqip"},
-    "sr_RS": {"name_en": u"Serbian", "name": u"\u0421\u0440\u043f\u0441\u043a\u0438"},
-    "sv_SE": {"name_en": u"Swedish", "name": u"Svenska"},
-    "sw_KE": {"name_en": u"Swahili", "name": u"Kiswahili"},
-    "ta_IN": {"name_en": u"Tamil", "name": u"\u0ba4\u0bae\u0bbf\u0bb4\u0bcd"},
-    "te_IN": {"name_en": u"Telugu", "name": u"\u0c24\u0c46\u0c32\u0c41\u0c17\u0c41"},
-    "th_TH": {"name_en": u"Thai", "name": u"\u0e20\u0e32\u0e29\u0e32\u0e44\u0e17\u0e22"},
-    "tl_PH": {"name_en": u"Filipino", "name": u"Filipino"},
-    "tr_TR": {"name_en": u"Turkish", "name": u"T\xfcrk\xe7e"},
-    "uk_UA": {"name_en": u"Ukraini ", "name": u"\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430"},
-    "vi_VN": {"name_en": u"Vietnamese", "name": u"Ti\u1ebfng Vi\u1ec7t"},
-    "zh_CN": {"name_en": u"Chinese (Simplified)", "name": u"\u4e2d\u6587(\u7b80\u4f53)"},
-    "zh_TW": {"name_en": u"Chinese (Traditional)", "name": u"\u4e2d\u6587(\u7e41\u9ad4)"},
+    "af_ZA": {"name_en": u("Afrikaans"), "name": u("Afrikaans")},
+    "am_ET": {"name_en": u("Amharic"), "name": u('\u12a0\u121b\u122d\u129b')},
+    "ar_AR": {"name_en": u("Arabic"), "name": u("\u0627\u0644\u0639\u0631\u0628\u064a\u0629")},
+    "bg_BG": {"name_en": u("Bulgarian"), "name": u("\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438")},
+    "bn_IN": {"name_en": u("Bengali"), "name": u("\u09ac\u09be\u0982\u09b2\u09be")},
+    "bs_BA": {"name_en": u("Bosnian"), "name": u("Bosanski")},
+    "ca_ES": {"name_en": u("Catalan"), "name": u("Catal\xe0")},
+    "cs_CZ": {"name_en": u("Czech"), "name": u("\u010ce\u0161tina")},
+    "cy_GB": {"name_en": u("Welsh"), "name": u("Cymraeg")},
+    "da_DK": {"name_en": u("Danish"), "name": u("Dansk")},
+    "de_DE": {"name_en": u("German"), "name": u("Deutsch")},
+    "el_GR": {"name_en": u("Greek"), "name": u("\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac")},
+    "en_GB": {"name_en": u("English (UK)"), "name": u("English (UK)")},
+    "en_US": {"name_en": u("English (US)"), "name": u("English (US)")},
+    "es_ES": {"name_en": u("Spanish (Spain)"), "name": u("Espa\xf1ol (Espa\xf1a)")},
+    "es_LA": {"name_en": u("Spanish"), "name": u("Espa\xf1ol")},
+    "et_EE": {"name_en": u("Estonian"), "name": u("Eesti")},
+    "eu_ES": {"name_en": u("Basque"), "name": u("Euskara")},
+    "fa_IR": {"name_en": u("Persian"), "name": u("\u0641\u0627\u0631\u0633\u06cc")},
+    "fi_FI": {"name_en": u("Finnish"), "name": u("Suomi")},
+    "fr_CA": {"name_en": u("French (Canada)"), "name": u("Fran\xe7ais (Canada)")},
+    "fr_FR": {"name_en": u("French"), "name": u("Fran\xe7ais")},
+    "ga_IE": {"name_en": u("Irish"), "name": u("Gaeilge")},
+    "gl_ES": {"name_en": u("Galician"), "name": u("Galego")},
+    "he_IL": {"name_en": u("Hebrew"), "name": u("\u05e2\u05d1\u05e8\u05d9\u05ea")},
+    "hi_IN": {"name_en": u("Hindi"), "name": u("\u0939\u093f\u0928\u094d\u0926\u0940")},
+    "hr_HR": {"name_en": u("Croatian"), "name": u("Hrvatski")},
+    "hu_HU": {"name_en": u("Hungarian"), "name": u("Magyar")},
+    "id_ID": {"name_en": u("Indonesian"), "name": u("Bahasa Indonesia")},
+    "is_IS": {"name_en": u("Icelandic"), "name": u("\xcdslenska")},
+    "it_IT": {"name_en": u("Italian"), "name": u("Italiano")},
+    "ja_JP": {"name_en": u("Japanese"), "name": u("\u65e5\u672c\u8a9e")},
+    "ko_KR": {"name_en": u("Korean"), "name": u("\ud55c\uad6d\uc5b4")},
+    "lt_LT": {"name_en": u("Lithuanian"), "name": u("Lietuvi\u0173")},
+    "lv_LV": {"name_en": u("Latvian"), "name": u("Latvie\u0161u")},
+    "mk_MK": {"name_en": u("Macedonian"), "name": u("\u041c\u0430\u043a\u0435\u0434\u043e\u043d\u0441\u043a\u0438")},
+    "ml_IN": {"name_en": u("Malayalam"), "name": u("\u0d2e\u0d32\u0d2f\u0d3e\u0d33\u0d02")},
+    "ms_MY": {"name_en": u("Malay"), "name": u("Bahasa Melayu")},
+    "nb_NO": {"name_en": u("Norwegian (bokmal)"), "name": u("Norsk (bokm\xe5l)")},
+    "nl_NL": {"name_en": u("Dutch"), "name": u("Nederlands")},
+    "nn_NO": {"name_en": u("Norwegian (nynorsk)"), "name": u("Norsk (nynorsk)")},
+    "pa_IN": {"name_en": u("Punjabi"), "name": u("\u0a2a\u0a70\u0a1c\u0a3e\u0a2c\u0a40")},
+    "pl_PL": {"name_en": u("Polish"), "name": u("Polski")},
+    "pt_BR": {"name_en": u("Portuguese (Brazil)"), "name": u("Portugu\xeas (Brasil)")},
+    "pt_PT": {"name_en": u("Portuguese (Portugal)"), "name": u("Portugu\xeas (Portugal)")},
+    "ro_RO": {"name_en": u("Romanian"), "name": u("Rom\xe2n\u0103")},
+    "ru_RU": {"name_en": u("Russian"), "name": u("\u0420\u0443\u0441\u0441\u043a\u0438\u0439")},
+    "sk_SK": {"name_en": u("Slovak"), "name": u("Sloven\u010dina")},
+    "sl_SI": {"name_en": u("Slovenian"), "name": u("Sloven\u0161\u010dina")},
+    "sq_AL": {"name_en": u("Albanian"), "name": u("Shqip")},
+    "sr_RS": {"name_en": u("Serbian"), "name": u("\u0421\u0440\u043f\u0441\u043a\u0438")},
+    "sv_SE": {"name_en": u("Swedish"), "name": u("Svenska")},
+    "sw_KE": {"name_en": u("Swahili"), "name": u("Kiswahili")},
+    "ta_IN": {"name_en": u("Tamil"), "name": u("\u0ba4\u0bae\u0bbf\u0bb4\u0bcd")},
+    "te_IN": {"name_en": u("Telugu"), "name": u("\u0c24\u0c46\u0c32\u0c41\u0c17\u0c41")},
+    "th_TH": {"name_en": u("Thai"), "name": u("\u0e20\u0e32\u0e29\u0e32\u0e44\u0e17\u0e22")},
+    "tl_PH": {"name_en": u("Filipino"), "name": u("Filipino")},
+    "tr_TR": {"name_en": u("Turkish"), "name": u("T\xfcrk\xe7e")},
+    "uk_UA": {"name_en": u("Ukraini "), "name": u("\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430")},
+    "vi_VN": {"name_en": u("Vietnamese"), "name": u("Ti\u1ebfng Vi\u1ec7t")},
+    "zh_CN": {"name_en": u("Chinese (Simplified)"), "name": u("\u4e2d\u6587(\u7b80\u4f53)")},
+    "zh_TW": {"name_en": u("Chinese (Traditional)"), "name": u("\u4e2d\u6587(\u7e41\u9ad4)")},
 }
diff --git a/libs/tornado/log.py b/libs/tornado/log.py
index 7a4a8332f29e67628c2dc3784314bc8529856bcc..fa11f37953598f9dd442344546a208e3a5650760 100755
--- a/libs/tornado/log.py
+++ b/libs/tornado/log.py
@@ -28,13 +28,15 @@ These streams may be configured independently using the standard library's
 `logging` module.  For example, you may wish to send ``tornado.access`` logs
 to a separate file for analysis.
 """
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import logging
+import logging.handlers
 import sys
 import time
 
 from tornado.escape import _unicode
+from tornado.util import unicode_type, basestring_type
 
 try:
     import curses
@@ -46,6 +48,7 @@ access_log = logging.getLogger("tornado.access")
 app_log = logging.getLogger("tornado.application")
 gen_log = logging.getLogger("tornado.general")
 
+
 def _stderr_supports_color():
     color = False
     if curses and sys.stderr.isatty():
@@ -85,25 +88,25 @@ class LogFormatter(logging.Formatter):
             fg_color = (curses.tigetstr("setaf") or
                         curses.tigetstr("setf") or "")
             if (3, 0) < sys.version_info < (3, 2, 3):
-                fg_color = unicode(fg_color, "ascii")
+                fg_color = unicode_type(fg_color, "ascii")
             self._colors = {
-                logging.DEBUG: unicode(curses.tparm(fg_color, 4),  # Blue
-                                       "ascii"),
-                logging.INFO: unicode(curses.tparm(fg_color, 2),  # Green
-                                      "ascii"),
-                logging.WARNING: unicode(curses.tparm(fg_color, 3),  # Yellow
-                                         "ascii"),
-                logging.ERROR: unicode(curses.tparm(fg_color, 1),  # Red
-                                       "ascii"),
+                logging.DEBUG: unicode_type(curses.tparm(fg_color, 4),  # Blue
+                                            "ascii"),
+                logging.INFO: unicode_type(curses.tparm(fg_color, 2),  # Green
+                                           "ascii"),
+                logging.WARNING: unicode_type(curses.tparm(fg_color, 3),  # Yellow
+                                              "ascii"),
+                logging.ERROR: unicode_type(curses.tparm(fg_color, 1),  # Red
+                                            "ascii"),
             }
-            self._normal = unicode(curses.tigetstr("sgr0"), "ascii")
+            self._normal = unicode_type(curses.tigetstr("sgr0"), "ascii")
 
     def format(self, record):
         try:
             record.message = record.getMessage()
-        except Exception, e:
+        except Exception as e:
             record.message = "Bad message (%r): %r" % (e, record.__dict__)
-        assert isinstance(record.message, basestring)  # guaranteed by logging
+        assert isinstance(record.message, basestring_type)  # guaranteed by logging
         record.asctime = time.strftime(
             "%y%m%d %H:%M:%S", self.converter(record.created))
         prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % \
@@ -128,20 +131,27 @@ class LogFormatter(logging.Formatter):
         # it's worth it since the encoding errors that would otherwise
         # result are so useless (and tornado is fond of using utf8-encoded
         # byte strings whereever possible).
-        try:
-            message = _unicode(record.message)
-        except UnicodeDecodeError:
-            message = repr(record.message)
+        def safe_unicode(s):
+            try:
+                return _unicode(s)
+            except UnicodeDecodeError:
+                return repr(s)
 
-        formatted = prefix + " " + message
+        formatted = prefix + " " + safe_unicode(record.message)
         if record.exc_info:
             if not record.exc_text:
                 record.exc_text = self.formatException(record.exc_info)
         if record.exc_text:
-            formatted = formatted.rstrip() + "\n" + record.exc_text
+            # exc_text contains multiple lines.  We need to safe_unicode
+            # each line separately so that non-utf8 bytes don't cause
+            # all the newlines to turn into '\n'.
+            lines = [formatted.rstrip()]
+            lines.extend(safe_unicode(ln) for ln in record.exc_text.split('\n'))
+            formatted = '\n'.join(lines)
         return formatted.replace("\n", "\n    ")
 
-def enable_pretty_logging(options=None):
+
+def enable_pretty_logging(options=None, logger=None):
     """Turns on formatted logging output as configured.
 
     This is called automaticaly by `tornado.options.parse_command_line`
@@ -151,22 +161,23 @@ def enable_pretty_logging(options=None):
         from tornado.options import options
     if options.logging == 'none':
         return
-    root_logger = logging.getLogger()
-    root_logger.setLevel(getattr(logging, options.logging.upper()))
+    if logger is None:
+        logger = logging.getLogger()
+    logger.setLevel(getattr(logging, options.logging.upper()))
     if options.log_file_prefix:
         channel = logging.handlers.RotatingFileHandler(
             filename=options.log_file_prefix,
             maxBytes=options.log_file_max_size,
             backupCount=options.log_file_num_backups)
         channel.setFormatter(LogFormatter(color=False))
-        root_logger.addHandler(channel)
+        logger.addHandler(channel)
 
     if (options.log_to_stderr or
-        (options.log_to_stderr is None and not root_logger.handlers)):
+            (options.log_to_stderr is None and not logger.handlers)):
         # Set up color if we are in a tty and curses is installed
         channel = logging.StreamHandler()
         channel.setFormatter(LogFormatter())
-        root_logger.addHandler(channel)
+        logger.addHandler(channel)
 
 
 def define_logging_options(options=None):
@@ -174,21 +185,21 @@ def define_logging_options(options=None):
         # late import to prevent cycle
         from tornado.options import options
     options.define("logging", default="info",
-           help=("Set the Python log level. If 'none', tornado won't touch the "
-                 "logging configuration."),
-           metavar="debug|info|warning|error|none")
+                   help=("Set the Python log level. If 'none', tornado won't touch the "
+                         "logging configuration."),
+                   metavar="debug|info|warning|error|none")
     options.define("log_to_stderr", type=bool, default=None,
-           help=("Send log output to stderr (colorized if possible). "
-                 "By default use stderr if --log_file_prefix is not set and "
-                 "no other logging is configured."))
+                   help=("Send log output to stderr (colorized if possible). "
+                         "By default use stderr if --log_file_prefix is not set and "
+                         "no other logging is configured."))
     options.define("log_file_prefix", type=str, default=None, metavar="PATH",
-           help=("Path prefix for log files. "
-                 "Note that if you are running multiple tornado processes, "
-                 "log_file_prefix must be different for each of them (e.g. "
-                 "include the port number)"))
+                   help=("Path prefix for log files. "
+                         "Note that if you are running multiple tornado processes, "
+                         "log_file_prefix must be different for each of them (e.g. "
+                         "include the port number)"))
     options.define("log_file_max_size", type=int, default=100 * 1000 * 1000,
-           help="max size of log files before rollover")
+                   help="max size of log files before rollover")
     options.define("log_file_num_backups", type=int, default=10,
-           help="number of log files to keep")
+                   help="number of log files to keep")
 
     options.add_parse_callback(enable_pretty_logging)
diff --git a/libs/tornado/netutil.py b/libs/tornado/netutil.py
index 291dbecd9201dc61e30ed79813c8e7578c339765..4003245e3b93052017cdc251093fd0174be35592 100755
--- a/libs/tornado/netutil.py
+++ b/libs/tornado/netutil.py
@@ -16,226 +16,19 @@
 
 """Miscellaneous network utility code."""
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import errno
 import os
+import re
 import socket
+import ssl
 import stat
 
-from tornado import process
 from tornado.concurrent import dummy_executor, run_on_executor
 from tornado.ioloop import IOLoop
-from tornado.iostream import IOStream, SSLIOStream
-from tornado.log import app_log
 from tornado.platform.auto import set_close_exec
-
-try:
-    import ssl  # Python 2.6+
-except ImportError:
-    ssl = None
-
-
-class TCPServer(object):
-    r"""A non-blocking, single-threaded TCP server.
-
-    To use `TCPServer`, define a subclass which overrides the `handle_stream`
-    method.
-
-    `TCPServer` can serve SSL traffic with Python 2.6+ and OpenSSL.
-    To make this server serve SSL traffic, send the ssl_options dictionary
-    argument with the arguments required for the `ssl.wrap_socket` method,
-    including "certfile" and "keyfile"::
-
-       TCPServer(ssl_options={
-           "certfile": os.path.join(data_dir, "mydomain.crt"),
-           "keyfile": os.path.join(data_dir, "mydomain.key"),
-       })
-
-    `TCPServer` initialization follows one of three patterns:
-
-    1. `listen`: simple single-process::
-
-            server = TCPServer()
-            server.listen(8888)
-            IOLoop.instance().start()
-
-    2. `bind`/`start`: simple multi-process::
-
-            server = TCPServer()
-            server.bind(8888)
-            server.start(0)  # Forks multiple sub-processes
-            IOLoop.instance().start()
-
-       When using this interface, an `IOLoop` must *not* be passed
-       to the `TCPServer` constructor.  `start` will always start
-       the server on the default singleton `IOLoop`.
-
-    3. `add_sockets`: advanced multi-process::
-
-            sockets = bind_sockets(8888)
-            tornado.process.fork_processes(0)
-            server = TCPServer()
-            server.add_sockets(sockets)
-            IOLoop.instance().start()
-
-       The `add_sockets` interface is more complicated, but it can be
-       used with `tornado.process.fork_processes` to give you more
-       flexibility in when the fork happens.  `add_sockets` can
-       also be used in single-process servers if you want to create
-       your listening sockets in some way other than
-       `bind_sockets`.
-    """
-    def __init__(self, io_loop=None, ssl_options=None):
-        self.io_loop = io_loop
-        self.ssl_options = ssl_options
-        self._sockets = {}  # fd -> socket object
-        self._pending_sockets = []
-        self._started = False
-
-        # Verify the SSL options. Otherwise we don't get errors until clients
-        # connect. This doesn't verify that the keys are legitimate, but
-        # the SSL module doesn't do that until there is a connected socket
-        # which seems like too much work
-        if self.ssl_options is not None:
-            # Only certfile is required: it can contain both keys
-            if 'certfile' not in self.ssl_options:
-                raise KeyError('missing key "certfile" in ssl_options')
-
-            if not os.path.exists(self.ssl_options['certfile']):
-                raise ValueError('certfile "%s" does not exist' %
-                    self.ssl_options['certfile'])
-            if ('keyfile' in self.ssl_options and
-                    not os.path.exists(self.ssl_options['keyfile'])):
-                raise ValueError('keyfile "%s" does not exist' %
-                    self.ssl_options['keyfile'])
-
-    def listen(self, port, address=""):
-        """Starts accepting connections on the given port.
-
-        This method may be called more than once to listen on multiple ports.
-        `listen` takes effect immediately; it is not necessary to call
-        `TCPServer.start` afterwards.  It is, however, necessary to start
-        the `IOLoop`.
-        """
-        sockets = bind_sockets(port, address=address)
-        self.add_sockets(sockets)
-
-    def add_sockets(self, sockets):
-        """Makes this server start accepting connections on the given sockets.
-
-        The ``sockets`` parameter is a list of socket objects such as
-        those returned by `bind_sockets`.
-        `add_sockets` is typically used in combination with that
-        method and `tornado.process.fork_processes` to provide greater
-        control over the initialization of a multi-process server.
-        """
-        if self.io_loop is None:
-            self.io_loop = IOLoop.instance()
-
-        for sock in sockets:
-            self._sockets[sock.fileno()] = sock
-            add_accept_handler(sock, self._handle_connection,
-                               io_loop=self.io_loop)
-
-    def add_socket(self, socket):
-        """Singular version of `add_sockets`.  Takes a single socket object."""
-        self.add_sockets([socket])
-
-    def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128):
-        """Binds this server to the given port on the given address.
-
-        To start the server, call `start`. If you want to run this server
-        in a single process, you can call `listen` as a shortcut to the
-        sequence of `bind` and `start` calls.
-
-        Address may be either an IP address or hostname.  If it's a hostname,
-        the server will listen on all IP addresses associated with the
-        name.  Address may be an empty string or None to listen on all
-        available interfaces.  Family may be set to either ``socket.AF_INET``
-        or ``socket.AF_INET6`` to restrict to ipv4 or ipv6 addresses, otherwise
-        both will be used if available.
-
-        The ``backlog`` argument has the same meaning as for
-        `socket.listen`.
-
-        This method may be called multiple times prior to `start` to listen
-        on multiple ports or interfaces.
-        """
-        sockets = bind_sockets(port, address=address, family=family,
-                               backlog=backlog)
-        if self._started:
-            self.add_sockets(sockets)
-        else:
-            self._pending_sockets.extend(sockets)
-
-    def start(self, num_processes=1):
-        """Starts this server in the IOLoop.
-
-        By default, we run the server in this process and do not fork any
-        additional child process.
-
-        If num_processes is ``None`` or <= 0, we detect the number of cores
-        available on this machine and fork that number of child
-        processes. If num_processes is given and > 1, we fork that
-        specific number of sub-processes.
-
-        Since we use processes and not threads, there is no shared memory
-        between any server code.
-
-        Note that multiple processes are not compatible with the autoreload
-        module (or the ``debug=True`` option to `tornado.web.Application`).
-        When using multiple processes, no IOLoops can be created or
-        referenced until after the call to ``TCPServer.start(n)``.
-        """
-        assert not self._started
-        self._started = True
-        if num_processes != 1:
-            process.fork_processes(num_processes)
-        sockets = self._pending_sockets
-        self._pending_sockets = []
-        self.add_sockets(sockets)
-
-    def stop(self):
-        """Stops listening for new connections.
-
-        Requests currently in progress may still continue after the
-        server is stopped.
-        """
-        for fd, sock in self._sockets.iteritems():
-            self.io_loop.remove_handler(fd)
-            sock.close()
-
-    def handle_stream(self, stream, address):
-        """Override to handle a new `IOStream` from an incoming connection."""
-        raise NotImplementedError()
-
-    def _handle_connection(self, connection, address):
-        if self.ssl_options is not None:
-            assert ssl, "Python 2.6+ and OpenSSL required for SSL"
-            try:
-                connection = ssl.wrap_socket(connection,
-                                             server_side=True,
-                                             do_handshake_on_connect=False,
-                                             **self.ssl_options)
-            except ssl.SSLError, err:
-                if err.args[0] == ssl.SSL_ERROR_EOF:
-                    return connection.close()
-                else:
-                    raise
-            except socket.error, err:
-                if err.args[0] == errno.ECONNABORTED:
-                    return connection.close()
-                else:
-                    raise
-        try:
-            if self.ssl_options is not None:
-                stream = SSLIOStream(connection, io_loop=self.io_loop)
-            else:
-                stream = IOStream(connection, io_loop=self.io_loop)
-            self.handle_stream(stream, address)
-        except Exception:
-            app_log.error("Error in connection callback", exc_info=True)
+from tornado.util import Configurable
 
 
 def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags=None):
@@ -261,10 +54,17 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags
     sockets = []
     if address == "":
         address = None
+    if not socket.has_ipv6 and family == socket.AF_UNSPEC:
+        # Python can be compiled with --disable-ipv6, which causes
+        # operations on AF_INET6 sockets to fail, but does not
+        # automatically exclude those results from getaddrinfo
+        # results.
+        # http://bugs.python.org/issue16208
+        family = socket.AF_INET
     if flags is None:
         flags = socket.AI_PASSIVE
     for res in set(socket.getaddrinfo(address, port, family, socket.SOCK_STREAM,
-                                  0, flags)):
+                                      0, flags)):
         af, socktype, proto, canonname, sockaddr = res
         sock = socket.socket(af, socktype, proto)
         set_close_exec(sock.fileno())
@@ -288,7 +88,7 @@ def bind_sockets(port, address=None, family=socket.AF_UNSPEC, backlog=128, flags
     return sockets
 
 if hasattr(socket, 'AF_UNIX'):
-    def bind_unix_socket(file, mode=0600, backlog=128):
+    def bind_unix_socket(file, mode=0o600, backlog=128):
         """Creates a listening unix socket.
 
         If a socket with the given name already exists, it will be deleted.
@@ -304,7 +104,7 @@ if hasattr(socket, 'AF_UNIX'):
         sock.setblocking(0)
         try:
             st = os.stat(file)
-        except OSError, err:
+        except OSError as err:
             if err.errno != errno.ENOENT:
                 raise
         else:
@@ -334,7 +134,7 @@ def add_accept_handler(sock, callback, io_loop=None):
         while True:
             try:
                 connection, address = sock.accept()
-            except socket.error, e:
+            except socket.error as e:
                 if e.args[0] in (errno.EWOULDBLOCK, errno.EAGAIN):
                     return
                 raise
@@ -342,11 +142,186 @@ def add_accept_handler(sock, callback, io_loop=None):
     io_loop.add_handler(sock.fileno(), accept_handler, IOLoop.READ)
 
 
-class Resolver(object):
-    def __init__(self, io_loop=None, executor=None):
+class Resolver(Configurable):
+    @classmethod
+    def configurable_base(cls):
+        return Resolver
+
+    @classmethod
+    def configurable_default(cls):
+        return BlockingResolver
+
+    def getaddrinfo(self, *args, **kwargs):
+        """Resolves an address.
+
+        The arguments to this function are the same as to
+        `socket.getaddrinfo`, with the addition of an optional
+        keyword-only ``callback`` argument.
+
+        Returns a `Future` whose result is the same as the return
+        value of `socket.getaddrinfo`.  If a callback is passed,
+        it will be run with the `Future` as an argument when it
+        is complete.
+        """
+        raise NotImplementedError()
+
+
+class ExecutorResolver(Resolver):
+    def initialize(self, io_loop=None, executor=None):
         self.io_loop = io_loop or IOLoop.instance()
         self.executor = executor or dummy_executor
 
     @run_on_executor
     def getaddrinfo(self, *args, **kwargs):
         return socket.getaddrinfo(*args, **kwargs)
+
+class BlockingResolver(ExecutorResolver):
+    def initialize(self, io_loop=None):
+        super(BlockingResolver, self).initialize(io_loop=io_loop)
+
+class ThreadedResolver(ExecutorResolver):
+    def initialize(self, io_loop=None, num_threads=10):
+        from concurrent.futures import ThreadPoolExecutor
+        super(ThreadedResolver, self).initialize(
+            io_loop=io_loop, executor=ThreadPoolExecutor(num_threads))
+
+class OverrideResolver(Resolver):
+    """Wraps a resolver with a mapping of overrides.
+
+    This can be used to make local DNS changes (e.g. for testing)
+    without modifying system-wide settings.
+
+    The mapping can contain either host strings or host-port pairs.
+    """
+    def initialize(self, resolver, mapping):
+        self.resolver = resolver
+        self.mapping = mapping
+
+    def getaddrinfo(self, host, port, *args, **kwargs):
+        if (host, port) in self.mapping:
+            host, port = self.mapping[(host, port)]
+        elif host in self.mapping:
+            host = self.mapping[host]
+        return self.resolver.getaddrinfo(host, port, *args, **kwargs)
+
+
+
+# These are the keyword arguments to ssl.wrap_socket that must be translated
+# to their SSLContext equivalents (the other arguments are still passed
+# to SSLContext.wrap_socket).
+_SSL_CONTEXT_KEYWORDS = frozenset(['ssl_version', 'certfile', 'keyfile',
+                                   'cert_reqs', 'ca_certs', 'ciphers'])
+
+def ssl_options_to_context(ssl_options):
+    """Try to convert an ssl_options dictionary to an SSLContext object.
+
+    The ``ssl_options`` dictionary contains keywords to be passed to
+    `ssl.wrap_socket`.  In Python 3.2+, `ssl.SSLContext` objects can
+    be used instead.  This function converts the dict form to its
+    `SSLContext` equivalent, and may be used when a component which
+    accepts both forms needs to upgrade to the `SSLContext` version
+    to use features like SNI or NPN.
+    """
+    if isinstance(ssl_options, dict):
+        assert all(k in _SSL_CONTEXT_KEYWORDS for k in ssl_options), ssl_options
+    if (not hasattr(ssl, 'SSLContext') or
+        isinstance(ssl_options, ssl.SSLContext)):
+        return ssl_options
+    context = ssl.SSLContext(
+        ssl_options.get('ssl_version', ssl.PROTOCOL_SSLv23))
+    if 'certfile' in ssl_options:
+        context.load_cert_chain(ssl_options['certfile'], ssl_options.get('keyfile', None))
+    if 'cert_reqs' in ssl_options:
+        context.verify_mode = ssl_options['cert_reqs']
+    if 'ca_certs' in ssl_options:
+        context.load_verify_locations(ssl_options['ca_certs'])
+    if 'ciphers' in ssl_options:
+        context.set_ciphers(ssl_options['ciphers'])
+    return context
+
+
+def ssl_wrap_socket(socket, ssl_options, server_hostname=None, **kwargs):
+    """Returns an `ssl.SSLSocket` wrapping the given socket.
+
+    ``ssl_options`` may be either a dictionary (as accepted by
+    `ssl_options_to_context`) or an `ssl.SSLContext` object.
+    Additional keyword arguments are passed to `wrap_socket`
+    (either the `SSLContext` method or the `ssl` module function
+    as appropriate).
+    """
+    context = ssl_options_to_context(ssl_options)
+    if hasattr(ssl, 'SSLContext') and isinstance(context, ssl.SSLContext):
+        if server_hostname is not None and getattr(ssl, 'HAS_SNI'):
+            # Python doesn't have server-side SNI support so we can't
+            # really unittest this, but it can be manually tested with
+            # python3.2 -m tornado.httpclient https://sni.velox.ch
+            return context.wrap_socket(socket, server_hostname=server_hostname,
+                                       **kwargs)
+        else:
+            return context.wrap_socket(socket, **kwargs)
+    else:
+        return ssl.wrap_socket(socket, **dict(context, **kwargs))
+
+if hasattr(ssl, 'match_hostname'):  # python 3.2+
+    ssl_match_hostname = ssl.match_hostname
+    SSLCertificateError = ssl.CertificateError
+else:
+    # match_hostname was added to the standard library ssl module in python 3.2.
+    # The following code was backported for older releases and copied from
+    # https://bitbucket.org/brandon/backports.ssl_match_hostname
+    class SSLCertificateError(ValueError):
+        pass
+
+
+    def _dnsname_to_pat(dn):
+        pats = []
+        for frag in dn.split(r'.'):
+            if frag == '*':
+                # When '*' is a fragment by itself, it matches a non-empty dotless
+                # fragment.
+                pats.append('[^.]+')
+            else:
+                # Otherwise, '*' matches any dotless fragment.
+                frag = re.escape(frag)
+                pats.append(frag.replace(r'\*', '[^.]*'))
+        return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+
+
+    def ssl_match_hostname(cert, hostname):
+        """Verify that *cert* (in decoded format as returned by
+        SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 rules
+        are mostly followed, but IP addresses are not accepted for *hostname*.
+
+        CertificateError is raised on failure. On success, the function
+        returns nothing.
+        """
+        if not cert:
+            raise ValueError("empty or no certificate")
+        dnsnames = []
+        san = cert.get('subjectAltName', ())
+        for key, value in san:
+            if key == 'DNS':
+                if _dnsname_to_pat(value).match(hostname):
+                    return
+                dnsnames.append(value)
+        if not san:
+            # The subject is only checked when subjectAltName is empty
+            for sub in cert.get('subject', ()):
+                for key, value in sub:
+                    # XXX according to RFC 2818, the most specific Common Name
+                    # must be used.
+                    if key == 'commonName':
+                        if _dnsname_to_pat(value).match(hostname):
+                            return
+                        dnsnames.append(value)
+        if len(dnsnames) > 1:
+            raise SSLCertificateError("hostname %r "
+                                      "doesn't match either of %s"
+                                      % (hostname, ', '.join(map(repr, dnsnames))))
+        elif len(dnsnames) == 1:
+            raise SSLCertificateError("hostname %r "
+                                      "doesn't match %r"
+                                      % (hostname, dnsnames[0]))
+        else:
+            raise SSLCertificateError("no appropriate commonName or "
+                                      "subjectAltName fields were found")
diff --git a/libs/tornado/options.py b/libs/tornado/options.py
index ac1f07c19cbf6f094842e8b55334efe6d9888993..ee146fca06aebe177e9058b7466144204a86e384 100755
--- a/libs/tornado/options.py
+++ b/libs/tornado/options.py
@@ -57,7 +57,7 @@ simply call methods on it.  You may create additional `OptionParser`
 instances to define isolated sets of options, such as for subcommands.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import datetime
 import re
@@ -68,6 +68,7 @@ import textwrap
 from tornado.escape import _unicode
 from tornado.log import define_logging_options
 from tornado import stack_context
+from tornado.util import basestring_type
 
 
 class Error(Exception):
@@ -171,7 +172,7 @@ class OptionParser(object):
         if args is None:
             args = sys.argv
         remaining = []
-        for i in xrange(1, len(args)):
+        for i in range(1, len(args)):
             # All things after the last option are command line arguments
             if not args[i].startswith("-"):
                 remaining = args[i:]
@@ -218,15 +219,15 @@ class OptionParser(object):
         """Prints all the command line options to stderr (or another file)."""
         if file is None:
             file = sys.stderr
-        print >> file, "Usage: %s [OPTIONS]" % sys.argv[0]
-        print >> file, "\nOptions:\n"
+        print("Usage: %s [OPTIONS]" % sys.argv[0], file=file)
+        print("\nOptions:\n", file=file)
         by_group = {}
-        for option in self._options.itervalues():
+        for option in self._options.values():
             by_group.setdefault(option.group_name, []).append(option)
 
         for filename, o in sorted(by_group.items()):
             if filename:
-                print >> file, "\n%s options:\n" % os.path.normpath(filename)
+                print("\n%s options:\n" % os.path.normpath(filename), file=file)
             o.sort(key=lambda option: option.name)
             for option in o:
                 prefix = option.name
@@ -238,10 +239,10 @@ class OptionParser(object):
                 lines = textwrap.wrap(description, 79 - 35)
                 if len(prefix) > 30 or len(lines) == 0:
                     lines.insert(0, '')
-                print >> file, "  --%-30s %s" % (prefix, lines[0])
+                print("  --%-30s %s" % (prefix, lines[0]), file=file)
                 for line in lines[1:]:
-                    print >> file, "%-34s %s" % (' ', line)
-        print >> file
+                    print("%-34s %s" % (' ', line), file=file)
+        print(file=file)
 
     def _help_callback(self, value):
         if value:
@@ -272,6 +273,7 @@ class OptionParser(object):
         """
         return _Mockable(self)
 
+
 class _Mockable(object):
     """`mock.patch` compatible wrapper for `OptionParser`.
 
@@ -300,8 +302,9 @@ class _Mockable(object):
     def __delattr__(self, name):
         setattr(self._options, name, self._originals.pop(name))
 
+
 class _Option(object):
-    def __init__(self, name, default=None, type=basestring, help=None,
+    def __init__(self, name, default=None, type=basestring_type, help=None,
                  metavar=None, multiple=False, file_name=None, group_name=None,
                  callback=None):
         if default is None and multiple:
@@ -325,7 +328,7 @@ class _Option(object):
             datetime.datetime: self._parse_datetime,
             datetime.timedelta: self._parse_timedelta,
             bool: self._parse_bool,
-            basestring: self._parse_string,
+            basestring_type: self._parse_string,
         }.get(self.type, self.type)
         if self.multiple:
             self._value = []
@@ -467,6 +470,7 @@ def print_help(file=None):
     """
     return options.print_help(file)
 
+
 def add_parse_callback(callback):
     """Adds a parse callback, to be invoked when option parsing is done.
 
diff --git a/libs/tornado/platform/auto.py b/libs/tornado/platform/auto.py
index 7199cb5ce5fa0f709e069b9f266b06ff9660bc27..e55725b37b172c332acad1726bb00100d88dfbaf 100755
--- a/libs/tornado/platform/auto.py
+++ b/libs/tornado/platform/auto.py
@@ -23,7 +23,7 @@ Most code that needs access to this functionality should do e.g.::
     from tornado.platform.auto import set_close_exec
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import os
 
diff --git a/libs/tornado/platform/common.py b/libs/tornado/platform/common.py
index 39f60bd7989bc244911ef5aaf133a69df5a8b12f..d9c4cf9fb1ad31300587d3e24030d9670ed150d3 100755
--- a/libs/tornado/platform/common.py
+++ b/libs/tornado/platform/common.py
@@ -1,11 +1,10 @@
 """Lowest-common-denominator implementations of platform functionality."""
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import errno
 import socket
 
 from tornado.platform import interface
-from tornado.util import b
 
 
 class Waker(interface.Waker):
@@ -43,9 +42,9 @@ class Waker(interface.Waker):
             try:
                 self.writer.connect(connect_address)
                 break    # success
-            except socket.error, detail:
+            except socket.error as detail:
                 if (not hasattr(errno, 'WSAEADDRINUSE') or
-                    detail[0] != errno.WSAEADDRINUSE):
+                        detail[0] != errno.WSAEADDRINUSE):
                     # "Address already in use" is the only error
                     # I've seen on two WinXP Pro SP2 boxes, under
                     # Pythons 2.3.5 and 2.4.1.
@@ -74,7 +73,7 @@ class Waker(interface.Waker):
 
     def wake(self):
         try:
-            self.writer.send(b("x"))
+            self.writer.send(b"x")
         except (IOError, socket.error):
             pass
 
diff --git a/libs/tornado/platform/epoll.py b/libs/tornado/platform/epoll.py
index fa5b68eda95e73a07b662e53b0582aeea32c2095..b08cc62810f0b993583734d392f8296200f49200 100755
--- a/libs/tornado/platform/epoll.py
+++ b/libs/tornado/platform/epoll.py
@@ -13,56 +13,14 @@
 # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 # License for the specific language governing permissions and limitations
 # under the License.
-"""EPoll-based IOLoop implementation for Linux systems.
+"""EPoll-based IOLoop implementation for Linux systems."""
+from __future__ import absolute_import, division, print_function, with_statement
 
-Supports the standard library's `select.epoll` function for Python 2.6+,
-and our own C module for Python 2.5.
-"""
-from __future__ import absolute_import, division, with_statement
-
-import os
 import select
 
 from tornado.ioloop import PollIOLoop
 
-if hasattr(select, 'epoll'):
-    # Python 2.6+
-    class EPollIOLoop(PollIOLoop):
-        def initialize(self, **kwargs):
-            super(EPollIOLoop, self).initialize(impl=select.epoll(), **kwargs)
-else:
-    # Python 2.5
-    from tornado import epoll
-    
-    class _EPoll(object):
-        """An epoll-based event loop using our C module for Python 2.5 systems"""
-        _EPOLL_CTL_ADD = 1
-        _EPOLL_CTL_DEL = 2
-        _EPOLL_CTL_MOD = 3
-
-        def __init__(self):
-            self._epoll_fd = epoll.epoll_create()
-
-        def fileno(self):
-            return self._epoll_fd
-
-        def close(self):
-            os.close(self._epoll_fd)
-
-        def register(self, fd, events):
-            epoll.epoll_ctl(self._epoll_fd, self._EPOLL_CTL_ADD, fd, events)
-
-        def modify(self, fd, events):
-            epoll.epoll_ctl(self._epoll_fd, self._EPOLL_CTL_MOD, fd, events)
-
-        def unregister(self, fd):
-            epoll.epoll_ctl(self._epoll_fd, self._EPOLL_CTL_DEL, fd, 0)
-
-        def poll(self, timeout):
-            return epoll.epoll_wait(self._epoll_fd, int(timeout * 1000))
-
-
-    class EPollIOLoop(PollIOLoop):
-        def initialize(self, **kwargs):
-            super(EPollIOLoop, self).initialize(impl=_EPoll(), **kwargs)
 
+class EPollIOLoop(PollIOLoop):
+    def initialize(self, **kwargs):
+        super(EPollIOLoop, self).initialize(impl=select.epoll(), **kwargs)
diff --git a/libs/tornado/platform/interface.py b/libs/tornado/platform/interface.py
index 5006f30b8fc0a5bfdffea00277ed081e8349075d..07da6babdbdcc929037401252ce2c613b77d5dd5 100755
--- a/libs/tornado/platform/interface.py
+++ b/libs/tornado/platform/interface.py
@@ -21,7 +21,7 @@ for other tornado.platform modules.  Most code should import the appropriate
 implementation from `tornado.platform.auto`.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 
 def set_close_exec(fd):
diff --git a/libs/tornado/platform/kqueue.py b/libs/tornado/platform/kqueue.py
index 2f14c15c3742c6a68741aafa4b3c2826d73aa243..ceff0a43a31392dbb7255c24d3be9ccfcf6293ad 100755
--- a/libs/tornado/platform/kqueue.py
+++ b/libs/tornado/platform/kqueue.py
@@ -14,7 +14,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 """KQueue-based IOLoop implementation for BSD/Mac systems."""
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import select
 
@@ -22,6 +22,7 @@ from tornado.ioloop import IOLoop, PollIOLoop
 
 assert hasattr(select, 'kqueue'), 'kqueue not supported'
 
+
 class _KQueue(object):
     """A kqueue-based event loop for BSD/Mac systems."""
     def __init__(self):
@@ -52,11 +53,11 @@ class _KQueue(object):
         kevents = []
         if events & IOLoop.WRITE:
             kevents.append(select.kevent(
-                    fd, filter=select.KQ_FILTER_WRITE, flags=flags))
+                fd, filter=select.KQ_FILTER_WRITE, flags=flags))
         if events & IOLoop.READ or not kevents:
             # Always read when there is not a write
             kevents.append(select.kevent(
-                    fd, filter=select.KQ_FILTER_READ, flags=flags))
+                fd, filter=select.KQ_FILTER_READ, flags=flags))
         # Even though control() takes a list, it seems to return EINVAL
         # on Mac OS X (10.6) when there is more than one event in the list.
         for kevent in kevents:
diff --git a/libs/tornado/platform/posix.py b/libs/tornado/platform/posix.py
index 487f97a9a7fc400e83dfd1fb793c59db4a597892..41a5794c63af29e397abbab4e5a2588007b2c3d5 100755
--- a/libs/tornado/platform/posix.py
+++ b/libs/tornado/platform/posix.py
@@ -16,13 +16,12 @@
 
 """Posix implementations of platform-specific functionality."""
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import fcntl
 import os
 
 from tornado.platform import interface
-from tornado.util import b
 
 
 def set_close_exec(fd):
@@ -53,7 +52,7 @@ class Waker(interface.Waker):
 
     def wake(self):
         try:
-            self.writer.write(b("x"))
+            self.writer.write(b"x")
         except IOError:
             pass
 
diff --git a/libs/tornado/platform/select.py b/libs/tornado/platform/select.py
index 51dc964e02365942e97e6756e50998c1e16ba157..8bbb1f4f995cf1c7a7de4f9aaea56f5612a0ea62 100755
--- a/libs/tornado/platform/select.py
+++ b/libs/tornado/platform/select.py
@@ -17,12 +17,13 @@
 
 Used as a fallback for systems that don't support epoll or kqueue.
 """
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import select
 
 from tornado.ioloop import IOLoop, PollIOLoop
 
+
 class _Select(object):
     """A simple, select()-based IOLoop implementation for non-Linux systems"""
     def __init__(self):
@@ -69,7 +70,7 @@ class _Select(object):
             events[fd] = events.get(fd, 0) | IOLoop.ERROR
         return events.items()
 
+
 class SelectIOLoop(PollIOLoop):
     def initialize(self, **kwargs):
         super(SelectIOLoop, self).initialize(impl=_Select(), **kwargs)
-
diff --git a/libs/tornado/platform/twisted.py b/libs/tornado/platform/twisted.py
index 1efc82b70aeb3d4f0ec500d74e5c46d433b18727..34e108d79866664b4f4520412266cc2d976ae121 100755
--- a/libs/tornado/platform/twisted.py
+++ b/libs/tornado/platform/twisted.py
@@ -64,11 +64,10 @@ reactor.  Recommended usage::
 This module has been tested with Twisted versions 11.0.0 and newer.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import functools
 import datetime
-import time
 
 from twisted.internet.posixbase import PosixReactorBase
 from twisted.internet.interfaces import \
@@ -85,6 +84,7 @@ from tornado.stack_context import NullContext, wrap
 from tornado.ioloop import IOLoop
 
 
+@implementer(IDelayedCall)
 class TornadoDelayedCall(object):
     """DelayedCall object for Tornado."""
     def __init__(self, reactor, seconds, f, *args, **kw):
@@ -125,10 +125,9 @@ class TornadoDelayedCall(object):
 
     def active(self):
         return self._active
-# Fake class decorator for python 2.5 compatibility
-TornadoDelayedCall = implementer(IDelayedCall)(TornadoDelayedCall)
 
 
+@implementer(IReactorTime, IReactorFDSet)
 class TornadoReactor(PosixReactorBase):
     """Twisted reactor built on the Tornado IOLoop.
 
@@ -235,7 +234,7 @@ class TornadoReactor(PosixReactorBase):
             with NullContext():
                 self._fds[fd] = (reader, None)
                 self._io_loop.add_handler(fd, self._invoke_callback,
-                                         IOLoop.READ)
+                                          IOLoop.READ)
 
     def addWriter(self, writer):
         """Add a FileDescriptor for notification of data available to write."""
@@ -254,7 +253,7 @@ class TornadoReactor(PosixReactorBase):
             with NullContext():
                 self._fds[fd] = (None, writer)
                 self._io_loop.add_handler(fd, self._invoke_callback,
-                                         IOLoop.WRITE)
+                                          IOLoop.WRITE)
 
     def removeReader(self, reader):
         """Remove a Selectable for notification of data available to read."""
@@ -316,7 +315,6 @@ class TornadoReactor(PosixReactorBase):
 
     def mainLoop(self):
         self._io_loop.start()
-TornadoReactor = implementer(IReactorTime, IReactorFDSet)(TornadoReactor)
 
 
 class _TestReactor(TornadoReactor):
@@ -352,6 +350,8 @@ def install(io_loop=None):
     installReactor(reactor)
     return reactor
 
+
+@implementer(IReadDescriptor, IWriteDescriptor)
 class _FD(object):
     def __init__(self, fd, handler):
         self.fd = fd
@@ -378,7 +378,7 @@ class _FD(object):
 
     def logPrefix(self):
         return ''
-_FD = implementer(IReadDescriptor, IWriteDescriptor)(_FD)
+
 
 class TwistedIOLoop(tornado.ioloop.IOLoop):
     """IOLoop implementation that runs on Twisted.
@@ -405,15 +405,15 @@ class TwistedIOLoop(tornado.ioloop.IOLoop):
         if fd in self.fds:
             raise ValueError('fd %d added twice' % fd)
         self.fds[fd] = _FD(fd, wrap(handler))
-        if events | tornado.ioloop.IOLoop.READ:
+        if events & tornado.ioloop.IOLoop.READ:
             self.fds[fd].reading = True
             self.reactor.addReader(self.fds[fd])
-        if events | tornado.ioloop.IOLoop.WRITE:
+        if events & tornado.ioloop.IOLoop.WRITE:
             self.fds[fd].writing = True
             self.reactor.addWriter(self.fds[fd])
 
     def update_handler(self, fd, events):
-        if events | tornado.ioloop.IOLoop.READ:
+        if events & tornado.ioloop.IOLoop.READ:
             if not self.fds[fd].reading:
                 self.fds[fd].reading = True
                 self.reactor.addReader(self.fds[fd])
@@ -421,7 +421,7 @@ class TwistedIOLoop(tornado.ioloop.IOLoop):
             if self.fds[fd].reading:
                 self.fds[fd].reading = False
                 self.reactor.removeReader(self.fds[fd])
-        if events | tornado.ioloop.IOLoop.WRITE:
+        if events & tornado.ioloop.IOLoop.WRITE:
             if not self.fds[fd].writing:
                 self.fds[fd].writing = True
                 self.reactor.addWriter(self.fds[fd])
diff --git a/libs/tornado/platform/windows.py b/libs/tornado/platform/windows.py
index 80c8a6e2886360e4f2660d98bbf6f29d1bf6cfbf..817bdca13e367ea3ad7ba767bea2b3f913720380 100755
--- a/libs/tornado/platform/windows.py
+++ b/libs/tornado/platform/windows.py
@@ -2,7 +2,7 @@
 # for production use.
 
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 import ctypes
 import ctypes.wintypes
 
diff --git a/libs/tornado/process.py b/libs/tornado/process.py
index fa0be555f0dbeeea574d3c049d0c3f041af8567f..0509eb3a9cb95db0f2515d38641ae4addf1704a0 100755
--- a/libs/tornado/process.py
+++ b/libs/tornado/process.py
@@ -16,10 +16,10 @@
 
 """Utilities for working with multiple processes."""
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import errno
-import functools
+import multiprocessing
 import os
 import signal
 import subprocess
@@ -34,18 +34,17 @@ from tornado.log import gen_log
 from tornado import stack_context
 
 try:
-    import multiprocessing  # Python 2.6+
-except ImportError:
-    multiprocessing = None
+    long  # py2
+except NameError:
+    long = int  # py3
 
 
 def cpu_count():
     """Returns the number of processors on this machine."""
-    if multiprocessing is not None:
-        try:
-            return multiprocessing.cpu_count()
-        except NotImplementedError:
-            pass
+    try:
+        return multiprocessing.cpu_count()
+    except NotImplementedError:
+        pass
     try:
         return os.sysconf("SC_NPROCESSORS_CONF")
     except ValueError:
@@ -125,7 +124,7 @@ def fork_processes(num_processes, max_restarts=100):
     while children:
         try:
             pid, status = os.wait()
-        except OSError, e:
+        except OSError as e:
             if e.errno == errno.EINTR:
                 continue
             raise
@@ -162,6 +161,7 @@ def task_id():
     global _task_id
     return _task_id
 
+
 class Subprocess(object):
     """Wraps ``subprocess.Popen`` with IOStream support.
 
@@ -195,7 +195,7 @@ class Subprocess(object):
             err_r, err_w = os.pipe()
             kwargs['stderr'] = err_w
             to_close.append(err_w)
-            self.stdout = PipeIOStream(err_r, io_loop=self.io_loop)
+            self.stderr = PipeIOStream(err_r, io_loop=self.io_loop)
         self.proc = subprocess.Popen(*args, **kwargs)
         for fd in to_close:
             os.close(fd)
@@ -253,14 +253,14 @@ class Subprocess(object):
 
     @classmethod
     def _cleanup(cls):
-        for pid in cls._waiting.keys():
+        for pid in list(cls._waiting.keys()):  # make a copy
             cls._try_cleanup_process(pid)
 
     @classmethod
     def _try_cleanup_process(cls, pid):
         try:
             ret_pid, status = os.waitpid(pid, os.WNOHANG)
-        except OSError, e:
+        except OSError as e:
             if e.args[0] == errno.ECHILD:
                 return
         if ret_pid == 0:
diff --git a/libs/tornado/simple_httpclient.py b/libs/tornado/simple_httpclient.py
index 7000d987e73da5b96b9c484bbaed483586dde2f8..f33ed242efd37ca4f81662a223c9801b48f1e7aa 100755
--- a/libs/tornado/simple_httpclient.py
+++ b/libs/tornado/simple_httpclient.py
@@ -1,14 +1,14 @@
 #!/usr/bin/env python
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 from tornado.escape import utf8, _unicode, native_str
 from tornado.httpclient import HTTPRequest, HTTPResponse, HTTPError, AsyncHTTPClient, main, _RequestProxy
 from tornado.httputil import HTTPHeaders
 from tornado.iostream import IOStream, SSLIOStream
-from tornado.netutil import Resolver
+from tornado.netutil import Resolver, OverrideResolver
 from tornado.log import gen_log
 from tornado import stack_context
-from tornado.util import b, GzipDecompressor
+from tornado.util import GzipDecompressor
 
 import base64
 import collections
@@ -17,9 +17,8 @@ import functools
 import os.path
 import re
 import socket
+import ssl
 import sys
-import time
-import urlparse
 
 try:
     from io import BytesIO  # python 3
@@ -27,9 +26,9 @@ except ImportError:
     from cStringIO import StringIO as BytesIO  # python 2
 
 try:
-    import ssl  # python 2.6+
+    import urlparse  # py2
 except ImportError:
-    ssl = None
+    import urllib.parse as urlparse  # py3
 
 _DEFAULT_CA_CERTS = os.path.dirname(__file__) + '/ca-certificates.crt'
 
@@ -45,12 +44,8 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
     supported.  In particular, proxies are not supported, connections
     are not reused, and callers cannot select the network interface to be
     used.
-
-    Python 2.6 or higher is required for HTTPS support.  Users of Python 2.5
-    should use the curl-based AsyncHTTPClient if HTTPS support is required.
-
     """
-    def initialize(self, io_loop=None, max_clients=10,
+    def initialize(self, io_loop, max_clients=10,
                    hostname_mapping=None, max_buffer_size=104857600,
                    resolver=None, defaults=None):
         """Creates a AsyncHTTPClient.
@@ -72,32 +67,24 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
         max_buffer_size is the number of bytes that can be read by IOStream. It
         defaults to 100mb.
         """
-        self.io_loop = io_loop
+        super(SimpleAsyncHTTPClient, self).initialize(io_loop,
+                                                      defaults=defaults)
         self.max_clients = max_clients
         self.queue = collections.deque()
         self.active = {}
-        self.hostname_mapping = hostname_mapping
         self.max_buffer_size = max_buffer_size
         self.resolver = resolver or Resolver(io_loop=io_loop)
-        self.defaults = dict(HTTPRequest._DEFAULTS)
-        if defaults is not None:
-            self.defaults.update(defaults)
-
-    def fetch(self, request, callback, **kwargs):
-        if not isinstance(request, HTTPRequest):
-            request = HTTPRequest(url=request, **kwargs)
-        # We're going to modify this (to add Host, Accept-Encoding, etc),
-        # so make sure we don't modify the caller's object.  This is also
-        # where normal dicts get converted to HTTPHeaders objects.
-        request.headers = HTTPHeaders(request.headers)
-        request = _RequestProxy(request, self.defaults)
-        callback = stack_context.wrap(callback)
+        if hostname_mapping is not None:
+            self.resolver = OverrideResolver(resolver=self.resolver,
+                                             mapping=hostname_mapping)
+
+    def fetch_impl(self, request, callback):
         self.queue.append((request, callback))
         self._process_queue()
         if self.queue:
             gen_log.debug("max_clients limit reached, request queued. "
                           "%d active, %d queued requests." % (
-                    len(self.active), len(self.queue)))
+                              len(self.active), len(self.queue)))
 
     def _process_queue(self):
         with stack_context.NullContext():
@@ -108,7 +95,7 @@ class SimpleAsyncHTTPClient(AsyncHTTPClient):
                 _HTTPConnection(self.io_loop, self, request,
                                 functools.partial(self._release_fetch, key),
                                 callback,
-                                self.max_buffer_size)
+                                self.max_buffer_size, self.resolver)
 
     def _release_fetch(self, key):
         del self.active[key]
@@ -119,7 +106,7 @@ class _HTTPConnection(object):
     _SUPPORTED_METHODS = set(["GET", "HEAD", "POST", "PUT", "DELETE", "PATCH", "OPTIONS"])
 
     def __init__(self, io_loop, client, request, release_callback,
-                 final_callback, max_buffer_size):
+                 final_callback, max_buffer_size, resolver):
         self.start_time = io_loop.time()
         self.io_loop = io_loop
         self.client = client
@@ -127,6 +114,7 @@ class _HTTPConnection(object):
         self.release_callback = release_callback
         self.final_callback = final_callback
         self.max_buffer_size = max_buffer_size
+        self.resolver = resolver
         self.code = None
         self.headers = None
         self.chunks = None
@@ -135,9 +123,6 @@ class _HTTPConnection(object):
         self._timeout = None
         with stack_context.ExceptionStackContext(self._handle_exception):
             self.parsed = urlparse.urlsplit(_unicode(self.request.url))
-            if ssl is None and self.parsed.scheme == "https":
-                raise ValueError("HTTPS requires either python2.6+ or "
-                                 "curl_httpclient")
             if self.parsed.scheme not in ("http", "https"):
                 raise ValueError("Unsupported url scheme: %s" %
                                  self.request.url)
@@ -157,8 +142,6 @@ class _HTTPConnection(object):
                 # raw ipv6 addresses in urls are enclosed in brackets
                 host = host[1:-1]
             self.parsed_hostname = host  # save final host for _on_connect
-            if self.client.hostname_mapping is not None:
-                host = self.client.hostname_mapping.get(host, host)
 
             if request.allow_ipv6:
                 af = socket.AF_UNSPEC
@@ -167,7 +150,7 @@ class _HTTPConnection(object):
                 # so restrict to ipv4 by default.
                 af = socket.AF_INET
 
-            self.client.resolver.getaddrinfo(
+            self.resolver.getaddrinfo(
                 host, port, af, socket.SOCK_STREAM, 0, 0,
                 callback=self._on_resolve)
 
@@ -220,31 +203,29 @@ class _HTTPConnection(object):
                 self.start_time + timeout,
                 stack_context.wrap(self._on_timeout))
         self.stream.set_close_callback(self._on_close)
-        self.stream.connect(sockaddr, self._on_connect)
+        # ipv6 addresses are broken (in self.parsed.hostname) until
+        # 2.7, here is correctly parsed value calculated in __init__
+        self.stream.connect(sockaddr, self._on_connect,
+                            server_hostname=self.parsed_hostname)
 
     def _on_timeout(self):
         self._timeout = None
         if self.final_callback is not None:
             raise HTTPError(599, "Timeout")
 
-    def _on_connect(self):
+    def _remove_timeout(self):
         if self._timeout is not None:
             self.io_loop.remove_timeout(self._timeout)
             self._timeout = None
+
+    def _on_connect(self):
+        self._remove_timeout()
         if self.request.request_timeout:
             self._timeout = self.io_loop.add_timeout(
                 self.start_time + self.request.request_timeout,
                 stack_context.wrap(self._on_timeout))
-        if (self.request.validate_cert and
-            isinstance(self.stream, SSLIOStream)):
-            match_hostname(self.stream.socket.getpeercert(),
-                           # ipv6 addresses are broken (in
-                           # self.parsed.hostname) until 2.7, here is
-                           # correctly parsed value calculated in
-                           # __init__
-                           self.parsed_hostname)
         if (self.request.method not in self._SUPPORTED_METHODS and
-            not self.request.allow_nonstandard_methods):
+                not self.request.allow_nonstandard_methods):
             raise KeyError("unknown method %s" % self.request.method)
         for key in ('network_interface',
                     'proxy_host', 'proxy_port',
@@ -265,8 +246,8 @@ class _HTTPConnection(object):
             username = self.request.auth_username
             password = self.request.auth_password or ''
         if username is not None:
-            auth = utf8(username) + b(":") + utf8(password)
-            self.request.headers["Authorization"] = (b("Basic ") +
+            auth = utf8(username) + b":" + utf8(password)
+            self.request.headers["Authorization"] = (b"Basic " +
                                                      base64.b64encode(auth))
         if self.request.user_agent:
             self.request.headers["User-Agent"] = self.request.user_agent
@@ -277,25 +258,25 @@ class _HTTPConnection(object):
                 assert self.request.body is None
         if self.request.body is not None:
             self.request.headers["Content-Length"] = str(len(
-                    self.request.body))
+                self.request.body))
         if (self.request.method == "POST" and
-            "Content-Type" not in self.request.headers):
+                "Content-Type" not in self.request.headers):
             self.request.headers["Content-Type"] = "application/x-www-form-urlencoded"
         if self.request.use_gzip:
             self.request.headers["Accept-Encoding"] = "gzip"
         req_path = ((self.parsed.path or '/') +
-                (('?' + self.parsed.query) if self.parsed.query else ''))
+                   (('?' + self.parsed.query) if self.parsed.query else ''))
         request_lines = [utf8("%s %s HTTP/1.1" % (self.request.method,
                                                   req_path))]
         for k, v in self.request.headers.get_all():
-            line = utf8(k) + b(": ") + utf8(v)
-            if b('\n') in line:
+            line = utf8(k) + b": " + utf8(v)
+            if b'\n' in line:
                 raise ValueError('Newline in header: ' + repr(line))
             request_lines.append(line)
-        self.stream.write(b("\r\n").join(request_lines) + b("\r\n\r\n"))
+        self.stream.write(b"\r\n".join(request_lines) + b"\r\n\r\n")
         if self.request.body is not None:
             self.stream.write(self.request.body)
-        self.stream.read_until_regex(b("\r?\n\r?\n"), self._on_headers)
+        self.stream.read_until_regex(b"\r?\n\r?\n", self._on_headers)
 
     def _release(self):
         if self.release_callback is not None:
@@ -312,10 +293,11 @@ class _HTTPConnection(object):
 
     def _handle_exception(self, typ, value, tb):
         if self.final_callback:
+            self._remove_timeout()
             gen_log.warning("uncaught exception", exc_info=(typ, value, tb))
             self._run_callback(HTTPResponse(self.request, 599, error=value,
-                                request_time=self.io_loop.time() - self.start_time,
-                                ))
+                                            request_time=self.io_loop.time() - self.start_time,
+                                            ))
 
             if hasattr(self, "stream"):
                 self.stream.close()
@@ -334,19 +316,22 @@ class _HTTPConnection(object):
                 message = str(self.stream.error)
             raise HTTPError(599, message)
 
+    def _handle_1xx(self, code):
+        self.stream.read_until_regex(b"\r?\n\r?\n", self._on_headers)
+
     def _on_headers(self, data):
         data = native_str(data.decode("latin1"))
         first_line, _, header_data = data.partition("\n")
         match = re.match("HTTP/1.[01] ([0-9]+) ([^\r]*)", first_line)
         assert match
         code = int(match.group(1))
+        self.headers = HTTPHeaders.parse(header_data)
         if 100 <= code < 200:
-            self.stream.read_until_regex(b("\r?\n\r?\n"), self._on_headers)
+            self._handle_1xx(code)
             return
         else:
             self.code = code
             self.reason = match.group(2)
-        self.headers = HTTPHeaders.parse(header_data)
 
         if "Content-Length" in self.headers:
             if "," in self.headers["Content-Length"]:
@@ -372,38 +357,36 @@ class _HTTPConnection(object):
         if self.request.method == "HEAD" or self.code == 304:
             # HEAD requests and 304 responses never have content, even
             # though they may have content-length headers
-            self._on_body(b(""))
+            self._on_body(b"")
             return
         if 100 <= self.code < 200 or self.code == 204:
             # These response codes never have bodies
             # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.3
             if ("Transfer-Encoding" in self.headers or
-                content_length not in (None, 0)):
+                    content_length not in (None, 0)):
                 raise ValueError("Response with code %d should not have body" %
                                  self.code)
-            self._on_body(b(""))
+            self._on_body(b"")
             return
 
         if (self.request.use_gzip and
-            self.headers.get("Content-Encoding") == "gzip"):
+                self.headers.get("Content-Encoding") == "gzip"):
             self._decompressor = GzipDecompressor()
         if self.headers.get("Transfer-Encoding") == "chunked":
             self.chunks = []
-            self.stream.read_until(b("\r\n"), self._on_chunk_length)
+            self.stream.read_until(b"\r\n", self._on_chunk_length)
         elif content_length is not None:
             self.stream.read_bytes(content_length, self._on_body)
         else:
             self.stream.read_until_close(self._on_body)
 
     def _on_body(self, data):
-        if self._timeout is not None:
-            self.io_loop.remove_timeout(self._timeout)
-            self._timeout = None
+        self._remove_timeout()
         original_request = getattr(self.request, "original_request",
                                    self.request)
         if (self.request.follow_redirects and
             self.request.max_redirects > 0 and
-            self.code in (301, 302, 303, 307)):
+                self.code in (301, 302, 303, 307)):
             assert isinstance(self.request, _RequestProxy)
             new_request = copy.copy(self.request.request)
             new_request.url = urlparse.urljoin(self.request.url,
@@ -472,13 +455,13 @@ class _HTTPConnection(object):
                 # all the data has been decompressed, so we don't need to
                 # decompress again in _on_body
                 self._decompressor = None
-            self._on_body(b('').join(self.chunks))
+            self._on_body(b''.join(self.chunks))
         else:
             self.stream.read_bytes(length + 2,  # chunk ends with \r\n
-                              self._on_chunk_data)
+                                   self._on_chunk_data)
 
     def _on_chunk_data(self, data):
-        assert data[-2:] == b("\r\n")
+        assert data[-2:] == b"\r\n"
         chunk = data[:-2]
         if self._decompressor:
             chunk = self._decompressor.decompress(chunk)
@@ -486,68 +469,8 @@ class _HTTPConnection(object):
             self.request.streaming_callback(chunk)
         else:
             self.chunks.append(chunk)
-        self.stream.read_until(b("\r\n"), self._on_chunk_length)
-
-
-# match_hostname was added to the standard library ssl module in python 3.2.
-# The following code was backported for older releases and copied from
-# https://bitbucket.org/brandon/backports.ssl_match_hostname
-class CertificateError(ValueError):
-    pass
-
+        self.stream.read_until(b"\r\n", self._on_chunk_length)
 
-def _dnsname_to_pat(dn):
-    pats = []
-    for frag in dn.split(r'.'):
-        if frag == '*':
-            # When '*' is a fragment by itself, it matches a non-empty dotless
-            # fragment.
-            pats.append('[^.]+')
-        else:
-            # Otherwise, '*' matches any dotless fragment.
-            frag = re.escape(frag)
-            pats.append(frag.replace(r'\*', '[^.]*'))
-    return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
-
-
-def match_hostname(cert, hostname):
-    """Verify that *cert* (in decoded format as returned by
-    SSLSocket.getpeercert()) matches the *hostname*.  RFC 2818 rules
-    are mostly followed, but IP addresses are not accepted for *hostname*.
-
-    CertificateError is raised on failure. On success, the function
-    returns nothing.
-    """
-    if not cert:
-        raise ValueError("empty or no certificate")
-    dnsnames = []
-    san = cert.get('subjectAltName', ())
-    for key, value in san:
-        if key == 'DNS':
-            if _dnsname_to_pat(value).match(hostname):
-                return
-            dnsnames.append(value)
-    if not san:
-        # The subject is only checked when subjectAltName is empty
-        for sub in cert.get('subject', ()):
-            for key, value in sub:
-                # XXX according to RFC 2818, the most specific Common Name
-                # must be used.
-                if key == 'commonName':
-                    if _dnsname_to_pat(value).match(hostname):
-                        return
-                    dnsnames.append(value)
-    if len(dnsnames) > 1:
-        raise CertificateError("hostname %r "
-            "doesn't match either of %s"
-            % (hostname, ', '.join(map(repr, dnsnames))))
-    elif len(dnsnames) == 1:
-        raise CertificateError("hostname %r "
-            "doesn't match %r"
-            % (hostname, dnsnames[0]))
-    else:
-        raise CertificateError("no appropriate commonName or "
-            "subjectAltName fields were found")
 
 if __name__ == "__main__":
     AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
diff --git a/libs/tornado/stack_context.py b/libs/tornado/stack_context.py
index d4aec3c5eeb30f43ced1c2dce7a5a958139a12ec..c30a2598f3cf68b213262dc0f99882be5461042c 100755
--- a/libs/tornado/stack_context.py
+++ b/libs/tornado/stack_context.py
@@ -14,7 +14,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-'''StackContext allows applications to maintain threadlocal-like state
+"""StackContext allows applications to maintain threadlocal-like state
 that follows execution as it moves to other execution contexts.
 
 The motivating examples are to eliminate the need for explicit
@@ -64,9 +64,9 @@ Here are a few rules of thumb for when it's necessary:
   persist across asynchronous calls, create a new `StackContext` (or
   `ExceptionStackContext`), and make your asynchronous calls in a ``with``
   block that references your `StackContext`.
-'''
+"""
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
 import contextlib
 import functools
@@ -77,6 +77,10 @@ import threading
 from tornado.util import raise_exc_info
 
 
+class StackContextInconsistentError(Exception):
+    pass
+
+
 class _State(threading.local):
     def __init__(self):
         self.contexts = ()
@@ -84,7 +88,7 @@ _state = _State()
 
 
 class StackContext(object):
-    '''Establishes the given context as a StackContext that will be transferred.
+    """Establishes the given context as a StackContext that will be transferred.
 
     Note that the parameter is a callable that returns a context
     manager, not the context itself.  That is, where for a
@@ -102,7 +106,7 @@ class StackContext(object):
     deactivating a context does not affect any instances of that
     context that are currently pending).  This is an advanced feature
     and not necessary in most applications.
-    '''
+    """
     def __init__(self, context_factory, _active_cell=None):
         self.context_factory = context_factory
         self.active_cell = _active_cell or [True]
@@ -113,8 +117,10 @@ class StackContext(object):
     def __enter__(self):
         self.old_contexts = _state.contexts
         # _state.contexts is a tuple of (class, arg, active_cell) tuples
-        _state.contexts = (self.old_contexts +
-                           ((StackContext, self.context_factory, self.active_cell),))
+        self.new_contexts = (self.old_contexts +
+                             ((StackContext, self.context_factory,
+                               self.active_cell),))
+        _state.contexts = self.new_contexts
         try:
             self.context = self.context_factory()
             self.context.__enter__()
@@ -127,11 +133,23 @@ class StackContext(object):
         try:
             return self.context.__exit__(type, value, traceback)
         finally:
+            final_contexts = _state.contexts
             _state.contexts = self.old_contexts
+            # Generator coroutines and with-statements with non-local
+            # effects interact badly.  Check here for signs of
+            # the stack getting out of sync.
+            # Note that this check comes after restoring _state.context
+            # so that if it fails things are left in a (relatively)
+            # consistent state.
+            if final_contexts is not self.new_contexts:
+                raise StackContextInconsistentError(
+                    'stack_context inconsistency (may be caused by yield '
+                    'within a "with StackContext" block)')
+            self.old_contexts = self.new_contexts = None
 
 
 class ExceptionStackContext(object):
-    '''Specialization of StackContext for exception handling.
+    """Specialization of StackContext for exception handling.
 
     The supplied exception_handler function will be called in the
     event of an uncaught exception in this context.  The semantics are
@@ -142,16 +160,17 @@ class ExceptionStackContext(object):
 
     If the exception handler returns true, the exception will be
     consumed and will not be propagated to other exception handlers.
-    '''
+    """
     def __init__(self, exception_handler, _active_cell=None):
         self.exception_handler = exception_handler
         self.active_cell = _active_cell or [True]
 
     def __enter__(self):
         self.old_contexts = _state.contexts
-        _state.contexts = (self.old_contexts +
-                           ((ExceptionStackContext, self.exception_handler,
-                             self.active_cell),))
+        self.new_contexts = (self.old_contexts +
+                             ((ExceptionStackContext, self.exception_handler,
+                               self.active_cell),))
+        _state.contexts = self.new_contexts
         return lambda: operator.setitem(self.active_cell, 0, False)
 
     def __exit__(self, type, value, traceback):
@@ -159,17 +178,22 @@ class ExceptionStackContext(object):
             if type is not None:
                 return self.exception_handler(type, value, traceback)
         finally:
+            final_contexts = _state.contexts
             _state.contexts = self.old_contexts
-            self.old_contexts = None
+            if final_contexts is not self.new_contexts:
+                raise StackContextInconsistentError(
+                    'stack_context inconsistency (may be caused by yield '
+                    'within a "with StackContext" block)')
+            self.old_contexts = self.new_contexts = None
 
 
 class NullContext(object):
-    '''Resets the StackContext.
+    """Resets the StackContext.
 
     Useful when creating a shared resource on demand (e.g. an AsyncHTTPClient)
     where the stack that caused the creating is not relevant to future
     operations.
-    '''
+    """
     def __enter__(self):
         self.old_contexts = _state.contexts
         _state.contexts = ()
@@ -183,13 +207,13 @@ class _StackContextWrapper(functools.partial):
 
 
 def wrap(fn):
-    '''Returns a callable object that will restore the current StackContext
+    """Returns a callable object that will restore the current StackContext
     when executed.
 
     Use this whenever saving a callback to be executed later in a
     different execution context (either in a different thread or
     asynchronously in the same thread).
-    '''
+    """
     if fn is None or fn.__class__ is _StackContextWrapper:
         return fn
     # functools.wraps doesn't appear to work on functools.partial objects
diff --git a/libs/tornado/tcpserver.py b/libs/tornado/tcpserver.py
new file mode 100755
index 0000000000000000000000000000000000000000..52ed70b1d733dcfa1db79c64b3a673aab9aefbe8
--- /dev/null
+++ b/libs/tornado/tcpserver.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+#
+# Copyright 2011 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""A non-blocking, single-threaded TCP server."""
+from __future__ import absolute_import, division, print_function, with_statement
+
+import errno
+import os
+import socket
+import ssl
+
+from tornado.log import app_log
+from tornado.ioloop import IOLoop
+from tornado.iostream import IOStream, SSLIOStream
+from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket
+from tornado import process
+
+class TCPServer(object):
+    r"""A non-blocking, single-threaded TCP server.
+
+    To use `TCPServer`, define a subclass which overrides the `handle_stream`
+    method.
+
+    `TCPServer` can serve SSL traffic with Python 2.6+ and OpenSSL.
+    To make this server serve SSL traffic, send the ssl_options dictionary
+    argument with the arguments required for the `ssl.wrap_socket` method,
+    including "certfile" and "keyfile"::
+
+       TCPServer(ssl_options={
+           "certfile": os.path.join(data_dir, "mydomain.crt"),
+           "keyfile": os.path.join(data_dir, "mydomain.key"),
+       })
+
+    `TCPServer` initialization follows one of three patterns:
+
+    1. `listen`: simple single-process::
+
+            server = TCPServer()
+            server.listen(8888)
+            IOLoop.instance().start()
+
+    2. `bind`/`start`: simple multi-process::
+
+            server = TCPServer()
+            server.bind(8888)
+            server.start(0)  # Forks multiple sub-processes
+            IOLoop.instance().start()
+
+       When using this interface, an `IOLoop` must *not* be passed
+       to the `TCPServer` constructor.  `start` will always start
+       the server on the default singleton `IOLoop`.
+
+    3. `add_sockets`: advanced multi-process::
+
+            sockets = bind_sockets(8888)
+            tornado.process.fork_processes(0)
+            server = TCPServer()
+            server.add_sockets(sockets)
+            IOLoop.instance().start()
+
+       The `add_sockets` interface is more complicated, but it can be
+       used with `tornado.process.fork_processes` to give you more
+       flexibility in when the fork happens.  `add_sockets` can
+       also be used in single-process servers if you want to create
+       your listening sockets in some way other than
+       `bind_sockets`.
+    """
+    def __init__(self, io_loop=None, ssl_options=None):
+        self.io_loop = io_loop
+        self.ssl_options = ssl_options
+        self._sockets = {}  # fd -> socket object
+        self._pending_sockets = []
+        self._started = False
+
+        # Verify the SSL options. Otherwise we don't get errors until clients
+        # connect. This doesn't verify that the keys are legitimate, but
+        # the SSL module doesn't do that until there is a connected socket
+        # which seems like too much work
+        if self.ssl_options is not None and isinstance(self.ssl_options, dict):
+            # Only certfile is required: it can contain both keys
+            if 'certfile' not in self.ssl_options:
+                raise KeyError('missing key "certfile" in ssl_options')
+
+            if not os.path.exists(self.ssl_options['certfile']):
+                raise ValueError('certfile "%s" does not exist' %
+                                 self.ssl_options['certfile'])
+            if ('keyfile' in self.ssl_options and
+                    not os.path.exists(self.ssl_options['keyfile'])):
+                raise ValueError('keyfile "%s" does not exist' %
+                                 self.ssl_options['keyfile'])
+
+    def listen(self, port, address=""):
+        """Starts accepting connections on the given port.
+
+        This method may be called more than once to listen on multiple ports.
+        `listen` takes effect immediately; it is not necessary to call
+        `TCPServer.start` afterwards.  It is, however, necessary to start
+        the `IOLoop`.
+        """
+        sockets = bind_sockets(port, address=address)
+        self.add_sockets(sockets)
+
+    def add_sockets(self, sockets):
+        """Makes this server start accepting connections on the given sockets.
+
+        The ``sockets`` parameter is a list of socket objects such as
+        those returned by `bind_sockets`.
+        `add_sockets` is typically used in combination with that
+        method and `tornado.process.fork_processes` to provide greater
+        control over the initialization of a multi-process server.
+        """
+        if self.io_loop is None:
+            self.io_loop = IOLoop.instance()
+
+        for sock in sockets:
+            self._sockets[sock.fileno()] = sock
+            add_accept_handler(sock, self._handle_connection,
+                               io_loop=self.io_loop)
+
+    def add_socket(self, socket):
+        """Singular version of `add_sockets`.  Takes a single socket object."""
+        self.add_sockets([socket])
+
+    def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=128):
+        """Binds this server to the given port on the given address.
+
+        To start the server, call `start`. If you want to run this server
+        in a single process, you can call `listen` as a shortcut to the
+        sequence of `bind` and `start` calls.
+
+        Address may be either an IP address or hostname.  If it's a hostname,
+        the server will listen on all IP addresses associated with the
+        name.  Address may be an empty string or None to listen on all
+        available interfaces.  Family may be set to either ``socket.AF_INET``
+        or ``socket.AF_INET6`` to restrict to ipv4 or ipv6 addresses, otherwise
+        both will be used if available.
+
+        The ``backlog`` argument has the same meaning as for
+        `socket.listen`.
+
+        This method may be called multiple times prior to `start` to listen
+        on multiple ports or interfaces.
+        """
+        sockets = bind_sockets(port, address=address, family=family,
+                               backlog=backlog)
+        if self._started:
+            self.add_sockets(sockets)
+        else:
+            self._pending_sockets.extend(sockets)
+
+    def start(self, num_processes=1):
+        """Starts this server in the IOLoop.
+
+        By default, we run the server in this process and do not fork any
+        additional child process.
+
+        If num_processes is ``None`` or <= 0, we detect the number of cores
+        available on this machine and fork that number of child
+        processes. If num_processes is given and > 1, we fork that
+        specific number of sub-processes.
+
+        Since we use processes and not threads, there is no shared memory
+        between any server code.
+
+        Note that multiple processes are not compatible with the autoreload
+        module (or the ``debug=True`` option to `tornado.web.Application`).
+        When using multiple processes, no IOLoops can be created or
+        referenced until after the call to ``TCPServer.start(n)``.
+        """
+        assert not self._started
+        self._started = True
+        if num_processes != 1:
+            process.fork_processes(num_processes)
+        sockets = self._pending_sockets
+        self._pending_sockets = []
+        self.add_sockets(sockets)
+
+    def stop(self):
+        """Stops listening for new connections.
+
+        Requests currently in progress may still continue after the
+        server is stopped.
+        """
+        for fd, sock in self._sockets.items():
+            self.io_loop.remove_handler(fd)
+            sock.close()
+
+    def handle_stream(self, stream, address):
+        """Override to handle a new `IOStream` from an incoming connection."""
+        raise NotImplementedError()
+
+    def _handle_connection(self, connection, address):
+        if self.ssl_options is not None:
+            assert ssl, "Python 2.6+ and OpenSSL required for SSL"
+            try:
+                connection = ssl_wrap_socket(connection,
+                                             self.ssl_options,
+                                             server_side=True,
+                                             do_handshake_on_connect=False)
+            except ssl.SSLError as err:
+                if err.args[0] == ssl.SSL_ERROR_EOF:
+                    return connection.close()
+                else:
+                    raise
+            except socket.error as err:
+                if err.args[0] == errno.ECONNABORTED:
+                    return connection.close()
+                else:
+                    raise
+        try:
+            if self.ssl_options is not None:
+                stream = SSLIOStream(connection, io_loop=self.io_loop)
+            else:
+                stream = IOStream(connection, io_loop=self.io_loop)
+            self.handle_stream(stream, address)
+        except Exception:
+            app_log.error("Error in connection callback", exc_info=True)
diff --git a/libs/tornado/template.py b/libs/tornado/template.py
index 0cd8124e8e5c44a7fca487df99b7c1100f92ef15..4f61de3f8fe5783b5e6e7da7ca58df3d832cfea0 100755
--- a/libs/tornado/template.py
+++ b/libs/tornado/template.py
@@ -179,9 +179,8 @@ with ``{# ... #}``.
     ``{% continue %}`` may be used inside the loop.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-import cStringIO
 import datetime
 import linecache
 import os.path
@@ -191,7 +190,12 @@ import threading
 
 from tornado import escape
 from tornado.log import app_log
-from tornado.util import bytes_type, ObjectDict
+from tornado.util import bytes_type, ObjectDict, exec_in, unicode_type
+
+try:
+    from cStringIO import StringIO  # py2
+except ImportError:
+    from io import StringIO  # py3
 
 _DEFAULT_AUTOESCAPE = "xhtml_escape"
 _UNSET = object()
@@ -226,10 +230,12 @@ class Template(object):
         try:
             # Under python2.5, the fake filename used here must match
             # the module name used in __name__ below.
+            # The dont_inherit flag prevents template.py's future imports
+            # from being applied to the generated code.
             self.compiled = compile(
                 escape.to_unicode(self.code),
                 "%s.generated.py" % self.name.replace('.', '_'),
-                "exec")
+                "exec", dont_inherit=True)
         except Exception:
             formatted_code = _format_code(self.code).rstrip()
             app_log.error("%s code:\n%s", self.name, formatted_code)
@@ -246,7 +252,7 @@ class Template(object):
             "linkify": escape.linkify,
             "datetime": datetime,
             "_utf8": escape.utf8,  # for internal use
-            "_string_types": (unicode, bytes_type),
+            "_string_types": (unicode_type, bytes_type),
             # __name__ and __loader__ allow the traceback mechanism to find
             # the generated source code.
             "__name__": self.name.replace('.', '_'),
@@ -254,7 +260,7 @@ class Template(object):
         }
         namespace.update(self.namespace)
         namespace.update(kwargs)
-        exec self.compiled in namespace
+        exec_in(self.compiled, namespace)
         execute = namespace["_execute"]
         # Clear the traceback module's cache of source data now that
         # we've generated a new template (mainly for this module's
@@ -263,7 +269,7 @@ class Template(object):
         return execute()
 
     def _generate_python(self, loader, compress_whitespace):
-        buffer = cStringIO.StringIO()
+        buffer = StringIO()
         try:
             # named_blocks maps from names to _NamedBlock objects
             named_blocks = {}
@@ -346,8 +352,8 @@ class Loader(BaseLoader):
 
     def resolve_path(self, name, parent_path=None):
         if parent_path and not parent_path.startswith("<") and \
-           not parent_path.startswith("/") and \
-           not name.startswith("/"):
+            not parent_path.startswith("/") and \
+                not name.startswith("/"):
             current_path = os.path.join(self.root, parent_path)
             file_dir = os.path.dirname(os.path.abspath(current_path))
             relative_path = os.path.abspath(os.path.join(file_dir, name))
@@ -371,8 +377,8 @@ class DictLoader(BaseLoader):
 
     def resolve_path(self, name, parent_path=None):
         if parent_path and not parent_path.startswith("<") and \
-           not parent_path.startswith("/") and \
-           not name.startswith("/"):
+            not parent_path.startswith("/") and \
+                not name.startswith("/"):
             file_dir = posixpath.dirname(parent_path)
             name = posixpath.normpath(posixpath.join(file_dir, name))
         return name
@@ -620,7 +626,7 @@ class _CodeWriter(object):
             ancestors = ["%s:%d" % (tmpl.name, lineno)
                          for (tmpl, lineno) in self.include_stack]
             line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
-        print >> self.file, "    " * indent + line + line_comment
+        print("    " * indent + line + line_comment, file=self.file)
 
 
 class _TemplateReader(object):
@@ -708,7 +714,7 @@ def _parse(reader, template, in_block=None, in_loop=None):
             # innermost ones.  This is useful when generating languages
             # like latex where curlies are also meaningful
             if (curly + 2 < reader.remaining() and
-                reader[curly + 1] == '{' and reader[curly + 2] == '{'):
+                    reader[curly + 1] == '{' and reader[curly + 2] == '{'):
                 curly += 1
                 continue
             break
@@ -775,7 +781,7 @@ def _parse(reader, template, in_block=None, in_loop=None):
         if allowed_parents is not None:
             if not in_block:
                 raise ParseError("%s outside %s block" %
-                            (operator, allowed_parents))
+                                (operator, allowed_parents))
             if in_block not in allowed_parents:
                 raise ParseError("%s block cannot be attached to %s block" % (operator, in_block))
             body.chunks.append(_IntermediateControlBlock(contents, line))
diff --git a/libs/tornado/testing.py b/libs/tornado/testing.py
index 22376627e58903fb958c777547b008d978ecd58f..30fe4e29cf6896bf368dd861d3c42ee85ad13b4f 100755
--- a/libs/tornado/testing.py
+++ b/libs/tornado/testing.py
@@ -18,10 +18,10 @@ inheritance.  See the docstrings for each class/function below for more
 information.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-from cStringIO import StringIO
 try:
+    from tornado import gen
     from tornado.httpclient import AsyncHTTPClient
     from tornado.httpserver import HTTPServer
     from tornado.simple_httpclient import SimpleAsyncHTTPClient
@@ -31,20 +31,27 @@ except ImportError:
     # These modules are not importable on app engine.  Parts of this module
     # won't work, but e.g. LogTrapTestCase and main() will.
     AsyncHTTPClient = None
+    gen = None
     HTTPServer = None
     IOLoop = None
     netutil = None
     SimpleAsyncHTTPClient = None
 from tornado.log import gen_log
 from tornado.stack_context import ExceptionStackContext
-from tornado.util import raise_exc_info
+from tornado.util import raise_exc_info, basestring_type
+import functools
 import logging
 import os
 import re
 import signal
 import socket
 import sys
-import time
+import types
+
+try:
+    from io import StringIO  # py3
+except ImportError:
+    from cStringIO import StringIO  # py2
 
 # Tornado's own test suite requires the updated unittest module
 # (either py27+ or unittest2) so tornado.test.util enforces
@@ -151,7 +158,7 @@ class AsyncTestCase(unittest.TestCase):
     def tearDown(self):
         self.io_loop.clear_current()
         if (not IOLoop.initialized() or
-            self.io_loop is not IOLoop.instance()):
+                self.io_loop is not IOLoop.instance()):
             # Try to clean up any file descriptors left open in the ioloop.
             # This avoids leaks, especially when tests are run repeatedly
             # in the same process with autoreload (because curl does not
@@ -160,10 +167,10 @@ class AsyncTestCase(unittest.TestCase):
         super(AsyncTestCase, self).tearDown()
 
     def get_new_ioloop(self):
-        '''Creates a new IOLoop for this test.  May be overridden in
+        """Creates a new IOLoop for this test.  May be overridden in
         subclasses for tests that require a specific IOLoop (usually
         the singleton).
-        '''
+        """
         return IOLoop()
 
     def _handle_exception(self, typ, value, tb):
@@ -185,12 +192,12 @@ class AsyncTestCase(unittest.TestCase):
         self.__rethrow()
 
     def stop(self, _arg=None, **kwargs):
-        '''Stops the ioloop, causing one pending (or future) call to wait()
+        """Stops the ioloop, causing one pending (or future) call to wait()
         to return.
 
         Keyword arguments or a single positional argument passed to stop() are
         saved and will be returned by wait().
-        '''
+        """
         assert _arg is None or not kwargs
         self.__stop_args = kwargs or _arg
         if self.__running:
@@ -211,20 +218,21 @@ class AsyncTestCase(unittest.TestCase):
                 def timeout_func():
                     try:
                         raise self.failureException(
-                          'Async operation timed out after %s seconds' %
-                          timeout)
+                            'Async operation timed out after %s seconds' %
+                            timeout)
                     except Exception:
                         self.__failure = sys.exc_info()
                     self.stop()
-                if self.__timeout is not None:
-                    self.io_loop.remove_timeout(self.__timeout)
                 self.__timeout = self.io_loop.add_timeout(self.io_loop.time() + timeout, timeout_func)
             while True:
                 self.__running = True
                 self.io_loop.start()
                 if (self.__failure is not None or
-                    condition is None or condition()):
+                        condition is None or condition()):
                     break
+            if self.__timeout is not None:
+                self.io_loop.remove_timeout(self.__timeout)
+                self.__timeout = None
         assert self.__stopped
         self.__stopped = False
         self.__rethrow()
@@ -234,7 +242,7 @@ class AsyncTestCase(unittest.TestCase):
 
 
 class AsyncHTTPTestCase(AsyncTestCase):
-    '''A test case that starts up an HTTP server.
+    """A test case that starts up an HTTP server.
 
     Subclasses must override get_app(), which returns the
     tornado.web.Application (or other HTTPServer callback) to be tested.
@@ -255,7 +263,7 @@ class AsyncHTTPTestCase(AsyncTestCase):
                 self.http_client.fetch(self.get_url('/'), self.stop)
                 response = self.wait()
                 # test contents of response
-    '''
+    """
     def setUp(self):
         super(AsyncHTTPTestCase, self).setUp()
         sock, port = bind_unused_port()
@@ -314,7 +322,7 @@ class AsyncHTTPTestCase(AsyncTestCase):
     def tearDown(self):
         self.http_server.stop()
         if (not IOLoop.initialized() or
-            self.http_client.io_loop is not IOLoop.instance()):
+                self.http_client.io_loop is not IOLoop.instance()):
             self.http_client.close()
         super(AsyncHTTPTestCase, self).tearDown()
 
@@ -342,13 +350,43 @@ class AsyncHTTPSTestCase(AsyncHTTPTestCase):
         # openssl req -new -keyout tornado/test/test.key -out tornado/test/test.crt -nodes -days 3650 -x509
         module_dir = os.path.dirname(__file__)
         return dict(
-                certfile=os.path.join(module_dir, 'test', 'test.crt'),
-                keyfile=os.path.join(module_dir, 'test', 'test.key'))
+            certfile=os.path.join(module_dir, 'test', 'test.crt'),
+            keyfile=os.path.join(module_dir, 'test', 'test.key'))
 
     def get_protocol(self):
         return 'https'
 
 
+def gen_test(f):
+    """Testing equivalent of ``@gen.engine``, to be applied to test methods.
+
+    ``@gen.engine`` cannot be used on tests because the `IOLoop` is not
+    already running.  ``@gen_test`` should be applied to test methods
+    on subclasses of `AsyncTestCase`.
+
+    Note that unlike most uses of ``@gen.engine``, ``@gen_test`` can
+    detect automatically when the function finishes cleanly so there
+    is no need to run a callback to signal completion.
+
+    Example::
+        class MyTest(AsyncHTTPTestCase):
+            @gen_test
+            def test_something(self):
+                response = yield gen.Task(self.fetch('/'))
+
+    """
+    @functools.wraps(f)
+    def wrapper(self, *args, **kwargs):
+        result = f(self, *args, **kwargs)
+        if result is None:
+            return
+        assert isinstance(result, types.GeneratorType)
+        runner = gen.Runner(result, self.stop)
+        runner.run()
+        self.wait()
+    return wrapper
+
+
 class LogTrapTestCase(unittest.TestCase):
     """A test case that captures and discards all logging output
     if the test passes.
@@ -361,21 +399,21 @@ class LogTrapTestCase(unittest.TestCase):
 
     This class assumes that only one log handler is configured and that
     it is a StreamHandler.  This is true for both logging.basicConfig
-    and the "pretty logging" configured by tornado.options.
+    and the "pretty logging" configured by tornado.options.  It is not
+    compatible with other log buffering mechanisms, such as those provided
+    by some test runners.
     """
     def run(self, result=None):
         logger = logging.getLogger()
-        if len(logger.handlers) > 1:
-            # Multiple handlers have been defined.  It gets messy to handle
-            # this, especially since the handlers may have different
-            # formatters.  Just leave the logging alone in this case.
-            super(LogTrapTestCase, self).run(result)
-            return
         if not logger.handlers:
             logging.basicConfig()
-        self.assertEqual(len(logger.handlers), 1)
         handler = logger.handlers[0]
-        assert isinstance(handler, logging.StreamHandler)
+        if (len(logger.handlers) > 1 or
+            not isinstance(handler, logging.StreamHandler)):
+            # Logging has been configured in a way we don't recognize,
+            # so just leave it alone.
+            super(LogTrapTestCase, self).run(result)
+            return
         old_stream = handler.stream
         try:
             handler.stream = StringIO()
@@ -410,7 +448,7 @@ class ExpectLog(logging.Filter):
         :param required: If true, an exeption will be raised if the end of
             the ``with`` statement is reached without matching any log entries.
         """
-        if isinstance(logger, basestring):
+        if isinstance(logger, basestring_type):
             logger = logging.getLogger(logger)
         self.logger = logger
         self.regex = re.compile(regex)
@@ -496,7 +534,7 @@ def main(**kwargs):
         kwargs['buffer'] = True
 
     if __name__ == '__main__' and len(argv) == 1:
-        print >> sys.stderr, "No tests specified"
+        print("No tests specified", file=sys.stderr)
         sys.exit(1)
     try:
         # In order to be able to run tests by their fully-qualified name
@@ -509,7 +547,7 @@ def main(**kwargs):
             unittest.main(module=None, argv=argv, **kwargs)
         else:
             unittest.main(defaultTest="all", argv=argv, **kwargs)
-    except SystemExit, e:
+    except SystemExit as e:
         if e.code == 0:
             gen_log.info('PASS')
         else:
diff --git a/libs/tornado/util.py b/libs/tornado/util.py
index f550449a0725adb6fb32e3d8ee99211caf459a0a..69de2c8e8ee42b0df47ca36ef5d78f6f6d6844a7 100755
--- a/libs/tornado/util.py
+++ b/libs/tornado/util.py
@@ -1,7 +1,9 @@
 """Miscellaneous utility functions."""
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
+import inspect
+import sys
 import zlib
 
 
@@ -62,38 +64,47 @@ def import_object(name):
     obj = __import__('.'.join(parts[:-1]), None, None, [parts[-1]], 0)
     return getattr(obj, parts[-1])
 
-# Fake byte literal support:  In python 2.6+, you can say b"foo" to get
-# a byte literal (str in 2.x, bytes in 3.x).  There's no way to do this
-# in a way that supports 2.5, though, so we need a function wrapper
-# to convert our string literals.  b() should only be applied to literal
-# latin1 strings.  Once we drop support for 2.5, we can remove this function
-# and just use byte literals.
-if str is unicode:
-    def b(s):
-        return s.encode('latin1')
+# Fake unicode literal support:  Python 3.2 doesn't have the u'' marker for
+# literal strings, and alternative solutions like "from __future__ import
+# unicode_literals" have other problems (see PEP 414).  u() can be applied
+# to ascii strings that include \u escapes (but they must not contain
+# literal non-ascii characters).
+if type('') is not type(b''):
+    def u(s):
+        return s
     bytes_type = bytes
+    unicode_type = str
+    basestring_type = str
 else:
-    def b(s):
-        return s
+    def u(s):
+        return s.decode('unicode_escape')
     bytes_type = str
+    unicode_type = unicode
+    basestring_type = basestring
 
 
+if sys.version_info > (3,):
+    exec("""
 def raise_exc_info(exc_info):
-    """Re-raise an exception (with original traceback) from an exc_info tuple.
+    raise exc_info[1].with_traceback(exc_info[2])
 
-    The argument is a ``(type, value, traceback)`` tuple as returned by
-    `sys.exc_info`.
-    """
-    # 2to3 isn't smart enough to convert three-argument raise
-    # statements correctly in some cases.
-    if isinstance(exc_info[1], exc_info[0]):
-        raise exc_info[1], None, exc_info[2]
-        # After 2to3: raise exc_info[1].with_traceback(exc_info[2])
-    else:
-        # I think this branch is only taken for string exceptions,
-        # which were removed in Python 2.6.
-        raise exc_info[0], exc_info[1], exc_info[2]
-        # After 2to3: raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
+def exec_in(code, glob, loc=None):
+    if isinstance(code, str):
+        code = compile(code, '<string>', 'exec', dont_inherit=True)
+    exec(code, glob, loc)
+""")
+else:
+    exec("""
+def raise_exc_info(exc_info):
+    raise exc_info[0], exc_info[1], exc_info[2]
+
+def exec_in(code, glob, loc=None):
+    if isinstance(code, basestring):
+        # exec(string) inherits the caller's future imports; compile
+        # the string first to prevent that.
+        code = compile(code, '<string>', 'exec', dont_inherit=True)
+    exec code in glob, loc
+""")
 
 
 class Configurable(object):
@@ -164,7 +175,7 @@ class Configurable(object):
         some parameters.
         """
         base = cls.configurable_base()
-        if isinstance(impl, (unicode, bytes_type)):
+        if isinstance(impl, (unicode_type, bytes_type)):
             impl = import_object(impl)
         if impl is not None and not issubclass(impl, cls):
             raise ValueError("Invalid subclass of %s" % cls)
@@ -179,7 +190,6 @@ class Configurable(object):
             base.__impl_class = cls.configurable_default()
         return base.__impl_class
 
-
     @classmethod
     def _save_configuration(cls):
         base = cls.configurable_base()
@@ -192,6 +202,45 @@ class Configurable(object):
         base.__impl_kwargs = saved[1]
 
 
+class ArgReplacer(object):
+    """Replaces one value in an ``args, kwargs`` pair.
+
+    Inspects the function signature to find an argument by name
+    whether it is passed by position or keyword.  For use in decorators
+    and similar wrappers.
+    """
+    def __init__(self, func, name):
+        """Create an ArgReplacer for the named argument to the given function.
+        """
+        self.name = name
+        try:
+            self.arg_pos = inspect.getargspec(func).args.index(self.name)
+        except ValueError:
+            # Not a positional parameter
+            self.arg_pos = None
+
+    def replace(self, new_value, args, kwargs):
+        """Replace the named argument in ``args, kwargs`` with ``new_value``.
+
+        Returns ``(old_value, args, kwargs)``.  The returned ``args`` and
+        ``kwargs`` objects may not be the same as the input objects, or
+        the input objects may be mutated.
+
+        If the named argument was not found, ``new_value`` will be added
+        to ``kwargs`` and None will be returned as ``old_value``.
+        """
+        if self.arg_pos is not None and len(args) > self.arg_pos:
+            # The arg to replace is passed positionally
+            old_value = args[self.arg_pos]
+            args = list(args)  # *args is normally a tuple
+            args[self.arg_pos] = new_value
+        else:
+            # The arg to replace is either omitted or passed by keyword.
+            old_value = kwargs.get(self.name)
+            kwargs[self.name] = new_value
+        return old_value, args, kwargs
+
+
 def doctests():
     import doctest
     return doctest.DocTestSuite()
diff --git a/libs/tornado/web.py b/libs/tornado/web.py
index 41ce95d4b8802097b5f9fa5a994380e9c9ef2041..3cccba5645c7c87ac00b9d9ef461a7054504f0b0 100755
--- a/libs/tornado/web.py
+++ b/libs/tornado/web.py
@@ -49,21 +49,18 @@ threads it is important to use IOLoop.add_callback to transfer control
 back to the main thread before finishing the request.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-import Cookie
 import base64
 import binascii
-import calendar
 import datetime
 import email.utils
 import functools
 import gzip
 import hashlib
 import hmac
-import httplib
-import itertools
 import mimetypes
+import numbers
 import os.path
 import re
 import stat
@@ -73,23 +70,37 @@ import time
 import tornado
 import traceback
 import types
-import urllib
-import urlparse
 import uuid
 
 from tornado import escape
+from tornado import httputil
 from tornado import locale
 from tornado.log import access_log, app_log, gen_log
 from tornado import stack_context
 from tornado import template
 from tornado.escape import utf8, _unicode
-from tornado.util import b, bytes_type, import_object, ObjectDict, raise_exc_info
+from tornado.util import bytes_type, import_object, ObjectDict, raise_exc_info, unicode_type
 
 try:
     from io import BytesIO  # python 3
 except ImportError:
     from cStringIO import StringIO as BytesIO  # python 2
 
+try:
+    import Cookie  # py2
+except ImportError:
+    import http.cookies as Cookie  # py3
+
+try:
+    import urlparse  # py2
+except ImportError:
+    import urllib.parse as urlparse  # py3
+
+try:
+    from urllib import urlencode  # py2
+except ImportError:
+    from urllib.parse import urlencode  # py3
+
 
 class RequestHandler(object):
     """Subclass this class and define get() or post() to make a handler.
@@ -103,6 +114,7 @@ class RequestHandler(object):
 
     _template_loaders = {}  # {path: template.BaseLoader}
     _template_loader_lock = threading.Lock()
+    _remove_control_chars_regex = re.compile(r"[\x00-\x08\x0e-\x1f]")
 
     def __init__(self, application, request, **kwargs):
         super(RequestHandler, self).__init__()
@@ -116,19 +128,19 @@ class RequestHandler(object):
         self.path_args = None
         self.path_kwargs = None
         self.ui = ObjectDict((n, self._ui_method(m)) for n, m in
-                     application.ui_methods.iteritems())
+                             application.ui_methods.items())
         # UIModules are available as both `modules` and `_modules` in the
         # template namespace.  Historically only `modules` was available
         # but could be clobbered by user additions to the namespace.
         # The template {% module %} directive looks in `_modules` to avoid
         # possible conflicts.
         self.ui["_modules"] = ObjectDict((n, self._ui_module(n, m)) for n, m in
-                                 application.ui_modules.iteritems())
+                                         application.ui_modules.items())
         self.ui["modules"] = self.ui["_modules"]
         self.clear()
         # Check since connection is not available in WSGI
         if getattr(self.request, "connection", None):
-            self.request.connection.stream.set_close_callback(
+            self.request.connection.set_close_callback(
                 self.on_connection_close)
         self.initialize(**kwargs)
 
@@ -215,25 +227,18 @@ class RequestHandler(object):
 
     def clear(self):
         """Resets all headers and content for this response."""
-        # The performance cost of tornado.httputil.HTTPHeaders is significant
-        # (slowing down a benchmark with a trivial handler by more than 10%),
-        # and its case-normalization is not generally necessary for
-        # headers we generate on the server side, so use a plain dict
-        # and list instead.
-        self._headers = {
-            "Server": "TornadoServer/%s" % tornado.version,
-            "Content-Type": "text/html; charset=UTF-8",
-            "Date": datetime.datetime.utcnow().strftime(
-                "%a, %d %b %Y %H:%M:%S GMT"),
-        }
-        self._list_headers = []
+        self._headers = httputil.HTTPHeaders({
+                "Server": "TornadoServer/%s" % tornado.version,
+                "Content-Type": "text/html; charset=UTF-8",
+                "Date": httputil.format_timestamp(time.gmtime()),
+                })
         self.set_default_headers()
         if not self.request.supports_http_1_1():
             if self.request.headers.get("Connection") == "Keep-Alive":
                 self.set_header("Connection", "Keep-Alive")
         self._write_buffer = []
         self._status_code = 200
-        self._reason = httplib.responses[200]
+        self._reason = httputil.responses[200]
 
     def set_default_headers(self):
         """Override this to set HTTP headers at the beginning of the request.
@@ -258,7 +263,7 @@ class RequestHandler(object):
             self._reason = escape.native_str(reason)
         else:
             try:
-                self._reason = httplib.responses[status_code]
+                self._reason = httputil.responses[status_code]
             except KeyError:
                 raise ValueError("unknown status code %d", status_code)
 
@@ -281,7 +286,7 @@ class RequestHandler(object):
         Unlike `set_header`, `add_header` may be called multiple times
         to return multiple values for the same header.
         """
-        self._list_headers.append((name, self._convert_header_value(value)))
+        self._headers.add(name, self._convert_header_value(value))
 
     def clear_header(self, name):
         """Clears an outgoing header, undoing a previous `set_header` call.
@@ -295,20 +300,19 @@ class RequestHandler(object):
     def _convert_header_value(self, value):
         if isinstance(value, bytes_type):
             pass
-        elif isinstance(value, unicode):
+        elif isinstance(value, unicode_type):
             value = value.encode('utf-8')
-        elif isinstance(value, (int, long)):
+        elif isinstance(value, numbers.Integral):
             # return immediately since we know the converted value will be safe
             return str(value)
         elif isinstance(value, datetime.datetime):
-            t = calendar.timegm(value.utctimetuple())
-            return email.utils.formatdate(t, localtime=False, usegmt=True)
+            return httputil.format_timestamp(value)
         else:
             raise TypeError("Unsupported header value %r" % value)
         # If \n is allowed into the header, it is possible to inject
         # additional headers or split the request. Also cap length to
         # prevent obviously erroneous values.
-        if len(value) > 4000 or re.search(b(r"[\x00-\x1f]"), value):
+        if len(value) > 4000 or re.search(br"[\x00-\x1f]", value):
             raise ValueError("Unsafe header value %r", value)
         return value
 
@@ -339,13 +343,14 @@ class RequestHandler(object):
 
         The returned values are always unicode.
         """
+
         values = []
         for v in self.request.arguments.get(name, []):
             v = self.decode_argument(v, name=name)
-            if isinstance(v, unicode):
+            if isinstance(v, unicode_type):
                 # Get rid of any weird control chars (unless decoding gave
                 # us bytes, in which case leave it alone)
-                v = re.sub(r"[\x00-\x08\x0e-\x1f]", " ", v)
+                v = RequestHandler._remove_control_chars_regex.sub(" ", v)
             if strip:
                 v = v.strip()
             values.append(v)
@@ -403,12 +408,10 @@ class RequestHandler(object):
             expires = datetime.datetime.utcnow() + datetime.timedelta(
                 days=expires_days)
         if expires:
-            timestamp = calendar.timegm(expires.utctimetuple())
-            morsel["expires"] = email.utils.formatdate(
-                timestamp, localtime=False, usegmt=True)
+            morsel["expires"] = httputil.format_timestamp(expires)
         if path:
             morsel["path"] = path
-        for k, v in kwargs.iteritems():
+        for k, v in kwargs.items():
             if k == 'max_age':
                 k = 'max-age'
             morsel[k] = v
@@ -421,7 +424,7 @@ class RequestHandler(object):
 
     def clear_all_cookies(self):
         """Deletes all the cookies the user sent with this request."""
-        for name in self.request.cookies.iterkeys():
+        for name in self.request.cookies:
             self.clear_cookie(name)
 
     def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
@@ -482,7 +485,7 @@ class RequestHandler(object):
             assert isinstance(status, int) and 300 <= status <= 399
         self.set_status(status)
         # Remove whitespace
-        url = re.sub(b(r"[\x00-\x20]+"), "", utf8(url))
+        url = re.sub(br"[\x00-\x20]+", "", utf8(url))
         self.set_header("Location", urlparse.urljoin(utf8(self.request.uri),
                                                      url))
         self.finish()
@@ -523,13 +526,13 @@ class RequestHandler(object):
         css_files = []
         html_heads = []
         html_bodies = []
-        for module in getattr(self, "_active_modules", {}).itervalues():
+        for module in getattr(self, "_active_modules", {}).values():
             embed_part = module.embedded_javascript()
             if embed_part:
                 js_embed.append(utf8(embed_part))
             file_part = module.javascript_files()
             if file_part:
-                if isinstance(file_part, (unicode, bytes_type)):
+                if isinstance(file_part, (unicode_type, bytes_type)):
                     js_files.append(file_part)
                 else:
                     js_files.extend(file_part)
@@ -538,7 +541,7 @@ class RequestHandler(object):
                 css_embed.append(utf8(embed_part))
             file_part = module.css_files()
             if file_part:
-                if isinstance(file_part, (unicode, bytes_type)):
+                if isinstance(file_part, (unicode_type, bytes_type)):
                     css_files.append(file_part)
                 else:
                     css_files.extend(file_part)
@@ -564,13 +567,13 @@ class RequestHandler(object):
             js = ''.join('<script src="' + escape.xhtml_escape(p) +
                          '" type="text/javascript"></script>'
                          for p in paths)
-            sloc = html.rindex(b('</body>'))
-            html = html[:sloc] + utf8(js) + b('\n') + html[sloc:]
+            sloc = html.rindex(b'</body>')
+            html = html[:sloc] + utf8(js) + b'\n' + html[sloc:]
         if js_embed:
-            js = b('<script type="text/javascript">\n//<![CDATA[\n') + \
-                b('\n').join(js_embed) + b('\n//]]>\n</script>')
-            sloc = html.rindex(b('</body>'))
-            html = html[:sloc] + js + b('\n') + html[sloc:]
+            js = b'<script type="text/javascript">\n//<![CDATA[\n' + \
+                b'\n'.join(js_embed) + b'\n//]]>\n</script>'
+            sloc = html.rindex(b'</body>')
+            html = html[:sloc] + js + b'\n' + html[sloc:]
         if css_files:
             paths = []
             unique_paths = set()
@@ -583,19 +586,19 @@ class RequestHandler(object):
             css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
                           'type="text/css" rel="stylesheet"/>'
                           for p in paths)
-            hloc = html.index(b('</head>'))
-            html = html[:hloc] + utf8(css) + b('\n') + html[hloc:]
+            hloc = html.index(b'</head>')
+            html = html[:hloc] + utf8(css) + b'\n' + html[hloc:]
         if css_embed:
-            css = b('<style type="text/css">\n') + b('\n').join(css_embed) + \
-                b('\n</style>')
-            hloc = html.index(b('</head>'))
-            html = html[:hloc] + css + b('\n') + html[hloc:]
+            css = b'<style type="text/css">\n' + b'\n'.join(css_embed) + \
+                b'\n</style>'
+            hloc = html.index(b'</head>')
+            html = html[:hloc] + css + b'\n' + html[hloc:]
         if html_heads:
-            hloc = html.index(b('</head>'))
-            html = html[:hloc] + b('').join(html_heads) + b('\n') + html[hloc:]
+            hloc = html.index(b'</head>')
+            html = html[:hloc] + b''.join(html_heads) + b'\n' + html[hloc:]
         if html_bodies:
-            hloc = html.index(b('</body>'))
-            html = html[:hloc] + b('').join(html_bodies) + b('\n') + html[hloc:]
+            hloc = html.index(b'</body>')
+            html = html[:hloc] + b''.join(html_bodies) + b'\n' + html[hloc:]
         self.finish(html)
 
     def render_string(self, template_name, **kwargs):
@@ -675,19 +678,19 @@ class RequestHandler(object):
         if self.application._wsgi:
             raise Exception("WSGI applications do not support flush()")
 
-        chunk = b("").join(self._write_buffer)
+        chunk = b"".join(self._write_buffer)
         self._write_buffer = []
         if not self._headers_written:
             self._headers_written = True
             for transform in self._transforms:
                 self._status_code, self._headers, chunk = \
                     transform.transform_first_chunk(
-                    self._status_code, self._headers, chunk, include_footers)
+                        self._status_code, self._headers, chunk, include_footers)
             headers = self._generate_headers()
         else:
             for transform in self._transforms:
                 chunk = transform.transform_chunk(chunk, include_footers)
-            headers = b("")
+            headers = b""
 
         # Ignore the chunk and only write the headers for HEAD requests
         if self.request.method == "HEAD":
@@ -712,7 +715,7 @@ class RequestHandler(object):
         if not self._headers_written:
             if (self._status_code == 200 and
                 self.request.method in ("GET", "HEAD") and
-                "Etag" not in self._headers):
+                    "Etag" not in self._headers):
                 etag = self.compute_etag()
                 if etag is not None:
                     self.set_header("Etag", etag)
@@ -812,9 +815,9 @@ class RequestHandler(object):
         else:
             self.finish("<html><title>%(code)d: %(message)s</title>"
                         "<body>%(code)d: %(message)s</body></html>" % {
-                    "code": status_code,
-                    "message": self._reason,
-                    })
+                            "code": status_code,
+                            "message": self._reason,
+                        })
 
     @property
     def locale(self):
@@ -861,7 +864,7 @@ class RequestHandler(object):
                     score = 1.0
                 locales.append((parts[0], score))
             if locales:
-                locales.sort(key=lambda (l, s): s, reverse=True)
+                locales.sort(key=lambda pair: pair[1], reverse=True)
                 codes = [l[0] for l in locales]
                 return locale.get(*codes)
         return locale.get(default)
@@ -1007,7 +1010,7 @@ class RequestHandler(object):
         def wrapper(*args, **kwargs):
             try:
                 return callback(*args, **kwargs)
-            except Exception, e:
+            except Exception as e:
                 if self._headers_written:
                     app_log.error("Exception after headers written",
                                   exc_info=True)
@@ -1055,11 +1058,11 @@ class RequestHandler(object):
                 raise HTTPError(405)
             self.path_args = [self.decode_argument(arg) for arg in args]
             self.path_kwargs = dict((k, self.decode_argument(v, name=k))
-                                    for (k, v) in kwargs.iteritems())
+                                    for (k, v) in kwargs.items())
             # If XSRF cookies are turned on, reject form submissions without
             # the proper cookie
             if self.request.method not in ("GET", "HEAD", "OPTIONS") and \
-               self.application.settings.get("xsrf_cookies"):
+                    self.application.settings.get("xsrf_cookies"):
                 self.check_xsrf_cookie()
             self.prepare()
             if not self._finished:
@@ -1067,7 +1070,7 @@ class RequestHandler(object):
                     *self.path_args, **self.path_kwargs)
                 if self._auto_finish and not self._finished:
                     self.finish()
-        except Exception, e:
+        except Exception as e:
             self._handle_request_exception(e)
 
     def _generate_headers(self):
@@ -1075,12 +1078,12 @@ class RequestHandler(object):
         lines = [utf8(self.request.version + " " +
                       str(self._status_code) +
                       " " + reason)]
-        lines.extend([(utf8(n) + b(": ") + utf8(v)) for n, v in
-                      itertools.chain(self._headers.iteritems(), self._list_headers)])
+        lines.extend([utf8(n) + b": " + utf8(v) for n, v in self._headers.get_all()])
+
         if hasattr(self, "_new_cookie"):
             for cookie in self._new_cookie.values():
                 lines.append(utf8("Set-Cookie: " + cookie.OutputString(None)))
-        return b("\r\n").join(lines) + b("\r\n\r\n")
+        return b"\r\n".join(lines) + b"\r\n\r\n"
 
     def _log(self):
         """Logs the current request.
@@ -1101,7 +1104,7 @@ class RequestHandler(object):
                 format = "%d %s: " + e.log_message
                 args = [e.status_code, self._request_summary()] + list(e.args)
                 gen_log.warning(format, *args)
-            if e.status_code not in httplib.responses and not e.reason:
+            if e.status_code not in httputil.responses and not e.reason:
                 gen_log.error("Bad HTTP status code: %d", e.status_code)
                 self.send_error(500, exc_info=sys.exc_info())
             else:
@@ -1161,7 +1164,7 @@ def asynchronous(method):
             raise Exception("@asynchronous is not supported for WSGI apps")
         self._auto_finish = False
         with stack_context.ExceptionStackContext(
-            self._stack_context_handle_exception):
+                self._stack_context_handle_exception):
             return method(self, *args, **kwargs)
     return wrapper
 
@@ -1334,7 +1337,7 @@ class Application(object):
             self.handlers.append((re.compile(host_pattern), handlers))
 
         for spec in host_handlers:
-            if type(spec) is type(()):
+            if isinstance(spec, type(())):
                 assert len(spec) in (2, 3)
                 pattern = spec[0]
                 handler = spec[1]
@@ -1375,20 +1378,20 @@ class Application(object):
         return matches or None
 
     def _load_ui_methods(self, methods):
-        if type(methods) is types.ModuleType:
+        if isinstance(methods, types.ModuleType):
             self._load_ui_methods(dict((n, getattr(methods, n))
                                        for n in dir(methods)))
         elif isinstance(methods, list):
             for m in methods:
                 self._load_ui_methods(m)
         else:
-            for name, fn in methods.iteritems():
+            for name, fn in methods.items():
                 if not name.startswith("_") and hasattr(fn, "__call__") \
-                   and name[0].lower() == name[0]:
+                        and name[0].lower() == name[0]:
                     self.ui_methods[name] = fn
 
     def _load_ui_modules(self, modules):
-        if type(modules) is types.ModuleType:
+        if isinstance(modules, types.ModuleType):
             self._load_ui_modules(dict((n, getattr(modules, n))
                                        for n in dir(modules)))
         elif isinstance(modules, list):
@@ -1396,7 +1399,7 @@ class Application(object):
                 self._load_ui_modules(m)
         else:
             assert isinstance(modules, dict)
-            for name, cls in modules.iteritems():
+            for name, cls in modules.items():
                 try:
                     if issubclass(cls, UIModule):
                         self.ui_modules[name] = cls
@@ -1434,7 +1437,7 @@ class Application(object):
                         if spec.regex.groupindex:
                             kwargs = dict(
                                 (str(k), unquote(v))
-                                for (k, v) in match.groupdict().iteritems())
+                                for (k, v) in match.groupdict().items())
                         else:
                             args = [unquote(s) for s in match.groups()]
                     break
@@ -1510,7 +1513,7 @@ class HTTPError(Exception):
     def __str__(self):
         message = "HTTP %d: %s" % (
             self.status_code,
-            self.reason or httplib.responses.get(self.status_code, 'Unknown'))
+            self.reason or httputil.responses.get(self.status_code, 'Unknown'))
         if self.log_message:
             return message + " (" + (self.log_message % self.args) + ")"
         else:
@@ -1618,7 +1621,7 @@ class StaticFileHandler(RequestHandler):
 
         if cache_time > 0:
             self.set_header("Expires", datetime.datetime.utcnow() +
-                                       datetime.timedelta(seconds=cache_time))
+                            datetime.timedelta(seconds=cache_time))
             self.set_header("Cache-Control", "max-age=" + str(cache_time))
 
         self.set_extra_headers(path)
@@ -1771,9 +1774,9 @@ class GZipContentEncoding(OutputTransform):
 
     def transform_first_chunk(self, status_code, headers, chunk, finishing):
         if 'Vary' in headers:
-            headers['Vary'] += b(', Accept-Encoding')
+            headers['Vary'] += b', Accept-Encoding'
         else:
-            headers['Vary'] = b('Accept-Encoding')
+            headers['Vary'] = b'Accept-Encoding'
         if self._gzipping:
             ctype = _unicode(headers.get("Content-Type", "")).split(";")[0]
             self._gzipping = (ctype in self.CONTENT_TYPES) and \
@@ -1827,9 +1830,9 @@ class ChunkedTransferEncoding(OutputTransform):
             # Don't write out empty chunks because that means END-OF-STREAM
             # with chunked encoding
             if block:
-                block = utf8("%x" % len(block)) + b("\r\n") + block + b("\r\n")
+                block = utf8("%x" % len(block)) + b"\r\n" + block + b"\r\n"
             if finishing:
-                block += b("0\r\n\r\n")
+                block += b"0\r\n\r\n"
         return block
 
 
@@ -1846,7 +1849,7 @@ def authenticated(method):
                         next_url = self.request.full_url()
                     else:
                         next_url = self.request.uri
-                    url += "?" + urllib.urlencode(dict(next=next_url))
+                    url += "?" + urlencode(dict(next=next_url))
                 self.redirect(url)
                 return
             raise HTTPError(403)
@@ -1954,7 +1957,7 @@ class TemplateModule(UIModule):
     def javascript_files(self):
         result = []
         for f in self._get_resources("javascript_files"):
-            if isinstance(f, (unicode, bytes_type)):
+            if isinstance(f, (unicode_type, bytes_type)):
                 result.append(f)
             else:
                 result.extend(f)
@@ -1966,7 +1969,7 @@ class TemplateModule(UIModule):
     def css_files(self):
         result = []
         for f in self._get_resources("css_files"):
-            if isinstance(f, (unicode, bytes_type)):
+            if isinstance(f, (unicode_type, bytes_type)):
                 result.append(f)
             else:
                 result.extend(f)
@@ -2011,8 +2014,8 @@ class URLSpec(object):
 
     def __repr__(self):
         return '%s(%r, %s, kwargs=%r, name=%r)' % \
-                (self.__class__.__name__, self.regex.pattern,
-                 self.handler_class, self.kwargs, self.name)
+            (self.__class__.__name__, self.regex.pattern,
+             self.handler_class, self.kwargs, self.name)
 
     def _find_groups(self):
         """Returns a tuple (reverse string, group count) for a url.
@@ -2051,7 +2054,7 @@ class URLSpec(object):
             return self._path
         converted_args = []
         for a in args:
-            if not isinstance(a, (unicode, bytes_type)):
+            if not isinstance(a, (unicode_type, bytes_type)):
                 a = str(a)
             converted_args.append(escape.url_escape(utf8(a)))
         return self._path % tuple(converted_args)
@@ -2066,7 +2069,7 @@ else:
         if len(a) != len(b):
             return False
         result = 0
-        if type(a[0]) is int:  # python3 byte strings
+        if isinstance(a[0], int):  # python3 byte strings
             for x, y in zip(a, b):
                 result |= x ^ y
         else:  # python2
@@ -2079,14 +2082,14 @@ def create_signed_value(secret, name, value):
     timestamp = utf8(str(int(time.time())))
     value = base64.b64encode(utf8(value))
     signature = _create_signature(secret, name, value, timestamp)
-    value = b("|").join([value, timestamp, signature])
+    value = b"|".join([value, timestamp, signature])
     return value
 
 
 def decode_signed_value(secret, name, value, max_age_days=31):
     if not value:
         return None
-    parts = utf8(value).split(b("|"))
+    parts = utf8(value).split(b"|")
     if len(parts) != 3:
         return None
     signature = _create_signature(secret, name, parts[0], parts[1])
@@ -2105,7 +2108,7 @@ def decode_signed_value(secret, name, value, max_age_days=31):
         # here instead of modifying _cookie_signature.
         gen_log.warning("Cookie timestamp in future; possible tampering %r", value)
         return None
-    if parts[1].startswith(b("0")):
+    if parts[1].startswith(b"0"):
         gen_log.warning("Tampered cookie %r", value)
         return None
     try:
diff --git a/libs/tornado/websocket.py b/libs/tornado/websocket.py
index 08f2e0fe6148cb3a6e795d44d2f0391079eb7288..235a1102e6356f6cf72b825ecdf871a39c31fa99 100755
--- a/libs/tornado/websocket.py
+++ b/libs/tornado/websocket.py
@@ -17,21 +17,42 @@ communication between the browser and server.
    documentation for caveats).
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 # Author: Jacob Kristhammar, 2010
 
 import array
+import base64
+import collections
 import functools
 import hashlib
+import logging
+import os
+import re
+import socket
 import struct
 import time
-import base64
 import tornado.escape
 import tornado.web
 
+from tornado.concurrent import Future, return_future
+from tornado.escape import utf8, to_unicode, native_str
+from tornado.httputil import HTTPHeaders
+from tornado.ioloop import IOLoop
+from tornado.iostream import IOStream, SSLIOStream
 from tornado.log import gen_log, app_log
-from tornado.util import bytes_type, b
+from tornado.netutil import Resolver
+from tornado import simple_httpclient
+from tornado.util import bytes_type
+
+try:
+    xrange  # py2
+except NameError:
+    xrange = range  # py3
 
+try:
+    import urlparse  # py2
+except ImportError:
+    import urllib.parse as urlparse  # py3
 
 class WebSocketHandler(tornado.web.RequestHandler):
     """Subclass this class to create a basic WebSocket handler.
@@ -325,12 +346,12 @@ class WebSocketProtocol76(WebSocketProtocol):
             "Sec-WebSocket-Location: %(scheme)s://%(host)s%(uri)s\r\n"
             "%(subprotocol)s"
             "\r\n" % (dict(
-                    version=tornado.version,
-                    origin=self.request.headers["Origin"],
-                    scheme=scheme,
-                    host=self.request.host,
-                    uri=self.request.uri,
-                    subprotocol=subprotocol_header))))
+            version=tornado.version,
+            origin=self.request.headers["Origin"],
+            scheme=scheme,
+            host=self.request.host,
+            uri=self.request.uri,
+            subprotocol=subprotocol_header))))
         self.stream.read_bytes(8, self._handle_challenge)
 
     def challenge_response(self, challenge):
@@ -398,7 +419,7 @@ class WebSocketProtocol76(WebSocketProtocol):
     def _on_frame_type(self, byte):
         frame_type = ord(byte)
         if frame_type == 0x00:
-            self.stream.read_until(b("\xff"), self._on_end_delimiter)
+            self.stream.read_until(b"\xff", self._on_end_delimiter)
         elif frame_type == 0xff:
             self.stream.read_bytes(1, self._on_length_indicator)
         else:
@@ -407,7 +428,7 @@ class WebSocketProtocol76(WebSocketProtocol):
     def _on_end_delimiter(self, frame):
         if not self.client_terminated:
             self.async_callback(self.handler.on_message)(
-                    frame[:-1].decode("utf-8", "replace"))
+                frame[:-1].decode("utf-8", "replace"))
         if not self.client_terminated:
             self._receive_message()
 
@@ -426,7 +447,7 @@ class WebSocketProtocol76(WebSocketProtocol):
         if isinstance(message, unicode):
             message = message.encode("utf-8")
         assert isinstance(message, bytes_type)
-        self.stream.write(b("\x00") + message + b("\xff"))
+        self.stream.write(b"\x00" + message + b"\xff")
 
     def write_ping(self, data):
         """Send ping frame."""
@@ -454,10 +475,12 @@ class WebSocketProtocol13(WebSocketProtocol):
     This class supports versions 7 and 8 of the protocol in addition to the
     final version 13.
     """
-    def __init__(self, handler):
+    def __init__(self, handler, mask_outgoing=False):
         WebSocketProtocol.__init__(self, handler)
+        self.mask_outgoing = mask_outgoing
         self._final_frame = False
         self._frame_opcode = None
+        self._masked_frame = None
         self._frame_mask = None
         self._frame_length = None
         self._fragmented_message_buffer = None
@@ -469,7 +492,7 @@ class WebSocketProtocol13(WebSocketProtocol):
             self._handle_websocket_headers()
             self._accept_connection()
         except ValueError:
-            gen_log.debug("Malformed WebSocket request received")
+            gen_log.debug("Malformed WebSocket request received", exc_info=True)
             self._abort()
             return
 
@@ -483,12 +506,20 @@ class WebSocketProtocol13(WebSocketProtocol):
         if not all(map(lambda f: self.request.headers.get(f), fields)):
             raise ValueError("Missing/Invalid WebSocket headers")
 
-    def _challenge_response(self):
+    @staticmethod
+    def compute_accept_value(key):
+        """Computes the value for the Sec-WebSocket-Accept header,
+        given the value for Sec-WebSocket-Key.
+        """
         sha1 = hashlib.sha1()
-        sha1.update(tornado.escape.utf8(
-                self.request.headers.get("Sec-Websocket-Key")))
-        sha1.update(b("258EAFA5-E914-47DA-95CA-C5AB0DC85B11"))  # Magic value
-        return tornado.escape.native_str(base64.b64encode(sha1.digest()))
+        sha1.update(utf8(key))
+        sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
+        return native_str(base64.b64encode(sha1.digest()))
+
+    def _challenge_response(self):
+        return WebSocketProtocol13.compute_accept_value(
+            self.request.headers.get("Sec-Websocket-Key"))
+
 
     def _accept_connection(self):
         subprotocol_header = ''
@@ -518,12 +549,19 @@ class WebSocketProtocol13(WebSocketProtocol):
             finbit = 0
         frame = struct.pack("B", finbit | opcode)
         l = len(data)
+        if self.mask_outgoing:
+            mask_bit = 0x80
+        else:
+            mask_bit = 0
         if l < 126:
-            frame += struct.pack("B", l)
+            frame += struct.pack("B", l | mask_bit)
         elif l <= 0xFFFF:
-            frame += struct.pack("!BH", 126, l)
+            frame += struct.pack("!BH", 126 | mask_bit, l)
         else:
-            frame += struct.pack("!BQ", 127, l)
+            frame += struct.pack("!BQ", 127 | mask_bit, l)
+        if self.mask_outgoing:
+            mask = os.urandom(4)
+            data = mask + self._apply_mask(mask, data)
         frame += data
         self.stream.write(frame)
 
@@ -555,10 +593,7 @@ class WebSocketProtocol13(WebSocketProtocol):
             # client is using as-yet-undefined extensions; abort
             self._abort()
             return
-        if not (payloadlen & 0x80):
-            # Unmasked frame -> abort connection
-            self._abort()
-            return
+        self._masked_frame = bool(payloadlen & 0x80)
         payloadlen = payloadlen & 0x7f
         if self._frame_opcode_is_control and payloadlen >= 126:
             # control frames must have payload < 126
@@ -566,7 +601,10 @@ class WebSocketProtocol13(WebSocketProtocol):
             return
         if payloadlen < 126:
             self._frame_length = payloadlen
-            self.stream.read_bytes(4, self._on_masking_key)
+            if self._masked_frame:
+                self.stream.read_bytes(4, self._on_masking_key)
+            else:
+                self.stream.read_bytes(self._frame_length, self._on_frame_data)
         elif payloadlen == 126:
             self.stream.read_bytes(2, self._on_frame_length_16)
         elif payloadlen == 127:
@@ -574,21 +612,39 @@ class WebSocketProtocol13(WebSocketProtocol):
 
     def _on_frame_length_16(self, data):
         self._frame_length = struct.unpack("!H", data)[0]
-        self.stream.read_bytes(4, self._on_masking_key)
+        if self._masked_frame:
+            self.stream.read_bytes(4, self._on_masking_key)
+        else:
+            self.stream.read_bytes(self._frame_length, self._on_frame_data)
 
     def _on_frame_length_64(self, data):
         self._frame_length = struct.unpack("!Q", data)[0]
-        self.stream.read_bytes(4, self._on_masking_key)
+        if self._masked_frame:
+            self.stream.read_bytes(4, self._on_masking_key)
+        else:
+            self.stream.read_bytes(self._frame_length, self._on_frame_data)
 
     def _on_masking_key(self, data):
-        self._frame_mask = array.array("B", data)
-        self.stream.read_bytes(self._frame_length, self._on_frame_data)
+        self._frame_mask = data
+        self.stream.read_bytes(self._frame_length, self._on_masked_frame_data)
 
-    def _on_frame_data(self, data):
+    def _apply_mask(self, mask, data):
+        mask = array.array("B", mask)
         unmasked = array.array("B", data)
         for i in xrange(len(data)):
-            unmasked[i] = unmasked[i] ^ self._frame_mask[i % 4]
+            unmasked[i] = unmasked[i] ^ mask[i % 4]
+        if hasattr(unmasked, 'tobytes'):
+            # tostring was deprecated in py32.  It hasn't been removed,
+            # but since we turn on deprecation warnings in our tests
+            # we need to use the right one.
+            return unmasked.tobytes()
+        else:
+            return unmasked.tostring()
 
+    def _on_masked_frame_data(self, data):
+        self._on_frame_data(self._apply_mask(self._frame_mask, data))
+
+    def _on_frame_data(self, data):
         if self._frame_opcode_is_control:
             # control frames may be interleaved with a series of fragmented
             # data frames, so control frames must not interact with
@@ -603,10 +659,10 @@ class WebSocketProtocol13(WebSocketProtocol):
                 # nothing to continue
                 self._abort()
                 return
-            self._fragmented_message_buffer += unmasked
+            self._fragmented_message_buffer += data
             if self._final_frame:
                 opcode = self._fragmented_message_opcode
-                unmasked = self._fragmented_message_buffer
+                data = self._fragmented_message_buffer
                 self._fragmented_message_buffer = None
         else:  # start of new data message
             if self._fragmented_message_buffer is not None:
@@ -617,10 +673,10 @@ class WebSocketProtocol13(WebSocketProtocol):
                 opcode = self._frame_opcode
             else:
                 self._fragmented_message_opcode = self._frame_opcode
-                self._fragmented_message_buffer = unmasked
+                self._fragmented_message_buffer = data
 
         if self._final_frame:
-            self._handle_message(opcode, unmasked.tostring())
+            self._handle_message(opcode, data)
 
         if not self.client_terminated:
             self._receive_frame()
@@ -657,7 +713,7 @@ class WebSocketProtocol13(WebSocketProtocol):
         """Closes the WebSocket connection."""
         if not self.server_terminated:
             if not self.stream.closed():
-                self._write_frame(True, 0x8, b(""))
+                self._write_frame(True, 0x8, b"")
             self.server_terminated = True
         if self.client_terminated:
             if self._waiting is not None:
@@ -669,3 +725,81 @@ class WebSocketProtocol13(WebSocketProtocol):
             # otherwise just close the connection.
             self._waiting = self.stream.io_loop.add_timeout(
                 self.stream.io_loop.time() + 5, self._abort)
+
+
+class _WebSocketClientConnection(simple_httpclient._HTTPConnection):
+    def __init__(self, io_loop, request):
+        self.connect_future = Future()
+        self.read_future = None
+        self.read_queue = collections.deque()
+        self.key = base64.b64encode(os.urandom(16))
+
+        scheme, sep, rest = request.url.partition(':')
+        scheme = {'ws': 'http', 'wss': 'https'}[scheme]
+        request.url = scheme + sep + rest
+        request.headers.update({
+                'Upgrade': 'websocket',
+                'Connection': 'Upgrade',
+                'Sec-WebSocket-Key': self.key,
+                'Sec-WebSocket-Version': '13',
+                })
+
+        super(_WebSocketClientConnection, self).__init__(
+            io_loop, None, request, lambda: None, lambda response: None,
+            104857600, Resolver(io_loop=io_loop))
+
+    def _on_close(self):
+        self.on_message(None)
+
+
+    def _handle_1xx(self, code):
+        assert code == 101
+        assert self.headers['Upgrade'].lower() == 'websocket'
+        assert self.headers['Connection'].lower() == 'upgrade'
+        accept = WebSocketProtocol13.compute_accept_value(self.key)
+        assert self.headers['Sec-Websocket-Accept'] == accept
+
+        self.protocol = WebSocketProtocol13(self, mask_outgoing=True)
+        self.protocol._receive_frame()
+
+        if self._timeout is not None:
+            self.io_loop.remove_timeout(self._timeout)
+            self._timeout = None
+
+        self.connect_future.set_result(self)
+
+    def write_message(self, message, binary=False):
+        self.protocol.write_message(message, binary)
+
+    def read_message(self, callback=None):
+        assert self.read_future is None
+        future = Future()
+        if self.read_queue:
+            future.set_result(self.read_queue.popleft())
+        else:
+            self.read_future = future
+        if callback is not None:
+            self.io_loop.add_future(future, callback)
+        return future
+
+    def on_message(self, message):
+        if self.read_future is not None:
+            self.read_future.set_result(message)
+            self.read_future = None
+        else:
+            self.read_queue.append(message)
+
+    def on_pong(self, data):
+        pass
+
+
+def WebSocketConnect(url, io_loop=None, callback=None):
+    if io_loop is None:
+        io_loop = IOLoop.instance()
+    request = simple_httpclient.HTTPRequest(url)
+    request = simple_httpclient._RequestProxy(
+        request, simple_httpclient.HTTPRequest._DEFAULTS)
+    conn = _WebSocketClientConnection(io_loop, request)
+    if callback is not None:
+        io_loop.add_future(conn.connect_future, callback)
+    return conn.connect_future
diff --git a/libs/tornado/wsgi.py b/libs/tornado/wsgi.py
index 3cb08502ed1546c6f4065227bf0447415e50f404..3d06860f9d8c192525a4db31e4735cf04195ee15 100755
--- a/libs/tornado/wsgi.py
+++ b/libs/tornado/wsgi.py
@@ -29,33 +29,39 @@ provides WSGI support in two ways:
   and Tornado handlers in a single server.
 """
 
-from __future__ import absolute_import, division, with_statement
+from __future__ import absolute_import, division, print_function, with_statement
 
-import Cookie
-import httplib
 import sys
 import time
 import tornado
-import urllib
 
 from tornado import escape
 from tornado import httputil
 from tornado.log import access_log
 from tornado import web
-from tornado.escape import native_str, utf8, parse_qs_bytes
-from tornado.util import b, bytes_type
+from tornado.escape import native_str, parse_qs_bytes
+from tornado.util import bytes_type, unicode_type
 
 try:
     from io import BytesIO  # python 3
 except ImportError:
     from cStringIO import StringIO as BytesIO  # python 2
 
+try:
+    import Cookie  # py2
+except ImportError:
+    import http.cookies as Cookie  # py3
+
+try:
+    import urllib.parse as urllib_parse  # py3
+except ImportError:
+    import urllib as urllib_parse
 
 # PEP 3333 specifies that WSGI on python 3 generally deals with byte strings
 # that are smuggled inside objects of type unicode (via the latin1 encoding).
 # These functions are like those in the tornado.escape module, but defined
 # here to minimize the temptation to use them in non-wsgi contexts.
-if str is unicode:
+if str is unicode_type:
     def to_wsgi_str(s):
         assert isinstance(s, bytes_type)
         return s.decode('latin1')
@@ -116,7 +122,7 @@ class WSGIApplication(web.Application):
         assert handler._finished
         reason = handler._reason
         status = str(handler._status_code) + " " + reason
-        headers = handler._headers.items() + handler._list_headers
+        headers = list(handler._headers.get_all())
         if hasattr(handler, "_new_cookie"):
             for cookie in handler._new_cookie.values():
                 headers.append(("Set-Cookie", cookie.OutputString(None)))
@@ -130,8 +136,8 @@ class HTTPRequest(object):
     def __init__(self, environ):
         """Parses the given WSGI environ to construct the request."""
         self.method = environ["REQUEST_METHOD"]
-        self.path = urllib.quote(from_wsgi_str(environ.get("SCRIPT_NAME", "")))
-        self.path += urllib.quote(from_wsgi_str(environ.get("PATH_INFO", "")))
+        self.path = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", "")))
+        self.path += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", "")))
         self.uri = self.path
         self.arguments = {}
         self.query = environ.get("QUERY_STRING", "")
@@ -235,7 +241,7 @@ class WSGIContainer(object):
         app_response = self.wsgi_application(
             WSGIContainer.environ(request), start_response)
         response.extend(app_response)
-        body = b("").join(response)
+        body = b"".join(response)
         if hasattr(app_response, "close"):
             app_response.close()
         if not data:
@@ -255,10 +261,10 @@ class WSGIContainer(object):
 
         parts = [escape.utf8("HTTP/1.1 " + data["status"] + "\r\n")]
         for key, value in headers:
-            parts.append(escape.utf8(key) + b(": ") + escape.utf8(value) + b("\r\n"))
-        parts.append(b("\r\n"))
+            parts.append(escape.utf8(key) + b": " + escape.utf8(value) + b"\r\n")
+        parts.append(b"\r\n")
         parts.append(body)
-        request.write(b("").join(parts))
+        request.write(b"".join(parts))
         request.finish()
         self._log(status_code, request)
 
@@ -294,7 +300,7 @@ class WSGIContainer(object):
             environ["CONTENT_TYPE"] = request.headers.pop("Content-Type")
         if "Content-Length" in request.headers:
             environ["CONTENT_LENGTH"] = request.headers.pop("Content-Length")
-        for key, value in request.headers.iteritems():
+        for key, value in request.headers.items():
             environ["HTTP_" + key.replace("-", "_").upper()] = value
         return environ