chore: blacken (#772)
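
Formatting-only change: the files in this diff were rewritten with Black (4-space
indentation, double-quoted strings, 88-column wrapping); no behavior change is
intended. As a rough, hypothetical sketch of the kind of rewrite Black performs
(the exact Black version and invocation used for this commit are not recorded
here), its public format_str API can be exercised directly:

    # Hypothetical illustration only; assumes the `black` package is installed.
    import black

    src = "BASE = 'docs/dyn'\n"
    # format_str() applies Black's default style: double quotes, 4-space indents,
    # 88-character lines -- the same rewrites visible throughout this diff.
    print(black.format_str(src, mode=black.FileMode()), end="")
    # -> BASE = "docs/dyn"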

diff --git a/apiclient/__init__.py b/apiclient/__init__.py
index 41828dd..38dd24b 100644
--- a/apiclient/__init__.py
+++ b/apiclient/__init__.py
@@ -10,6 +10,7 @@
 from googleapiclient import http
 from googleapiclient import mimeparse
 from googleapiclient import model
+
 try:
     from googleapiclient import sample_tools
 except ImportError:
@@ -21,16 +22,17 @@
 __version__ = googleapiclient.__version__
 
 _SUBMODULES = {
-    'channel': channel,
-    'discovery': discovery,
-    'errors': errors,
-    'http': http,
-    'mimeparse': mimeparse,
-    'model': model,
-    'sample_tools': sample_tools,
-    'schema': schema,
+    "channel": channel,
+    "discovery": discovery,
+    "errors": errors,
+    "http": http,
+    "mimeparse": mimeparse,
+    "model": model,
+    "sample_tools": sample_tools,
+    "schema": schema,
 }
 
 import sys
+
 for module_name, module in iteritems(_SUBMODULES):
-  sys.modules['apiclient.%s' % module_name] = module
+    sys.modules["apiclient.%s" % module_name] = module
diff --git a/describe.py b/describe.py
index 636eb5d..e36db53 100755
--- a/describe.py
+++ b/describe.py
@@ -22,7 +22,7 @@
 """
 from __future__ import print_function
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 from collections import OrderedDict
 import argparse
@@ -130,30 +130,40 @@
   <code><a href="#$name">$name($params)</a></code></p>
 <p class="firstline">$firstline</p>"""
 
-BASE = 'docs/dyn'
+BASE = "docs/dyn"
 
-DIRECTORY_URI = 'https://www.googleapis.com/discovery/v1/apis'
+DIRECTORY_URI = "https://www.googleapis.com/discovery/v1/apis"
 
 parser = argparse.ArgumentParser(description=__doc__)
 
-parser.add_argument('--discovery_uri_template', default=DISCOVERY_URI,
-                    help='URI Template for discovery.')
+parser.add_argument(
+    "--discovery_uri_template",
+    default=DISCOVERY_URI,
+    help="URI Template for discovery.",
+)
 
-parser.add_argument('--discovery_uri', default='',
-                    help=('URI of discovery document. If supplied then only '
-                          'this API will be documented.'))
+parser.add_argument(
+    "--discovery_uri",
+    default="",
+    help=(
+        "URI of discovery document. If supplied then only "
+        "this API will be documented."
+    ),
+)
 
-parser.add_argument('--directory_uri', default=DIRECTORY_URI,
-                    help=('URI of directory document. Unused if --discovery_uri'
-                          ' is supplied.'))
+parser.add_argument(
+    "--directory_uri",
+    default=DIRECTORY_URI,
+    help=("URI of directory document. Unused if --discovery_uri" " is supplied."),
+)
 
-parser.add_argument('--dest', default=BASE,
-                    help='Directory name to write documents into.')
-
+parser.add_argument(
+    "--dest", default=BASE, help="Directory name to write documents into."
+)
 
 
 def safe_version(version):
-  """Create a safe version of the verion string.
+    """Create a safe version of the verion string.
 
   Needed so that we can distinguish between versions
   and sub-collections in URIs. I.e. we don't want
@@ -166,11 +176,11 @@
     The string with '.' replaced with '_'.
   """
 
-  return version.replace('.', '_')
+    return version.replace(".", "_")
 
 
 def unsafe_version(version):
-  """Undoes what safe_version() does.
+    """Undoes what safe_version() does.
 
   See safe_version() for the details.
 
@@ -181,11 +191,11 @@
     The string with '_' replaced with '.'.
   """
 
-  return version.replace('_', '.')
+    return version.replace("_", ".")
 
 
 def method_params(doc):
-  """Document the parameters of a method.
+    """Document the parameters of a method.
 
   Args:
     doc: string, The method's docstring.
@@ -193,54 +203,57 @@
   Returns:
     The method signature as a string.
   """
-  doclines = doc.splitlines()
-  if 'Args:' in doclines:
-    begin = doclines.index('Args:')
-    if 'Returns:' in doclines[begin+1:]:
-      end = doclines.index('Returns:', begin)
-      args = doclines[begin+1: end]
-    else:
-      args = doclines[begin+1:]
+    doclines = doc.splitlines()
+    if "Args:" in doclines:
+        begin = doclines.index("Args:")
+        if "Returns:" in doclines[begin + 1 :]:
+            end = doclines.index("Returns:", begin)
+            args = doclines[begin + 1 : end]
+        else:
+            args = doclines[begin + 1 :]
 
-    parameters = []
-    pname = None
-    desc = ''
-    def add_param(pname, desc):
-      if pname is None:
-        return
-      if '(required)' not in desc:
-        pname = pname + '=None'
-      parameters.append(pname)
-    for line in args:
-      m = re.search('^\s+([a-zA-Z0-9_]+): (.*)', line)
-      if m is None:
-        desc += line
-        continue
-      add_param(pname, desc)
-      pname = m.group(1)
-      desc = m.group(2)
-    add_param(pname, desc)
-    parameters = ', '.join(parameters)
-  else:
-    parameters = ''
-  return parameters
+        parameters = []
+        pname = None
+        desc = ""
+
+        def add_param(pname, desc):
+            if pname is None:
+                return
+            if "(required)" not in desc:
+                pname = pname + "=None"
+            parameters.append(pname)
+
+        for line in args:
+            m = re.search("^\s+([a-zA-Z0-9_]+): (.*)", line)
+            if m is None:
+                desc += line
+                continue
+            add_param(pname, desc)
+            pname = m.group(1)
+            desc = m.group(2)
+        add_param(pname, desc)
+        parameters = ", ".join(parameters)
+    else:
+        parameters = ""
+    return parameters
 
 
 def method(name, doc):
-  """Documents an individual method.
+    """Documents an individual method.
 
   Args:
     name: string, Name of the method.
     doc: string, The methods docstring.
   """
 
-  params = method_params(doc)
-  return string.Template(METHOD_TEMPLATE).substitute(
-      name=name, params=params, doc=doc)
+    params = method_params(doc)
+    return string.Template(METHOD_TEMPLATE).substitute(
+        name=name, params=params, doc=doc
+    )
 
 
 def breadcrumbs(path, root_discovery):
-  """Create the breadcrumb trail to this page of documentation.
+    """Create the breadcrumb trail to this page of documentation.
 
   Args:
     path: string, Dot separated name of the resource.
@@ -249,28 +262,28 @@
   Returns:
     HTML with links to each of the parent resources of this resource.
   """
-  parts = path.split('.')
+    parts = path.split(".")
 
-  crumbs = []
-  accumulated = []
+    crumbs = []
+    accumulated = []
 
-  for i, p in enumerate(parts):
-    prefix = '.'.join(accumulated)
-    # The first time through prefix will be [], so we avoid adding in a
-    # superfluous '.' to prefix.
-    if prefix:
-      prefix += '.'
-    display = p
-    if i == 0:
-      display = root_discovery.get('title', display)
-    crumbs.append('<a href="%s.html">%s</a>' % (prefix + p, display))
-    accumulated.append(p)
+    for i, p in enumerate(parts):
+        prefix = ".".join(accumulated)
+        # The first time through prefix will be [], so we avoid adding in a
+        # superfluous '.' to prefix.
+        if prefix:
+            prefix += "."
+        display = p
+        if i == 0:
+            display = root_discovery.get("title", display)
+        crumbs.append('<a href="%s.html">%s</a>' % (prefix + p, display))
+        accumulated.append(p)
 
-  return ' . '.join(crumbs)
+    return " . ".join(crumbs)
 
 
 def document_collection(resource, path, root_discovery, discovery, css=CSS):
-  """Document a single collection in an API.
+    """Document a single collection in an API.
 
   Args:
     resource: Collection or service being documented.
@@ -280,148 +293,164 @@
       describes the resource.
     css: string, The CSS to include in the generated file.
   """
-  collections = []
-  methods = []
-  resource_name = path.split('.')[-2]
-  html = [
-      '<html><body>',
-      css,
-      '<h1>%s</h1>' % breadcrumbs(path[:-1], root_discovery),
-      '<h2>Instance Methods</h2>'
-      ]
+    collections = []
+    methods = []
+    resource_name = path.split(".")[-2]
+    html = [
+        "<html><body>",
+        css,
+        "<h1>%s</h1>" % breadcrumbs(path[:-1], root_discovery),
+        "<h2>Instance Methods</h2>",
+    ]
 
-  # Which methods are for collections.
-  for name in dir(resource):
-    if not name.startswith('_') and callable(getattr(resource, name)):
-      if hasattr(getattr(resource, name), '__is_resource__'):
-        collections.append(name)
-      else:
-        methods.append(name)
+    # Which methods are for collections.
+    for name in dir(resource):
+        if not name.startswith("_") and callable(getattr(resource, name)):
+            if hasattr(getattr(resource, name), "__is_resource__"):
+                collections.append(name)
+            else:
+                methods.append(name)
 
+    # TOC
+    if collections:
+        for name in collections:
+            if not name.startswith("_") and callable(getattr(resource, name)):
+                href = path + name + ".html"
+                html.append(
+                    string.Template(COLLECTION_LINK).substitute(href=href, name=name)
+                )
 
-  # TOC
-  if collections:
-    for name in collections:
-      if not name.startswith('_') and callable(getattr(resource, name)):
-        href = path + name + '.html'
-        html.append(string.Template(COLLECTION_LINK).substitute(
-            href=href, name=name))
+    if methods:
+        for name in methods:
+            if not name.startswith("_") and callable(getattr(resource, name)):
+                doc = getattr(resource, name).__doc__
+                params = method_params(doc)
+                firstline = doc.splitlines()[0]
+                html.append(
+                    string.Template(METHOD_LINK).substitute(
+                        name=name, params=params, firstline=firstline
+                    )
+                )
 
-  if methods:
-    for name in methods:
-      if not name.startswith('_') and callable(getattr(resource, name)):
-        doc = getattr(resource, name).__doc__
-        params = method_params(doc)
-        firstline = doc.splitlines()[0]
-        html.append(string.Template(METHOD_LINK).substitute(
-            name=name, params=params, firstline=firstline))
+    if methods:
+        html.append("<h3>Method Details</h3>")
+        for name in methods:
+            dname = name.rsplit("_")[0]
+            html.append(method(name, getattr(resource, name).__doc__))
 
-  if methods:
-    html.append('<h3>Method Details</h3>')
-    for name in methods:
-      dname = name.rsplit('_')[0]
-      html.append(method(name, getattr(resource, name).__doc__))
+    html.append("</body></html>")
 
-  html.append('</body></html>')
-
-  return '\n'.join(html)
+    return "\n".join(html)
 
 
 def document_collection_recursive(resource, path, root_discovery, discovery):
 
-  html = document_collection(resource, path, root_discovery, discovery)
+    html = document_collection(resource, path, root_discovery, discovery)
 
-  f = open(os.path.join(FLAGS.dest, path + 'html'), 'w')
-  f.write(html.encode('utf-8'))
-  f.close()
+    f = open(os.path.join(FLAGS.dest, path + "html"), "w")
+    f.write(html.encode("utf-8"))
+    f.close()
 
-  for name in dir(resource):
-    if (not name.startswith('_')
-        and callable(getattr(resource, name))
-        and hasattr(getattr(resource, name), '__is_resource__')
-        and discovery != {}):
-      dname = name.rsplit('_')[0]
-      collection = getattr(resource, name)()
-      document_collection_recursive(collection, path + name + '.', root_discovery,
-               discovery['resources'].get(dname, {}))
+    for name in dir(resource):
+        if (
+            not name.startswith("_")
+            and callable(getattr(resource, name))
+            and hasattr(getattr(resource, name), "__is_resource__")
+            and discovery != {}
+        ):
+            dname = name.rsplit("_")[0]
+            collection = getattr(resource, name)()
+            document_collection_recursive(
+                collection,
+                path + name + ".",
+                root_discovery,
+                discovery["resources"].get(dname, {}),
+            )
+
 
 def document_api(name, version):
-  """Document the given API.
+    """Document the given API.
 
   Args:
     name: string, Name of the API.
     version: string, Version of the API.
   """
-  try:
-    service = build(name, version)
-  except UnknownApiNameOrVersion as e:
-    print('Warning: {} {} found but could not be built.'.format(name, version))
-    return
+    try:
+        service = build(name, version)
+    except UnknownApiNameOrVersion as e:
+        print("Warning: {} {} found but could not be built.".format(name, version))
+        return
 
-  http = build_http()
-  response, content = http.request(
-      uritemplate.expand(
-          FLAGS.discovery_uri_template, {
-              'api': name,
-              'apiVersion': version})
-          )
-  discovery = json.loads(content)
+    http = build_http()
+    response, content = http.request(
+        uritemplate.expand(
+            FLAGS.discovery_uri_template, {"api": name, "apiVersion": version}
+        )
+    )
+    discovery = json.loads(content)
 
-  version = safe_version(version)
+    version = safe_version(version)
 
-  document_collection_recursive(
-      service, '%s_%s.' % (name, version), discovery, discovery)
+    document_collection_recursive(
+        service, "%s_%s." % (name, version), discovery, discovery
+    )
 
 
 def document_api_from_discovery_document(uri):
-  """Document the given API.
+    """Document the given API.
 
   Args:
     uri: string, URI of discovery document.
   """
-  http = build_http()
-  response, content = http.request(FLAGS.discovery_uri)
-  discovery = json.loads(content)
-
-  service = build_from_document(discovery)
-
-  name = discovery['version']
-  version = safe_version(discovery['version'])
-
-  document_collection_recursive(
-      service, '%s_%s.' % (name, version), discovery, discovery)
-
-
-if __name__ == '__main__':
-  FLAGS = parser.parse_args(sys.argv[1:])
-  if FLAGS.discovery_uri:
-    document_api_from_discovery_document(FLAGS.discovery_uri)
-  else:
-    api_directory = collections.defaultdict(list)
     http = build_http()
-    resp, content = http.request(
-        FLAGS.directory_uri,
-        headers={'X-User-IP': '0.0.0.0'})
-    if resp.status == 200:
-      directory = json.loads(content)['items']
-      for api in directory:
-        document_api(api['name'], api['version'])
-        api_directory[api['name']].append(api['version'])
-      
-      # sort by api name and version number
-      for api in api_directory:
-        api_directory[api] = sorted(api_directory[api])
-      api_directory = OrderedDict(sorted(api_directory.items(), key = lambda x: x[0]))
+    response, content = http.request(FLAGS.discovery_uri)
+    discovery = json.loads(content)
 
-      markdown = []
-      for api, versions in api_directory.items():
-          markdown.append('## %s' % api)
-          for version in versions:
-              markdown.append('* [%s](http://googleapis.github.io/google-api-python-client/docs/dyn/%s_%s.html)' % (version, api, version))
-          markdown.append('\n')
+    service = build_from_document(discovery)
 
-      with open('docs/dyn/index.md', 'w') as f:
-        f.write('\n'.join(markdown).encode('utf-8'))
+    name = discovery["version"]
+    version = safe_version(discovery["version"])
 
+    document_collection_recursive(
+        service, "%s_%s." % (name, version), discovery, discovery
+    )
+
+
+if __name__ == "__main__":
+    FLAGS = parser.parse_args(sys.argv[1:])
+    if FLAGS.discovery_uri:
+        document_api_from_discovery_document(FLAGS.discovery_uri)
     else:
-      sys.exit("Failed to load the discovery document.")
+        api_directory = collections.defaultdict(list)
+        http = build_http()
+        resp, content = http.request(
+            FLAGS.directory_uri, headers={"X-User-IP": "0.0.0.0"}
+        )
+        if resp.status == 200:
+            directory = json.loads(content)["items"]
+            for api in directory:
+                document_api(api["name"], api["version"])
+                api_directory[api["name"]].append(api["version"])
+
+            # sort by api name and version number
+            for api in api_directory:
+                api_directory[api] = sorted(api_directory[api])
+            api_directory = OrderedDict(
+                sorted(api_directory.items(), key=lambda x: x[0])
+            )
+
+            markdown = []
+            for api, versions in api_directory.items():
+                markdown.append("## %s" % api)
+                for version in versions:
+                    markdown.append(
+                        "* [%s](http://googleapis.github.io/google-api-python-client/docs/dyn/%s_%s.html)"
+                        % (version, api, version)
+                    )
+                markdown.append("\n")
+
+            with open("docs/dyn/index.md", "w") as f:
+                f.write("\n".join(markdown).encode("utf-8"))
+
+        else:
+            sys.exit("Failed to load the discovery document.")
diff --git a/googleapiclient/__init__.py b/googleapiclient/__init__.py
index feba5ce..01147bf 100644
--- a/googleapiclient/__init__.py
+++ b/googleapiclient/__init__.py
@@ -20,8 +20,10 @@
 try:  # Python 2.7+
     from logging import NullHandler
 except ImportError:
+
     class NullHandler(logging.Handler):
         def emit(self, record):
             pass
 
+
 logging.getLogger(__name__).addHandler(NullHandler())
diff --git a/googleapiclient/_auth.py b/googleapiclient/_auth.py
index 9d6d363..8a2f673 100644
--- a/googleapiclient/_auth.py
+++ b/googleapiclient/_auth.py
@@ -19,6 +19,7 @@
 try:
     import google.auth
     import google.auth.credentials
+
     HAS_GOOGLE_AUTH = True
 except ImportError:  # pragma: NO COVER
     HAS_GOOGLE_AUTH = False
@@ -31,6 +32,7 @@
 try:
     import oauth2client
     import oauth2client.client
+
     HAS_OAUTH2CLIENT = True
 except ImportError:  # pragma: NO COVER
     HAS_OAUTH2CLIENT = False
@@ -45,8 +47,9 @@
         return oauth2client.client.GoogleCredentials.get_application_default()
     else:
         raise EnvironmentError(
-            'No authentication library is available. Please install either '
-            'google-auth or oauth2client.')
+            "No authentication library is available. Please install either "
+            "google-auth or oauth2client."
+        )
 
 
 def with_scopes(credentials, scopes):
@@ -62,10 +65,8 @@
         Union[google.auth.credentials.Credentials,
             oauth2client.client.Credentials]: The scoped credentials.
     """
-    if HAS_GOOGLE_AUTH and isinstance(
-            credentials, google.auth.credentials.Credentials):
-        return google.auth.credentials.with_scopes_if_required(
-            credentials, scopes)
+    if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
+        return google.auth.credentials.with_scopes_if_required(credentials, scopes)
     else:
         try:
             if credentials.create_scoped_required():
@@ -90,16 +91,15 @@
     """
     from googleapiclient.http import build_http
 
-    if HAS_GOOGLE_AUTH and isinstance(
-            credentials, google.auth.credentials.Credentials):
+    if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
         if google_auth_httplib2 is None:
             raise ValueError(
-                'Credentials from google.auth specified, but '
-                'google-api-python-client is unable to use these credentials '
-                'unless google-auth-httplib2 is installed. Please install '
-                'google-auth-httplib2.')
-        return google_auth_httplib2.AuthorizedHttp(credentials,
-                                                   http=build_http())
+                "Credentials from google.auth specified, but "
+                "google-api-python-client is unable to use these credentials "
+                "unless google-auth-httplib2 is installed. Please install "
+                "google-auth-httplib2."
+            )
+        return google_auth_httplib2.AuthorizedHttp(credentials, http=build_http())
     else:
         return credentials.authorize(build_http())
 
@@ -110,8 +110,7 @@
     # Http instance which would cause a weird recursive loop of refreshing
     # and likely tear a hole in spacetime.
     refresh_http = httplib2.Http()
-    if HAS_GOOGLE_AUTH and isinstance(
-            credentials, google.auth.credentials.Credentials):
+    if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
         request = google_auth_httplib2.Request(refresh_http)
         return credentials.refresh(request)
     else:
@@ -126,22 +125,23 @@
 
 
 def is_valid(credentials):
-    if HAS_GOOGLE_AUTH and isinstance(
-            credentials, google.auth.credentials.Credentials):
+    if HAS_GOOGLE_AUTH and isinstance(credentials, google.auth.credentials.Credentials):
         return credentials.valid
     else:
         return (
-            credentials.access_token is not None and
-            not credentials.access_token_expired)
+            credentials.access_token is not None
+            and not credentials.access_token_expired
+        )
 
 
 def get_credentials_from_http(http):
     if http is None:
         return None
-    elif hasattr(http.request, 'credentials'):
+    elif hasattr(http.request, "credentials"):
         return http.request.credentials
-    elif (hasattr(http, 'credentials')
-          and not isinstance(http.credentials, httplib2.Credentials)):
+    elif hasattr(http, "credentials") and not isinstance(
+        http.credentials, httplib2.Credentials
+    ):
         return http.credentials
     else:
         return None
diff --git a/googleapiclient/_helpers.py b/googleapiclient/_helpers.py
index 5e8184b..66ccf79 100644
--- a/googleapiclient/_helpers.py
+++ b/googleapiclient/_helpers.py
@@ -25,17 +25,18 @@
 
 logger = logging.getLogger(__name__)
 
-POSITIONAL_WARNING = 'WARNING'
-POSITIONAL_EXCEPTION = 'EXCEPTION'
-POSITIONAL_IGNORE = 'IGNORE'
-POSITIONAL_SET = frozenset([POSITIONAL_WARNING, POSITIONAL_EXCEPTION,
-                            POSITIONAL_IGNORE])
+POSITIONAL_WARNING = "WARNING"
+POSITIONAL_EXCEPTION = "EXCEPTION"
+POSITIONAL_IGNORE = "IGNORE"
+POSITIONAL_SET = frozenset(
+    [POSITIONAL_WARNING, POSITIONAL_EXCEPTION, POSITIONAL_IGNORE]
+)
 
 positional_parameters_enforcement = POSITIONAL_WARNING
 
-_SYM_LINK_MESSAGE = 'File: {0}: Is a symbolic link.'
-_IS_DIR_MESSAGE = '{0}: Is a directory'
-_MISSING_FILE_MESSAGE = 'Cannot access {0}: No such file or directory'
+_SYM_LINK_MESSAGE = "File: {0}: Is a symbolic link."
+_IS_DIR_MESSAGE = "{0}: Is a directory"
+_MISSING_FILE_MESSAGE = "Cannot access {0}: No such file or directory"
 
 
 def positional(max_positional_args):
@@ -114,20 +115,24 @@
         @functools.wraps(wrapped)
         def positional_wrapper(*args, **kwargs):
             if len(args) > max_positional_args:
-                plural_s = ''
+                plural_s = ""
                 if max_positional_args != 1:
-                    plural_s = 's'
-                message = ('{function}() takes at most {args_max} positional '
-                           'argument{plural} ({args_given} given)'.format(
-                               function=wrapped.__name__,
-                               args_max=max_positional_args,
-                               args_given=len(args),
-                               plural=plural_s))
+                    plural_s = "s"
+                message = (
+                    "{function}() takes at most {args_max} positional "
+                    "argument{plural} ({args_given} given)".format(
+                        function=wrapped.__name__,
+                        args_max=max_positional_args,
+                        args_given=len(args),
+                        plural=plural_s,
+                    )
+                )
                 if positional_parameters_enforcement == POSITIONAL_EXCEPTION:
                     raise TypeError(message)
                 elif positional_parameters_enforcement == POSITIONAL_WARNING:
                     logger.warning(message)
             return wrapped(*args, **kwargs)
+
         return positional_wrapper
 
     if isinstance(max_positional_args, six.integer_types):
@@ -153,8 +158,10 @@
     params = {}
     for key, value in six.iteritems(urlencoded_params):
         if len(value) != 1:
-            msg = ('URL-encoded content contains a repeated value:'
-                   '%s -> %s' % (key, ', '.join(value)))
+            msg = "URL-encoded content contains a repeated value:" "%s -> %s" % (
+                key,
+                ", ".join(value),
+            )
             raise ValueError(msg)
         params[key] = value[0]
     return params
diff --git a/googleapiclient/channel.py b/googleapiclient/channel.py
index 3caee13..efff0f6 100644
--- a/googleapiclient/channel.py
+++ b/googleapiclient/channel.py
@@ -85,32 +85,32 @@
 # Map the names of the parameters in the JSON channel description to
 # the parameter names we use in the Channel class.
 CHANNEL_PARAMS = {
-    'address': 'address',
-    'id': 'id',
-    'expiration': 'expiration',
-    'params': 'params',
-    'resourceId': 'resource_id',
-    'resourceUri': 'resource_uri',
-    'type': 'type',
-    'token': 'token',
-    }
+    "address": "address",
+    "id": "id",
+    "expiration": "expiration",
+    "params": "params",
+    "resourceId": "resource_id",
+    "resourceUri": "resource_uri",
+    "type": "type",
+    "token": "token",
+}
 
-X_GOOG_CHANNEL_ID     = 'X-GOOG-CHANNEL-ID'
-X_GOOG_MESSAGE_NUMBER = 'X-GOOG-MESSAGE-NUMBER'
-X_GOOG_RESOURCE_STATE = 'X-GOOG-RESOURCE-STATE'
-X_GOOG_RESOURCE_URI   = 'X-GOOG-RESOURCE-URI'
-X_GOOG_RESOURCE_ID    = 'X-GOOG-RESOURCE-ID'
+X_GOOG_CHANNEL_ID = "X-GOOG-CHANNEL-ID"
+X_GOOG_MESSAGE_NUMBER = "X-GOOG-MESSAGE-NUMBER"
+X_GOOG_RESOURCE_STATE = "X-GOOG-RESOURCE-STATE"
+X_GOOG_RESOURCE_URI = "X-GOOG-RESOURCE-URI"
+X_GOOG_RESOURCE_ID = "X-GOOG-RESOURCE-ID"
 
 
 def _upper_header_keys(headers):
-  new_headers = {}
-  for k, v in six.iteritems(headers):
-    new_headers[k.upper()] = v
-  return new_headers
+    new_headers = {}
+    for k, v in six.iteritems(headers):
+        new_headers[k.upper()] = v
+    return new_headers
 
 
 class Notification(object):
-  """A Notification from a Channel.
+    """A Notification from a Channel.
 
   Notifications are not usually constructed directly, but are returned
   from functions like notification_from_headers().
@@ -122,9 +122,10 @@
     resource_id: str, The unique identifier of the version of the resource at
       this event.
   """
-  @util.positional(5)
-  def __init__(self, message_number, state, resource_uri, resource_id):
-    """Notification constructor.
+
+    @util.positional(5)
+    def __init__(self, message_number, state, resource_uri, resource_id):
+        """Notification constructor.
 
     Args:
       message_number: int, The unique id number of this notification.
@@ -133,14 +134,14 @@
       resource_uri: str, The address of the resource being monitored.
       resource_id: str, The identifier of the watched resource.
     """
-    self.message_number = message_number
-    self.state = state
-    self.resource_uri = resource_uri
-    self.resource_id = resource_id
+        self.message_number = message_number
+        self.state = state
+        self.resource_uri = resource_uri
+        self.resource_id = resource_id
 
 
 class Channel(object):
-  """A Channel for notifications.
+    """A Channel for notifications.
 
   Usually not constructed directly, instead it is returned from helper
   functions like new_webhook_channel().
@@ -163,10 +164,19 @@
     resource_uri: str, The canonicalized ID of the watched resource.
   """
 
-  @util.positional(5)
-  def __init__(self, type, id, token, address, expiration=None,
-               params=None, resource_id="", resource_uri=""):
-    """Create a new Channel.
+    @util.positional(5)
+    def __init__(
+        self,
+        type,
+        id,
+        token,
+        address,
+        expiration=None,
+        params=None,
+        resource_id="",
+        resource_uri="",
+    ):
+        """Create a new Channel.
 
     In user code, this Channel constructor will not typically be called
     manually since there are functions for creating channels for each specific
@@ -189,17 +199,17 @@
         being watched. Stable across different API versions.
       resource_uri: str, The canonicalized ID of the watched resource.
     """
-    self.type = type
-    self.id = id
-    self.token = token
-    self.address = address
-    self.expiration = expiration
-    self.params = params
-    self.resource_id = resource_id
-    self.resource_uri = resource_uri
+        self.type = type
+        self.id = id
+        self.token = token
+        self.address = address
+        self.expiration = expiration
+        self.params = params
+        self.resource_id = resource_id
+        self.resource_uri = resource_uri
 
-  def body(self):
-    """Build a body from the Channel.
+    def body(self):
+        """Build a body from the Channel.
 
     Constructs a dictionary that's appropriate for passing into watch()
     methods as the value of body argument.
@@ -207,25 +217,25 @@
     Returns:
       A dictionary representation of the channel.
     """
-    result = {
-        'id': self.id,
-        'token': self.token,
-        'type': self.type,
-        'address': self.address
+        result = {
+            "id": self.id,
+            "token": self.token,
+            "type": self.type,
+            "address": self.address,
         }
-    if self.params:
-      result['params'] = self.params
-    if self.resource_id:
-      result['resourceId'] = self.resource_id
-    if self.resource_uri:
-      result['resourceUri'] = self.resource_uri
-    if self.expiration:
-      result['expiration'] = self.expiration
+        if self.params:
+            result["params"] = self.params
+        if self.resource_id:
+            result["resourceId"] = self.resource_id
+        if self.resource_uri:
+            result["resourceUri"] = self.resource_uri
+        if self.expiration:
+            result["expiration"] = self.expiration
 
-    return result
+        return result
 
-  def update(self, resp):
-    """Update a channel with information from the response of watch().
+    def update(self, resp):
+        """Update a channel with information from the response of watch().
 
     When a request is sent to watch() a resource, the response returned
     from the watch() request is a dictionary with updated channel information,
@@ -234,14 +244,14 @@
     Args:
       resp: dict, The response from a watch() method.
     """
-    for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
-      value = resp.get(json_name)
-      if value is not None:
-        setattr(self, param_name, value)
+        for json_name, param_name in six.iteritems(CHANNEL_PARAMS):
+            value = resp.get(json_name)
+            if value is not None:
+                setattr(self, param_name, value)
 
 
 def notification_from_headers(channel, headers):
-  """Parse a notification from the webhook request headers, validate
+    """Parse a notification from the webhook request headers, validate
     the notification, and return a Notification object.
 
   Args:
@@ -256,17 +266,18 @@
     errors.InvalidNotificationError if the notification is invalid.
     ValueError if the X-GOOG-MESSAGE-NUMBER can't be converted to an int.
   """
-  headers = _upper_header_keys(headers)
-  channel_id = headers[X_GOOG_CHANNEL_ID]
-  if channel.id != channel_id:
-    raise errors.InvalidNotificationError(
-        'Channel id mismatch: %s != %s' % (channel.id, channel_id))
-  else:
-    message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
-    state = headers[X_GOOG_RESOURCE_STATE]
-    resource_uri = headers[X_GOOG_RESOURCE_URI]
-    resource_id = headers[X_GOOG_RESOURCE_ID]
-    return Notification(message_number, state, resource_uri, resource_id)
+    headers = _upper_header_keys(headers)
+    channel_id = headers[X_GOOG_CHANNEL_ID]
+    if channel.id != channel_id:
+        raise errors.InvalidNotificationError(
+            "Channel id mismatch: %s != %s" % (channel.id, channel_id)
+        )
+    else:
+        message_number = int(headers[X_GOOG_MESSAGE_NUMBER])
+        state = headers[X_GOOG_RESOURCE_STATE]
+        resource_uri = headers[X_GOOG_RESOURCE_URI]
+        resource_id = headers[X_GOOG_RESOURCE_ID]
+        return Notification(message_number, state, resource_uri, resource_id)
 
 
 @util.positional(2)
@@ -289,13 +300,18 @@
     """
     expiration_ms = 0
     if expiration:
-      delta = expiration - EPOCH
-      expiration_ms = delta.microseconds/1000 + (
-          delta.seconds + delta.days*24*3600)*1000
-      if expiration_ms < 0:
-        expiration_ms = 0
+        delta = expiration - EPOCH
+        expiration_ms = (
+            delta.microseconds / 1000 + (delta.seconds + delta.days * 24 * 3600) * 1000
+        )
+        if expiration_ms < 0:
+            expiration_ms = 0
 
-    return Channel('web_hook', str(uuid.uuid4()),
-                   token, url, expiration=expiration_ms,
-                   params=params)
-
+    return Channel(
+        "web_hook",
+        str(uuid.uuid4()),
+        token,
+        url,
+        expiration=expiration_ms,
+        params=params,
+    )
diff --git a/googleapiclient/discovery.py b/googleapiclient/discovery.py
index 771d9fc..87403b9 100644
--- a/googleapiclient/discovery.py
+++ b/googleapiclient/discovery.py
@@ -20,25 +20,20 @@
 import six
 from six.moves import zip
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-__all__ = [
-    'build',
-    'build_from_document',
-    'fix_method_name',
-    'key2param',
-    ]
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+__all__ = ["build", "build_from_document", "fix_method_name", "key2param"]
 
 from six import BytesIO
 from six.moves import http_client
-from six.moves.urllib.parse import urlencode, urlparse, urljoin, \
-  urlunparse, parse_qsl
+from six.moves.urllib.parse import urlencode, urlparse, urljoin, urlunparse, parse_qsl
 
 # Standard library imports
 import copy
+
 try:
-  from email.generator import BytesGenerator
+    from email.generator import BytesGenerator
 except ImportError:
-  from email.generator import Generator as BytesGenerator
+    from email.generator import Generator as BytesGenerator
 from email.mime.multipart import MIMEMultipart
 from email.mime.nonmultipart import MIMENonMultipart
 import json
@@ -82,50 +77,54 @@
 
 logger = logging.getLogger(__name__)
 
-URITEMPLATE = re.compile('{[^}]*}')
-VARNAME = re.compile('[a-zA-Z0-9_-]+')
-DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
-                 '{api}/{apiVersion}/rest')
+URITEMPLATE = re.compile("{[^}]*}")
+VARNAME = re.compile("[a-zA-Z0-9_-]+")
+DISCOVERY_URI = (
+    "https://www.googleapis.com/discovery/v1/apis/" "{api}/{apiVersion}/rest"
+)
 V1_DISCOVERY_URI = DISCOVERY_URI
-V2_DISCOVERY_URI = ('https://{api}.googleapis.com/$discovery/rest?'
-                    'version={apiVersion}')
-DEFAULT_METHOD_DOC = 'A description of how to use this function'
-HTTP_PAYLOAD_METHODS = frozenset(['PUT', 'POST', 'PATCH'])
+V2_DISCOVERY_URI = (
+    "https://{api}.googleapis.com/$discovery/rest?" "version={apiVersion}"
+)
+DEFAULT_METHOD_DOC = "A description of how to use this function"
+HTTP_PAYLOAD_METHODS = frozenset(["PUT", "POST", "PATCH"])
 
-_MEDIA_SIZE_BIT_SHIFTS = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
-BODY_PARAMETER_DEFAULT_VALUE = {
-    'description': 'The request body.',
-    'type': 'object',
-}
+_MEDIA_SIZE_BIT_SHIFTS = {"KB": 10, "MB": 20, "GB": 30, "TB": 40}
+BODY_PARAMETER_DEFAULT_VALUE = {"description": "The request body.", "type": "object"}
 MEDIA_BODY_PARAMETER_DEFAULT_VALUE = {
-    'description': ('The filename of the media request body, or an instance '
-                    'of a MediaUpload object.'),
-    'type': 'string',
-    'required': False,
+    "description": (
+        "The filename of the media request body, or an instance "
+        "of a MediaUpload object."
+    ),
+    "type": "string",
+    "required": False,
 }
 MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE = {
-    'description': ('The MIME type of the media request body, or an instance '
-                    'of a MediaUpload object.'),
-    'type': 'string',
-    'required': False,
+    "description": (
+        "The MIME type of the media request body, or an instance "
+        "of a MediaUpload object."
+    ),
+    "type": "string",
+    "required": False,
 }
-_PAGE_TOKEN_NAMES = ('pageToken', 'nextPageToken')
+_PAGE_TOKEN_NAMES = ("pageToken", "nextPageToken")
 
 # Parameters accepted by the stack, but not visible via discovery.
 # TODO(dhermes): Remove 'userip' in 'v2'.
-STACK_QUERY_PARAMETERS = frozenset(['trace', 'pp', 'userip', 'strict'])
-STACK_QUERY_PARAMETER_DEFAULT_VALUE = {'type': 'string', 'location': 'query'}
+STACK_QUERY_PARAMETERS = frozenset(["trace", "pp", "userip", "strict"])
+STACK_QUERY_PARAMETER_DEFAULT_VALUE = {"type": "string", "location": "query"}
 
 # Library-specific reserved words beyond Python keywords.
-RESERVED_WORDS = frozenset(['body'])
+RESERVED_WORDS = frozenset(["body"])
 
 # patch _write_lines to avoid munging '\r' into '\n'
 # ( https://bugs.python.org/issue18886 https://bugs.python.org/issue19003 )
 class _BytesGenerator(BytesGenerator):
-  _write_lines = BytesGenerator.write
+    _write_lines = BytesGenerator.write
+
 
 def fix_method_name(name):
-  """Fix method names to avoid '$' characters and reserved word conflicts.
+    """Fix method names to avoid '$' characters and reserved word conflicts.
 
   Args:
     name: string, method name.
@@ -134,15 +133,15 @@
     The name with '_' appended if the name is a reserved word and '$' 
     replaced with '_'. 
   """
-  name = name.replace('$', '_')
-  if keyword.iskeyword(name) or name in RESERVED_WORDS:
-    return name + '_'
-  else:
-    return name
+    name = name.replace("$", "_")
+    if keyword.iskeyword(name) or name in RESERVED_WORDS:
+        return name + "_"
+    else:
+        return name
 
 
 def key2param(key):
-  """Converts key names into parameter names.
+    """Converts key names into parameter names.
 
   For example, converting "max-results" -> "max_results"
 
@@ -152,31 +151,33 @@
   Returns:
     A safe method name based on the key name.
   """
-  result = []
-  key = list(key)
-  if not key[0].isalpha():
-    result.append('x')
-  for c in key:
-    if c.isalnum():
-      result.append(c)
-    else:
-      result.append('_')
+    result = []
+    key = list(key)
+    if not key[0].isalpha():
+        result.append("x")
+    for c in key:
+        if c.isalnum():
+            result.append(c)
+        else:
+            result.append("_")
 
-  return ''.join(result)
+    return "".join(result)
 
 
 @positional(2)
-def build(serviceName,
-          version,
-          http=None,
-          discoveryServiceUrl=DISCOVERY_URI,
-          developerKey=None,
-          model=None,
-          requestBuilder=HttpRequest,
-          credentials=None,
-          cache_discovery=True,
-          cache=None):
-  """Construct a Resource for interacting with an API.
+def build(
+    serviceName,
+    version,
+    http=None,
+    discoveryServiceUrl=DISCOVERY_URI,
+    developerKey=None,
+    model=None,
+    requestBuilder=HttpRequest,
+    credentials=None,
+    cache_discovery=True,
+    cache=None,
+):
+    """Construct a Resource for interacting with an API.
 
   Construct a Resource object for interacting with an API. The serviceName and
   version are the names from the Discovery service.
@@ -205,38 +206,40 @@
   Returns:
     A Resource object with methods for interacting with the service.
   """
-  params = {
-      'api': serviceName,
-      'apiVersion': version
-      }
+    params = {"api": serviceName, "apiVersion": version}
 
-  if http is None:
-    discovery_http = build_http()
-  else:
-    discovery_http = http
+    if http is None:
+        discovery_http = build_http()
+    else:
+        discovery_http = http
 
-  for discovery_url in (discoveryServiceUrl, V2_DISCOVERY_URI,):
-    requested_url = uritemplate.expand(discovery_url, params)
+    for discovery_url in (discoveryServiceUrl, V2_DISCOVERY_URI):
+        requested_url = uritemplate.expand(discovery_url, params)
 
-    try:
-      content = _retrieve_discovery_doc(
-        requested_url, discovery_http, cache_discovery, cache, developerKey)
-      return build_from_document(content, base=discovery_url, http=http,
-          developerKey=developerKey, model=model, requestBuilder=requestBuilder,
-          credentials=credentials)
-    except HttpError as e:
-      if e.resp.status == http_client.NOT_FOUND:
-        continue
-      else:
-        raise e
+        try:
+            content = _retrieve_discovery_doc(
+                requested_url, discovery_http, cache_discovery, cache, developerKey
+            )
+            return build_from_document(
+                content,
+                base=discovery_url,
+                http=http,
+                developerKey=developerKey,
+                model=model,
+                requestBuilder=requestBuilder,
+                credentials=credentials,
+            )
+        except HttpError as e:
+            if e.resp.status == http_client.NOT_FOUND:
+                continue
+            else:
+                raise e
 
-  raise UnknownApiNameOrVersion(
-        "name: %s  version: %s" % (serviceName, version))
+    raise UnknownApiNameOrVersion("name: %s  version: %s" % (serviceName, version))
 
 
-def _retrieve_discovery_doc(url, http, cache_discovery, cache=None,
-                            developerKey=None):
-  """Retrieves the discovery_doc from cache or the internet.
+def _retrieve_discovery_doc(url, http, cache_discovery, cache=None, developerKey=None):
+    """Retrieves the discovery_doc from cache or the internet.
 
   Args:
     url: string, the URL of the discovery document.
@@ -249,45 +252,46 @@
   Returns:
     A unicode string representation of the discovery document.
   """
-  if cache_discovery:
-    from . import discovery_cache
-    from .discovery_cache import base
-    if cache is None:
-      cache = discovery_cache.autodetect()
-    if cache:
-      content = cache.get(url)
-      if content:
-        return content
+    if cache_discovery:
+        from . import discovery_cache
+        from .discovery_cache import base
 
-  actual_url = url
-  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
-  # variable that contains the network address of the client sending the
-  # request. If it exists then add that to the request for the discovery
-  # document to avoid exceeding the quota on discovery requests.
-  if 'REMOTE_ADDR' in os.environ:
-    actual_url = _add_query_parameter(url, 'userIp', os.environ['REMOTE_ADDR'])
-  if developerKey:
-    actual_url = _add_query_parameter(url, 'key', developerKey)
-  logger.info('URL being requested: GET %s', actual_url)
+        if cache is None:
+            cache = discovery_cache.autodetect()
+        if cache:
+            content = cache.get(url)
+            if content:
+                return content
 
-  resp, content = http.request(actual_url)
+    actual_url = url
+    # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
+    # variable that contains the network address of the client sending the
+    # request. If it exists then add that to the request for the discovery
+    # document to avoid exceeding the quota on discovery requests.
+    if "REMOTE_ADDR" in os.environ:
+        actual_url = _add_query_parameter(url, "userIp", os.environ["REMOTE_ADDR"])
+    if developerKey:
+        actual_url = _add_query_parameter(url, "key", developerKey)
+    logger.info("URL being requested: GET %s", actual_url)
 
-  if resp.status >= 400:
-    raise HttpError(resp, content, uri=actual_url)
+    resp, content = http.request(actual_url)
 
-  try:
-    content = content.decode('utf-8')
-  except AttributeError:
-    pass
+    if resp.status >= 400:
+        raise HttpError(resp, content, uri=actual_url)
 
-  try:
-    service = json.loads(content)
-  except ValueError as e:
-    logger.error('Failed to parse as JSON: ' + content)
-    raise InvalidJsonError()
-  if cache_discovery and cache:
-    cache.set(url, content)
-  return content
+    try:
+        content = content.decode("utf-8")
+    except AttributeError:
+        pass
+
+    try:
+        service = json.loads(content)
+    except ValueError as e:
+        logger.error("Failed to parse as JSON: " + content)
+        raise InvalidJsonError()
+    if cache_discovery and cache:
+        cache.set(url, content)
+    return content
 
 
 @positional(1)
@@ -299,8 +303,9 @@
     developerKey=None,
     model=None,
     requestBuilder=HttpRequest,
-    credentials=None):
-  """Create a Resource for interacting with an API.
+    credentials=None,
+):
+    """Create a Resource for interacting with an API.
 
   Same as `build()`, but constructs the Resource object from a discovery
   document that is it given, as opposed to retrieving one over HTTP.
@@ -328,65 +333,74 @@
     A Resource object with methods for interacting with the service.
   """
 
-  if http is not None and credentials is not None:
-    raise ValueError('Arguments http and credentials are mutually exclusive.')
+    if http is not None and credentials is not None:
+        raise ValueError("Arguments http and credentials are mutually exclusive.")
 
-  if isinstance(service, six.string_types):
-    service = json.loads(service)
-  elif isinstance(service, six.binary_type):
-    service = json.loads(service.decode('utf-8'))
+    if isinstance(service, six.string_types):
+        service = json.loads(service)
+    elif isinstance(service, six.binary_type):
+        service = json.loads(service.decode("utf-8"))
 
-  if  'rootUrl' not in service and (isinstance(http, (HttpMock,
-                                                      HttpMockSequence))):
-      logger.error("You are using HttpMock or HttpMockSequence without" +
-                   "having the service discovery doc in cache. Try calling " +
-                   "build() without mocking once first to populate the " +
-                   "cache.")
-      raise InvalidJsonError()
+    if "rootUrl" not in service and (isinstance(http, (HttpMock, HttpMockSequence))):
+        logger.error(
+            "You are using HttpMock or HttpMockSequence without"
+            + "having the service discovery doc in cache. Try calling "
+            + "build() without mocking once first to populate the "
+            + "cache."
+        )
+        raise InvalidJsonError()
 
-  base = urljoin(service['rootUrl'], service['servicePath'])
-  schema = Schemas(service)
+    base = urljoin(service["rootUrl"], service["servicePath"])
+    schema = Schemas(service)
 
-  # If the http client is not specified, then we must construct an http client
-  # to make requests. If the service has scopes, then we also need to setup
-  # authentication.
-  if http is None:
-    # Does the service require scopes?
-    scopes = list(
-      service.get('auth', {}).get('oauth2', {}).get('scopes', {}).keys())
-
-    # If so, then the we need to setup authentication if no developerKey is
-    # specified.
-    if scopes and not developerKey:
-      # If the user didn't pass in credentials, attempt to acquire application
-      # default credentials.
-      if credentials is None:
-        credentials = _auth.default_credentials()
-
-      # The credentials need to be scoped.
-      credentials = _auth.with_scopes(credentials, scopes)
-
-    # If credentials are provided, create an authorized http instance;
-    # otherwise, skip authentication.
-    if credentials:
-      http = _auth.authorized_http(credentials)
-
-    # If the service doesn't require scopes then there is no need for
+    # If the http client is not specified, then we must construct an http client
+    # to make requests. If the service has scopes, then we also need to setup
     # authentication.
-    else:
-      http = build_http()
+    if http is None:
+        # Does the service require scopes?
+        scopes = list(
+            service.get("auth", {}).get("oauth2", {}).get("scopes", {}).keys()
+        )
 
-  if model is None:
-    features = service.get('features', [])
-    model = JsonModel('dataWrapper' in features)
+        # If so, then we need to set up authentication if no developerKey is
+        # specified.
+        if scopes and not developerKey:
+            # If the user didn't pass in credentials, attempt to acquire application
+            # default credentials.
+            if credentials is None:
+                credentials = _auth.default_credentials()
 
-  return Resource(http=http, baseUrl=base, model=model,
-                  developerKey=developerKey, requestBuilder=requestBuilder,
-                  resourceDesc=service, rootDesc=service, schema=schema)
+            # The credentials need to be scoped.
+            credentials = _auth.with_scopes(credentials, scopes)
+
+        # If credentials are provided, create an authorized http instance;
+        # otherwise, skip authentication.
+        if credentials:
+            http = _auth.authorized_http(credentials)
+
+        # If the service doesn't require scopes then there is no need for
+        # authentication.
+        else:
+            http = build_http()
+
+    if model is None:
+        features = service.get("features", [])
+        model = JsonModel("dataWrapper" in features)
+
+    return Resource(
+        http=http,
+        baseUrl=base,
+        model=model,
+        developerKey=developerKey,
+        requestBuilder=requestBuilder,
+        resourceDesc=service,
+        rootDesc=service,
+        schema=schema,
+    )
 
 
 def _cast(value, schema_type):
-  """Convert value to a string based on JSON Schema type.
+    """Convert value to a string based on JSON Schema type.
 
   See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
   JSON Schema.
@@ -398,26 +412,26 @@
   Returns:
     A string representation of 'value' based on the schema_type.
   """
-  if schema_type == 'string':
-    if type(value) == type('') or type(value) == type(u''):
-      return value
+    if schema_type == "string":
+        if type(value) == type("") or type(value) == type(u""):
+            return value
+        else:
+            return str(value)
+    elif schema_type == "integer":
+        return str(int(value))
+    elif schema_type == "number":
+        return str(float(value))
+    elif schema_type == "boolean":
+        return str(bool(value)).lower()
     else:
-      return str(value)
-  elif schema_type == 'integer':
-    return str(int(value))
-  elif schema_type == 'number':
-    return str(float(value))
-  elif schema_type == 'boolean':
-    return str(bool(value)).lower()
-  else:
-    if type(value) == type('') or type(value) == type(u''):
-      return value
-    else:
-      return str(value)
+        if type(value) == type("") or type(value) == type(u""):
+            return value
+        else:
+            return str(value)
 
 
 def _media_size_to_long(maxSize):
-  """Convert a string media size, such as 10GB or 3TB into an integer.
+    """Convert a string media size, such as 10GB or 3TB into an integer.
 
   Args:
     maxSize: string, size as a string, such as 2MB or 7GB.
@@ -425,18 +439,18 @@
   Returns:
     The size as an integer value.
   """
-  if len(maxSize) < 2:
-    return 0
-  units = maxSize[-2:].upper()
-  bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
-  if bit_shift is not None:
-    return int(maxSize[:-2]) << bit_shift
-  else:
-    return int(maxSize)
+    if len(maxSize) < 2:
+        return 0
+    units = maxSize[-2:].upper()
+    bit_shift = _MEDIA_SIZE_BIT_SHIFTS.get(units)
+    if bit_shift is not None:
+        return int(maxSize[:-2]) << bit_shift
+    else:
+        return int(maxSize)
 
 
 def _media_path_url_from_info(root_desc, path_url):
-  """Creates an absolute media path URL.
+    """Creates an absolute media path URL.
 
   Constructed using the API root URI and service path from the discovery
   document and the relative path for the API method.
@@ -449,15 +463,15 @@
   Returns:
     String; the absolute URI for media upload for the API method.
   """
-  return '%(root)supload/%(service_path)s%(path)s' % {
-      'root': root_desc['rootUrl'],
-      'service_path': root_desc['servicePath'],
-      'path': path_url,
-  }
+    return "%(root)supload/%(service_path)s%(path)s" % {
+        "root": root_desc["rootUrl"],
+        "service_path": root_desc["servicePath"],
+        "path": path_url,
+    }
 
 
 def _fix_up_parameters(method_desc, root_desc, http_method, schema):
-  """Updates parameters of an API method with values specific to this library.
+    """Updates parameters of an API method with values specific to this library.
 
   Specifically, adds whatever global parameters are specified by the API to the
   parameters for the individual method. Also adds parameters which don't
@@ -480,28 +494,28 @@
     The updated Dictionary stored in the 'parameters' key of the method
         description dictionary.
   """
-  parameters = method_desc.setdefault('parameters', {})
+    parameters = method_desc.setdefault("parameters", {})
 
-  # Add in the parameters common to all methods.
-  for name, description in six.iteritems(root_desc.get('parameters', {})):
-    parameters[name] = description
+    # Add in the parameters common to all methods.
+    for name, description in six.iteritems(root_desc.get("parameters", {})):
+        parameters[name] = description
 
-  # Add in undocumented query parameters.
-  for name in STACK_QUERY_PARAMETERS:
-    parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
+    # Add in undocumented query parameters.
+    for name in STACK_QUERY_PARAMETERS:
+        parameters[name] = STACK_QUERY_PARAMETER_DEFAULT_VALUE.copy()
 
-  # Add 'body' (our own reserved word) to parameters if the method supports
-  # a request payload.
-  if http_method in HTTP_PAYLOAD_METHODS and 'request' in method_desc:
-    body = BODY_PARAMETER_DEFAULT_VALUE.copy()
-    body.update(method_desc['request'])
-    parameters['body'] = body
+    # Add 'body' (our own reserved word) to parameters if the method supports
+    # a request payload.
+    if http_method in HTTP_PAYLOAD_METHODS and "request" in method_desc:
+        body = BODY_PARAMETER_DEFAULT_VALUE.copy()
+        body.update(method_desc["request"])
+        parameters["body"] = body
 
-  return parameters
+    return parameters
 
 
 def _fix_up_media_upload(method_desc, root_desc, path_url, parameters):
-  """Adds 'media_body' and 'media_mime_type' parameters if supported by method.
+    """Adds 'media_body' and 'media_mime_type' parameters if supported by method.
 
   SIDE EFFECTS: If there is a 'mediaUpload' in the method description, adds
   'media_upload' key to parameters.
@@ -528,21 +542,21 @@
         the discovery document and the relative path for the API method. If
         media upload is not supported, this is None.
   """
-  media_upload = method_desc.get('mediaUpload', {})
-  accept = media_upload.get('accept', [])
-  max_size = _media_size_to_long(media_upload.get('maxSize', ''))
-  media_path_url = None
+    media_upload = method_desc.get("mediaUpload", {})
+    accept = media_upload.get("accept", [])
+    max_size = _media_size_to_long(media_upload.get("maxSize", ""))
+    media_path_url = None
 
-  if media_upload:
-    media_path_url = _media_path_url_from_info(root_desc, path_url)
-    parameters['media_body'] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
-    parameters['media_mime_type'] = MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE.copy()
+    if media_upload:
+        media_path_url = _media_path_url_from_info(root_desc, path_url)
+        parameters["media_body"] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE.copy()
+        parameters["media_mime_type"] = MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE.copy()
 
-  return accept, max_size, media_path_url
+    return accept, max_size, media_path_url
 
 
 def _fix_up_method_description(method_desc, root_desc, schema):
-  """Updates a method description in a discovery document.
+    """Updates a method description in a discovery document.
 
   SIDE EFFECTS: Changes the parameters dictionary in the method description with
   extra parameters which are used locally.
@@ -573,40 +587,41 @@
         the discovery document and the relative path for the API method. If
         media upload is not supported, this is None.
   """
-  path_url = method_desc['path']
-  http_method = method_desc['httpMethod']
-  method_id = method_desc['id']
+    path_url = method_desc["path"]
+    http_method = method_desc["httpMethod"]
+    method_id = method_desc["id"]
 
-  parameters = _fix_up_parameters(method_desc, root_desc, http_method, schema)
-  # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
-  # 'parameters' key and needs to know if there is a 'body' parameter because it
-  # also sets a 'media_body' parameter.
-  accept, max_size, media_path_url = _fix_up_media_upload(
-      method_desc, root_desc, path_url, parameters)
+    parameters = _fix_up_parameters(method_desc, root_desc, http_method, schema)
+    # Order is important. `_fix_up_media_upload` needs `method_desc` to have a
+    # 'parameters' key and needs to know if there is a 'body' parameter because it
+    # also sets a 'media_body' parameter.
+    accept, max_size, media_path_url = _fix_up_media_upload(
+        method_desc, root_desc, path_url, parameters
+    )
 
-  return path_url, http_method, method_id, accept, max_size, media_path_url
+    return path_url, http_method, method_id, accept, max_size, media_path_url
 
 
 def _urljoin(base, url):
-  """Custom urljoin replacement supporting : before / in url."""
-  # In general, it's unsafe to simply join base and url. However, for
-  # the case of discovery documents, we know:
-  #  * base will never contain params, query, or fragment
-  #  * url will never contain a scheme or net_loc.
-  # In general, this means we can safely join on /; we just need to
-  # ensure we end up with precisely one / joining base and url. The
-  # exception here is the case of media uploads, where url will be an
-  # absolute url.
-  if url.startswith('http://') or url.startswith('https://'):
-    return urljoin(base, url)
-  new_base = base if base.endswith('/') else base + '/'
-  new_url = url[1:] if url.startswith('/') else url
-  return new_base + new_url
+    """Custom urljoin replacement supporting : before / in url."""
+    # In general, it's unsafe to simply join base and url. However, for
+    # the case of discovery documents, we know:
+    #  * base will never contain params, query, or fragment
+    #  * url will never contain a scheme or net_loc.
+    # In general, this means we can safely join on /; we just need to
+    # ensure we end up with precisely one / joining base and url. The
+    # exception here is the case of media uploads, where url will be an
+    # absolute url.
+    if url.startswith("http://") or url.startswith("https://"):
+        return urljoin(base, url)
+    new_base = base if base.endswith("/") else base + "/"
+    new_url = url[1:] if url.startswith("/") else url
+    return new_base + new_url
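
The join rules above are easy to exercise in isolation; this sketch copies the helper verbatim (importing urljoin from the standard library for the absolute-URL case):

from urllib.parse import urljoin


def _urljoin(base, url):
    """Copy of the helper above, for illustration."""
    if url.startswith("http://") or url.startswith("https://"):
        return urljoin(base, url)
    new_base = base if base.endswith("/") else base + "/"
    new_url = url[1:] if url.startswith("/") else url
    return new_base + new_url


# Relative discovery paths are joined with exactly one '/', and ':' survives.
assert (
    _urljoin("https://www.googleapis.com/drive/v3/", "files/{fileId}:copy")
    == "https://www.googleapis.com/drive/v3/files/{fileId}:copy"
)
assert _urljoin("https://www.googleapis.com/drive/v3", "/files") == "https://www.googleapis.com/drive/v3/files"
# Absolute media-upload URLs fall back to the standard urljoin.
assert _urljoin("https://www.googleapis.com/drive/v3/", "https://upload.example.com/x") == "https://upload.example.com/x"
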
 
 
 # TODO(dhermes): Convert this class to ResourceMethod and make it callable
 class ResourceMethodParameters(object):
-  """Represents the parameters associated with a method.
+    """Represents the parameters associated with a method.
 
   Attributes:
     argmap: Map from method parameter name (string) to query parameter name
@@ -630,8 +645,8 @@
        where each list of strings is the list of acceptable enum values.
   """
 
-  def __init__(self, method_desc):
-    """Constructor for ResourceMethodParameters.
+    def __init__(self, method_desc):
+        """Constructor for ResourceMethodParameters.
 
     Sets default values and defers to set_parameters to populate.
 
@@ -640,21 +655,21 @@
           comes from the dictionary of methods stored in the 'methods' key in
           the deserialized discovery document.
     """
-    self.argmap = {}
-    self.required_params = []
-    self.repeated_params = []
-    self.pattern_params = {}
-    self.query_params = []
-    # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
-    #                parsing is gotten rid of.
-    self.path_params = set()
-    self.param_types = {}
-    self.enum_params = {}
+        self.argmap = {}
+        self.required_params = []
+        self.repeated_params = []
+        self.pattern_params = {}
+        self.query_params = []
+        # TODO(dhermes): Change path_params to a list if the extra URITEMPLATE
+        #                parsing is gotten rid of.
+        self.path_params = set()
+        self.param_types = {}
+        self.enum_params = {}
 
-    self.set_parameters(method_desc)
+        self.set_parameters(method_desc)
 
-  def set_parameters(self, method_desc):
-    """Populates maps and lists based on method description.
+    def set_parameters(self, method_desc):
+        """Populates maps and lists based on method description.
 
     Iterates through each parameter for the method and parses the values from
     the parameter dictionary.
@@ -664,37 +679,37 @@
           comes from the dictionary of methods stored in the 'methods' key in
           the deserialized discovery document.
     """
-    for arg, desc in six.iteritems(method_desc.get('parameters', {})):
-      param = key2param(arg)
-      self.argmap[param] = arg
+        for arg, desc in six.iteritems(method_desc.get("parameters", {})):
+            param = key2param(arg)
+            self.argmap[param] = arg
 
-      if desc.get('pattern'):
-        self.pattern_params[param] = desc['pattern']
-      if desc.get('enum'):
-        self.enum_params[param] = desc['enum']
-      if desc.get('required'):
-        self.required_params.append(param)
-      if desc.get('repeated'):
-        self.repeated_params.append(param)
-      if desc.get('location') == 'query':
-        self.query_params.append(param)
-      if desc.get('location') == 'path':
-        self.path_params.add(param)
-      self.param_types[param] = desc.get('type', 'string')
+            if desc.get("pattern"):
+                self.pattern_params[param] = desc["pattern"]
+            if desc.get("enum"):
+                self.enum_params[param] = desc["enum"]
+            if desc.get("required"):
+                self.required_params.append(param)
+            if desc.get("repeated"):
+                self.repeated_params.append(param)
+            if desc.get("location") == "query":
+                self.query_params.append(param)
+            if desc.get("location") == "path":
+                self.path_params.add(param)
+            self.param_types[param] = desc.get("type", "string")
 
-    # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
-    #                should have all path parameters already marked with
-    #                'location: path'.
-    for match in URITEMPLATE.finditer(method_desc['path']):
-      for namematch in VARNAME.finditer(match.group(0)):
-        name = key2param(namematch.group(0))
-        self.path_params.add(name)
-        if name in self.query_params:
-          self.query_params.remove(name)
+        # TODO(dhermes): Determine if this is still necessary. Discovery based APIs
+        #                should have all path parameters already marked with
+        #                'location: path'.
+        for match in URITEMPLATE.finditer(method_desc["path"]):
+            for namematch in VARNAME.finditer(match.group(0)):
+                name = key2param(namematch.group(0))
+                self.path_params.add(name)
+                if name in self.query_params:
+                    self.query_params.remove(name)
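
A condensed view of how set_parameters above classifies a discovery 'parameters' block; key2param_sketch is a hypothetical stand-in for the module's key2param helper, and the URITEMPLATE path-scanning step is omitted:

def key2param_sketch(key):
    """Hypothetical stand-in for key2param: turn a key into a Python identifier."""
    prefix = "x" if not key[0].isalpha() else ""
    return prefix + "".join(c if c.isalnum() else "_" for c in key)


method_desc = {
    "parameters": {
        "fileId": {"type": "string", "required": True, "location": "path"},
        "pageSize": {"type": "integer", "location": "query"},
        "max-results": {"type": "integer", "location": "query"},
    }
}

argmap, required, query, path_params = {}, [], [], set()
for arg, desc in method_desc["parameters"].items():
    param = key2param_sketch(arg)
    argmap[param] = arg
    if desc.get("required"):
        required.append(param)
    if desc.get("location") == "query":
        query.append(param)
    if desc.get("location") == "path":
        path_params.add(param)

assert argmap["max_results"] == "max-results"
assert required == ["fileId"] and path_params == {"fileId"}
assert sorted(query) == ["max_results", "pageSize"]
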
 
 
 def createMethod(methodName, methodDesc, rootDesc, schema):
-  """Creates a method for attaching to a Resource.
+    """Creates a method for attaching to a Resource.
 
   Args:
     methodName: string, name of the method to use.
@@ -703,239 +718,262 @@
     rootDesc: object, the entire deserialized discovery document.
     schema: object, mapping of schema names to schema descriptions.
   """
-  methodName = fix_method_name(methodName)
-  (pathUrl, httpMethod, methodId, accept,
-   maxSize, mediaPathUrl) = _fix_up_method_description(methodDesc, rootDesc, schema)
+    methodName = fix_method_name(methodName)
+    (
+        pathUrl,
+        httpMethod,
+        methodId,
+        accept,
+        maxSize,
+        mediaPathUrl,
+    ) = _fix_up_method_description(methodDesc, rootDesc, schema)
 
-  parameters = ResourceMethodParameters(methodDesc)
+    parameters = ResourceMethodParameters(methodDesc)
 
-  def method(self, **kwargs):
-    # Don't bother with doc string, it will be over-written by createMethod.
+    def method(self, **kwargs):
+        # Don't bother with doc string, it will be over-written by createMethod.
 
-    for name in six.iterkeys(kwargs):
-      if name not in parameters.argmap:
-        raise TypeError('Got an unexpected keyword argument "%s"' % name)
+        for name in six.iterkeys(kwargs):
+            if name not in parameters.argmap:
+                raise TypeError('Got an unexpected keyword argument "%s"' % name)
 
-    # Remove args that have a value of None.
-    keys = list(kwargs.keys())
-    for name in keys:
-      if kwargs[name] is None:
-        del kwargs[name]
+        # Remove args that have a value of None.
+        keys = list(kwargs.keys())
+        for name in keys:
+            if kwargs[name] is None:
+                del kwargs[name]
 
-    for name in parameters.required_params:
-      if name not in kwargs:
-        # temporary workaround for non-paging methods incorrectly requiring
-        # page token parameter (cf. drive.changes.watch vs. drive.changes.list)
-        if name not in _PAGE_TOKEN_NAMES or _findPageTokenName(
-            _methodProperties(methodDesc, schema, 'response')):
-          raise TypeError('Missing required parameter "%s"' % name)
+        for name in parameters.required_params:
+            if name not in kwargs:
+                # temporary workaround for non-paging methods incorrectly requiring
+                # page token parameter (cf. drive.changes.watch vs. drive.changes.list)
+                if name not in _PAGE_TOKEN_NAMES or _findPageTokenName(
+                    _methodProperties(methodDesc, schema, "response")
+                ):
+                    raise TypeError('Missing required parameter "%s"' % name)
 
-    for name, regex in six.iteritems(parameters.pattern_params):
-      if name in kwargs:
-        if isinstance(kwargs[name], six.string_types):
-          pvalues = [kwargs[name]]
+        for name, regex in six.iteritems(parameters.pattern_params):
+            if name in kwargs:
+                if isinstance(kwargs[name], six.string_types):
+                    pvalues = [kwargs[name]]
+                else:
+                    pvalues = kwargs[name]
+                for pvalue in pvalues:
+                    if re.match(regex, pvalue) is None:
+                        raise TypeError(
+                            'Parameter "%s" value "%s" does not match the pattern "%s"'
+                            % (name, pvalue, regex)
+                        )
+
+        for name, enums in six.iteritems(parameters.enum_params):
+            if name in kwargs:
+                # We need to handle the case of a repeated enum
+                # name differently, since we want to handle both
+                # arg='value' and arg=['value1', 'value2']
+                if name in parameters.repeated_params and not isinstance(
+                    kwargs[name], six.string_types
+                ):
+                    values = kwargs[name]
+                else:
+                    values = [kwargs[name]]
+                for value in values:
+                    if value not in enums:
+                        raise TypeError(
+                            'Parameter "%s" value "%s" is not an allowed value in "%s"'
+                            % (name, value, str(enums))
+                        )
+
+        actual_query_params = {}
+        actual_path_params = {}
+        for key, value in six.iteritems(kwargs):
+            to_type = parameters.param_types.get(key, "string")
+            # For repeated parameters we cast each member of the list.
+            if key in parameters.repeated_params and type(value) == type([]):
+                cast_value = [_cast(x, to_type) for x in value]
+            else:
+                cast_value = _cast(value, to_type)
+            if key in parameters.query_params:
+                actual_query_params[parameters.argmap[key]] = cast_value
+            if key in parameters.path_params:
+                actual_path_params[parameters.argmap[key]] = cast_value
+        body_value = kwargs.get("body", None)
+        media_filename = kwargs.get("media_body", None)
+        media_mime_type = kwargs.get("media_mime_type", None)
+
+        if self._developerKey:
+            actual_query_params["key"] = self._developerKey
+
+        model = self._model
+        if methodName.endswith("_media"):
+            model = MediaModel()
+        elif "response" not in methodDesc:
+            model = RawModel()
+
+        headers = {}
+        headers, params, query, body = model.request(
+            headers, actual_path_params, actual_query_params, body_value
+        )
+
+        expanded_url = uritemplate.expand(pathUrl, params)
+        url = _urljoin(self._baseUrl, expanded_url + query)
+
+        resumable = None
+        multipart_boundary = ""
+
+        if media_filename:
+            # Ensure we end up with a valid MediaUpload object.
+            if isinstance(media_filename, six.string_types):
+                if media_mime_type is None:
+                    logger.warning(
+                        "media_mime_type argument not specified: trying to auto-detect for %s",
+                        media_filename,
+                    )
+                    media_mime_type, _ = mimetypes.guess_type(media_filename)
+                if media_mime_type is None:
+                    raise UnknownFileType(media_filename)
+                if not mimeparse.best_match([media_mime_type], ",".join(accept)):
+                    raise UnacceptableMimeTypeError(media_mime_type)
+                media_upload = MediaFileUpload(media_filename, mimetype=media_mime_type)
+            elif isinstance(media_filename, MediaUpload):
+                media_upload = media_filename
+            else:
+                raise TypeError("media_filename must be str or MediaUpload.")
+
+            # Check the maxSize
+            if media_upload.size() is not None and media_upload.size() > maxSize > 0:
+                raise MediaUploadSizeError("Media larger than: %s" % maxSize)
+
+            # Use the media path uri for media uploads
+            expanded_url = uritemplate.expand(mediaPathUrl, params)
+            url = _urljoin(self._baseUrl, expanded_url + query)
+            if media_upload.resumable():
+                url = _add_query_parameter(url, "uploadType", "resumable")
+
+            if media_upload.resumable():
+                # This is all we need to do for resumable, if the body exists it gets
+                # sent in the first request, otherwise an empty body is sent.
+                resumable = media_upload
+            else:
+                # A non-resumable upload
+                if body is None:
+                    # This is a simple media upload
+                    headers["content-type"] = media_upload.mimetype()
+                    body = media_upload.getbytes(0, media_upload.size())
+                    url = _add_query_parameter(url, "uploadType", "media")
+                else:
+                    # This is a multipart/related upload.
+                    msgRoot = MIMEMultipart("related")
+                    # msgRoot should not write out its own headers
+                    setattr(msgRoot, "_write_headers", lambda self: None)
+
+                    # attach the body as one part
+                    msg = MIMENonMultipart(*headers["content-type"].split("/"))
+                    msg.set_payload(body)
+                    msgRoot.attach(msg)
+
+                    # attach the media as the second part
+                    msg = MIMENonMultipart(*media_upload.mimetype().split("/"))
+                    msg["Content-Transfer-Encoding"] = "binary"
+
+                    payload = media_upload.getbytes(0, media_upload.size())
+                    msg.set_payload(payload)
+                    msgRoot.attach(msg)
+                    # encode the body: note that we can't use `as_string`, because
+                    # it plays games with `From ` lines.
+                    fp = BytesIO()
+                    g = _BytesGenerator(fp, mangle_from_=False)
+                    g.flatten(msgRoot, unixfrom=False)
+                    body = fp.getvalue()
+
+                    multipart_boundary = msgRoot.get_boundary()
+                    headers["content-type"] = (
+                        "multipart/related; " 'boundary="%s"'
+                    ) % multipart_boundary
+                    url = _add_query_parameter(url, "uploadType", "multipart")
+
+        logger.info("URL being requested: %s %s" % (httpMethod, url))
+        return self._requestBuilder(
+            self._http,
+            model.response,
+            url,
+            method=httpMethod,
+            body=body,
+            headers=headers,
+            methodId=methodId,
+            resumable=resumable,
+        )
+
+    docs = [methodDesc.get("description", DEFAULT_METHOD_DOC), "\n\n"]
+    if len(parameters.argmap) > 0:
+        docs.append("Args:\n")
+
+    # Skip undocumented params and params common to all methods.
+    skip_parameters = list(rootDesc.get("parameters", {}).keys())
+    skip_parameters.extend(STACK_QUERY_PARAMETERS)
+
+    all_args = list(parameters.argmap.keys())
+    args_ordered = [key2param(s) for s in methodDesc.get("parameterOrder", [])]
+
+    # Move body to the front of the line.
+    if "body" in all_args:
+        args_ordered.append("body")
+
+    for name in all_args:
+        if name not in args_ordered:
+            args_ordered.append(name)
+
+    for arg in args_ordered:
+        if arg in skip_parameters:
+            continue
+
+        repeated = ""
+        if arg in parameters.repeated_params:
+            repeated = " (repeated)"
+        required = ""
+        if arg in parameters.required_params:
+            required = " (required)"
+        paramdesc = methodDesc["parameters"][parameters.argmap[arg]]
+        paramdoc = paramdesc.get("description", "A parameter")
+        if "$ref" in paramdesc:
+            docs.append(
+                ("  %s: object, %s%s%s\n    The object takes the" " form of:\n\n%s\n\n")
+                % (
+                    arg,
+                    paramdoc,
+                    required,
+                    repeated,
+                    schema.prettyPrintByName(paramdesc["$ref"]),
+                )
+            )
         else:
-          pvalues = kwargs[name]
-        for pvalue in pvalues:
-          if re.match(regex, pvalue) is None:
-            raise TypeError(
-                'Parameter "%s" value "%s" does not match the pattern "%s"' %
-                (name, pvalue, regex))
-
-    for name, enums in six.iteritems(parameters.enum_params):
-      if name in kwargs:
-        # We need to handle the case of a repeated enum
-        # name differently, since we want to handle both
-        # arg='value' and arg=['value1', 'value2']
-        if (name in parameters.repeated_params and
-            not isinstance(kwargs[name], six.string_types)):
-          values = kwargs[name]
+            paramtype = paramdesc.get("type", "string")
+            docs.append(
+                "  %s: %s, %s%s%s\n" % (arg, paramtype, paramdoc, required, repeated)
+            )
+        enum = paramdesc.get("enum", [])
+        enumDesc = paramdesc.get("enumDescriptions", [])
+        if enum and enumDesc:
+            docs.append("    Allowed values\n")
+            for (name, desc) in zip(enum, enumDesc):
+                docs.append("      %s - %s\n" % (name, desc))
+    if "response" in methodDesc:
+        if methodName.endswith("_media"):
+            docs.append("\nReturns:\n  The media object as a string.\n\n    ")
         else:
-          values = [kwargs[name]]
-        for value in values:
-          if value not in enums:
-            raise TypeError(
-                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
-                (name, value, str(enums)))
+            docs.append("\nReturns:\n  An object of the form:\n\n    ")
+            docs.append(schema.prettyPrintSchema(methodDesc["response"]))
 
-    actual_query_params = {}
-    actual_path_params = {}
-    for key, value in six.iteritems(kwargs):
-      to_type = parameters.param_types.get(key, 'string')
-      # For repeated parameters we cast each member of the list.
-      if key in parameters.repeated_params and type(value) == type([]):
-        cast_value = [_cast(x, to_type) for x in value]
-      else:
-        cast_value = _cast(value, to_type)
-      if key in parameters.query_params:
-        actual_query_params[parameters.argmap[key]] = cast_value
-      if key in parameters.path_params:
-        actual_path_params[parameters.argmap[key]] = cast_value
-    body_value = kwargs.get('body', None)
-    media_filename = kwargs.get('media_body', None)
-    media_mime_type = kwargs.get('media_mime_type', None)
-
-    if self._developerKey:
-      actual_query_params['key'] = self._developerKey
-
-    model = self._model
-    if methodName.endswith('_media'):
-      model = MediaModel()
-    elif 'response' not in methodDesc:
-      model = RawModel()
-
-    headers = {}
-    headers, params, query, body = model.request(headers,
-        actual_path_params, actual_query_params, body_value)
-
-    expanded_url = uritemplate.expand(pathUrl, params)
-    url = _urljoin(self._baseUrl, expanded_url + query)
-
-    resumable = None
-    multipart_boundary = ''
-
-    if media_filename:
-      # Ensure we end up with a valid MediaUpload object.
-      if isinstance(media_filename, six.string_types):
-        if media_mime_type is None:
-          logger.warning(
-              'media_mime_type argument not specified: trying to auto-detect for %s',
-              media_filename)
-          media_mime_type, _ = mimetypes.guess_type(media_filename)
-        if media_mime_type is None:
-          raise UnknownFileType(media_filename)
-        if not mimeparse.best_match([media_mime_type], ','.join(accept)):
-          raise UnacceptableMimeTypeError(media_mime_type)
-        media_upload = MediaFileUpload(media_filename,
-                                       mimetype=media_mime_type)
-      elif isinstance(media_filename, MediaUpload):
-        media_upload = media_filename
-      else:
-        raise TypeError('media_filename must be str or MediaUpload.')
-
-      # Check the maxSize
-      if media_upload.size() is not None and media_upload.size() > maxSize > 0:
-        raise MediaUploadSizeError("Media larger than: %s" % maxSize)
-
-      # Use the media path uri for media uploads
-      expanded_url = uritemplate.expand(mediaPathUrl, params)
-      url = _urljoin(self._baseUrl, expanded_url + query)
-      if media_upload.resumable():
-        url = _add_query_parameter(url, 'uploadType', 'resumable')
-
-      if media_upload.resumable():
-        # This is all we need to do for resumable, if the body exists it gets
-        # sent in the first request, otherwise an empty body is sent.
-        resumable = media_upload
-      else:
-        # A non-resumable upload
-        if body is None:
-          # This is a simple media upload
-          headers['content-type'] = media_upload.mimetype()
-          body = media_upload.getbytes(0, media_upload.size())
-          url = _add_query_parameter(url, 'uploadType', 'media')
-        else:
-          # This is a multipart/related upload.
-          msgRoot = MIMEMultipart('related')
-          # msgRoot should not write out it's own headers
-          setattr(msgRoot, '_write_headers', lambda self: None)
-
-          # attach the body as one part
-          msg = MIMENonMultipart(*headers['content-type'].split('/'))
-          msg.set_payload(body)
-          msgRoot.attach(msg)
-
-          # attach the media as the second part
-          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
-          msg['Content-Transfer-Encoding'] = 'binary'
-
-          payload = media_upload.getbytes(0, media_upload.size())
-          msg.set_payload(payload)
-          msgRoot.attach(msg)
-          # encode the body: note that we can't use `as_string`, because
-          # it plays games with `From ` lines.
-          fp = BytesIO()
-          g = _BytesGenerator(fp, mangle_from_=False)
-          g.flatten(msgRoot, unixfrom=False)
-          body = fp.getvalue()
-
-          multipart_boundary = msgRoot.get_boundary()
-          headers['content-type'] = ('multipart/related; '
-                                     'boundary="%s"') % multipart_boundary
-          url = _add_query_parameter(url, 'uploadType', 'multipart')
-
-    logger.info('URL being requested: %s %s' % (httpMethod,url))
-    return self._requestBuilder(self._http,
-                                model.response,
-                                url,
-                                method=httpMethod,
-                                body=body,
-                                headers=headers,
-                                methodId=methodId,
-                                resumable=resumable)
-
-  docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
-  if len(parameters.argmap) > 0:
-    docs.append('Args:\n')
-
-  # Skip undocumented params and params common to all methods.
-  skip_parameters = list(rootDesc.get('parameters', {}).keys())
-  skip_parameters.extend(STACK_QUERY_PARAMETERS)
-
-  all_args = list(parameters.argmap.keys())
-  args_ordered = [key2param(s) for s in methodDesc.get('parameterOrder', [])]
-
-  # Move body to the front of the line.
-  if 'body' in all_args:
-    args_ordered.append('body')
-
-  for name in all_args:
-    if name not in args_ordered:
-      args_ordered.append(name)
-
-  for arg in args_ordered:
-    if arg in skip_parameters:
-      continue
-
-    repeated = ''
-    if arg in parameters.repeated_params:
-      repeated = ' (repeated)'
-    required = ''
-    if arg in parameters.required_params:
-      required = ' (required)'
-    paramdesc = methodDesc['parameters'][parameters.argmap[arg]]
-    paramdoc = paramdesc.get('description', 'A parameter')
-    if '$ref' in paramdesc:
-      docs.append(
-          ('  %s: object, %s%s%s\n    The object takes the'
-          ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
-            schema.prettyPrintByName(paramdesc['$ref'])))
-    else:
-      paramtype = paramdesc.get('type', 'string')
-      docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
-                                          repeated))
-    enum = paramdesc.get('enum', [])
-    enumDesc = paramdesc.get('enumDescriptions', [])
-    if enum and enumDesc:
-      docs.append('    Allowed values\n')
-      for (name, desc) in zip(enum, enumDesc):
-        docs.append('      %s - %s\n' % (name, desc))
-  if 'response' in methodDesc:
-    if methodName.endswith('_media'):
-      docs.append('\nReturns:\n  The media object as a string.\n\n    ')
-    else:
-      docs.append('\nReturns:\n  An object of the form:\n\n    ')
-      docs.append(schema.prettyPrintSchema(methodDesc['response']))
-
-  setattr(method, '__doc__', ''.join(docs))
-  return (methodName, method)
+    setattr(method, "__doc__", "".join(docs))
+    return (methodName, method)
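
The multipart/related branch inside method() can be exercised on its own with the standard email package; BytesGenerator here stands in for the module's _BytesGenerator, and the metadata/media values are made up:

from email.generator import BytesGenerator
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from io import BytesIO

metadata = '{"name": "report.csv"}'  # would come from model.request()
media_bytes = b"a,b\n1,2\n"          # would come from media_upload.getbytes()

msg_root = MIMEMultipart("related")
# Suppress the outer headers; only the boundary-delimited body is sent over HTTP.
setattr(msg_root, "_write_headers", lambda generator: None)

part = MIMENonMultipart("application", "json")
part.set_payload(metadata)
msg_root.attach(part)

part = MIMENonMultipart("text", "csv")
part["Content-Transfer-Encoding"] = "binary"
part.set_payload(media_bytes)
msg_root.attach(part)

# as_string() is avoided because it mangles 'From ' lines; flatten to bytes instead.
fp = BytesIO()
BytesGenerator(fp, mangle_from_=False).flatten(msg_root, unixfrom=False)
body = fp.getvalue()
content_type = 'multipart/related; boundary="%s"' % msg_root.get_boundary()
assert msg_root.get_boundary().encode("ascii") in body
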
 
 
-def createNextMethod(methodName,
-                     pageTokenName='pageToken',
-                     nextPageTokenName='nextPageToken',
-                     isPageTokenParameter=True):
-  """Creates any _next methods for attaching to a Resource.
+def createNextMethod(
+    methodName,
+    pageTokenName="pageToken",
+    nextPageTokenName="nextPageToken",
+    isPageTokenParameter=True,
+):
+    """Creates any _next methods for attaching to a Resource.
 
   The _next methods allow for easy iteration through list() responses.
 
@@ -946,10 +984,10 @@
     isPageTokenParameter: Boolean, True if request page token is a query
         parameter, False if request page token is a field of the request body.
   """
-  methodName = fix_method_name(methodName)
+    methodName = fix_method_name(methodName)
 
-  def methodNext(self, previous_request, previous_response):
-    """Retrieves the next page of results.
+    def methodNext(self, previous_request, previous_response):
+        """Retrieves the next page of results.
 
 Args:
   previous_request: The request for the previous page. (required)
@@ -959,39 +997,49 @@
   A request object that you can call 'execute()' on to request the next
   page. Returns None if there are no more items in the collection.
     """
-    # Retrieve nextPageToken from previous_response
-    # Use as pageToken in previous_request to create new request.
+        # Retrieve nextPageToken from previous_response
+        # Use as pageToken in previous_request to create new request.
 
-    nextPageToken = previous_response.get(nextPageTokenName, None)
-    if not nextPageToken:
-      return None
+        nextPageToken = previous_response.get(nextPageTokenName, None)
+        if not nextPageToken:
+            return None
 
-    request = copy.copy(previous_request)
+        request = copy.copy(previous_request)
 
-    if isPageTokenParameter:
-        # Replace pageToken value in URI
-        request.uri = _add_query_parameter(
-            request.uri, pageTokenName, nextPageToken)
-        logger.info('Next page request URL: %s %s' % (methodName, request.uri))
-    else:
-        # Replace pageToken value in request body
-        model = self._model
-        body = model.deserialize(request.body)
-        body[pageTokenName] = nextPageToken
-        request.body = model.serialize(body)
-        logger.info('Next page request body: %s %s' % (methodName, body))
+        if isPageTokenParameter:
+            # Replace pageToken value in URI
+            request.uri = _add_query_parameter(
+                request.uri, pageTokenName, nextPageToken
+            )
+            logger.info("Next page request URL: %s %s" % (methodName, request.uri))
+        else:
+            # Replace pageToken value in request body
+            model = self._model
+            body = model.deserialize(request.body)
+            body[pageTokenName] = nextPageToken
+            request.body = model.serialize(body)
+            logger.info("Next page request body: %s %s" % (methodName, body))
 
-    return request
+        return request
 
-  return (methodName, methodNext)
+    return (methodName, methodNext)
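
The generated *_next methods are what drive the familiar pagination loop; an illustrative usage sketch (assumes google-api-python-client is installed and suitable credentials are available):

from googleapiclient.discovery import build

service = build("drive", "v3")  # hypothetical setup; real credentials required

request = service.files().list(pageSize=100)
while request is not None:
    response = request.execute()
    for item in response.get("files", []):
        print(item.get("name"))
    # list_next() returns None once the response no longer carries a nextPageToken.
    request = service.files().list_next(request, response)
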
 
 
 class Resource(object):
-  """A class for interacting with a resource."""
+    """A class for interacting with a resource."""
 
-  def __init__(self, http, baseUrl, model, requestBuilder, developerKey,
-               resourceDesc, rootDesc, schema):
-    """Build a Resource from the API description.
+    def __init__(
+        self,
+        http,
+        baseUrl,
+        model,
+        requestBuilder,
+        developerKey,
+        resourceDesc,
+        rootDesc,
+        schema,
+    ):
+        """Build a Resource from the API description.
 
     Args:
       http: httplib2.Http, Object to make http requests with.
@@ -1008,63 +1056,66 @@
       rootDesc: object, the entire deserialized discovery document.
       schema: object, mapping of schema names to schema descriptions.
     """
-    self._dynamic_attrs = []
+        self._dynamic_attrs = []
 
-    self._http = http
-    self._baseUrl = baseUrl
-    self._model = model
-    self._developerKey = developerKey
-    self._requestBuilder = requestBuilder
-    self._resourceDesc = resourceDesc
-    self._rootDesc = rootDesc
-    self._schema = schema
+        self._http = http
+        self._baseUrl = baseUrl
+        self._model = model
+        self._developerKey = developerKey
+        self._requestBuilder = requestBuilder
+        self._resourceDesc = resourceDesc
+        self._rootDesc = rootDesc
+        self._schema = schema
 
-    self._set_service_methods()
+        self._set_service_methods()
 
-  def _set_dynamic_attr(self, attr_name, value):
-    """Sets an instance attribute and tracks it in a list of dynamic attributes.
+    def _set_dynamic_attr(self, attr_name, value):
+        """Sets an instance attribute and tracks it in a list of dynamic attributes.
 
     Args:
       attr_name: string; The name of the attribute to be set
       value: The value being set on the object and tracked in the dynamic cache.
     """
-    self._dynamic_attrs.append(attr_name)
-    self.__dict__[attr_name] = value
+        self._dynamic_attrs.append(attr_name)
+        self.__dict__[attr_name] = value
 
-  def __getstate__(self):
-    """Trim the state down to something that can be pickled.
+    def __getstate__(self):
+        """Trim the state down to something that can be pickled.
 
     Uses the fact that the instance variable _dynamic_attrs holds attrs that
     will be wiped and restored on pickle serialization.
     """
-    state_dict = copy.copy(self.__dict__)
-    for dynamic_attr in self._dynamic_attrs:
-      del state_dict[dynamic_attr]
-    del state_dict['_dynamic_attrs']
-    return state_dict
+        state_dict = copy.copy(self.__dict__)
+        for dynamic_attr in self._dynamic_attrs:
+            del state_dict[dynamic_attr]
+        del state_dict["_dynamic_attrs"]
+        return state_dict
 
-  def __setstate__(self, state):
-    """Reconstitute the state of the object from being pickled.
+    def __setstate__(self, state):
+        """Reconstitute the state of the object from being pickled.
 
     Uses the fact that the instance variable _dynamic_attrs holds attrs that
     will be wiped and restored on pickle serialization.
     """
-    self.__dict__.update(state)
-    self._dynamic_attrs = []
-    self._set_service_methods()
+        self.__dict__.update(state)
+        self._dynamic_attrs = []
+        self._set_service_methods()
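
__getstate__ and __setstate__ exist so that a built service object can round-trip through pickle: the dynamically attached methods are dropped on serialization and rebuilt on load. A brief sketch (again assumes network access and credentials):

import pickle

from googleapiclient.discovery import build

service = build("drive", "v3")  # hypothetical setup
blob = pickle.dumps(service)    # dynamic attrs stripped by __getstate__
restored = pickle.loads(blob)   # __setstate__ re-runs _set_service_methods()
assert hasattr(restored, "files")
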
 
-  def _set_service_methods(self):
-    self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
-    self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
-    self._add_next_methods(self._resourceDesc, self._schema)
+    def _set_service_methods(self):
+        self._add_basic_methods(self._resourceDesc, self._rootDesc, self._schema)
+        self._add_nested_resources(self._resourceDesc, self._rootDesc, self._schema)
+        self._add_next_methods(self._resourceDesc, self._schema)
 
-  def _add_basic_methods(self, resourceDesc, rootDesc, schema):
-    # If this is the root Resource, add a new_batch_http_request() method.
-    if resourceDesc == rootDesc:
-      batch_uri = '%s%s' % (
-        rootDesc['rootUrl'], rootDesc.get('batchPath', 'batch'))
-      def new_batch_http_request(callback=None):
-        """Create a BatchHttpRequest object based on the discovery document.
+    def _add_basic_methods(self, resourceDesc, rootDesc, schema):
+        # If this is the root Resource, add a new_batch_http_request() method.
+        if resourceDesc == rootDesc:
+            batch_uri = "%s%s" % (
+                rootDesc["rootUrl"],
+                rootDesc.get("batchPath", "batch"),
+            )
+
+            def new_batch_http_request(callback=None):
+                """Create a BatchHttpRequest object based on the discovery document.
 
         Args:
           callback: callable, A callback to be called for each response, of the
@@ -1077,83 +1128,100 @@
         Returns:
           A BatchHttpRequest object based on the discovery document.
         """
-        return BatchHttpRequest(callback=callback, batch_uri=batch_uri)
-      self._set_dynamic_attr('new_batch_http_request', new_batch_http_request)
+                return BatchHttpRequest(callback=callback, batch_uri=batch_uri)
 
-    # Add basic methods to Resource
-    if 'methods' in resourceDesc:
-      for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
-        fixedMethodName, method = createMethod(
-            methodName, methodDesc, rootDesc, schema)
-        self._set_dynamic_attr(fixedMethodName,
-                               method.__get__(self, self.__class__))
-        # Add in _media methods. The functionality of the attached method will
-        # change when it sees that the method name ends in _media.
-        if methodDesc.get('supportsMediaDownload', False):
-          fixedMethodName, method = createMethod(
-              methodName + '_media', methodDesc, rootDesc, schema)
-          self._set_dynamic_attr(fixedMethodName,
-                                 method.__get__(self, self.__class__))
+            self._set_dynamic_attr("new_batch_http_request", new_batch_http_request)
 
-  def _add_nested_resources(self, resourceDesc, rootDesc, schema):
-    # Add in nested resources
-    if 'resources' in resourceDesc:
+        # Add basic methods to Resource
+        if "methods" in resourceDesc:
+            for methodName, methodDesc in six.iteritems(resourceDesc["methods"]):
+                fixedMethodName, method = createMethod(
+                    methodName, methodDesc, rootDesc, schema
+                )
+                self._set_dynamic_attr(
+                    fixedMethodName, method.__get__(self, self.__class__)
+                )
+                # Add in _media methods. The functionality of the attached method will
+                # change when it sees that the method name ends in _media.
+                if methodDesc.get("supportsMediaDownload", False):
+                    fixedMethodName, method = createMethod(
+                        methodName + "_media", methodDesc, rootDesc, schema
+                    )
+                    self._set_dynamic_attr(
+                        fixedMethodName, method.__get__(self, self.__class__)
+                    )
 
-      def createResourceMethod(methodName, methodDesc):
-        """Create a method on the Resource to access a nested Resource.
+    def _add_nested_resources(self, resourceDesc, rootDesc, schema):
+        # Add in nested resources
+        if "resources" in resourceDesc:
+
+            def createResourceMethod(methodName, methodDesc):
+                """Create a method on the Resource to access a nested Resource.
 
         Args:
           methodName: string, name of the method to use.
           methodDesc: object, fragment of deserialized discovery document that
             describes the method.
         """
-        methodName = fix_method_name(methodName)
+                methodName = fix_method_name(methodName)
 
-        def methodResource(self):
-          return Resource(http=self._http, baseUrl=self._baseUrl,
-                          model=self._model, developerKey=self._developerKey,
-                          requestBuilder=self._requestBuilder,
-                          resourceDesc=methodDesc, rootDesc=rootDesc,
-                          schema=schema)
+                def methodResource(self):
+                    return Resource(
+                        http=self._http,
+                        baseUrl=self._baseUrl,
+                        model=self._model,
+                        developerKey=self._developerKey,
+                        requestBuilder=self._requestBuilder,
+                        resourceDesc=methodDesc,
+                        rootDesc=rootDesc,
+                        schema=schema,
+                    )
 
-        setattr(methodResource, '__doc__', 'A collection resource.')
-        setattr(methodResource, '__is_resource__', True)
+                setattr(methodResource, "__doc__", "A collection resource.")
+                setattr(methodResource, "__is_resource__", True)
 
-        return (methodName, methodResource)
+                return (methodName, methodResource)
 
-      for methodName, methodDesc in six.iteritems(resourceDesc['resources']):
-        fixedMethodName, method = createResourceMethod(methodName, methodDesc)
-        self._set_dynamic_attr(fixedMethodName,
-                               method.__get__(self, self.__class__))
+            for methodName, methodDesc in six.iteritems(resourceDesc["resources"]):
+                fixedMethodName, method = createResourceMethod(methodName, methodDesc)
+                self._set_dynamic_attr(
+                    fixedMethodName, method.__get__(self, self.__class__)
+                )
 
-  def _add_next_methods(self, resourceDesc, schema):
-    # Add _next() methods if and only if one of the names 'pageToken' or
-    # 'nextPageToken' occurs among the fields of both the method's response
-    # type either the method's request (query parameters) or request body.
-    if 'methods' not in resourceDesc:
-      return
-    for methodName, methodDesc in six.iteritems(resourceDesc['methods']):
-      nextPageTokenName = _findPageTokenName(
-          _methodProperties(methodDesc, schema, 'response'))
-      if not nextPageTokenName:
-        continue
-      isPageTokenParameter = True
-      pageTokenName = _findPageTokenName(methodDesc.get('parameters', {}))
-      if not pageTokenName:
-        isPageTokenParameter = False
-        pageTokenName = _findPageTokenName(
-            _methodProperties(methodDesc, schema, 'request'))
-      if not pageTokenName:
-        continue
-      fixedMethodName, method = createNextMethod(
-          methodName + '_next', pageTokenName, nextPageTokenName,
-          isPageTokenParameter)
-      self._set_dynamic_attr(fixedMethodName,
-                             method.__get__(self, self.__class__))
+    def _add_next_methods(self, resourceDesc, schema):
+        # Add _next() methods if and only if one of the names 'pageToken' or
+        # 'nextPageToken' occurs among the fields of both the method's response
+        # type and either the method's request (query parameters) or request body.
+        if "methods" not in resourceDesc:
+            return
+        for methodName, methodDesc in six.iteritems(resourceDesc["methods"]):
+            nextPageTokenName = _findPageTokenName(
+                _methodProperties(methodDesc, schema, "response")
+            )
+            if not nextPageTokenName:
+                continue
+            isPageTokenParameter = True
+            pageTokenName = _findPageTokenName(methodDesc.get("parameters", {}))
+            if not pageTokenName:
+                isPageTokenParameter = False
+                pageTokenName = _findPageTokenName(
+                    _methodProperties(methodDesc, schema, "request")
+                )
+            if not pageTokenName:
+                continue
+            fixedMethodName, method = createNextMethod(
+                methodName + "_next",
+                pageTokenName,
+                nextPageTokenName,
+                isPageTokenParameter,
+            )
+            self._set_dynamic_attr(
+                fixedMethodName, method.__get__(self, self.__class__)
+            )
 
 
 def _findPageTokenName(fields):
-  """Search field names for one like a page token.
+    """Search field names for one like a page token.
 
   Args:
     fields: container of string, names of fields.
@@ -1162,11 +1230,13 @@
     First name that is either 'pageToken' or 'nextPageToken' if one exists,
     otherwise None.
   """
-  return next((tokenName for tokenName in _PAGE_TOKEN_NAMES
-              if tokenName in fields), None)
+    return next(
+        (tokenName for tokenName in _PAGE_TOKEN_NAMES if tokenName in fields), None
+    )
+
 
 def _methodProperties(methodDesc, schema, name):
-  """Get properties of a field in a method description.
+    """Get properties of a field in a method description.
 
   Args:
     methodDesc: object, fragment of deserialized discovery document that
@@ -1179,7 +1249,7 @@
     corresponding to 'properties' field of object corresponding to named field
     in method description, if it exists, otherwise empty dict.
   """
-  desc = methodDesc.get(name, {})
-  if '$ref' in desc:
-    desc = schema.get(desc['$ref'], {})
-  return desc.get('properties', {})
+    desc = methodDesc.get(name, {})
+    if "$ref" in desc:
+        desc = schema.get(desc["$ref"], {})
+    return desc.get("properties", {})
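
Taken together, _methodProperties and _findPageTokenName decide whether a method gets a *_next companion; a standalone sketch copying the two helpers, with a plain tuple standing in for the module's _PAGE_TOKEN_NAMES constant and a plain dict standing in for the Schemas object:

_PAGE_TOKEN_NAMES = ("pageToken", "nextPageToken")


def _findPageTokenName(fields):
    return next(
        (tokenName for tokenName in _PAGE_TOKEN_NAMES if tokenName in fields), None
    )


def _methodProperties(methodDesc, schema, name):
    desc = methodDesc.get(name, {})
    if "$ref" in desc:
        desc = schema.get(desc["$ref"], {})
    return desc.get("properties", {})


schema = {"FileList": {"properties": {"files": {}, "nextPageToken": {"type": "string"}}}}
method = {"response": {"$ref": "FileList"}, "parameters": {"pageToken": {"type": "string"}}}

assert _findPageTokenName(_methodProperties(method, schema, "response")) == "nextPageToken"
assert _findPageTokenName(method["parameters"]) == "pageToken"
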
diff --git a/googleapiclient/discovery_cache/__init__.py b/googleapiclient/discovery_cache/__init__.py
index f86a06d..3e4e9a5 100644
--- a/googleapiclient/discovery_cache/__init__.py
+++ b/googleapiclient/discovery_cache/__init__.py
@@ -26,20 +26,22 @@
 
 
 def autodetect():
-  """Detects an appropriate cache module and returns it.
+    """Detects an appropriate cache module and returns it.
 
   Returns:
     googleapiclient.discovery_cache.base.Cache, a cache object which
     is auto detected, or None if no cache object is available.
   """
-  try:
-    from google.appengine.api import memcache
-    from . import appengine_memcache
-    return appengine_memcache.cache
-  except Exception:
     try:
-      from . import file_cache
-      return file_cache.cache
-    except Exception as e:
-      LOGGER.warning(e, exc_info=True)
-      return None
+        from google.appengine.api import memcache
+        from . import appengine_memcache
+
+        return appengine_memcache.cache
+    except Exception:
+        try:
+            from . import file_cache
+
+            return file_cache.cache
+        except Exception as e:
+            LOGGER.warning(e, exc_info=True)
+            return None
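
autodetect() picks the App Engine memcache backend when available and otherwise falls back to the file cache; both implement the get/set interface defined in base.py. A small usage sketch:

from googleapiclient.discovery_cache import autodetect

cache = autodetect()  # file_cache on a plain install; None if no backend is importable
if cache is not None:
    cache.set("https://example.invalid/discovery", '{"kind": "discovery#restDescription"}')
    print(cache.get("https://example.invalid/discovery"))
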
diff --git a/googleapiclient/discovery_cache/appengine_memcache.py b/googleapiclient/discovery_cache/appengine_memcache.py
index 7e43e66..1d18d7a 100644
--- a/googleapiclient/discovery_cache/appengine_memcache.py
+++ b/googleapiclient/discovery_cache/appengine_memcache.py
@@ -26,30 +26,31 @@
 
 LOGGER = logging.getLogger(__name__)
 
-NAMESPACE = 'google-api-client'
+NAMESPACE = "google-api-client"
 
 
 class Cache(base.Cache):
-  """A cache with app engine memcache API."""
+    """A cache with app engine memcache API."""
 
-  def __init__(self, max_age):
-      """Constructor.
+    def __init__(self, max_age):
+        """Constructor.
 
       Args:
         max_age: Cache expiration in seconds.
       """
-      self._max_age = max_age
+        self._max_age = max_age
 
-  def get(self, url):
-    try:
-      return memcache.get(url, namespace=NAMESPACE)
-    except Exception as e:
-      LOGGER.warning(e, exc_info=True)
+    def get(self, url):
+        try:
+            return memcache.get(url, namespace=NAMESPACE)
+        except Exception as e:
+            LOGGER.warning(e, exc_info=True)
 
-  def set(self, url, content):
-    try:
-      memcache.set(url, content, time=int(self._max_age), namespace=NAMESPACE)
-    except Exception as e:
-      LOGGER.warning(e, exc_info=True)
+    def set(self, url, content):
+        try:
+            memcache.set(url, content, time=int(self._max_age), namespace=NAMESPACE)
+        except Exception as e:
+            LOGGER.warning(e, exc_info=True)
+
 
 cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
diff --git a/googleapiclient/discovery_cache/base.py b/googleapiclient/discovery_cache/base.py
index 00e466d..fbe4459 100644
--- a/googleapiclient/discovery_cache/base.py
+++ b/googleapiclient/discovery_cache/base.py
@@ -18,12 +18,13 @@
 
 
 class Cache(object):
-  """A base abstract cache class."""
-  __metaclass__ = abc.ABCMeta
+    """A base abstract cache class."""
 
-  @abc.abstractmethod
-  def get(self, url):
-    """Gets the content from the memcache with a given key.
+    __metaclass__ = abc.ABCMeta
+
+    @abc.abstractmethod
+    def get(self, url):
+        """Gets the content from the memcache with a given key.
 
     Args:
       url: string, the key for the cache.
@@ -32,14 +33,14 @@
       object, the value in the cache for the given key, or None if the key is
       not in the cache.
     """
-    raise NotImplementedError()
+        raise NotImplementedError()
 
-  @abc.abstractmethod
-  def set(self, url, content):
-    """Sets the given key and content in the cache.
+    @abc.abstractmethod
+    def set(self, url, content):
+        """Sets the given key and content in the cache.
 
     Args:
       url: string, the key for the cache.
       content: string, the discovery document.
     """
-    raise NotImplementedError()
+        raise NotImplementedError()
diff --git a/googleapiclient/discovery_cache/file_cache.py b/googleapiclient/discovery_cache/file_cache.py
index 48bddea..36eb29a 100644
--- a/googleapiclient/discovery_cache/file_cache.py
+++ b/googleapiclient/discovery_cache/file_cache.py
@@ -30,112 +30,117 @@
 import threading
 
 try:
-  from oauth2client.contrib.locked_file import LockedFile
+    from oauth2client.contrib.locked_file import LockedFile
 except ImportError:
-  # oauth2client < 2.0.0
-  try:
-    from oauth2client.locked_file import LockedFile
-  except ImportError:
-    # oauth2client > 4.0.0 or google-auth
-    raise ImportError(
-      'file_cache is unavailable when using oauth2client >= 4.0.0 or google-auth')
+    # oauth2client < 2.0.0
+    try:
+        from oauth2client.locked_file import LockedFile
+    except ImportError:
+        # oauth2client > 4.0.0 or google-auth
+        raise ImportError(
+            "file_cache is unavailable when using oauth2client >= 4.0.0 or google-auth"
+        )
 
 from . import base
 from ..discovery_cache import DISCOVERY_DOC_MAX_AGE
 
 LOGGER = logging.getLogger(__name__)
 
-FILENAME = 'google-api-python-client-discovery-doc.cache'
+FILENAME = "google-api-python-client-discovery-doc.cache"
 EPOCH = datetime.datetime.utcfromtimestamp(0)
 
 
 def _to_timestamp(date):
-  try:
-    return (date - EPOCH).total_seconds()
-  except AttributeError:
-    # The following is the equivalent of total_seconds() in Python2.6.
-    # See also: https://docs.python.org/2/library/datetime.html
-    delta = date - EPOCH
-    return ((delta.microseconds + (delta.seconds + delta.days * 24 * 3600)
-             * 10**6) / 10**6)
+    try:
+        return (date - EPOCH).total_seconds()
+    except AttributeError:
+        # The following is the equivalent of total_seconds() in Python2.6.
+        # See also: https://docs.python.org/2/library/datetime.html
+        delta = date - EPOCH
+        return (
+            delta.microseconds + (delta.seconds + delta.days * 24 * 3600) * 10 ** 6
+        ) / 10 ** 6
 
 
 def _read_or_initialize_cache(f):
-  f.file_handle().seek(0)
-  try:
-    cache = json.load(f.file_handle())
-  except Exception:
-    # This means it opens the file for the first time, or the cache is
-    # corrupted, so initializing the file with an empty dict.
-    cache = {}
-    f.file_handle().truncate(0)
     f.file_handle().seek(0)
-    json.dump(cache, f.file_handle())
-  return cache
+    try:
+        cache = json.load(f.file_handle())
+    except Exception:
+        # This means the file is being opened for the first time, or the cache
+        # is corrupted, so initialize it with an empty dict.
+        cache = {}
+        f.file_handle().truncate(0)
+        f.file_handle().seek(0)
+        json.dump(cache, f.file_handle())
+    return cache
 
 
 class Cache(base.Cache):
-  """A file based cache for the discovery documents."""
+    """A file based cache for the discovery documents."""
 
-  def __init__(self, max_age):
-      """Constructor.
+    def __init__(self, max_age):
+        """Constructor.
 
       Args:
         max_age: Cache expiration in seconds.
       """
-      self._max_age = max_age
-      self._file = os.path.join(tempfile.gettempdir(), FILENAME)
-      f = LockedFile(self._file, 'a+', 'r')
-      try:
-        f.open_and_lock()
-        if f.is_locked():
-          _read_or_initialize_cache(f)
-        # If we can not obtain the lock, other process or thread must
-        # have initialized the file.
-      except Exception as e:
-        LOGGER.warning(e, exc_info=True)
-      finally:
-        f.unlock_and_close()
+        self._max_age = max_age
+        self._file = os.path.join(tempfile.gettempdir(), FILENAME)
+        f = LockedFile(self._file, "a+", "r")
+        try:
+            f.open_and_lock()
+            if f.is_locked():
+                _read_or_initialize_cache(f)
+            # If we cannot obtain the lock, another process or thread must
+            # have initialized the file.
+        except Exception as e:
+            LOGGER.warning(e, exc_info=True)
+        finally:
+            f.unlock_and_close()
 
-  def get(self, url):
-    f = LockedFile(self._file, 'r+', 'r')
-    try:
-      f.open_and_lock()
-      if f.is_locked():
-        cache = _read_or_initialize_cache(f)
-        if url in cache:
-          content, t = cache.get(url, (None, 0))
-          if _to_timestamp(datetime.datetime.now()) < t + self._max_age:
-            return content
-        return None
-      else:
-        LOGGER.debug('Could not obtain a lock for the cache file.')
-        return None
-    except Exception as e:
-      LOGGER.warning(e, exc_info=True)
-    finally:
-      f.unlock_and_close()
+    def get(self, url):
+        f = LockedFile(self._file, "r+", "r")
+        try:
+            f.open_and_lock()
+            if f.is_locked():
+                cache = _read_or_initialize_cache(f)
+                if url in cache:
+                    content, t = cache.get(url, (None, 0))
+                    if _to_timestamp(datetime.datetime.now()) < t + self._max_age:
+                        return content
+                return None
+            else:
+                LOGGER.debug("Could not obtain a lock for the cache file.")
+                return None
+        except Exception as e:
+            LOGGER.warning(e, exc_info=True)
+        finally:
+            f.unlock_and_close()
 
-  def set(self, url, content):
-    f = LockedFile(self._file, 'r+', 'r')
-    try:
-      f.open_and_lock()
-      if f.is_locked():
-        cache = _read_or_initialize_cache(f)
-        cache[url] = (content, _to_timestamp(datetime.datetime.now()))
-        # Remove stale cache.
-        for k, (_, timestamp) in list(cache.items()):
-          if _to_timestamp(datetime.datetime.now()) >= timestamp + self._max_age:
-            del cache[k]
-        f.file_handle().truncate(0)
-        f.file_handle().seek(0)
-        json.dump(cache, f.file_handle())
-      else:
-        LOGGER.debug('Could not obtain a lock for the cache file.')
-    except Exception as e:
-      LOGGER.warning(e, exc_info=True)
-    finally:
-      f.unlock_and_close()
+    def set(self, url, content):
+        f = LockedFile(self._file, "r+", "r")
+        try:
+            f.open_and_lock()
+            if f.is_locked():
+                cache = _read_or_initialize_cache(f)
+                cache[url] = (content, _to_timestamp(datetime.datetime.now()))
+                # Remove stale cache.
+                for k, (_, timestamp) in list(cache.items()):
+                    if (
+                        _to_timestamp(datetime.datetime.now())
+                        >= timestamp + self._max_age
+                    ):
+                        del cache[k]
+                f.file_handle().truncate(0)
+                f.file_handle().seek(0)
+                json.dump(cache, f.file_handle())
+            else:
+                LOGGER.debug("Could not obtain a lock for the cache file.")
+        except Exception as e:
+            LOGGER.warning(e, exc_info=True)
+        finally:
+            f.unlock_and_close()
 
 
 cache = Cache(max_age=DISCOVERY_DOC_MAX_AGE)
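
The freshness check in get() above reduces to a seconds-since-epoch comparison; a standalone sketch of that arithmetic with a hypothetical one-day max_age:

import datetime

EPOCH = datetime.datetime.utcfromtimestamp(0)


def _to_timestamp(date):
    """Copy of the helper above (Python 2.6 fallback omitted)."""
    return (date - EPOCH).total_seconds()


max_age = 24 * 60 * 60  # hypothetical value; the library uses DISCOVERY_DOC_MAX_AGE
stored_at = _to_timestamp(datetime.datetime.now()) - 3600  # cached an hour ago
now = _to_timestamp(datetime.datetime.now())
assert now < stored_at + max_age  # still fresh, so get() would return the cached content
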
diff --git a/googleapiclient/errors.py b/googleapiclient/errors.py
index 442c213..64853a4 100644
--- a/googleapiclient/errors.py
+++ b/googleapiclient/errors.py
@@ -19,7 +19,7 @@
 """
 from __future__ import absolute_import
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 import json
 
@@ -27,133 +27,154 @@
 
 
 class Error(Exception):
-  """Base error for this module."""
-  pass
+    """Base error for this module."""
+
+    pass
 
 
 class HttpError(Error):
-  """HTTP data was invalid or unexpected."""
+    """HTTP data was invalid or unexpected."""
 
-  @util.positional(3)
-  def __init__(self, resp, content, uri=None):
-    self.resp = resp
-    if not isinstance(content, bytes):
-        raise TypeError("HTTP content should be bytes")
-    self.content = content
-    self.uri = uri
-    self.error_details = ''
+    @util.positional(3)
+    def __init__(self, resp, content, uri=None):
+        self.resp = resp
+        if not isinstance(content, bytes):
+            raise TypeError("HTTP content should be bytes")
+        self.content = content
+        self.uri = uri
+        self.error_details = ""
 
-  def _get_reason(self):
-    """Calculate the reason for the error from the response content."""
-    reason = self.resp.reason
-    try:
-      data = json.loads(self.content.decode('utf-8'))
-      if isinstance(data, dict):
-        reason = data['error']['message']
-        if 'details' in data['error']:
-            self.error_details = data['error']['details']
-        elif 'detail' in data['error']:
-            self.error_details = data['error']['detail']
-      elif isinstance(data, list) and len(data) > 0:
-        first_error = data[0]
-        reason = first_error['error']['message']
-        if 'details' in first_error['error']:
-            self.error_details = first_error['error']['details']
-    except (ValueError, KeyError, TypeError):
-      pass
-    if reason is None:
-      reason = ''
-    return reason
+    def _get_reason(self):
+        """Calculate the reason for the error from the response content."""
+        reason = self.resp.reason
+        try:
+            data = json.loads(self.content.decode("utf-8"))
+            if isinstance(data, dict):
+                reason = data["error"]["message"]
+                if "details" in data["error"]:
+                    self.error_details = data["error"]["details"]
+                elif "detail" in data["error"]:
+                    self.error_details = data["error"]["detail"]
+            elif isinstance(data, list) and len(data) > 0:
+                first_error = data[0]
+                reason = first_error["error"]["message"]
+                if "details" in first_error["error"]:
+                    self.error_details = first_error["error"]["details"]
+        except (ValueError, KeyError, TypeError):
+            pass
+        if reason is None:
+            reason = ""
+        return reason
 
-  def __repr__(self):
-    reason = self._get_reason()
-    if self.error_details:
-      return '<HttpError %s when requesting %s returned "%s". Details: "%s">' % \
-             (self.resp.status, self.uri, reason.strip(), self.error_details)
-    elif self.uri:
-      return '<HttpError %s when requesting %s returned "%s">' % (
-          self.resp.status, self.uri, self._get_reason().strip())
-    else:
-      return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
+    def __repr__(self):
+        reason = self._get_reason()
+        if self.error_details:
+            return '<HttpError %s when requesting %s returned "%s". Details: "%s">' % (
+                self.resp.status,
+                self.uri,
+                reason.strip(),
+                self.error_details,
+            )
+        elif self.uri:
+            return '<HttpError %s when requesting %s returned "%s">' % (
+                self.resp.status,
+                self.uri,
+                self._get_reason().strip(),
+            )
+        else:
+            return '<HttpError %s "%s">' % (self.resp.status, self._get_reason())
 
-  __str__ = __repr__
+    __str__ = __repr__
 
 
 class InvalidJsonError(Error):
-  """The JSON returned could not be parsed."""
-  pass
+    """The JSON returned could not be parsed."""
+
+    pass
 
 
 class UnknownFileType(Error):
-  """File type unknown or unexpected."""
-  pass
+    """File type unknown or unexpected."""
+
+    pass
 
 
 class UnknownLinkType(Error):
-  """Link type unknown or unexpected."""
-  pass
+    """Link type unknown or unexpected."""
+
+    pass
 
 
 class UnknownApiNameOrVersion(Error):
-  """No API with that name and version exists."""
-  pass
+    """No API with that name and version exists."""
+
+    pass
 
 
 class UnacceptableMimeTypeError(Error):
-  """That is an unacceptable mimetype for this operation."""
-  pass
+    """That is an unacceptable mimetype for this operation."""
+
+    pass
 
 
 class MediaUploadSizeError(Error):
-  """Media is larger than the method can accept."""
-  pass
+    """Media is larger than the method can accept."""
+
+    pass
 
 
 class ResumableUploadError(HttpError):
-  """Error occured during resumable upload."""
-  pass
+    """Error occured during resumable upload."""
+
+    pass
 
 
 class InvalidChunkSizeError(Error):
-  """The given chunksize is not valid."""
-  pass
+    """The given chunksize is not valid."""
+
+    pass
+
 
 class InvalidNotificationError(Error):
-  """The channel Notification is invalid."""
-  pass
+    """The channel Notification is invalid."""
+
+    pass
+
 
 class BatchError(HttpError):
-  """Error occured during batch operations."""
+    """Error occured during batch operations."""
 
-  @util.positional(2)
-  def __init__(self, reason, resp=None, content=None):
-    self.resp = resp
-    self.content = content
-    self.reason = reason
+    @util.positional(2)
+    def __init__(self, reason, resp=None, content=None):
+        self.resp = resp
+        self.content = content
+        self.reason = reason
 
-  def __repr__(self):
-    if getattr(self.resp, 'status', None) is None:
-      return '<BatchError "%s">' % (self.reason)
-    else:
-      return '<BatchError %s "%s">' % (self.resp.status, self.reason)
+    def __repr__(self):
+        if getattr(self.resp, "status", None) is None:
+            return '<BatchError "%s">' % (self.reason)
+        else:
+            return '<BatchError %s "%s">' % (self.resp.status, self.reason)
 
-  __str__ = __repr__
+    __str__ = __repr__
 
 
 class UnexpectedMethodError(Error):
-  """Exception raised by RequestMockBuilder on unexpected calls."""
+    """Exception raised by RequestMockBuilder on unexpected calls."""
 
-  @util.positional(1)
-  def __init__(self, methodId=None):
-    """Constructor for an UnexpectedMethodError."""
-    super(UnexpectedMethodError, self).__init__(
-        'Received unexpected call %s' % methodId)
+    @util.positional(1)
+    def __init__(self, methodId=None):
+        """Constructor for an UnexpectedMethodError."""
+        super(UnexpectedMethodError, self).__init__(
+            "Received unexpected call %s" % methodId
+        )
 
 
 class UnexpectedBodyError(Error):
-  """Exception raised by RequestMockBuilder on unexpected bodies."""
+    """Exception raised by RequestMockBuilder on unexpected bodies."""
 
-  def __init__(self, expected, provided):
-    """Constructor for an UnexpectedMethodError."""
-    super(UnexpectedBodyError, self).__init__(
-        'Expected: [%s] - Provided: [%s]' % (expected, provided))
+    def __init__(self, expected, provided):
+        """Constructor for an UnexpectedMethodError."""
+        super(UnexpectedBodyError, self).__init__(
+            "Expected: [%s] - Provided: [%s]" % (expected, provided)
+        )
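A minimal sketch of how HttpError above turns a JSON error body into its reason and error_details; FakeResponse stands in for an httplib2.Response, and the URI and body are made up:

import json
from collections import namedtuple

from googleapiclient.errors import HttpError

FakeResponse = namedtuple("FakeResponse", ["status", "reason"])

body = json.dumps(
    {"error": {"message": "File not found", "details": "no file with that id"}}
).encode("utf-8")

err = HttpError(FakeResponse(404, "Not Found"), body, uri="https://example.invalid/files/abc")

# __repr__ (and therefore str()) calls _get_reason(), which also fills in
# error_details from the parsed body.
print(err)                # <HttpError 404 when requesting ... returned "File not found". Details: "no file with that id">
print(err.error_details)  # no file with that id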
diff --git a/googleapiclient/http.py b/googleapiclient/http.py
index 9733aa3..5b5ea15 100644
--- a/googleapiclient/http.py
+++ b/googleapiclient/http.py
@@ -23,7 +23,7 @@
 from six.moves import http_client
 from six.moves import range
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 from six import BytesIO, StringIO
 from six.moves.urllib.parse import urlparse, urlunparse, quote, unquote
@@ -44,11 +44,11 @@
 
 # TODO(issue 221): Remove this conditional import jibbajabba.
 try:
-  import ssl
+    import ssl
 except ImportError:
-  _ssl_SSLError = object()
+    _ssl_SSLError = object()
 else:
-  _ssl_SSLError = ssl.SSLError
+    _ssl_SSLError = ssl.SSLError
 
 from email.generator import Generator
 from email.mime.multipart import MIMEMultipart
@@ -69,7 +69,7 @@
 
 LOGGER = logging.getLogger(__name__)
 
-DEFAULT_CHUNK_SIZE = 100*1024*1024
+DEFAULT_CHUNK_SIZE = 100 * 1024 * 1024
 
 MAX_URI_LENGTH = 2048
 
@@ -79,11 +79,11 @@
 
 DEFAULT_HTTP_TIMEOUT_SEC = 60
 
-_LEGACY_BATCH_URI = 'https://www.googleapis.com/batch'
+_LEGACY_BATCH_URI = "https://www.googleapis.com/batch"
 
 
 def _should_retry_response(resp_status, content):
-  """Determines whether a response should be retried.
+    """Determines whether a response should be retried.
 
   Args:
     resp_status: The response status received.
@@ -92,45 +92,46 @@
   Returns:
     True if the response should be retried, otherwise False.
   """
-  # Retry on 5xx errors.
-  if resp_status >= 500:
-    return True
+    # Retry on 5xx errors.
+    if resp_status >= 500:
+        return True
 
-  # Retry on 429 errors.
-  if resp_status == _TOO_MANY_REQUESTS:
-    return True
+    # Retry on 429 errors.
+    if resp_status == _TOO_MANY_REQUESTS:
+        return True
 
-  # For 403 errors, we have to check for the `reason` in the response to
-  # determine if we should retry.
-  if resp_status == six.moves.http_client.FORBIDDEN:
-    # If there's no details about the 403 type, don't retry.
-    if not content:
-      return False
+    # For 403 errors, we have to check for the `reason` in the response to
+    # determine if we should retry.
+    if resp_status == six.moves.http_client.FORBIDDEN:
+        # If there's no details about the 403 type, don't retry.
+        if not content:
+            return False
 
-    # Content is in JSON format.
-    try:
-      data = json.loads(content.decode('utf-8'))
-      if isinstance(data, dict):
-        reason = data['error']['errors'][0]['reason']
-      else:
-        reason = data[0]['error']['errors']['reason']
-    except (UnicodeDecodeError, ValueError, KeyError):
-      LOGGER.warning('Invalid JSON content from response: %s', content)
-      return False
+        # Content is in JSON format.
+        try:
+            data = json.loads(content.decode("utf-8"))
+            if isinstance(data, dict):
+                reason = data["error"]["errors"][0]["reason"]
+            else:
+                reason = data[0]["error"]["errors"]["reason"]
+        except (UnicodeDecodeError, ValueError, KeyError):
+            LOGGER.warning("Invalid JSON content from response: %s", content)
+            return False
 
-    LOGGER.warning('Encountered 403 Forbidden with reason "%s"', reason)
+        LOGGER.warning('Encountered 403 Forbidden with reason "%s"', reason)
 
-    # Only retry on rate limit related failures.
-    if reason in ('userRateLimitExceeded', 'rateLimitExceeded', ):
-      return True
+        # Only retry on rate limit related failures.
+        if reason in ("userRateLimitExceeded", "rateLimitExceeded"):
+            return True
 
-  # Everything else is a success or non-retriable so break.
-  return False
+    # Everything else is a success or non-retriable so break.
+    return False
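The retry policy above can be exercised directly (the function is private but importable; the 403 body below is fabricated to match the shape the parser expects):

import json

from googleapiclient.http import _should_retry_response

rate_limited = json.dumps(
    {"error": {"errors": [{"reason": "userRateLimitExceeded"}]}}
).encode("utf-8")

assert _should_retry_response(500, b"") is True           # all 5xx responses are retried
assert _should_retry_response(429, b"") is True           # 429 Too Many Requests is retried
assert _should_retry_response(403, rate_limited) is True  # rate-limit 403s are retried
assert _should_retry_response(403, b"") is False          # 403 without details is not
assert _should_retry_response(404, b"") is False          # other client errors are not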
 
 
-def _retry_request(http, num_retries, req_type, sleep, rand, uri, method, *args,
-                   **kwargs):
-  """Retries an HTTP request multiple times while handling errors.
+def _retry_request(
+    http, num_retries, req_type, sleep, rand, uri, method, *args, **kwargs
+):
+    """Retries an HTTP request multiple times while handling errors.
 
   If after all retries the request still fails, last error is either returned as
   return value (for HTTP 5xx errors) or thrown (for ssl.SSLError).
@@ -147,105 +148,115 @@
   Returns:
     resp, content - Response from the http request (may be HTTP 5xx).
   """
-  resp = None
-  content = None
-  exception = None
-  for retry_num in range(num_retries + 1):
-    if retry_num > 0:
-      # Sleep before retrying.
-      sleep_time = rand() * 2 ** retry_num
-      LOGGER.warning(
-          'Sleeping %.2f seconds before retry %d of %d for %s: %s %s, after %s',
-          sleep_time, retry_num, num_retries, req_type, method, uri,
-          resp.status if resp else exception)
-      sleep(sleep_time)
+    resp = None
+    content = None
+    exception = None
+    for retry_num in range(num_retries + 1):
+        if retry_num > 0:
+            # Sleep before retrying.
+            sleep_time = rand() * 2 ** retry_num
+            LOGGER.warning(
+                "Sleeping %.2f seconds before retry %d of %d for %s: %s %s, after %s",
+                sleep_time,
+                retry_num,
+                num_retries,
+                req_type,
+                method,
+                uri,
+                resp.status if resp else exception,
+            )
+            sleep(sleep_time)
 
-    try:
-      exception = None
-      resp, content = http.request(uri, method, *args, **kwargs)
-    # Retry on SSL errors and socket timeout errors.
-    except _ssl_SSLError as ssl_error:
-      exception = ssl_error
-    except socket.timeout as socket_timeout:
-      # It's important that this be before socket.error as it's a subclass
-      # socket.timeout has no errorcode
-      exception = socket_timeout
-    except socket.error as socket_error:
-      # errno's contents differ by platform, so we have to match by name.
-      if socket.errno.errorcode.get(socket_error.errno) not in {
-        'WSAETIMEDOUT', 'ETIMEDOUT', 'EPIPE', 'ECONNABORTED'}:
-        raise
-      exception = socket_error
-    except httplib2.ServerNotFoundError as server_not_found_error:
-      exception = server_not_found_error
+        try:
+            exception = None
+            resp, content = http.request(uri, method, *args, **kwargs)
+        # Retry on SSL errors and socket timeout errors.
+        except _ssl_SSLError as ssl_error:
+            exception = ssl_error
+        except socket.timeout as socket_timeout:
+            # It's important that this be before socket.error as it's a subclass
+            # socket.timeout has no errorcode
+            exception = socket_timeout
+        except socket.error as socket_error:
+            # errno's contents differ by platform, so we have to match by name.
+            if socket.errno.errorcode.get(socket_error.errno) not in {
+                "WSAETIMEDOUT",
+                "ETIMEDOUT",
+                "EPIPE",
+                "ECONNABORTED",
+            }:
+                raise
+            exception = socket_error
+        except httplib2.ServerNotFoundError as server_not_found_error:
+            exception = server_not_found_error
 
-    if exception:
-      if retry_num == num_retries:
-        raise exception
-      else:
-        continue
+        if exception:
+            if retry_num == num_retries:
+                raise exception
+            else:
+                continue
 
-    if not _should_retry_response(resp.status, content):
-      break
+        if not _should_retry_response(resp.status, content):
+            break
 
-  return resp, content
+    return resp, content
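The backoff schedule used between retries above, in isolation: the sleep before retry n is rand() * 2 ** n, a uniformly random delay inside an exponentially growing window (a sketch; the helper name is illustrative):

import random

def backoff_seconds(retry_num, rand=random.random):
    # Mirrors the sleep_time computation in _retry_request above.
    return rand() * 2 ** retry_num

# With the jitter pinned to 0.5 the doubling window is easy to see: 1.0 2.0 4.0 8.0
print([backoff_seconds(n, rand=lambda: 0.5) for n in range(1, 5)])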
 
 
 class MediaUploadProgress(object):
-  """Status of a resumable upload."""
+    """Status of a resumable upload."""
 
-  def __init__(self, resumable_progress, total_size):
-    """Constructor.
+    def __init__(self, resumable_progress, total_size):
+        """Constructor.
 
     Args:
       resumable_progress: int, bytes sent so far.
       total_size: int, total bytes in complete upload, or None if the total
         upload size isn't known ahead of time.
     """
-    self.resumable_progress = resumable_progress
-    self.total_size = total_size
+        self.resumable_progress = resumable_progress
+        self.total_size = total_size
 
-  def progress(self):
-    """Percent of upload completed, as a float.
+    def progress(self):
+        """Percent of upload completed, as a float.
 
     Returns:
       the percentage complete as a float, returning 0.0 if the total size of
       the upload is unknown.
     """
-    if self.total_size is not None and self.total_size != 0:
-      return float(self.resumable_progress) / float(self.total_size)
-    else:
-      return 0.0
+        if self.total_size is not None and self.total_size != 0:
+            return float(self.resumable_progress) / float(self.total_size)
+        else:
+            return 0.0
 
 
 class MediaDownloadProgress(object):
-  """Status of a resumable download."""
+    """Status of a resumable download."""
 
-  def __init__(self, resumable_progress, total_size):
-    """Constructor.
+    def __init__(self, resumable_progress, total_size):
+        """Constructor.
 
     Args:
       resumable_progress: int, bytes received so far.
       total_size: int, total bytes in complete download.
     """
-    self.resumable_progress = resumable_progress
-    self.total_size = total_size
+        self.resumable_progress = resumable_progress
+        self.total_size = total_size
 
-  def progress(self):
-    """Percent of download completed, as a float.
+    def progress(self):
+        """Percent of download completed, as a float.
 
     Returns:
       the percentage complete as a float, returning 0.0 if the total size of
       the download is unknown.
     """
-    if self.total_size is not None and self.total_size != 0:
-      return float(self.resumable_progress) / float(self.total_size)
-    else:
-      return 0.0
+        if self.total_size is not None and self.total_size != 0:
+            return float(self.resumable_progress) / float(self.total_size)
+        else:
+            return 0.0
 
 
 class MediaUpload(object):
-  """Describes a media object to upload.
+    """Describes a media object to upload.
 
   Base class that defines the interface of MediaUpload subclasses.
 
@@ -271,40 +282,40 @@
   needs.
   """
 
-  def chunksize(self):
-    """Chunk size for resumable uploads.
+    def chunksize(self):
+        """Chunk size for resumable uploads.
 
     Returns:
       Chunk size in bytes.
     """
-    raise NotImplementedError()
+        raise NotImplementedError()
 
-  def mimetype(self):
-    """Mime type of the body.
+    def mimetype(self):
+        """Mime type of the body.
 
     Returns:
       Mime type.
     """
-    return 'application/octet-stream'
+        return "application/octet-stream"
 
-  def size(self):
-    """Size of upload.
+    def size(self):
+        """Size of upload.
 
     Returns:
       Size of the body, or None of the size is unknown.
     """
-    return None
+        return None
 
-  def resumable(self):
-    """Whether this upload is resumable.
+    def resumable(self):
+        """Whether this upload is resumable.
 
     Returns:
       True if resumable upload or False.
     """
-    return False
+        return False
 
-  def getbytes(self, begin, end):
-    """Get bytes from the media.
+    def getbytes(self, begin, end):
+        """Get bytes from the media.
 
     Args:
       begin: int, offset from beginning of file.
@@ -314,10 +325,10 @@
       A string of bytes read. May be shorter than length if EOF was reached
       first.
     """
-    raise NotImplementedError()
+        raise NotImplementedError()
 
-  def has_stream(self):
-    """Does the underlying upload support a streaming interface.
+    def has_stream(self):
+        """Does the underlying upload support a streaming interface.
 
     Streaming means it is an io.IOBase subclass that supports seek, i.e.
     seekable() returns True.
@@ -326,20 +337,20 @@
       True if the call to stream() will return an instance of a seekable io.Base
       subclass.
     """
-    return False
+        return False
 
-  def stream(self):
-    """A stream interface to the data being uploaded.
+    def stream(self):
+        """A stream interface to the data being uploaded.
 
     Returns:
       The returned value is an io.IOBase subclass that supports seek, i.e.
       seekable() returns True.
     """
-    raise NotImplementedError()
+        raise NotImplementedError()
 
-  @util.positional(1)
-  def _to_json(self, strip=None):
-    """Utility function for creating a JSON representation of a MediaUpload.
+    @util.positional(1)
+    def _to_json(self, strip=None):
+        """Utility function for creating a JSON representation of a MediaUpload.
 
     Args:
       strip: array, An array of names of members to not include in the JSON.
@@ -348,27 +359,27 @@
        string, a JSON representation of this instance, suitable to pass to
        from_json().
     """
-    t = type(self)
-    d = copy.copy(self.__dict__)
-    if strip is not None:
-      for member in strip:
-        del d[member]
-    d['_class'] = t.__name__
-    d['_module'] = t.__module__
-    return json.dumps(d)
+        t = type(self)
+        d = copy.copy(self.__dict__)
+        if strip is not None:
+            for member in strip:
+                del d[member]
+        d["_class"] = t.__name__
+        d["_module"] = t.__module__
+        return json.dumps(d)
 
-  def to_json(self):
-    """Create a JSON representation of an instance of MediaUpload.
+    def to_json(self):
+        """Create a JSON representation of an instance of MediaUpload.
 
     Returns:
        string, a JSON representation of this instance, suitable to pass to
        from_json().
     """
-    return self._to_json()
+        return self._to_json()
 
-  @classmethod
-  def new_from_json(cls, s):
-    """Utility class method to instantiate a MediaUpload subclass from a JSON
+    @classmethod
+    def new_from_json(cls, s):
+        """Utility class method to instantiate a MediaUpload subclass from a JSON
     representation produced by to_json().
 
     Args:
@@ -378,17 +389,17 @@
       An instance of the subclass of MediaUpload that was serialized with
       to_json().
     """
-    data = json.loads(s)
-    # Find and call the right classmethod from_json() to restore the object.
-    module = data['_module']
-    m = __import__(module, fromlist=module.split('.')[:-1])
-    kls = getattr(m, data['_class'])
-    from_json = getattr(kls, 'from_json')
-    return from_json(s)
+        data = json.loads(s)
+        # Find and call the right classmethod from_json() to restore the object.
+        module = data["_module"]
+        m = __import__(module, fromlist=module.split(".")[:-1])
+        kls = getattr(m, data["_class"])
+        from_json = getattr(kls, "from_json")
+        return from_json(s)
 
 
 class MediaIoBaseUpload(MediaUpload):
-  """A MediaUpload for a io.Base objects.
+    """A MediaUpload for a io.Base objects.
 
   Note that the Python file object is compatible with io.Base and can be used
   with this class also.
@@ -410,10 +421,9 @@
   your chunksize larger than 5MB, or to -1.
   """
 
-  @util.positional(3)
-  def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE,
-      resumable=False):
-    """Constructor.
+    @util.positional(3)
+    def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
+        """Constructor.
 
     Args:
       fd: io.Base or file object, The source of the bytes to upload. MUST be
@@ -429,51 +439,51 @@
       resumable: bool, True if this is a resumable upload. False means upload
         in a single request.
     """
-    super(MediaIoBaseUpload, self).__init__()
-    self._fd = fd
-    self._mimetype = mimetype
-    if not (chunksize == -1 or chunksize > 0):
-      raise InvalidChunkSizeError()
-    self._chunksize = chunksize
-    self._resumable = resumable
+        super(MediaIoBaseUpload, self).__init__()
+        self._fd = fd
+        self._mimetype = mimetype
+        if not (chunksize == -1 or chunksize > 0):
+            raise InvalidChunkSizeError()
+        self._chunksize = chunksize
+        self._resumable = resumable
 
-    self._fd.seek(0, os.SEEK_END)
-    self._size = self._fd.tell()
+        self._fd.seek(0, os.SEEK_END)
+        self._size = self._fd.tell()
 
-  def chunksize(self):
-    """Chunk size for resumable uploads.
+    def chunksize(self):
+        """Chunk size for resumable uploads.
 
     Returns:
       Chunk size in bytes.
     """
-    return self._chunksize
+        return self._chunksize
 
-  def mimetype(self):
-    """Mime type of the body.
+    def mimetype(self):
+        """Mime type of the body.
 
     Returns:
       Mime type.
     """
-    return self._mimetype
+        return self._mimetype
 
-  def size(self):
-    """Size of upload.
+    def size(self):
+        """Size of upload.
 
     Returns:
       Size of the body, or None of the size is unknown.
     """
-    return self._size
+        return self._size
 
-  def resumable(self):
-    """Whether this upload is resumable.
+    def resumable(self):
+        """Whether this upload is resumable.
 
     Returns:
       True if resumable upload or False.
     """
-    return self._resumable
+        return self._resumable
 
-  def getbytes(self, begin, length):
-    """Get bytes from the media.
+    def getbytes(self, begin, length):
+        """Get bytes from the media.
 
     Args:
       begin: int, offset from beginning of file.
@@ -483,11 +493,11 @@
       A string of bytes read. May be shorted than length if EOF was reached
       first.
     """
-    self._fd.seek(begin)
-    return self._fd.read(length)
+        self._fd.seek(begin)
+        return self._fd.read(length)
 
-  def has_stream(self):
-    """Does the underlying upload support a streaming interface.
+    def has_stream(self):
+        """Does the underlying upload support a streaming interface.
 
     Streaming means it is an io.IOBase subclass that supports seek, i.e.
     seekable() returns True.
@@ -496,24 +506,24 @@
       True if the call to stream() will return an instance of a seekable io.Base
       subclass.
     """
-    return True
+        return True
 
-  def stream(self):
-    """A stream interface to the data being uploaded.
+    def stream(self):
+        """A stream interface to the data being uploaded.
 
     Returns:
       The returned value is an io.IOBase subclass that supports seek, i.e.
       seekable() returns True.
     """
-    return self._fd
+        return self._fd
 
-  def to_json(self):
-    """This upload type is not serializable."""
-    raise NotImplementedError('MediaIoBaseUpload is not serializable.')
+    def to_json(self):
+        """This upload type is not serializable."""
+        raise NotImplementedError("MediaIoBaseUpload is not serializable.")
 
 
 class MediaFileUpload(MediaIoBaseUpload):
-  """A MediaUpload for a file.
+    """A MediaUpload for a file.
 
   Construct a MediaFileUpload and pass as the media_body parameter of the
   method. For example, if we had a service that allowed uploading images:
@@ -534,10 +544,11 @@
   your chunksize larger than 5MB, or to -1.
   """
 
-  @util.positional(2)
-  def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE,
-               resumable=False):
-    """Constructor.
+    @util.positional(2)
+    def __init__(
+        self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE, resumable=False
+    ):
+        """Constructor.
 
     Args:
       filename: string, Name of the file.
@@ -551,48 +562,57 @@
       resumable: bool, True if this is a resumable upload. False means upload
         in a single request.
     """
-    self._filename = filename
-    fd = open(self._filename, 'rb')
-    if mimetype is None:
-      # No mimetype provided, make a guess.
-      mimetype, _ = mimetypes.guess_type(filename)
-      if mimetype is None:
-        # Guess failed, use octet-stream.
-        mimetype = 'application/octet-stream'
-    super(MediaFileUpload, self).__init__(fd, mimetype,
-                                          chunksize=chunksize,
-                                          resumable=resumable)
+        self._filename = filename
+        fd = open(self._filename, "rb")
+        if mimetype is None:
+            # No mimetype provided, make a guess.
+            mimetype, _ = mimetypes.guess_type(filename)
+            if mimetype is None:
+                # Guess failed, use octet-stream.
+                mimetype = "application/octet-stream"
+        super(MediaFileUpload, self).__init__(
+            fd, mimetype, chunksize=chunksize, resumable=resumable
+        )
 
-  def __del__(self):
-    self._fd.close()
+    def __del__(self):
+        self._fd.close()
 
-  def to_json(self):
-    """Creating a JSON representation of an instance of MediaFileUpload.
+    def to_json(self):
+        """Creating a JSON representation of an instance of MediaFileUpload.
 
     Returns:
        string, a JSON representation of this instance, suitable to pass to
        from_json().
     """
-    return self._to_json(strip=['_fd'])
+        return self._to_json(strip=["_fd"])
 
-  @staticmethod
-  def from_json(s):
-    d = json.loads(s)
-    return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'],
-                           chunksize=d['_chunksize'], resumable=d['_resumable'])
+    @staticmethod
+    def from_json(s):
+        d = json.loads(s)
+        return MediaFileUpload(
+            d["_filename"],
+            mimetype=d["_mimetype"],
+            chunksize=d["_chunksize"],
+            resumable=d["_resumable"],
+        )
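A round trip through to_json() and MediaUpload.new_from_json() using MediaFileUpload, the subclass above that overrides to_json(); the temporary file exists only so the constructor has something to open:

import tempfile

from googleapiclient.http import MediaFileUpload, MediaUpload

with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as tmp:
    tmp.write(b"hello")

upload = MediaFileUpload(tmp.name, mimetype="text/plain", resumable=True)
restored = MediaUpload.new_from_json(upload.to_json())

print(type(restored).__name__, restored.mimetype(), restored.size())  # MediaFileUpload text/plain 5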
 
 
 class MediaInMemoryUpload(MediaIoBaseUpload):
-  """MediaUpload for a chunk of bytes.
+    """MediaUpload for a chunk of bytes.
 
   DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
   the stream.
   """
 
-  @util.positional(2)
-  def __init__(self, body, mimetype='application/octet-stream',
-               chunksize=DEFAULT_CHUNK_SIZE, resumable=False):
-    """Create a new MediaInMemoryUpload.
+    @util.positional(2)
+    def __init__(
+        self,
+        body,
+        mimetype="application/octet-stream",
+        chunksize=DEFAULT_CHUNK_SIZE,
+        resumable=False,
+    ):
+        """Create a new MediaInMemoryUpload.
 
   DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for
   the stream.
@@ -606,13 +626,14 @@
     resumable: bool, True if this is a resumable upload. False means upload
       in a single request.
     """
-    fd = BytesIO(body)
-    super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize,
-                                              resumable=resumable)
+        fd = BytesIO(body)
+        super(MediaInMemoryUpload, self).__init__(
+            fd, mimetype, chunksize=chunksize, resumable=resumable
+        )
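As the deprecation note above suggests, MediaInMemoryUpload is only a thin wrapper that hands a BytesIO to MediaIoBaseUpload; the two uploads below are equivalent ways to send an in-memory byte string (the CSV payload is made up):

import io

from googleapiclient.http import MediaInMemoryUpload, MediaIoBaseUpload

payload = b"col_a,col_b\n1,2\n"

legacy = MediaInMemoryUpload(payload, mimetype="text/csv")
preferred = MediaIoBaseUpload(io.BytesIO(payload), mimetype="text/csv")

print(legacy.size(), preferred.size())  # both report 16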
 
 
 class MediaIoBaseDownload(object):
-  """"Download media resources.
+    """"Download media resources.
 
   Note that the Python file object is compatible with io.Base and can be used
   with this class also.
@@ -631,9 +652,9 @@
     print "Download Complete!"
   """
 
-  @util.positional(3)
-  def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
-    """Constructor.
+    @util.positional(3)
+    def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE):
+        """Constructor.
 
     Args:
       fd: io.Base or file object, The stream in which to write the downloaded
@@ -642,29 +663,29 @@
         chunks.
       chunksize: int, File will be downloaded in chunks of this many bytes.
     """
-    self._fd = fd
-    self._request = request
-    self._uri = request.uri
-    self._chunksize = chunksize
-    self._progress = 0
-    self._total_size = None
-    self._done = False
+        self._fd = fd
+        self._request = request
+        self._uri = request.uri
+        self._chunksize = chunksize
+        self._progress = 0
+        self._total_size = None
+        self._done = False
 
-    # Stubs for testing.
-    self._sleep = time.sleep
-    self._rand = random.random
+        # Stubs for testing.
+        self._sleep = time.sleep
+        self._rand = random.random
 
-    self._headers = {}
-    for k, v in six.iteritems(request.headers):
-      # allow users to supply custom headers by setting them on the request
-      # but strip out the ones that are set by default on requests generated by
-      # API methods like Drive's files().get(fileId=...)
-      if not k.lower() in ('accept', 'accept-encoding', 'user-agent'):
-        self._headers[k] = v
+        self._headers = {}
+        for k, v in six.iteritems(request.headers):
+            # allow users to supply custom headers by setting them on the request
+            # but strip out the ones that are set by default on requests generated by
+            # API methods like Drive's files().get(fileId=...)
+            if not k.lower() in ("accept", "accept-encoding", "user-agent"):
+                self._headers[k] = v
 
-  @util.positional(1)
-  def next_chunk(self, num_retries=0):
-    """Get the next chunk of the download.
+    @util.positional(1)
+    def next_chunk(self, num_retries=0):
+        """Get the next chunk of the download.
 
     Args:
       num_retries: Integer, number of times to retry with randomized
@@ -681,37 +702,46 @@
       googleapiclient.errors.HttpError if the response was not a 2xx.
       httplib2.HttpLib2Error if a transport error has occured.
     """
-    headers = self._headers.copy()
-    headers['range'] = 'bytes=%d-%d' % (
-            self._progress, self._progress + self._chunksize)
-    http = self._request.http
+        headers = self._headers.copy()
+        headers["range"] = "bytes=%d-%d" % (
+            self._progress,
+            self._progress + self._chunksize,
+        )
+        http = self._request.http
 
-    resp, content = _retry_request(
-        http, num_retries, 'media download', self._sleep, self._rand, self._uri,
-        'GET', headers=headers)
+        resp, content = _retry_request(
+            http,
+            num_retries,
+            "media download",
+            self._sleep,
+            self._rand,
+            self._uri,
+            "GET",
+            headers=headers,
+        )
 
-    if resp.status in [200, 206]:
-      if 'content-location' in resp and resp['content-location'] != self._uri:
-        self._uri = resp['content-location']
-      self._progress += len(content)
-      self._fd.write(content)
+        if resp.status in [200, 206]:
+            if "content-location" in resp and resp["content-location"] != self._uri:
+                self._uri = resp["content-location"]
+            self._progress += len(content)
+            self._fd.write(content)
 
-      if 'content-range' in resp:
-        content_range = resp['content-range']
-        length = content_range.rsplit('/', 1)[1]
-        self._total_size = int(length)
-      elif 'content-length' in resp:
-        self._total_size = int(resp['content-length'])
+            if "content-range" in resp:
+                content_range = resp["content-range"]
+                length = content_range.rsplit("/", 1)[1]
+                self._total_size = int(length)
+            elif "content-length" in resp:
+                self._total_size = int(resp["content-length"])
 
-      if self._total_size is None or self._progress == self._total_size:
-        self._done = True
-      return MediaDownloadProgress(self._progress, self._total_size), self._done
-    else:
-      raise HttpError(resp, content, uri=self._uri)
+            if self._total_size is None or self._progress == self._total_size:
+                self._done = True
+            return MediaDownloadProgress(self._progress, self._total_size), self._done
+        else:
+            raise HttpError(resp, content, uri=self._uri)
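The chunked-download loop that next_chunk() above is designed for, as a sketch; request is assumed to be a media request (for example service.files().get_media(fileId=...) on an authorized Drive service) and is not constructed here:

import io

from googleapiclient.http import MediaIoBaseDownload

def download_to_bytes(request, chunksize=1024 * 1024):
    """Drive next_chunk() until done and return the downloaded bytes."""
    buf = io.BytesIO()
    downloader = MediaIoBaseDownload(buf, request, chunksize=chunksize)
    done = False
    while not done:
        status, done = downloader.next_chunk(num_retries=3)
        print("Download %d%%." % int(status.progress() * 100))
    return buf.getvalue()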
 
 
 class _StreamSlice(object):
-  """Truncated stream.
+    """Truncated stream.
 
   Takes a stream and presents a stream that is a slice of the original stream.
   This is used when uploading media in chunks. In later versions of Python a
@@ -720,21 +750,21 @@
   wrapper presents a virtual stream that only reads to the end of the chunk.
   """
 
-  def __init__(self, stream, begin, chunksize):
-    """Constructor.
+    def __init__(self, stream, begin, chunksize):
+        """Constructor.
 
     Args:
       stream: (io.Base, file object), the stream to wrap.
       begin: int, the seek position the chunk begins at.
       chunksize: int, the size of the chunk.
     """
-    self._stream = stream
-    self._begin = begin
-    self._chunksize = chunksize
-    self._stream.seek(begin)
+        self._stream = stream
+        self._begin = begin
+        self._chunksize = chunksize
+        self._stream.seek(begin)
 
-  def read(self, n=-1):
-    """Read n bytes.
+    def read(self, n=-1):
+        """Read n bytes.
 
     Args:
       n, int, the number of bytes to read.
@@ -742,25 +772,30 @@
     Returns:
       A string of length 'n', or less if EOF is reached.
     """
-    # The data left available to read sits in [cur, end)
-    cur = self._stream.tell()
-    end = self._begin + self._chunksize
-    if n == -1 or cur + n > end:
-      n = end - cur
-    return self._stream.read(n)
+        # The data left available to read sits in [cur, end)
+        cur = self._stream.tell()
+        end = self._begin + self._chunksize
+        if n == -1 or cur + n > end:
+            n = end - cur
+        return self._stream.read(n)
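_StreamSlice is private, but its clamping behaviour is easy to see in isolation: reads never cross the end of the 4-byte window starting at offset 3.

import io

from googleapiclient.http import _StreamSlice

s = _StreamSlice(io.BytesIO(b"abcdefghij"), 3, 4)
print(s.read())     # b'defg' - read(-1) is clamped to the chunk boundary
print(s.read(100))  # b''     - nothing left inside the slice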
 
 
 class HttpRequest(object):
-  """Encapsulates a single HTTP request."""
+    """Encapsulates a single HTTP request."""
 
-  @util.positional(4)
-  def __init__(self, http, postproc, uri,
-               method='GET',
-               body=None,
-               headers=None,
-               methodId=None,
-               resumable=None):
-    """Constructor for an HttpRequest.
+    @util.positional(4)
+    def __init__(
+        self,
+        http,
+        postproc,
+        uri,
+        method="GET",
+        body=None,
+        headers=None,
+        methodId=None,
+        resumable=None,
+    ):
+        """Constructor for an HttpRequest.
 
     Args:
       http: httplib2.Http, the transport object to use to make a request
@@ -774,33 +809,33 @@
       methodId: string, a unique identifier for the API method being called.
       resumable: MediaUpload, None if this is not a resumbale request.
     """
-    self.uri = uri
-    self.method = method
-    self.body = body
-    self.headers = headers or {}
-    self.methodId = methodId
-    self.http = http
-    self.postproc = postproc
-    self.resumable = resumable
-    self.response_callbacks = []
-    self._in_error_state = False
+        self.uri = uri
+        self.method = method
+        self.body = body
+        self.headers = headers or {}
+        self.methodId = methodId
+        self.http = http
+        self.postproc = postproc
+        self.resumable = resumable
+        self.response_callbacks = []
+        self._in_error_state = False
 
-    # The size of the non-media part of the request.
-    self.body_size = len(self.body or '')
+        # The size of the non-media part of the request.
+        self.body_size = len(self.body or "")
 
-    # The resumable URI to send chunks to.
-    self.resumable_uri = None
+        # The resumable URI to send chunks to.
+        self.resumable_uri = None
 
-    # The bytes that have been uploaded.
-    self.resumable_progress = 0
+        # The bytes that have been uploaded.
+        self.resumable_progress = 0
 
-    # Stubs for testing.
-    self._rand = random.random
-    self._sleep = time.sleep
+        # Stubs for testing.
+        self._rand = random.random
+        self._sleep = time.sleep
 
-  @util.positional(1)
-  def execute(self, http=None, num_retries=0):
-    """Execute the request.
+    @util.positional(1)
+    def execute(self, http=None, num_retries=0):
+        """Execute the request.
 
     Args:
       http: httplib2.Http, an http object to be used in place of the
@@ -818,47 +853,54 @@
       googleapiclient.errors.HttpError if the response was not a 2xx.
       httplib2.HttpLib2Error if a transport error has occured.
     """
-    if http is None:
-      http = self.http
+        if http is None:
+            http = self.http
 
-    if self.resumable:
-      body = None
-      while body is None:
-        _, body = self.next_chunk(http=http, num_retries=num_retries)
-      return body
+        if self.resumable:
+            body = None
+            while body is None:
+                _, body = self.next_chunk(http=http, num_retries=num_retries)
+            return body
 
-    # Non-resumable case.
+        # Non-resumable case.
 
-    if 'content-length' not in self.headers:
-      self.headers['content-length'] = str(self.body_size)
-    # If the request URI is too long then turn it into a POST request.
-    # Assume that a GET request never contains a request body.
-    if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET':
-      self.method = 'POST'
-      self.headers['x-http-method-override'] = 'GET'
-      self.headers['content-type'] = 'application/x-www-form-urlencoded'
-      parsed = urlparse(self.uri)
-      self.uri = urlunparse(
-          (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None,
-           None)
-          )
-      self.body = parsed.query
-      self.headers['content-length'] = str(len(self.body))
+        if "content-length" not in self.headers:
+            self.headers["content-length"] = str(self.body_size)
+        # If the request URI is too long then turn it into a POST request.
+        # Assume that a GET request never contains a request body.
+        if len(self.uri) > MAX_URI_LENGTH and self.method == "GET":
+            self.method = "POST"
+            self.headers["x-http-method-override"] = "GET"
+            self.headers["content-type"] = "application/x-www-form-urlencoded"
+            parsed = urlparse(self.uri)
+            self.uri = urlunparse(
+                (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, None)
+            )
+            self.body = parsed.query
+            self.headers["content-length"] = str(len(self.body))
 
-    # Handle retries for server-side errors.
-    resp, content = _retry_request(
-          http, num_retries, 'request', self._sleep, self._rand, str(self.uri),
-          method=str(self.method), body=self.body, headers=self.headers)
+        # Handle retries for server-side errors.
+        resp, content = _retry_request(
+            http,
+            num_retries,
+            "request",
+            self._sleep,
+            self._rand,
+            str(self.uri),
+            method=str(self.method),
+            body=self.body,
+            headers=self.headers,
+        )
 
-    for callback in self.response_callbacks:
-      callback(resp)
-    if resp.status >= 300:
-      raise HttpError(resp, content, uri=self.uri)
-    return self.postproc(resp, content)
+        for callback in self.response_callbacks:
+            callback(resp)
+        if resp.status >= 300:
+            raise HttpError(resp, content, uri=self.uri)
+        return self.postproc(resp, content)
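The GET-to-POST override in execute() above, shown on its own: when a GET URI exceeds MAX_URI_LENGTH, the query string moves into the body and the original verb travels in a header (the URI here is illustrative and short, so the library itself would not actually rewrite it):

from six.moves.urllib.parse import urlparse, urlunparse

uri = "https://example.invalid/api?ids=" + ",".join(str(i) for i in range(5))

parsed = urlparse(uri)
body = parsed.query
uri = urlunparse((parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, None))
headers = {
    "x-http-method-override": "GET",
    "content-type": "application/x-www-form-urlencoded",
    "content-length": str(len(body)),
}

print(uri)   # https://example.invalid/api
print(body)  # ids=0,1,2,3,4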
 
-  @util.positional(2)
-  def add_response_callback(self, cb):
-    """add_response_headers_callback
+    @util.positional(2)
+    def add_response_callback(self, cb):
+        """add_response_headers_callback
 
     Args:
       cb: Callback to be called on receiving the response headers, of signature:
@@ -866,11 +908,11 @@
       def cb(resp):
         # Where resp is an instance of httplib2.Response
     """
-    self.response_callbacks.append(cb)
+        self.response_callbacks.append(cb)
 
-  @util.positional(1)
-  def next_chunk(self, http=None, num_retries=0):
-    """Execute the next step of a resumable upload.
+    @util.positional(1)
+    def next_chunk(self, http=None, num_retries=0):
+        """Execute the next step of a resumable upload.
 
     Can only be used if the method being executed supports media uploads and
     the MediaUpload object passed in was flagged as using resumable upload.
@@ -907,95 +949,103 @@
       googleapiclient.errors.HttpError if the response was not a 2xx.
       httplib2.HttpLib2Error if a transport error has occured.
     """
-    if http is None:
-      http = self.http
+        if http is None:
+            http = self.http
 
-    if self.resumable.size() is None:
-      size = '*'
-    else:
-      size = str(self.resumable.size())
+        if self.resumable.size() is None:
+            size = "*"
+        else:
+            size = str(self.resumable.size())
 
-    if self.resumable_uri is None:
-      start_headers = copy.copy(self.headers)
-      start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
-      if size != '*':
-        start_headers['X-Upload-Content-Length'] = size
-      start_headers['content-length'] = str(self.body_size)
+        if self.resumable_uri is None:
+            start_headers = copy.copy(self.headers)
+            start_headers["X-Upload-Content-Type"] = self.resumable.mimetype()
+            if size != "*":
+                start_headers["X-Upload-Content-Length"] = size
+            start_headers["content-length"] = str(self.body_size)
 
-      resp, content = _retry_request(
-          http, num_retries, 'resumable URI request', self._sleep, self._rand,
-          self.uri, method=self.method, body=self.body, headers=start_headers)
+            resp, content = _retry_request(
+                http,
+                num_retries,
+                "resumable URI request",
+                self._sleep,
+                self._rand,
+                self.uri,
+                method=self.method,
+                body=self.body,
+                headers=start_headers,
+            )
 
-      if resp.status == 200 and 'location' in resp:
-        self.resumable_uri = resp['location']
-      else:
-        raise ResumableUploadError(resp, content)
-    elif self._in_error_state:
-      # If we are in an error state then query the server for current state of
-      # the upload by sending an empty PUT and reading the 'range' header in
-      # the response.
-      headers = {
-          'Content-Range': 'bytes */%s' % size,
-          'content-length': '0'
-          }
-      resp, content = http.request(self.resumable_uri, 'PUT',
-                                   headers=headers)
-      status, body = self._process_response(resp, content)
-      if body:
-        # The upload was complete.
-        return (status, body)
+            if resp.status == 200 and "location" in resp:
+                self.resumable_uri = resp["location"]
+            else:
+                raise ResumableUploadError(resp, content)
+        elif self._in_error_state:
+            # If we are in an error state then query the server for current state of
+            # the upload by sending an empty PUT and reading the 'range' header in
+            # the response.
+            headers = {"Content-Range": "bytes */%s" % size, "content-length": "0"}
+            resp, content = http.request(self.resumable_uri, "PUT", headers=headers)
+            status, body = self._process_response(resp, content)
+            if body:
+                # The upload was complete.
+                return (status, body)
 
-    if self.resumable.has_stream():
-      data = self.resumable.stream()
-      if self.resumable.chunksize() == -1:
-        data.seek(self.resumable_progress)
-        chunk_end = self.resumable.size() - self.resumable_progress - 1
-      else:
-        # Doing chunking with a stream, so wrap a slice of the stream.
-        data = _StreamSlice(data, self.resumable_progress,
-                            self.resumable.chunksize())
-        chunk_end = min(
-            self.resumable_progress + self.resumable.chunksize() - 1,
-            self.resumable.size() - 1)
-    else:
-      data = self.resumable.getbytes(
-          self.resumable_progress, self.resumable.chunksize())
+        if self.resumable.has_stream():
+            data = self.resumable.stream()
+            if self.resumable.chunksize() == -1:
+                data.seek(self.resumable_progress)
+                chunk_end = self.resumable.size() - self.resumable_progress - 1
+            else:
+                # Doing chunking with a stream, so wrap a slice of the stream.
+                data = _StreamSlice(
+                    data, self.resumable_progress, self.resumable.chunksize()
+                )
+                chunk_end = min(
+                    self.resumable_progress + self.resumable.chunksize() - 1,
+                    self.resumable.size() - 1,
+                )
+        else:
+            data = self.resumable.getbytes(
+                self.resumable_progress, self.resumable.chunksize()
+            )
 
-      # A short read implies that we are at EOF, so finish the upload.
-      if len(data) < self.resumable.chunksize():
-        size = str(self.resumable_progress + len(data))
+            # A short read implies that we are at EOF, so finish the upload.
+            if len(data) < self.resumable.chunksize():
+                size = str(self.resumable_progress + len(data))
 
-      chunk_end = self.resumable_progress + len(data) - 1
+            chunk_end = self.resumable_progress + len(data) - 1
 
-    headers = {
-        'Content-Range': 'bytes %d-%d/%s' % (
-            self.resumable_progress, chunk_end, size),
-        # Must set the content-length header here because httplib can't
-        # calculate the size when working with _StreamSlice.
-        'Content-Length': str(chunk_end - self.resumable_progress + 1)
+        headers = {
+            "Content-Range": "bytes %d-%d/%s"
+            % (self.resumable_progress, chunk_end, size),
+            # Must set the content-length header here because httplib can't
+            # calculate the size when working with _StreamSlice.
+            "Content-Length": str(chunk_end - self.resumable_progress + 1),
         }
 
-    for retry_num in range(num_retries + 1):
-      if retry_num > 0:
-        self._sleep(self._rand() * 2**retry_num)
-        LOGGER.warning(
-            'Retry #%d for media upload: %s %s, following status: %d'
-            % (retry_num, self.method, self.uri, resp.status))
+        for retry_num in range(num_retries + 1):
+            if retry_num > 0:
+                self._sleep(self._rand() * 2 ** retry_num)
+                LOGGER.warning(
+                    "Retry #%d for media upload: %s %s, following status: %d"
+                    % (retry_num, self.method, self.uri, resp.status)
+                )
 
-      try:
-        resp, content = http.request(self.resumable_uri, method='PUT',
-                                     body=data,
-                                     headers=headers)
-      except:
-        self._in_error_state = True
-        raise
-      if not _should_retry_response(resp.status, content):
-        break
+            try:
+                resp, content = http.request(
+                    self.resumable_uri, method="PUT", body=data, headers=headers
+                )
+            except:
+                self._in_error_state = True
+                raise
+            if not _should_retry_response(resp.status, content):
+                break
 
-    return self._process_response(resp, content)
+        return self._process_response(resp, content)
 
-  def _process_response(self, resp, content):
-    """Process the response from a single chunk upload.
+    def _process_response(self, resp, content):
+        """Process the response from a single chunk upload.
 
     Args:
       resp: httplib2.Response, the response object.
@@ -1008,57 +1058,60 @@
     Raises:
       googleapiclient.errors.HttpError if the response was not a 2xx or a 308.
     """
-    if resp.status in [200, 201]:
-      self._in_error_state = False
-      return None, self.postproc(resp, content)
-    elif resp.status == 308:
-      self._in_error_state = False
-      # A "308 Resume Incomplete" indicates we are not done.
-      try:
-        self.resumable_progress = int(resp['range'].split('-')[1]) + 1
-      except KeyError:
-        # If resp doesn't contain range header, resumable progress is 0
-        self.resumable_progress = 0
-      if 'location' in resp:
-        self.resumable_uri = resp['location']
-    else:
-      self._in_error_state = True
-      raise HttpError(resp, content, uri=self.uri)
+        if resp.status in [200, 201]:
+            self._in_error_state = False
+            return None, self.postproc(resp, content)
+        elif resp.status == 308:
+            self._in_error_state = False
+            # A "308 Resume Incomplete" indicates we are not done.
+            try:
+                self.resumable_progress = int(resp["range"].split("-")[1]) + 1
+            except KeyError:
+                # If resp doesn't contain range header, resumable progress is 0
+                self.resumable_progress = 0
+            if "location" in resp:
+                self.resumable_uri = resp["location"]
+        else:
+            self._in_error_state = True
+            raise HttpError(resp, content, uri=self.uri)
 
-    return (MediaUploadProgress(self.resumable_progress, self.resumable.size()),
-            None)
+        return (
+            MediaUploadProgress(self.resumable_progress, self.resumable.size()),
+            None,
+        )
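The tuple returned by _process_response() is what makes a manual upload loop work: (progress, None) while chunks remain, (None, body) once the upload completes. In the sketch below, request is assumed to come from a method call built with a resumable MediaUpload (for example media_body=MediaFileUpload(..., resumable=True)); execute() runs the same loop internally when resumable is set.

def upload_in_chunks(request):
    """Drive next_chunk() until the final response body comes back."""
    response = None
    while response is None:
        status, response = request.next_chunk(num_retries=3)
        if status:
            print("Uploaded %d%%." % int(status.progress() * 100))
    return response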
 
-  def to_json(self):
-    """Returns a JSON representation of the HttpRequest."""
-    d = copy.copy(self.__dict__)
-    if d['resumable'] is not None:
-      d['resumable'] = self.resumable.to_json()
-    del d['http']
-    del d['postproc']
-    del d['_sleep']
-    del d['_rand']
+    def to_json(self):
+        """Returns a JSON representation of the HttpRequest."""
+        d = copy.copy(self.__dict__)
+        if d["resumable"] is not None:
+            d["resumable"] = self.resumable.to_json()
+        del d["http"]
+        del d["postproc"]
+        del d["_sleep"]
+        del d["_rand"]
 
-    return json.dumps(d)
+        return json.dumps(d)
 
-  @staticmethod
-  def from_json(s, http, postproc):
-    """Returns an HttpRequest populated with info from a JSON object."""
-    d = json.loads(s)
-    if d['resumable'] is not None:
-      d['resumable'] = MediaUpload.new_from_json(d['resumable'])
-    return HttpRequest(
-        http,
-        postproc,
-        uri=d['uri'],
-        method=d['method'],
-        body=d['body'],
-        headers=d['headers'],
-        methodId=d['methodId'],
-        resumable=d['resumable'])
+    @staticmethod
+    def from_json(s, http, postproc):
+        """Returns an HttpRequest populated with info from a JSON object."""
+        d = json.loads(s)
+        if d["resumable"] is not None:
+            d["resumable"] = MediaUpload.new_from_json(d["resumable"])
+        return HttpRequest(
+            http,
+            postproc,
+            uri=d["uri"],
+            method=d["method"],
+            body=d["body"],
+            headers=d["headers"],
+            methodId=d["methodId"],
+            resumable=d["resumable"],
+        )
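A to_json()/from_json() round trip for HttpRequest, as defined above; http and postproc are deliberately not serialized, so they are handed back in (the transport and postproc here are trivial stand-ins):

import httplib2

from googleapiclient.http import HttpRequest

def postproc(resp, content):
    return content

req = HttpRequest(httplib2.Http(), postproc, "https://example.invalid/thing", method="GET")
restored = HttpRequest.from_json(req.to_json(), httplib2.Http(), postproc)

print(restored.uri, restored.method)  # https://example.invalid/thing GET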
 
 
 class BatchHttpRequest(object):
-  """Batches multiple HttpRequest objects into a single HTTP request.
+    """Batches multiple HttpRequest objects into a single HTTP request.
 
   Example:
     from googleapiclient.http import BatchHttpRequest
@@ -1090,9 +1143,9 @@
     batch.execute(http=http)
   """
 
-  @util.positional(1)
-  def __init__(self, callback=None, batch_uri=None):
-    """Constructor for a BatchHttpRequest.
+    @util.positional(1)
+    def __init__(self, callback=None, batch_uri=None):
+        """Constructor for a BatchHttpRequest.
 
     Args:
       callback: callable, A callback to be called for each response, of the
@@ -1102,77 +1155,77 @@
         occurred while processing the request, or None if no error occurred.
       batch_uri: string, URI to send batch requests to.
     """
-    if batch_uri is None:
-      batch_uri = _LEGACY_BATCH_URI
+        if batch_uri is None:
+            batch_uri = _LEGACY_BATCH_URI
 
-    if batch_uri == _LEGACY_BATCH_URI:
-      LOGGER.warn(
-        "You have constructed a BatchHttpRequest using the legacy batch "
-        "endpoint %s. This endpoint will be turned down on March 25, 2019. "
-        "Please provide the API-specific endpoint or use "
-        "service.new_batch_http_request(). For more details see "
-        "https://developers.googleblog.com/2018/03/discontinuing-support-for-json-rpc-and.html"
-        "and https://developers.google.com/api-client-library/python/guide/batch.",
-        _LEGACY_BATCH_URI)
-    self._batch_uri = batch_uri
+        if batch_uri == _LEGACY_BATCH_URI:
+            LOGGER.warn(
+                "You have constructed a BatchHttpRequest using the legacy batch "
+                "endpoint %s. This endpoint will be turned down on March 25, 2019. "
+                "Please provide the API-specific endpoint or use "
+                "service.new_batch_http_request(). For more details see "
+                "https://developers.googleblog.com/2018/03/discontinuing-support-for-json-rpc-and.html"
+                "and https://developers.google.com/api-client-library/python/guide/batch.",
+                _LEGACY_BATCH_URI,
+            )
+        self._batch_uri = batch_uri
 
-    # Global callback to be called for each individual response in the batch.
-    self._callback = callback
+        # Global callback to be called for each individual response in the batch.
+        self._callback = callback
 
-    # A map from id to request.
-    self._requests = {}
+        # A map from id to request.
+        self._requests = {}
 
-    # A map from id to callback.
-    self._callbacks = {}
+        # A map from id to callback.
+        self._callbacks = {}
 
-    # List of request ids, in the order in which they were added.
-    self._order = []
+        # List of request ids, in the order in which they were added.
+        self._order = []
 
-    # The last auto generated id.
-    self._last_auto_id = 0
+        # The last auto generated id.
+        self._last_auto_id = 0
 
-    # Unique ID on which to base the Content-ID headers.
-    self._base_id = None
+        # Unique ID on which to base the Content-ID headers.
+        self._base_id = None
 
-    # A map from request id to (httplib2.Response, content) response pairs
-    self._responses = {}
+        # A map from request id to (httplib2.Response, content) response pairs
+        self._responses = {}
 
-    # A map of id(Credentials) that have been refreshed.
-    self._refreshed_credentials = {}
+        # A map of id(Credentials) that have been refreshed.
+        self._refreshed_credentials = {}
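
To avoid the legacy endpoint warned about above, construct the batch against an API-specific batch URI (the Drive URI below is illustrative) or, better, let service.new_batch_http_request() pick it for you. A hedged sketch:

from googleapiclient.http import BatchHttpRequest

def on_result(request_id, response, exception):
    # Called once per sub-request with its parsed response or HttpError.
    if exception is not None:
        print("request %s failed: %s" % (request_id, exception))

batch = BatchHttpRequest(callback=on_result,
                         batch_uri="https://www.googleapis.com/batch/drive/v3")
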
 
-  def _refresh_and_apply_credentials(self, request, http):
-    """Refresh the credentials and apply to the request.
+    def _refresh_and_apply_credentials(self, request, http):
+        """Refresh the credentials and apply to the request.
 
     Args:
       request: HttpRequest, the request.
       http: httplib2.Http, the global http object for the batch.
     """
-    # For the credentials to refresh, but only once per refresh_token
-    # If there is no http per the request then refresh the http passed in
-    # via execute()
-    creds = None
-    request_credentials = False
+        # Refresh the credentials, but only once per refresh_token.
+        # If the request carries no http object of its own, refresh the http
+        # passed in via execute().
+        creds = None
+        request_credentials = False
 
-    if request.http is not None:
-      creds = _auth.get_credentials_from_http(request.http)
-      request_credentials = True
+        if request.http is not None:
+            creds = _auth.get_credentials_from_http(request.http)
+            request_credentials = True
 
-    if creds is None and http is not None:
-      creds = _auth.get_credentials_from_http(http)
+        if creds is None and http is not None:
+            creds = _auth.get_credentials_from_http(http)
 
-    if creds is not None:
-      if id(creds) not in self._refreshed_credentials:
-        _auth.refresh_credentials(creds)
-        self._refreshed_credentials[id(creds)] = 1
+        if creds is not None:
+            if id(creds) not in self._refreshed_credentials:
+                _auth.refresh_credentials(creds)
+                self._refreshed_credentials[id(creds)] = 1
 
-    # Only apply the credentials if we are using the http object passed in,
-    # otherwise apply() will get called during _serialize_request().
-    if request.http is None or not request_credentials:
-      _auth.apply_credentials(creds, request.headers)
+        # Only apply the credentials if we are using the http object passed in,
+        # otherwise apply() will get called during _serialize_request().
+        if request.http is None or not request_credentials:
+            _auth.apply_credentials(creds, request.headers)
 
-
-  def _id_to_header(self, id_):
-    """Convert an id to a Content-ID header value.
+    def _id_to_header(self, id_):
+        """Convert an id to a Content-ID header value.
 
     Args:
       id_: string, identifier of individual request.
@@ -1182,16 +1235,16 @@
       the value because Content-ID headers are supposed to be universally
       unique.
     """
-    if self._base_id is None:
-      self._base_id = uuid.uuid4()
+        if self._base_id is None:
+            self._base_id = uuid.uuid4()
 
-    # NB: we intentionally leave whitespace between base/id and '+', so RFC2822
-    # line folding works properly on Python 3; see
-    # https://github.com/google/google-api-python-client/issues/164
-    return '<%s + %s>' % (self._base_id, quote(id_))
+        # NB: we intentionally leave whitespace between base/id and '+', so RFC2822
+        # line folding works properly on Python 3; see
+        # https://github.com/google/google-api-python-client/issues/164
+        return "<%s + %s>" % (self._base_id, quote(id_))
 
-  def _header_to_id(self, header):
-    """Convert a Content-ID header value to an id.
+    def _header_to_id(self, header):
+        """Convert a Content-ID header value to an id.
 
     Presumes the Content-ID header conforms to the format that _id_to_header()
     returns.
@@ -1205,16 +1258,16 @@
     Raises:
       BatchError if the header is not in the expected format.
     """
-    if header[0] != '<' or header[-1] != '>':
-      raise BatchError("Invalid value for Content-ID: %s" % header)
-    if '+' not in header:
-      raise BatchError("Invalid value for Content-ID: %s" % header)
-    base, id_ = header[1:-1].split(' + ', 1)
+        if header[0] != "<" or header[-1] != ">":
+            raise BatchError("Invalid value for Content-ID: %s" % header)
+        if "+" not in header:
+            raise BatchError("Invalid value for Content-ID: %s" % header)
+        base, id_ = header[1:-1].split(" + ", 1)
 
-    return unquote(id_)
+        return unquote(id_)
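
For illustration only (these are private helpers), the Content-ID round trip produced by _id_to_header() and parsed back by _header_to_id() looks like this; the batch URI and id are placeholders.

from googleapiclient.http import BatchHttpRequest

batch = BatchHttpRequest(batch_uri="https://www.googleapis.com/batch/drive/v3")
cid = batch._id_to_header("42")   # e.g. '<3c9b0572-... + 42>'
batch._header_to_id(cid)          # '42'
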
 
-  def _serialize_request(self, request):
-    """Convert an HttpRequest object into a string.
+    def _serialize_request(self, request):
+        """Convert an HttpRequest object into a string.
 
     Args:
       request: HttpRequest, the request to serialize.
@@ -1222,45 +1275,47 @@
     Returns:
       The request as a string in application/http format.
     """
-    # Construct status line
-    parsed = urlparse(request.uri)
-    request_line = urlunparse(
-        ('', '', parsed.path, parsed.params, parsed.query, '')
+        # Construct status line
+        parsed = urlparse(request.uri)
+        request_line = urlunparse(
+            ("", "", parsed.path, parsed.params, parsed.query, "")
         )
-    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
-    major, minor = request.headers.get('content-type', 'application/json').split('/')
-    msg = MIMENonMultipart(major, minor)
-    headers = request.headers.copy()
+        status_line = request.method + " " + request_line + " HTTP/1.1\n"
+        major, minor = request.headers.get("content-type", "application/json").split(
+            "/"
+        )
+        msg = MIMENonMultipart(major, minor)
+        headers = request.headers.copy()
 
-    if request.http is not None:
-      credentials = _auth.get_credentials_from_http(request.http)
-      if credentials is not None:
-        _auth.apply_credentials(credentials, headers)
+        if request.http is not None:
+            credentials = _auth.get_credentials_from_http(request.http)
+            if credentials is not None:
+                _auth.apply_credentials(credentials, headers)
 
-    # MIMENonMultipart adds its own Content-Type header.
-    if 'content-type' in headers:
-      del headers['content-type']
+        # MIMENonMultipart adds its own Content-Type header.
+        if "content-type" in headers:
+            del headers["content-type"]
 
-    for key, value in six.iteritems(headers):
-      msg[key] = value
-    msg['Host'] = parsed.netloc
-    msg.set_unixfrom(None)
+        for key, value in six.iteritems(headers):
+            msg[key] = value
+        msg["Host"] = parsed.netloc
+        msg.set_unixfrom(None)
 
-    if request.body is not None:
-      msg.set_payload(request.body)
-      msg['content-length'] = str(len(request.body))
+        if request.body is not None:
+            msg.set_payload(request.body)
+            msg["content-length"] = str(len(request.body))
 
-    # Serialize the mime message.
-    fp = StringIO()
-    # maxheaderlen=0 means don't line wrap headers.
-    g = Generator(fp, maxheaderlen=0)
-    g.flatten(msg, unixfrom=False)
-    body = fp.getvalue()
+        # Serialize the mime message.
+        fp = StringIO()
+        # maxheaderlen=0 means don't line wrap headers.
+        g = Generator(fp, maxheaderlen=0)
+        g.flatten(msg, unixfrom=False)
+        body = fp.getvalue()
 
-    return status_line + body
+        return status_line + body
 
-  def _deserialize_response(self, payload):
-    """Convert string into httplib2 response and content.
+    def _deserialize_response(self, payload):
+        """Convert string into httplib2 response and content.
 
     Args:
       payload: string, headers and body as a string.
@@ -1268,41 +1323,41 @@
     Returns:
       A pair (resp, content), such as would be returned from httplib2.request.
     """
-    # Strip off the status line
-    status_line, payload = payload.split('\n', 1)
-    protocol, status, reason = status_line.split(' ', 2)
+        # Strip off the status line
+        status_line, payload = payload.split("\n", 1)
+        protocol, status, reason = status_line.split(" ", 2)
 
-    # Parse the rest of the response
-    parser = FeedParser()
-    parser.feed(payload)
-    msg = parser.close()
-    msg['status'] = status
+        # Parse the rest of the response
+        parser = FeedParser()
+        parser.feed(payload)
+        msg = parser.close()
+        msg["status"] = status
 
-    # Create httplib2.Response from the parsed headers.
-    resp = httplib2.Response(msg)
-    resp.reason = reason
-    resp.version = int(protocol.split('/', 1)[1].replace('.', ''))
+        # Create httplib2.Response from the parsed headers.
+        resp = httplib2.Response(msg)
+        resp.reason = reason
+        resp.version = int(protocol.split("/", 1)[1].replace(".", ""))
 
-    content = payload.split('\r\n\r\n', 1)[1]
+        content = payload.split("\r\n\r\n", 1)[1]
 
-    return resp, content
+        return resp, content
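
A hedged sketch of the wire format _deserialize_response() expects: a status line, then RFC 822 style headers, a blank line, and the body. The payload below is hand-written for illustration.

from googleapiclient.http import BatchHttpRequest

batch = BatchHttpRequest(batch_uri="https://www.googleapis.com/batch/drive/v3")
payload = (
    "HTTP/1.1 200 OK\n"
    "Content-Type: application/json\r\n"
    "\r\n"
    '{"id": "abc"}'
)
resp, content = batch._deserialize_response(payload)
# resp.status == 200, resp.reason == 'OK', content == '{"id": "abc"}'
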
 
-  def _new_id(self):
-    """Create a new id.
+    def _new_id(self):
+        """Create a new id.
 
     Auto incrementing number that avoids conflicts with ids already used.
 
     Returns:
        string, a new unique id.
     """
-    self._last_auto_id += 1
-    while str(self._last_auto_id) in self._requests:
-      self._last_auto_id += 1
-    return str(self._last_auto_id)
+        self._last_auto_id += 1
+        while str(self._last_auto_id) in self._requests:
+            self._last_auto_id += 1
+        return str(self._last_auto_id)
 
-  @util.positional(2)
-  def add(self, request, callback=None, request_id=None):
-    """Add a new request.
+    @util.positional(2)
+    def add(self, request, callback=None, request_id=None):
+        """Add a new request.
 
     Every callback added will be paired with a unique id, the request_id. That
     unique id will be passed back to the callback when the response comes back
@@ -1330,21 +1385,23 @@
       KeyError if the request_id is not unique.
     """
 
-    if len(self._order) >= MAX_BATCH_LIMIT:
-      raise BatchError("Exceeded the maximum calls(%d) in a single batch request."
-                       % MAX_BATCH_LIMIT)
-    if request_id is None:
-      request_id = self._new_id()
-    if request.resumable is not None:
-      raise BatchError("Media requests cannot be used in a batch request.")
-    if request_id in self._requests:
-      raise KeyError("A request with this ID already exists: %s" % request_id)
-    self._requests[request_id] = request
-    self._callbacks[request_id] = callback
-    self._order.append(request_id)
+        if len(self._order) >= MAX_BATCH_LIMIT:
+            raise BatchError(
+                "Exceeded the maximum calls(%d) in a single batch request."
+                % MAX_BATCH_LIMIT
+            )
+        if request_id is None:
+            request_id = self._new_id()
+        if request.resumable is not None:
+            raise BatchError("Media requests cannot be used in a batch request.")
+        if request_id in self._requests:
+            raise KeyError("A request with this ID already exists: %s" % request_id)
+        self._requests[request_id] = request
+        self._callbacks[request_id] = callback
+        self._order.append(request_id)
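
A hedged sketch of add() in practice; `service` is assumed to be a discovery-built Drive client and on_result a callback like the one sketched earlier. Explicit request_ids make the callback easier to correlate.

batch = service.new_batch_http_request(callback=on_result)
batch.add(service.files().get(fileId="A"), request_id="file-a")
batch.add(service.files().get(fileId="B"), request_id="file-b")
batch.execute()
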
 
-  def _execute(self, http, order, requests):
-    """Serialize batch request, send to server, process response.
+    def _execute(self, http, order, requests):
+        """Serialize batch request, send to server, process response.
 
     Args:
       http: httplib2.Http, an http object to be used to make the request with.
@@ -1356,66 +1413,69 @@
       httplib2.HttpLib2Error if a transport error has occurred.
       googleapiclient.errors.BatchError if the response is the wrong format.
     """
-    message = MIMEMultipart('mixed')
-    # Message should not write out it's own headers.
-    setattr(message, '_write_headers', lambda self: None)
+        message = MIMEMultipart("mixed")
+        # Message should not write out its own headers.
+        setattr(message, "_write_headers", lambda self: None)
 
-    # Add all the individual requests.
-    for request_id in order:
-      request = requests[request_id]
+        # Add all the individual requests.
+        for request_id in order:
+            request = requests[request_id]
 
-      msg = MIMENonMultipart('application', 'http')
-      msg['Content-Transfer-Encoding'] = 'binary'
-      msg['Content-ID'] = self._id_to_header(request_id)
+            msg = MIMENonMultipart("application", "http")
+            msg["Content-Transfer-Encoding"] = "binary"
+            msg["Content-ID"] = self._id_to_header(request_id)
 
-      body = self._serialize_request(request)
-      msg.set_payload(body)
-      message.attach(msg)
+            body = self._serialize_request(request)
+            msg.set_payload(body)
+            message.attach(msg)
 
-    # encode the body: note that we can't use `as_string`, because
-    # it plays games with `From ` lines.
-    fp = StringIO()
-    g = Generator(fp, mangle_from_=False)
-    g.flatten(message, unixfrom=False)
-    body = fp.getvalue()
+        # encode the body: note that we can't use `as_string`, because
+        # it plays games with `From ` lines.
+        fp = StringIO()
+        g = Generator(fp, mangle_from_=False)
+        g.flatten(message, unixfrom=False)
+        body = fp.getvalue()
 
-    headers = {}
-    headers['content-type'] = ('multipart/mixed; '
-                               'boundary="%s"') % message.get_boundary()
+        headers = {}
+        headers["content-type"] = (
+            "multipart/mixed; " 'boundary="%s"'
+        ) % message.get_boundary()
 
-    resp, content = http.request(self._batch_uri, method='POST', body=body,
-                                 headers=headers)
+        resp, content = http.request(
+            self._batch_uri, method="POST", body=body, headers=headers
+        )
 
-    if resp.status >= 300:
-      raise HttpError(resp, content, uri=self._batch_uri)
+        if resp.status >= 300:
+            raise HttpError(resp, content, uri=self._batch_uri)
 
-    # Prepend with a content-type header so FeedParser can handle it.
-    header = 'content-type: %s\r\n\r\n' % resp['content-type']
-    # PY3's FeedParser only accepts unicode. So we should decode content
-    # here, and encode each payload again.
-    if six.PY3:
-      content = content.decode('utf-8')
-    for_parser = header + content
+        # Prepend with a content-type header so FeedParser can handle it.
+        header = "content-type: %s\r\n\r\n" % resp["content-type"]
+        # PY3's FeedParser only accepts unicode. So we should decode content
+        # here, and encode each payload again.
+        if six.PY3:
+            content = content.decode("utf-8")
+        for_parser = header + content
 
-    parser = FeedParser()
-    parser.feed(for_parser)
-    mime_response = parser.close()
+        parser = FeedParser()
+        parser.feed(for_parser)
+        mime_response = parser.close()
 
-    if not mime_response.is_multipart():
-      raise BatchError("Response not in multipart/mixed format.", resp=resp,
-                       content=content)
+        if not mime_response.is_multipart():
+            raise BatchError(
+                "Response not in multipart/mixed format.", resp=resp, content=content
+            )
 
-    for part in mime_response.get_payload():
-      request_id = self._header_to_id(part['Content-ID'])
-      response, content = self._deserialize_response(part.get_payload())
-      # We encode content here to emulate normal http response.
-      if isinstance(content, six.text_type):
-        content = content.encode('utf-8')
-      self._responses[request_id] = (response, content)
+        for part in mime_response.get_payload():
+            request_id = self._header_to_id(part["Content-ID"])
+            response, content = self._deserialize_response(part.get_payload())
+            # We encode content here to emulate normal http response.
+            if isinstance(content, six.text_type):
+                content = content.encode("utf-8")
+            self._responses[request_id] = (response, content)
 
-  @util.positional(1)
-  def execute(self, http=None):
-    """Execute all the requests as a single batched HTTP request.
+    @util.positional(1)
+    def execute(self, http=None):
+        """Execute all the requests as a single batched HTTP request.
 
     Args:
       http: httplib2.Http, an http object to be used in place of the one the
@@ -1429,80 +1489,80 @@
       httplib2.HttpLib2Error if a transport error has occurred.
       googleapiclient.errors.BatchError if the response is the wrong format.
     """
-    # If we have no requests return
-    if len(self._order) == 0:
-      return None
+        # If we have no requests return
+        if len(self._order) == 0:
+            return None
 
-    # If http is not supplied use the first valid one given in the requests.
-    if http is None:
-      for request_id in self._order:
-        request = self._requests[request_id]
-        if request is not None:
-          http = request.http
-          break
+        # If http is not supplied use the first valid one given in the requests.
+        if http is None:
+            for request_id in self._order:
+                request = self._requests[request_id]
+                if request is not None:
+                    http = request.http
+                    break
 
-    if http is None:
-      raise ValueError("Missing a valid http object.")
+        if http is None:
+            raise ValueError("Missing a valid http object.")
 
-    # Special case for OAuth2Credentials-style objects which have not yet been
-    # refreshed with an initial access_token.
-    creds = _auth.get_credentials_from_http(http)
-    if creds is not None:
-      if not _auth.is_valid(creds):
-        LOGGER.info('Attempting refresh to obtain initial access_token')
-        _auth.refresh_credentials(creds)
+        # Special case for OAuth2Credentials-style objects which have not yet been
+        # refreshed with an initial access_token.
+        creds = _auth.get_credentials_from_http(http)
+        if creds is not None:
+            if not _auth.is_valid(creds):
+                LOGGER.info("Attempting refresh to obtain initial access_token")
+                _auth.refresh_credentials(creds)
 
-    self._execute(http, self._order, self._requests)
+        self._execute(http, self._order, self._requests)
 
-    # Loop over all the requests and check for 401s. For each 401 request the
-    # credentials should be refreshed and then sent again in a separate batch.
-    redo_requests = {}
-    redo_order = []
+        # Loop over all the requests and check for 401s. For each 401 request the
+        # credentials should be refreshed and then sent again in a separate batch.
+        redo_requests = {}
+        redo_order = []
 
-    for request_id in self._order:
-      resp, content = self._responses[request_id]
-      if resp['status'] == '401':
-        redo_order.append(request_id)
-        request = self._requests[request_id]
-        self._refresh_and_apply_credentials(request, http)
-        redo_requests[request_id] = request
+        for request_id in self._order:
+            resp, content = self._responses[request_id]
+            if resp["status"] == "401":
+                redo_order.append(request_id)
+                request = self._requests[request_id]
+                self._refresh_and_apply_credentials(request, http)
+                redo_requests[request_id] = request
 
-    if redo_requests:
-      self._execute(http, redo_order, redo_requests)
+        if redo_requests:
+            self._execute(http, redo_order, redo_requests)
 
-    # Now process all callbacks that are erroring, and raise an exception for
-    # ones that return a non-2xx response? Or add extra parameter to callback
-    # that contains an HttpError?
+        # Now process all callbacks that are erroring, and raise an exception for
+        # ones that return a non-2xx response? Or add extra parameter to callback
+        # that contains an HttpError?
 
-    for request_id in self._order:
-      resp, content = self._responses[request_id]
+        for request_id in self._order:
+            resp, content = self._responses[request_id]
 
-      request = self._requests[request_id]
-      callback = self._callbacks[request_id]
+            request = self._requests[request_id]
+            callback = self._callbacks[request_id]
 
-      response = None
-      exception = None
-      try:
-        if resp.status >= 300:
-          raise HttpError(resp, content, uri=request.uri)
-        response = request.postproc(resp, content)
-      except HttpError as e:
-        exception = e
+            response = None
+            exception = None
+            try:
+                if resp.status >= 300:
+                    raise HttpError(resp, content, uri=request.uri)
+                response = request.postproc(resp, content)
+            except HttpError as e:
+                exception = e
 
-      if callback is not None:
-        callback(request_id, response, exception)
-      if self._callback is not None:
-        self._callback(request_id, response, exception)
+            if callback is not None:
+                callback(request_id, response, exception)
+            if self._callback is not None:
+                self._callback(request_id, response, exception)
 
 
 class HttpRequestMock(object):
-  """Mock of HttpRequest.
+    """Mock of HttpRequest.
 
  Do not construct directly; instead use RequestMockBuilder.
   """
 
-  def __init__(self, resp, content, postproc):
-    """Constructor for HttpRequestMock
+    def __init__(self, resp, content, postproc):
+        """Constructor for HttpRequestMock
 
     Args:
       resp: httplib2.Response, the response to emulate coming from the request
@@ -1510,25 +1570,25 @@
       postproc: callable, the post processing function usually supplied by
                 the model class. See model.JsonModel.response() as an example.
     """
-    self.resp = resp
-    self.content = content
-    self.postproc = postproc
-    if resp is None:
-      self.resp = httplib2.Response({'status': 200, 'reason': 'OK'})
-    if 'reason' in self.resp:
-      self.resp.reason = self.resp['reason']
+        self.resp = resp
+        self.content = content
+        self.postproc = postproc
+        if resp is None:
+            self.resp = httplib2.Response({"status": 200, "reason": "OK"})
+        if "reason" in self.resp:
+            self.resp.reason = self.resp["reason"]
 
-  def execute(self, http=None):
-    """Execute the request.
+    def execute(self, http=None):
+        """Execute the request.
 
     Same behavior as HttpRequest.execute(), but the response is
     mocked and not really from an HTTP request/response.
     """
-    return self.postproc(self.resp, self.content)
+        return self.postproc(self.resp, self.content)
 
 
 class RequestMockBuilder(object):
-  """A simple mock of HttpRequest
+    """A simple mock of HttpRequest
 
     Pass in a dictionary to the constructor that maps request methodIds to
     tuples of (httplib2.Response, content, opt_expected_body) that should be
@@ -1554,8 +1614,8 @@
     For more details see the project wiki.
   """
 
-  def __init__(self, responses, check_unexpected=False):
-    """Constructor for RequestMockBuilder
+    def __init__(self, responses, check_unexpected=False):
+        """Constructor for RequestMockBuilder
 
     The constructed object should be a callable object
     that can replace the class HttpResponse.
@@ -1567,79 +1627,90 @@
     check_unexpected - A boolean setting whether or not UnexpectedMethodError
                        should be raised when no response is supplied for a method.
     """
-    self.responses = responses
-    self.check_unexpected = check_unexpected
+        self.responses = responses
+        self.check_unexpected = check_unexpected
 
-  def __call__(self, http, postproc, uri, method='GET', body=None,
-               headers=None, methodId=None, resumable=None):
-    """Implements the callable interface that discovery.build() expects
+    def __call__(
+        self,
+        http,
+        postproc,
+        uri,
+        method="GET",
+        body=None,
+        headers=None,
+        methodId=None,
+        resumable=None,
+    ):
+        """Implements the callable interface that discovery.build() expects
     of requestBuilder, which is to build an object compatible with
     HttpRequest.execute(). See that method for the description of the
     parameters and the expected response.
     """
-    if methodId in self.responses:
-      response = self.responses[methodId]
-      resp, content = response[:2]
-      if len(response) > 2:
-        # Test the body against the supplied expected_body.
-        expected_body = response[2]
-        if bool(expected_body) != bool(body):
-          # Not expecting a body and provided one
-          # or expecting a body and not provided one.
-          raise UnexpectedBodyError(expected_body, body)
-        if isinstance(expected_body, str):
-          expected_body = json.loads(expected_body)
-        body = json.loads(body)
-        if body != expected_body:
-          raise UnexpectedBodyError(expected_body, body)
-      return HttpRequestMock(resp, content, postproc)
-    elif self.check_unexpected:
-      raise UnexpectedMethodError(methodId=methodId)
-    else:
-      model = JsonModel(False)
-      return HttpRequestMock(None, '{}', model.response)
+        if methodId in self.responses:
+            response = self.responses[methodId]
+            resp, content = response[:2]
+            if len(response) > 2:
+                # Test the body against the supplied expected_body.
+                expected_body = response[2]
+                if bool(expected_body) != bool(body):
+                    # Not expecting a body and provided one
+                    # or expecting a body and not provided one.
+                    raise UnexpectedBodyError(expected_body, body)
+                if isinstance(expected_body, str):
+                    expected_body = json.loads(expected_body)
+                body = json.loads(body)
+                if body != expected_body:
+                    raise UnexpectedBodyError(expected_body, body)
+            return HttpRequestMock(resp, content, postproc)
+        elif self.check_unexpected:
+            raise UnexpectedMethodError(methodId=methodId)
+        else:
+            model = JsonModel(False)
+            return HttpRequestMock(None, "{}", model.response)
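
A hedged sketch of RequestMockBuilder wired into discovery.build(); the method id, canned body, and books-discovery.json (assumed to hold a copy of the Books API discovery document) are illustrative.

from googleapiclient.discovery import build
from googleapiclient.http import HttpMock, RequestMockBuilder

request_builder = RequestMockBuilder(
    {"books.volumes.list": (None, '{"items": []}')}
)
http = HttpMock("books-discovery.json", {"status": "200"})
books = build("books", "v1", http=http, requestBuilder=request_builder)
items = books.volumes().list(q="android").execute()   # -> {'items': []}
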
 
 
 class HttpMock(object):
-  """Mock of httplib2.Http"""
+    """Mock of httplib2.Http"""
 
-  def __init__(self, filename=None, headers=None):
-    """
+    def __init__(self, filename=None, headers=None):
+        """
     Args:
       filename: string, absolute filename to read response from
       headers: dict, header to return with response
     """
-    if headers is None:
-      headers = {'status': '200'}
-    if filename:
-      f = open(filename, 'rb')
-      self.data = f.read()
-      f.close()
-    else:
-      self.data = None
-    self.response_headers = headers
-    self.headers = None
-    self.uri = None
-    self.method = None
-    self.body = None
-    self.headers = None
+        if headers is None:
+            headers = {"status": "200"}
+        if filename:
+            f = open(filename, "rb")
+            self.data = f.read()
+            f.close()
+        else:
+            self.data = None
+        self.response_headers = headers
+        self.headers = None
+        self.uri = None
+        self.method = None
+        self.body = None
+        self.headers = None
 
-
-  def request(self, uri,
-              method='GET',
-              body=None,
-              headers=None,
-              redirections=1,
-              connection_type=None):
-    self.uri = uri
-    self.method = method
-    self.body = body
-    self.headers = headers
-    return httplib2.Response(self.response_headers), self.data
+    def request(
+        self,
+        uri,
+        method="GET",
+        body=None,
+        headers=None,
+        redirections=1,
+        connection_type=None,
+    ):
+        self.uri = uri
+        self.method = method
+        self.body = body
+        self.headers = headers
+        return httplib2.Response(self.response_headers), self.data
 
 
 class HttpMockSequence(object):
-  """Mock of httplib2.Http
+    """Mock of httplib2.Http
 
   Mocks a sequence of calls to request returning different responses for each
   call. Create an instance initialized with the desired response headers
@@ -1662,39 +1733,42 @@
   'echo_request_uri' means return the request uri in the response body
   """
 
-  def __init__(self, iterable):
-    """
+    def __init__(self, iterable):
+        """
     Args:
       iterable: iterable, a sequence of pairs of (headers, body)
     """
-    self._iterable = iterable
-    self.follow_redirects = True
+        self._iterable = iterable
+        self.follow_redirects = True
 
-  def request(self, uri,
-              method='GET',
-              body=None,
-              headers=None,
-              redirections=1,
-              connection_type=None):
-    resp, content = self._iterable.pop(0)
-    if content == 'echo_request_headers':
-      content = headers
-    elif content == 'echo_request_headers_as_json':
-      content = json.dumps(headers)
-    elif content == 'echo_request_body':
-      if hasattr(body, 'read'):
-        content = body.read()
-      else:
-        content = body
-    elif content == 'echo_request_uri':
-      content = uri
-    if isinstance(content, six.text_type):
-      content = content.encode('utf-8')
-    return httplib2.Response(resp), content
+    def request(
+        self,
+        uri,
+        method="GET",
+        body=None,
+        headers=None,
+        redirections=1,
+        connection_type=None,
+    ):
+        resp, content = self._iterable.pop(0)
+        if content == "echo_request_headers":
+            content = headers
+        elif content == "echo_request_headers_as_json":
+            content = json.dumps(headers)
+        elif content == "echo_request_body":
+            if hasattr(body, "read"):
+                content = body.read()
+            else:
+                content = body
+        elif content == "echo_request_uri":
+            content = uri
+        if isinstance(content, six.text_type):
+            content = content.encode("utf-8")
+        return httplib2.Response(resp), content
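
A short sketch of the echo behaviour described in the class docstring; the header name is arbitrary.

from googleapiclient.http import HttpMockSequence

http = HttpMockSequence([({"status": "200"}, "echo_request_headers_as_json")])
resp, content = http.request("http://example.com/", headers={"x-test": "1"})
# content == b'{"x-test": "1"}'
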
 
 
 def set_user_agent(http, user_agent):
-  """Set the user-agent on every request.
+    """Set the user-agent on every request.
 
   Args:
      http - An instance of httplib2.Http
@@ -1712,29 +1786,40 @@
   Most of the time the user-agent will be set during auth; this is for the rare
   cases where you are accessing an unauthenticated endpoint.
   """
-  request_orig = http.request
+    request_orig = http.request
 
-  # The closure that will replace 'httplib2.Http.request'.
-  def new_request(uri, method='GET', body=None, headers=None,
-                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                  connection_type=None):
-    """Modify the request headers to add the user-agent."""
-    if headers is None:
-      headers = {}
-    if 'user-agent' in headers:
-      headers['user-agent'] = user_agent + ' ' + headers['user-agent']
-    else:
-      headers['user-agent'] = user_agent
-    resp, content = request_orig(uri, method=method, body=body, headers=headers,
-                        redirections=redirections, connection_type=connection_type)
-    return resp, content
+    # The closure that will replace 'httplib2.Http.request'.
+    def new_request(
+        uri,
+        method="GET",
+        body=None,
+        headers=None,
+        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+        connection_type=None,
+    ):
+        """Modify the request headers to add the user-agent."""
+        if headers is None:
+            headers = {}
+        if "user-agent" in headers:
+            headers["user-agent"] = user_agent + " " + headers["user-agent"]
+        else:
+            headers["user-agent"] = user_agent
+        resp, content = request_orig(
+            uri,
+            method=method,
+            body=body,
+            headers=headers,
+            redirections=redirections,
+            connection_type=connection_type,
+        )
+        return resp, content
 
-  http.request = new_request
-  return http
+    http.request = new_request
+    return http
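
A minimal sketch of set_user_agent(); the agent string and URI are placeholders.

import httplib2
from googleapiclient.http import set_user_agent

http = set_user_agent(httplib2.Http(), "my-sample/1.0")
resp, content = http.request("https://www.googleapis.com/discovery/v1/apis")
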
 
 
 def tunnel_patch(http):
-  """Tunnel PATCH requests over POST.
+    """Tunnel PATCH requests over POST.
   Args:
      http - An instance of httplib2.Http
          or something that acts like it.
@@ -1751,31 +1836,43 @@
   Apply this last if you are using OAuth 1.0, as changing the method
   will result in a different signature.
   """
-  request_orig = http.request
+    request_orig = http.request
 
-  # The closure that will replace 'httplib2.Http.request'.
-  def new_request(uri, method='GET', body=None, headers=None,
-                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                  connection_type=None):
-    """Modify the request headers to add the user-agent."""
-    if headers is None:
-      headers = {}
-    if method == 'PATCH':
-      if 'oauth_token' in headers.get('authorization', ''):
-        LOGGER.warning(
-            'OAuth 1.0 request made with Credentials after tunnel_patch.')
-      headers['x-http-method-override'] = "PATCH"
-      method = 'POST'
-    resp, content = request_orig(uri, method=method, body=body, headers=headers,
-                        redirections=redirections, connection_type=connection_type)
-    return resp, content
+    # The closure that will replace 'httplib2.Http.request'.
+    def new_request(
+        uri,
+        method="GET",
+        body=None,
+        headers=None,
+        redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+        connection_type=None,
+    ):
+        """Modify the request headers to add the user-agent."""
+        if headers is None:
+            headers = {}
+        if method == "PATCH":
+            if "oauth_token" in headers.get("authorization", ""):
+                LOGGER.warning(
+                    "OAuth 1.0 request made with Credentials after tunnel_patch."
+                )
+            headers["x-http-method-override"] = "PATCH"
+            method = "POST"
+        resp, content = request_orig(
+            uri,
+            method=method,
+            body=body,
+            headers=headers,
+            redirections=redirections,
+            connection_type=connection_type,
+        )
+        return resp, content
 
-  http.request = new_request
-  return http
+    http.request = new_request
+    return http
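
A minimal sketch of tunnel_patch(), applied after set_user_agent() per the note above about OAuth 1.0 signatures; names are placeholders.

import httplib2
from googleapiclient.http import set_user_agent, tunnel_patch

http = tunnel_patch(set_user_agent(httplib2.Http(), "my-sample/1.0"))
# PATCH calls made through this object are sent as POST with
# an x-http-method-override: PATCH header.
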
 
 
 def build_http():
-  """Builds httplib2.Http object
+    """Builds httplib2.Http object
 
   Returns:
   An httplib2.Http object, which is used to make HTTP requests, and which has a timeout set by default.
@@ -1785,8 +1882,8 @@
 
   before interacting with this method.
   """
-  if socket.getdefaulttimeout() is not None:
-    http_timeout = socket.getdefaulttimeout()
-  else:
-    http_timeout = DEFAULT_HTTP_TIMEOUT_SEC
-  return httplib2.Http(timeout=http_timeout)
+    if socket.getdefaulttimeout() is not None:
+        http_timeout = socket.getdefaulttimeout()
+    else:
+        http_timeout = DEFAULT_HTTP_TIMEOUT_SEC
+    return httplib2.Http(timeout=http_timeout)
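
A hedged sketch of how the socket default timeout interacts with build_http(); the 120-second value is arbitrary.

import socket
from googleapiclient.http import build_http

socket.setdefaulttimeout(120)   # picked up by build_http() if set
http = build_http()             # otherwise DEFAULT_HTTP_TIMEOUT_SEC is used
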
diff --git a/googleapiclient/mimeparse.py b/googleapiclient/mimeparse.py
index bc9ad09..6051628 100644
--- a/googleapiclient/mimeparse.py
+++ b/googleapiclient/mimeparse.py
@@ -25,11 +25,11 @@
 from functools import reduce
 import six
 
-__version__ = '0.1.3'
-__author__ = 'Joe Gregorio'
-__email__ = 'joe@bitworking.org'
-__license__ = 'MIT License'
-__credits__ = ''
+__version__ = "0.1.3"
+__author__ = "Joe Gregorio"
+__email__ = "joe@bitworking.org"
+__license__ = "MIT License"
+__credits__ = ""
 
 
 def parse_mime_type(mime_type):
@@ -42,16 +42,16 @@
 
        ('application', 'xhtml', {'q': '0.5'})
        """
-    parts = mime_type.split(';')
-    params = dict([tuple([s.strip() for s in param.split('=', 1)])\
-            for param in parts[1:]
-                  ])
+    parts = mime_type.split(";")
+    params = dict(
+        [tuple([s.strip() for s in param.split("=", 1)]) for param in parts[1:]]
+    )
     full_type = parts[0].strip()
     # Java URLConnection class sends an Accept header that includes a
     # single '*'. Turn it into a legal wildcard.
-    if full_type == '*':
-        full_type = '*/*'
-    (type, subtype) = full_type.split('/')
+    if full_type == "*":
+        full_type = "*/*"
+    (type, subtype) = full_type.split("/")
 
     return (type.strip(), subtype.strip(), params)
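
For example (illustrative call, mirroring the docstring above):

from googleapiclient import mimeparse

mimeparse.parse_mime_type("application/xhtml;q=0.5")
# -> ('application', 'xhtml', {'q': '0.5'})
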
 
@@ -71,10 +71,14 @@
     necessary.
     """
     (type, subtype, params) = parse_mime_type(range)
-    if 'q' not in params or not params['q'] or \
-            not float(params['q']) or float(params['q']) > 1\
-            or float(params['q']) < 0:
-        params['q'] = '1'
+    if (
+        "q" not in params
+        or not params["q"]
+        or not float(params["q"])
+        or float(params["q"]) > 1
+        or float(params["q"]) < 0
+    ):
+        params["q"] = "1"
 
     return (type, subtype, params)
 
@@ -90,25 +94,28 @@
     """
     best_fitness = -1
     best_fit_q = 0
-    (target_type, target_subtype, target_params) =\
-            parse_media_range(mime_type)
+    (target_type, target_subtype, target_params) = parse_media_range(mime_type)
     for (type, subtype, params) in parsed_ranges:
-        type_match = (type == target_type or\
-                      type == '*' or\
-                      target_type == '*')
-        subtype_match = (subtype == target_subtype or\
-                         subtype == '*' or\
-                         target_subtype == '*')
+        type_match = type == target_type or type == "*" or target_type == "*"
+        subtype_match = (
+            subtype == target_subtype or subtype == "*" or target_subtype == "*"
+        )
         if type_match and subtype_match:
-            param_matches = reduce(lambda x, y: x + y, [1 for (key, value) in \
-                    six.iteritems(target_params) if key != 'q' and \
-                    key in params and value == params[key]], 0)
+            param_matches = reduce(
+                lambda x, y: x + y,
+                [
+                    1
+                    for (key, value) in six.iteritems(target_params)
+                    if key != "q" and key in params and value == params[key]
+                ],
+                0,
+            )
             fitness = (type == target_type) and 100 or 0
             fitness += (subtype == target_subtype) and 10 or 0
             fitness += param_matches
             if fitness > best_fitness:
                 best_fitness = fitness
-                best_fit_q = params['q']
+                best_fit_q = params["q"]
 
     return best_fitness, float(best_fit_q)
 
@@ -137,7 +144,7 @@
     0.7
 
     """
-    parsed_ranges = [parse_media_range(r) for r in ranges.split(',')]
+    parsed_ranges = [parse_media_range(r) for r in ranges.split(",")]
 
     return quality_parsed(mime_type, parsed_ranges)
 
@@ -156,17 +163,18 @@
                    'text/*;q=0.5,*/*; q=0.1')
     'text/xml'
     """
-    split_header = _filter_blank(header.split(','))
+    split_header = _filter_blank(header.split(","))
     parsed_header = [parse_media_range(r) for r in split_header]
     weighted_matches = []
     pos = 0
     for mime_type in supported:
-        weighted_matches.append((fitness_and_quality_parsed(mime_type,
-                                 parsed_header), pos, mime_type))
+        weighted_matches.append(
+            (fitness_and_quality_parsed(mime_type, parsed_header), pos, mime_type)
+        )
         pos += 1
     weighted_matches.sort()
 
-    return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ''
+    return weighted_matches[-1][0][1] and weighted_matches[-1][2] or ""
 
 
 def _filter_blank(i):
diff --git a/googleapiclient/model.py b/googleapiclient/model.py
index 7ab80e9..0449a92 100644
--- a/googleapiclient/model.py
+++ b/googleapiclient/model.py
@@ -22,7 +22,7 @@
 from __future__ import absolute_import
 import six
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 import json
 import logging
@@ -41,19 +41,19 @@
 
 
 def _abstract():
-  raise NotImplementedError('You need to override this function')
+    raise NotImplementedError("You need to override this function")
 
 
 class Model(object):
-  """Model base class.
+    """Model base class.
 
   All Model classes should implement this interface.
   The Model serializes and de-serializes between a wire
   format such as JSON and a Python object representation.
   """
 
-  def request(self, headers, path_params, query_params, body_value):
-    """Updates outgoing requests with a serialized body.
+    def request(self, headers, path_params, query_params, body_value):
+        """Updates outgoing requests with a serialized body.
 
     Args:
       headers: dict, request headers
@@ -69,10 +69,10 @@
       query: string, query part of the request URI
       body: string, the body serialized in the desired wire format.
     """
-    _abstract()
+        _abstract()
 
-  def response(self, resp, content):
-    """Convert the response wire format into a Python object.
+    def response(self, resp, content):
+        """Convert the response wire format into a Python object.
 
     Args:
       resp: httplib2.Response, the HTTP response headers and status
@@ -84,11 +84,11 @@
     Raises:
       googleapiclient.errors.HttpError if a non 2xx response is received.
     """
-    _abstract()
+        _abstract()
 
 
 class BaseModel(Model):
-  """Base model class.
+    """Base model class.
 
   Subclasses should provide implementations for the "serialize" and
   "deserialize" methods, as well as values for the following class attributes.
@@ -101,29 +101,29 @@
     alt_param: The value to supply as the "alt" query parameter for requests.
   """
 
-  accept = None
-  content_type = None
-  no_content_response = None
-  alt_param = None
+    accept = None
+    content_type = None
+    no_content_response = None
+    alt_param = None
 
-  def _log_request(self, headers, path_params, query, body):
-    """Logs debugging information about the request if requested."""
-    if dump_request_response:
-      LOGGER.info('--request-start--')
-      LOGGER.info('-headers-start-')
-      for h, v in six.iteritems(headers):
-        LOGGER.info('%s: %s', h, v)
-      LOGGER.info('-headers-end-')
-      LOGGER.info('-path-parameters-start-')
-      for h, v in six.iteritems(path_params):
-        LOGGER.info('%s: %s', h, v)
-      LOGGER.info('-path-parameters-end-')
-      LOGGER.info('body: %s', body)
-      LOGGER.info('query: %s', query)
-      LOGGER.info('--request-end--')
+    def _log_request(self, headers, path_params, query, body):
+        """Logs debugging information about the request if requested."""
+        if dump_request_response:
+            LOGGER.info("--request-start--")
+            LOGGER.info("-headers-start-")
+            for h, v in six.iteritems(headers):
+                LOGGER.info("%s: %s", h, v)
+            LOGGER.info("-headers-end-")
+            LOGGER.info("-path-parameters-start-")
+            for h, v in six.iteritems(path_params):
+                LOGGER.info("%s: %s", h, v)
+            LOGGER.info("-path-parameters-end-")
+            LOGGER.info("body: %s", body)
+            LOGGER.info("query: %s", query)
+            LOGGER.info("--request-end--")
 
-  def request(self, headers, path_params, query_params, body_value):
-    """Updates outgoing requests with a serialized body.
+    def request(self, headers, path_params, query_params, body_value):
+        """Updates outgoing requests with a serialized body.
 
     Args:
       headers: dict, request headers
@@ -139,28 +139,31 @@
       query: string, query part of the request URI
       body: string, the body serialized as JSON
     """
-    query = self._build_query(query_params)
-    headers['accept'] = self.accept
-    headers['accept-encoding'] = 'gzip, deflate'
-    if 'user-agent' in headers:
-      headers['user-agent'] += ' '
-    else:
-      headers['user-agent'] = ''
-    headers['user-agent'] += '(gzip)'
-    if 'x-goog-api-client' in headers:
-      headers['x-goog-api-client'] += ' '
-    else:
-      headers['x-goog-api-client'] = ''
-    headers['x-goog-api-client'] += 'gdcl/%s gl-python/%s' % (__version__, _PY_VERSION)
+        query = self._build_query(query_params)
+        headers["accept"] = self.accept
+        headers["accept-encoding"] = "gzip, deflate"
+        if "user-agent" in headers:
+            headers["user-agent"] += " "
+        else:
+            headers["user-agent"] = ""
+        headers["user-agent"] += "(gzip)"
+        if "x-goog-api-client" in headers:
+            headers["x-goog-api-client"] += " "
+        else:
+            headers["x-goog-api-client"] = ""
+        headers["x-goog-api-client"] += "gdcl/%s gl-python/%s" % (
+            __version__,
+            _PY_VERSION,
+        )
 
-    if body_value is not None:
-      headers['content-type'] = self.content_type
-      body_value = self.serialize(body_value)
-    self._log_request(headers, path_params, query, body_value)
-    return (headers, path_params, query, body_value)
+        if body_value is not None:
+            headers["content-type"] = self.content_type
+            body_value = self.serialize(body_value)
+        self._log_request(headers, path_params, query, body_value)
+        return (headers, path_params, query, body_value)
 
-  def _build_query(self, params):
-    """Builds a query string.
+    def _build_query(self, params):
+        """Builds a query string.
 
     Args:
       params: dict, the query parameters
@@ -168,32 +171,32 @@
     Returns:
       The query parameters properly encoded into an HTTP URI query string.
     """
-    if self.alt_param is not None:
-      params.update({'alt': self.alt_param})
-    astuples = []
-    for key, value in six.iteritems(params):
-      if type(value) == type([]):
-        for x in value:
-          x = x.encode('utf-8')
-          astuples.append((key, x))
-      else:
-        if isinstance(value, six.text_type) and callable(value.encode):
-          value = value.encode('utf-8')
-        astuples.append((key, value))
-    return '?' + urlencode(astuples)
+        if self.alt_param is not None:
+            params.update({"alt": self.alt_param})
+        astuples = []
+        for key, value in six.iteritems(params):
+            if type(value) == type([]):
+                for x in value:
+                    x = x.encode("utf-8")
+                    astuples.append((key, x))
+            else:
+                if isinstance(value, six.text_type) and callable(value.encode):
+                    value = value.encode("utf-8")
+                astuples.append((key, value))
+        return "?" + urlencode(astuples)
 
-  def _log_response(self, resp, content):
-    """Logs debugging information about the response if requested."""
-    if dump_request_response:
-      LOGGER.info('--response-start--')
-      for h, v in six.iteritems(resp):
-        LOGGER.info('%s: %s', h, v)
-      if content:
-        LOGGER.info(content)
-      LOGGER.info('--response-end--')
+    def _log_response(self, resp, content):
+        """Logs debugging information about the response if requested."""
+        if dump_request_response:
+            LOGGER.info("--response-start--")
+            for h, v in six.iteritems(resp):
+                LOGGER.info("%s: %s", h, v)
+            if content:
+                LOGGER.info(content)
+            LOGGER.info("--response-end--")
 
-  def response(self, resp, content):
-    """Convert the response wire format into a Python object.
+    def response(self, resp, content):
+        """Convert the response wire format into a Python object.
 
     Args:
       resp: httplib2.Response, the HTTP response headers and status
@@ -205,21 +208,21 @@
     Raises:
       googleapiclient.errors.HttpError if a non 2xx response is received.
     """
-    self._log_response(resp, content)
-    # Error handling is TBD, for example, do we retry
-    # for some operation/error combinations?
-    if resp.status < 300:
-      if resp.status == 204:
-        # A 204: No Content response should be treated differently
-        # to all the other success states
-        return self.no_content_response
-      return self.deserialize(content)
-    else:
-      LOGGER.debug('Content from bad request was: %s' % content)
-      raise HttpError(resp, content)
+        self._log_response(resp, content)
+        # Error handling is TBD, for example, do we retry
+        # for some operation/error combinations?
+        if resp.status < 300:
+            if resp.status == 204:
+                # A 204: No Content response should be treated differently
+                # to all the other success states
+                return self.no_content_response
+            return self.deserialize(content)
+        else:
+            LOGGER.debug("Content from bad request was: %s" % content)
+            raise HttpError(resp, content)
 
-  def serialize(self, body_value):
-    """Perform the actual Python object serialization.
+    def serialize(self, body_value):
+        """Perform the actual Python object serialization.
 
     Args:
       body_value: object, the request body as a Python object.
@@ -227,10 +230,10 @@
     Returns:
       string, the body in serialized form.
     """
-    _abstract()
+        _abstract()
 
-  def deserialize(self, content):
-    """Perform the actual deserialization from response string to Python
+    def deserialize(self, content):
+        """Perform the actual deserialization from response string to Python
     object.
 
     Args:
@@ -239,98 +242,105 @@
     Returns:
       The body de-serialized as a Python object.
     """
-    _abstract()
+        _abstract()
 
 
 class JsonModel(BaseModel):
-  """Model class for JSON.
+    """Model class for JSON.
 
   Serializes and de-serializes between JSON and the Python
   object representation of HTTP request and response bodies.
   """
-  accept = 'application/json'
-  content_type = 'application/json'
-  alt_param = 'json'
 
-  def __init__(self, data_wrapper=False):
-    """Construct a JsonModel.
+    accept = "application/json"
+    content_type = "application/json"
+    alt_param = "json"
+
+    def __init__(self, data_wrapper=False):
+        """Construct a JsonModel.
 
     Args:
       data_wrapper: boolean, wrap requests and responses in a data wrapper
     """
-    self._data_wrapper = data_wrapper
+        self._data_wrapper = data_wrapper
 
-  def serialize(self, body_value):
-    if (isinstance(body_value, dict) and 'data' not in body_value and
-        self._data_wrapper):
-      body_value = {'data': body_value}
-    return json.dumps(body_value)
+    def serialize(self, body_value):
+        if (
+            isinstance(body_value, dict)
+            and "data" not in body_value
+            and self._data_wrapper
+        ):
+            body_value = {"data": body_value}
+        return json.dumps(body_value)
 
-  def deserialize(self, content):
-    try:
-        content = content.decode('utf-8')
-    except AttributeError:
-        pass
-    body = json.loads(content)
-    if self._data_wrapper and isinstance(body, dict) and 'data' in body:
-      body = body['data']
-    return body
+    def deserialize(self, content):
+        try:
+            content = content.decode("utf-8")
+        except AttributeError:
+            pass
+        body = json.loads(content)
+        if self._data_wrapper and isinstance(body, dict) and "data" in body:
+            body = body["data"]
+        return body
 
-  @property
-  def no_content_response(self):
-    return {}
+    @property
+    def no_content_response(self):
+        return {}
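
A quick sketch of the data_wrapper behaviour; values are illustrative.

from googleapiclient.model import JsonModel

wrapped = JsonModel(data_wrapper=True)
wrapped.serialize({"title": "hello"})                # '{"data": {"title": "hello"}}'
wrapped.deserialize('{"data": {"title": "hello"}}')  # {'title': 'hello'}
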
 
 
 class RawModel(JsonModel):
-  """Model class for requests that don't return JSON.
+    """Model class for requests that don't return JSON.
 
   Serializes and de-serializes between JSON and the Python
   object representation of HTTP request, and returns the raw bytes
   of the response body.
   """
-  accept = '*/*'
-  content_type = 'application/json'
-  alt_param = None
 
-  def deserialize(self, content):
-    return content
+    accept = "*/*"
+    content_type = "application/json"
+    alt_param = None
 
-  @property
-  def no_content_response(self):
-    return ''
+    def deserialize(self, content):
+        return content
+
+    @property
+    def no_content_response(self):
+        return ""
 
 
 class MediaModel(JsonModel):
-  """Model class for requests that return Media.
+    """Model class for requests that return Media.
 
   Serializes and de-serializes between JSON and the Python
   object representation of HTTP request, and returns the raw bytes
   of the response body.
   """
-  accept = '*/*'
-  content_type = 'application/json'
-  alt_param = 'media'
 
-  def deserialize(self, content):
-    return content
+    accept = "*/*"
+    content_type = "application/json"
+    alt_param = "media"
 
-  @property
-  def no_content_response(self):
-    return ''
+    def deserialize(self, content):
+        return content
+
+    @property
+    def no_content_response(self):
+        return ""
 
 
 class ProtocolBufferModel(BaseModel):
-  """Model class for protocol buffers.
+    """Model class for protocol buffers.
 
   Serializes and de-serializes the binary protocol buffer sent in the HTTP
   request and response bodies.
   """
-  accept = 'application/x-protobuf'
-  content_type = 'application/x-protobuf'
-  alt_param = 'proto'
 
-  def __init__(self, protocol_buffer):
-    """Constructs a ProtocolBufferModel.
+    accept = "application/x-protobuf"
+    content_type = "application/x-protobuf"
+    alt_param = "proto"
+
+    def __init__(self, protocol_buffer):
+        """Constructs a ProtocolBufferModel.
 
     The serialized protocol buffer returned in an HTTP response will be
     de-serialized using the given protocol buffer class.
@@ -339,21 +349,21 @@
       protocol_buffer: The protocol buffer class used to de-serialize a
       response from the API.
     """
-    self._protocol_buffer = protocol_buffer
+        self._protocol_buffer = protocol_buffer
 
-  def serialize(self, body_value):
-    return body_value.SerializeToString()
+    def serialize(self, body_value):
+        return body_value.SerializeToString()
 
-  def deserialize(self, content):
-    return self._protocol_buffer.FromString(content)
+    def deserialize(self, content):
+        return self._protocol_buffer.FromString(content)
 
-  @property
-  def no_content_response(self):
-    return self._protocol_buffer()
+    @property
+    def no_content_response(self):
+        return self._protocol_buffer()
 
 
 def makepatch(original, modified):
-  """Create a patch object.
+    """Create a patch object.
 
   Some methods support PATCH, an efficient way to send updates to a resource.
   This method allows the easy construction of patch bodies by looking at the
@@ -373,24 +383,24 @@
     service.activities.patch(postid=postid, userid=userid,
       body=makepatch(original, item)).execute()
   """
-  patch = {}
-  for key, original_value in six.iteritems(original):
-    modified_value = modified.get(key, None)
-    if modified_value is None:
-      # Use None to signal that the element is deleted
-      patch[key] = None
-    elif original_value != modified_value:
-      if type(original_value) == type({}):
-        # Recursively descend objects
-        patch[key] = makepatch(original_value, modified_value)
-      else:
-        # In the case of simple types or arrays we just replace
-        patch[key] = modified_value
-    else:
-      # Don't add anything to patch if there's no change
-      pass
-  for key in modified:
-    if key not in original:
-      patch[key] = modified[key]
+    patch = {}
+    for key, original_value in six.iteritems(original):
+        modified_value = modified.get(key, None)
+        if modified_value is None:
+            # Use None to signal that the element is deleted
+            patch[key] = None
+        elif original_value != modified_value:
+            if type(original_value) == type({}):
+                # Recursively descend objects
+                patch[key] = makepatch(original_value, modified_value)
+            else:
+                # In the case of simple types or arrays we just replace
+                patch[key] = modified_value
+        else:
+            # Don't add anything to patch if there's no change
+            pass
+    for key in modified:
+        if key not in original:
+            patch[key] = modified[key]
 
-  return patch
+    return patch
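
A quick usage sketch for makepatch() as reformatted above (the function lives in
googleapiclient/model.py). The record values below are illustrative; only keys
that changed, were added, or were removed end up in the patch body, and removed
keys are mapped to None so the server deletes them:

from googleapiclient.model import makepatch

original = {
    "title": "Hello",
    "tags": ["a", "b"],
    "meta": {"views": 1, "stars": 0},
}
modified = {
    "title": "Hello, world",
    "tags": ["a", "b"],
    "meta": {"views": 2, "stars": 0},
    "draft": False,
}

patch = makepatch(original, modified)
# Unchanged keys ("tags") are dropped, nested dicts are diffed recursively,
# and brand-new keys ("draft") are copied over verbatim:
#   {"title": "Hello, world", "meta": {"views": 2}, "draft": False}
print(patch)
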
diff --git a/googleapiclient/sample_tools.py b/googleapiclient/sample_tools.py
index 5cb7a06..2b6a21b 100644
--- a/googleapiclient/sample_tools.py
+++ b/googleapiclient/sample_tools.py
@@ -18,8 +18,8 @@
 """
 from __future__ import absolute_import
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
-__all__ = ['init']
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
+__all__ = ["init"]
 
 
 import argparse
@@ -28,8 +28,11 @@
 from googleapiclient import discovery
 from googleapiclient.http import build_http
 
-def init(argv, name, version, doc, filename, scope=None, parents=[], discovery_filename=None):
-  """A common initialization routine for samples.
+
+def init(
+    argv, name, version, doc, filename, scope=None, parents=[], discovery_filename=None
+):
+    """A common initialization routine for samples.
 
   Many of the sample applications do the same initialization, which has now
   been consolidated into this function. This function uses common idioms found
@@ -52,55 +55,56 @@
     A tuple of (service, flags), where service is the service object and flags
     is the parsed command-line flags.
   """
-  try:
-      from oauth2client import client
-      from oauth2client import file
-      from oauth2client import tools
-  except ImportError:
-      raise ImportError('googleapiclient.sample_tools requires oauth2client. Please install oauth2client and try again.')
+    try:
+        from oauth2client import client
+        from oauth2client import file
+        from oauth2client import tools
+    except ImportError:
+        raise ImportError(
+            "googleapiclient.sample_tools requires oauth2client. Please install oauth2client and try again."
+        )
 
-  if scope is None:
-    scope = 'https://www.googleapis.com/auth/' + name
+    if scope is None:
+        scope = "https://www.googleapis.com/auth/" + name
 
-  # Parse command-line arguments.
-  parent_parsers = [tools.argparser]
-  parent_parsers.extend(parents)
-  parser = argparse.ArgumentParser(
-      description=doc,
-      formatter_class=argparse.RawDescriptionHelpFormatter,
-      parents=parent_parsers)
-  flags = parser.parse_args(argv[1:])
+    # Parse command-line arguments.
+    parent_parsers = [tools.argparser]
+    parent_parsers.extend(parents)
+    parser = argparse.ArgumentParser(
+        description=doc,
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        parents=parent_parsers,
+    )
+    flags = parser.parse_args(argv[1:])
 
-  # Name of a file containing the OAuth 2.0 information for this
-  # application, including client_id and client_secret, which are found
-  # on the API Access tab on the Google APIs
-  # Console <http://code.google.com/apis/console>.
-  client_secrets = os.path.join(os.path.dirname(filename),
-                                'client_secrets.json')
+    # Name of a file containing the OAuth 2.0 information for this
+    # application, including client_id and client_secret, which are found
+    # on the API Access tab on the Google APIs
+    # Console <http://code.google.com/apis/console>.
+    client_secrets = os.path.join(os.path.dirname(filename), "client_secrets.json")
 
-  # Set up a Flow object to be used if we need to authenticate.
-  flow = client.flow_from_clientsecrets(client_secrets,
-      scope=scope,
-      message=tools.message_if_missing(client_secrets))
+    # Set up a Flow object to be used if we need to authenticate.
+    flow = client.flow_from_clientsecrets(
+        client_secrets, scope=scope, message=tools.message_if_missing(client_secrets)
+    )
 
-  # Prepare credentials, and authorize HTTP object with them.
-  # If the credentials don't exist or are invalid run through the native client
-  # flow. The Storage object will ensure that if successful the good
-  # credentials will get written back to a file.
-  storage = file.Storage(name + '.dat')
-  credentials = storage.get()
-  if credentials is None or credentials.invalid:
-    credentials = tools.run_flow(flow, storage, flags)
-  http = credentials.authorize(http=build_http())
+    # Prepare credentials, and authorize HTTP object with them.
+    # If the credentials don't exist or are invalid run through the native client
+    # flow. The Storage object will ensure that if successful the good
+    # credentials will get written back to a file.
+    storage = file.Storage(name + ".dat")
+    credentials = storage.get()
+    if credentials is None or credentials.invalid:
+        credentials = tools.run_flow(flow, storage, flags)
+    http = credentials.authorize(http=build_http())
 
-  if discovery_filename is None:
-    # Construct a service object via the discovery service.
-    service = discovery.build(name, version, http=http)
-  else:
-    # Construct a service object using a local discovery document file.
-    with open(discovery_filename) as discovery_file:
-      service = discovery.build_from_document(
-          discovery_file.read(),
-          base='https://www.googleapis.com/',
-          http=http)
-  return (service, flags)
+    if discovery_filename is None:
+        # Construct a service object via the discovery service.
+        service = discovery.build(name, version, http=http)
+    else:
+        # Construct a service object using a local discovery document file.
+        with open(discovery_filename) as discovery_file:
+            service = discovery.build_from_document(
+                discovery_file.read(), base="https://www.googleapis.com/", http=http
+            )
+    return (service, flags)
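
The init() helper above is what the repository's samples call to parse flags, run
the oauth2client flow, and build a service object. Below is a minimal,
illustrative caller; the API name, scope, and request are placeholders patterned
on the old plus sample and are not part of this diff:

import sys

from googleapiclient import sample_tools


def main(argv):
    # client_secrets.json is expected next to this file (the `filename` argument);
    # credentials are cached in "<name>.dat" by the Storage object used in init().
    service, flags = sample_tools.init(
        argv,
        "plus",
        "v1",
        __doc__,
        __file__,
        scope="https://www.googleapis.com/auth/plus.me",
    )
    # Any discovery-built call works from here; this one mirrors the old plus sample.
    activities = service.activities().list(userId="me", collection="public").execute()
    print(activities)


if __name__ == "__main__":
    main(sys.argv)
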
diff --git a/googleapiclient/schema.py b/googleapiclient/schema.py
index 10d4a1b..022cb0a 100644
--- a/googleapiclient/schema.py
+++ b/googleapiclient/schema.py
@@ -61,7 +61,7 @@
 
 # TODO(jcgregorio) support format, enum, minimum, maximum
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 import copy
 
@@ -69,23 +69,23 @@
 
 
 class Schemas(object):
-  """Schemas for an API."""
+    """Schemas for an API."""
 
-  def __init__(self, discovery):
-    """Constructor.
+    def __init__(self, discovery):
+        """Constructor.
 
     Args:
       discovery: object, Deserialized discovery document from which we pull
         out the named schema.
     """
-    self.schemas = discovery.get('schemas', {})
+        self.schemas = discovery.get("schemas", {})
 
-    # Cache of pretty printed schemas.
-    self.pretty = {}
+        # Cache of pretty printed schemas.
+        self.pretty = {}
 
-  @util.positional(2)
-  def _prettyPrintByName(self, name, seen=None, dent=0):
-    """Get pretty printed object prototype from the schema name.
+    @util.positional(2)
+    def _prettyPrintByName(self, name, seen=None, dent=0):
+        """Get pretty printed object prototype from the schema name.
 
     Args:
       name: string, Name of schema in the discovery document.
@@ -96,24 +96,25 @@
       string, A string that contains a prototype object with
         comments that conforms to the given schema.
     """
-    if seen is None:
-      seen = []
+        if seen is None:
+            seen = []
 
-    if name in seen:
-      # Do not fall into an infinite loop over recursive definitions.
-      return '# Object with schema name: %s' % name
-    seen.append(name)
+        if name in seen:
+            # Do not fall into an infinite loop over recursive definitions.
+            return "# Object with schema name: %s" % name
+        seen.append(name)
 
-    if name not in self.pretty:
-      self.pretty[name] = _SchemaToStruct(self.schemas[name],
-          seen, dent=dent).to_str(self._prettyPrintByName)
+        if name not in self.pretty:
+            self.pretty[name] = _SchemaToStruct(
+                self.schemas[name], seen, dent=dent
+            ).to_str(self._prettyPrintByName)
 
-    seen.pop()
+        seen.pop()
 
-    return self.pretty[name]
+        return self.pretty[name]
 
-  def prettyPrintByName(self, name):
-    """Get pretty printed object prototype from the schema name.
+    def prettyPrintByName(self, name):
+        """Get pretty printed object prototype from the schema name.
 
     Args:
       name: string, Name of schema in the discovery document.
@@ -122,12 +123,12 @@
       string, A string that contains a prototype object with
         comments that conforms to the given schema.
     """
-    # Return with trailing comma and newline removed.
-    return self._prettyPrintByName(name, seen=[], dent=1)[:-2]
+        # Return with trailing comma and newline removed.
+        return self._prettyPrintByName(name, seen=[], dent=1)[:-2]
 
-  @util.positional(2)
-  def _prettyPrintSchema(self, schema, seen=None, dent=0):
-    """Get pretty printed object prototype of schema.
+    @util.positional(2)
+    def _prettyPrintSchema(self, schema, seen=None, dent=0):
+        """Get pretty printed object prototype of schema.
 
     Args:
       schema: object, Parsed JSON schema.
@@ -138,13 +139,13 @@
       string, A string that contains a prototype object with
         comments that conforms to the given schema.
     """
-    if seen is None:
-      seen = []
+        if seen is None:
+            seen = []
 
-    return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)
+        return _SchemaToStruct(schema, seen, dent=dent).to_str(self._prettyPrintByName)
 
-  def prettyPrintSchema(self, schema):
-    """Get pretty printed object prototype of schema.
+    def prettyPrintSchema(self, schema):
+        """Get pretty printed object prototype of schema.
 
     Args:
       schema: object, Parsed JSON schema.
@@ -153,25 +154,25 @@
       string, A string that contains a prototype object with
         comments that conforms to the given schema.
     """
-    # Return with trailing comma and newline removed.
-    return self._prettyPrintSchema(schema, dent=1)[:-2]
+        # Return with trailing comma and newline removed.
+        return self._prettyPrintSchema(schema, dent=1)[:-2]
 
-  def get(self, name, default=None):
-    """Get deserialized JSON schema from the schema name.
+    def get(self, name, default=None):
+        """Get deserialized JSON schema from the schema name.
 
     Args:
       name: string, Schema name.
       default: object, return value if name not found.
     """
-    return self.schemas.get(name, default)
+        return self.schemas.get(name, default)
 
 
 class _SchemaToStruct(object):
-  """Convert schema to a prototype object."""
+    """Convert schema to a prototype object."""
 
-  @util.positional(3)
-  def __init__(self, schema, seen, dent=0):
-    """Constructor.
+    @util.positional(3)
+    def __init__(self, schema, seen, dent=0):
+        """Constructor.
 
     Args:
       schema: object, Parsed JSON schema.
@@ -179,67 +180,67 @@
         handle recursive definitions.
       dent: int, Initial indentation depth.
     """
-    # The result of this parsing kept as list of strings.
-    self.value = []
+        # The result of this parsing kept as list of strings.
+        self.value = []
 
-    # The final value of the parsing.
-    self.string = None
+        # The final value of the parsing.
+        self.string = None
 
-    # The parsed JSON schema.
-    self.schema = schema
+        # The parsed JSON schema.
+        self.schema = schema
 
-    # Indentation level.
-    self.dent = dent
+        # Indentation level.
+        self.dent = dent
 
-    # Method that when called returns a prototype object for the schema with
-    # the given name.
-    self.from_cache = None
+        # Method that when called returns a prototype object for the schema with
+        # the given name.
+        self.from_cache = None
 
-    # List of names of schema already seen while parsing.
-    self.seen = seen
+        # List of names of schema already seen while parsing.
+        self.seen = seen
 
-  def emit(self, text):
-    """Add text as a line to the output.
+    def emit(self, text):
+        """Add text as a line to the output.
 
     Args:
       text: string, Text to output.
     """
-    self.value.extend(["  " * self.dent, text, '\n'])
+        self.value.extend(["  " * self.dent, text, "\n"])
 
-  def emitBegin(self, text):
-    """Add text to the output, but with no line terminator.
+    def emitBegin(self, text):
+        """Add text to the output, but with no line terminator.
 
     Args:
       text: string, Text to output.
       """
-    self.value.extend(["  " * self.dent, text])
+        self.value.extend(["  " * self.dent, text])
 
-  def emitEnd(self, text, comment):
-    """Add text and comment to the output with line terminator.
+    def emitEnd(self, text, comment):
+        """Add text and comment to the output with line terminator.
 
     Args:
       text: string, Text to output.
       comment: string, Python comment.
     """
-    if comment:
-      divider = '\n' + '  ' * (self.dent + 2) + '# '
-      lines = comment.splitlines()
-      lines = [x.rstrip() for x in lines]
-      comment = divider.join(lines)
-      self.value.extend([text, ' # ', comment, '\n'])
-    else:
-      self.value.extend([text, '\n'])
+        if comment:
+            divider = "\n" + "  " * (self.dent + 2) + "# "
+            lines = comment.splitlines()
+            lines = [x.rstrip() for x in lines]
+            comment = divider.join(lines)
+            self.value.extend([text, " # ", comment, "\n"])
+        else:
+            self.value.extend([text, "\n"])
 
-  def indent(self):
-    """Increase indentation level."""
-    self.dent += 1
+    def indent(self):
+        """Increase indentation level."""
+        self.dent += 1
 
-  def undent(self):
-    """Decrease indentation level."""
-    self.dent -= 1
+    def undent(self):
+        """Decrease indentation level."""
+        self.dent -= 1
 
-  def _to_str_impl(self, schema):
-    """Prototype object based on the schema, in Python code with comments.
+    def _to_str_impl(self, schema):
+        """Prototype object based on the schema, in Python code with comments.
 
     Args:
       schema: object, Parsed JSON schema file.
@@ -247,59 +248,59 @@
     Returns:
       Prototype object based on the schema, in Python code with comments.
     """
-    stype = schema.get('type')
-    if stype == 'object':
-      self.emitEnd('{', schema.get('description', ''))
-      self.indent()
-      if 'properties' in schema:
-        for pname, pschema in six.iteritems(schema.get('properties', {})):
-          self.emitBegin('"%s": ' % pname)
-          self._to_str_impl(pschema)
-      elif 'additionalProperties' in schema:
-        self.emitBegin('"a_key": ')
-        self._to_str_impl(schema['additionalProperties'])
-      self.undent()
-      self.emit('},')
-    elif '$ref' in schema:
-      schemaName = schema['$ref']
-      description = schema.get('description', '')
-      s = self.from_cache(schemaName, seen=self.seen)
-      parts = s.splitlines()
-      self.emitEnd(parts[0], description)
-      for line in parts[1:]:
-        self.emit(line.rstrip())
-    elif stype == 'boolean':
-      value = schema.get('default', 'True or False')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'string':
-      value = schema.get('default', 'A String')
-      self.emitEnd('"%s",' % str(value), schema.get('description', ''))
-    elif stype == 'integer':
-      value = schema.get('default', '42')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'number':
-      value = schema.get('default', '3.14')
-      self.emitEnd('%s,' % str(value), schema.get('description', ''))
-    elif stype == 'null':
-      self.emitEnd('None,', schema.get('description', ''))
-    elif stype == 'any':
-      self.emitEnd('"",', schema.get('description', ''))
-    elif stype == 'array':
-      self.emitEnd('[', schema.get('description'))
-      self.indent()
-      self.emitBegin('')
-      self._to_str_impl(schema['items'])
-      self.undent()
-      self.emit('],')
-    else:
-      self.emit('Unknown type! %s' % stype)
-      self.emitEnd('', '')
+        stype = schema.get("type")
+        if stype == "object":
+            self.emitEnd("{", schema.get("description", ""))
+            self.indent()
+            if "properties" in schema:
+                for pname, pschema in six.iteritems(schema.get("properties", {})):
+                    self.emitBegin('"%s": ' % pname)
+                    self._to_str_impl(pschema)
+            elif "additionalProperties" in schema:
+                self.emitBegin('"a_key": ')
+                self._to_str_impl(schema["additionalProperties"])
+            self.undent()
+            self.emit("},")
+        elif "$ref" in schema:
+            schemaName = schema["$ref"]
+            description = schema.get("description", "")
+            s = self.from_cache(schemaName, seen=self.seen)
+            parts = s.splitlines()
+            self.emitEnd(parts[0], description)
+            for line in parts[1:]:
+                self.emit(line.rstrip())
+        elif stype == "boolean":
+            value = schema.get("default", "True or False")
+            self.emitEnd("%s," % str(value), schema.get("description", ""))
+        elif stype == "string":
+            value = schema.get("default", "A String")
+            self.emitEnd('"%s",' % str(value), schema.get("description", ""))
+        elif stype == "integer":
+            value = schema.get("default", "42")
+            self.emitEnd("%s," % str(value), schema.get("description", ""))
+        elif stype == "number":
+            value = schema.get("default", "3.14")
+            self.emitEnd("%s," % str(value), schema.get("description", ""))
+        elif stype == "null":
+            self.emitEnd("None,", schema.get("description", ""))
+        elif stype == "any":
+            self.emitEnd('"",', schema.get("description", ""))
+        elif stype == "array":
+            self.emitEnd("[", schema.get("description"))
+            self.indent()
+            self.emitBegin("")
+            self._to_str_impl(schema["items"])
+            self.undent()
+            self.emit("],")
+        else:
+            self.emit("Unknown type! %s" % stype)
+            self.emitEnd("", "")
 
-    self.string = ''.join(self.value)
-    return self.string
+        self.string = "".join(self.value)
+        return self.string
 
-  def to_str(self, from_cache):
-    """Prototype object based on the schema, in Python code with comments.
+    def to_str(self, from_cache):
+        """Prototype object based on the schema, in Python code with comments.
 
     Args:
       from_cache: callable(name, seen), Callable that retrieves an object
@@ -310,5 +311,5 @@
       Prototype object based on the schema, in Python code with comments.
       The lines of the code will all be properly indented.
     """
-    self.from_cache = from_cache
-    return self._to_str_impl(self.schema)
+        self.from_cache = from_cache
+        return self._to_str_impl(self.schema)
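
A small sketch of what the Schemas/_SchemaToStruct pair above produces. The
discovery fragment is hand-written for illustration; the printed result is the
kind of commented object prototype used when generating the library's reference
docs:

from googleapiclient.schema import Schemas

discovery = {
    "schemas": {
        "Animal": {
            "type": "object",
            "properties": {
                "name": {"type": "string", "description": "Display name."},
                "legs": {"type": "integer", "description": "Number of legs."},
            },
        }
    }
}

schemas = Schemas(discovery)
print(schemas.prettyPrintByName("Animal"))
# Prints a commented prototype along the lines of:
#   {
#     "name": "A String", # Display name.
#     "legs": 42, # Number of legs.
#   }
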
diff --git a/samples-index.py b/samples-index.py
index 086886a..9c28405 100644
--- a/samples-index.py
+++ b/samples-index.py
@@ -36,35 +36,34 @@
 import os
 import re
 
-BASE_HG_URI = ('http://code.google.com/p/google-api-python-client/source/'
-               'browse/#hg')
+BASE_HG_URI = "http://code.google.com/p/google-api-python-client/source/" "browse/#hg"
 
-http = httplib2.Http('.cache')
-r, c =  http.request('https://www.googleapis.com/discovery/v1/apis')
+http = httplib2.Http(".cache")
+r, c = http.request("https://www.googleapis.com/discovery/v1/apis")
 if r.status != 200:
-  raise ValueError('Received non-200 response when retrieving Discovery.')
+    raise ValueError("Received non-200 response when retrieving Discovery.")
 
 # Dictionary mapping api names to their discovery description.
 DIRECTORY = {}
-for item in json.loads(c)['items']:
-  if item['preferred']:
-    DIRECTORY[item['name']] = item
+for item in json.loads(c)["items"]:
+    if item["preferred"]:
+        DIRECTORY[item["name"]] = item
 
 # A list of valid keywords. Should not be taken as complete, add to
 # this list as needed.
 KEYWORDS = {
-    'appengine': 'Google App Engine',
-    'oauth2': 'OAuth 2.0',
-    'cmdline': 'Command-line',
-    'django': 'Django',
-    'threading': 'Threading',
-    'pagination': 'Pagination',
-    'media': 'Media Upload and Download'
-    }
+    "appengine": "Google App Engine",
+    "oauth2": "OAuth 2.0",
+    "cmdline": "Command-line",
+    "django": "Django",
+    "threading": "Threading",
+    "pagination": "Pagination",
+    "media": "Media Upload and Download",
+}
 
 
 def get_lines(name, lines):
-  """Return lines that begin with name.
+    """Return lines that begin with name.
 
   Lines are expected to look like:
 
@@ -77,25 +76,25 @@
   Returns:
     List of values in the lines that match.
   """
-  retval = []
-  matches = itertools.ifilter(lambda x: x.startswith(name + ':'), lines)
-  for line in matches:
-    retval.extend(line[len(name)+1:].split())
-  return retval
+    retval = []
+    matches = itertools.ifilter(lambda x: x.startswith(name + ":"), lines)
+    for line in matches:
+        retval.extend(line[len(name) + 1 :].split())
+    return retval
 
 
 def wiki_escape(s):
-  """Detect WikiSyntax (i.e. InterCaps, a.k.a. CamelCase) and escape it."""
-  ret = []
-  for word in s.split():
-    if re.match(r'[A-Z]+[a-z]+[A-Z]', word):
-      word = '!%s' % word
-    ret.append(word)
-  return ' '.join(ret)
+    """Detect WikiSyntax (i.e. InterCaps, a.k.a. CamelCase) and escape it."""
+    ret = []
+    for word in s.split():
+        if re.match(r"[A-Z]+[a-z]+[A-Z]", word):
+            word = "!%s" % word
+        ret.append(word)
+    return " ".join(ret)
 
 
 def context_from_sample(api, keywords, dirname, desc, uri):
-  """Return info for expanding a sample into a template.
+    """Return info for expanding a sample into a template.
 
   Args:
     api: string, name of api.
@@ -107,30 +106,30 @@
   Returns:
     A dictionary of values useful for template expansion.
   """
-  if uri is None:
-    uri = BASE_HG_URI + dirname.replace('/', '%2F')
-  else:
-    uri = ''.join(uri)
-  if api is None:
-    return None
-  else:
-    entry = DIRECTORY[api]
-    context = {
-        'api': api,
-        'version': entry['version'],
-        'api_name': wiki_escape(entry.get('title', entry.get('description'))),
-        'api_desc': wiki_escape(entry['description']),
-        'api_icon': entry['icons']['x32'],
-        'keywords': keywords,
-        'dir': dirname,
-        'uri': uri,
-        'desc': wiki_escape(desc),
+    if uri is None:
+        uri = BASE_HG_URI + dirname.replace("/", "%2F")
+    else:
+        uri = "".join(uri)
+    if api is None:
+        return None
+    else:
+        entry = DIRECTORY[api]
+        context = {
+            "api": api,
+            "version": entry["version"],
+            "api_name": wiki_escape(entry.get("title", entry.get("description"))),
+            "api_desc": wiki_escape(entry["description"]),
+            "api_icon": entry["icons"]["x32"],
+            "keywords": keywords,
+            "dir": dirname,
+            "uri": uri,
+            "desc": wiki_escape(desc),
         }
-    return context
+        return context
 
 
 def keyword_context_from_sample(keywords, dirname, desc, uri):
-  """Return info for expanding a sample into a template.
+    """Return info for expanding a sample into a template.
 
   Sample may not be about a specific api.
 
@@ -143,21 +142,21 @@
   Returns:
     A dictionary of values useful for template expansion.
   """
-  if uri is None:
-    uri = BASE_HG_URI + dirname.replace('/', '%2F')
-  else:
-    uri = ''.join(uri)
-  context = {
-      'keywords': keywords,
-      'dir': dirname,
-      'uri': uri,
-      'desc': wiki_escape(desc),
-      }
-  return context
+    if uri is None:
+        uri = BASE_HG_URI + dirname.replace("/", "%2F")
+    else:
+        uri = "".join(uri)
+    context = {
+        "keywords": keywords,
+        "dir": dirname,
+        "uri": uri,
+        "desc": wiki_escape(desc),
+    }
+    return context
 
 
 def scan_readme_files(dirname):
-  """Scans all subdirs of dirname for README files.
+    """Scans all subdirs of dirname for README files.
 
   Args:
     dirname: string, name of directory to walk.
@@ -166,82 +165,89 @@
     (samples, keyword_set): list of information about all samples, the union
       of all keywords found.
   """
-  samples = []
-  keyword_set = set()
+    samples = []
+    keyword_set = set()
 
-  for root, dirs, files in os.walk(dirname):
-    if 'README' in files:
-      filename = os.path.join(root, 'README')
-      with open(filename, 'r') as f:
-        content = f.read()
-        lines = content.splitlines()
-        desc = ' '.join(itertools.takewhile(lambda x: x, lines))
-        api = get_lines('api', lines)
-        keywords = get_lines('keywords', lines)
-        uri = get_lines('uri', lines)
-        if not uri:
-          uri = None
+    for root, dirs, files in os.walk(dirname):
+        if "README" in files:
+            filename = os.path.join(root, "README")
+            with open(filename, "r") as f:
+                content = f.read()
+                lines = content.splitlines()
+                desc = " ".join(itertools.takewhile(lambda x: x, lines))
+                api = get_lines("api", lines)
+                keywords = get_lines("keywords", lines)
+                uri = get_lines("uri", lines)
+                if not uri:
+                    uri = None
 
-        for k in keywords:
-          if k not in KEYWORDS:
-            raise ValueError(
-                '%s is not a valid keyword in file %s' % (k, filename))
-        keyword_set.update(keywords)
-        if not api:
-          api = [None]
-        samples.append((api[0], keywords, root[1:], desc, uri))
+                for k in keywords:
+                    if k not in KEYWORDS:
+                        raise ValueError(
+                            "%s is not a valid keyword in file %s" % (k, filename)
+                        )
+                keyword_set.update(keywords)
+                if not api:
+                    api = [None]
+                samples.append((api[0], keywords, root[1:], desc, uri))
 
-  samples.sort()
+    samples.sort()
 
-  return samples, keyword_set
+    return samples, keyword_set
 
 
 def main():
-  # Get all the information we need out of the README files in the samples.
-  samples, keyword_set = scan_readme_files('./samples')
+    # Get all the information we need out of the README files in the samples.
+    samples, keyword_set = scan_readme_files("./samples")
 
-  # Now build a wiki page with all that information. Accumulate all the
-  # information as strings to be concatenated when we're done.
-  page = ['<wiki:toc max_depth="3" />\n= Samples By API =\n']
+    # Now build a wiki page with all that information. Accumulate all the
+    # information as strings to be concatenated when we're done.
+    page = ['<wiki:toc max_depth="3" />\n= Samples By API =\n']
 
-  # All the samples, grouped by API.
-  current_api = None
-  for api, keywords, dirname, desc, uri in samples:
-    context = context_from_sample(api, keywords, dirname, desc, uri)
-    if context is None:
-      continue
-    if current_api != api:
-      page.append("""
+    # All the samples, grouped by API.
+    current_api = None
+    for api, keywords, dirname, desc, uri in samples:
+        context = context_from_sample(api, keywords, dirname, desc, uri)
+        if context is None:
+            continue
+        if current_api != api:
+            page.append(
+                """
 === %(api_icon)s %(api_name)s ===
 
 %(api_desc)s
 
 Documentation for the %(api_name)s in [https://google-api-client-libraries.appspot.com/documentation/%(api)s/%(version)s/python/latest/ PyDoc]
 
-""" % context)
-      current_api = api
+"""
+                % context
+            )
+            current_api = api
 
-    page.append('|| [%(uri)s %(dir)s] || %(desc)s ||\n' % context)
+        page.append("|| [%(uri)s %(dir)s] || %(desc)s ||\n" % context)
 
-  # Now group the samples by keywords.
-  for keyword, keyword_name in KEYWORDS.iteritems():
-    if keyword not in keyword_set:
-      continue
-    page.append('\n= %s Samples =\n\n' % keyword_name)
-    page.append('<table border=1 cellspacing=0 cellpadding=8px>\n')
-    for _, keywords, dirname, desc, uri in samples:
-      context = keyword_context_from_sample(keywords, dirname, desc, uri)
-      if keyword not in keywords:
-        continue
-      page.append("""
+    # Now group the samples by keywords.
+    for keyword, keyword_name in KEYWORDS.iteritems():
+        if keyword not in keyword_set:
+            continue
+        page.append("\n= %s Samples =\n\n" % keyword_name)
+        page.append("<table border=1 cellspacing=0 cellpadding=8px>\n")
+        for _, keywords, dirname, desc, uri in samples:
+            context = keyword_context_from_sample(keywords, dirname, desc, uri)
+            if keyword not in keywords:
+                continue
+            page.append(
+                """
 <tr>
   <td>[%(uri)s %(dir)s] </td>
   <td> %(desc)s </td>
-</tr>""" % context)
-    page.append('</table>\n')
+</tr>"""
+                % context
+            )
+        page.append("</table>\n")
 
-  print(''.join(page))
+    print("".join(page))
 
 
-if __name__ == '__main__':
-  main()
+if __name__ == "__main__":
+    main()
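
For context on the README convention that get_lines() and scan_readme_files()
above depend on: each sample's README starts with a free-form description and
then carries "name: value" metadata lines. The README text below is made up, and
the helper is a portable re-implementation of get_lines() for illustration (the
script itself still uses the Python-2-only itertools.ifilter and dict.iteritems):

readme = """\
A short description of the sample,
possibly spanning several lines.

api: plus
keywords: cmdline oauth2
"""

lines = readme.splitlines()
# The leading non-empty lines become the sample description in scan_readme_files().
description = " ".join(line for line in lines[: lines.index("")])


def get_values(name, lines):
    # Same contract as get_lines(): gather whitespace-separated values from
    # every line that starts with "<name>:".
    values = []
    for line in lines:
        if line.startswith(name + ":"):
            values.extend(line[len(name) + 1 :].split())
    return values


print(description)                    # A short description of the sample, possibly spanning several lines.
print(get_values("api", lines))       # ['plus']
print(get_values("keywords", lines))  # ['cmdline', 'oauth2']
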
diff --git a/setup.py b/setup.py
index 89c32a9..643df2e 100644
--- a/setup.py
+++ b/setup.py
@@ -22,34 +22,29 @@
 import sys
 
 if sys.version_info < (2, 7):
-  print('google-api-python-client requires python version >= 2.7.',
-        file=sys.stderr)
-  sys.exit(1)
+    print("google-api-python-client requires python version >= 2.7.", file=sys.stderr)
+    sys.exit(1)
 if (3, 1) <= sys.version_info < (3, 4):
-  print('google-api-python-client requires python3 version >= 3.4.',
-        file=sys.stderr)
-  sys.exit(1)
+    print("google-api-python-client requires python3 version >= 3.4.", file=sys.stderr)
+    sys.exit(1)
 
 from setuptools import setup
 
-packages = [
-    'apiclient',
-    'googleapiclient',
-    'googleapiclient/discovery_cache',
-]
+packages = ["apiclient", "googleapiclient", "googleapiclient/discovery_cache"]
 
 install_requires = [
-    'httplib2>=0.9.2,<1dev',
-    'google-auth>=1.4.1',
-    'google-auth-httplib2>=0.0.3',
-    'six>=1.6.1,<2dev',
-    'uritemplate>=3.0.0,<4dev',
+    "httplib2>=0.9.2,<1dev",
+    "google-auth>=1.4.1",
+    "google-auth-httplib2>=0.0.3",
+    "six>=1.6.1,<2dev",
+    "uritemplate>=3.0.0,<4dev",
 ]
 
 long_desc = """The Google API Client for Python is a client library for
 accessing the Plus, Moderator, and many other Google APIs."""
 
 import googleapiclient
+
 version = googleapiclient.__version__
 
 setup(
@@ -60,23 +55,23 @@
     author="Google Inc.",
     url="http://github.com/google/google-api-python-client/",
     install_requires=install_requires,
-    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
+    python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
     packages=packages,
     package_data={},
     license="Apache 2.0",
     keywords="google api client",
     classifiers=[
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
-        'Programming Language :: Python :: 3.6',
-        'Programming Language :: Python :: 3.7',
-        'Development Status :: 5 - Production/Stable',
-        'Intended Audience :: Developers',
-        'License :: OSI Approved :: Apache Software License',
-        'Operating System :: OS Independent',
-        'Topic :: Internet :: WWW/HTTP',
+        "Programming Language :: Python :: 2",
+        "Programming Language :: Python :: 2.7",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.4",
+        "Programming Language :: Python :: 3.5",
+        "Programming Language :: Python :: 3.6",
+        "Programming Language :: Python :: 3.7",
+        "Development Status :: 5 - Production/Stable",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Operating System :: OS Independent",
+        "Topic :: Internet :: WWW/HTTP",
     ],
 )
diff --git a/tests/__init__.py b/tests/__init__.py
index d88353f..b4f509d 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -13,12 +13,12 @@
 """Test Package set up."""
 from __future__ import absolute_import
 
-__author__ = 'afshar@google.com (Ali Afshar)'
+__author__ = "afshar@google.com (Ali Afshar)"
 
 
 from googleapiclient import _helpers as util
 
 
 def setup_package():
-  """Run on testing package."""
-  util.positional_parameters_enforcement = 'EXCEPTION'
+    """Run on testing package."""
+    util.positional_parameters_enforcement = "EXCEPTION"
diff --git a/tests/test__auth.py b/tests/test__auth.py
index d43fe68..b65ed81 100644
--- a/tests/test__auth.py
+++ b/tests/test__auth.py
@@ -33,9 +33,8 @@
         _auth.HAS_OAUTH2CLIENT = True
 
     def test_default_credentials(self):
-        with mock.patch('google.auth.default', autospec=True) as default:
-            default.return_value = (
-                mock.sentinel.credentials, mock.sentinel.project)
+        with mock.patch("google.auth.default", autospec=True) as default:
+            default.return_value = (mock.sentinel.credentials, mock.sentinel.project)
 
             credentials = _auth.default_credentials()
 
@@ -50,8 +49,8 @@
 
     def test_with_scopes_scoped(self):
         class CredentialsWithScopes(
-                google.auth.credentials.Credentials,
-                google.auth.credentials.Scoped):
+            google.auth.credentials.Credentials, google.auth.credentials.Scoped
+        ):
             pass
 
         credentials = mock.Mock(spec=CredentialsWithScopes)
@@ -68,9 +67,7 @@
 
         authorized_http = _auth.authorized_http(credentials)
 
-        self.assertIsInstance(
-            authorized_http,
-            google_auth_httplib2.AuthorizedHttp)
+        self.assertIsInstance(authorized_http, google_auth_httplib2.AuthorizedHttp)
         self.assertEqual(authorized_http.credentials, credentials)
         self.assertIsInstance(authorized_http.http, httplib2.Http)
         self.assertIsInstance(authorized_http.http.timeout, int)
@@ -88,7 +85,8 @@
 
     def test_default_credentials(self):
         default_patch = mock.patch(
-            'oauth2client.client.GoogleCredentials.get_application_default')
+            "oauth2client.client.GoogleCredentials.get_application_default"
+        )
 
         with default_patch as default:
             default.return_value = mock.sentinel.credentials
@@ -128,7 +126,6 @@
 
 
 class TestAuthWithoutAuth(unittest.TestCase):
-
     def setUp(self):
         _auth.HAS_GOOGLE_AUTH = False
         _auth.HAS_OAUTH2CLIENT = False
diff --git a/tests/test__helpers.py b/tests/test__helpers.py
index e33ea71..90c75ef 100644
--- a/tests/test__helpers.py
+++ b/tests/test__helpers.py
@@ -25,10 +25,8 @@
 
 
 class PositionalTests(unittest.TestCase):
-
     def test_usage(self):
-        _helpers.positional_parameters_enforcement = (
-            _helpers.POSITIONAL_EXCEPTION)
+        _helpers.positional_parameters_enforcement = _helpers.POSITIONAL_EXCEPTION
 
         # 1 positional arg, 1 keyword-only arg.
         @_helpers.positional(1)
@@ -60,10 +58,9 @@
         with self.assertRaises(TypeError):
             function3(1, 2)
 
-    @mock.patch('googleapiclient._helpers.logger')
+    @mock.patch("googleapiclient._helpers.logger")
     def test_enforcement_warning(self, mock_logger):
-        _helpers.positional_parameters_enforcement = (
-            _helpers.POSITIONAL_WARNING)
+        _helpers.positional_parameters_enforcement = _helpers.POSITIONAL_WARNING
 
         @_helpers.positional(1)
         def function(pos, kwonly=None):
@@ -72,7 +69,7 @@
         self.assertTrue(function(1, 2))
         self.assertTrue(mock_logger.warning.called)
 
-    @mock.patch('googleapiclient._helpers.logger')
+    @mock.patch("googleapiclient._helpers.logger")
     def test_enforcement_ignore(self, mock_logger):
         _helpers.positional_parameters_enforcement = _helpers.POSITIONAL_IGNORE
 
@@ -85,24 +82,22 @@
 
 
 class AddQueryParameterTests(unittest.TestCase):
-
     def test__add_query_parameter(self):
+        self.assertEqual(_helpers._add_query_parameter("/action", "a", None), "/action")
         self.assertEqual(
-            _helpers._add_query_parameter('/action', 'a', None),
-            '/action')
+            _helpers._add_query_parameter("/action", "a", "b"), "/action?a=b"
+        )
         self.assertEqual(
-            _helpers._add_query_parameter('/action', 'a', 'b'),
-            '/action?a=b')
-        self.assertEqual(
-            _helpers._add_query_parameter('/action?a=b', 'a', 'c'),
-            '/action?a=c')
+            _helpers._add_query_parameter("/action?a=b", "a", "c"), "/action?a=c"
+        )
         # Order is non-deterministic.
         self.assertIn(
-            _helpers._add_query_parameter('/action?a=b', 'c', 'd'),
-            ['/action?a=b&c=d', '/action?c=d&a=b'])
+            _helpers._add_query_parameter("/action?a=b", "c", "d"),
+            ["/action?a=b&c=d", "/action?c=d&a=b"],
+        )
         self.assertEqual(
-            _helpers._add_query_parameter('/action', 'a', ' ='),
-            '/action?a=+%3D')
+            _helpers._add_query_parameter("/action", "a", " ="), "/action?a=+%3D"
+        )
 
 
 def assertUrisEqual(testcase, expected, actual):
@@ -123,39 +118,37 @@
 
 
 class Test_update_query_params(unittest.TestCase):
-
     def test_update_query_params_no_params(self):
-        uri = 'http://www.google.com'
-        updated = _helpers.update_query_params(uri, {'a': 'b'})
-        self.assertEqual(updated, uri + '?a=b')
+        uri = "http://www.google.com"
+        updated = _helpers.update_query_params(uri, {"a": "b"})
+        self.assertEqual(updated, uri + "?a=b")
 
     def test_update_query_params_existing_params(self):
-        uri = 'http://www.google.com?x=y'
-        updated = _helpers.update_query_params(uri, {'a': 'b', 'c': 'd&'})
-        hardcoded_update = uri + '&a=b&c=d%26'
+        uri = "http://www.google.com?x=y"
+        updated = _helpers.update_query_params(uri, {"a": "b", "c": "d&"})
+        hardcoded_update = uri + "&a=b&c=d%26"
         assertUrisEqual(self, updated, hardcoded_update)
 
     def test_update_query_params_replace_param(self):
-        base_uri = 'http://www.google.com'
-        uri = base_uri + '?x=a'
-        updated = _helpers.update_query_params(uri, {'x': 'b', 'y': 'c'})
-        hardcoded_update = base_uri + '?x=b&y=c'
+        base_uri = "http://www.google.com"
+        uri = base_uri + "?x=a"
+        updated = _helpers.update_query_params(uri, {"x": "b", "y": "c"})
+        hardcoded_update = base_uri + "?x=b&y=c"
         assertUrisEqual(self, updated, hardcoded_update)
 
     def test_update_query_params_repeated_params(self):
-        uri = 'http://www.google.com?x=a&x=b'
+        uri = "http://www.google.com?x=a&x=b"
         with self.assertRaises(ValueError):
-            _helpers.update_query_params(uri, {'a': 'c'})
+            _helpers.update_query_params(uri, {"a": "c"})
 
 
 class Test_parse_unique_urlencoded(unittest.TestCase):
-
     def test_without_repeats(self):
-        content = 'a=b&c=d'
+        content = "a=b&c=d"
         result = _helpers.parse_unique_urlencoded(content)
-        self.assertEqual(result, {'a': 'b', 'c': 'd'})
+        self.assertEqual(result, {"a": "b", "c": "d"})
 
     def test_with_repeats(self):
-        content = 'a=b&a=d'
+        content = "a=b&a=d"
         with self.assertRaises(ValueError):
             _helpers.parse_unique_urlencoded(content)
diff --git a/tests/test_channel.py b/tests/test_channel.py
index 4141353..8a46bf0 100644
--- a/tests/test_channel.py
+++ b/tests/test_channel.py
@@ -1,7 +1,7 @@
 """Notification channels tests."""
 from __future__ import absolute_import
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 import unittest2 as unittest
 import datetime
@@ -11,115 +11,128 @@
 
 
 class TestChannel(unittest.TestCase):
-  def test_basic(self):
-    ch = channel.Channel('web_hook', 'myid', 'mytoken',
-                         'http://example.org/callback',
-                         expiration=0,
-                         params={'extra': 'info'},
-                         resource_id='the_resource_id',
-                         resource_uri='http://example.com/resource_1')
+    def test_basic(self):
+        ch = channel.Channel(
+            "web_hook",
+            "myid",
+            "mytoken",
+            "http://example.org/callback",
+            expiration=0,
+            params={"extra": "info"},
+            resource_id="the_resource_id",
+            resource_uri="http://example.com/resource_1",
+        )
 
-    # Converting to a body.
-    body = ch.body()
-    self.assertEqual('http://example.org/callback', body['address'])
-    self.assertEqual('myid', body['id'])
-    self.assertEqual('missing', body.get('expiration', 'missing'))
-    self.assertEqual('info', body['params']['extra'])
-    self.assertEqual('the_resource_id', body['resourceId'])
-    self.assertEqual('http://example.com/resource_1', body['resourceUri'])
-    self.assertEqual('web_hook', body['type'])
+        # Converting to a body.
+        body = ch.body()
+        self.assertEqual("http://example.org/callback", body["address"])
+        self.assertEqual("myid", body["id"])
+        self.assertEqual("missing", body.get("expiration", "missing"))
+        self.assertEqual("info", body["params"]["extra"])
+        self.assertEqual("the_resource_id", body["resourceId"])
+        self.assertEqual("http://example.com/resource_1", body["resourceUri"])
+        self.assertEqual("web_hook", body["type"])
 
-    # Converting to a body with expiration set.
-    ch.expiration = 1
-    body = ch.body()
-    self.assertEqual(1, body.get('expiration', 'missing'))
+        # Converting to a body with expiration set.
+        ch.expiration = 1
+        body = ch.body()
+        self.assertEqual(1, body.get("expiration", "missing"))
 
-    # Converting to a body after updating with a response body.
-    ch.update({
-        'resourceId': 'updated_res_id',
-        'resourceUri': 'updated_res_uri',
-        'some_random_parameter': 2,
-        })
+        # Converting to a body after updating with a response body.
+        ch.update(
+            {
+                "resourceId": "updated_res_id",
+                "resourceUri": "updated_res_uri",
+                "some_random_parameter": 2,
+            }
+        )
 
-    body = ch.body()
-    self.assertEqual('http://example.org/callback', body['address'])
-    self.assertEqual('myid', body['id'])
-    self.assertEqual(1, body.get('expiration', 'missing'))
-    self.assertEqual('info', body['params']['extra'])
-    self.assertEqual('updated_res_id', body['resourceId'])
-    self.assertEqual('updated_res_uri', body['resourceUri'])
-    self.assertEqual('web_hook', body['type'])
+        body = ch.body()
+        self.assertEqual("http://example.org/callback", body["address"])
+        self.assertEqual("myid", body["id"])
+        self.assertEqual(1, body.get("expiration", "missing"))
+        self.assertEqual("info", body["params"]["extra"])
+        self.assertEqual("updated_res_id", body["resourceId"])
+        self.assertEqual("updated_res_uri", body["resourceUri"])
+        self.assertEqual("web_hook", body["type"])
 
-  def test_new_webhook_channel(self):
-    ch = channel.new_webhook_channel('http://example.com/callback')
-    self.assertEqual(0, ch.expiration)
-    self.assertEqual('http://example.com/callback', ch.address)
-    self.assertEqual(None, ch.params)
+    def test_new_webhook_channel(self):
+        ch = channel.new_webhook_channel("http://example.com/callback")
+        self.assertEqual(0, ch.expiration)
+        self.assertEqual("http://example.com/callback", ch.address)
+        self.assertEqual(None, ch.params)
 
-    # New channel with an obviously wrong expiration time.
-    ch = channel.new_webhook_channel(
-        'http://example.com/callback',
-        expiration=datetime.datetime(1965, 1, 1))
-    self.assertEqual(0, ch.expiration)
+        # New channel with an obviously wrong expiration time.
+        ch = channel.new_webhook_channel(
+            "http://example.com/callback", expiration=datetime.datetime(1965, 1, 1)
+        )
+        self.assertEqual(0, ch.expiration)
 
-    # New channel with an expiration time.
-    ch = channel.new_webhook_channel(
-        'http://example.com/callback',
-        expiration=datetime.datetime(1970, 1, 1, second=5))
-    self.assertEqual(5000, ch.expiration)
-    self.assertEqual('http://example.com/callback', ch.address)
-    self.assertEqual(None, ch.params)
+        # New channel with an expiration time.
+        ch = channel.new_webhook_channel(
+            "http://example.com/callback",
+            expiration=datetime.datetime(1970, 1, 1, second=5),
+        )
+        self.assertEqual(5000, ch.expiration)
+        self.assertEqual("http://example.com/callback", ch.address)
+        self.assertEqual(None, ch.params)
 
-    # New channel with an expiration time and params.
-    ch = channel.new_webhook_channel(
-        'http://example.com/callback',
-        expiration=datetime.datetime(1970, 1, 1, second=5, microsecond=1000),
-        params={'some':'stuff'})
-    self.assertEqual(5001, ch.expiration)
-    self.assertEqual('http://example.com/callback', ch.address)
-    self.assertEqual({'some': 'stuff'}, ch.params)
+        # New channel with an expiration time and params.
+        ch = channel.new_webhook_channel(
+            "http://example.com/callback",
+            expiration=datetime.datetime(1970, 1, 1, second=5, microsecond=1000),
+            params={"some": "stuff"},
+        )
+        self.assertEqual(5001, ch.expiration)
+        self.assertEqual("http://example.com/callback", ch.address)
+        self.assertEqual({"some": "stuff"}, ch.params)
 
 
 class TestNotification(unittest.TestCase):
-  def test_basic(self):
-    n = channel.Notification(12, 'sync', 'http://example.org',
-                     'http://example.org/v1')
+    def test_basic(self):
+        n = channel.Notification(
+            12, "sync", "http://example.org", "http://example.org/v1"
+        )
 
-    self.assertEqual(12, n.message_number)
-    self.assertEqual('sync', n.state)
-    self.assertEqual('http://example.org', n.resource_uri)
-    self.assertEqual('http://example.org/v1', n.resource_id)
+        self.assertEqual(12, n.message_number)
+        self.assertEqual("sync", n.state)
+        self.assertEqual("http://example.org", n.resource_uri)
+        self.assertEqual("http://example.org/v1", n.resource_id)
 
-  def test_notification_from_headers(self):
-    headers = {
-        'X-GoOG-CHANNEL-ID': 'myid',
-        'X-Goog-MESSAGE-NUMBER': '1',
-        'X-Goog-rESOURCE-STATE': 'sync',
-        'X-Goog-reSOURCE-URI': 'http://example.com/',
-        'X-Goog-resOURCE-ID': 'http://example.com/resource_1',
+    def test_notification_from_headers(self):
+        headers = {
+            "X-GoOG-CHANNEL-ID": "myid",
+            "X-Goog-MESSAGE-NUMBER": "1",
+            "X-Goog-rESOURCE-STATE": "sync",
+            "X-Goog-reSOURCE-URI": "http://example.com/",
+            "X-Goog-resOURCE-ID": "http://example.com/resource_1",
         }
 
-    ch = channel.Channel('web_hook', 'myid', 'mytoken',
-                         'http://example.org/callback',
-                         expiration=0,
-                         params={'extra': 'info'},
-                         resource_id='the_resource_id',
-                         resource_uri='http://example.com/resource_1')
+        ch = channel.Channel(
+            "web_hook",
+            "myid",
+            "mytoken",
+            "http://example.org/callback",
+            expiration=0,
+            params={"extra": "info"},
+            resource_id="the_resource_id",
+            resource_uri="http://example.com/resource_1",
+        )
 
-    # Good test case.
-    n = channel.notification_from_headers(ch, headers)
-    self.assertEqual('http://example.com/resource_1', n.resource_id)
-    self.assertEqual('http://example.com/', n.resource_uri)
-    self.assertEqual('sync', n.state)
-    self.assertEqual(1, n.message_number)
+        # Good test case.
+        n = channel.notification_from_headers(ch, headers)
+        self.assertEqual("http://example.com/resource_1", n.resource_id)
+        self.assertEqual("http://example.com/", n.resource_uri)
+        self.assertEqual("sync", n.state)
+        self.assertEqual(1, n.message_number)
 
-    # Detect id mismatch.
-    ch.id = 'different_id'
-    try:
-      n = channel.notification_from_headers(ch, headers)
-      self.fail('Should have raised exception')
-    except errors.InvalidNotificationError:
-      pass
+        # Detect id mismatch.
+        ch.id = "different_id"
+        try:
+            n = channel.notification_from_headers(ch, headers)
+            self.fail("Should have raised exception")
+        except errors.InvalidNotificationError:
+            pass
 
-    # Set the id back to a correct value.
-    ch.id = 'myid'
+        # Set the id back to a correct value.
+        ch.id = "myid"
diff --git a/tests/test_discovery.py b/tests/test_discovery.py
index b41051a..f85035e 100644
--- a/tests/test_discovery.py
+++ b/tests/test_discovery.py
@@ -23,7 +23,7 @@
 from __future__ import absolute_import
 import six
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 from six import BytesIO, StringIO
 from six.moves.urllib.parse import urlparse, parse_qs
@@ -84,1448 +84,1593 @@
 import uritemplate
 
 
-DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
+DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
 
 
 def assertUrisEqual(testcase, expected, actual):
-  """Test that URIs are the same, up to reordering of query parameters."""
-  expected = urlparse(expected)
-  actual = urlparse(actual)
-  testcase.assertEqual(expected.scheme, actual.scheme)
-  testcase.assertEqual(expected.netloc, actual.netloc)
-  testcase.assertEqual(expected.path, actual.path)
-  testcase.assertEqual(expected.params, actual.params)
-  testcase.assertEqual(expected.fragment, actual.fragment)
-  expected_query = parse_qs(expected.query)
-  actual_query = parse_qs(actual.query)
-  for name in list(expected_query.keys()):
-    testcase.assertEqual(expected_query[name], actual_query[name])
-  for name in list(actual_query.keys()):
-    testcase.assertEqual(expected_query[name], actual_query[name])
+    """Test that URIs are the same, up to reordering of query parameters."""
+    expected = urlparse(expected)
+    actual = urlparse(actual)
+    testcase.assertEqual(expected.scheme, actual.scheme)
+    testcase.assertEqual(expected.netloc, actual.netloc)
+    testcase.assertEqual(expected.path, actual.path)
+    testcase.assertEqual(expected.params, actual.params)
+    testcase.assertEqual(expected.fragment, actual.fragment)
+    expected_query = parse_qs(expected.query)
+    actual_query = parse_qs(actual.query)
+    for name in list(expected_query.keys()):
+        testcase.assertEqual(expected_query[name], actual_query[name])
+    for name in list(actual_query.keys()):
+        testcase.assertEqual(expected_query[name], actual_query[name])
 
 
 def datafile(filename):
-  return os.path.join(DATA_DIR, filename)
+    return os.path.join(DATA_DIR, filename)
 
 
 class SetupHttplib2(unittest.TestCase):
-
-  def test_retries(self):
-    # Merely loading googleapiclient.discovery should set the RETRIES to 1.
-    self.assertEqual(1, httplib2.RETRIES)
+    def test_retries(self):
+        # Merely loading googleapiclient.discovery should set the RETRIES to 1.
+        self.assertEqual(1, httplib2.RETRIES)
 
 
 class Utilities(unittest.TestCase):
+    def setUp(self):
+        with open(datafile("zoo.json"), "r") as fh:
+            self.zoo_root_desc = json.loads(fh.read())
+        self.zoo_get_method_desc = self.zoo_root_desc["methods"]["query"]
+        self.zoo_animals_resource = self.zoo_root_desc["resources"]["animals"]
+        self.zoo_insert_method_desc = self.zoo_animals_resource["methods"]["insert"]
+        self.zoo_schema = Schemas(self.zoo_root_desc)
 
-  def setUp(self):
-    with open(datafile('zoo.json'), 'r') as fh:
-      self.zoo_root_desc = json.loads(fh.read())
-    self.zoo_get_method_desc = self.zoo_root_desc['methods']['query']
-    self.zoo_animals_resource = self.zoo_root_desc['resources']['animals']
-    self.zoo_insert_method_desc = self.zoo_animals_resource['methods']['insert']
-    self.zoo_schema = Schemas(self.zoo_root_desc)
+    def test_key2param(self):
+        self.assertEqual("max_results", key2param("max-results"))
+        self.assertEqual("x007_bond", key2param("007-bond"))
 
-  def test_key2param(self):
-    self.assertEqual('max_results', key2param('max-results'))
-    self.assertEqual('x007_bond', key2param('007-bond'))
+    def _base_fix_up_parameters_test(self, method_desc, http_method, root_desc, schema):
+        self.assertEqual(method_desc["httpMethod"], http_method)
 
-  def _base_fix_up_parameters_test(
-          self, method_desc, http_method, root_desc, schema):
-    self.assertEqual(method_desc['httpMethod'], http_method)
+        method_desc_copy = copy.deepcopy(method_desc)
+        self.assertEqual(method_desc, method_desc_copy)
 
-    method_desc_copy = copy.deepcopy(method_desc)
-    self.assertEqual(method_desc, method_desc_copy)
+        parameters = _fix_up_parameters(
+            method_desc_copy, root_desc, http_method, schema
+        )
 
-    parameters = _fix_up_parameters(method_desc_copy, root_desc, http_method,
-                                    schema)
+        self.assertNotEqual(method_desc, method_desc_copy)
 
-    self.assertNotEqual(method_desc, method_desc_copy)
+        for param_name in STACK_QUERY_PARAMETERS:
+            self.assertEqual(
+                STACK_QUERY_PARAMETER_DEFAULT_VALUE, parameters[param_name]
+            )
 
-    for param_name in STACK_QUERY_PARAMETERS:
-      self.assertEqual(STACK_QUERY_PARAMETER_DEFAULT_VALUE,
-                       parameters[param_name])
+        for param_name, value in six.iteritems(root_desc.get("parameters", {})):
+            self.assertEqual(value, parameters[param_name])
 
-    for param_name, value in six.iteritems(root_desc.get('parameters', {})):
-      self.assertEqual(value, parameters[param_name])
+        return parameters
 
-    return parameters
+    def test_fix_up_parameters_get(self):
+        parameters = self._base_fix_up_parameters_test(
+            self.zoo_get_method_desc, "GET", self.zoo_root_desc, self.zoo_schema
+        )
+        # Since http_method is 'GET'
+        self.assertFalse("body" in parameters)
 
-  def test_fix_up_parameters_get(self):
-    parameters = self._base_fix_up_parameters_test(
-      self.zoo_get_method_desc, 'GET', self.zoo_root_desc, self.zoo_schema)
-    # Since http_method is 'GET'
-    self.assertFalse('body' in parameters)
+    def test_fix_up_parameters_insert(self):
+        parameters = self._base_fix_up_parameters_test(
+            self.zoo_insert_method_desc, "POST", self.zoo_root_desc, self.zoo_schema
+        )
+        body = {"description": "The request body.", "type": "object", "$ref": "Animal"}
+        self.assertEqual(parameters["body"], body)
 
-  def test_fix_up_parameters_insert(self):
-    parameters = self._base_fix_up_parameters_test(
-      self.zoo_insert_method_desc, 'POST', self.zoo_root_desc, self.zoo_schema)
-    body = {
-        'description': 'The request body.',
-        'type': 'object',
-        '$ref': 'Animal',
-    }
-    self.assertEqual(parameters['body'], body)
-
-  def test_fix_up_parameters_check_body(self):
-    dummy_root_desc = {}
-    dummy_schema = {
-      'Request': {
-        'properties': {
-          "description": "Required. Dummy parameter.",
-          "type": "string"
+    def test_fix_up_parameters_check_body(self):
+        dummy_root_desc = {}
+        dummy_schema = {
+            "Request": {
+                "properties": {
+                    "description": "Required. Dummy parameter.",
+                    "type": "string",
+                }
+            }
         }
-      }
-    }
-    no_payload_http_method = 'DELETE'
-    with_payload_http_method = 'PUT'
+        no_payload_http_method = "DELETE"
+        with_payload_http_method = "PUT"
 
-    invalid_method_desc = {'response': 'Who cares'}
-    valid_method_desc = {
-      'request': {
-        'key1': 'value1',
-        'key2': 'value2',
-        '$ref': 'Request'
-      }
-    }
+        invalid_method_desc = {"response": "Who cares"}
+        valid_method_desc = {
+            "request": {"key1": "value1", "key2": "value2", "$ref": "Request"}
+        }
 
-    parameters = _fix_up_parameters(invalid_method_desc, dummy_root_desc,
-                                    no_payload_http_method, dummy_schema)
-    self.assertFalse('body' in parameters)
+        parameters = _fix_up_parameters(
+            invalid_method_desc, dummy_root_desc, no_payload_http_method, dummy_schema
+        )
+        self.assertFalse("body" in parameters)
 
-    parameters = _fix_up_parameters(valid_method_desc, dummy_root_desc,
-                                    no_payload_http_method, dummy_schema)
-    self.assertFalse('body' in parameters)
+        parameters = _fix_up_parameters(
+            valid_method_desc, dummy_root_desc, no_payload_http_method, dummy_schema
+        )
+        self.assertFalse("body" in parameters)
 
-    parameters = _fix_up_parameters(invalid_method_desc, dummy_root_desc,
-                                    with_payload_http_method, dummy_schema)
-    self.assertFalse('body' in parameters)
+        parameters = _fix_up_parameters(
+            invalid_method_desc, dummy_root_desc, with_payload_http_method, dummy_schema
+        )
+        self.assertFalse("body" in parameters)
 
-    parameters = _fix_up_parameters(valid_method_desc, dummy_root_desc,
-                                    with_payload_http_method, dummy_schema)
-    body = {
-        'description': 'The request body.',
-        'type': 'object',
-        '$ref': 'Request',
-        'key1': 'value1',
-        'key2': 'value2',
-    }
-    self.assertEqual(parameters['body'], body)
+        parameters = _fix_up_parameters(
+            valid_method_desc, dummy_root_desc, with_payload_http_method, dummy_schema
+        )
+        body = {
+            "description": "The request body.",
+            "type": "object",
+            "$ref": "Request",
+            "key1": "value1",
+            "key2": "value2",
+        }
+        self.assertEqual(parameters["body"], body)
 
-  def test_fix_up_parameters_optional_body(self):
-    # Request with no parameters
-    dummy_schema = {'Request': {'properties': {}}}
-    method_desc = {'request': {'$ref': 'Request'}}
+    def test_fix_up_parameters_optional_body(self):
+        # Request with no parameters
+        dummy_schema = {"Request": {"properties": {}}}
+        method_desc = {"request": {"$ref": "Request"}}
 
-    parameters = _fix_up_parameters(method_desc, {}, 'POST', dummy_schema)
+        parameters = _fix_up_parameters(method_desc, {}, "POST", dummy_schema)
 
-  def _base_fix_up_method_description_test(
-      self, method_desc, initial_parameters, final_parameters,
-      final_accept, final_max_size, final_media_path_url):
-    fake_root_desc = {'rootUrl': 'http://root/',
-                      'servicePath': 'fake/'}
-    fake_path_url = 'fake-path/'
+    def _base_fix_up_method_description_test(
+        self,
+        method_desc,
+        initial_parameters,
+        final_parameters,
+        final_accept,
+        final_max_size,
+        final_media_path_url,
+    ):
+        fake_root_desc = {"rootUrl": "http://root/", "servicePath": "fake/"}
+        fake_path_url = "fake-path/"
 
-    accept, max_size, media_path_url = _fix_up_media_upload(
-        method_desc, fake_root_desc, fake_path_url, initial_parameters)
-    self.assertEqual(accept, final_accept)
-    self.assertEqual(max_size, final_max_size)
-    self.assertEqual(media_path_url, final_media_path_url)
-    self.assertEqual(initial_parameters, final_parameters)
+        accept, max_size, media_path_url = _fix_up_media_upload(
+            method_desc, fake_root_desc, fake_path_url, initial_parameters
+        )
+        self.assertEqual(accept, final_accept)
+        self.assertEqual(max_size, final_max_size)
+        self.assertEqual(media_path_url, final_media_path_url)
+        self.assertEqual(initial_parameters, final_parameters)
 
-  def test_fix_up_media_upload_no_initial_invalid(self):
-    invalid_method_desc = {'response': 'Who cares'}
-    self._base_fix_up_method_description_test(invalid_method_desc, {}, {},
-                                              [], 0, None)
+    def test_fix_up_media_upload_no_initial_invalid(self):
+        invalid_method_desc = {"response": "Who cares"}
+        self._base_fix_up_method_description_test(
+            invalid_method_desc, {}, {}, [], 0, None
+        )
 
-  def test_fix_up_media_upload_no_initial_valid_minimal(self):
-    valid_method_desc = {'mediaUpload': {'accept': []}}
-    final_parameters = {'media_body': MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
-                        'media_mime_type': MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE}
-    self._base_fix_up_method_description_test(
-        valid_method_desc, {}, final_parameters, [], 0,
-        'http://root/upload/fake/fake-path/')
+    def test_fix_up_media_upload_no_initial_valid_minimal(self):
+        valid_method_desc = {"mediaUpload": {"accept": []}}
+        final_parameters = {
+            "media_body": MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
+            "media_mime_type": MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE,
+        }
+        self._base_fix_up_method_description_test(
+            valid_method_desc,
+            {},
+            final_parameters,
+            [],
+            0,
+            "http://root/upload/fake/fake-path/",
+        )
 
-  def test_fix_up_media_upload_no_initial_valid_full(self):
-    valid_method_desc = {'mediaUpload': {'accept': ['*/*'], 'maxSize': '10GB'}}
-    final_parameters = {'media_body': MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
-                        'media_mime_type': MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE}
-    ten_gb = 10 * 2**30
-    self._base_fix_up_method_description_test(
-        valid_method_desc, {}, final_parameters, ['*/*'],
-        ten_gb, 'http://root/upload/fake/fake-path/')
+    def test_fix_up_media_upload_no_initial_valid_full(self):
+        valid_method_desc = {"mediaUpload": {"accept": ["*/*"], "maxSize": "10GB"}}
+        final_parameters = {
+            "media_body": MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
+            "media_mime_type": MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE,
+        }
+        ten_gb = 10 * 2 ** 30
+        self._base_fix_up_method_description_test(
+            valid_method_desc,
+            {},
+            final_parameters,
+            ["*/*"],
+            ten_gb,
+            "http://root/upload/fake/fake-path/",
+        )
 
-  def test_fix_up_media_upload_with_initial_invalid(self):
-    invalid_method_desc = {'response': 'Who cares'}
-    initial_parameters = {'body': {}}
-    self._base_fix_up_method_description_test(
-        invalid_method_desc, initial_parameters,
-        initial_parameters, [], 0, None)
+    def test_fix_up_media_upload_with_initial_invalid(self):
+        invalid_method_desc = {"response": "Who cares"}
+        initial_parameters = {"body": {}}
+        self._base_fix_up_method_description_test(
+            invalid_method_desc, initial_parameters, initial_parameters, [], 0, None
+        )
 
-  def test_fix_up_media_upload_with_initial_valid_minimal(self):
-    valid_method_desc = {'mediaUpload': {'accept': []}}
-    initial_parameters = {'body': {}}
-    final_parameters = {'body': {},
-                        'media_body': MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
-                        'media_mime_type': MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE}
-    self._base_fix_up_method_description_test(
-        valid_method_desc, initial_parameters, final_parameters, [], 0,
-        'http://root/upload/fake/fake-path/')
+    def test_fix_up_media_upload_with_initial_valid_minimal(self):
+        valid_method_desc = {"mediaUpload": {"accept": []}}
+        initial_parameters = {"body": {}}
+        final_parameters = {
+            "body": {},
+            "media_body": MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
+            "media_mime_type": MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE,
+        }
+        self._base_fix_up_method_description_test(
+            valid_method_desc,
+            initial_parameters,
+            final_parameters,
+            [],
+            0,
+            "http://root/upload/fake/fake-path/",
+        )
 
-  def test_fix_up_media_upload_with_initial_valid_full(self):
-    valid_method_desc = {'mediaUpload': {'accept': ['*/*'], 'maxSize': '10GB'}}
-    initial_parameters = {'body': {}}
-    final_parameters = {'body': {},
-                        'media_body': MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
-                        'media_mime_type': MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE}
-    ten_gb = 10 * 2**30
-    self._base_fix_up_method_description_test(
-        valid_method_desc, initial_parameters, final_parameters, ['*/*'],
-        ten_gb, 'http://root/upload/fake/fake-path/')
+    def test_fix_up_media_upload_with_initial_valid_full(self):
+        valid_method_desc = {"mediaUpload": {"accept": ["*/*"], "maxSize": "10GB"}}
+        initial_parameters = {"body": {}}
+        final_parameters = {
+            "body": {},
+            "media_body": MEDIA_BODY_PARAMETER_DEFAULT_VALUE,
+            "media_mime_type": MEDIA_MIME_TYPE_PARAMETER_DEFAULT_VALUE,
+        }
+        ten_gb = 10 * 2 ** 30
+        self._base_fix_up_method_description_test(
+            valid_method_desc,
+            initial_parameters,
+            final_parameters,
+            ["*/*"],
+            ten_gb,
+            "http://root/upload/fake/fake-path/",
+        )
 
-  def test_fix_up_method_description_get(self):
-    result = _fix_up_method_description(self.zoo_get_method_desc,
-                                        self.zoo_root_desc, self.zoo_schema)
-    path_url = 'query'
-    http_method = 'GET'
-    method_id = 'bigquery.query'
-    accept = []
-    max_size = 0
-    media_path_url = None
-    self.assertEqual(result, (path_url, http_method, method_id, accept,
-                              max_size, media_path_url))
+    def test_fix_up_method_description_get(self):
+        result = _fix_up_method_description(
+            self.zoo_get_method_desc, self.zoo_root_desc, self.zoo_schema
+        )
+        path_url = "query"
+        http_method = "GET"
+        method_id = "bigquery.query"
+        accept = []
+        max_size = 0
+        media_path_url = None
+        self.assertEqual(
+            result, (path_url, http_method, method_id, accept, max_size, media_path_url)
+        )
 
-  def test_fix_up_method_description_insert(self):
-    result = _fix_up_method_description(self.zoo_insert_method_desc,
-                                        self.zoo_root_desc, self.zoo_schema)
-    path_url = 'animals'
-    http_method = 'POST'
-    method_id = 'zoo.animals.insert'
-    accept = ['image/png']
-    max_size = 1024
-    media_path_url = 'https://www.googleapis.com/upload/zoo/v1/animals'
-    self.assertEqual(result, (path_url, http_method, method_id, accept,
-                              max_size, media_path_url))
+    def test_fix_up_method_description_insert(self):
+        result = _fix_up_method_description(
+            self.zoo_insert_method_desc, self.zoo_root_desc, self.zoo_schema
+        )
+        path_url = "animals"
+        http_method = "POST"
+        method_id = "zoo.animals.insert"
+        accept = ["image/png"]
+        max_size = 1024
+        media_path_url = "https://www.googleapis.com/upload/zoo/v1/animals"
+        self.assertEqual(
+            result, (path_url, http_method, method_id, accept, max_size, media_path_url)
+        )
 
-  def test_urljoin(self):
-    # We want to exhaustively test various URL combinations.
-    simple_bases = ['https://www.googleapis.com', 'https://www.googleapis.com/']
-    long_urls = ['foo/v1/bar:custom?alt=json', '/foo/v1/bar:custom?alt=json']
+    def test_urljoin(self):
+        # We want to exhaustively test various URL combinations.
+        simple_bases = ["https://www.googleapis.com", "https://www.googleapis.com/"]
+        long_urls = ["foo/v1/bar:custom?alt=json", "/foo/v1/bar:custom?alt=json"]
 
-    long_bases = [
-      'https://www.googleapis.com/foo/v1',
-      'https://www.googleapis.com/foo/v1/',
-    ]
-    simple_urls = ['bar:custom?alt=json', '/bar:custom?alt=json']
+        long_bases = [
+            "https://www.googleapis.com/foo/v1",
+            "https://www.googleapis.com/foo/v1/",
+        ]
+        simple_urls = ["bar:custom?alt=json", "/bar:custom?alt=json"]
 
-    final_url = 'https://www.googleapis.com/foo/v1/bar:custom?alt=json'
-    for base, url in itertools.product(simple_bases, long_urls):
-      self.assertEqual(final_url, _urljoin(base, url))
-    for base, url in itertools.product(long_bases, simple_urls):
-      self.assertEqual(final_url, _urljoin(base, url))
+        final_url = "https://www.googleapis.com/foo/v1/bar:custom?alt=json"
+        for base, url in itertools.product(simple_bases, long_urls):
+            self.assertEqual(final_url, _urljoin(base, url))
+        for base, url in itertools.product(long_bases, simple_urls):
+            self.assertEqual(final_url, _urljoin(base, url))
 
+    def test_ResourceMethodParameters_zoo_get(self):
+        parameters = ResourceMethodParameters(self.zoo_get_method_desc)
 
-  def test_ResourceMethodParameters_zoo_get(self):
-    parameters = ResourceMethodParameters(self.zoo_get_method_desc)
+        param_types = {
+            "a": "any",
+            "b": "boolean",
+            "e": "string",
+            "er": "string",
+            "i": "integer",
+            "n": "number",
+            "o": "object",
+            "q": "string",
+            "rr": "string",
+        }
+        keys = list(param_types.keys())
+        self.assertEqual(parameters.argmap, dict((key, key) for key in keys))
+        self.assertEqual(parameters.required_params, [])
+        self.assertEqual(sorted(parameters.repeated_params), ["er", "rr"])
+        self.assertEqual(parameters.pattern_params, {"rr": "[a-z]+"})
+        self.assertEqual(
+            sorted(parameters.query_params),
+            ["a", "b", "e", "er", "i", "n", "o", "q", "rr"],
+        )
+        self.assertEqual(parameters.path_params, set())
+        self.assertEqual(parameters.param_types, param_types)
+        enum_params = {"e": ["foo", "bar"], "er": ["one", "two", "three"]}
+        self.assertEqual(parameters.enum_params, enum_params)
 
-    param_types = {'a': 'any',
-                   'b': 'boolean',
-                   'e': 'string',
-                   'er': 'string',
-                   'i': 'integer',
-                   'n': 'number',
-                   'o': 'object',
-                   'q': 'string',
-                   'rr': 'string'}
-    keys = list(param_types.keys())
-    self.assertEqual(parameters.argmap, dict((key, key) for key in keys))
-    self.assertEqual(parameters.required_params, [])
-    self.assertEqual(sorted(parameters.repeated_params), ['er', 'rr'])
-    self.assertEqual(parameters.pattern_params, {'rr': '[a-z]+'})
-    self.assertEqual(sorted(parameters.query_params),
-                     ['a', 'b', 'e', 'er', 'i', 'n', 'o', 'q', 'rr'])
-    self.assertEqual(parameters.path_params, set())
-    self.assertEqual(parameters.param_types, param_types)
-    enum_params = {'e': ['foo', 'bar'],
-                   'er': ['one', 'two', 'three']}
-    self.assertEqual(parameters.enum_params, enum_params)
+    def test_ResourceMethodParameters_zoo_animals_patch(self):
+        method_desc = self.zoo_animals_resource["methods"]["patch"]
+        parameters = ResourceMethodParameters(method_desc)
 
-  def test_ResourceMethodParameters_zoo_animals_patch(self):
-    method_desc = self.zoo_animals_resource['methods']['patch']
-    parameters = ResourceMethodParameters(method_desc)
-
-    param_types = {'name': 'string'}
-    keys = list(param_types.keys())
-    self.assertEqual(parameters.argmap, dict((key, key) for key in keys))
-    self.assertEqual(parameters.required_params, ['name'])
-    self.assertEqual(parameters.repeated_params, [])
-    self.assertEqual(parameters.pattern_params, {})
-    self.assertEqual(parameters.query_params, [])
-    self.assertEqual(parameters.path_params, set(['name']))
-    self.assertEqual(parameters.param_types, param_types)
-    self.assertEqual(parameters.enum_params, {})
+        param_types = {"name": "string"}
+        keys = list(param_types.keys())
+        self.assertEqual(parameters.argmap, dict((key, key) for key in keys))
+        self.assertEqual(parameters.required_params, ["name"])
+        self.assertEqual(parameters.repeated_params, [])
+        self.assertEqual(parameters.pattern_params, {})
+        self.assertEqual(parameters.query_params, [])
+        self.assertEqual(parameters.path_params, set(["name"]))
+        self.assertEqual(parameters.param_types, param_types)
+        self.assertEqual(parameters.enum_params, {})
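+
For readers skimming the reformatted `Utilities` tests, a minimal sketch of the behaviour they assert: `key2param` turns discovery parameter names into valid Python identifiers, and `_fix_up_parameters` synthesizes a `body` parameter for payload-carrying HTTP methods. The dummy descriptors below simply mirror the ones used in the tests; they are illustrative, not real discovery data.

```python
import copy

from googleapiclient.discovery import _fix_up_parameters, key2param

# Discovery parameter names become Python-safe argument names.
assert key2param("max-results") == "max_results"
assert key2param("007-bond") == "x007_bond"

# A method description with a 'request' section gets a synthetic 'body'
# parameter, but only for HTTP methods that carry a payload.
dummy_schema = {
    "Request": {
        "properties": {"description": "Required. Dummy parameter.", "type": "string"}
    }
}
method_desc = {"request": {"key1": "value1", "key2": "value2", "$ref": "Request"}}

params = _fix_up_parameters(copy.deepcopy(method_desc), {}, "DELETE", dummy_schema)
assert "body" not in params

params = _fix_up_parameters(copy.deepcopy(method_desc), {}, "PUT", dummy_schema)
assert params["body"]["$ref"] == "Request"
```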
 
 
 class DiscoveryErrors(unittest.TestCase):
+    def test_tests_should_be_run_with_strict_positional_enforcement(self):
+        try:
+            plus = build("plus", "v1", None)
+            self.fail("should have raised a TypeError exception over missing http=.")
+        except TypeError:
+            pass
 
-  def test_tests_should_be_run_with_strict_positional_enforcement(self):
-    try:
-      plus = build('plus', 'v1', None)
-      self.fail("should have raised a TypeError exception over missing http=.")
-    except TypeError:
-      pass
+    def test_failed_to_parse_discovery_json(self):
+        self.http = HttpMock(datafile("malformed.json"), {"status": "200"})
+        try:
+            plus = build("plus", "v1", http=self.http, cache_discovery=False)
+            self.fail("should have raised an exception over malformed JSON.")
+        except InvalidJsonError:
+            pass
 
-  def test_failed_to_parse_discovery_json(self):
-    self.http = HttpMock(datafile('malformed.json'), {'status': '200'})
-    try:
-      plus = build('plus', 'v1', http=self.http, cache_discovery=False)
-      self.fail("should have raised an exception over malformed JSON.")
-    except InvalidJsonError:
-      pass
+    def test_unknown_api_name_or_version(self):
+        http = HttpMockSequence(
+            [
+                ({"status": "404"}, open(datafile("zoo.json"), "rb").read()),
+                ({"status": "404"}, open(datafile("zoo.json"), "rb").read()),
+            ]
+        )
+        with self.assertRaises(UnknownApiNameOrVersion):
+            plus = build("plus", "v1", http=http, cache_discovery=False)
 
-  def test_unknown_api_name_or_version(self):
-      http = HttpMockSequence([
-        ({'status': '404'}, open(datafile('zoo.json'), 'rb').read()),
-        ({'status': '404'}, open(datafile('zoo.json'), 'rb').read()),
-      ])
-      with self.assertRaises(UnknownApiNameOrVersion):
-        plus = build('plus', 'v1', http=http, cache_discovery=False)
-
-  def test_credentials_and_http_mutually_exclusive(self):
-    http = HttpMock(datafile('plus.json'), {'status': '200'})
-    with self.assertRaises(ValueError):
-      build(
-        'plus', 'v1', http=http, credentials=mock.sentinel.credentials)
+    def test_credentials_and_http_mutually_exclusive(self):
+        http = HttpMock(datafile("plus.json"), {"status": "200"})
+        with self.assertRaises(ValueError):
+            build("plus", "v1", http=http, credentials=mock.sentinel.credentials)
 
 
 class DiscoveryFromDocument(unittest.TestCase):
-  MOCK_CREDENTIALS = mock.Mock(spec=google.auth.credentials.Credentials)
+    MOCK_CREDENTIALS = mock.Mock(spec=google.auth.credentials.Credentials)
 
-  def test_can_build_from_local_document(self):
-    discovery = open(datafile('plus.json')).read()
-    plus = build_from_document(
-      discovery, base="https://www.googleapis.com/",
-      credentials=self.MOCK_CREDENTIALS)
-    self.assertTrue(plus is not None)
-    self.assertTrue(hasattr(plus, 'activities'))
+    def test_can_build_from_local_document(self):
+        discovery = open(datafile("plus.json")).read()
+        plus = build_from_document(
+            discovery,
+            base="https://www.googleapis.com/",
+            credentials=self.MOCK_CREDENTIALS,
+        )
+        self.assertTrue(plus is not None)
+        self.assertTrue(hasattr(plus, "activities"))
 
-  def test_can_build_from_local_deserialized_document(self):
-    discovery = open(datafile('plus.json')).read()
-    discovery = json.loads(discovery)
-    plus = build_from_document(
-      discovery, base="https://www.googleapis.com/",
-      credentials=self.MOCK_CREDENTIALS)
-    self.assertTrue(plus is not None)
-    self.assertTrue(hasattr(plus, 'activities'))
+    def test_can_build_from_local_deserialized_document(self):
+        discovery = open(datafile("plus.json")).read()
+        discovery = json.loads(discovery)
+        plus = build_from_document(
+            discovery,
+            base="https://www.googleapis.com/",
+            credentials=self.MOCK_CREDENTIALS,
+        )
+        self.assertTrue(plus is not None)
+        self.assertTrue(hasattr(plus, "activities"))
 
-  def test_building_with_base_remembers_base(self):
-    discovery = open(datafile('plus.json')).read()
+    def test_building_with_base_remembers_base(self):
+        discovery = open(datafile("plus.json")).read()
 
-    base = "https://www.example.com/"
-    plus = build_from_document(
-      discovery, base=base, credentials=self.MOCK_CREDENTIALS)
-    self.assertEquals("https://www.googleapis.com/plus/v1/", plus._baseUrl)
+        base = "https://www.example.com/"
+        plus = build_from_document(
+            discovery, base=base, credentials=self.MOCK_CREDENTIALS
+        )
+        self.assertEquals("https://www.googleapis.com/plus/v1/", plus._baseUrl)
 
-  def test_building_with_optional_http_with_authorization(self):
-    discovery = open(datafile('plus.json')).read()
-    plus = build_from_document(
-      discovery, base="https://www.googleapis.com/",
-      credentials=self.MOCK_CREDENTIALS)
+    def test_building_with_optional_http_with_authorization(self):
+        discovery = open(datafile("plus.json")).read()
+        plus = build_from_document(
+            discovery,
+            base="https://www.googleapis.com/",
+            credentials=self.MOCK_CREDENTIALS,
+        )
 
-    # plus service requires Authorization, hence we expect to see AuthorizedHttp object here
-    self.assertIsInstance(plus._http, google_auth_httplib2.AuthorizedHttp)
-    self.assertIsInstance(plus._http.http, httplib2.Http)
-    self.assertIsInstance(plus._http.http.timeout, int)
-    self.assertGreater(plus._http.http.timeout, 0)
+        # The plus service requires Authorization, hence we expect an AuthorizedHttp object here.
+        self.assertIsInstance(plus._http, google_auth_httplib2.AuthorizedHttp)
+        self.assertIsInstance(plus._http.http, httplib2.Http)
+        self.assertIsInstance(plus._http.http.timeout, int)
+        self.assertGreater(plus._http.http.timeout, 0)
 
-  def test_building_with_optional_http_with_no_authorization(self):
-    discovery = open(datafile('plus.json')).read()
-    # Cleanup auth field, so we would use plain http client
-    discovery = json.loads(discovery)
-    discovery['auth'] = {}
-    discovery = json.dumps(discovery)
+    def test_building_with_optional_http_with_no_authorization(self):
+        discovery = open(datafile("plus.json")).read()
+        # Clear the auth field so that a plain http client is used
+        discovery = json.loads(discovery)
+        discovery["auth"] = {}
+        discovery = json.dumps(discovery)
 
-    plus = build_from_document(
-      discovery, base="https://www.googleapis.com/",
-      credentials=None)
-    # plus service requires Authorization
-    self.assertIsInstance(plus._http, httplib2.Http)
-    self.assertIsInstance(plus._http.timeout, int)
-    self.assertGreater(plus._http.timeout, 0)
+        plus = build_from_document(
+            discovery, base="https://www.googleapis.com/", credentials=None
+        )
+        # With auth cleared above, a plain (unauthorized) http client is used
+        self.assertIsInstance(plus._http, httplib2.Http)
+        self.assertIsInstance(plus._http.timeout, int)
+        self.assertGreater(plus._http.timeout, 0)
 
-  def test_building_with_explicit_http(self):
-    http = HttpMock()
-    discovery = open(datafile('plus.json')).read()
-    plus = build_from_document(
-      discovery, base="https://www.googleapis.com/", http=http)
-    self.assertEquals(plus._http, http)
+    def test_building_with_explicit_http(self):
+        http = HttpMock()
+        discovery = open(datafile("plus.json")).read()
+        plus = build_from_document(
+            discovery, base="https://www.googleapis.com/", http=http
+        )
+        self.assertEquals(plus._http, http)
 
-  def test_building_with_developer_key_skips_adc(self):
-    discovery = open(datafile('plus.json')).read()
-    plus = build_from_document(
-      discovery, base="https://www.googleapis.com/", developerKey='123')
-    self.assertIsInstance(plus._http, httplib2.Http)
-    # It should not be an AuthorizedHttp, because that would indicate that
-    # application default credentials were used.
-    self.assertNotIsInstance(plus._http, google_auth_httplib2.AuthorizedHttp)
+    def test_building_with_developer_key_skips_adc(self):
+        discovery = open(datafile("plus.json")).read()
+        plus = build_from_document(
+            discovery, base="https://www.googleapis.com/", developerKey="123"
+        )
+        self.assertIsInstance(plus._http, httplib2.Http)
+        # It should not be an AuthorizedHttp, because that would indicate that
+        # application default credentials were used.
+        self.assertNotIsInstance(plus._http, google_auth_httplib2.AuthorizedHttp)
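+
The `DiscoveryFromDocument` tests above all follow the same pattern: read a discovery document from disk and hand it to `build_from_document`. A minimal sketch of that pattern outside the test harness (the file name is an assumption, and `HttpMock` stands in for a real httplib2-compatible client):

```python
from googleapiclient.discovery import build_from_document
from googleapiclient.http import HttpMock

with open("plus.json") as fh:  # hypothetical locally stored discovery document
    discovery = fh.read()

plus = build_from_document(
    discovery,                                 # a JSON string or an already-parsed dict
    base="https://www.googleapis.com/",
    http=HttpMock(headers={"status": "200"}),  # or credentials=... for real calls
)
assert hasattr(plus, "activities")             # resources show up as attributes
```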
 
 
 class DiscoveryFromHttp(unittest.TestCase):
-  def setUp(self):
-    self.old_environ = os.environ.copy()
+    def setUp(self):
+        self.old_environ = os.environ.copy()
 
-  def tearDown(self):
-    os.environ = self.old_environ
+    def tearDown(self):
+        os.environ = self.old_environ
 
-  def test_userip_is_added_to_discovery_uri(self):
-    # build() will raise an HttpError on a 400, use this to pick the request uri
-    # out of the raised exception.
-    os.environ['REMOTE_ADDR'] = '10.0.0.1'
-    try:
-      http = HttpMockSequence([
-        ({'status': '400'}, open(datafile('zoo.json'), 'rb').read()),
-        ])
-      zoo = build('zoo', 'v1', http=http, developerKey=None,
-                  discoveryServiceUrl='http://example.com')
-      self.fail('Should have raised an exception.')
-    except HttpError as e:
-      self.assertEqual(e.uri, 'http://example.com?userIp=10.0.0.1')
+    def test_userip_is_added_to_discovery_uri(self):
+        # build() will raise an HttpError on a 400; use this to pick the request uri
+        # out of the raised exception.
+        os.environ["REMOTE_ADDR"] = "10.0.0.1"
+        try:
+            http = HttpMockSequence(
+                [({"status": "400"}, open(datafile("zoo.json"), "rb").read())]
+            )
+            zoo = build(
+                "zoo",
+                "v1",
+                http=http,
+                developerKey=None,
+                discoveryServiceUrl="http://example.com",
+            )
+            self.fail("Should have raised an exception.")
+        except HttpError as e:
+            self.assertEqual(e.uri, "http://example.com?userIp=10.0.0.1")
 
-  def test_userip_missing_is_not_added_to_discovery_uri(self):
-    # build() will raise an HttpError on a 400, use this to pick the request uri
-    # out of the raised exception.
-    try:
-      http = HttpMockSequence([
-        ({'status': '400'}, open(datafile('zoo.json'), 'rb').read()),
-        ])
-      zoo = build('zoo', 'v1', http=http, developerKey=None,
-                  discoveryServiceUrl='http://example.com')
-      self.fail('Should have raised an exception.')
-    except HttpError as e:
-      self.assertEqual(e.uri, 'http://example.com')
+    def test_userip_missing_is_not_added_to_discovery_uri(self):
+        # build() will raise an HttpError on a 400; use this to pick the request uri
+        # out of the raised exception.
+        try:
+            http = HttpMockSequence(
+                [({"status": "400"}, open(datafile("zoo.json"), "rb").read())]
+            )
+            zoo = build(
+                "zoo",
+                "v1",
+                http=http,
+                developerKey=None,
+                discoveryServiceUrl="http://example.com",
+            )
+            self.fail("Should have raised an exception.")
+        except HttpError as e:
+            self.assertEqual(e.uri, "http://example.com")
 
-  def test_key_is_added_to_discovery_uri(self):
-    # build() will raise an HttpError on a 400, use this to pick the request uri
-    # out of the raised exception.
-    try:
-      http = HttpMockSequence([
-        ({'status': '400'}, open(datafile('zoo.json'), 'rb').read()),
-        ])
-      zoo = build('zoo', 'v1', http=http, developerKey='foo',
-                  discoveryServiceUrl='http://example.com')
-      self.fail('Should have raised an exception.')
-    except HttpError as e:
-      self.assertEqual(e.uri, 'http://example.com?key=foo')
+    def test_key_is_added_to_discovery_uri(self):
+        # build() will raise an HttpError on a 400; use this to pick the request uri
+        # out of the raised exception.
+        try:
+            http = HttpMockSequence(
+                [({"status": "400"}, open(datafile("zoo.json"), "rb").read())]
+            )
+            zoo = build(
+                "zoo",
+                "v1",
+                http=http,
+                developerKey="foo",
+                discoveryServiceUrl="http://example.com",
+            )
+            self.fail("Should have raised an exception.")
+        except HttpError as e:
+            self.assertEqual(e.uri, "http://example.com?key=foo")
 
-  def test_discovery_loading_from_v2_discovery_uri(self):
-      http = HttpMockSequence([
-        ({'status': '404'}, 'Not found'),
-        ({'status': '200'}, open(datafile('zoo.json'), 'rb').read()),
-      ])
-      zoo = build('zoo', 'v1', http=http, cache_discovery=False)
-      self.assertTrue(hasattr(zoo, 'animals'))
+    def test_discovery_loading_from_v2_discovery_uri(self):
+        http = HttpMockSequence(
+            [
+                ({"status": "404"}, "Not found"),
+                ({"status": "200"}, open(datafile("zoo.json"), "rb").read()),
+            ]
+        )
+        zoo = build("zoo", "v1", http=http, cache_discovery=False)
+        self.assertTrue(hasattr(zoo, "animals"))
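+
The `DiscoveryFromHttp` tests exercise how `build()` constructs the discovery request itself: `developerKey` is appended as `key=`, a `REMOTE_ADDR` environment variable is forwarded as `userIp=`, and a 404 from the first discovery URI makes the client retry the V2 discovery URI. A hedged sketch of the corresponding call; the discovery URL shown is the library default, while the API name and key are placeholders:

```python
from googleapiclient.discovery import build

service = build(
    "zoo",                       # placeholder; a real call needs a real API name
    "v1",
    developerKey="my-api-key",   # placeholder; sent as key= on the discovery request
    discoveryServiceUrl=(
        "https://www.googleapis.com/discovery/v1/apis/{api}/{apiVersion}/rest"
    ),
    cache_discovery=False,       # skip the discovery cache entirely
)
```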
+
 
 class DiscoveryFromAppEngineCache(unittest.TestCase):
-  def test_appengine_memcache(self):
-    # Hack module import
-    self.orig_import = __import__
-    self.mocked_api = mock.MagicMock()
+    def test_appengine_memcache(self):
+        # Intercept module imports so google.appengine.api resolves to a mock.
+        self.orig_import = __import__
+        self.mocked_api = mock.MagicMock()
 
-    def import_mock(name, *args, **kwargs):
-      if name == 'google.appengine.api':
-        return self.mocked_api
-      return self.orig_import(name, *args, **kwargs)
+        def import_mock(name, *args, **kwargs):
+            if name == "google.appengine.api":
+                return self.mocked_api
+            return self.orig_import(name, *args, **kwargs)
 
-    import_fullname = '__builtin__.__import__'
-    if sys.version_info[0] >= 3:
-      import_fullname = 'builtins.__import__'
+        import_fullname = "__builtin__.__import__"
+        if sys.version_info[0] >= 3:
+            import_fullname = "builtins.__import__"
 
-    with mock.patch(import_fullname, side_effect=import_mock):
-      namespace = 'google-api-client'
-      self.http = HttpMock(datafile('plus.json'), {'status': '200'})
+        with mock.patch(import_fullname, side_effect=import_mock):
+            namespace = "google-api-client"
+            self.http = HttpMock(datafile("plus.json"), {"status": "200"})
 
-      self.mocked_api.memcache.get.return_value = None
+            self.mocked_api.memcache.get.return_value = None
 
-      plus = build('plus', 'v1', http=self.http)
+            plus = build("plus", "v1", http=self.http)
 
-      # memcache.get is called once
-      url = 'https://www.googleapis.com/discovery/v1/apis/plus/v1/rest'
-      self.mocked_api.memcache.get.assert_called_once_with(url,
-                                                           namespace=namespace)
+            # memcache.get is called once
+            url = "https://www.googleapis.com/discovery/v1/apis/plus/v1/rest"
+            self.mocked_api.memcache.get.assert_called_once_with(
+                url, namespace=namespace
+            )
 
-      # memcache.set is called once
-      with open(datafile('plus.json')) as f:
-        content = f.read()
-      self.mocked_api.memcache.set.assert_called_once_with(
-        url, content, time=DISCOVERY_DOC_MAX_AGE, namespace=namespace)
+            # memcache.set is called once
+            with open(datafile("plus.json")) as f:
+                content = f.read()
+            self.mocked_api.memcache.set.assert_called_once_with(
+                url, content, time=DISCOVERY_DOC_MAX_AGE, namespace=namespace
+            )
 
-      # Returns the cached content this time.
-      self.mocked_api.memcache.get.return_value = content
+            # Returns the cached content this time.
+            self.mocked_api.memcache.get.return_value = content
 
-      # Make sure the contents are returned from the cache.
-      # (Otherwise it should through an error)
-      self.http = HttpMock(None, {'status': '200'})
+            # Make sure the contents are returned from the cache.
+            # (Otherwise it should throw an error)
+            self.http = HttpMock(None, {"status": "200"})
 
-      plus = build('plus', 'v1', http=self.http)
+            plus = build("plus", "v1", http=self.http)
 
-      # memcache.get is called twice
-      self.mocked_api.memcache.get.assert_has_calls(
-        [mock.call(url, namespace=namespace),
-         mock.call(url, namespace=namespace)])
+            # memcache.get is called twice
+            self.mocked_api.memcache.get.assert_has_calls(
+                [
+                    mock.call(url, namespace=namespace),
+                    mock.call(url, namespace=namespace),
+                ]
+            )
 
-      # memcahce.set is called just once
-      self.mocked_api.memcache.set.assert_called_once_with(
-        url, content, time=DISCOVERY_DOC_MAX_AGE,namespace=namespace)
+            # memcache.set is called just once
+            self.mocked_api.memcache.set.assert_called_once_with(
+                url, content, time=DISCOVERY_DOC_MAX_AGE, namespace=namespace
+            )
 
 
 class DictCache(Cache):
-  def __init__(self):
-    self.d = {}
-  def get(self, url):
-    return self.d.get(url, None)
-  def set(self, url, content):
-    self.d[url] = content
-  def contains(self, url):
-    return url in self.d
+    def __init__(self):
+        self.d = {}
+
+    def get(self, url):
+        return self.d.get(url, None)
+
+    def set(self, url, content):
+        self.d[url] = content
+
+    def contains(self, url):
+        return url in self.d
 
 
 class DiscoveryFromFileCache(unittest.TestCase):
-  def test_file_based_cache(self):
-    cache = mock.Mock(wraps=DictCache())
-    with mock.patch('googleapiclient.discovery_cache.autodetect',
-                    return_value=cache):
-      self.http = HttpMock(datafile('plus.json'), {'status': '200'})
+    def test_file_based_cache(self):
+        cache = mock.Mock(wraps=DictCache())
+        with mock.patch(
+            "googleapiclient.discovery_cache.autodetect", return_value=cache
+        ):
+            self.http = HttpMock(datafile("plus.json"), {"status": "200"})
 
-      plus = build('plus', 'v1', http=self.http)
+            plus = build("plus", "v1", http=self.http)
 
-      # cache.get is called once
-      url = 'https://www.googleapis.com/discovery/v1/apis/plus/v1/rest'
-      cache.get.assert_called_once_with(url)
+            # cache.get is called once
+            url = "https://www.googleapis.com/discovery/v1/apis/plus/v1/rest"
+            cache.get.assert_called_once_with(url)
 
-      # cache.set is called once
-      with open(datafile('plus.json')) as f:
-        content = f.read()
-      cache.set.assert_called_once_with(url, content)
+            # cache.set is called once
+            with open(datafile("plus.json")) as f:
+                content = f.read()
+            cache.set.assert_called_once_with(url, content)
 
-      # Make sure there is a cache entry for the plus v1 discovery doc.
-      self.assertTrue(cache.contains(url))
+            # Make sure there is a cache entry for the plus v1 discovery doc.
+            self.assertTrue(cache.contains(url))
 
-      # Make sure the contents are returned from the cache.
-      # (Otherwise it should through an error)
-      self.http = HttpMock(None, {'status': '200'})
+            # Make sure the contents are returned from the cache.
+            # (Otherwise it should throw an error)
+            self.http = HttpMock(None, {"status": "200"})
 
-      plus = build('plus', 'v1', http=self.http)
+            plus = build("plus", "v1", http=self.http)
 
-      # cache.get is called twice
-      cache.get.assert_has_calls([mock.call(url), mock.call(url)])
+            # cache.get is called twice
+            cache.get.assert_has_calls([mock.call(url), mock.call(url)])
 
-      # cahce.set is called just once
-      cache.set.assert_called_once_with(url, content)
+            # cache.set is called just once
+            cache.set.assert_called_once_with(url, content)
 
 
 class Discovery(unittest.TestCase):
+    def test_method_error_checking(self):
+        self.http = HttpMock(datafile("plus.json"), {"status": "200"})
+        plus = build("plus", "v1", http=self.http)
 
-  def test_method_error_checking(self):
-    self.http = HttpMock(datafile('plus.json'), {'status': '200'})
-    plus = build('plus', 'v1', http=self.http)
+        # Missing required parameters
+        try:
+            plus.activities().list()
+            self.fail()
+        except TypeError as e:
+            self.assertTrue("Missing" in str(e))
 
-    # Missing required parameters
-    try:
-      plus.activities().list()
-      self.fail()
-    except TypeError as e:
-      self.assertTrue('Missing' in str(e))
+        # Missing required parameters even if supplied as None.
+        try:
+            plus.activities().list(collection=None, userId=None)
+            self.fail()
+        except TypeError as e:
+            self.assertTrue("Missing" in str(e))
 
-    # Missing required parameters even if supplied as None.
-    try:
-      plus.activities().list(collection=None, userId=None)
-      self.fail()
-    except TypeError as e:
-      self.assertTrue('Missing' in str(e))
+        # Parameter doesn't match regex
+        try:
+            plus.activities().list(collection="not_a_collection_name", userId="me")
+            self.fail()
+        except TypeError as e:
+            self.assertTrue("not an allowed value" in str(e))
 
-    # Parameter doesn't match regex
-    try:
-      plus.activities().list(collection='not_a_collection_name', userId='me')
-      self.fail()
-    except TypeError as e:
-      self.assertTrue('not an allowed value' in str(e))
+        # Unexpected parameter
+        try:
+            plus.activities().list(flubber=12)
+            self.fail()
+        except TypeError as e:
+            self.assertTrue("unexpected" in str(e))
 
-    # Unexpected parameter
-    try:
-      plus.activities().list(flubber=12)
-      self.fail()
-    except TypeError as e:
-      self.assertTrue('unexpected' in str(e))
+    def _check_query_types(self, request):
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["q"], ["foo"])
+        self.assertEqual(q["i"], ["1"])
+        self.assertEqual(q["n"], ["1.0"])
+        self.assertEqual(q["b"], ["false"])
+        self.assertEqual(q["a"], ["[1, 2, 3]"])
+        self.assertEqual(q["o"], ["{'a': 1}"])
+        self.assertEqual(q["e"], ["bar"])
 
-  def _check_query_types(self, request):
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['q'], ['foo'])
-    self.assertEqual(q['i'], ['1'])
-    self.assertEqual(q['n'], ['1.0'])
-    self.assertEqual(q['b'], ['false'])
-    self.assertEqual(q['a'], ['[1, 2, 3]'])
-    self.assertEqual(q['o'], ['{\'a\': 1}'])
-    self.assertEqual(q['e'], ['bar'])
+    def test_type_coercion(self):
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
 
-  def test_type_coercion(self):
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
+        request = zoo.query(
+            q="foo", i=1.0, n=1.0, b=0, a=[1, 2, 3], o={"a": 1}, e="bar"
+        )
+        self._check_query_types(request)
+        request = zoo.query(
+            q="foo", i=1, n=1, b=False, a=[1, 2, 3], o={"a": 1}, e="bar"
+        )
+        self._check_query_types(request)
 
-    request = zoo.query(
-        q="foo", i=1.0, n=1.0, b=0, a=[1,2,3], o={'a':1}, e='bar')
-    self._check_query_types(request)
-    request = zoo.query(
-        q="foo", i=1, n=1, b=False, a=[1,2,3], o={'a':1}, e='bar')
-    self._check_query_types(request)
+        request = zoo.query(
+            q="foo", i="1", n="1", b="", a=[1, 2, 3], o={"a": 1}, e="bar", er="two"
+        )
 
-    request = zoo.query(
-        q="foo", i="1", n="1", b="", a=[1,2,3], o={'a':1}, e='bar', er='two')
+        request = zoo.query(
+            q="foo",
+            i="1",
+            n="1",
+            b="",
+            a=[1, 2, 3],
+            o={"a": 1},
+            e="bar",
+            er=["one", "three"],
+            rr=["foo", "bar"],
+        )
+        self._check_query_types(request)
 
-    request = zoo.query(
-        q="foo", i="1", n="1", b="", a=[1,2,3], o={'a':1}, e='bar',
-        er=['one', 'three'], rr=['foo', 'bar'])
-    self._check_query_types(request)
+        # Five is right out.
+        self.assertRaises(TypeError, zoo.query, er=["one", "five"])
 
-    # Five is right out.
-    self.assertRaises(TypeError, zoo.query, er=['one', 'five'])
+    def test_optional_stack_query_parameters(self):
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
+        request = zoo.query(trace="html", fields="description")
 
-  def test_optional_stack_query_parameters(self):
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
-    request = zoo.query(trace='html', fields='description')
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["trace"], ["html"])
+        self.assertEqual(q["fields"], ["description"])
 
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['trace'], ['html'])
-    self.assertEqual(q['fields'], ['description'])
+    def test_string_params_value_of_none_get_dropped(self):
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
+        request = zoo.query(trace=None, fields="description")
 
-  def test_string_params_value_of_none_get_dropped(self):
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
-    request = zoo.query(trace=None, fields='description')
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertFalse("trace" in q)
 
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertFalse('trace' in q)
+    def test_model_added_query_parameters(self):
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
+        request = zoo.animals().get(name="Lion")
 
-  def test_model_added_query_parameters(self):
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
-    request = zoo.animals().get(name='Lion')
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["alt"], ["json"])
+        self.assertEqual(request.headers["accept"], "application/json")
 
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['alt'], ['json'])
-    self.assertEqual(request.headers['accept'], 'application/json')
+    def test_fallback_to_raw_model(self):
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
+        request = zoo.animals().getmedia(name="Lion")
 
-  def test_fallback_to_raw_model(self):
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
-    request = zoo.animals().getmedia(name='Lion')
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertTrue("alt" not in q)
+        self.assertEqual(request.headers["accept"], "*/*")
 
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertTrue('alt' not in q)
-    self.assertEqual(request.headers['accept'], '*/*')
+    def test_patch(self):
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
+        request = zoo.animals().patch(name="lion", body='{"description": "foo"}')
 
-  def test_patch(self):
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
-    request = zoo.animals().patch(name='lion', body='{"description": "foo"}')
+        self.assertEqual(request.method, "PATCH")
 
-    self.assertEqual(request.method, 'PATCH')
+    def test_batch_request_from_discovery(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        # zoo defines a batchPath
+        zoo = build("zoo", "v1", http=self.http)
+        batch_request = zoo.new_batch_http_request()
+        self.assertEqual(
+            batch_request._batch_uri, "https://www.googleapis.com/batchZoo"
+        )
 
-  def test_batch_request_from_discovery(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    # zoo defines a batchPath
-    zoo = build('zoo', 'v1', http=self.http)
-    batch_request = zoo.new_batch_http_request()
-    self.assertEqual(batch_request._batch_uri,
-                     "https://www.googleapis.com/batchZoo")
+    def test_batch_request_from_default(self):
+        self.http = HttpMock(datafile("plus.json"), {"status": "200"})
+        # plus does not define a batchPath
+        plus = build("plus", "v1", http=self.http)
+        batch_request = plus.new_batch_http_request()
+        self.assertEqual(batch_request._batch_uri, "https://www.googleapis.com/batch")
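+
The two batch tests show where `new_batch_http_request()` gets its endpoint from: the discovery document's `batchPath` when one is defined (zoo), otherwise the default batch URI (plus). A sketch of how such a batch is typically assembled; the callback and the `HttpMock`-backed service are illustrative:

```python
from googleapiclient.discovery import build
from googleapiclient.http import HttpMock


def on_response(request_id, response, exception):
    # Invoked once per batched request; exception is an HttpError on failure.
    if exception is None:
        print(request_id, response)


http = HttpMock("zoo.json", {"status": "200"})  # hypothetical local discovery doc
zoo = build("zoo", "v1", http=http)

batch = zoo.new_batch_http_request(callback=on_response)  # uses zoo's batchPath
batch.add(zoo.animals().get(name="Lion"))
batch.add(zoo.animals().get(name="Tiger"))
# batch.execute() would send everything as one multipart request to _batch_uri.
```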
 
-  def test_batch_request_from_default(self):
-    self.http = HttpMock(datafile('plus.json'), {'status': '200'})
-    # plus does not define a batchPath
-    plus = build('plus', 'v1', http=self.http)
-    batch_request = plus.new_batch_http_request()
-    self.assertEqual(batch_request._batch_uri,
-                     "https://www.googleapis.com/batch")
+    def test_tunnel_patch(self):
+        http = HttpMockSequence(
+            [
+                ({"status": "200"}, open(datafile("zoo.json"), "rb").read()),
+                ({"status": "200"}, "echo_request_headers_as_json"),
+            ]
+        )
+        http = tunnel_patch(http)
+        zoo = build("zoo", "v1", http=http, cache_discovery=False)
+        resp = zoo.animals().patch(name="lion", body='{"description": "foo"}').execute()
 
-  def test_tunnel_patch(self):
-    http = HttpMockSequence([
-      ({'status': '200'}, open(datafile('zoo.json'), 'rb').read()),
-      ({'status': '200'}, 'echo_request_headers_as_json'),
-      ])
-    http = tunnel_patch(http)
-    zoo = build('zoo', 'v1', http=http, cache_discovery=False)
-    resp = zoo.animals().patch(
-        name='lion', body='{"description": "foo"}').execute()
+        self.assertTrue("x-http-method-override" in resp)
 
-    self.assertTrue('x-http-method-override' in resp)
+    def test_plus_resources(self):
+        self.http = HttpMock(datafile("plus.json"), {"status": "200"})
+        plus = build("plus", "v1", http=self.http)
+        self.assertTrue(getattr(plus, "activities"))
+        self.assertTrue(getattr(plus, "people"))
 
-  def test_plus_resources(self):
-    self.http = HttpMock(datafile('plus.json'), {'status': '200'})
-    plus = build('plus', 'v1', http=self.http)
-    self.assertTrue(getattr(plus, 'activities'))
-    self.assertTrue(getattr(plus, 'people'))
+    def test_oauth2client_credentials(self):
+        credentials = mock.Mock(spec=GoogleCredentials)
+        credentials.create_scoped_required.return_value = False
 
-  def test_oauth2client_credentials(self):
-    credentials = mock.Mock(spec=GoogleCredentials)
-    credentials.create_scoped_required.return_value = False
+        discovery = open(datafile("plus.json")).read()
+        service = build_from_document(discovery, credentials=credentials)
+        self.assertEqual(service._http, credentials.authorize.return_value)
 
-    discovery = open(datafile('plus.json')).read()
-    service = build_from_document(discovery, credentials=credentials)
-    self.assertEqual(service._http, credentials.authorize.return_value)
+    def test_google_auth_credentials(self):
+        credentials = mock.Mock(spec=google.auth.credentials.Credentials)
+        discovery = open(datafile("plus.json")).read()
+        service = build_from_document(discovery, credentials=credentials)
 
-  def test_google_auth_credentials(self):
-    credentials = mock.Mock(spec=google.auth.credentials.Credentials)
-    discovery = open(datafile('plus.json')).read()
-    service = build_from_document(discovery, credentials=credentials)
+        self.assertIsInstance(service._http, google_auth_httplib2.AuthorizedHttp)
+        self.assertEqual(service._http.credentials, credentials)
 
-    self.assertIsInstance(service._http, google_auth_httplib2.AuthorizedHttp)
-    self.assertEqual(service._http.credentials, credentials)
+    def test_no_scopes_no_credentials(self):
+        # Zoo doesn't have scopes
+        discovery = open(datafile("zoo.json")).read()
+        service = build_from_document(discovery)
+        # Should be an ordinary httplib2.Http instance and not AuthorizedHttp.
+        self.assertIsInstance(service._http, httplib2.Http)
 
-  def test_no_scopes_no_credentials(self):
-    # Zoo doesn't have scopes
-    discovery = open(datafile('zoo.json')).read()
-    service = build_from_document(discovery)
-    # Should be an ordinary httplib2.Http instance and not AuthorizedHttp.
-    self.assertIsInstance(service._http, httplib2.Http)
+    def test_full_featured(self):
+        # Zoo should exercise all discovery facets
+        # and should also have no future.json file.
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
+        self.assertTrue(getattr(zoo, "animals"))
 
-  def test_full_featured(self):
-    # Zoo should exercise all discovery facets
-    # and should also have no future.json file.
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
-    self.assertTrue(getattr(zoo, 'animals'))
+        request = zoo.animals().list(name="bat", projection="full")
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["name"], ["bat"])
+        self.assertEqual(q["projection"], ["full"])
 
-    request = zoo.animals().list(name='bat', projection="full")
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['name'], ['bat'])
-    self.assertEqual(q['projection'], ['full'])
+    def test_nested_resources(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
+        self.assertTrue(getattr(zoo, "animals"))
+        request = zoo.my().favorites().list(max_results="5")
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["max-results"], ["5"])
 
-  def test_nested_resources(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
-    self.assertTrue(getattr(zoo, 'animals'))
-    request = zoo.my().favorites().list(max_results="5")
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['max-results'], ['5'])
+    @unittest.skipIf(six.PY3, "print is not a reserved name in Python 3")
+    def test_methods_with_reserved_names(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
+        self.assertTrue(getattr(zoo, "animals"))
+        request = zoo.global_().print_().assert_(max_results="5")
+        parsed = urlparse(request.uri)
+        self.assertEqual(parsed[2], "/zoo/v1/global/print/assert")
 
-  @unittest.skipIf(six.PY3, 'print is not a reserved name in Python 3')
-  def test_methods_with_reserved_names(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
-    self.assertTrue(getattr(zoo, 'animals'))
-    request = zoo.global_().print_().assert_(max_results="5")
-    parsed = urlparse(request.uri)
-    self.assertEqual(parsed[2], '/zoo/v1/global/print/assert')
+    def test_top_level_functions(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
+        self.assertTrue(getattr(zoo, "query"))
+        request = zoo.query(q="foo")
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["q"], ["foo"])
 
-  def test_top_level_functions(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
-    self.assertTrue(getattr(zoo, 'query'))
-    request = zoo.query(q="foo")
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['q'], ['foo'])
+    def test_simple_media_uploads(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
+        doc = getattr(zoo.animals().insert, "__doc__")
+        self.assertTrue("media_body" in doc)
 
-  def test_simple_media_uploads(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
-    doc = getattr(zoo.animals().insert, '__doc__')
-    self.assertTrue('media_body' in doc)
+    def test_simple_media_upload_no_max_size_provided(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
+        request = zoo.animals().crossbreed(media_body=datafile("small.png"))
+        self.assertEquals("image/png", request.headers["content-type"])
+        self.assertEquals(b"PNG", request.body[1:4])
 
-  def test_simple_media_upload_no_max_size_provided(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
-    request = zoo.animals().crossbreed(media_body=datafile('small.png'))
-    self.assertEquals('image/png', request.headers['content-type'])
-    self.assertEquals(b'PNG', request.body[1:4])
+    def test_simple_media_raise_correct_exceptions(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_simple_media_raise_correct_exceptions(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        try:
+            zoo.animals().insert(media_body=datafile("smiley.png"))
+            self.fail("should throw exception if media is too large.")
+        except MediaUploadSizeError:
+            pass
 
-    try:
-      zoo.animals().insert(media_body=datafile('smiley.png'))
-      self.fail("should throw exception if media is too large.")
-    except MediaUploadSizeError:
-      pass
+        try:
+            zoo.animals().insert(media_body=datafile("small.jpg"))
+            self.fail("should throw exception if mimetype is unacceptable.")
+        except UnacceptableMimeTypeError:
+            pass
 
-    try:
-      zoo.animals().insert(media_body=datafile('small.jpg'))
-      self.fail("should throw exception if mimetype is unacceptable.")
-    except UnacceptableMimeTypeError:
-      pass
+    def test_simple_media_good_upload(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_simple_media_good_upload(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        request = zoo.animals().insert(media_body=datafile("small.png"))
+        self.assertEquals("image/png", request.headers["content-type"])
+        self.assertEquals(b"PNG", request.body[1:4])
+        assertUrisEqual(
+            self,
+            "https://www.googleapis.com/upload/zoo/v1/animals?uploadType=media&alt=json",
+            request.uri,
+        )
 
-    request = zoo.animals().insert(media_body=datafile('small.png'))
-    self.assertEquals('image/png', request.headers['content-type'])
-    self.assertEquals(b'PNG', request.body[1:4])
-    assertUrisEqual(self,
-        'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=media&alt=json',
-        request.uri)
+    def test_simple_media_unknown_mimetype(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_simple_media_unknown_mimetype(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        try:
+            zoo.animals().insert(media_body=datafile("small-png"))
+            self.fail("should throw exception if mimetype is unknown.")
+        except UnknownFileType:
+            pass
 
-    try:
-      zoo.animals().insert(media_body=datafile('small-png'))
-      self.fail("should throw exception if mimetype is unknown.")
-    except UnknownFileType:
-      pass
+        request = zoo.animals().insert(
+            media_body=datafile("small-png"), media_mime_type="image/png"
+        )
+        self.assertEquals("image/png", request.headers["content-type"])
+        self.assertEquals(b"PNG", request.body[1:4])
+        assertUrisEqual(
+            self,
+            "https://www.googleapis.com/upload/zoo/v1/animals?uploadType=media&alt=json",
+            request.uri,
+        )
 
-    request = zoo.animals().insert(media_body=datafile('small-png'),
-                                   media_mime_type='image/png')
-    self.assertEquals('image/png', request.headers['content-type'])
-    self.assertEquals(b'PNG', request.body[1:4])
-    assertUrisEqual(self,
-        'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=media&alt=json',
-        request.uri)
+    def test_multipart_media_raise_correct_exceptions(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_multipart_media_raise_correct_exceptions(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        try:
+            zoo.animals().insert(media_body=datafile("smiley.png"), body={})
+            self.fail("should throw exception if media is too large.")
+        except MediaUploadSizeError:
+            pass
 
-    try:
-      zoo.animals().insert(media_body=datafile('smiley.png'), body={})
-      self.fail("should throw exception if media is too large.")
-    except MediaUploadSizeError:
-      pass
+        try:
+            zoo.animals().insert(media_body=datafile("small.jpg"), body={})
+            self.fail("should throw exception if mimetype is unacceptable.")
+        except UnacceptableMimeTypeError:
+            pass
 
-    try:
-      zoo.animals().insert(media_body=datafile('small.jpg'), body={})
-      self.fail("should throw exception if mimetype is unacceptable.")
-    except UnacceptableMimeTypeError:
-      pass
+    def test_multipart_media_good_upload(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_multipart_media_good_upload(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        request = zoo.animals().insert(media_body=datafile("small.png"), body={})
+        self.assertTrue(request.headers["content-type"].startswith("multipart/related"))
+        with open(datafile("small.png"), "rb") as f:
+            contents = f.read()
+        boundary = re.match(b"--=+([^=]+)", request.body).group(1)
+        self.assertEqual(
+            request.body.rstrip(b"\n"),  # Python 2.6 does not add a trailing \n
+            b"--==============="
+            + boundary
+            + b"==\n"
+            + b"Content-Type: application/json\n"
+            + b"MIME-Version: 1.0\n\n"
+            + b'{"data": {}}\n'
+            + b"--==============="
+            + boundary
+            + b"==\n"
+            + b"Content-Type: image/png\n"
+            + b"MIME-Version: 1.0\n"
+            + b"Content-Transfer-Encoding: binary\n\n"
+            + contents
+            + b"\n--==============="
+            + boundary
+            + b"==--",
+        )
+        assertUrisEqual(
+            self,
+            "https://www.googleapis.com/upload/zoo/v1/animals?uploadType=multipart&alt=json",
+            request.uri,
+        )
 
-    request = zoo.animals().insert(media_body=datafile('small.png'), body={})
-    self.assertTrue(request.headers['content-type'].startswith(
-        'multipart/related'))
-    with open(datafile('small.png'), 'rb') as f:
-      contents = f.read()
-    boundary = re.match(b'--=+([^=]+)', request.body).group(1)
-    self.assertEqual(
-      request.body.rstrip(b"\n"), # Python 2.6 does not add a trailing \n
-      b'--===============' + boundary + b'==\n' +
-      b'Content-Type: application/json\n' +
-      b'MIME-Version: 1.0\n\n' +
-      b'{"data": {}}\n' +
-      b'--===============' + boundary + b'==\n' +
-      b'Content-Type: image/png\n' +
-      b'MIME-Version: 1.0\n' +
-      b'Content-Transfer-Encoding: binary\n\n' +
-      contents +
-      b'\n--===============' + boundary + b'==--')
-    assertUrisEqual(self,
-        'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=multipart&alt=json',
-        request.uri)
+    def test_media_capable_method_without_media(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_media_capable_method_without_media(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        request = zoo.animals().insert(body={})
+        self.assertTrue(request.headers["content-type"], "application/json")
 
-    request = zoo.animals().insert(body={})
-    self.assertTrue(request.headers['content-type'], 'application/json')
+    def test_resumable_multipart_media_good_upload(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_resumable_multipart_media_good_upload(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        media_upload = MediaFileUpload(datafile("small.png"), resumable=True)
+        request = zoo.animals().insert(media_body=media_upload, body={})
+        self.assertTrue(request.headers["content-type"].startswith("application/json"))
+        self.assertEquals('{"data": {}}', request.body)
+        self.assertEquals(media_upload, request.resumable)
 
-    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
-    request = zoo.animals().insert(media_body=media_upload, body={})
-    self.assertTrue(request.headers['content-type'].startswith(
-        'application/json'))
-    self.assertEquals('{"data": {}}', request.body)
-    self.assertEquals(media_upload, request.resumable)
+        self.assertEquals("image/png", request.resumable.mimetype())
 
-    self.assertEquals('image/png', request.resumable.mimetype())
+        self.assertNotEquals(request.body, None)
+        self.assertEquals(request.resumable_uri, None)
 
-    self.assertNotEquals(request.body, None)
-    self.assertEquals(request.resumable_uri, None)
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                ({"status": "308", "location": "http://upload.example.com/2"}, ""),
+                (
+                    {
+                        "status": "308",
+                        "location": "http://upload.example.com/3",
+                        "range": "0-12",
+                    },
+                    "",
+                ),
+                (
+                    {
+                        "status": "308",
+                        "location": "http://upload.example.com/4",
+                        "range": "0-%d" % (media_upload.size() - 2),
+                    },
+                    "",
+                ),
+                ({"status": "200"}, '{"foo": "bar"}'),
+            ]
+        )
 
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/2'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/3',
-        'range': '0-12'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/4',
-        'range': '0-%d' % (media_upload.size() - 2)}, ''),
-      ({'status': '200'}, '{"foo": "bar"}'),
-      ])
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(None, body)
+        self.assertTrue(isinstance(status, MediaUploadProgress))
+        self.assertEquals(0, status.resumable_progress)
 
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(None, body)
-    self.assertTrue(isinstance(status, MediaUploadProgress))
-    self.assertEquals(0, status.resumable_progress)
+        # Two requests should have been made and the resumable_uri should have been
+        # updated for each one.
+        self.assertEquals(request.resumable_uri, "http://upload.example.com/2")
+        self.assertEquals(media_upload, request.resumable)
+        self.assertEquals(0, request.resumable_progress)
 
-    # Two requests should have been made and the resumable_uri should have been
-    # updated for each one.
-    self.assertEquals(request.resumable_uri, 'http://upload.example.com/2')
-    self.assertEquals(media_upload, request.resumable)
-    self.assertEquals(0, request.resumable_progress)
-    
-    # This next chuck call should upload the first chunk
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(request.resumable_uri, 'http://upload.example.com/3')
-    self.assertEquals(media_upload, request.resumable)
-    self.assertEquals(13, request.resumable_progress)
+        # This next chunk call should upload the first chunk
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(request.resumable_uri, "http://upload.example.com/3")
+        self.assertEquals(media_upload, request.resumable)
+        self.assertEquals(13, request.resumable_progress)
 
-    # This call will upload the next chunk
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(request.resumable_uri, 'http://upload.example.com/4')
-    self.assertEquals(media_upload.size()-1, request.resumable_progress)
-    self.assertEquals('{"data": {}}', request.body)
+        # This call will upload the next chunk
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(request.resumable_uri, "http://upload.example.com/4")
+        self.assertEquals(media_upload.size() - 1, request.resumable_progress)
+        self.assertEquals('{"data": {}}', request.body)
 
-    # Final call to next_chunk should complete the upload.
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(body, {"foo": "bar"})
-    self.assertEquals(status, None)
+        # Final call to next_chunk should complete the upload.
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(body, {"foo": "bar"})
+        self.assertEquals(status, None)
 
+    def test_resumable_media_good_upload(self):
+        """Not a multipart upload."""
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_resumable_media_good_upload(self):
-    """Not a multipart upload."""
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        media_upload = MediaFileUpload(datafile("small.png"), resumable=True)
+        request = zoo.animals().insert(media_body=media_upload, body=None)
+        self.assertEquals(media_upload, request.resumable)
 
-    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
-    request = zoo.animals().insert(media_body=media_upload, body=None)
-    self.assertEquals(media_upload, request.resumable)
+        self.assertEquals("image/png", request.resumable.mimetype())
 
-    self.assertEquals('image/png', request.resumable.mimetype())
+        self.assertEquals(request.body, None)
+        self.assertEquals(request.resumable_uri, None)
 
-    self.assertEquals(request.body, None)
-    self.assertEquals(request.resumable_uri, None)
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                (
+                    {
+                        "status": "308",
+                        "location": "http://upload.example.com/2",
+                        "range": "0-12",
+                    },
+                    "",
+                ),
+                (
+                    {
+                        "status": "308",
+                        "location": "http://upload.example.com/3",
+                        "range": "0-%d" % (media_upload.size() - 2),
+                    },
+                    "",
+                ),
+                ({"status": "200"}, '{"foo": "bar"}'),
+            ]
+        )
 
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/2',
-        'range': '0-12'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/3',
-        'range': '0-%d' % (media_upload.size() - 2)}, ''),
-      ({'status': '200'}, '{"foo": "bar"}'),
-      ])
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(None, body)
+        self.assertTrue(isinstance(status, MediaUploadProgress))
+        self.assertEquals(13, status.resumable_progress)
+
+        # Two requests should have been made and the resumable_uri should have been
+        # updated for each one.
+        self.assertEquals(request.resumable_uri, "http://upload.example.com/2")
 
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(None, body)
-    self.assertTrue(isinstance(status, MediaUploadProgress))
-    self.assertEquals(13, status.resumable_progress)
+        self.assertEquals(media_upload, request.resumable)
+        self.assertEquals(13, request.resumable_progress)
 
-    # Two requests should have been made and the resumable_uri should have been
-    # updated for each one.
-    self.assertEquals(request.resumable_uri, 'http://upload.example.com/2')
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(request.resumable_uri, "http://upload.example.com/3")
+        self.assertEquals(media_upload.size() - 1, request.resumable_progress)
+        self.assertEquals(request.body, None)
 
-    self.assertEquals(media_upload, request.resumable)
-    self.assertEquals(13, request.resumable_progress)
+        # Final call to next_chunk should complete the upload.
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(body, {"foo": "bar"})
+        self.assertEquals(status, None)
 
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(request.resumable_uri, 'http://upload.example.com/3')
-    self.assertEquals(media_upload.size()-1, request.resumable_progress)
-    self.assertEquals(request.body, None)
+    def test_resumable_media_good_upload_from_execute(self):
+        """Not a multipart upload."""
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-    # Final call to next_chunk should complete the upload.
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(body, {"foo": "bar"})
-    self.assertEquals(status, None)
+        media_upload = MediaFileUpload(datafile("small.png"), resumable=True)
+        request = zoo.animals().insert(media_body=media_upload, body=None)
+        assertUrisEqual(
+            self,
+            "https://www.googleapis.com/upload/zoo/v1/animals?uploadType=resumable&alt=json",
+            request.uri,
+        )
 
-  def test_resumable_media_good_upload_from_execute(self):
-    """Not a multipart upload."""
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                (
+                    {
+                        "status": "308",
+                        "location": "http://upload.example.com/2",
+                        "range": "0-12",
+                    },
+                    "",
+                ),
+                (
+                    {
+                        "status": "308",
+                        "location": "http://upload.example.com/3",
+                        "range": "0-%d" % media_upload.size(),
+                    },
+                    "",
+                ),
+                ({"status": "200"}, '{"foo": "bar"}'),
+            ]
+        )
 
-    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
-    request = zoo.animals().insert(media_body=media_upload, body=None)
-    assertUrisEqual(self,
-        'https://www.googleapis.com/upload/zoo/v1/animals?uploadType=resumable&alt=json',
-        request.uri)
+        body = request.execute(http=http)
+        self.assertEquals(body, {"foo": "bar"})
 
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/2',
-        'range': '0-12'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/3',
-        'range': '0-%d' % media_upload.size()}, ''),
-      ({'status': '200'}, '{"foo": "bar"}'),
-      ])
+    def test_resumable_media_fail_unknown_response_code_first_request(self):
+        """Not a multipart upload."""
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-    body = request.execute(http=http)
-    self.assertEquals(body, {"foo": "bar"})
+        media_upload = MediaFileUpload(datafile("small.png"), resumable=True)
+        request = zoo.animals().insert(media_body=media_upload, body=None)
 
-  def test_resumable_media_fail_unknown_response_code_first_request(self):
-    """Not a multipart upload."""
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        http = HttpMockSequence(
+            [({"status": "400", "location": "http://upload.example.com"}, "")]
+        )
 
-    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
-    request = zoo.animals().insert(media_body=media_upload, body=None)
+        try:
+            request.execute(http=http)
+            self.fail("Should have raised ResumableUploadError.")
+        except ResumableUploadError as e:
+            self.assertEqual(400, e.resp.status)
 
-    http = HttpMockSequence([
-      ({'status': '400',
-        'location': 'http://upload.example.com'}, ''),
-      ])
+    def test_resumable_media_fail_unknown_response_code_subsequent_request(self):
+        """Not a multipart upload."""
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-    try:
-      request.execute(http=http)
-      self.fail('Should have raised ResumableUploadError.')
-    except ResumableUploadError as e:
-      self.assertEqual(400, e.resp.status)
+        media_upload = MediaFileUpload(datafile("small.png"), resumable=True)
+        request = zoo.animals().insert(media_body=media_upload, body=None)
 
-  def test_resumable_media_fail_unknown_response_code_subsequent_request(self):
-    """Not a multipart upload."""
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                ({"status": "400"}, ""),
+            ]
+        )
 
-    media_upload = MediaFileUpload(datafile('small.png'), resumable=True)
-    request = zoo.animals().insert(media_body=media_upload, body=None)
+        self.assertRaises(HttpError, request.execute, http=http)
+        self.assertTrue(request._in_error_state)
 
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '400'}, ''),
-      ])
+        http = HttpMockSequence(
+            [
+                ({"status": "308", "range": "0-5"}, ""),
+                ({"status": "308", "range": "0-6"}, ""),
+            ]
+        )
 
-    self.assertRaises(HttpError, request.execute, http=http)
-    self.assertTrue(request._in_error_state)
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(
+            status.resumable_progress,
+            7,
+            "Should have first checked length and then tried to PUT more.",
+        )
+        self.assertFalse(request._in_error_state)
 
-    http = HttpMockSequence([
-      ({'status': '308',
-        'range': '0-5'}, ''),
-      ({'status': '308',
-        'range': '0-6'}, ''),
-      ])
+        # Put it back in an error state.
+        http = HttpMockSequence([({"status": "400"}, "")])
+        self.assertRaises(HttpError, request.execute, http=http)
+        self.assertTrue(request._in_error_state)
 
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(status.resumable_progress, 7,
-      'Should have first checked length and then tried to PUT more.')
-    self.assertFalse(request._in_error_state)
+        # Pretend the last request that 400'd actually succeeded.
+        http = HttpMockSequence([({"status": "200"}, '{"foo": "bar"}')])
+        status, body = request.next_chunk(http=http)
+        self.assertEqual(body, {"foo": "bar"})
 
-    # Put it back in an error state.
-    http = HttpMockSequence([
-      ({'status': '400'}, ''),
-      ])
-    self.assertRaises(HttpError, request.execute, http=http)
-    self.assertTrue(request._in_error_state)
+    def test_media_io_base_stream_unlimited_chunksize_resume(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-    # Pretend the last request that 400'd actually succeeded.
-    http = HttpMockSequence([
-      ({'status': '200'}, '{"foo": "bar"}'),
-      ])
-    status, body = request.next_chunk(http=http)
-    self.assertEqual(body, {'foo': 'bar'})
+        # Set up a seekable stream and try to upload in a single chunk.
+        fd = BytesIO(b'01234"56789"')
+        media_upload = MediaIoBaseUpload(
+            fd=fd, mimetype="text/plain", chunksize=-1, resumable=True
+        )
 
-  def test_media_io_base_stream_unlimited_chunksize_resume(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        request = zoo.animals().insert(media_body=media_upload, body=None)
 
-    # Set up a seekable stream and try to upload in single chunk.
-    fd = BytesIO(b'01234"56789"')
-    media_upload = MediaIoBaseUpload(
-        fd=fd, mimetype='text/plain', chunksize=-1, resumable=True)
+        # The single chunk fails; restart at the right point.
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                (
+                    {
+                        "status": "308",
+                        "location": "http://upload.example.com/2",
+                        "range": "0-4",
+                    },
+                    "",
+                ),
+                ({"status": "200"}, "echo_request_body"),
+            ]
+        )
 
-    request = zoo.animals().insert(media_body=media_upload, body=None)
+        body = request.execute(http=http)
+        self.assertEqual("56789", body)
 
-    # The single chunk fails, restart at the right point.
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/2',
-        'range': '0-4'}, ''),
-      ({'status': '200'}, 'echo_request_body'),
-      ])
+    def test_media_io_base_stream_chunksize_resume(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-    body = request.execute(http=http)
-    self.assertEqual('56789', body)
+        # Set up a seekable stream and try to upload in chunks.
+        fd = BytesIO(b"0123456789")
+        media_upload = MediaIoBaseUpload(
+            fd=fd, mimetype="text/plain", chunksize=5, resumable=True
+        )
 
-  def test_media_io_base_stream_chunksize_resume(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        request = zoo.animals().insert(media_body=media_upload, body=None)
 
-    # Set up a seekable stream and try to upload in chunks.
-    fd = BytesIO(b'0123456789')
-    media_upload = MediaIoBaseUpload(
-        fd=fd, mimetype='text/plain', chunksize=5, resumable=True)
+        # The single chunk fails; pull the content sent out of the exception.
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                ({"status": "400"}, "echo_request_body"),
+            ]
+        )
 
-    request = zoo.animals().insert(media_body=media_upload, body=None)
+        try:
+            body = request.execute(http=http)
+        except HttpError as e:
+            self.assertEqual(b"01234", e.content)
 
-    # The single chunk fails, pull the content sent out of the exception.
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '400'}, 'echo_request_body'),
-      ])
+    def test_resumable_media_handle_uploads_of_unknown_size(self):
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                ({"status": "200"}, "echo_request_headers_as_json"),
+            ]
+        )
 
-    try:
-      body = request.execute(http=http)
-    except HttpError as e:
-      self.assertEqual(b'01234', e.content)
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_resumable_media_handle_uploads_of_unknown_size(self):
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '200'}, 'echo_request_headers_as_json'),
-      ])
+        # Create an upload that doesn't know the full size of the media.
+        class IoBaseUnknownLength(MediaUpload):
+            def chunksize(self):
+                return 10
 
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+            def mimetype(self):
+                return "image/png"
 
-    # Create an upload that doesn't know the full size of the media.
-    class IoBaseUnknownLength(MediaUpload):
-      def chunksize(self):
-        return 10
+            def size(self):
+                return None
 
-      def mimetype(self):
-        return 'image/png'
+            def resumable(self):
+                return True
 
-      def size(self):
-        return None
+            def getbytes(self, begin, length):
+                return "0123456789"
 
-      def resumable(self):
-        return True
+        upload = IoBaseUnknownLength()
 
-      def getbytes(self, begin, length):
-        return '0123456789'
+        request = zoo.animals().insert(media_body=upload, body=None)
+        status, body = request.next_chunk(http=http)
+        self.assertEqual(body, {"Content-Range": "bytes 0-9/*", "Content-Length": "10"})
 
-    upload = IoBaseUnknownLength()
+    def test_resumable_media_no_streaming_on_unsupported_platforms(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-    request = zoo.animals().insert(media_body=upload, body=None)
-    status, body = request.next_chunk(http=http)
-    self.assertEqual(body, {
-        'Content-Range': 'bytes 0-9/*',
-        'Content-Length': '10',
-        })
+        class IoBaseHasStream(MediaUpload):
+            def chunksize(self):
+                return 10
 
-  def test_resumable_media_no_streaming_on_unsupported_platforms(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+            def mimetype(self):
+                return "image/png"
 
-    class IoBaseHasStream(MediaUpload):
-      def chunksize(self):
-        return 10
+            def size(self):
+                return None
 
-      def mimetype(self):
-        return 'image/png'
+            def resumable(self):
+                return True
 
-      def size(self):
-        return None
+            def getbytes(self, begin, length):
+                return "0123456789"
 
-      def resumable(self):
-        return True
+            def has_stream(self):
+                return True
 
-      def getbytes(self, begin, length):
-        return '0123456789'
+            def stream(self):
+                raise NotImplementedError()
 
-      def has_stream(self):
-        return True
+        upload = IoBaseHasStream()
 
-      def stream(self):
-        raise NotImplementedError()
+        orig_version = sys.version_info
 
-    upload = IoBaseHasStream()
+        sys.version_info = (2, 6, 5, "final", 0)
 
-    orig_version = sys.version_info
+        request = zoo.animals().insert(media_body=upload, body=None)
 
-    sys.version_info = (2, 6, 5, 'final', 0)
+        # This should raise an exception because stream() will be called.
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                ({"status": "200"}, "echo_request_headers_as_json"),
+            ]
+        )
 
-    request = zoo.animals().insert(media_body=upload, body=None)
+        self.assertRaises(NotImplementedError, request.next_chunk, http=http)
 
-    # This should raise an exception because stream() will be called.
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '200'}, 'echo_request_headers_as_json'),
-      ])
+        sys.version_info = orig_version
 
-    self.assertRaises(NotImplementedError, request.next_chunk, http=http)
+    def test_resumable_media_handle_uploads_of_unknown_size_eof(self):
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                ({"status": "200"}, "echo_request_headers_as_json"),
+            ]
+        )
 
-    sys.version_info = orig_version
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_resumable_media_handle_uploads_of_unknown_size_eof(self):
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '200'}, 'echo_request_headers_as_json'),
-      ])
+        fd = BytesIO(b"data goes here")
 
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        # Create an upload that doesn't know the full size of the media.
+        upload = MediaIoBaseUpload(
+            fd=fd, mimetype="image/png", chunksize=15, resumable=True
+        )
 
-    fd = BytesIO(b'data goes here')
+        request = zoo.animals().insert(media_body=upload, body=None)
+        status, body = request.next_chunk(http=http)
+        self.assertEqual(
+            body, {"Content-Range": "bytes 0-13/14", "Content-Length": "14"}
+        )
 
-    # Create an upload that doesn't know the full size of the media.
-    upload = MediaIoBaseUpload(
-        fd=fd, mimetype='image/png', chunksize=15, resumable=True)
+    def test_resumable_media_handle_resume_of_upload_of_unknown_size(self):
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                ({"status": "400"}, ""),
+            ]
+        )
 
-    request = zoo.animals().insert(media_body=upload, body=None)
-    status, body = request.next_chunk(http=http)
-    self.assertEqual(body, {
-        'Content-Range': 'bytes 0-13/14',
-        'Content-Length': '14',
-        })
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-  def test_resumable_media_handle_resume_of_upload_of_unknown_size(self):
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '400'}, ''),
-      ])
+        # Create an upload that doesn't know the full size of the media.
+        fd = BytesIO(b"data goes here")
 
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+        upload = MediaIoBaseUpload(
+            fd=fd, mimetype="image/png", chunksize=500, resumable=True
+        )
 
-    # Create an upload that doesn't know the full size of the media.
-    fd = BytesIO(b'data goes here')
+        request = zoo.animals().insert(media_body=upload, body=None)
 
-    upload = MediaIoBaseUpload(
-        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
+        # Put it in an error state.
+        self.assertRaises(HttpError, request.next_chunk, http=http)
 
-    request = zoo.animals().insert(media_body=upload, body=None)
+        http = HttpMockSequence(
+            [({"status": "400", "range": "0-5"}, "echo_request_headers_as_json")]
+        )
+        try:
+            # Should resume the upload by first querying the status of the upload.
+            request.next_chunk(http=http)
+        except HttpError as e:
+            expected = {"Content-Range": "bytes */14", "content-length": "0"}
+            self.assertEqual(
+                expected,
+                json.loads(e.content.decode("utf-8")),
+                "Should send an empty body when requesting the current upload status.",
+            )
 
-    # Put it in an error state.
-    self.assertRaises(HttpError, request.next_chunk, http=http)
+    def test_pickle(self):
+        sorted_resource_keys = [
+            "_baseUrl",
+            "_developerKey",
+            "_dynamic_attrs",
+            "_http",
+            "_model",
+            "_requestBuilder",
+            "_resourceDesc",
+            "_rootDesc",
+            "_schema",
+            "animals",
+            "global_",
+            "load",
+            "loadNoTemplate",
+            "my",
+            "new_batch_http_request",
+            "query",
+            "scopedAnimals",
+        ]
 
-    http = HttpMockSequence([
-      ({'status': '400',
-        'range': '0-5'}, 'echo_request_headers_as_json'),
-      ])
-    try:
-      # Should resume the upload by first querying the status of the upload.
-      request.next_chunk(http=http)
-    except HttpError as e:
-      expected = {
-          'Content-Range': 'bytes */14',
-          'content-length': '0'
-          }
-      self.assertEqual(expected, json.loads(e.content.decode('utf-8')),
-        'Should send an empty body when requesting the current upload status.')
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
+        self.assertEqual(sorted(zoo.__dict__.keys()), sorted_resource_keys)
 
-  def test_pickle(self):
-    sorted_resource_keys = ['_baseUrl',
-                            '_developerKey',
-                            '_dynamic_attrs',
-                            '_http',
-                            '_model',
-                            '_requestBuilder',
-                            '_resourceDesc',
-                            '_rootDesc',
-                            '_schema',
-                            'animals',
-                            'global_',
-                            'load',
-                            'loadNoTemplate',
-                            'my',
-                            'new_batch_http_request',
-                            'query',
-                            'scopedAnimals']
+        pickled_zoo = pickle.dumps(zoo)
+        new_zoo = pickle.loads(pickled_zoo)
+        self.assertEqual(sorted(new_zoo.__dict__.keys()), sorted_resource_keys)
+        self.assertTrue(hasattr(new_zoo, "animals"))
+        self.assertTrue(callable(new_zoo.animals))
+        self.assertTrue(hasattr(new_zoo, "global_"))
+        self.assertTrue(callable(new_zoo.global_))
+        self.assertTrue(hasattr(new_zoo, "load"))
+        self.assertTrue(callable(new_zoo.load))
+        self.assertTrue(hasattr(new_zoo, "loadNoTemplate"))
+        self.assertTrue(callable(new_zoo.loadNoTemplate))
+        self.assertTrue(hasattr(new_zoo, "my"))
+        self.assertTrue(callable(new_zoo.my))
+        self.assertTrue(hasattr(new_zoo, "query"))
+        self.assertTrue(callable(new_zoo.query))
+        self.assertTrue(hasattr(new_zoo, "scopedAnimals"))
+        self.assertTrue(callable(new_zoo.scopedAnimals))
 
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
-    self.assertEqual(sorted(zoo.__dict__.keys()), sorted_resource_keys)
+        self.assertEqual(sorted(zoo._dynamic_attrs), sorted(new_zoo._dynamic_attrs))
+        self.assertEqual(zoo._baseUrl, new_zoo._baseUrl)
+        self.assertEqual(zoo._developerKey, new_zoo._developerKey)
+        self.assertEqual(zoo._requestBuilder, new_zoo._requestBuilder)
+        self.assertEqual(zoo._resourceDesc, new_zoo._resourceDesc)
+        self.assertEqual(zoo._rootDesc, new_zoo._rootDesc)
+        # _http, _model and _schema won't be equal since we will get new
+        # instances upon un-pickling
 
-    pickled_zoo = pickle.dumps(zoo)
-    new_zoo = pickle.loads(pickled_zoo)
-    self.assertEqual(sorted(new_zoo.__dict__.keys()), sorted_resource_keys)
-    self.assertTrue(hasattr(new_zoo, 'animals'))
-    self.assertTrue(callable(new_zoo.animals))
-    self.assertTrue(hasattr(new_zoo, 'global_'))
-    self.assertTrue(callable(new_zoo.global_))
-    self.assertTrue(hasattr(new_zoo, 'load'))
-    self.assertTrue(callable(new_zoo.load))
-    self.assertTrue(hasattr(new_zoo, 'loadNoTemplate'))
-    self.assertTrue(callable(new_zoo.loadNoTemplate))
-    self.assertTrue(hasattr(new_zoo, 'my'))
-    self.assertTrue(callable(new_zoo.my))
-    self.assertTrue(hasattr(new_zoo, 'query'))
-    self.assertTrue(callable(new_zoo.query))
-    self.assertTrue(hasattr(new_zoo, 'scopedAnimals'))
-    self.assertTrue(callable(new_zoo.scopedAnimals))
+    def _dummy_zoo_request(self):
+        with open(os.path.join(DATA_DIR, "zoo.json"), "rU") as fh:
+            zoo_contents = fh.read()
 
-    self.assertEqual(sorted(zoo._dynamic_attrs), sorted(new_zoo._dynamic_attrs))
-    self.assertEqual(zoo._baseUrl, new_zoo._baseUrl)
-    self.assertEqual(zoo._developerKey, new_zoo._developerKey)
-    self.assertEqual(zoo._requestBuilder, new_zoo._requestBuilder)
-    self.assertEqual(zoo._resourceDesc, new_zoo._resourceDesc)
-    self.assertEqual(zoo._rootDesc, new_zoo._rootDesc)
-    # _http, _model and _schema won't be equal since we will get new
-    # instances upon un-pickling
+        zoo_uri = uritemplate.expand(DISCOVERY_URI, {"api": "zoo", "apiVersion": "v1"})
+        if "REMOTE_ADDR" in os.environ:
+            zoo_uri = util._add_query_parameter(
+                zoo_uri, "userIp", os.environ["REMOTE_ADDR"]
+            )
 
-  def _dummy_zoo_request(self):
-    with open(os.path.join(DATA_DIR, 'zoo.json'), 'rU') as fh:
-      zoo_contents = fh.read()
+        http = build_http()
+        original_request = http.request
 
-    zoo_uri = uritemplate.expand(DISCOVERY_URI,
-                                 {'api': 'zoo', 'apiVersion': 'v1'})
-    if 'REMOTE_ADDR' in os.environ:
-        zoo_uri = util._add_query_parameter(zoo_uri, 'userIp',
-                                            os.environ['REMOTE_ADDR'])
+        def wrapped_request(uri, method="GET", *args, **kwargs):
+            if uri == zoo_uri:
+                return httplib2.Response({"status": "200"}), zoo_contents
+            return original_request(uri, method=method, *args, **kwargs)
 
-    http = build_http()
-    original_request = http.request
-    def wrapped_request(uri, method='GET', *args, **kwargs):
-        if uri == zoo_uri:
-          return httplib2.Response({'status': '200'}), zoo_contents
-        return original_request(uri, method=method, *args, **kwargs)
-    http.request = wrapped_request
-    return http
+        http.request = wrapped_request
+        return http
 
-  def _dummy_token(self):
-    access_token = 'foo'
-    client_id = 'some_client_id'
-    client_secret = 'cOuDdkfjxxnv+'
-    refresh_token = '1/0/a.df219fjls0'
-    token_expiry = datetime.datetime.utcnow()
-    user_agent = 'refresh_checker/1.0'
-    return OAuth2Credentials(
-        access_token, client_id, client_secret,
-        refresh_token, token_expiry, GOOGLE_TOKEN_URI,
-        user_agent)
+    def _dummy_token(self):
+        access_token = "foo"
+        client_id = "some_client_id"
+        client_secret = "cOuDdkfjxxnv+"
+        refresh_token = "1/0/a.df219fjls0"
+        token_expiry = datetime.datetime.utcnow()
+        user_agent = "refresh_checker/1.0"
+        return OAuth2Credentials(
+            access_token,
+            client_id,
+            client_secret,
+            refresh_token,
+            token_expiry,
+            GOOGLE_TOKEN_URI,
+            user_agent,
+        )
 
-  def test_pickle_with_credentials(self):
-    credentials = self._dummy_token()
-    http = self._dummy_zoo_request()
-    http = credentials.authorize(http)
-    self.assertTrue(hasattr(http.request, 'credentials'))
+    def test_pickle_with_credentials(self):
+        credentials = self._dummy_token()
+        http = self._dummy_zoo_request()
+        http = credentials.authorize(http)
+        self.assertTrue(hasattr(http.request, "credentials"))
 
-    zoo = build('zoo', 'v1', http=http)
-    pickled_zoo = pickle.dumps(zoo)
-    new_zoo = pickle.loads(pickled_zoo)
-    self.assertEqual(sorted(zoo.__dict__.keys()),
-                     sorted(new_zoo.__dict__.keys()))
-    new_http = new_zoo._http
-    self.assertFalse(hasattr(new_http.request, 'credentials'))
+        zoo = build("zoo", "v1", http=http)
+        pickled_zoo = pickle.dumps(zoo)
+        new_zoo = pickle.loads(pickled_zoo)
+        self.assertEqual(sorted(zoo.__dict__.keys()), sorted(new_zoo.__dict__.keys()))
+        new_http = new_zoo._http
+        self.assertFalse(hasattr(new_http.request, "credentials"))
 
-  def test_resumable_media_upload_no_content(self):
-    self.http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=self.http)
+    def test_resumable_media_upload_no_content(self):
+        self.http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=self.http)
 
-    media_upload = MediaFileUpload(datafile('empty'), resumable=True)
-    request = zoo.animals().insert(media_body=media_upload, body=None)
+        media_upload = MediaFileUpload(datafile("empty"), resumable=True)
+        request = zoo.animals().insert(media_body=media_upload, body=None)
 
-    self.assertEquals(media_upload, request.resumable)
-    self.assertEquals(request.body, None)
-    self.assertEquals(request.resumable_uri, None)
+        self.assertEquals(media_upload, request.resumable)
+        self.assertEquals(request.body, None)
+        self.assertEquals(request.resumable_uri, None)
 
-    http = HttpMockSequence([
-      ({'status': '200',
-        'location': 'http://upload.example.com'}, ''),
-      ({'status': '308',
-        'location': 'http://upload.example.com/2',
-        'range': '0-0'}, ''),
-    ])
+        http = HttpMockSequence(
+            [
+                ({"status": "200", "location": "http://upload.example.com"}, ""),
+                (
+                    {
+                        "status": "308",
+                        "location": "http://upload.example.com/2",
+                        "range": "0-0",
+                    },
+                    "",
+                ),
+            ]
+        )
 
-    status, body = request.next_chunk(http=http)
-    self.assertEquals(None, body)
-    self.assertTrue(isinstance(status, MediaUploadProgress))
-    self.assertEquals(0, status.progress())
+        status, body = request.next_chunk(http=http)
+        self.assertEquals(None, body)
+        self.assertTrue(isinstance(status, MediaUploadProgress))
+        self.assertEquals(0, status.progress())
 
 
 class Next(unittest.TestCase):
+    def test_next_successful_none_on_no_next_page_token(self):
+        self.http = HttpMock(datafile("tasks.json"), {"status": "200"})
+        tasks = build("tasks", "v1", http=self.http)
+        request = tasks.tasklists().list()
+        self.assertEqual(None, tasks.tasklists().list_next(request, {}))
 
-  def test_next_successful_none_on_no_next_page_token(self):
-    self.http = HttpMock(datafile('tasks.json'), {'status': '200'})
-    tasks = build('tasks', 'v1', http=self.http)
-    request = tasks.tasklists().list()
-    self.assertEqual(None, tasks.tasklists().list_next(request, {}))
+    def test_next_successful_none_on_empty_page_token(self):
+        self.http = HttpMock(datafile("tasks.json"), {"status": "200"})
+        tasks = build("tasks", "v1", http=self.http)
+        request = tasks.tasklists().list()
+        next_request = tasks.tasklists().list_next(request, {"nextPageToken": ""})
+        self.assertEqual(None, next_request)
 
-  def test_next_successful_none_on_empty_page_token(self):
-    self.http = HttpMock(datafile('tasks.json'), {'status': '200'})
-    tasks = build('tasks', 'v1', http=self.http)
-    request = tasks.tasklists().list()
-    next_request = tasks.tasklists().list_next(
-        request, {'nextPageToken': ''})
-    self.assertEqual(None, next_request)
+    def test_next_successful_with_next_page_token(self):
+        self.http = HttpMock(datafile("tasks.json"), {"status": "200"})
+        tasks = build("tasks", "v1", http=self.http)
+        request = tasks.tasklists().list()
+        next_request = tasks.tasklists().list_next(request, {"nextPageToken": "123abc"})
+        parsed = list(urlparse(next_request.uri))
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["pageToken"][0], "123abc")
 
-  def test_next_successful_with_next_page_token(self):
-    self.http = HttpMock(datafile('tasks.json'), {'status': '200'})
-    tasks = build('tasks', 'v1', http=self.http)
-    request = tasks.tasklists().list()
-    next_request = tasks.tasklists().list_next(
-        request, {'nextPageToken': '123abc'})
-    parsed = list(urlparse(next_request.uri))
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['pageToken'][0], '123abc')
+    def test_next_successful_with_next_page_token_alternate_name(self):
+        self.http = HttpMock(datafile("bigquery.json"), {"status": "200"})
+        bigquery = build("bigquery", "v2", http=self.http)
+        request = bigquery.tabledata().list(datasetId="", projectId="", tableId="")
+        next_request = bigquery.tabledata().list_next(request, {"pageToken": "123abc"})
+        parsed = list(urlparse(next_request.uri))
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["pageToken"][0], "123abc")
 
-  def test_next_successful_with_next_page_token_alternate_name(self):
-    self.http = HttpMock(datafile('bigquery.json'), {'status': '200'})
-    bigquery = build('bigquery', 'v2', http=self.http)
-    request = bigquery.tabledata().list(datasetId='', projectId='', tableId='')
-    next_request = bigquery.tabledata().list_next(
-        request, {'pageToken': '123abc'})
-    parsed = list(urlparse(next_request.uri))
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['pageToken'][0], '123abc')
+    def test_next_successful_with_next_page_token_in_body(self):
+        self.http = HttpMock(datafile("logging.json"), {"status": "200"})
+        logging = build("logging", "v2", http=self.http)
+        request = logging.entries().list(body={})
+        next_request = logging.entries().list_next(request, {"nextPageToken": "123abc"})
+        body = JsonModel().deserialize(next_request.body)
+        self.assertEqual(body["pageToken"], "123abc")
 
-  def test_next_successful_with_next_page_token_in_body(self):
-    self.http = HttpMock(datafile('logging.json'), {'status': '200'})
-    logging = build('logging', 'v2', http=self.http)
-    request = logging.entries().list(body={})
-    next_request = logging.entries().list_next(
-        request, {'nextPageToken': '123abc'})
-    body = JsonModel().deserialize(next_request.body)
-    self.assertEqual(body['pageToken'], '123abc')
+    def test_next_with_method_with_no_properties(self):
+        self.http = HttpMock(datafile("latitude.json"), {"status": "200"})
+        service = build("latitude", "v1", http=self.http)
+        service.currentLocation().get()
 
-  def test_next_with_method_with_no_properties(self):
-    self.http = HttpMock(datafile('latitude.json'), {'status': '200'})
-    service = build('latitude', 'v1', http=self.http)
-    service.currentLocation().get()
+    def test_next_nonexistent_with_no_next_page_token(self):
+        self.http = HttpMock(datafile("drive.json"), {"status": "200"})
+        drive = build("drive", "v3", http=self.http)
+        drive.changes().watch(body={})
+        self.assertFalse(callable(getattr(drive.changes(), "watch_next", None)))
 
-  def test_next_nonexistent_with_no_next_page_token(self):
-    self.http = HttpMock(datafile('drive.json'), {'status': '200'})
-    drive = build('drive', 'v3', http=self.http)
-    drive.changes().watch(body={})
-    self.assertFalse(callable(getattr(drive.changes(), 'watch_next', None)))
-
-  def test_next_successful_with_next_page_token_required(self):
-    self.http = HttpMock(datafile('drive.json'), {'status': '200'})
-    drive = build('drive', 'v3', http=self.http)
-    request = drive.changes().list(pageToken='startPageToken')
-    next_request = drive.changes().list_next(
-        request, {'nextPageToken': '123abc'})
-    parsed = list(urlparse(next_request.uri))
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['pageToken'][0], '123abc')
+    def test_next_successful_with_next_page_token_required(self):
+        self.http = HttpMock(datafile("drive.json"), {"status": "200"})
+        drive = build("drive", "v3", http=self.http)
+        request = drive.changes().list(pageToken="startPageToken")
+        next_request = drive.changes().list_next(request, {"nextPageToken": "123abc"})
+        parsed = list(urlparse(next_request.uri))
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["pageToken"][0], "123abc")
 
 
 class MediaGet(unittest.TestCase):
+    def test_get_media(self):
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
+        request = zoo.animals().get_media(name="Lion")
 
-  def test_get_media(self):
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
-    request = zoo.animals().get_media(name='Lion')
+        parsed = urlparse(request.uri)
+        q = parse_qs(parsed[4])
+        self.assertEqual(q["alt"], ["media"])
+        self.assertEqual(request.headers["accept"], "*/*")
 
-    parsed = urlparse(request.uri)
-    q = parse_qs(parsed[4])
-    self.assertEqual(q['alt'], ['media'])
-    self.assertEqual(request.headers['accept'], '*/*')
-
-    http = HttpMockSequence([
-      ({'status': '200'}, 'standing in for media'),
-      ])
-    response = request.execute(http=http)
-    self.assertEqual(b'standing in for media', response)
+        http = HttpMockSequence([({"status": "200"}, "standing in for media")])
+        response = request.execute(http=http)
+        self.assertEqual(b"standing in for media", response)
 
 
-if __name__ == '__main__':
-  unittest.main()
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/test_discovery_cache.py b/tests/test_discovery_cache.py
index 1786406..2c3efd8 100644
--- a/tests/test_discovery_cache.py
+++ b/tests/test_discovery_cache.py
@@ -26,33 +26,38 @@
 from googleapiclient.discovery_cache.base import Cache
 
 try:
-  from googleapiclient.discovery_cache.file_cache import Cache as FileCache
+    from googleapiclient.discovery_cache.file_cache import Cache as FileCache
 except ImportError:
-  FileCache = None
+    FileCache = None
 
 
-@unittest.skipIf(FileCache is None, 'FileCache unavailable.')
+@unittest.skipIf(FileCache is None, "FileCache unavailable.")
 class FileCacheTest(unittest.TestCase):
-  @mock.patch('googleapiclient.discovery_cache.file_cache.FILENAME',
-              new='google-api-python-client-discovery-doc-tests.cache')
-  def test_expiration(self):
-    def future_now():
-      return datetime.datetime.now() + datetime.timedelta(
-        seconds=DISCOVERY_DOC_MAX_AGE)
-    mocked_datetime = mock.MagicMock()
-    mocked_datetime.datetime.now.side_effect = future_now
-    cache = FileCache(max_age=DISCOVERY_DOC_MAX_AGE)
-    first_url = 'url-1'
-    first_url_content = 'url-1-content'
-    cache.set(first_url, first_url_content)
+    @mock.patch(
+        "googleapiclient.discovery_cache.file_cache.FILENAME",
+        new="google-api-python-client-discovery-doc-tests.cache",
+    )
+    def test_expiration(self):
+        def future_now():
+            return datetime.datetime.now() + datetime.timedelta(
+                seconds=DISCOVERY_DOC_MAX_AGE
+            )
 
-    # Make sure the content is cached.
-    self.assertEqual(first_url_content, cache.get(first_url))
+        mocked_datetime = mock.MagicMock()
+        mocked_datetime.datetime.now.side_effect = future_now
+        cache = FileCache(max_age=DISCOVERY_DOC_MAX_AGE)
+        first_url = "url-1"
+        first_url_content = "url-1-content"
+        cache.set(first_url, first_url_content)
 
-    # Simulate another `set` call in the future date.
-    with mock.patch('googleapiclient.discovery_cache.file_cache.datetime',
-                    new=mocked_datetime):
-      cache.set('url-2', 'url-2-content')
-    
-    # Make sure the content is expired
-    self.assertEqual(None, cache.get(first_url))
+        # Make sure the content is cached.
+        self.assertEqual(first_url_content, cache.get(first_url))
+
+        # Simulate another `set` call in the future date.
+        with mock.patch(
+            "googleapiclient.discovery_cache.file_cache.datetime", new=mocked_datetime
+        ):
+            cache.set("url-2", "url-2-content")
+
+        # Make sure the content is expired
+        self.assertEqual(None, cache.get(first_url))
diff --git a/tests/test_errors.py b/tests/test_errors.py
index e4d2f09..b0d1e43 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -18,7 +18,7 @@
 """
 from __future__ import absolute_import
 
-__author__ = 'afshar@google.com (Ali Afshar)'
+__author__ = "afshar@google.com (Ali Afshar)"
 
 
 import unittest2 as unittest
@@ -47,55 +47,68 @@
 }
 """
 
-def fake_response(data, headers, reason='Ok'):
-  response = httplib2.Response(headers)
-  response.reason = reason
-  return response, data
+
+def fake_response(data, headers, reason="Ok"):
+    response = httplib2.Response(headers)
+    response.reason = reason
+    return response, data
 
 
 class Error(unittest.TestCase):
-  """Test handling of error bodies."""
+    """Test handling of error bodies."""
 
-  def test_json_body(self):
-    """Test a nicely formed, expected error response."""
-    resp, content = fake_response(JSON_ERROR_CONTENT,
-        {'status':'400', 'content-type': 'application/json'},
-        reason='Failed')
-    error = HttpError(resp, content, uri='http://example.org')
-    self.assertEqual(str(error), '<HttpError 400 when requesting http://example.org returned "country is required". Details: "error details">')
+    def test_json_body(self):
+        """Test a nicely formed, expected error response."""
+        resp, content = fake_response(
+            JSON_ERROR_CONTENT,
+            {"status": "400", "content-type": "application/json"},
+            reason="Failed",
+        )
+        error = HttpError(resp, content, uri="http://example.org")
+        self.assertEqual(
+            str(error),
+            '<HttpError 400 when requesting http://example.org returned "country is required". Details: "error details">',
+        )
 
-  def test_bad_json_body(self):
-    """Test handling of bodies with invalid json."""
-    resp, content = fake_response(b'{',
-        { 'status':'400', 'content-type': 'application/json'},
-        reason='Failed')
-    error = HttpError(resp, content)
-    self.assertEqual(str(error), '<HttpError 400 "Failed">')
+    def test_bad_json_body(self):
+        """Test handling of bodies with invalid json."""
+        resp, content = fake_response(
+            b"{", {"status": "400", "content-type": "application/json"}, reason="Failed"
+        )
+        error = HttpError(resp, content)
+        self.assertEqual(str(error), '<HttpError 400 "Failed">')
 
-  def test_with_uri(self):
-    """Test handling of passing in the request uri."""
-    resp, content = fake_response(b'{',
-        {'status':'400', 'content-type': 'application/json'},
-        reason='Failure')
-    error = HttpError(resp, content, uri='http://example.org')
-    self.assertEqual(str(error), '<HttpError 400 when requesting http://example.org returned "Failure">')
+    def test_with_uri(self):
+        """Test handling of passing in the request uri."""
+        resp, content = fake_response(
+            b"{",
+            {"status": "400", "content-type": "application/json"},
+            reason="Failure",
+        )
+        error = HttpError(resp, content, uri="http://example.org")
+        self.assertEqual(
+            str(error),
+            '<HttpError 400 when requesting http://example.org returned "Failure">',
+        )
 
-  def test_missing_message_json_body(self):
-    """Test handling of bodies with missing expected 'message' element."""
-    resp, content = fake_response(b'{}',
-        {'status':'400', 'content-type': 'application/json'},
-        reason='Failed')
-    error = HttpError(resp, content)
-    self.assertEqual(str(error), '<HttpError 400 "Failed">')
+    def test_missing_message_json_body(self):
+        """Test handling of bodies with missing expected 'message' element."""
+        resp, content = fake_response(
+            b"{}",
+            {"status": "400", "content-type": "application/json"},
+            reason="Failed",
+        )
+        error = HttpError(resp, content)
+        self.assertEqual(str(error), '<HttpError 400 "Failed">')
 
-  def test_non_json(self):
-    """Test handling of non-JSON bodies"""
-    resp, content = fake_response(b'}NOT OK', {'status':'400'})
-    error = HttpError(resp, content)
-    self.assertEqual(str(error), '<HttpError 400 "Ok">')
+    def test_non_json(self):
+        """Test handling of non-JSON bodies"""
+        resp, content = fake_response(b"}NOT OK", {"status": "400"})
+        error = HttpError(resp, content)
+        self.assertEqual(str(error), '<HttpError 400 "Ok">')
 
-  def test_missing_reason(self):
-    """Test an empty dict with a missing resp.reason."""
-    resp, content = fake_response(b'}NOT OK', {'status': '400'}, reason=None)
-    error = HttpError(resp, content)
-    self.assertEqual(str(error), '<HttpError 400 "">')
+    def test_missing_reason(self):
+        """Test an empty dict with a missing resp.reason."""
+        resp, content = fake_response(b"}NOT OK", {"status": "400"}, reason=None)
+        error = HttpError(resp, content)
+        self.assertEqual(str(error), '<HttpError 400 "">')
diff --git a/tests/test_http.py b/tests/test_http.py
index b92e63f..1b0caa5 100644
--- a/tests/test_http.py
+++ b/tests/test_http.py
@@ -21,7 +21,7 @@
 from __future__ import absolute_import
 from six.moves import range
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 from six import PY3
 from six import BytesIO, StringIO
@@ -63,594 +63,607 @@
 
 
 class MockCredentials(Credentials):
-  """Mock class for all Credentials objects."""
-  def __init__(self, bearer_token, expired=False):
-    super(MockCredentials, self).__init__()
-    self._authorized = 0
-    self._refreshed = 0
-    self._applied = 0
-    self._bearer_token = bearer_token
-    self._access_token_expired = expired
+    """Mock class for all Credentials objects."""
 
-  @property
-  def access_token(self):
-    return self._bearer_token
+    def __init__(self, bearer_token, expired=False):
+        super(MockCredentials, self).__init__()
+        self._authorized = 0
+        self._refreshed = 0
+        self._applied = 0
+        self._bearer_token = bearer_token
+        self._access_token_expired = expired
 
-  @property
-  def access_token_expired(self):
-    return self._access_token_expired
+    @property
+    def access_token(self):
+        return self._bearer_token
 
-  def authorize(self, http):
-    self._authorized += 1
+    @property
+    def access_token_expired(self):
+        return self._access_token_expired
 
-    request_orig = http.request
+    def authorize(self, http):
+        self._authorized += 1
 
-    # The closure that will replace 'httplib2.Http.request'.
-    def new_request(uri, method='GET', body=None, headers=None,
-                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
-                    connection_type=None):
-      # Modify the request headers to add the appropriate
-      # Authorization header.
-      if headers is None:
-        headers = {}
-      self.apply(headers)
+        request_orig = http.request
 
-      resp, content = request_orig(uri, method, body, headers,
-                                   redirections, connection_type)
+        # The closure that will replace 'httplib2.Http.request'.
+        def new_request(
+            uri,
+            method="GET",
+            body=None,
+            headers=None,
+            redirections=httplib2.DEFAULT_MAX_REDIRECTS,
+            connection_type=None,
+        ):
+            # Modify the request headers to add the appropriate
+            # Authorization header.
+            if headers is None:
+                headers = {}
+            self.apply(headers)
 
-      return resp, content
+            resp, content = request_orig(
+                uri, method, body, headers, redirections, connection_type
+            )
 
-    # Replace the request method with our own closure.
-    http.request = new_request
+            return resp, content
 
-    # Set credentials as a property of the request method.
-    setattr(http.request, 'credentials', self)
+        # Replace the request method with our own closure.
+        http.request = new_request
 
-    return http
+        # Set credentials as a property of the request method.
+        setattr(http.request, "credentials", self)
 
-  def refresh(self, http):
-    self._refreshed += 1
+        return http
 
-  def apply(self, headers):
-    self._applied += 1
-    headers['authorization'] = self._bearer_token + ' ' + str(self._refreshed)
+    def refresh(self, http):
+        self._refreshed += 1
+
+    def apply(self, headers):
+        self._applied += 1
+        headers["authorization"] = self._bearer_token + " " + str(self._refreshed)
 
 
 class HttpMockWithErrors(object):
-  def __init__(self, num_errors, success_json, success_data):
-    self.num_errors = num_errors
-    self.success_json = success_json
-    self.success_data = success_data
+    def __init__(self, num_errors, success_json, success_data):
+        self.num_errors = num_errors
+        self.success_json = success_json
+        self.success_data = success_data
 
-  def request(self, *args, **kwargs):
-    if not self.num_errors:
-      return httplib2.Response(self.success_json), self.success_data
-    else:
-      self.num_errors -= 1
-      if self.num_errors == 1:  # initial == 2
-        raise ssl.SSLError()
-      if self.num_errors == 3:  # initial == 4
-        raise httplib2.ServerNotFoundError()
-      else:  # initial != 2,4
-        if self.num_errors == 2:
-          # first try a broken pipe error (#218)
-          ex = socket.error()
-          ex.errno = socket.errno.EPIPE
+    def request(self, *args, **kwargs):
+        if not self.num_errors:
+            return httplib2.Response(self.success_json), self.success_data
         else:
-          # Initialize the timeout error code to the platform's error code.
-          try:
-            # For Windows:
-            ex = socket.error()
-            ex.errno = socket.errno.WSAETIMEDOUT
-          except AttributeError:
-            # For Linux/Mac:
-            if PY3:
-              ex = socket.timeout()
-            else:
-              ex = socket.error()
-              ex.errno = socket.errno.ETIMEDOUT
-        # Now raise the correct error.
-        raise ex
+            self.num_errors -= 1
+            if self.num_errors == 1:  # initial == 2
+                raise ssl.SSLError()
+            if self.num_errors == 3:  # initial == 4
+                raise httplib2.ServerNotFoundError()
+            else:  # initial != 2,4
+                if self.num_errors == 2:
+                    # first try a broken pipe error (#218)
+                    ex = socket.error()
+                    ex.errno = socket.errno.EPIPE
+                else:
+                    # Initialize the timeout error code to the platform's error code.
+                    try:
+                        # For Windows:
+                        ex = socket.error()
+                        ex.errno = socket.errno.WSAETIMEDOUT
+                    except AttributeError:
+                        # For Linux/Mac:
+                        if PY3:
+                            ex = socket.timeout()
+                        else:
+                            ex = socket.error()
+                            ex.errno = socket.errno.ETIMEDOUT
+                # Now raise the correct error.
+                raise ex
 
 
 class HttpMockWithNonRetriableErrors(object):
-  def __init__(self, num_errors, success_json, success_data):
-    self.num_errors = num_errors
-    self.success_json = success_json
-    self.success_data = success_data
+    def __init__(self, num_errors, success_json, success_data):
+        self.num_errors = num_errors
+        self.success_json = success_json
+        self.success_data = success_data
 
-  def request(self, *args, **kwargs):
-    if not self.num_errors:
-      return httplib2.Response(self.success_json), self.success_data
-    else:
-      self.num_errors -= 1
-      ex = socket.error()
-      # set errno to a non-retriable value
-      try:
-        # For Windows:
-        ex.errno = socket.errno.WSAECONNREFUSED
-      except AttributeError:
-        # For Linux/Mac:
-        ex.errno = socket.errno.ECONNREFUSED
-      # Now raise the correct timeout error.
-      raise ex
+    def request(self, *args, **kwargs):
+        if not self.num_errors:
+            return httplib2.Response(self.success_json), self.success_data
+        else:
+            self.num_errors -= 1
+            ex = socket.error()
+            # Set errno to a non-retriable value.
+            try:
+                # For Windows:
+                ex.errno = socket.errno.WSAECONNREFUSED
+            except AttributeError:
+                # For Linux/Mac:
+                ex.errno = socket.errno.ECONNREFUSED
+            # Now raise the non-retriable connection error.
+            raise ex
 
 
-DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
+DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
 
 
 def datafile(filename):
-  return os.path.join(DATA_DIR, filename)
+    return os.path.join(DATA_DIR, filename)
+
 
 def _postproc_none(*kwargs):
-  pass
+    pass
 
 
 class TestUserAgent(unittest.TestCase):
+    def test_set_user_agent(self):
+        http = HttpMockSequence([({"status": "200"}, "echo_request_headers")])
 
-  def test_set_user_agent(self):
-    http = HttpMockSequence([
-      ({'status': '200'}, 'echo_request_headers'),
-      ])
+        http = set_user_agent(http, "my_app/5.5")
+        resp, content = http.request("http://example.com")
+        self.assertEqual("my_app/5.5", content["user-agent"])
 
-    http = set_user_agent(http, "my_app/5.5")
-    resp, content = http.request("http://example.com")
-    self.assertEqual('my_app/5.5', content['user-agent'])
+    def test_set_user_agent_nested(self):
+        http = HttpMockSequence([({"status": "200"}, "echo_request_headers")])
 
-  def test_set_user_agent_nested(self):
-    http = HttpMockSequence([
-      ({'status': '200'}, 'echo_request_headers'),
-      ])
-
-    http = set_user_agent(http, "my_app/5.5")
-    http = set_user_agent(http, "my_library/0.1")
-    resp, content = http.request("http://example.com")
-    self.assertEqual('my_app/5.5 my_library/0.1', content['user-agent'])
+        http = set_user_agent(http, "my_app/5.5")
+        http = set_user_agent(http, "my_library/0.1")
+        resp, content = http.request("http://example.com")
+        self.assertEqual("my_app/5.5 my_library/0.1", content["user-agent"])
 
 
 class TestMediaUpload(unittest.TestCase):
+    def test_media_file_upload_closes_fd_in___del__(self):
+        file_desc = mock.Mock(spec=io.TextIOWrapper)
+        opener = mock.mock_open(file_desc)
+        if PY3:
+            with mock.patch("builtins.open", return_value=opener):
+                upload = MediaFileUpload(datafile("test_close"), mimetype="text/plain")
+        else:
+            with mock.patch("__builtin__.open", return_value=opener):
+                upload = MediaFileUpload(datafile("test_close"), mimetype="text/plain")
+        self.assertIs(upload.stream(), file_desc)
+        del upload
+        file_desc.close.assert_called_once_with()
 
-  def test_media_file_upload_closes_fd_in___del__(self):
-    file_desc = mock.Mock(spec=io.TextIOWrapper)
-    opener = mock.mock_open(file_desc)
-    if PY3:
-      with mock.patch('builtins.open', return_value=opener):
-        upload = MediaFileUpload(datafile('test_close'), mimetype='text/plain')
-    else:
-      with mock.patch('__builtin__.open', return_value=opener):
-        upload = MediaFileUpload(datafile('test_close'), mimetype='text/plain')     
-    self.assertIs(upload.stream(), file_desc)
-    del upload
-    file_desc.close.assert_called_once_with()
+    def test_media_file_upload_mimetype_detection(self):
+        upload = MediaFileUpload(datafile("small.png"))
+        self.assertEqual("image/png", upload.mimetype())
 
-  def test_media_file_upload_mimetype_detection(self):
-    upload = MediaFileUpload(datafile('small.png'))
-    self.assertEqual('image/png', upload.mimetype())
+        upload = MediaFileUpload(datafile("empty"))
+        self.assertEqual("application/octet-stream", upload.mimetype())
 
-    upload = MediaFileUpload(datafile('empty'))
-    self.assertEqual('application/octet-stream', upload.mimetype())
+    def test_media_file_upload_to_from_json(self):
+        upload = MediaFileUpload(datafile("small.png"), chunksize=500, resumable=True)
+        self.assertEqual("image/png", upload.mimetype())
+        self.assertEqual(190, upload.size())
+        self.assertEqual(True, upload.resumable())
+        self.assertEqual(500, upload.chunksize())
+        self.assertEqual(b"PNG", upload.getbytes(1, 3))
 
-  def test_media_file_upload_to_from_json(self):
-    upload = MediaFileUpload(
-        datafile('small.png'), chunksize=500, resumable=True)
-    self.assertEqual('image/png', upload.mimetype())
-    self.assertEqual(190, upload.size())
-    self.assertEqual(True, upload.resumable())
-    self.assertEqual(500, upload.chunksize())
-    self.assertEqual(b'PNG', upload.getbytes(1, 3))
+        json = upload.to_json()
+        new_upload = MediaUpload.new_from_json(json)
 
-    json = upload.to_json()
-    new_upload = MediaUpload.new_from_json(json)
+        self.assertEqual("image/png", new_upload.mimetype())
+        self.assertEqual(190, new_upload.size())
+        self.assertEqual(True, new_upload.resumable())
+        self.assertEqual(500, new_upload.chunksize())
+        self.assertEqual(b"PNG", new_upload.getbytes(1, 3))
 
-    self.assertEqual('image/png', new_upload.mimetype())
-    self.assertEqual(190, new_upload.size())
-    self.assertEqual(True, new_upload.resumable())
-    self.assertEqual(500, new_upload.chunksize())
-    self.assertEqual(b'PNG', new_upload.getbytes(1, 3))
+    def test_media_file_upload_raises_on_invalid_chunksize(self):
+        self.assertRaises(
+            InvalidChunkSizeError,
+            MediaFileUpload,
+            datafile("small.png"),
+            mimetype="image/png",
+            chunksize=-2,
+            resumable=True,
+        )
 
-  def test_media_file_upload_raises_on_invalid_chunksize(self):
-    self.assertRaises(InvalidChunkSizeError, MediaFileUpload,
-        datafile('small.png'), mimetype='image/png', chunksize=-2,
-        resumable=True)
+    def test_media_inmemory_upload(self):
+        media = MediaInMemoryUpload(
+            b"abcdef", mimetype="text/plain", chunksize=10, resumable=True
+        )
+        self.assertEqual("text/plain", media.mimetype())
+        self.assertEqual(10, media.chunksize())
+        self.assertTrue(media.resumable())
+        self.assertEqual(b"bc", media.getbytes(1, 2))
+        self.assertEqual(6, media.size())
 
-  def test_media_inmemory_upload(self):
-    media = MediaInMemoryUpload(b'abcdef', mimetype='text/plain', chunksize=10,
-                                resumable=True)
-    self.assertEqual('text/plain', media.mimetype())
-    self.assertEqual(10, media.chunksize())
-    self.assertTrue(media.resumable())
-    self.assertEqual(b'bc', media.getbytes(1, 2))
-    self.assertEqual(6, media.size())
+    def test_http_request_to_from_json(self):
+        http = build_http()
+        media_upload = MediaFileUpload(
+            datafile("small.png"), chunksize=500, resumable=True
+        )
+        req = HttpRequest(
+            http,
+            _postproc_none,
+            "http://example.com",
+            method="POST",
+            body="{}",
+            headers={"content-type": 'multipart/related; boundary="---flubber"'},
+            methodId="foo",
+            resumable=media_upload,
+        )
 
-  def test_http_request_to_from_json(self):
-    http = build_http()
-    media_upload = MediaFileUpload(
-        datafile('small.png'), chunksize=500, resumable=True)
-    req = HttpRequest(
-        http,
-        _postproc_none,
-        'http://example.com',
-        method='POST',
-        body='{}',
-        headers={'content-type': 'multipart/related; boundary="---flubber"'},
-        methodId='foo',
-        resumable=media_upload)
+        json = req.to_json()
+        new_req = HttpRequest.from_json(json, http, _postproc_none)
 
-    json = req.to_json()
-    new_req = HttpRequest.from_json(json, http, _postproc_none)
+        self.assertEqual(
+            {"content-type": 'multipart/related; boundary="---flubber"'},
+            new_req.headers,
+        )
+        self.assertEqual("http://example.com", new_req.uri)
+        self.assertEqual("{}", new_req.body)
+        self.assertEqual(http, new_req.http)
+        self.assertEqual(media_upload.to_json(), new_req.resumable.to_json())
 
-    self.assertEqual({'content-type':
-                       'multipart/related; boundary="---flubber"'},
-                       new_req.headers)
-    self.assertEqual('http://example.com', new_req.uri)
-    self.assertEqual('{}', new_req.body)
-    self.assertEqual(http, new_req.http)
-    self.assertEqual(media_upload.to_json(), new_req.resumable.to_json())
-
-    self.assertEqual(random.random, new_req._rand)
-    self.assertEqual(time.sleep, new_req._sleep)
+        self.assertEqual(random.random, new_req._rand)
+        self.assertEqual(time.sleep, new_req._sleep)
 
 
 class TestMediaIoBaseUpload(unittest.TestCase):
+    def test_media_io_base_upload_from_file_io(self):
+        fd = FileIO(datafile("small.png"), "r")
+        upload = MediaIoBaseUpload(
+            fd=fd, mimetype="image/png", chunksize=500, resumable=True
+        )
+        self.assertEqual("image/png", upload.mimetype())
+        self.assertEqual(190, upload.size())
+        self.assertEqual(True, upload.resumable())
+        self.assertEqual(500, upload.chunksize())
+        self.assertEqual(b"PNG", upload.getbytes(1, 3))
 
-  def test_media_io_base_upload_from_file_io(self):
-    fd = FileIO(datafile('small.png'), 'r')
-    upload = MediaIoBaseUpload(
-        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
-    self.assertEqual('image/png', upload.mimetype())
-    self.assertEqual(190, upload.size())
-    self.assertEqual(True, upload.resumable())
-    self.assertEqual(500, upload.chunksize())
-    self.assertEqual(b'PNG', upload.getbytes(1, 3))
+    def test_media_io_base_upload_from_file_object(self):
+        f = open(datafile("small.png"), "rb")
+        upload = MediaIoBaseUpload(
+            fd=f, mimetype="image/png", chunksize=500, resumable=True
+        )
+        self.assertEqual("image/png", upload.mimetype())
+        self.assertEqual(190, upload.size())
+        self.assertEqual(True, upload.resumable())
+        self.assertEqual(500, upload.chunksize())
+        self.assertEqual(b"PNG", upload.getbytes(1, 3))
+        f.close()
 
-  def test_media_io_base_upload_from_file_object(self):
-    f = open(datafile('small.png'), 'rb')
-    upload = MediaIoBaseUpload(
-        fd=f, mimetype='image/png', chunksize=500, resumable=True)
-    self.assertEqual('image/png', upload.mimetype())
-    self.assertEqual(190, upload.size())
-    self.assertEqual(True, upload.resumable())
-    self.assertEqual(500, upload.chunksize())
-    self.assertEqual(b'PNG', upload.getbytes(1, 3))
-    f.close()
+    def test_media_io_base_upload_serializable(self):
+        f = open(datafile("small.png"), "rb")
+        upload = MediaIoBaseUpload(fd=f, mimetype="image/png")
 
-  def test_media_io_base_upload_serializable(self):
-    f = open(datafile('small.png'), 'rb')
-    upload = MediaIoBaseUpload(fd=f, mimetype='image/png')
+        try:
+            json = upload.to_json()
+            self.fail("MediaIoBaseUpload should not be serializable.")
+        except NotImplementedError:
+            pass
 
-    try:
-      json = upload.to_json()
-      self.fail('MediaIoBaseUpload should not be serializable.')
-    except NotImplementedError:
-      pass
+    @unittest.skipIf(PY3, "Strings and Bytes are different types")
+    def test_media_io_base_upload_from_string_io(self):
+        f = open(datafile("small.png"), "rb")
+        fd = StringIO(f.read())
+        f.close()
 
-  @unittest.skipIf(PY3, 'Strings and Bytes are different types')
-  def test_media_io_base_upload_from_string_io(self):
-    f = open(datafile('small.png'), 'rb')
-    fd = StringIO(f.read())
-    f.close()
+        upload = MediaIoBaseUpload(
+            fd=fd, mimetype="image/png", chunksize=500, resumable=True
+        )
+        self.assertEqual("image/png", upload.mimetype())
+        self.assertEqual(190, upload.size())
+        self.assertEqual(True, upload.resumable())
+        self.assertEqual(500, upload.chunksize())
+        self.assertEqual(b"PNG", upload.getbytes(1, 3))
+        f.close()
 
-    upload = MediaIoBaseUpload(
-        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
-    self.assertEqual('image/png', upload.mimetype())
-    self.assertEqual(190, upload.size())
-    self.assertEqual(True, upload.resumable())
-    self.assertEqual(500, upload.chunksize())
-    self.assertEqual(b'PNG', upload.getbytes(1, 3))
-    f.close()
+    def test_media_io_base_upload_from_bytes(self):
+        f = open(datafile("small.png"), "rb")
+        fd = BytesIO(f.read())
+        upload = MediaIoBaseUpload(
+            fd=fd, mimetype="image/png", chunksize=500, resumable=True
+        )
+        self.assertEqual("image/png", upload.mimetype())
+        self.assertEqual(190, upload.size())
+        self.assertEqual(True, upload.resumable())
+        self.assertEqual(500, upload.chunksize())
+        self.assertEqual(b"PNG", upload.getbytes(1, 3))
 
-  def test_media_io_base_upload_from_bytes(self):
-    f = open(datafile('small.png'), 'rb')
-    fd = BytesIO(f.read())
-    upload = MediaIoBaseUpload(
-        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
-    self.assertEqual('image/png', upload.mimetype())
-    self.assertEqual(190, upload.size())
-    self.assertEqual(True, upload.resumable())
-    self.assertEqual(500, upload.chunksize())
-    self.assertEqual(b'PNG', upload.getbytes(1, 3))
+    def test_media_io_base_upload_raises_on_invalid_chunksize(self):
+        f = open(datafile("small.png"), "rb")
+        fd = BytesIO(f.read())
+        self.assertRaises(
+            InvalidChunkSizeError,
+            MediaIoBaseUpload,
+            fd,
+            "image/png",
+            chunksize=-2,
+            resumable=True,
+        )
 
-  def test_media_io_base_upload_raises_on_invalid_chunksize(self):
-    f = open(datafile('small.png'), 'rb')
-    fd = BytesIO(f.read())
-    self.assertRaises(InvalidChunkSizeError, MediaIoBaseUpload,
-        fd, 'image/png', chunksize=-2, resumable=True)
+    def test_media_io_base_upload_streamable(self):
+        fd = BytesIO(b"stuff")
+        upload = MediaIoBaseUpload(
+            fd=fd, mimetype="image/png", chunksize=500, resumable=True
+        )
+        self.assertEqual(True, upload.has_stream())
+        self.assertEqual(fd, upload.stream())
 
-  def test_media_io_base_upload_streamable(self):
-    fd = BytesIO(b'stuff')
-    upload = MediaIoBaseUpload(
-        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
-    self.assertEqual(True, upload.has_stream())
-    self.assertEqual(fd, upload.stream())
+    def test_media_io_base_next_chunk_retries(self):
+        f = open(datafile("small.png"), "rb")
+        fd = BytesIO(f.read())
+        upload = MediaIoBaseUpload(
+            fd=fd, mimetype="image/png", chunksize=500, resumable=True
+        )
 
-  def test_media_io_base_next_chunk_retries(self):
-    f = open(datafile('small.png'), 'rb')
-    fd = BytesIO(f.read())
-    upload = MediaIoBaseUpload(
-        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
+        # Simulate errors for both the request that creates the resumable upload
+        # and the upload itself.
+        http = HttpMockSequence(
+            [
+                ({"status": "500"}, ""),
+                ({"status": "500"}, ""),
+                ({"status": "503"}, ""),
+                ({"status": "200", "location": "location"}, ""),
+                ({"status": "403"}, USER_RATE_LIMIT_EXCEEDED_RESPONSE),
+                ({"status": "403"}, RATE_LIMIT_EXCEEDED_RESPONSE),
+                ({"status": "429"}, ""),
+                ({"status": "200"}, "{}"),
+            ]
+        )
 
-    # Simulate errors for both the request that creates the resumable upload
-    # and the upload itself.
-    http = HttpMockSequence([
-      ({'status': '500'}, ''),
-      ({'status': '500'}, ''),
-      ({'status': '503'}, ''),
-      ({'status': '200', 'location': 'location'}, ''),
-      ({'status': '403'}, USER_RATE_LIMIT_EXCEEDED_RESPONSE),
-      ({'status': '403'}, RATE_LIMIT_EXCEEDED_RESPONSE),
-      ({'status': '429'}, ''),
-      ({'status': '200'}, '{}'),
-    ])
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/upload/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http, model.response, uri, method=method, headers={}, resumable=upload
+        )
 
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/upload/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        headers={},
-        resumable=upload)
+        sleeptimes = []
+        request._sleep = lambda x: sleeptimes.append(x)
+        request._rand = lambda: 10
 
-    sleeptimes = []
-    request._sleep = lambda x: sleeptimes.append(x)
-    request._rand = lambda: 10
+        request.execute(num_retries=3)
+        self.assertEqual([20, 40, 80, 20, 40, 80], sleeptimes)
 
-    request.execute(num_retries=3)
-    self.assertEqual([20, 40, 80, 20, 40, 80], sleeptimes)
+    def test_media_io_base_next_chunk_no_retry_403_not_configured(self):
+        fd = BytesIO(b"i am png")
+        upload = MediaIoBaseUpload(
+            fd=fd, mimetype="image/png", chunksize=500, resumable=True
+        )
 
-  def test_media_io_base_next_chunk_no_retry_403_not_configured(self):
-    fd = BytesIO(b"i am png")
-    upload = MediaIoBaseUpload(
-        fd=fd, mimetype='image/png', chunksize=500, resumable=True)
+        http = HttpMockSequence(
+            [({"status": "403"}, NOT_CONFIGURED_RESPONSE), ({"status": "200"}, "{}")]
+        )
 
-    http = HttpMockSequence([
-        ({'status': '403'}, NOT_CONFIGURED_RESPONSE),
-        ({'status': '200'}, '{}')
-        ])
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/upload/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http, model.response, uri, method=method, headers={}, resumable=upload
+        )
 
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/upload/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        headers={},
-        resumable=upload)
+        request._rand = lambda: 1.0
+        request._sleep = mock.MagicMock()
 
-    request._rand = lambda: 1.0
-    request._sleep =  mock.MagicMock()
-
-    with self.assertRaises(HttpError):
-      request.execute(num_retries=3)
-    request._sleep.assert_not_called()
+        with self.assertRaises(HttpError):
+            request.execute(num_retries=3)
+        request._sleep.assert_not_called()
 
 
 class TestMediaIoBaseDownload(unittest.TestCase):
+    def setUp(self):
+        http = HttpMock(datafile("zoo.json"), {"status": "200"})
+        zoo = build("zoo", "v1", http=http)
+        self.request = zoo.animals().get_media(name="Lion")
+        self.fd = BytesIO()
 
-  def setUp(self):
-    http = HttpMock(datafile('zoo.json'), {'status': '200'})
-    zoo = build('zoo', 'v1', http=http)
-    self.request = zoo.animals().get_media(name='Lion')
-    self.fd = BytesIO()
+    def test_media_io_base_download(self):
+        self.request.http = HttpMockSequence(
+            [
+                ({"status": "200", "content-range": "0-2/5"}, b"123"),
+                ({"status": "200", "content-range": "3-4/5"}, b"45"),
+            ]
+        )
+        self.assertEqual(True, self.request.http.follow_redirects)
 
-  def test_media_io_base_download(self):
-    self.request.http = HttpMockSequence([
-      ({'status': '200',
-        'content-range': '0-2/5'}, b'123'),
-      ({'status': '200',
-        'content-range': '3-4/5'}, b'45'),
-    ])
-    self.assertEqual(True, self.request.http.follow_redirects)
+        download = MediaIoBaseDownload(fd=self.fd, request=self.request, chunksize=3)
 
-    download = MediaIoBaseDownload(
-        fd=self.fd, request=self.request, chunksize=3)
+        self.assertEqual(self.fd, download._fd)
+        self.assertEqual(3, download._chunksize)
+        self.assertEqual(0, download._progress)
+        self.assertEqual(None, download._total_size)
+        self.assertEqual(False, download._done)
+        self.assertEqual(self.request.uri, download._uri)
 
-    self.assertEqual(self.fd, download._fd)
-    self.assertEqual(3, download._chunksize)
-    self.assertEqual(0, download._progress)
-    self.assertEqual(None, download._total_size)
-    self.assertEqual(False, download._done)
-    self.assertEqual(self.request.uri, download._uri)
+        status, done = download.next_chunk()
 
-    status, done = download.next_chunk()
+        self.assertEqual(self.fd.getvalue(), b"123")
+        self.assertEqual(False, done)
+        self.assertEqual(3, download._progress)
+        self.assertEqual(5, download._total_size)
+        self.assertEqual(3, status.resumable_progress)
 
-    self.assertEqual(self.fd.getvalue(), b'123')
-    self.assertEqual(False, done)
-    self.assertEqual(3, download._progress)
-    self.assertEqual(5, download._total_size)
-    self.assertEqual(3, status.resumable_progress)
+        status, done = download.next_chunk()
 
-    status, done = download.next_chunk()
+        self.assertEqual(self.fd.getvalue(), b"12345")
+        self.assertEqual(True, done)
+        self.assertEqual(5, download._progress)
+        self.assertEqual(5, download._total_size)
 
-    self.assertEqual(self.fd.getvalue(), b'12345')
-    self.assertEqual(True, done)
-    self.assertEqual(5, download._progress)
-    self.assertEqual(5, download._total_size)
+    def test_media_io_base_download_custom_request_headers(self):
+        self.request.http = HttpMockSequence(
+            [
+                (
+                    {"status": "200", "content-range": "0-2/5"},
+                    "echo_request_headers_as_json",
+                ),
+                (
+                    {"status": "200", "content-range": "3-4/5"},
+                    "echo_request_headers_as_json",
+                ),
+            ]
+        )
+        self.assertEqual(True, self.request.http.follow_redirects)
 
-  def test_media_io_base_download_custom_request_headers(self):
-    self.request.http = HttpMockSequence([
-      ({'status': '200',
-        'content-range': '0-2/5'}, 'echo_request_headers_as_json'),
-      ({'status': '200',
-        'content-range': '3-4/5'}, 'echo_request_headers_as_json'),
-    ])
-    self.assertEqual(True, self.request.http.follow_redirects)
+        self.request.headers["Cache-Control"] = "no-store"
 
-    self.request.headers['Cache-Control'] = 'no-store'
+        download = MediaIoBaseDownload(fd=self.fd, request=self.request, chunksize=3)
 
-    download = MediaIoBaseDownload(
-        fd=self.fd, request=self.request, chunksize=3)
+        self.assertEqual(download._headers.get("Cache-Control"), "no-store")
 
-    self.assertEqual(download._headers.get('Cache-Control'), 'no-store')
+        status, done = download.next_chunk()
 
-    status, done = download.next_chunk()
+        result = json.loads(self.fd.getvalue().decode("utf-8"))
 
-    result = json.loads(self.fd.getvalue().decode('utf-8'))
+        # Assert that the header we added to the original request is
+        # sent up to the server on each call to next_chunk.
 
-    # assert that that the header we added to the original request is
-    # sent up to the server on each call to next_chunk
+        self.assertEqual(result.get("Cache-Control"), "no-store")
 
-    self.assertEqual(result.get("Cache-Control"), "no-store")
+        download._fd = self.fd = BytesIO()
+        status, done = download.next_chunk()
 
-    download._fd = self.fd = BytesIO()
-    status, done = download.next_chunk()
+        result = json.loads(self.fd.getvalue().decode("utf-8"))
+        self.assertEqual(result.get("Cache-Control"), "no-store")
 
-    result = json.loads(self.fd.getvalue().decode('utf-8'))
-    self.assertEqual(result.get("Cache-Control"), "no-store")
+    def test_media_io_base_download_handle_redirects(self):
+        self.request.http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-location": "https://secure.example.net/lion",
+                    },
+                    b"",
+                ),
+                ({"status": "200", "content-range": "0-2/5"}, b"abc"),
+            ]
+        )
 
-  def test_media_io_base_download_handle_redirects(self):
-    self.request.http = HttpMockSequence([
-      ({'status': '200',
-        'content-location': 'https://secure.example.net/lion'}, b''),
-      ({'status': '200',
-        'content-range': '0-2/5'}, b'abc'),
-    ])
+        download = MediaIoBaseDownload(fd=self.fd, request=self.request, chunksize=3)
 
-    download = MediaIoBaseDownload(
-        fd=self.fd, request=self.request, chunksize=3)
+        status, done = download.next_chunk()
 
-    status, done = download.next_chunk()
+        self.assertEqual("https://secure.example.net/lion", download._uri)
 
-    self.assertEqual('https://secure.example.net/lion', download._uri)
+    def test_media_io_base_download_handle_4xx(self):
+        self.request.http = HttpMockSequence([({"status": "400"}, "")])
 
-  def test_media_io_base_download_handle_4xx(self):
-    self.request.http = HttpMockSequence([
-      ({'status': '400'}, ''),
-    ])
+        download = MediaIoBaseDownload(fd=self.fd, request=self.request, chunksize=3)
 
-    download = MediaIoBaseDownload(
-        fd=self.fd, request=self.request, chunksize=3)
+        try:
+            status, done = download.next_chunk()
+            self.fail("Should raise an exception")
+        except HttpError:
+            pass
 
-    try:
-      status, done = download.next_chunk()
-      self.fail('Should raise an exception')
-    except HttpError:
-      pass
+        # Even after raising an exception we can pick up where we left off.
+        self.request.http = HttpMockSequence(
+            [({"status": "200", "content-range": "0-2/5"}, b"123")]
+        )
 
-    # Even after raising an exception we can pick up where we left off.
-    self.request.http = HttpMockSequence([
-      ({'status': '200',
-        'content-range': '0-2/5'}, b'123'),
-    ])
+        status, done = download.next_chunk()
 
-    status, done = download.next_chunk()
+        self.assertEqual(self.fd.getvalue(), b"123")
 
-    self.assertEqual(self.fd.getvalue(), b'123')
+    def test_media_io_base_download_retries_connection_errors(self):
+        self.request.http = HttpMockWithErrors(
+            4, {"status": "200", "content-range": "0-2/3"}, b"123"
+        )
 
-  def test_media_io_base_download_retries_connection_errors(self):
-    self.request.http = HttpMockWithErrors(
-        4, {'status': '200', 'content-range': '0-2/3'}, b'123')
+        download = MediaIoBaseDownload(fd=self.fd, request=self.request, chunksize=3)
+        download._sleep = lambda _x: 0  # do nothing
+        download._rand = lambda: 10
 
-    download = MediaIoBaseDownload(
-        fd=self.fd, request=self.request, chunksize=3)
-    download._sleep = lambda _x: 0  # do nothing
-    download._rand = lambda: 10
+        status, done = download.next_chunk(num_retries=4)
 
-    status, done = download.next_chunk(num_retries=4)
+        self.assertEqual(self.fd.getvalue(), b"123")
+        self.assertEqual(True, done)
 
-    self.assertEqual(self.fd.getvalue(), b'123')
-    self.assertEqual(True, done)
+    def test_media_io_base_download_retries_5xx(self):
+        self.request.http = HttpMockSequence(
+            [
+                ({"status": "500"}, ""),
+                ({"status": "500"}, ""),
+                ({"status": "500"}, ""),
+                ({"status": "200", "content-range": "0-2/5"}, b"123"),
+                ({"status": "503"}, ""),
+                ({"status": "503"}, ""),
+                ({"status": "503"}, ""),
+                ({"status": "200", "content-range": "3-4/5"}, b"45"),
+            ]
+        )
 
-  def test_media_io_base_download_retries_5xx(self):
-    self.request.http = HttpMockSequence([
-      ({'status': '500'}, ''),
-      ({'status': '500'}, ''),
-      ({'status': '500'}, ''),
-      ({'status': '200',
-        'content-range': '0-2/5'}, b'123'),
-      ({'status': '503'}, ''),
-      ({'status': '503'}, ''),
-      ({'status': '503'}, ''),
-      ({'status': '200',
-        'content-range': '3-4/5'}, b'45'),
-    ])
+        download = MediaIoBaseDownload(fd=self.fd, request=self.request, chunksize=3)
 
-    download = MediaIoBaseDownload(
-        fd=self.fd, request=self.request, chunksize=3)
+        self.assertEqual(self.fd, download._fd)
+        self.assertEqual(3, download._chunksize)
+        self.assertEqual(0, download._progress)
+        self.assertEqual(None, download._total_size)
+        self.assertEqual(False, download._done)
+        self.assertEqual(self.request.uri, download._uri)
 
-    self.assertEqual(self.fd, download._fd)
-    self.assertEqual(3, download._chunksize)
-    self.assertEqual(0, download._progress)
-    self.assertEqual(None, download._total_size)
-    self.assertEqual(False, download._done)
-    self.assertEqual(self.request.uri, download._uri)
+        # Set time.sleep and random.random stubs.
+        sleeptimes = []
+        download._sleep = lambda x: sleeptimes.append(x)
+        download._rand = lambda: 10
 
-    # Set time.sleep and random.random stubs.
-    sleeptimes = []
-    download._sleep = lambda x: sleeptimes.append(x)
-    download._rand = lambda: 10
+        status, done = download.next_chunk(num_retries=3)
 
-    status, done = download.next_chunk(num_retries=3)
+        # Check for exponential backoff using the rand function above.
+        self.assertEqual([20, 40, 80], sleeptimes)
 
-    # Check for exponential backoff using the rand function above.
-    self.assertEqual([20, 40, 80], sleeptimes)
+        self.assertEqual(self.fd.getvalue(), b"123")
+        self.assertEqual(False, done)
+        self.assertEqual(3, download._progress)
+        self.assertEqual(5, download._total_size)
+        self.assertEqual(3, status.resumable_progress)
 
-    self.assertEqual(self.fd.getvalue(), b'123')
-    self.assertEqual(False, done)
-    self.assertEqual(3, download._progress)
-    self.assertEqual(5, download._total_size)
-    self.assertEqual(3, status.resumable_progress)
+        # Reset time.sleep stub.
+        del sleeptimes[0 : len(sleeptimes)]
 
-    # Reset time.sleep stub.
-    del sleeptimes[0:len(sleeptimes)]
+        status, done = download.next_chunk(num_retries=3)
 
-    status, done = download.next_chunk(num_retries=3)
+        # Check for exponential backoff using the rand function above.
+        self.assertEqual([20, 40, 80], sleeptimes)
 
-    # Check for exponential backoff using the rand function above.
-    self.assertEqual([20, 40, 80], sleeptimes)
+        self.assertEqual(self.fd.getvalue(), b"12345")
+        self.assertEqual(True, done)
+        self.assertEqual(5, download._progress)
+        self.assertEqual(5, download._total_size)
 
-    self.assertEqual(self.fd.getvalue(), b'12345')
-    self.assertEqual(True, done)
-    self.assertEqual(5, download._progress)
-    self.assertEqual(5, download._total_size)
+    def test_media_io_base_download_empty_file(self):
+        self.request.http = HttpMockSequence(
+            [({"status": "200", "content-range": "0-0/0"}, b"")]
+        )
 
-  def test_media_io_base_download_empty_file(self):
-    self.request.http = HttpMockSequence([
-      ({'status': '200',
-        'content-range': '0-0/0'}, b''),
-    ])
+        download = MediaIoBaseDownload(fd=self.fd, request=self.request, chunksize=3)
 
-    download = MediaIoBaseDownload(
-      fd=self.fd, request=self.request, chunksize=3)
+        self.assertEqual(self.fd, download._fd)
+        self.assertEqual(0, download._progress)
+        self.assertEqual(None, download._total_size)
+        self.assertEqual(False, download._done)
+        self.assertEqual(self.request.uri, download._uri)
 
-    self.assertEqual(self.fd, download._fd)
-    self.assertEqual(0, download._progress)
-    self.assertEqual(None, download._total_size)
-    self.assertEqual(False, download._done)
-    self.assertEqual(self.request.uri, download._uri)
+        status, done = download.next_chunk()
 
-    status, done = download.next_chunk()
+        self.assertEqual(True, done)
+        self.assertEqual(0, download._progress)
+        self.assertEqual(0, download._total_size)
+        self.assertEqual(0, status.progress())
 
-    self.assertEqual(True, done)
-    self.assertEqual(0, download._progress)
-    self.assertEqual(0, download._total_size)
-    self.assertEqual(0, status.progress())
+    def test_media_io_base_download_unknown_media_size(self):
+        self.request.http = HttpMockSequence([({"status": "200"}, b"123")])
 
-  def test_media_io_base_download_unknown_media_size(self):
-    self.request.http = HttpMockSequence([
-      ({'status': '200'}, b'123')
-    ])
+        download = MediaIoBaseDownload(fd=self.fd, request=self.request, chunksize=3)
 
-    download = MediaIoBaseDownload(
-      fd=self.fd, request=self.request, chunksize=3)
+        self.assertEqual(self.fd, download._fd)
+        self.assertEqual(0, download._progress)
+        self.assertEqual(None, download._total_size)
+        self.assertEqual(False, download._done)
+        self.assertEqual(self.request.uri, download._uri)
 
-    self.assertEqual(self.fd, download._fd)
-    self.assertEqual(0, download._progress)
-    self.assertEqual(None, download._total_size)
-    self.assertEqual(False, download._done)
-    self.assertEqual(self.request.uri, download._uri)
+        status, done = download.next_chunk()
 
-    status, done = download.next_chunk()
-
-    self.assertEqual(self.fd.getvalue(), b'123')
-    self.assertEqual(True, done)
-    self.assertEqual(3, download._progress)
-    self.assertEqual(None, download._total_size)
-    self.assertEqual(0, status.progress())
+        self.assertEqual(self.fd.getvalue(), b"123")
+        self.assertEqual(True, done)
+        self.assertEqual(3, download._progress)
+        self.assertEqual(None, download._total_size)
+        self.assertEqual(0, status.progress())
 
 
 EXPECTED = """POST /someapi/v1/collection/?foo=bar HTTP/1.1
@@ -829,746 +842,813 @@
  }
 ]"""
 
-class Callbacks(object):
-  def __init__(self):
-    self.responses = {}
-    self.exceptions = {}
 
-  def f(self, request_id, response, exception):
-    self.responses[request_id] = response
-    self.exceptions[request_id] = exception
+class Callbacks(object):
+    def __init__(self):
+        self.responses = {}
+        self.exceptions = {}
+
+    def f(self, request_id, response, exception):
+        self.responses[request_id] = response
+        self.exceptions[request_id] = exception
 
 
 class TestHttpRequest(unittest.TestCase):
-  def test_unicode(self):
-    http = HttpMock(datafile('zoo.json'), headers={'status': '200'})
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        body=u'{}',
-        headers={'content-type': 'application/json'})
-    request.execute()
-    self.assertEqual(uri, http.uri)
-    self.assertEqual(str, type(http.uri))
-    self.assertEqual(method, http.method)
-    self.assertEqual(str, type(http.method))
+    def test_unicode(self):
+        http = HttpMock(datafile("zoo.json"), headers={"status": "200"})
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/collection/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http,
+            model.response,
+            uri,
+            method=method,
+            body=u"{}",
+            headers={"content-type": "application/json"},
+        )
+        request.execute()
+        self.assertEqual(uri, http.uri)
+        self.assertEqual(str, type(http.uri))
+        self.assertEqual(method, http.method)
+        self.assertEqual(str, type(http.method))
 
-  def test_empty_content_type(self):
-    """Test for #284"""
-    http = HttpMock(None, headers={'status': 200})
-    uri = u'https://www.googleapis.com/someapi/v1/upload/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        _postproc_none,
-        uri,
-        method=method,
-        headers={'content-type': ''})
-    request.execute()
-    self.assertEqual('', http.headers.get('content-type'))
+    def test_empty_content_type(self):
+        """Test for #284"""
+        http = HttpMock(None, headers={"status": 200})
+        uri = u"https://www.googleapis.com/someapi/v1/upload/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http, _postproc_none, uri, method=method, headers={"content-type": ""}
+        )
+        request.execute()
+        self.assertEqual("", http.headers.get("content-type"))
 
-  def test_no_retry_connection_errors(self):
-    model = JsonModel()
-    request = HttpRequest(
-        HttpMockWithNonRetriableErrors(1, {'status': '200'}, '{"foo": "bar"}'),
-        model.response,
-        u'https://www.example.com/json_api_endpoint')
-    request._sleep = lambda _x: 0  # do nothing
-    request._rand = lambda: 10
-    with self.assertRaises(socket.error):
-      response = request.execute(num_retries=3)
+    def test_no_retry_connection_errors(self):
+        model = JsonModel()
+        request = HttpRequest(
+            HttpMockWithNonRetriableErrors(1, {"status": "200"}, '{"foo": "bar"}'),
+            model.response,
+            u"https://www.example.com/json_api_endpoint",
+        )
+        request._sleep = lambda _x: 0  # do nothing
+        request._rand = lambda: 10
+        with self.assertRaises(socket.error):
+            response = request.execute(num_retries=3)
 
+    def test_retry_connection_errors_non_resumable(self):
+        model = JsonModel()
+        request = HttpRequest(
+            HttpMockWithErrors(4, {"status": "200"}, '{"foo": "bar"}'),
+            model.response,
+            u"https://www.example.com/json_api_endpoint",
+        )
+        request._sleep = lambda _x: 0  # do nothing
+        request._rand = lambda: 10
+        response = request.execute(num_retries=4)
+        self.assertEqual({u"foo": u"bar"}, response)
 
-  def test_retry_connection_errors_non_resumable(self):
-    model = JsonModel()
-    request = HttpRequest(
-        HttpMockWithErrors(4, {'status': '200'}, '{"foo": "bar"}'),
-        model.response,
-        u'https://www.example.com/json_api_endpoint')
-    request._sleep = lambda _x: 0  # do nothing
-    request._rand = lambda: 10
-    response = request.execute(num_retries=4)
-    self.assertEqual({u'foo': u'bar'}, response)
+    def test_retry_connection_errors_resumable(self):
+        with open(datafile("small.png"), "rb") as small_png_file:
+            small_png_fd = BytesIO(small_png_file.read())
+        upload = MediaIoBaseUpload(
+            fd=small_png_fd, mimetype="image/png", chunksize=500, resumable=True
+        )
+        model = JsonModel()
 
-  def test_retry_connection_errors_resumable(self):
-    with open(datafile('small.png'), 'rb') as small_png_file:
-      small_png_fd = BytesIO(small_png_file.read())
-    upload = MediaIoBaseUpload(fd=small_png_fd, mimetype='image/png',
-                               chunksize=500, resumable=True)
-    model = JsonModel()
+        request = HttpRequest(
+            HttpMockWithErrors(
+                4, {"status": "200", "location": "location"}, '{"foo": "bar"}'
+            ),
+            model.response,
+            u"https://www.example.com/file_upload",
+            method="POST",
+            resumable=upload,
+        )
+        request._sleep = lambda _x: 0  # do nothing
+        request._rand = lambda: 10
+        response = request.execute(num_retries=4)
+        self.assertEqual({u"foo": u"bar"}, response)
 
-    request = HttpRequest(
-        HttpMockWithErrors(
-            4, {'status': '200', 'location': 'location'}, '{"foo": "bar"}'),
-        model.response,
-        u'https://www.example.com/file_upload',
-        method='POST',
-        resumable=upload)
-    request._sleep = lambda _x: 0  # do nothing
-    request._rand = lambda: 10
-    response = request.execute(num_retries=4)
-    self.assertEqual({u'foo': u'bar'}, response)
+    def test_retry(self):
+        num_retries = 5
+        resp_seq = [({"status": "500"}, "")] * (num_retries - 3)
+        resp_seq.append(({"status": "403"}, RATE_LIMIT_EXCEEDED_RESPONSE))
+        resp_seq.append(({"status": "403"}, USER_RATE_LIMIT_EXCEEDED_RESPONSE))
+        resp_seq.append(({"status": "429"}, ""))
+        resp_seq.append(({"status": "200"}, "{}"))
 
-  def test_retry(self):
-    num_retries = 5
-    resp_seq = [({'status': '500'}, '')] * (num_retries - 3)
-    resp_seq.append(({'status': '403'}, RATE_LIMIT_EXCEEDED_RESPONSE))
-    resp_seq.append(({'status': '403'}, USER_RATE_LIMIT_EXCEEDED_RESPONSE))
-    resp_seq.append(({'status': '429'}, ''))
-    resp_seq.append(({'status': '200'}, '{}'))
+        http = HttpMockSequence(resp_seq)
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/collection/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http,
+            model.response,
+            uri,
+            method=method,
+            body=u"{}",
+            headers={"content-type": "application/json"},
+        )
 
-    http = HttpMockSequence(resp_seq)
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        body=u'{}',
-        headers={'content-type': 'application/json'})
+        sleeptimes = []
+        request._sleep = lambda x: sleeptimes.append(x)
+        request._rand = lambda: 10
 
-    sleeptimes = []
-    request._sleep = lambda x: sleeptimes.append(x)
-    request._rand = lambda: 10
+        request.execute(num_retries=num_retries)
 
-    request.execute(num_retries=num_retries)
+        self.assertEqual(num_retries, len(sleeptimes))
+        for retry_num in range(num_retries):
+            self.assertEqual(10 * 2 ** (retry_num + 1), sleeptimes[retry_num])
 
-    self.assertEqual(num_retries, len(sleeptimes))
-    for retry_num in range(num_retries):
-      self.assertEqual(10 * 2**(retry_num + 1), sleeptimes[retry_num])
+    def test_no_retry_succeeds(self):
+        num_retries = 5
+        resp_seq = [({"status": "200"}, "{}")] * (num_retries)
 
-  def test_no_retry_succeeds(self):
-    num_retries = 5
-    resp_seq = [({'status': '200'}, '{}')] * (num_retries)
+        http = HttpMockSequence(resp_seq)
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/collection/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http,
+            model.response,
+            uri,
+            method=method,
+            body=u"{}",
+            headers={"content-type": "application/json"},
+        )
 
-    http = HttpMockSequence(resp_seq)
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        body=u'{}',
-        headers={'content-type': 'application/json'})
+        sleeptimes = []
+        request._sleep = lambda x: sleeptimes.append(x)
+        request._rand = lambda: 10
 
-    sleeptimes = []
-    request._sleep = lambda x: sleeptimes.append(x)
-    request._rand = lambda: 10
+        request.execute(num_retries=num_retries)
 
-    request.execute(num_retries=num_retries)
+        self.assertEqual(0, len(sleeptimes))
 
-    self.assertEqual(0, len(sleeptimes))
+    def test_no_retry_fails_fast(self):
+        http = HttpMockSequence([({"status": "500"}, ""), ({"status": "200"}, "{}")])
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/collection/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http,
+            model.response,
+            uri,
+            method=method,
+            body=u"{}",
+            headers={"content-type": "application/json"},
+        )
 
-  def test_no_retry_fails_fast(self):
-    http = HttpMockSequence([
-        ({'status': '500'}, ''),
-        ({'status': '200'}, '{}')
-        ])
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        body=u'{}',
-        headers={'content-type': 'application/json'})
+        request._rand = lambda: 1.0
+        request._sleep = mock.MagicMock()
 
-    request._rand = lambda: 1.0
-    request._sleep = mock.MagicMock()
+        with self.assertRaises(HttpError):
+            request.execute()
+        request._sleep.assert_not_called()
 
-    with self.assertRaises(HttpError):
-      request.execute()
-    request._sleep.assert_not_called()
+    def test_no_retry_403_not_configured_fails_fast(self):
+        http = HttpMockSequence(
+            [({"status": "403"}, NOT_CONFIGURED_RESPONSE), ({"status": "200"}, "{}")]
+        )
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/collection/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http,
+            model.response,
+            uri,
+            method=method,
+            body=u"{}",
+            headers={"content-type": "application/json"},
+        )
 
-  def test_no_retry_403_not_configured_fails_fast(self):
-    http = HttpMockSequence([
-        ({'status': '403'}, NOT_CONFIGURED_RESPONSE),
-        ({'status': '200'}, '{}')
-        ])
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        body=u'{}',
-        headers={'content-type': 'application/json'})
+        request._rand = lambda: 1.0
+        request._sleep = mock.MagicMock()
 
-    request._rand = lambda: 1.0
-    request._sleep =  mock.MagicMock()
+        with self.assertRaises(HttpError):
+            request.execute()
+        request._sleep.assert_not_called()
 
-    with self.assertRaises(HttpError):
-      request.execute()
-    request._sleep.assert_not_called()
+    def test_no_retry_403_fails_fast(self):
+        http = HttpMockSequence([({"status": "403"}, ""), ({"status": "200"}, "{}")])
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/collection/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http,
+            model.response,
+            uri,
+            method=method,
+            body=u"{}",
+            headers={"content-type": "application/json"},
+        )
 
-  def test_no_retry_403_fails_fast(self):
-    http = HttpMockSequence([
-        ({'status': '403'}, ''),
-        ({'status': '200'}, '{}')
-        ])
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        body=u'{}',
-        headers={'content-type': 'application/json'})
+        request._rand = lambda: 1.0
+        request._sleep = mock.MagicMock()
 
-    request._rand = lambda: 1.0
-    request._sleep =  mock.MagicMock()
+        with self.assertRaises(HttpError):
+            request.execute()
+        request._sleep.assert_not_called()
 
-    with self.assertRaises(HttpError):
-      request.execute()
-    request._sleep.assert_not_called()
+    def test_no_retry_401_fails_fast(self):
+        http = HttpMockSequence([({"status": "401"}, ""), ({"status": "200"}, "{}")])
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/collection/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http,
+            model.response,
+            uri,
+            method=method,
+            body=u"{}",
+            headers={"content-type": "application/json"},
+        )
 
-  def test_no_retry_401_fails_fast(self):
-    http = HttpMockSequence([
-        ({'status': '401'}, ''),
-        ({'status': '200'}, '{}')
-        ])
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        body=u'{}',
-        headers={'content-type': 'application/json'})
+        request._rand = lambda: 1.0
+        request._sleep = mock.MagicMock()
 
-    request._rand = lambda: 1.0
-    request._sleep =  mock.MagicMock()
+        with self.assertRaises(HttpError):
+            request.execute()
+        request._sleep.assert_not_called()
 
-    with self.assertRaises(HttpError):
-      request.execute()
-    request._sleep.assert_not_called()
+    def test_no_retry_403_list_fails(self):
+        http = HttpMockSequence(
+            [
+                ({"status": "403"}, LIST_NOT_CONFIGURED_RESPONSE),
+                ({"status": "200"}, "{}"),
+            ]
+        )
+        model = JsonModel()
+        uri = u"https://www.googleapis.com/someapi/v1/collection/?foo=bar"
+        method = u"POST"
+        request = HttpRequest(
+            http,
+            model.response,
+            uri,
+            method=method,
+            body=u"{}",
+            headers={"content-type": "application/json"},
+        )
 
-  def test_no_retry_403_list_fails(self):
-    http = HttpMockSequence([
-        ({'status': '403'}, LIST_NOT_CONFIGURED_RESPONSE),
-        ({'status': '200'}, '{}')
-        ])
-    model = JsonModel()
-    uri = u'https://www.googleapis.com/someapi/v1/collection/?foo=bar'
-    method = u'POST'
-    request = HttpRequest(
-        http,
-        model.response,
-        uri,
-        method=method,
-        body=u'{}',
-        headers={'content-type': 'application/json'})
+        request._rand = lambda: 1.0
+        request._sleep = mock.MagicMock()
 
-    request._rand = lambda: 1.0
-    request._sleep =  mock.MagicMock()
+        with self.assertRaises(HttpError):
+            request.execute()
+        request._sleep.assert_not_called()
 
-    with self.assertRaises(HttpError):
-      request.execute()
-    request._sleep.assert_not_called()
 
 class TestBatch(unittest.TestCase):
+    def setUp(self):
+        model = JsonModel()
+        self.request1 = HttpRequest(
+            None,
+            model.response,
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+            method="POST",
+            body="{}",
+            headers={"content-type": "application/json"},
+        )
 
-  def setUp(self):
-    model = JsonModel()
-    self.request1 = HttpRequest(
-        None,
-        model.response,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='POST',
-        body='{}',
-        headers={'content-type': 'application/json'})
+        self.request2 = HttpRequest(
+            None,
+            model.response,
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+            method="GET",
+            body="",
+            headers={"content-type": "application/json"},
+        )
 
-    self.request2 = HttpRequest(
-        None,
-        model.response,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='GET',
-        body='',
-        headers={'content-type': 'application/json'})
+    def test_id_to_from_content_id_header(self):
+        batch = BatchHttpRequest()
+        self.assertEquals("12", batch._header_to_id(batch._id_to_header("12")))
 
+    def test_invalid_content_id_header(self):
+        batch = BatchHttpRequest()
+        self.assertRaises(BatchError, batch._header_to_id, "[foo+x]")
+        self.assertRaises(BatchError, batch._header_to_id, "foo+1")
+        self.assertRaises(BatchError, batch._header_to_id, "<foo>")
 
-  def test_id_to_from_content_id_header(self):
-    batch = BatchHttpRequest()
-    self.assertEquals('12', batch._header_to_id(batch._id_to_header('12')))
+    def test_serialize_request(self):
+        batch = BatchHttpRequest()
+        request = HttpRequest(
+            None,
+            None,
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+            method="POST",
+            body=u"{}",
+            headers={"content-type": "application/json"},
+            methodId=None,
+            resumable=None,
+        )
+        s = batch._serialize_request(request).splitlines()
+        self.assertEqual(EXPECTED.splitlines(), s)
 
-  def test_invalid_content_id_header(self):
-    batch = BatchHttpRequest()
-    self.assertRaises(BatchError, batch._header_to_id, '[foo+x]')
-    self.assertRaises(BatchError, batch._header_to_id, 'foo+1')
-    self.assertRaises(BatchError, batch._header_to_id, '<foo>')
+    def test_serialize_request_media_body(self):
+        batch = BatchHttpRequest()
+        f = open(datafile("small.png"), "rb")
+        body = f.read()
+        f.close()
 
-  def test_serialize_request(self):
-    batch = BatchHttpRequest()
-    request = HttpRequest(
-        None,
-        None,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='POST',
-        body=u'{}',
-        headers={'content-type': 'application/json'},
-        methodId=None,
-        resumable=None)
-    s = batch._serialize_request(request).splitlines()
-    self.assertEqual(EXPECTED.splitlines(), s)
+        request = HttpRequest(
+            None,
+            None,
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+            method="POST",
+            body=body,
+            headers={"content-type": "application/json"},
+            methodId=None,
+            resumable=None,
+        )
+        # Just testing it shouldn't raise an exception.
+        s = batch._serialize_request(request).splitlines()
 
-  def test_serialize_request_media_body(self):
-    batch = BatchHttpRequest()
-    f = open(datafile('small.png'), 'rb')
-    body = f.read()
-    f.close()
+    def test_serialize_request_no_body(self):
+        batch = BatchHttpRequest()
+        request = HttpRequest(
+            None,
+            None,
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+            method="POST",
+            body=b"",
+            headers={"content-type": "application/json"},
+            methodId=None,
+            resumable=None,
+        )
+        s = batch._serialize_request(request).splitlines()
+        self.assertEqual(NO_BODY_EXPECTED.splitlines(), s)
 
-    request = HttpRequest(
-        None,
-        None,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='POST',
-        body=body,
-        headers={'content-type': 'application/json'},
-        methodId=None,
-        resumable=None)
-    # Just testing it shouldn't raise an exception.
-    s = batch._serialize_request(request).splitlines()
+    def test_serialize_get_request_no_body(self):
+        batch = BatchHttpRequest()
+        request = HttpRequest(
+            None,
+            None,
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+            method="GET",
+            body=None,
+            headers={"content-type": "application/json"},
+            methodId=None,
+            resumable=None,
+        )
+        s = batch._serialize_request(request).splitlines()
+        self.assertEqual(NO_BODY_EXPECTED_GET.splitlines(), s)
 
-  def test_serialize_request_no_body(self):
-    batch = BatchHttpRequest()
-    request = HttpRequest(
-        None,
-        None,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='POST',
-        body=b'',
-        headers={'content-type': 'application/json'},
-        methodId=None,
-        resumable=None)
-    s = batch._serialize_request(request).splitlines()
-    self.assertEqual(NO_BODY_EXPECTED.splitlines(), s)
+    def test_deserialize_response(self):
+        batch = BatchHttpRequest()
+        resp, content = batch._deserialize_response(RESPONSE)
 
-  def test_serialize_get_request_no_body(self):
-    batch = BatchHttpRequest()
-    request = HttpRequest(
-        None,
-        None,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='GET',
-        body=None,
-        headers={'content-type': 'application/json'},
-        methodId=None,
-        resumable=None)
-    s = batch._serialize_request(request).splitlines()
-    self.assertEqual(NO_BODY_EXPECTED_GET.splitlines(), s)
+        self.assertEqual(200, resp.status)
+        self.assertEqual("OK", resp.reason)
+        self.assertEqual(11, resp.version)
+        self.assertEqual('{"answer": 42}', content)
 
-  def test_deserialize_response(self):
-    batch = BatchHttpRequest()
-    resp, content = batch._deserialize_response(RESPONSE)
+    def test_new_id(self):
+        batch = BatchHttpRequest()
 
-    self.assertEqual(200, resp.status)
-    self.assertEqual('OK', resp.reason)
-    self.assertEqual(11, resp.version)
-    self.assertEqual('{"answer": 42}', content)
+        id_ = batch._new_id()
+        self.assertEqual("1", id_)
 
-  def test_new_id(self):
-    batch = BatchHttpRequest()
+        id_ = batch._new_id()
+        self.assertEqual("2", id_)
 
-    id_ = batch._new_id()
-    self.assertEqual('1', id_)
+        batch.add(self.request1, request_id="3")
 
-    id_ = batch._new_id()
-    self.assertEqual('2', id_)
+        id_ = batch._new_id()
+        self.assertEqual("4", id_)
 
-    batch.add(self.request1, request_id='3')
+    def test_add(self):
+        batch = BatchHttpRequest()
+        batch.add(self.request1, request_id="1")
+        self.assertRaises(KeyError, batch.add, self.request1, request_id="1")
 
-    id_ = batch._new_id()
-    self.assertEqual('4', id_)
+    def test_add_fail_for_over_limit(self):
+        from googleapiclient.http import MAX_BATCH_LIMIT
 
-  def test_add(self):
-    batch = BatchHttpRequest()
-    batch.add(self.request1, request_id='1')
-    self.assertRaises(KeyError, batch.add, self.request1, request_id='1')
-
-  def test_add_fail_for_over_limit(self):
-    from googleapiclient.http import MAX_BATCH_LIMIT
-
-    batch = BatchHttpRequest()
-    for i in range(0, MAX_BATCH_LIMIT):
-      batch.add(HttpRequest(
-        None,
-        None,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='POST',
-        body='{}',
-        headers={'content-type': 'application/json'})
-      )
-    self.assertRaises(BatchError, batch.add, self.request1)
-
-  def test_add_fail_for_resumable(self):
-    batch = BatchHttpRequest()
-
-    upload = MediaFileUpload(
-        datafile('small.png'), chunksize=500, resumable=True)
-    self.request1.resumable = upload
-    with self.assertRaises(BatchError) as batch_error:
-      batch.add(self.request1, request_id='1')
-    str(batch_error.exception)
-
-  def test_execute_empty_batch_no_http(self):
-    batch = BatchHttpRequest()
-    ret = batch.execute()
-    self.assertEqual(None, ret)
-
-  def test_execute(self):
-    batch = BatchHttpRequest()
-    callbacks = Callbacks()
-
-    batch.add(self.request1, callback=callbacks.f)
-    batch.add(self.request2, callback=callbacks.f)
-    http = HttpMockSequence([
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-       BATCH_RESPONSE),
-      ])
-    batch.execute(http=http)
-    self.assertEqual({'foo': 42}, callbacks.responses['1'])
-    self.assertEqual(None, callbacks.exceptions['1'])
-    self.assertEqual({'baz': 'qux'}, callbacks.responses['2'])
-    self.assertEqual(None, callbacks.exceptions['2'])
-
-  def test_execute_request_body(self):
-    batch = BatchHttpRequest()
-
-    batch.add(self.request1)
-    batch.add(self.request2)
-    http = HttpMockSequence([
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-        'echo_request_body'),
-      ])
-    try:
-      batch.execute(http=http)
-      self.fail('Should raise exception')
-    except BatchError as e:
-      boundary, _ = e.content.split(None, 1)
-      self.assertEqual('--', boundary[:2])
-      parts = e.content.split(boundary)
-      self.assertEqual(4, len(parts))
-      self.assertEqual('', parts[0])
-      self.assertEqual('--', parts[3].rstrip())
-      header = parts[1].splitlines()[1]
-      self.assertEqual('Content-Type: application/http', header)
-
-  def test_execute_request_body_with_custom_long_request_ids(self):
-    batch = BatchHttpRequest()
-
-    batch.add(self.request1, request_id='abc'*20)
-    batch.add(self.request2, request_id='def'*20)
-    http = HttpMockSequence([
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-        'echo_request_body'),
-      ])
-    try:
-      batch.execute(http=http)
-      self.fail('Should raise exception')
-    except BatchError as e:
-      boundary, _ = e.content.split(None, 1)
-      self.assertEqual('--', boundary[:2])
-      parts = e.content.split(boundary)
-      self.assertEqual(4, len(parts))
-      self.assertEqual('', parts[0])
-      self.assertEqual('--', parts[3].rstrip())
-      for partindex, request_id in ((1, 'abc'*20), (2, 'def'*20)):
-        lines = parts[partindex].splitlines()
-        for n, line in enumerate(lines):
-          if line.startswith('Content-ID:'):
-            # assert correct header folding
-            self.assertTrue(line.endswith('+'), line)
-            header_continuation = lines[n+1]
-            self.assertEqual(
-              header_continuation,
-              ' %s>' % request_id,
-              header_continuation
+        batch = BatchHttpRequest()
+        for i in range(0, MAX_BATCH_LIMIT):
+            batch.add(
+                HttpRequest(
+                    None,
+                    None,
+                    "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+                    method="POST",
+                    body="{}",
+                    headers={"content-type": "application/json"},
+                )
             )
+        self.assertRaises(BatchError, batch.add, self.request1)
 
-  def test_execute_initial_refresh_oauth2(self):
-    batch = BatchHttpRequest()
-    callbacks = Callbacks()
-    cred = MockCredentials('Foo', expired=True)
+    def test_add_fail_for_resumable(self):
+        batch = BatchHttpRequest()
 
-    http = HttpMockSequence([
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-       BATCH_SINGLE_RESPONSE),
-    ])
+        upload = MediaFileUpload(datafile("small.png"), chunksize=500, resumable=True)
+        self.request1.resumable = upload
+        with self.assertRaises(BatchError) as batch_error:
+            batch.add(self.request1, request_id="1")
+        str(batch_error.exception)
 
-    cred.authorize(http)
+    def test_execute_empty_batch_no_http(self):
+        batch = BatchHttpRequest()
+        ret = batch.execute()
+        self.assertEqual(None, ret)
 
-    batch.add(self.request1, callback=callbacks.f)
-    batch.execute(http=http)
+    def test_execute(self):
+        batch = BatchHttpRequest()
+        callbacks = Callbacks()
 
-    self.assertEqual({'foo': 42}, callbacks.responses['1'])
-    self.assertIsNone(callbacks.exceptions['1'])
+        batch.add(self.request1, callback=callbacks.f)
+        batch.add(self.request2, callback=callbacks.f)
+        http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    BATCH_RESPONSE,
+                )
+            ]
+        )
+        batch.execute(http=http)
+        self.assertEqual({"foo": 42}, callbacks.responses["1"])
+        self.assertEqual(None, callbacks.exceptions["1"])
+        self.assertEqual({"baz": "qux"}, callbacks.responses["2"])
+        self.assertEqual(None, callbacks.exceptions["2"])
 
-    self.assertEqual(1, cred._refreshed)
+    def test_execute_request_body(self):
+        batch = BatchHttpRequest()
 
-    self.assertEqual(1, cred._authorized)
+        batch.add(self.request1)
+        batch.add(self.request2)
+        http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    "echo_request_body",
+                )
+            ]
+        )
+        try:
+            batch.execute(http=http)
+            self.fail("Should raise exception")
+        except BatchError as e:
+            boundary, _ = e.content.split(None, 1)
+            self.assertEqual("--", boundary[:2])
+            parts = e.content.split(boundary)
+            self.assertEqual(4, len(parts))
+            self.assertEqual("", parts[0])
+            self.assertEqual("--", parts[3].rstrip())
+            header = parts[1].splitlines()[1]
+            self.assertEqual("Content-Type: application/http", header)
 
-    self.assertEqual(1, cred._applied)
+    def test_execute_request_body_with_custom_long_request_ids(self):
+        batch = BatchHttpRequest()
 
-  def test_execute_refresh_and_retry_on_401(self):
-    batch = BatchHttpRequest()
-    callbacks = Callbacks()
-    cred_1 = MockCredentials('Foo')
-    cred_2 = MockCredentials('Bar')
+        batch.add(self.request1, request_id="abc" * 20)
+        batch.add(self.request2, request_id="def" * 20)
+        http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    "echo_request_body",
+                )
+            ]
+        )
+        try:
+            batch.execute(http=http)
+            self.fail("Should raise exception")
+        except BatchError as e:
+            boundary, _ = e.content.split(None, 1)
+            self.assertEqual("--", boundary[:2])
+            parts = e.content.split(boundary)
+            self.assertEqual(4, len(parts))
+            self.assertEqual("", parts[0])
+            self.assertEqual("--", parts[3].rstrip())
+            for partindex, request_id in ((1, "abc" * 20), (2, "def" * 20)):
+                lines = parts[partindex].splitlines()
+                for n, line in enumerate(lines):
+                    if line.startswith("Content-ID:"):
+                        # assert correct header folding
+                        self.assertTrue(line.endswith("+"), line)
+                        header_continuation = lines[n + 1]
+                        self.assertEqual(
+                            header_continuation,
+                            " %s>" % request_id,
+                            header_continuation,
+                        )
 
-    http = HttpMockSequence([
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-       BATCH_RESPONSE_WITH_401),
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-       BATCH_SINGLE_RESPONSE),
-      ])
+    def test_execute_initial_refresh_oauth2(self):
+        batch = BatchHttpRequest()
+        callbacks = Callbacks()
+        cred = MockCredentials("Foo", expired=True)
 
-    creds_http_1 = HttpMockSequence([])
-    cred_1.authorize(creds_http_1)
+        http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    BATCH_SINGLE_RESPONSE,
+                )
+            ]
+        )
 
-    creds_http_2 = HttpMockSequence([])
-    cred_2.authorize(creds_http_2)
+        cred.authorize(http)
 
-    self.request1.http = creds_http_1
-    self.request2.http = creds_http_2
+        batch.add(self.request1, callback=callbacks.f)
+        batch.execute(http=http)
 
-    batch.add(self.request1, callback=callbacks.f)
-    batch.add(self.request2, callback=callbacks.f)
-    batch.execute(http=http)
+        self.assertEqual({"foo": 42}, callbacks.responses["1"])
+        self.assertIsNone(callbacks.exceptions["1"])
 
-    self.assertEqual({'foo': 42}, callbacks.responses['1'])
-    self.assertEqual(None, callbacks.exceptions['1'])
-    self.assertEqual({'baz': 'qux'}, callbacks.responses['2'])
-    self.assertEqual(None, callbacks.exceptions['2'])
+        self.assertEqual(1, cred._refreshed)
 
-    self.assertEqual(1, cred_1._refreshed)
-    self.assertEqual(0, cred_2._refreshed)
+        self.assertEqual(1, cred._authorized)
 
-    self.assertEqual(1, cred_1._authorized)
-    self.assertEqual(1, cred_2._authorized)
+        self.assertEqual(1, cred._applied)
 
-    self.assertEqual(1, cred_2._applied)
-    self.assertEqual(2, cred_1._applied)
+    def test_execute_refresh_and_retry_on_401(self):
+        batch = BatchHttpRequest()
+        callbacks = Callbacks()
+        cred_1 = MockCredentials("Foo")
+        cred_2 = MockCredentials("Bar")
 
-  def test_http_errors_passed_to_callback(self):
-    batch = BatchHttpRequest()
-    callbacks = Callbacks()
-    cred_1 = MockCredentials('Foo')
-    cred_2 = MockCredentials('Bar')
+        http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    BATCH_RESPONSE_WITH_401,
+                ),
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    BATCH_SINGLE_RESPONSE,
+                ),
+            ]
+        )
 
-    http = HttpMockSequence([
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-       BATCH_RESPONSE_WITH_401),
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-       BATCH_RESPONSE_WITH_401),
-      ])
+        creds_http_1 = HttpMockSequence([])
+        cred_1.authorize(creds_http_1)
 
-    creds_http_1 = HttpMockSequence([])
-    cred_1.authorize(creds_http_1)
+        creds_http_2 = HttpMockSequence([])
+        cred_2.authorize(creds_http_2)
 
-    creds_http_2 = HttpMockSequence([])
-    cred_2.authorize(creds_http_2)
+        self.request1.http = creds_http_1
+        self.request2.http = creds_http_2
 
-    self.request1.http = creds_http_1
-    self.request2.http = creds_http_2
+        batch.add(self.request1, callback=callbacks.f)
+        batch.add(self.request2, callback=callbacks.f)
+        batch.execute(http=http)
 
-    batch.add(self.request1, callback=callbacks.f)
-    batch.add(self.request2, callback=callbacks.f)
-    batch.execute(http=http)
+        self.assertEqual({"foo": 42}, callbacks.responses["1"])
+        self.assertEqual(None, callbacks.exceptions["1"])
+        self.assertEqual({"baz": "qux"}, callbacks.responses["2"])
+        self.assertEqual(None, callbacks.exceptions["2"])
 
-    self.assertEqual(None, callbacks.responses['1'])
-    self.assertEqual(401, callbacks.exceptions['1'].resp.status)
-    self.assertEqual(
-        'Authorization Required', callbacks.exceptions['1'].resp.reason)
-    self.assertEqual({u'baz': u'qux'}, callbacks.responses['2'])
-    self.assertEqual(None, callbacks.exceptions['2'])
+        self.assertEqual(1, cred_1._refreshed)
+        self.assertEqual(0, cred_2._refreshed)
 
-  def test_execute_global_callback(self):
-    callbacks = Callbacks()
-    batch = BatchHttpRequest(callback=callbacks.f)
+        self.assertEqual(1, cred_1._authorized)
+        self.assertEqual(1, cred_2._authorized)
 
-    batch.add(self.request1)
-    batch.add(self.request2)
-    http = HttpMockSequence([
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-       BATCH_RESPONSE),
-      ])
-    batch.execute(http=http)
-    self.assertEqual({'foo': 42}, callbacks.responses['1'])
-    self.assertEqual({'baz': 'qux'}, callbacks.responses['2'])
+        self.assertEqual(1, cred_2._applied)
+        self.assertEqual(2, cred_1._applied)
 
-  def test_execute_batch_http_error(self):
-    callbacks = Callbacks()
-    batch = BatchHttpRequest(callback=callbacks.f)
+    def test_http_errors_passed_to_callback(self):
+        batch = BatchHttpRequest()
+        callbacks = Callbacks()
+        cred_1 = MockCredentials("Foo")
+        cred_2 = MockCredentials("Bar")
 
-    batch.add(self.request1)
-    batch.add(self.request2)
-    http = HttpMockSequence([
-      ({'status': '200',
-        'content-type': 'multipart/mixed; boundary="batch_foobarbaz"'},
-       BATCH_ERROR_RESPONSE),
-      ])
-    batch.execute(http=http)
-    self.assertEqual({'foo': 42}, callbacks.responses['1'])
-    expected = ('<HttpError 403 when requesting '
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar returned '
-        '"Access Not Configured">')
-    self.assertEqual(expected, str(callbacks.exceptions['2']))
+        http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    BATCH_RESPONSE_WITH_401,
+                ),
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    BATCH_RESPONSE_WITH_401,
+                ),
+            ]
+        )
+
+        creds_http_1 = HttpMockSequence([])
+        cred_1.authorize(creds_http_1)
+
+        creds_http_2 = HttpMockSequence([])
+        cred_2.authorize(creds_http_2)
+
+        self.request1.http = creds_http_1
+        self.request2.http = creds_http_2
+
+        batch.add(self.request1, callback=callbacks.f)
+        batch.add(self.request2, callback=callbacks.f)
+        batch.execute(http=http)
+
+        self.assertEqual(None, callbacks.responses["1"])
+        self.assertEqual(401, callbacks.exceptions["1"].resp.status)
+        self.assertEqual(
+            "Authorization Required", callbacks.exceptions["1"].resp.reason
+        )
+        self.assertEqual({u"baz": u"qux"}, callbacks.responses["2"])
+        self.assertEqual(None, callbacks.exceptions["2"])
+
+    def test_execute_global_callback(self):
+        callbacks = Callbacks()
+        batch = BatchHttpRequest(callback=callbacks.f)
+
+        batch.add(self.request1)
+        batch.add(self.request2)
+        http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    BATCH_RESPONSE,
+                )
+            ]
+        )
+        batch.execute(http=http)
+        self.assertEqual({"foo": 42}, callbacks.responses["1"])
+        self.assertEqual({"baz": "qux"}, callbacks.responses["2"])
+
+    def test_execute_batch_http_error(self):
+        callbacks = Callbacks()
+        batch = BatchHttpRequest(callback=callbacks.f)
+
+        batch.add(self.request1)
+        batch.add(self.request2)
+        http = HttpMockSequence(
+            [
+                (
+                    {
+                        "status": "200",
+                        "content-type": 'multipart/mixed; boundary="batch_foobarbaz"',
+                    },
+                    BATCH_ERROR_RESPONSE,
+                )
+            ]
+        )
+        batch.execute(http=http)
+        self.assertEqual({"foo": 42}, callbacks.responses["1"])
+        expected = (
+            "<HttpError 403 when requesting "
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar returned "
+            '"Access Not Configured">'
+        )
+        self.assertEqual(expected, str(callbacks.exceptions["2"]))
 
 
 class TestRequestUriTooLong(unittest.TestCase):
+    def test_turn_get_into_post(self):
+        def _postproc(resp, content):
+            return content
 
-  def test_turn_get_into_post(self):
+        http = HttpMockSequence(
+            [
+                ({"status": "200"}, "echo_request_body"),
+                ({"status": "200"}, "echo_request_headers"),
+            ]
+        )
 
-    def _postproc(resp, content):
-      return content
+        # Send a long query parameter.
+        query = {"q": "a" * MAX_URI_LENGTH + "?&"}
+        req = HttpRequest(
+            http,
+            _postproc,
+            "http://example.com?" + urlencode(query),
+            method="GET",
+            body=None,
+            headers={},
+            methodId="foo",
+            resumable=None,
+        )
 
-    http = HttpMockSequence([
-      ({'status': '200'},
-        'echo_request_body'),
-      ({'status': '200'},
-        'echo_request_headers'),
-      ])
+        # Query parameters should be sent in the body.
+        response = req.execute()
+        self.assertEqual(b"q=" + b"a" * MAX_URI_LENGTH + b"%3F%26", response)
 
-    # Send a long query parameter.
-    query = {
-        'q': 'a' * MAX_URI_LENGTH + '?&'
-        }
-    req = HttpRequest(
-        http,
-        _postproc,
-        'http://example.com?' + urlencode(query),
-        method='GET',
-        body=None,
-        headers={},
-        methodId='foo',
-        resumable=None)
-
-    # Query parameters should be sent in the body.
-    response = req.execute()
-    self.assertEqual(b'q=' + b'a' * MAX_URI_LENGTH + b'%3F%26', response)
-
-    # Extra headers should be set.
-    response = req.execute()
-    self.assertEqual('GET', response['x-http-method-override'])
-    self.assertEqual(str(MAX_URI_LENGTH + 8), response['content-length'])
-    self.assertEqual(
-        'application/x-www-form-urlencoded', response['content-type'])
+        # Extra headers should be set.
+        response = req.execute()
+        self.assertEqual("GET", response["x-http-method-override"])
+        self.assertEqual(str(MAX_URI_LENGTH + 8), response["content-length"])
+        self.assertEqual("application/x-www-form-urlencoded", response["content-type"])
 
 
 class TestStreamSlice(unittest.TestCase):
-  """Test _StreamSlice."""
+    """Test _StreamSlice."""
 
-  def setUp(self):
-    self.stream = BytesIO(b'0123456789')
+    def setUp(self):
+        self.stream = BytesIO(b"0123456789")
 
-  def test_read(self):
-    s =  _StreamSlice(self.stream, 0, 4)
-    self.assertEqual(b'', s.read(0))
-    self.assertEqual(b'0', s.read(1))
-    self.assertEqual(b'123', s.read())
+    def test_read(self):
+        s = _StreamSlice(self.stream, 0, 4)
+        self.assertEqual(b"", s.read(0))
+        self.assertEqual(b"0", s.read(1))
+        self.assertEqual(b"123", s.read())
 
-  def test_read_too_much(self):
-    s =  _StreamSlice(self.stream, 1, 4)
-    self.assertEqual(b'1234', s.read(6))
+    def test_read_too_much(self):
+        s = _StreamSlice(self.stream, 1, 4)
+        self.assertEqual(b"1234", s.read(6))
 
-  def test_read_all(self):
-    s =  _StreamSlice(self.stream, 2, 1)
-    self.assertEqual(b'2', s.read(-1))
+    def test_read_all(self):
+        s = _StreamSlice(self.stream, 2, 1)
+        self.assertEqual(b"2", s.read(-1))
 
 
 class TestResponseCallback(unittest.TestCase):
-  """Test adding callbacks to responses."""
+    """Test adding callbacks to responses."""
 
-  def test_ensure_response_callback(self):
-    m = JsonModel()
-    request = HttpRequest(
-        None,
-        m.response,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='POST',
-        body='{}',
-        headers={'content-type': 'application/json'})
-    h = HttpMockSequence([ ({'status': 200}, '{}')])
-    responses = []
-    def _on_response(resp, responses=responses):
-      responses.append(resp)
-    request.add_response_callback(_on_response)
-    request.execute(http=h)
-    self.assertEqual(1, len(responses))
+    def test_ensure_response_callback(self):
+        m = JsonModel()
+        request = HttpRequest(
+            None,
+            m.response,
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+            method="POST",
+            body="{}",
+            headers={"content-type": "application/json"},
+        )
+        h = HttpMockSequence([({"status": 200}, "{}")])
+        responses = []
+
+        def _on_response(resp, responses=responses):
+            responses.append(resp)
+
+        request.add_response_callback(_on_response)
+        request.execute(http=h)
+        self.assertEqual(1, len(responses))
 
 
 class TestHttpMock(unittest.TestCase):
-  def test_default_response_headers(self):
-    http = HttpMock(datafile('zoo.json'))
-    resp, content = http.request("http://example.com")
-    self.assertEqual(resp.status, 200)
+    def test_default_response_headers(self):
+        http = HttpMock(datafile("zoo.json"))
+        resp, content = http.request("http://example.com")
+        self.assertEqual(resp.status, 200)
 
-  def test_error_response(self):
-    http = HttpMock(datafile('bad_request.json'), {'status': '400'})
-    model = JsonModel()
-    request = HttpRequest(
-        http,
-        model.response,
-        'https://www.googleapis.com/someapi/v1/collection/?foo=bar',
-        method='GET',
-        headers={})
-    self.assertRaises(HttpError, request.execute)
+    def test_error_response(self):
+        http = HttpMock(datafile("bad_request.json"), {"status": "400"})
+        model = JsonModel()
+        request = HttpRequest(
+            http,
+            model.response,
+            "https://www.googleapis.com/someapi/v1/collection/?foo=bar",
+            method="GET",
+            headers={},
+        )
+        self.assertRaises(HttpError, request.execute)
 
 
 class TestHttpBuild(unittest.TestCase):
-  original_socket_default_timeout = None
+    original_socket_default_timeout = None
 
-  @classmethod
-  def setUpClass(cls):
-    cls.original_socket_default_timeout = socket.getdefaulttimeout()
+    @classmethod
+    def setUpClass(cls):
+        cls.original_socket_default_timeout = socket.getdefaulttimeout()
 
-  @classmethod
-  def tearDownClass(cls):
-    socket.setdefaulttimeout(cls.original_socket_default_timeout)
+    @classmethod
+    def tearDownClass(cls):
+        socket.setdefaulttimeout(cls.original_socket_default_timeout)
 
-  def test_build_http_sets_default_timeout_if_none_specified(self):
-    socket.setdefaulttimeout(None)
-    http = build_http()
-    self.assertIsInstance(http.timeout, int)
-    self.assertGreater(http.timeout, 0)
+    def test_build_http_sets_default_timeout_if_none_specified(self):
+        socket.setdefaulttimeout(None)
+        http = build_http()
+        self.assertIsInstance(http.timeout, int)
+        self.assertGreater(http.timeout, 0)
 
-  def test_build_http_default_timeout_can_be_overridden(self):
-    socket.setdefaulttimeout(1.5)
-    http = build_http()
-    self.assertAlmostEqual(http.timeout, 1.5, delta=0.001)
+    def test_build_http_default_timeout_can_be_overridden(self):
+        socket.setdefaulttimeout(1.5)
+        http = build_http()
+        self.assertAlmostEqual(http.timeout, 1.5, delta=0.001)
 
-  def test_build_http_default_timeout_can_be_set_to_zero(self):
-    socket.setdefaulttimeout(0)
-    http = build_http()
-    self.assertEquals(http.timeout, 0)
+    def test_build_http_default_timeout_can_be_set_to_zero(self):
+        socket.setdefaulttimeout(0)
+        http = build_http()
+        self.assertEquals(http.timeout, 0)
 
 
-if __name__ == '__main__':
-  logging.getLogger().setLevel(logging.ERROR)
-  unittest.main()
+if __name__ == "__main__":
+    logging.getLogger().setLevel(logging.ERROR)
+    unittest.main()
diff --git a/tests/test_json_model.py b/tests/test_json_model.py
index 006eb47..0064f3f 100644
--- a/tests/test_json_model.py
+++ b/tests/test_json_model.py
@@ -21,7 +21,7 @@
 from __future__ import absolute_import
 import six
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 import copy
 import json
@@ -39,256 +39,278 @@
 
 
 class Model(unittest.TestCase):
-  def test_json_no_body(self):
-    model = JsonModel(data_wrapper=False)
+    def test_json_no_body(self):
+        model = JsonModel(data_wrapper=False)
 
-    headers = {}
-    path_params = {}
-    query_params = {}
-    body = None
+        headers = {}
+        path_params = {}
+        query_params = {}
+        body = None
 
-    headers, unused_params, query, body = model.request(
-        headers, path_params, query_params, body)
+        headers, unused_params, query, body = model.request(
+            headers, path_params, query_params, body
+        )
 
-    self.assertEqual(headers['accept'], 'application/json')
-    self.assertTrue('content-type' not in headers)
-    self.assertNotEqual(query, '')
-    self.assertEqual(body, None)
+        self.assertEqual(headers["accept"], "application/json")
+        self.assertTrue("content-type" not in headers)
+        self.assertNotEqual(query, "")
+        self.assertEqual(body, None)
 
-  def test_json_body(self):
-    model = JsonModel(data_wrapper=False)
+    def test_json_body(self):
+        model = JsonModel(data_wrapper=False)
 
-    headers = {}
-    path_params = {}
-    query_params = {}
-    body = {}
+        headers = {}
+        path_params = {}
+        query_params = {}
+        body = {}
 
-    headers, unused_params, query, body = model.request(
-        headers, path_params, query_params, body)
+        headers, unused_params, query, body = model.request(
+            headers, path_params, query_params, body
+        )
 
-    self.assertEqual(headers['accept'], 'application/json')
-    self.assertEqual(headers['content-type'], 'application/json')
-    self.assertNotEqual(query, '')
-    self.assertEqual(body, '{}')
+        self.assertEqual(headers["accept"], "application/json")
+        self.assertEqual(headers["content-type"], "application/json")
+        self.assertNotEqual(query, "")
+        self.assertEqual(body, "{}")
 
-  def test_json_body_data_wrapper(self):
-    model = JsonModel(data_wrapper=True)
+    def test_json_body_data_wrapper(self):
+        model = JsonModel(data_wrapper=True)
 
-    headers = {}
-    path_params = {}
-    query_params = {}
-    body = {}
+        headers = {}
+        path_params = {}
+        query_params = {}
+        body = {}
 
-    headers, unused_params, query, body = model.request(
-        headers, path_params, query_params, body)
+        headers, unused_params, query, body = model.request(
+            headers, path_params, query_params, body
+        )
 
-    self.assertEqual(headers['accept'], 'application/json')
-    self.assertEqual(headers['content-type'], 'application/json')
-    self.assertNotEqual(query, '')
-    self.assertEqual(body, '{"data": {}}')
+        self.assertEqual(headers["accept"], "application/json")
+        self.assertEqual(headers["content-type"], "application/json")
+        self.assertNotEqual(query, "")
+        self.assertEqual(body, '{"data": {}}')
 
-  def test_json_body_default_data(self):
-    """Test that a 'data' wrapper doesn't get added if one is already present."""
-    model = JsonModel(data_wrapper=True)
+    def test_json_body_default_data(self):
+        """Test that a 'data' wrapper doesn't get added if one is already present."""
+        model = JsonModel(data_wrapper=True)
 
-    headers = {}
-    path_params = {}
-    query_params = {}
-    body = {'data': 'foo'}
+        headers = {}
+        path_params = {}
+        query_params = {}
+        body = {"data": "foo"}
 
-    headers, unused_params, query, body = model.request(
-        headers, path_params, query_params, body)
+        headers, unused_params, query, body = model.request(
+            headers, path_params, query_params, body
+        )
 
-    self.assertEqual(headers['accept'], 'application/json')
-    self.assertEqual(headers['content-type'], 'application/json')
-    self.assertNotEqual(query, '')
-    self.assertEqual(body, '{"data": "foo"}')
+        self.assertEqual(headers["accept"], "application/json")
+        self.assertEqual(headers["content-type"], "application/json")
+        self.assertNotEqual(query, "")
+        self.assertEqual(body, '{"data": "foo"}')
 
-  def test_json_build_query(self):
-    model = JsonModel(data_wrapper=False)
+    def test_json_build_query(self):
+        model = JsonModel(data_wrapper=False)
 
-    headers = {}
-    path_params = {}
-    query_params = {'foo': 1, 'bar': u'\N{COMET}',
-        'baz': ['fe', 'fi', 'fo', 'fum'], # Repeated parameters
-        'qux': []}
-    body = {}
-
-    headers, unused_params, query, body = model.request(
-        headers, path_params, query_params, body)
-
-    self.assertEqual(headers['accept'], 'application/json')
-    self.assertEqual(headers['content-type'], 'application/json')
-
-    query_dict = parse_qs(query[1:])
-    self.assertEqual(query_dict['foo'], ['1'])
-    if six.PY3:
-      # Python 3, no need to encode
-      self.assertEqual(query_dict['bar'], [u'\N{COMET}'])
-    else:
-      # Python 2, encode string
-      self.assertEqual(query_dict['bar'], [u'\N{COMET}'.encode('utf-8')])
-    self.assertEqual(query_dict['baz'], ['fe', 'fi', 'fo', 'fum'])
-    self.assertTrue('qux' not in query_dict)
-    self.assertEqual(body, '{}')
-
-  def test_user_agent(self):
-    model = JsonModel(data_wrapper=False)
-
-    headers = {'user-agent': 'my-test-app/1.23.4'}
-    path_params = {}
-    query_params = {}
-    body = {}
-
-    headers, unused_params, unused_query, body = model.request(
-        headers, path_params, query_params, body)
-
-    self.assertEqual(headers['user-agent'],
-        'my-test-app/1.23.4 (gzip)')
-
-  def test_x_goog_api_client(self):
-    model = JsonModel(data_wrapper=False)
-
-    # test header composition for cloud clients that wrap discovery
-    headers = {'x-goog-api-client': 'gccl/1.23.4'}
-    path_params = {}
-    query_params = {}
-    body = {}
-
-    headers, unused_params, unused_query, body = model.request(
-        headers, path_params, query_params, body)
-
-    self.assertEqual(headers['x-goog-api-client'],
-        'gccl/1.23.4' + ' gdcl/' + __version__ + ' gl-python/' + platform.python_version())
-
-  def test_bad_response(self):
-    model = JsonModel(data_wrapper=False)
-    resp = httplib2.Response({'status': '401'})
-    resp.reason = 'Unauthorized'
-    content = b'{"error": {"message": "not authorized"}}'
-
-    try:
-      content = model.response(resp, content)
-      self.fail('Should have thrown an exception')
-    except HttpError as e:
-      self.assertTrue('not authorized' in str(e))
-
-    resp['content-type'] = 'application/json'
-
-    try:
-      content = model.response(resp, content)
-      self.fail('Should have thrown an exception')
-    except HttpError as e:
-      self.assertTrue('not authorized' in str(e))
-
-  def test_good_response(self):
-    model = JsonModel(data_wrapper=True)
-    resp = httplib2.Response({'status': '200'})
-    resp.reason = 'OK'
-    content = '{"data": "is good"}'
-
-    content = model.response(resp, content)
-    self.assertEqual(content, 'is good')
-
-  def test_good_response_wo_data(self):
-    model = JsonModel(data_wrapper=False)
-    resp = httplib2.Response({'status': '200'})
-    resp.reason = 'OK'
-    content = '{"foo": "is good"}'
-
-    content = model.response(resp, content)
-    self.assertEqual(content, {'foo': 'is good'})
-
-  def test_good_response_wo_data_str(self):
-    model = JsonModel(data_wrapper=False)
-    resp = httplib2.Response({'status': '200'})
-    resp.reason = 'OK'
-    content = '"data goes here"'
-
-    content = model.response(resp, content)
-    self.assertEqual(content, 'data goes here')
-
-  def test_no_content_response(self):
-    model = JsonModel(data_wrapper=False)
-    resp = httplib2.Response({'status': '204'})
-    resp.reason = 'No Content'
-    content = ''
-
-    content = model.response(resp, content)
-    self.assertEqual(content, {})
-
-  def test_logging(self):
-    class MockLogging(object):
-      def __init__(self):
-        self.info_record = []
-        self.debug_record = []
-      def info(self, message, *args):
-        self.info_record.append(message % args)
-
-      def debug(self, message, *args):
-        self.debug_record.append(message % args)
-
-    class MockResponse(dict):
-      def __init__(self, items):
-        super(MockResponse, self).__init__()
-        self.status = items['status']
-        for key, value in six.iteritems(items):
-          self[key] = value
-    old_logging = googleapiclient.model.LOGGER
-    googleapiclient.model.LOGGER = MockLogging()
-    googleapiclient.model.dump_request_response = True
-    model = JsonModel()
-    request_body = {
-        'field1': 'value1',
-        'field2': 'value2'
+        headers = {}
+        path_params = {}
+        query_params = {
+            "foo": 1,
+            "bar": u"\N{COMET}",
+            "baz": ["fe", "fi", "fo", "fum"],  # Repeated parameters
+            "qux": [],
         }
-    body_string = model.request({}, {}, {}, request_body)[-1]
-    json_body = json.loads(body_string)
-    self.assertEqual(request_body, json_body)
+        body = {}
 
-    response = {'status': 200,
-                'response_field_1': 'response_value_1',
-                'response_field_2': 'response_value_2'}
-    response_body = model.response(MockResponse(response), body_string)
-    self.assertEqual(request_body, response_body)
-    self.assertEqual(googleapiclient.model.LOGGER.info_record[:2],
-                     ['--request-start--',
-                      '-headers-start-'])
-    self.assertTrue('response_field_1: response_value_1' in
-                    googleapiclient.model.LOGGER.info_record)
-    self.assertTrue('response_field_2: response_value_2' in
-                    googleapiclient.model.LOGGER.info_record)
-    self.assertEqual(json.loads(googleapiclient.model.LOGGER.info_record[-2]),
-                     request_body)
-    self.assertEqual(googleapiclient.model.LOGGER.info_record[-1],
-                     '--response-end--')
-    googleapiclient.model.LOGGER = old_logging
+        headers, unused_params, query, body = model.request(
+            headers, path_params, query_params, body
+        )
 
-  def test_no_data_wrapper_deserialize(self):
-    model = JsonModel(data_wrapper=False)
-    resp = httplib2.Response({'status': '200'})
-    resp.reason = 'OK'
-    content = '{"data": "is good"}'
-    content = model.response(resp, content)
-    self.assertEqual(content, {'data': 'is good'})
+        self.assertEqual(headers["accept"], "application/json")
+        self.assertEqual(headers["content-type"], "application/json")
 
-  def test_data_wrapper_deserialize(self):
-    model = JsonModel(data_wrapper=True)
-    resp = httplib2.Response({'status': '200'})
-    resp.reason = 'OK'
-    content = '{"data": "is good"}'
-    content = model.response(resp, content)
-    self.assertEqual(content, 'is good')
+        query_dict = parse_qs(query[1:])
+        self.assertEqual(query_dict["foo"], ["1"])
+        if six.PY3:
+            # Python 3, no need to encode
+            self.assertEqual(query_dict["bar"], [u"\N{COMET}"])
+        else:
+            # Python 2, encode string
+            self.assertEqual(query_dict["bar"], [u"\N{COMET}".encode("utf-8")])
+        self.assertEqual(query_dict["baz"], ["fe", "fi", "fo", "fum"])
+        self.assertTrue("qux" not in query_dict)
+        self.assertEqual(body, "{}")
 
-  def test_data_wrapper_deserialize_nodata(self):
-    model = JsonModel(data_wrapper=True)
-    resp = httplib2.Response({'status': '200'})
-    resp.reason = 'OK'
-    content = '{"atad": "is good"}'
-    content = model.response(resp, content)
-    self.assertEqual(content, {'atad': 'is good'})
+    def test_user_agent(self):
+        model = JsonModel(data_wrapper=False)
+
+        headers = {"user-agent": "my-test-app/1.23.4"}
+        path_params = {}
+        query_params = {}
+        body = {}
+
+        headers, unused_params, unused_query, body = model.request(
+            headers, path_params, query_params, body
+        )
+
+        self.assertEqual(headers["user-agent"], "my-test-app/1.23.4 (gzip)")
+
+    def test_x_goog_api_client(self):
+        model = JsonModel(data_wrapper=False)
+
+        # test header composition for cloud clients that wrap discovery
+        headers = {"x-goog-api-client": "gccl/1.23.4"}
+        path_params = {}
+        query_params = {}
+        body = {}
+
+        headers, unused_params, unused_query, body = model.request(
+            headers, path_params, query_params, body
+        )
+
+        self.assertEqual(
+            headers["x-goog-api-client"],
+            "gccl/1.23.4"
+            + " gdcl/"
+            + __version__
+            + " gl-python/"
+            + platform.python_version(),
+        )
+
+    def test_bad_response(self):
+        model = JsonModel(data_wrapper=False)
+        resp = httplib2.Response({"status": "401"})
+        resp.reason = "Unauthorized"
+        content = b'{"error": {"message": "not authorized"}}'
+
+        try:
+            content = model.response(resp, content)
+            self.fail("Should have thrown an exception")
+        except HttpError as e:
+            self.assertTrue("not authorized" in str(e))
+
+        resp["content-type"] = "application/json"
+
+        try:
+            content = model.response(resp, content)
+            self.fail("Should have thrown an exception")
+        except HttpError as e:
+            self.assertTrue("not authorized" in str(e))
+
+    def test_good_response(self):
+        model = JsonModel(data_wrapper=True)
+        resp = httplib2.Response({"status": "200"})
+        resp.reason = "OK"
+        content = '{"data": "is good"}'
+
+        content = model.response(resp, content)
+        self.assertEqual(content, "is good")
+
+    def test_good_response_wo_data(self):
+        model = JsonModel(data_wrapper=False)
+        resp = httplib2.Response({"status": "200"})
+        resp.reason = "OK"
+        content = '{"foo": "is good"}'
+
+        content = model.response(resp, content)
+        self.assertEqual(content, {"foo": "is good"})
+
+    def test_good_response_wo_data_str(self):
+        model = JsonModel(data_wrapper=False)
+        resp = httplib2.Response({"status": "200"})
+        resp.reason = "OK"
+        content = '"data goes here"'
+
+        content = model.response(resp, content)
+        self.assertEqual(content, "data goes here")
+
+    def test_no_content_response(self):
+        model = JsonModel(data_wrapper=False)
+        resp = httplib2.Response({"status": "204"})
+        resp.reason = "No Content"
+        content = ""
+
+        content = model.response(resp, content)
+        self.assertEqual(content, {})
+
+    def test_logging(self):
+        class MockLogging(object):
+            def __init__(self):
+                self.info_record = []
+                self.debug_record = []
+
+            def info(self, message, *args):
+                self.info_record.append(message % args)
+
+            def debug(self, message, *args):
+                self.debug_record.append(message % args)
+
+        class MockResponse(dict):
+            def __init__(self, items):
+                super(MockResponse, self).__init__()
+                self.status = items["status"]
+                for key, value in six.iteritems(items):
+                    self[key] = value
+
+        old_logging = googleapiclient.model.LOGGER
+        googleapiclient.model.LOGGER = MockLogging()
+        googleapiclient.model.dump_request_response = True
+        model = JsonModel()
+        request_body = {"field1": "value1", "field2": "value2"}
+        body_string = model.request({}, {}, {}, request_body)[-1]
+        json_body = json.loads(body_string)
+        self.assertEqual(request_body, json_body)
+
+        response = {
+            "status": 200,
+            "response_field_1": "response_value_1",
+            "response_field_2": "response_value_2",
+        }
+        response_body = model.response(MockResponse(response), body_string)
+        self.assertEqual(request_body, response_body)
+        self.assertEqual(
+            googleapiclient.model.LOGGER.info_record[:2],
+            ["--request-start--", "-headers-start-"],
+        )
+        self.assertTrue(
+            "response_field_1: response_value_1"
+            in googleapiclient.model.LOGGER.info_record
+        )
+        self.assertTrue(
+            "response_field_2: response_value_2"
+            in googleapiclient.model.LOGGER.info_record
+        )
+        self.assertEqual(
+            json.loads(googleapiclient.model.LOGGER.info_record[-2]), request_body
+        )
+        self.assertEqual(
+            googleapiclient.model.LOGGER.info_record[-1], "--response-end--"
+        )
+        googleapiclient.model.LOGGER = old_logging
+
+    def test_no_data_wrapper_deserialize(self):
+        model = JsonModel(data_wrapper=False)
+        resp = httplib2.Response({"status": "200"})
+        resp.reason = "OK"
+        content = '{"data": "is good"}'
+        content = model.response(resp, content)
+        self.assertEqual(content, {"data": "is good"})
+
+    def test_data_wrapper_deserialize(self):
+        model = JsonModel(data_wrapper=True)
+        resp = httplib2.Response({"status": "200"})
+        resp.reason = "OK"
+        content = '{"data": "is good"}'
+        content = model.response(resp, content)
+        self.assertEqual(content, "is good")
+
+    def test_data_wrapper_deserialize_nodata(self):
+        model = JsonModel(data_wrapper=True)
+        resp = httplib2.Response({"status": "200"})
+        resp.reason = "OK"
+        content = '{"atad": "is good"}'
+        content = model.response(resp, content)
+        self.assertEqual(content, {"atad": "is good"})
 
 
-
-if __name__ == '__main__':
-  unittest.main()
+if __name__ == "__main__":
+    unittest.main()
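
The test_json_model.py hunk above only reflows the code; as a quick reference, here is a minimal, illustrative sketch (not part of the patch, values are placeholders) of the JsonModel request()/response() round trip those tests assert:

import httplib2
from googleapiclient.model import JsonModel

model = JsonModel(data_wrapper=False)

# request() returns (headers, path_params, query, body): the headers gain JSON
# content negotiation and a gzip-capable user-agent, the query string starts
# with "?", and the body dict is serialized to a JSON string.
headers, path_params, query, body = model.request(
    {}, {}, {"foo": "1"}, {"field": "value"}
)

# response() deserializes a successful reply back into Python objects.
resp = httplib2.Response({"status": "200"})
resp.reason = "OK"
assert model.response(resp, '{"field": "value"}') == {"field": "value"}
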
diff --git a/tests/test_mocks.py b/tests/test_mocks.py
index a456b9e..f020f6b 100644
--- a/tests/test_mocks.py
+++ b/tests/test_mocks.py
@@ -20,7 +20,7 @@
 """
 from __future__ import absolute_import
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 import httplib2
 import os
@@ -34,118 +34,133 @@
 from googleapiclient.http import HttpMock
 
 
-DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
+DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
+
 
 def datafile(filename):
-  return os.path.join(DATA_DIR, filename)
+    return os.path.join(DATA_DIR, filename)
 
 
 class Mocks(unittest.TestCase):
-  def setUp(self):
-    self.http = HttpMock(datafile('plus.json'), {'status': '200'})
-    self.zoo_http = HttpMock(datafile('zoo.json'), {'status': '200'})
+    def setUp(self):
+        self.http = HttpMock(datafile("plus.json"), {"status": "200"})
+        self.zoo_http = HttpMock(datafile("zoo.json"), {"status": "200"})
 
-  def test_default_response(self):
-    requestBuilder = RequestMockBuilder({})
-    plus = build('plus', 'v1', http=self.http, requestBuilder=requestBuilder)
-    activity = plus.activities().get(activityId='tag:blah').execute()
-    self.assertEqual({}, activity)
+    def test_default_response(self):
+        requestBuilder = RequestMockBuilder({})
+        plus = build("plus", "v1", http=self.http, requestBuilder=requestBuilder)
+        activity = plus.activities().get(activityId="tag:blah").execute()
+        self.assertEqual({}, activity)
 
-  def test_simple_response(self):
-    requestBuilder = RequestMockBuilder({
-        'plus.activities.get': (None, '{"foo": "bar"}')
-        })
-    plus = build('plus', 'v1', http=self.http, requestBuilder=requestBuilder)
+    def test_simple_response(self):
+        requestBuilder = RequestMockBuilder(
+            {"plus.activities.get": (None, '{"foo": "bar"}')}
+        )
+        plus = build("plus", "v1", http=self.http, requestBuilder=requestBuilder)
 
-    activity = plus.activities().get(activityId='tag:blah').execute()
-    self.assertEqual({"foo": "bar"}, activity)
+        activity = plus.activities().get(activityId="tag:blah").execute()
+        self.assertEqual({"foo": "bar"}, activity)
 
-  def test_unexpected_call(self):
-    requestBuilder = RequestMockBuilder({}, check_unexpected=True)
+    def test_unexpected_call(self):
+        requestBuilder = RequestMockBuilder({}, check_unexpected=True)
 
-    plus = build('plus', 'v1', http=self.http, requestBuilder=requestBuilder)
+        plus = build("plus", "v1", http=self.http, requestBuilder=requestBuilder)
 
-    try:
-      plus.activities().get(activityId='tag:blah').execute()
-      self.fail('UnexpectedMethodError should have been raised')
-    except UnexpectedMethodError:
-      pass
+        try:
+            plus.activities().get(activityId="tag:blah").execute()
+            self.fail("UnexpectedMethodError should have been raised")
+        except UnexpectedMethodError:
+            pass
 
-  def test_simple_unexpected_body(self):
-    requestBuilder = RequestMockBuilder({
-        'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}', None)
-        })
-    zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder)
+    def test_simple_unexpected_body(self):
+        requestBuilder = RequestMockBuilder(
+            {"zoo.animals.insert": (None, '{"data": {"foo": "bar"}}', None)}
+        )
+        zoo = build("zoo", "v1", http=self.zoo_http, requestBuilder=requestBuilder)
 
-    try:
-      zoo.animals().insert(body='{}').execute()
-      self.fail('UnexpectedBodyError should have been raised')
-    except UnexpectedBodyError:
-      pass
+        try:
+            zoo.animals().insert(body="{}").execute()
+            self.fail("UnexpectedBodyError should have been raised")
+        except UnexpectedBodyError:
+            pass
 
-  def test_simple_expected_body(self):
-    requestBuilder = RequestMockBuilder({
-        'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}', '{}')
-        })
-    zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder)
+    def test_simple_expected_body(self):
+        requestBuilder = RequestMockBuilder(
+            {"zoo.animals.insert": (None, '{"data": {"foo": "bar"}}', "{}")}
+        )
+        zoo = build("zoo", "v1", http=self.zoo_http, requestBuilder=requestBuilder)
 
-    try:
-      zoo.animals().insert(body='').execute()
-      self.fail('UnexpectedBodyError should have been raised')
-    except UnexpectedBodyError:
-      pass
+        try:
+            zoo.animals().insert(body="").execute()
+            self.fail("UnexpectedBodyError should have been raised")
+        except UnexpectedBodyError:
+            pass
 
-  def test_simple_wrong_body(self):
-    requestBuilder = RequestMockBuilder({
-        'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}',
-                                    '{"data": {"foo": "bar"}}')
-        })
-    zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder)
+    def test_simple_wrong_body(self):
+        requestBuilder = RequestMockBuilder(
+            {
+                "zoo.animals.insert": (
+                    None,
+                    '{"data": {"foo": "bar"}}',
+                    '{"data": {"foo": "bar"}}',
+                )
+            }
+        )
+        zoo = build("zoo", "v1", http=self.zoo_http, requestBuilder=requestBuilder)
 
-    try:
-      zoo.animals().insert(
-          body='{"data": {"foo": "blah"}}').execute()
-      self.fail('UnexpectedBodyError should have been raised')
-    except UnexpectedBodyError:
-      pass
+        try:
+            zoo.animals().insert(body='{"data": {"foo": "blah"}}').execute()
+            self.fail("UnexpectedBodyError should have been raised")
+        except UnexpectedBodyError:
+            pass
 
-  def test_simple_matching_str_body(self):
-    requestBuilder = RequestMockBuilder({
-        'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}',
-                                    '{"data": {"foo": "bar"}}')
-        })
-    zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder)
+    def test_simple_matching_str_body(self):
+        requestBuilder = RequestMockBuilder(
+            {
+                "zoo.animals.insert": (
+                    None,
+                    '{"data": {"foo": "bar"}}',
+                    '{"data": {"foo": "bar"}}',
+                )
+            }
+        )
+        zoo = build("zoo", "v1", http=self.zoo_http, requestBuilder=requestBuilder)
 
-    activity = zoo.animals().insert(
-        body={'data': {'foo': 'bar'}}).execute()
-    self.assertEqual({'foo': 'bar'}, activity)
+        activity = zoo.animals().insert(body={"data": {"foo": "bar"}}).execute()
+        self.assertEqual({"foo": "bar"}, activity)
 
-  def test_simple_matching_dict_body(self):
-    requestBuilder = RequestMockBuilder({
-        'zoo.animals.insert': (None, '{"data": {"foo": "bar"}}',
-                                    {'data': {'foo': 'bar'}})
-        })
-    zoo = build('zoo', 'v1', http=self.zoo_http, requestBuilder=requestBuilder)
+    def test_simple_matching_dict_body(self):
+        requestBuilder = RequestMockBuilder(
+            {
+                "zoo.animals.insert": (
+                    None,
+                    '{"data": {"foo": "bar"}}',
+                    {"data": {"foo": "bar"}},
+                )
+            }
+        )
+        zoo = build("zoo", "v1", http=self.zoo_http, requestBuilder=requestBuilder)
 
-    activity = zoo.animals().insert(
-        body={'data': {'foo': 'bar'}}).execute()
-    self.assertEqual({'foo': 'bar'}, activity)
+        activity = zoo.animals().insert(body={"data": {"foo": "bar"}}).execute()
+        self.assertEqual({"foo": "bar"}, activity)
 
-  def test_errors(self):
-    errorResponse = httplib2.Response({'status': 500, 'reason': 'Server Error'})
-    requestBuilder = RequestMockBuilder({
-        'plus.activities.list': (errorResponse, b'{}')
-        })
-    plus = build('plus', 'v1', http=self.http, requestBuilder=requestBuilder)
+    def test_errors(self):
+        errorResponse = httplib2.Response({"status": 500, "reason": "Server Error"})
+        requestBuilder = RequestMockBuilder(
+            {"plus.activities.list": (errorResponse, b"{}")}
+        )
+        plus = build("plus", "v1", http=self.http, requestBuilder=requestBuilder)
 
-    try:
-      activity = plus.activities().list(collection='public', userId='me').execute()
-      self.fail('An exception should have been thrown')
-    except HttpError as e:
-      self.assertEqual(b'{}', e.content)
-      self.assertEqual(500, e.resp.status)
-      self.assertEqual('Server Error', e.resp.reason)
+        try:
+            activity = (
+                plus.activities().list(collection="public", userId="me").execute()
+            )
+            self.fail("An exception should have been thrown")
+        except HttpError as e:
+            self.assertEqual(b"{}", e.content)
+            self.assertEqual(500, e.resp.status)
+            self.assertEqual("Server Error", e.resp.reason)
 
 
-if __name__ == '__main__':
-  unittest.main()
+if __name__ == "__main__":
+    unittest.main()
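
The test_mocks.py changes above are formatting-only; the mocking pattern they exercise, condensed into one illustrative snippet (the data-file path assumes the repository root, whereas the tests compute it relative to tests/):

from googleapiclient.discovery import build
from googleapiclient.http import HttpMock, RequestMockBuilder

# HttpMock serves a canned discovery document; RequestMockBuilder maps method
# ids to canned (response, content) pairs, so no real HTTP traffic happens.
http = HttpMock("tests/data/plus.json", {"status": "200"})
requestBuilder = RequestMockBuilder({"plus.activities.get": (None, '{"foo": "bar"}')})

plus = build("plus", "v1", http=http, requestBuilder=requestBuilder)
assert plus.activities().get(activityId="tag:blah").execute() == {"foo": "bar"}
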
diff --git a/tests/test_model.py b/tests/test_model.py
index 6506cfc..c3c936c 100644
--- a/tests/test_model.py
+++ b/tests/test_model.py
@@ -21,7 +21,7 @@
 """
 from __future__ import absolute_import
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 import unittest2 as unittest
 
@@ -31,42 +31,43 @@
 
 TEST_CASES = [
     # (message, original, modified, expected)
-    ("Remove an item from an object",
-     {'a': 1, 'b': 2},  {'a': 1},         {'b': None}),
-    ("Add an item to an object",
-     {'a': 1},          {'a': 1, 'b': 2}, {'b': 2}),
-    ("No changes",
-     {'a': 1, 'b': 2},  {'a': 1, 'b': 2}, {}),
-    ("Empty objects",
-     {},  {}, {}),
-    ("Modify an item in an object",
-     {'a': 1, 'b': 2},  {'a': 1, 'b': 3}, {'b': 3}),
-    ("Change an array",
-     {'a': 1, 'b': [2, 3]},  {'a': 1, 'b': [2]}, {'b': [2]}),
-    ("Modify a nested item",
-     {'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
-     {'a': 1, 'b': {'foo':'bar', 'baz': 'qaax'}},
-     {'b': {'baz': 'qaax'}}),
-    ("Modify a nested array",
-     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
-     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qaax'}]},
-     {'b': [{'foo':'bar', 'baz': 'qaax'}]}),
-    ("Remove item from a nested array",
-     {'a': 1, 'b': [{'foo':'bar', 'baz': 'qux'}]},
-     {'a': 1, 'b': [{'foo':'bar'}]},
-     {'b': [{'foo':'bar'}]}),
-    ("Remove a nested item",
-     {'a': 1, 'b': {'foo':'bar', 'baz': 'qux'}},
-     {'a': 1, 'b': {'foo':'bar'}},
-     {'b': {'baz': None}})
+    ("Remove an item from an object", {"a": 1, "b": 2}, {"a": 1}, {"b": None}),
+    ("Add an item to an object", {"a": 1}, {"a": 1, "b": 2}, {"b": 2}),
+    ("No changes", {"a": 1, "b": 2}, {"a": 1, "b": 2}, {}),
+    ("Empty objects", {}, {}, {}),
+    ("Modify an item in an object", {"a": 1, "b": 2}, {"a": 1, "b": 3}, {"b": 3}),
+    ("Change an array", {"a": 1, "b": [2, 3]}, {"a": 1, "b": [2]}, {"b": [2]}),
+    (
+        "Modify a nested item",
+        {"a": 1, "b": {"foo": "bar", "baz": "qux"}},
+        {"a": 1, "b": {"foo": "bar", "baz": "qaax"}},
+        {"b": {"baz": "qaax"}},
+    ),
+    (
+        "Modify a nested array",
+        {"a": 1, "b": [{"foo": "bar", "baz": "qux"}]},
+        {"a": 1, "b": [{"foo": "bar", "baz": "qaax"}]},
+        {"b": [{"foo": "bar", "baz": "qaax"}]},
+    ),
+    (
+        "Remove item from a nested array",
+        {"a": 1, "b": [{"foo": "bar", "baz": "qux"}]},
+        {"a": 1, "b": [{"foo": "bar"}]},
+        {"b": [{"foo": "bar"}]},
+    ),
+    (
+        "Remove a nested item",
+        {"a": 1, "b": {"foo": "bar", "baz": "qux"}},
+        {"a": 1, "b": {"foo": "bar"}},
+        {"b": {"baz": None}},
+    ),
 ]
 
 
 class TestPatch(unittest.TestCase):
-
-  def test_patch(self):
-    for (msg, orig, mod, expected_patch) in TEST_CASES:
-      self.assertEqual(expected_patch, makepatch(orig, mod), msg=msg)
+    def test_patch(self):
+        for (msg, orig, mod, expected_patch) in TEST_CASES:
+            self.assertEqual(expected_patch, makepatch(orig, mod), msg=msg)
 
 
 class TestBaseModel(unittest.TestCase):
@@ -74,16 +75,16 @@
         model = BaseModel()
 
         test_cases = [
-            ('hello', 'world', '?hello=world'),
-            ('hello', u'world', '?hello=world'),
-            ('hello', '세계', '?hello=%EC%84%B8%EA%B3%84'),
-            ('hello', u'세계', '?hello=%EC%84%B8%EA%B3%84'),
-            ('hello', 'こんにちは', '?hello=%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF'),
-            ('hello', u'こんにちは', '?hello=%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF'),
-            ('hello', '你好', '?hello=%E4%BD%A0%E5%A5%BD'),
-            ('hello', u'你好', '?hello=%E4%BD%A0%E5%A5%BD'),
-            ('hello', 'مرحبا', '?hello=%D9%85%D8%B1%D8%AD%D8%A8%D8%A7'),
-            ('hello', u'مرحبا', '?hello=%D9%85%D8%B1%D8%AD%D8%A8%D8%A7')
+            ("hello", "world", "?hello=world"),
+            ("hello", u"world", "?hello=world"),
+            ("hello", "세계", "?hello=%EC%84%B8%EA%B3%84"),
+            ("hello", u"세계", "?hello=%EC%84%B8%EA%B3%84"),
+            ("hello", "こんにちは", "?hello=%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF"),
+            ("hello", u"こんにちは", "?hello=%E3%81%93%E3%82%93%E3%81%AB%E3%81%A1%E3%81%AF"),
+            ("hello", "你好", "?hello=%E4%BD%A0%E5%A5%BD"),
+            ("hello", u"你好", "?hello=%E4%BD%A0%E5%A5%BD"),
+            ("hello", "مرحبا", "?hello=%D9%85%D8%B1%D8%AD%D8%A8%D8%A7"),
+            ("hello", u"مرحبا", "?hello=%D9%85%D8%B1%D8%AD%D8%A8%D8%A7"),
         ]
 
         for case in test_cases:
@@ -91,5 +92,5 @@
             self.assertEqual(expect, model._build_query({key: value}))
 
 
-if __name__ == '__main__':
-  unittest.main()
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/test_protobuf_model.py b/tests/test_protobuf_model.py
index 465d120..78caf4e 100644
--- a/tests/test_protobuf_model.py
+++ b/tests/test_protobuf_model.py
@@ -20,7 +20,7 @@
 """
 from __future__ import absolute_import
 
-__author__ = 'mmcdonald@google.com (Matt McDonald)'
+__author__ = "mmcdonald@google.com (Matt McDonald)"
 
 import unittest2 as unittest
 import httplib2
@@ -33,68 +33,70 @@
 
 
 class MockProtocolBuffer(object):
-  def __init__(self, data=None):
-    self.data = data
+    def __init__(self, data=None):
+        self.data = data
 
-  def __eq__(self, other):
-    return self.data == other.data
+    def __eq__(self, other):
+        return self.data == other.data
 
-  @classmethod
-  def FromString(cls, string):
-    return cls(string)
+    @classmethod
+    def FromString(cls, string):
+        return cls(string)
 
-  def SerializeToString(self):
-    return self.data
+    def SerializeToString(self):
+        return self.data
 
 
 class Model(unittest.TestCase):
-  def setUp(self):
-    self.model = ProtocolBufferModel(MockProtocolBuffer)
+    def setUp(self):
+        self.model = ProtocolBufferModel(MockProtocolBuffer)
 
-  def test_no_body(self):
-    headers = {}
-    path_params = {}
-    query_params = {}
-    body = None
+    def test_no_body(self):
+        headers = {}
+        path_params = {}
+        query_params = {}
+        body = None
 
-    headers, params, query, body = self.model.request(
-        headers, path_params, query_params, body)
+        headers, params, query, body = self.model.request(
+            headers, path_params, query_params, body
+        )
 
-    self.assertEqual(headers['accept'], 'application/x-protobuf')
-    self.assertTrue('content-type' not in headers)
-    self.assertNotEqual(query, '')
-    self.assertEqual(body, None)
+        self.assertEqual(headers["accept"], "application/x-protobuf")
+        self.assertTrue("content-type" not in headers)
+        self.assertNotEqual(query, "")
+        self.assertEqual(body, None)
 
-  def test_body(self):
-    headers = {}
-    path_params = {}
-    query_params = {}
-    body = MockProtocolBuffer('data')
+    def test_body(self):
+        headers = {}
+        path_params = {}
+        query_params = {}
+        body = MockProtocolBuffer("data")
 
-    headers, params, query, body = self.model.request(
-        headers, path_params, query_params, body)
+        headers, params, query, body = self.model.request(
+            headers, path_params, query_params, body
+        )
 
-    self.assertEqual(headers['accept'], 'application/x-protobuf')
-    self.assertEqual(headers['content-type'], 'application/x-protobuf')
-    self.assertNotEqual(query, '')
-    self.assertEqual(body, 'data')
+        self.assertEqual(headers["accept"], "application/x-protobuf")
+        self.assertEqual(headers["content-type"], "application/x-protobuf")
+        self.assertNotEqual(query, "")
+        self.assertEqual(body, "data")
 
-  def test_good_response(self):
-    resp = httplib2.Response({'status': '200'})
-    resp.reason = 'OK'
-    content = 'data'
+    def test_good_response(self):
+        resp = httplib2.Response({"status": "200"})
+        resp.reason = "OK"
+        content = "data"
 
-    content = self.model.response(resp, content)
-    self.assertEqual(content, MockProtocolBuffer('data'))
+        content = self.model.response(resp, content)
+        self.assertEqual(content, MockProtocolBuffer("data"))
 
-  def test_no_content_response(self):
-    resp = httplib2.Response({'status': '204'})
-    resp.reason = 'No Content'
-    content = ''
+    def test_no_content_response(self):
+        resp = httplib2.Response({"status": "204"})
+        resp.reason = "No Content"
+        content = ""
 
-    content = self.model.response(resp, content)
-    self.assertEqual(content, MockProtocolBuffer())
+        content = self.model.response(resp, content)
+        self.assertEqual(content, MockProtocolBuffer())
 
 
-if __name__ == '__main__':
-  unittest.main()
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tests/test_schema.py b/tests/test_schema.py
index c1216a5..1732d85 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -15,7 +15,7 @@
 """Unit tests for googleapiclient.schema."""
 from __future__ import absolute_import
 
-__author__ = 'jcgregorio@google.com (Joe Gregorio)'
+__author__ = "jcgregorio@google.com (Joe Gregorio)"
 
 import json
 import os
@@ -24,11 +24,12 @@
 from googleapiclient.schema import Schemas
 
 
-DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
+DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
 
 
 def datafile(filename):
-  return os.path.join(DATA_DIR, filename)
+    return os.path.join(DATA_DIR, filename)
+
 
 LOAD_FEED = """{
     "items": [
@@ -46,113 +47,131 @@
     "kind": "zoo#loadFeed",
   }"""
 
+
 class SchemasTest(unittest.TestCase):
-  def setUp(self):
-    f = open(datafile('zoo.json'))
-    discovery = f.read()
-    f.close()
-    discovery = json.loads(discovery)
-    self.sc = Schemas(discovery)
+    def setUp(self):
+        f = open(datafile("zoo.json"))
+        discovery = f.read()
+        f.close()
+        discovery = json.loads(discovery)
+        self.sc = Schemas(discovery)
 
-  def test_basic_formatting(self):
-    self.assertEqual(sorted(LOAD_FEED.splitlines()),
-                     sorted(self.sc.prettyPrintByName('LoadFeed').splitlines()))
+    def test_basic_formatting(self):
+        self.assertEqual(
+            sorted(LOAD_FEED.splitlines()),
+            sorted(self.sc.prettyPrintByName("LoadFeed").splitlines()),
+        )
 
-  def test_empty_edge_case(self):
-    self.assertTrue('Unknown type' in self.sc.prettyPrintSchema({}))
+    def test_empty_edge_case(self):
+        self.assertTrue("Unknown type" in self.sc.prettyPrintSchema({}))
 
-  def test_simple_object(self):
-    self.assertEqual({}, eval(self.sc.prettyPrintSchema({'type': 'object'})))
+    def test_simple_object(self):
+        self.assertEqual({}, eval(self.sc.prettyPrintSchema({"type": "object"})))
 
-  def test_string(self):
-    self.assertEqual(type(""), type(eval(self.sc.prettyPrintSchema({'type':
-      'string'}))))
+    def test_string(self):
+        self.assertEqual(
+            type(""), type(eval(self.sc.prettyPrintSchema({"type": "string"})))
+        )
 
-  def test_integer(self):
-    self.assertEqual(type(20), type(eval(self.sc.prettyPrintSchema({'type':
-      'integer'}))))
+    def test_integer(self):
+        self.assertEqual(
+            type(20), type(eval(self.sc.prettyPrintSchema({"type": "integer"})))
+        )
 
-  def test_number(self):
-    self.assertEqual(type(1.2), type(eval(self.sc.prettyPrintSchema({'type':
-      'number'}))))
+    def test_number(self):
+        self.assertEqual(
+            type(1.2), type(eval(self.sc.prettyPrintSchema({"type": "number"})))
+        )
 
-  def test_boolean(self):
-    self.assertEqual(type(True), type(eval(self.sc.prettyPrintSchema({'type':
-      'boolean'}))))
+    def test_boolean(self):
+        self.assertEqual(
+            type(True), type(eval(self.sc.prettyPrintSchema({"type": "boolean"})))
+        )
 
-  def test_string_default(self):
-    self.assertEqual('foo', eval(self.sc.prettyPrintSchema({'type':
-      'string', 'default': 'foo'})))
+    def test_string_default(self):
+        self.assertEqual(
+            "foo", eval(self.sc.prettyPrintSchema({"type": "string", "default": "foo"}))
+        )
 
-  def test_integer_default(self):
-    self.assertEqual(20, eval(self.sc.prettyPrintSchema({'type':
-      'integer', 'default': 20})))
+    def test_integer_default(self):
+        self.assertEqual(
+            20, eval(self.sc.prettyPrintSchema({"type": "integer", "default": 20}))
+        )
 
-  def test_number_default(self):
-    self.assertEqual(1.2, eval(self.sc.prettyPrintSchema({'type':
-      'number', 'default': 1.2})))
+    def test_number_default(self):
+        self.assertEqual(
+            1.2, eval(self.sc.prettyPrintSchema({"type": "number", "default": 1.2}))
+        )
 
-  def test_boolean_default(self):
-    self.assertEqual(False, eval(self.sc.prettyPrintSchema({'type':
-      'boolean', 'default': False})))
+    def test_boolean_default(self):
+        self.assertEqual(
+            False,
+            eval(self.sc.prettyPrintSchema({"type": "boolean", "default": False})),
+        )
 
-  def test_null(self):
-    self.assertEqual(None, eval(self.sc.prettyPrintSchema({'type': 'null'})))
+    def test_null(self):
+        self.assertEqual(None, eval(self.sc.prettyPrintSchema({"type": "null"})))
 
-  def test_any(self):
-    self.assertEqual('', eval(self.sc.prettyPrintSchema({'type': 'any'})))
+    def test_any(self):
+        self.assertEqual("", eval(self.sc.prettyPrintSchema({"type": "any"})))
 
-  def test_array(self):
-    self.assertEqual([{}], eval(self.sc.prettyPrintSchema({'type': 'array',
-      'items': {'type': 'object'}})))
+    def test_array(self):
+        self.assertEqual(
+            [{}],
+            eval(
+                self.sc.prettyPrintSchema(
+                    {"type": "array", "items": {"type": "object"}}
+                )
+            ),
+        )
 
-  def test_nested_references(self):
-    feed = {
-        'items': [ {
-            'photo': {
-              'hash': 'A String',
-              'hashAlgorithm': 'A String',
-              'filename': 'A String',
-              'type': 'A String',
-              'size': 42
-              },
-            'kind': 'zoo#animal',
-            'etag': 'A String',
-            'name': 'A String'
-          }
-        ],
-        'kind': 'zoo#animalFeed',
-        'etag': 'A String'
-      }
+    def test_nested_references(self):
+        feed = {
+            "items": [
+                {
+                    "photo": {
+                        "hash": "A String",
+                        "hashAlgorithm": "A String",
+                        "filename": "A String",
+                        "type": "A String",
+                        "size": 42,
+                    },
+                    "kind": "zoo#animal",
+                    "etag": "A String",
+                    "name": "A String",
+                }
+            ],
+            "kind": "zoo#animalFeed",
+            "etag": "A String",
+        }
 
-    self.assertEqual(feed, eval(self.sc.prettyPrintByName('AnimalFeed')))
+        self.assertEqual(feed, eval(self.sc.prettyPrintByName("AnimalFeed")))
 
-  def test_additional_properties(self):
-    items = {
-        'animals': {
-          'a_key': {
-            'photo': {
-              'hash': 'A String',
-              'hashAlgorithm': 'A String',
-              'filename': 'A String',
-              'type': 'A String',
-              'size': 42
-              },
-            'kind': 'zoo#animal',
-            'etag': 'A String',
-            'name': 'A String'
-          }
-        },
-        'kind': 'zoo#animalMap',
-        'etag': 'A String'
-      }
+    def test_additional_properties(self):
+        items = {
+            "animals": {
+                "a_key": {
+                    "photo": {
+                        "hash": "A String",
+                        "hashAlgorithm": "A String",
+                        "filename": "A String",
+                        "type": "A String",
+                        "size": 42,
+                    },
+                    "kind": "zoo#animal",
+                    "etag": "A String",
+                    "name": "A String",
+                }
+            },
+            "kind": "zoo#animalMap",
+            "etag": "A String",
+        }
 
-    self.assertEqual(items, eval(self.sc.prettyPrintByName('AnimalMap')))
+        self.assertEqual(items, eval(self.sc.prettyPrintByName("AnimalMap")))
 
-  def test_unknown_name(self):
-    self.assertRaises(KeyError,
-        self.sc.prettyPrintByName, 'UknownSchemaThing')
+    def test_unknown_name(self):
+        self.assertRaises(KeyError, self.sc.prettyPrintByName, "UknownSchemaThing")
 
 
-if __name__ == '__main__':
-  unittest.main()
+if __name__ == "__main__":
+    unittest.main()
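
The schema tests above all follow one pattern: build Schemas from a parsed discovery document, pretty-print a schema, and eval() the resulting template. A minimal sketch, assuming the same tests/data/zoo.json fixture and run from the repository root:

import json
from googleapiclient.schema import Schemas

with open("tests/data/zoo.json") as f:
    sc = Schemas(json.load(f))

# prettyPrintByName()/prettyPrintSchema() return Python-literal-style templates;
# the tests eval() them and compare against the expected structures.
assert eval(sc.prettyPrintSchema({"type": "string", "default": "foo"})) == "foo"
print(sc.prettyPrintByName("AnimalFeed"))
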