Merge commit '488110c' into import
am: 24f334cf96

Change-Id: I37cab18ed5efeb1e5dc5c85f2c3d46dbc628e4e9
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..6339b4e
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,8 @@
+[report]
+omit =
+    */demo/*
+exclude_lines =
+    # Re-enable the standard pragma
+    pragma: NO COVER
+    # Ignore debug-only repr
+    def __repr__
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..02eda25
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,18 @@
+*~
+*.py[cod]
+*.egg-info/
+build/
+dist/
+distribute-*
+
+# Test files
+.tox/
+nosetests.xml
+
+# Coverage related
+.coverage
+coverage.xml
+htmlcov/
+
+# Make sure a generated file isn't accidentally committed.
+reduced.pylintrc
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..6d9ac9d
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,25 @@
+language: python
+sudo: false
+env:
+  - TOX_ENV=py26
+  - TOX_ENV=py27
+  - TOX_ENV=py27oldoauth2client
+  - TOX_ENV=py34
+  - TOX_ENV=py35
+  - TOX_ENV=pypy
+  - TOX_ENV=lint
+install:
+  - pip install tox
+  - pip install . --allow-external argparse
+script: tox -e $TOX_ENV
+after_success:
+  - if [[ "${TOX_ENV}" == "py27" ]]; then tox -e coveralls; fi
+
+# Tweak for adding python3.5; see
+# https://github.com/travis-ci/travis-ci/issues/4794
+addons:
+  apt:
+    sources:
+      - deadsnakes
+    packages:
+      - python3.5
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..7585bef
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,5 @@
+include *.py
+include *.txt
+include *.md
+recursive-include apitools *.py
+recursive-include apitools *.json
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..97042dd
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,17 @@
+name: "apitools"
+description:
+    "google-apitools is a collection of utilities to make it easier to build "
+    "client-side tools, especially those that talk to Google APIs."
+
+third_party {
+  url {
+    type: HOMEPAGE
+    value: "https://github.com/google/apitools"
+  }
+  url {
+    type: GIT
+    value: "https://github.com/google/apitools"
+  }
+  version: "0.5.11"
+  last_upgrade_date { year: 2018 month: 6 day: 5 }
+}
diff --git a/MODULE_LICENSE_APACHE2 b/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_APACHE2
diff --git a/NOTICE b/NOTICE
new file mode 120000
index 0000000..7a694c9
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1 @@
+LICENSE
\ No newline at end of file
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..e9eae94
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,47 @@
+google-apitools
+===============
+
+|pypi| |build| |coverage|
+
+``google-apitools`` is a collection of utilities to make it easier to build
+client-side tools, especially those that talk to Google APIs.
+
+**NOTE**: This library is stable, but in maintenance mode, and not under
+active development. However, any bugs or security issues will be fixed
+promptly.
+
+Installing as a library
+-----------------------
+
+To install the library into the current virtual environment::
+
+   $ pip install google-apitools
+
+Installing the command-line tools
+---------------------------------
+
+To install the command-line scripts into the current virtual environment::
+
+   $ pip install google-apitools[cli]
+
+Running the tests
+-----------------
+
+First, install the testing dependencies::
+
+   $ pip install google-apitools[testing]
+
+and the ``nose`` testrunner::
+
+   $ pip install nose
+
+Then run the tests::
+
+   $ nosetests
+
+.. |build| image:: https://travis-ci.org/google/apitools.svg?branch=master
+   :target: https://travis-ci.org/google/apitools
+.. |pypi| image:: https://img.shields.io/pypi/v/google-apitools.svg
+   :target: https://pypi.python.org/pypi/google-apitools
+.. |coverage| image:: https://coveralls.io/repos/google/apitools/badge.svg?branch=master
+   :target: https://coveralls.io/r/google/apitools?branch=master
diff --git a/apitools/Android.bp b/apitools/Android.bp
new file mode 100644
index 0000000..67dd2b6
--- /dev/null
+++ b/apitools/Android.bp
@@ -0,0 +1,39 @@
+// Copyright 2018 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+python_library {
+    name: "py-apitools",
+    host_supported: true,
+    srcs: [
+        "*.py",
+        "base/*.py",
+        "base/py/*.py",
+        "gen/*.py",
+        "scripts/*.py",
+    ],
+    version: {
+        py2: {
+            enabled: true,
+        },
+        py3: {
+            enabled: true,
+        },
+    },
+    libs: [
+        "py-httplib2",
+        "py-oauth2client",
+        "py-six",
+    ],
+    pkg_path: "apitools",
+}
+
diff --git a/apitools/__init__.py b/apitools/__init__.py
new file mode 100644
index 0000000..463cb42
--- /dev/null
+++ b/apitools/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared __init__.py for apitools."""
+
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/apitools/base/__init__.py b/apitools/base/__init__.py
new file mode 100644
index 0000000..463cb42
--- /dev/null
+++ b/apitools/base/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared __init__.py for apitools."""
+
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/apitools/base/protorpclite/__init__.py b/apitools/base/protorpclite/__init__.py
new file mode 100644
index 0000000..224d433
--- /dev/null
+++ b/apitools/base/protorpclite/__init__.py
@@ -0,0 +1,19 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared __init__.py for apitools."""
+
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/apitools/base/protorpclite/descriptor.py b/apitools/base/protorpclite/descriptor.py
new file mode 100644
index 0000000..add0e4c
--- /dev/null
+++ b/apitools/base/protorpclite/descriptor.py
@@ -0,0 +1,615 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Services descriptor definitions.
+
+Contains message definitions and functions for converting
+service classes into transmittable message format.
+
+Describing an Enum instance, Enum class, Field class or Message class will
+generate an appropriate descriptor object that describes that class.
+This message can itself be used to transmit information to clients wishing
+to know the description of an enum value, enum, field or message without
+needing to download the source code.  This format is also compatible with
+other, non-Python languages.
+
+The descriptors are modeled to be binary compatible with
+  https://github.com/google/protobuf
+
+NOTE: The names of types and fields are not always the same between these
+descriptors and the ones defined in descriptor.proto.  This was done in order
+to make source code files that use these descriptors easier to read.  For
+example, it is not necessary to prefix TYPE to all the values in
+FieldDescriptor.Variant as is done in descriptor.proto
+FieldDescriptorProto.Type.
+
+Example:
+
+  class Pixel(messages.Message):
+
+    x = messages.IntegerField(1, required=True)
+    y = messages.IntegerField(2, required=True)
+
+    color = messages.BytesField(3)
+
+  # Describe Pixel class using message descriptor.
+  fields = []
+
+  field = FieldDescriptor()
+  field.name = 'x'
+  field.number = 1
+  field.label = FieldDescriptor.Label.REQUIRED
+  field.variant = FieldDescriptor.Variant.INT64
+  fields.append(field)
+
+  field = FieldDescriptor()
+  field.name = 'y'
+  field.number = 2
+  field.label = FieldDescriptor.Label.REQUIRED
+  field.variant = FieldDescriptor.Variant.INT64
+  fields.append(field)
+
+  field = FieldDescriptor()
+  field.name = 'color'
+  field.number = 3
+  field.label = FieldDescriptor.Label.OPTIONAL
+  field.variant = FieldDescriptor.Variant.BYTES
+  fields.append(field)
+
+  message = MessageDescriptor()
+  message.name = 'Pixel'
+  message.fields = fields
+
+  # Describing is the equivalent of building the above message.
+  message == describe_message(Pixel)
+
+Public Classes:
+  EnumValueDescriptor: Describes Enum values.
+  EnumDescriptor: Describes Enum classes.
+  FieldDescriptor: Describes field instances.
+  FileDescriptor: Describes a single 'file' unit.
+  FileSet: Describes a collection of file descriptors.
+  MessageDescriptor: Describes Message classes.
+
+Public Functions:
+  describe_enum_value: Describe an individual enum-value.
+  describe_enum: Describe an Enum class.
+  describe_field: Describe a Field definition.
+  describe_file: Describe a 'file' unit from a Python module or object.
+  describe_file_set: Describe a file set from a list of modules or objects.
+  describe_message: Describe a Message definition.
+"""
+import codecs
+import types
+
+import six
+
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import util
+
+
+__all__ = [
+    'EnumDescriptor',
+    'EnumValueDescriptor',
+    'FieldDescriptor',
+    'MessageDescriptor',
+    'FileDescriptor',
+    'FileSet',
+    'DescriptorLibrary',
+
+    'describe_enum',
+    'describe_enum_value',
+    'describe_field',
+    'describe_message',
+    'describe_file',
+    'describe_file_set',
+    'describe',
+    'import_descriptor_loader',
+]
+
+
+# NOTE: MessageField is missing because message fields cannot have
+# a default value at this time.
+# TODO(rafek): Support default message values.
+#
+# Map to functions that convert default values of fields of a given type
+# to a string.  The function must return a value that is compatible with
+# FieldDescriptor.default_value and therefore a unicode string.
+_DEFAULT_TO_STRING_MAP = {
+    messages.IntegerField: six.text_type,
+    messages.FloatField: six.text_type,
+    messages.BooleanField: lambda value: value and u'true' or u'false',
+    messages.BytesField: lambda value: codecs.escape_encode(value)[0],
+    messages.StringField: lambda value: value,
+    messages.EnumField: lambda value: six.text_type(value.number),
+}
+
+_DEFAULT_FROM_STRING_MAP = {
+    messages.IntegerField: int,
+    messages.FloatField: float,
+    messages.BooleanField: lambda value: value == u'true',
+    messages.BytesField: lambda value: codecs.escape_decode(value)[0],
+    messages.StringField: lambda value: value,
+    messages.EnumField: int,
+}
+
+
+class EnumValueDescriptor(messages.Message):
+    """Enum value descriptor.
+
+    Fields:
+      name: Name of enumeration value.
+      number: Number of enumeration value.
+    """
+
+    # TODO(rafek): Why are these listed as optional in descriptor.proto.
+    # Harmonize?
+    name = messages.StringField(1, required=True)
+    number = messages.IntegerField(2,
+                                   required=True,
+                                   variant=messages.Variant.INT32)
+
+
+class EnumDescriptor(messages.Message):
+    """Enum class descriptor.
+
+    Fields:
+      name: Name of Enum without any qualification.
+      values: Values defined by Enum class.
+    """
+
+    name = messages.StringField(1)
+    values = messages.MessageField(EnumValueDescriptor, 2, repeated=True)
+
+
+class FieldDescriptor(messages.Message):
+    """Field definition descriptor.
+
+    Enums:
+      Variant: Wire format hint sub-types for field.
+      Label: Values for optional, required and repeated fields.
+
+    Fields:
+      name: Name of field.
+      number: Number of field.
+      variant: Variant of field.
+      type_name: Type name for message and enum fields.
+      default_value: String representation of default value.
+    """
+
+    Variant = messages.Variant  # pylint:disable=invalid-name
+
+    class Label(messages.Enum):
+        """Field label."""
+
+        OPTIONAL = 1
+        REQUIRED = 2
+        REPEATED = 3
+
+    name = messages.StringField(1, required=True)
+    number = messages.IntegerField(3,
+                                   required=True,
+                                   variant=messages.Variant.INT32)
+    label = messages.EnumField(Label, 4, default=Label.OPTIONAL)
+    variant = messages.EnumField(Variant, 5)
+    type_name = messages.StringField(6)
+
+    # For numeric types, contains the original text representation of
+    #   the value.
+    # For booleans, "true" or "false".
+    # For strings, contains the default text contents (not escaped in any
+    #   way).
+    # For bytes, contains the C escaped value.  All bytes < 128 are that are
+    #   traditionally considered unprintable are also escaped.
+    default_value = messages.StringField(7)
+
+
+class MessageDescriptor(messages.Message):
+    """Message definition descriptor.
+
+    Fields:
+      name: Name of Message without any qualification.
+      fields: Fields defined for message.
+      message_types: Nested Message classes defined on message.
+      enum_types: Nested Enum classes defined on message.
+    """
+
+    name = messages.StringField(1)
+    fields = messages.MessageField(FieldDescriptor, 2, repeated=True)
+
+    message_types = messages.MessageField(
+        'apitools.base.protorpclite.descriptor.MessageDescriptor', 3,
+        repeated=True)
+    enum_types = messages.MessageField(EnumDescriptor, 4, repeated=True)
+
+
+class FileDescriptor(messages.Message):
+    """Description of file containing protobuf definitions.
+
+    Fields:
+      package: Fully qualified name of package that definitions belong to.
+      message_types: Message definitions contained in file.
+      enum_types: Enum definitions contained in file.
+    """
+
+    package = messages.StringField(2)
+
+    # TODO(rafek): Add dependency field
+
+    message_types = messages.MessageField(MessageDescriptor, 4, repeated=True)
+    enum_types = messages.MessageField(EnumDescriptor, 5, repeated=True)
+
+
+class FileSet(messages.Message):
+    """A collection of FileDescriptors.
+
+    Fields:
+      files: Files in file-set.
+    """
+
+    files = messages.MessageField(FileDescriptor, 1, repeated=True)
+
+
+def describe_enum_value(enum_value):
+    """Build descriptor for Enum instance.
+
+    Args:
+      enum_value: Enum value to provide descriptor for.
+
+    Returns:
+      Initialized EnumValueDescriptor instance describing the Enum instance.
+    """
+    enum_value_descriptor = EnumValueDescriptor()
+    enum_value_descriptor.name = six.text_type(enum_value.name)
+    enum_value_descriptor.number = enum_value.number
+    return enum_value_descriptor
+
+
+def describe_enum(enum_definition):
+    """Build descriptor for Enum class.
+
+    Args:
+      enum_definition: Enum class to provide descriptor for.
+
+    Returns:
+      Initialized EnumDescriptor instance describing the Enum class.
+    """
+    enum_descriptor = EnumDescriptor()
+    enum_descriptor.name = enum_definition.definition_name().split('.')[-1]
+
+    values = []
+    for number in enum_definition.numbers():
+        value = enum_definition.lookup_by_number(number)
+        values.append(describe_enum_value(value))
+
+    if values:
+        enum_descriptor.values = values
+
+    return enum_descriptor
+
+
+def describe_field(field_definition):
+    """Build descriptor for Field instance.
+
+    Args:
+      field_definition: Field instance to provide descriptor for.
+
+    Returns:
+      Initialized FieldDescriptor instance describing the Field instance.
+    """
+    field_descriptor = FieldDescriptor()
+    field_descriptor.name = field_definition.name
+    field_descriptor.number = field_definition.number
+    field_descriptor.variant = field_definition.variant
+
+    if isinstance(field_definition, messages.EnumField):
+        field_descriptor.type_name = field_definition.type.definition_name()
+
+    if isinstance(field_definition, messages.MessageField):
+        field_descriptor.type_name = (
+            field_definition.message_type.definition_name())
+
+    if field_definition.default is not None:
+        field_descriptor.default_value = _DEFAULT_TO_STRING_MAP[
+            type(field_definition)](field_definition.default)
+
+    # Set label.
+    if field_definition.repeated:
+        field_descriptor.label = FieldDescriptor.Label.REPEATED
+    elif field_definition.required:
+        field_descriptor.label = FieldDescriptor.Label.REQUIRED
+    else:
+        field_descriptor.label = FieldDescriptor.Label.OPTIONAL
+
+    return field_descriptor
+
+
+def describe_message(message_definition):
+    """Build descriptor for Message class.
+
+    Args:
+      message_definition: Message class to provide descriptor for.
+
+    Returns:
+      Initialized MessageDescriptor instance describing the Message class.
+    """
+    message_descriptor = MessageDescriptor()
+    # definition_name() is fully qualified; the descriptor stores only the
+    # unqualified leaf name.
+    message_descriptor.name = message_definition.definition_name().split(
+        '.')[-1]
+
+    # Fields are described in ascending field-number order.
+    fields = sorted(message_definition.all_fields(),
+                    key=lambda v: v.number)
+    if fields:
+        message_descriptor.fields = [describe_field(field) for field in fields]
+
+    # __messages__, when present, lists the attribute names of nested
+    # Message classes; leave message_types unset when there are none.
+    try:
+        nested_messages = message_definition.__messages__
+    except AttributeError:
+        pass
+    else:
+        message_descriptors = []
+        for name in nested_messages:
+            value = getattr(message_definition, name)
+            message_descriptors.append(describe_message(value))
+
+        message_descriptor.message_types = message_descriptors
+
+    # __enums__, when present, lists the attribute names of nested Enums.
+    try:
+        nested_enums = message_definition.__enums__
+    except AttributeError:
+        pass
+    else:
+        enum_descriptors = []
+        for name in nested_enums:
+            value = getattr(message_definition, name)
+            enum_descriptors.append(describe_enum(value))
+
+        message_descriptor.enum_types = enum_descriptors
+
+    return message_descriptor
+
+
+def describe_file(module):
+    """Build a file from a specified Python module.
+
+    Args:
+      module: Python module to describe.
+
+    Returns:
+      Initialized FileDescriptor instance describing the module.
+    """
+    descriptor = FileDescriptor()
+    descriptor.package = util.get_package_for_module(module)
+
+    # Normalize an empty package string to None so the field stays unset.
+    if not descriptor.package:
+        descriptor.package = None
+
+    message_descriptors = []
+    enum_descriptors = []
+
+    # Need to iterate over all top level attributes of the module looking for
+    # message and enum definitions.  Each definition must be itself described.
+    # dir() is sorted so the resulting descriptor lists are deterministic.
+    for name in sorted(dir(module)):
+        value = getattr(module, name)
+
+        if isinstance(value, type):
+            if issubclass(value, messages.Message):
+                message_descriptors.append(describe_message(value))
+
+            elif issubclass(value, messages.Enum):
+                enum_descriptors.append(describe_enum(value))
+
+    if message_descriptors:
+        descriptor.message_types = message_descriptors
+
+    if enum_descriptors:
+        descriptor.enum_types = enum_descriptors
+
+    return descriptor
+
+
+def describe_file_set(modules):
+    """Build a file set describing a collection of Python modules.
+
+    Args:
+      modules: Iterable of Python module to describe.
+
+    Returns:
+      Initialized FileSet instance describing the modules.
+    """
+    descriptor = FileSet()
+    # Describe each module in order; leave FileSet.files unset when the
+    # iterable is empty.
+    file_descriptors = [describe_file(module) for module in modules]
+    if file_descriptors:
+        descriptor.files = file_descriptors
+    return descriptor
+
+
+def describe(value):
+    """Describe any value as a descriptor.
+
+    Helper function for describing any object with an appropriate descriptor
+    object.
+
+    Args:
+      value: Value to describe as a descriptor.
+
+    Returns:
+      Descriptor message class if object is describable as a descriptor, else
+      None.
+    """
+    # Dispatch on the runtime kind of the value, checking instances before
+    # classes; anything unrecognized falls through to None.
+    if isinstance(value, types.ModuleType):
+        return describe_file(value)
+    if isinstance(value, messages.Field):
+        return describe_field(value)
+    if isinstance(value, messages.Enum):
+        return describe_enum_value(value)
+    if isinstance(value, type):
+        if issubclass(value, messages.Message):
+            return describe_message(value)
+        if issubclass(value, messages.Enum):
+            return describe_enum(value)
+    return None
+
+
+@util.positional(1)
+def import_descriptor_loader(definition_name, importer=__import__):
+    """Find objects by importing modules as needed.
+
+    A definition loader is a function that resolves a definition name to a
+    descriptor.
+
+    The import finder resolves definitions to their names by importing modules
+    when necessary.
+
+    Args:
+      definition_name: Name of definition to find.
+      importer: Import function used for importing new modules.
+
+    Returns:
+      Appropriate descriptor for any describable type located by name.
+
+    Raises:
+      DefinitionNotFoundError when a name does not refer to either a definition
+      or a module.
+    """
+    # Attempt to import descriptor as a module.
+    if definition_name.startswith('.'):
+        definition_name = definition_name[1:]
+    if not definition_name.startswith('.'):
+        leaf = definition_name.split('.')[-1]
+        if definition_name:
+            try:
+                module = importer(definition_name, '', '', [leaf])
+            except ImportError:
+                pass
+            else:
+                return describe(module)
+
+    try:
+        # Attempt to use messages.find_definition to find item.  Honor the
+        # caller-supplied importer here too; previously this hard-coded
+        # __import__, silently bypassing any injected import function.
+        return describe(messages.find_definition(definition_name,
+                                                 importer=importer))
+    except messages.DefinitionNotFoundError as err:
+        # There are things that find_definition will not find, but if
+        # the parent is loaded, its children can be searched for a
+        # match.
+        split_name = definition_name.rsplit('.', 1)
+        if len(split_name) > 1:
+            parent, child = split_name
+            try:
+                parent_definition = import_descriptor_loader(
+                    parent, importer=importer)
+            except messages.DefinitionNotFoundError:
+                # Fall through to original error.
+                pass
+            else:
+                # Check the parent definition for a matching descriptor.
+                if isinstance(parent_definition, EnumDescriptor):
+                    search_list = parent_definition.values or []
+                elif isinstance(parent_definition, MessageDescriptor):
+                    search_list = parent_definition.fields or []
+                else:
+                    search_list = []
+
+                for definition in search_list:
+                    if definition.name == child:
+                        return definition
+
+        # Still didn't find.  Reraise original exception.
+        raise err
+
+
+class DescriptorLibrary(object):
+    """A descriptor library is an object that contains known definitions.
+
+    A descriptor library contains a cache of descriptor objects mapped by
+    definition name.  It contains all types of descriptors except for
+    file sets.
+
+    When a definition name is requested that the library does not know about
+    it can be provided with a descriptor loader which attempt to resolve the
+    missing descriptor.
+    """
+
+    @util.positional(1)
+    def __init__(self,
+                 descriptors=None,
+                 descriptor_loader=import_descriptor_loader):
+        """Constructor.
+
+        Args:
+          descriptors: A dictionary or dictionary-like object that can be used
+            to store and cache descriptors by definition name.
+          descriptor_loader: A function used for resolving missing descriptors.
+            The function takes a definition name as its parameter and returns
+            an appropriate descriptor.  It may raise DefinitionNotFoundError.
+        """
+        self.__descriptor_loader = descriptor_loader
+        self.__descriptors = descriptors or {}
+
+    def lookup_descriptor(self, definition_name):
+        """Lookup descriptor by name.
+
+        Get descriptor from library by name.  If descriptor is not found will
+        attempt to find via descriptor loader if provided.
+
+        Args:
+          definition_name: Definition name to find.
+
+        Returns:
+          Descriptor that describes definition name.
+
+        Raises:
+          DefinitionNotFoundError if no descriptor exists for definition name.
+        """
+        # Serve from the cache first.
+        try:
+            return self.__descriptors[definition_name]
+        except KeyError:
+            pass
+
+        if self.__descriptor_loader:
+            definition = self.__descriptor_loader(definition_name)
+            # Cache the loaded descriptor for subsequent lookups.
+            self.__descriptors[definition_name] = definition
+            return definition
+        else:
+            raise messages.DefinitionNotFoundError(
+                'Could not find definition for %s' % definition_name)
+
+    def lookup_package(self, definition_name):
+        """Determines the package name for any definition.
+
+        Determine the package that any definition name belongs to. May
+        check parent for package name and will resolve missing
+        descriptors if provided descriptor loader.
+
+        Args:
+          definition_name: Definition name to find package for.
+
+        Returns:
+          The package of the enclosing FileDescriptor, or None if the dotted
+          name is exhausted before a FileDescriptor is found.
+
+        Raises:
+          DefinitionNotFoundError: propagated from lookup_descriptor when a
+            name segment cannot be resolved.
+        """
+        # Walk up the dotted name until a FileDescriptor is reached; its
+        # package attribute is the answer.
+        while True:
+            descriptor = self.lookup_descriptor(definition_name)
+            if isinstance(descriptor, FileDescriptor):
+                return descriptor.package
+            else:
+                index = definition_name.rfind('.')
+                if index < 0:
+                    return None
+                definition_name = definition_name[:index]
diff --git a/apitools/base/protorpclite/descriptor_test.py b/apitools/base/protorpclite/descriptor_test.py
new file mode 100644
index 0000000..fc27ec4
--- /dev/null
+++ b/apitools/base/protorpclite/descriptor_test.py
@@ -0,0 +1,515 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Tests for apitools.base.protorpclite.descriptor."""
+import platform
+import types
+
+import six
+import unittest2
+
+from apitools.base.protorpclite import descriptor
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import test_util
+
+
+RUSSIA = u'\u0420\u043e\u0441\u0441\u0438\u044f'
+
+
+class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
+                          test_util.TestCase):
+    """Run the shared module-interface checks against descriptor."""
+
+    MODULE = descriptor
+
+
+class DescribeEnumValueTest(test_util.TestCase):
+    """Tests for descriptor.describe_enum_value."""
+
+    def testDescribe(self):
+        class MyEnum(messages.Enum):
+            MY_NAME = 10
+
+        # The descriptor carries the enum value's name and number.
+        expected = descriptor.EnumValueDescriptor()
+        expected.name = 'MY_NAME'
+        expected.number = 10
+
+        described = descriptor.describe_enum_value(MyEnum.MY_NAME)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+
+class DescribeEnumTest(test_util.TestCase):
+    """Tests for descriptor.describe_enum."""
+
+    def testEmptyEnum(self):
+        class EmptyEnum(messages.Enum):
+            pass
+
+        expected = descriptor.EnumDescriptor()
+        expected.name = 'EmptyEnum'
+
+        described = descriptor.describe_enum(EmptyEnum)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testNestedEnum(self):
+        # Only the leaf name is recorded, even for nested enums.
+        class MyScope(messages.Message):
+
+            class NestedEnum(messages.Enum):
+                pass
+
+        expected = descriptor.EnumDescriptor()
+        expected.name = 'NestedEnum'
+
+        described = descriptor.describe_enum(MyScope.NestedEnum)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    @unittest2.skipIf('PyPy' in platform.python_implementation(),
+                      'todo: reenable this')
+    def testEnumWithItems(self):
+        class EnumWithItems(messages.Enum):
+            A = 3
+            B = 1
+            C = 2
+
+        expected = descriptor.EnumDescriptor()
+        expected.name = 'EnumWithItems'
+
+        a = descriptor.EnumValueDescriptor()
+        a.name = 'A'
+        a.number = 3
+
+        b = descriptor.EnumValueDescriptor()
+        b.name = 'B'
+        b.number = 1
+
+        c = descriptor.EnumValueDescriptor()
+        c.name = 'C'
+        c.number = 2
+
+        # Expected order is by number: B=1, C=2, A=3.
+        expected.values = [b, c, a]
+
+        described = descriptor.describe_enum(EnumWithItems)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+
+class DescribeFieldTest(test_util.TestCase):
+    """Tests for descriptor.describe_field."""
+
+    def testLabel(self):
+        # Each (repeated, required) combination maps to exactly one label.
+        for repeated, required, expected_label in (
+                (True, False, descriptor.FieldDescriptor.Label.REPEATED),
+                (False, True, descriptor.FieldDescriptor.Label.REQUIRED),
+                (False, False, descriptor.FieldDescriptor.Label.OPTIONAL)):
+            field = messages.IntegerField(
+                10, required=required, repeated=repeated)
+            field.name = 'a_field'
+
+            expected = descriptor.FieldDescriptor()
+            expected.name = 'a_field'
+            expected.number = 10
+            expected.label = expected_label
+            expected.variant = descriptor.FieldDescriptor.Variant.INT64
+
+            described = descriptor.describe_field(field)
+            described.check_initialized()
+            self.assertEquals(expected, described)
+
+    def testDefault(self):
+        # Defaults are serialized to strings; note booleans lowercase and
+        # bytes in escaped form.
+        test_cases = (
+            (messages.IntegerField, 200, '200'),
+            (messages.FloatField, 1.5, '1.5'),
+            (messages.FloatField, 1e6, '1000000.0'),
+            (messages.BooleanField, True, 'true'),
+            (messages.BooleanField, False, 'false'),
+            (messages.BytesField,
+             b''.join([six.int2byte(x) for x in (31, 32, 33)]),
+             b'\\x1f !'),
+            (messages.StringField, RUSSIA, RUSSIA),
+        )
+        for field_class, default, expected_default in test_cases:
+            field = field_class(10, default=default)
+            field.name = u'a_field'
+
+            expected = descriptor.FieldDescriptor()
+            expected.name = u'a_field'
+            expected.number = 10
+            expected.label = descriptor.FieldDescriptor.Label.OPTIONAL
+            expected.variant = field_class.DEFAULT_VARIANT
+            expected.default_value = expected_default
+
+            described = descriptor.describe_field(field)
+            described.check_initialized()
+            self.assertEquals(expected, described)
+
+    def testDefault_EnumField(self):
+        class MyEnum(messages.Enum):
+
+            VAL = 1
+
+        module_name = test_util.get_module_name(MyEnum)
+        field = messages.EnumField(MyEnum, 10, default=MyEnum.VAL)
+        field.name = 'a_field'
+
+        expected = descriptor.FieldDescriptor()
+        expected.name = 'a_field'
+        expected.number = 10
+        expected.label = descriptor.FieldDescriptor.Label.OPTIONAL
+        expected.variant = messages.EnumField.DEFAULT_VARIANT
+        expected.type_name = '%s.MyEnum' % module_name
+        # Enum defaults are stored as the stringified number.
+        expected.default_value = '1'
+
+        described = descriptor.describe_field(field)
+        self.assertEquals(expected, described)
+
+    def testMessageField(self):
+        field = messages.MessageField(descriptor.FieldDescriptor, 10)
+        field.name = 'a_field'
+
+        expected = descriptor.FieldDescriptor()
+        expected.name = 'a_field'
+        expected.number = 10
+        expected.label = descriptor.FieldDescriptor.Label.OPTIONAL
+        expected.variant = messages.MessageField.DEFAULT_VARIANT
+        # Message fields record the fully-qualified type name.
+        expected.type_name = (
+            'apitools.base.protorpclite.descriptor.FieldDescriptor')
+
+        described = descriptor.describe_field(field)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testDateTimeField(self):
+        field = message_types.DateTimeField(20)
+        field.name = 'a_timestamp'
+
+        expected = descriptor.FieldDescriptor()
+        expected.name = 'a_timestamp'
+        expected.number = 20
+        expected.label = descriptor.FieldDescriptor.Label.OPTIONAL
+        expected.variant = messages.MessageField.DEFAULT_VARIANT
+        # DateTimeField is wire-encoded as its underlying DateTimeMessage.
+        expected.type_name = (
+            'apitools.base.protorpclite.message_types.DateTimeMessage')
+
+        described = descriptor.describe_field(field)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+
+class DescribeMessageTest(test_util.TestCase):
+    """Tests for descriptor.describe_message."""
+
+    def testEmptyDefinition(self):
+        class MyMessage(messages.Message):
+            pass
+
+        expected = descriptor.MessageDescriptor()
+        expected.name = 'MyMessage'
+
+        described = descriptor.describe_message(MyMessage)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testDefinitionWithFields(self):
+        class MessageWithFields(messages.Message):
+            field1 = messages.IntegerField(10)
+            field2 = messages.StringField(30)
+            field3 = messages.IntegerField(20)
+
+        expected = descriptor.MessageDescriptor()
+        expected.name = 'MessageWithFields'
+
+        # Expected field order follows field numbers: 10, 20, 30.
+        expected.fields = [
+            descriptor.describe_field(
+                MessageWithFields.field_by_name('field1')),
+            descriptor.describe_field(
+                MessageWithFields.field_by_name('field3')),
+            descriptor.describe_field(
+                MessageWithFields.field_by_name('field2')),
+        ]
+
+        described = descriptor.describe_message(MessageWithFields)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testNestedEnum(self):
+        class MessageWithEnum(messages.Message):
+
+            class Mood(messages.Enum):
+                GOOD = 1
+                BAD = 2
+                UGLY = 3
+
+            class Music(messages.Enum):
+                CLASSIC = 1
+                JAZZ = 2
+                BLUES = 3
+
+        expected = descriptor.MessageDescriptor()
+        expected.name = 'MessageWithEnum'
+
+        expected.enum_types = [descriptor.describe_enum(MessageWithEnum.Mood),
+                               descriptor.describe_enum(MessageWithEnum.Music)]
+
+        described = descriptor.describe_message(MessageWithEnum)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testNestedMessage(self):
+        class MessageWithMessage(messages.Message):
+
+            class Nesty(messages.Message):
+                pass
+
+        expected = descriptor.MessageDescriptor()
+        expected.name = 'MessageWithMessage'
+
+        expected.message_types = [
+            descriptor.describe_message(MessageWithMessage.Nesty)]
+
+        described = descriptor.describe_message(MessageWithMessage)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+
+class DescribeFileTest(test_util.TestCase):
+    """Test describing modules."""
+
+    def LoadModule(self, module_name, source):
+        """Build a synthetic module by exec-ing source into a fresh module.
+
+        Args:
+          module_name: Name to give the created module.
+          source: Python source to execute; 'messages' is pre-bound.
+
+        Returns:
+          A module object carrying every name the source defined.
+        """
+        result = {
+            '__name__': module_name,
+            'messages': messages,
+        }
+        exec(source, result)
+
+        # Copy everything the exec produced onto a real module object.
+        module = types.ModuleType(module_name)
+        for name, value in result.items():
+            setattr(module, name, value)
+
+        return module
+
+    def testEmptyModule(self):
+        """Test describing an empty file."""
+        module = types.ModuleType('my.package.name')
+
+        expected = descriptor.FileDescriptor()
+        expected.package = 'my.package.name'
+
+        described = descriptor.describe_file(module)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testNoPackageName(self):
+        """Test describing a module with no module name."""
+        module = types.ModuleType('')
+
+        # An empty module name yields a descriptor with package unset.
+        expected = descriptor.FileDescriptor()
+
+        described = descriptor.describe_file(module)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testPackageName(self):
+        """Test using the 'package' module attribute."""
+        module = types.ModuleType('my.module.name')
+        module.package = 'my.package.name'
+
+        expected = descriptor.FileDescriptor()
+        expected.package = 'my.package.name'
+
+        described = descriptor.describe_file(module)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testMain(self):
+        """Test deriving the package for a __main__ module from its file."""
+        module = types.ModuleType('__main__')
+        module.__file__ = '/blim/blam/bloom/my_package.py'
+
+        expected = descriptor.FileDescriptor()
+        expected.package = 'my_package'
+
+        described = descriptor.describe_file(module)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testMessages(self):
+        """Test that messages are described."""
+        module = self.LoadModule('my.package',
+                                 'class Message1(messages.Message): pass\n'
+                                 'class Message2(messages.Message): pass\n')
+
+        message1 = descriptor.MessageDescriptor()
+        message1.name = 'Message1'
+
+        message2 = descriptor.MessageDescriptor()
+        message2.name = 'Message2'
+
+        expected = descriptor.FileDescriptor()
+        expected.package = 'my.package'
+        expected.message_types = [message1, message2]
+
+        described = descriptor.describe_file(module)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+    def testEnums(self):
+        """Test that enums are described."""
+        module = self.LoadModule('my.package',
+                                 'class Enum1(messages.Enum): pass\n'
+                                 'class Enum2(messages.Enum): pass\n')
+
+        enum1 = descriptor.EnumDescriptor()
+        enum1.name = 'Enum1'
+
+        enum2 = descriptor.EnumDescriptor()
+        enum2.name = 'Enum2'
+
+        expected = descriptor.FileDescriptor()
+        expected.package = 'my.package'
+        expected.enum_types = [enum1, enum2]
+
+        described = descriptor.describe_file(module)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+
+class DescribeFileSetTest(test_util.TestCase):
+    """Test describing multiple modules."""
+
+    def testNoModules(self):
+        """Test what happens when no modules provided."""
+        described = descriptor.describe_file_set([])
+        described.check_initialized()
+        # The described FileSet.files will be None.
+        self.assertEquals(descriptor.FileSet(), described)
+
+    def testWithModules(self):
+        """Test describing a file set built from two distinct modules."""
+        # Previously both modules were named 'package1' and file2 was built
+        # but never used, so the two-module case was never really exercised.
+        modules = [types.ModuleType('package1'), types.ModuleType('package2')]
+
+        file1 = descriptor.FileDescriptor()
+        file1.package = 'package1'
+        file2 = descriptor.FileDescriptor()
+        file2.package = 'package2'
+
+        expected = descriptor.FileSet()
+        # One file descriptor per module, in input order.
+        expected.files = [file1, file2]
+
+        described = descriptor.describe_file_set(modules)
+        described.check_initialized()
+        self.assertEquals(expected, described)
+
+
+class DescribeTest(test_util.TestCase):
+    """Tests for the generic descriptor.describe dispatcher."""
+
+    def testModule(self):
+        self.assertEquals(descriptor.describe_file(test_util),
+                          descriptor.describe(test_util))
+
+    def testField(self):
+        self.assertEquals(
+            descriptor.describe_field(test_util.NestedMessage.a_value),
+            descriptor.describe(test_util.NestedMessage.a_value))
+
+    def testEnumValue(self):
+        self.assertEquals(
+            descriptor.describe_enum_value(
+                test_util.OptionalMessage.SimpleEnum.VAL1),
+            descriptor.describe(test_util.OptionalMessage.SimpleEnum.VAL1))
+
+    def testMessage(self):
+        self.assertEquals(descriptor.describe_message(test_util.NestedMessage),
+                          descriptor.describe(test_util.NestedMessage))
+
+    def testEnum(self):
+        self.assertEquals(
+            descriptor.describe_enum(test_util.OptionalMessage.SimpleEnum),
+            descriptor.describe(test_util.OptionalMessage.SimpleEnum))
+
+    def testUndescribable(self):
+        # Values that are not modules, fields, enums or message classes
+        # must yield None.
+        class NonService(object):
+
+            def fn(self):
+                pass
+
+        for value in (NonService,
+                      NonService.fn,
+                      1,
+                      'string',
+                      1.2,
+                      None):
+            self.assertEquals(None, descriptor.describe(value))
+
+
+class ModuleFinderTest(test_util.TestCase):
+    """Tests for descriptor.import_descriptor_loader name resolution."""
+
+    def testFindMessage(self):
+        self.assertEquals(
+            descriptor.describe_message(descriptor.FileSet),
+            descriptor.import_descriptor_loader(
+                'apitools.base.protorpclite.descriptor.FileSet'))
+
+    def testFindField(self):
+        self.assertEquals(
+            descriptor.describe_field(descriptor.FileSet.files),
+            descriptor.import_descriptor_loader(
+                'apitools.base.protorpclite.descriptor.FileSet.files'))
+
+    def testFindEnumValue(self):
+        self.assertEquals(
+            descriptor.describe_enum_value(
+                test_util.OptionalMessage.SimpleEnum.VAL1),
+            descriptor.import_descriptor_loader(
+                'apitools.base.protorpclite.test_util.'
+                'OptionalMessage.SimpleEnum.VAL1'))
+
+
+class DescriptorLibraryTest(test_util.TestCase):
+    """Tests for DescriptorLibrary.lookup_package."""
+
+    def setUp(self):
+        # A message descriptor cached under two names, neither of which has
+        # a resolvable enclosing file.
+        self.packageless = descriptor.MessageDescriptor()
+        self.packageless.name = 'Packageless'
+        self.library = descriptor.DescriptorLibrary(
+            descriptors={
+                'not.real.Packageless': self.packageless,
+                'Packageless': self.packageless,
+            })
+
+    def testLookupPackage(self):
+        # Real importable modules resolve to themselves as packages.
+        self.assertEquals('csv', self.library.lookup_package('csv'))
+        self.assertEquals(
+            'apitools.base.protorpclite',
+            self.library.lookup_package('apitools.base.protorpclite'))
+
+    def testLookupNonPackages(self):
+        lib = 'apitools.base.protorpclite.descriptor.DescriptorLibrary'
+        for name in ('', 'a', lib):
+            self.assertRaisesWithRegexpMatch(
+                messages.DefinitionNotFoundError,
+                'Could not find definition for %s' % name,
+                self.library.lookup_package, name)
+
+    def testNoPackage(self):
+        # Walking up from 'not.real.Packageless' fails at 'not.real'.
+        self.assertRaisesWithRegexpMatch(
+            messages.DefinitionNotFoundError,
+            'Could not find definition for not.real',
+            self.library.lookup_package, 'not.real.Packageless')
+
+        # An undotted cached name exhausts the walk and yields None.
+        self.assertEquals(None, self.library.lookup_package('Packageless'))
+
+
+# Run the full test suite when executed directly.
+if __name__ == '__main__':
+    unittest2.main()
diff --git a/apitools/base/protorpclite/message_types.py b/apitools/base/protorpclite/message_types.py
new file mode 100644
index 0000000..1bbac38
--- /dev/null
+++ b/apitools/base/protorpclite/message_types.py
@@ -0,0 +1,119 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Simple protocol message types.
+
+Includes new message and field types that are outside what is defined by the
+protocol buffers standard.
+"""
+import datetime
+
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import util
+
+__all__ = [
+    'DateTimeField',
+    'DateTimeMessage',
+    'VoidMessage',
+]
+
+
+class VoidMessage(messages.Message):
+    """Empty message with no fields."""
+
+
+class DateTimeMessage(messages.Message):
+    """Message to store/transmit a DateTime.
+
+    Fields:
+      milliseconds: Milliseconds since Jan 1st 1970 local time.
+      time_zone_offset: Optional time zone offset, in minutes from UTC.
+    """
+    # Required: every serialized datetime carries its millisecond timestamp.
+    milliseconds = messages.IntegerField(1, required=True)
+    # Unset for naive datetimes; present only for time-zone-aware values.
+    time_zone_offset = messages.IntegerField(2)
+
+
+class DateTimeField(messages.MessageField):
+    """Field definition for datetime values.
+
+    Stores a python datetime object as a field.  If time zone information is
+    included in the datetime object, it will be included in
+    the encoded data when this is encoded/decoded.
+    """
+
+    type = datetime.datetime
+
+    message_type = DateTimeMessage
+
+    @util.positional(3)
+    def __init__(self,
+                 number,
+                 **kwargs):
+        super(DateTimeField, self).__init__(self.message_type,
+                                            number,
+                                            **kwargs)
+
+    def value_from_message(self, message):
+        """Convert DateTimeMessage to a datetime.
+
+        Args:
+          message: A DateTimeMessage instance.
+
+        Returns:
+          A datetime instance; time zone aware when the message carries a
+          time_zone_offset, otherwise naive UTC.
+        """
+        message = super(DateTimeField, self).value_from_message(message)
+        if message.time_zone_offset is None:
+            return datetime.datetime.utcfromtimestamp(
+                message.milliseconds / 1000.0)
+
+        # Need to subtract the time zone offset, because when we call
+        # datetime.fromtimestamp, it will add the time zone offset to the
+        # value we pass.
+        milliseconds = (message.milliseconds -
+                        60000 * message.time_zone_offset)
+
+        timezone = util.TimeZoneOffset(message.time_zone_offset)
+        return datetime.datetime.fromtimestamp(milliseconds / 1000.0,
+                                               tz=timezone)
+
+    def value_to_message(self, value):
+        """Convert a datetime to a DateTimeMessage.
+
+        Args:
+          value: A datetime instance, optionally time zone aware.
+
+        Returns:
+          A DateTimeMessage holding milliseconds since Jan 1st 1970 local
+          time, plus the offset in minutes when the datetime is aware.
+        """
+        value = super(DateTimeField, self).value_to_message(value)
+        # First, determine the delta from the epoch, so we can fill in
+        # DateTimeMessage's milliseconds field.
+        if value.tzinfo is None:
+            time_zone_offset = 0
+            local_epoch = datetime.datetime.utcfromtimestamp(0)
+        else:
+            time_zone_offset = util.total_seconds(
+                value.tzinfo.utcoffset(value))
+            # Determine Jan 1, 1970 local time.
+            local_epoch = datetime.datetime.fromtimestamp(-time_zone_offset,
+                                                          tz=value.tzinfo)
+        delta = value - local_epoch
+
+        # Create and fill in the DateTimeMessage, including time zone if
+        # one was specified.
+        message = DateTimeMessage()
+        message.milliseconds = int(util.total_seconds(delta) * 1000)
+        if value.tzinfo is not None:
+            utc_offset = value.tzinfo.utcoffset(value)
+            if utc_offset is not None:
+                # Reuse the utcoffset() result computed above instead of
+                # calling into the (possibly non-trivial) tzinfo again.
+                message.time_zone_offset = int(
+                    util.total_seconds(utc_offset) / 60)
+
+        return message
diff --git a/apitools/base/protorpclite/message_types_test.py b/apitools/base/protorpclite/message_types_test.py
new file mode 100644
index 0000000..8a5afdb
--- /dev/null
+++ b/apitools/base/protorpclite/message_types_test.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Tests for apitools.base.protorpclite.message_types."""
+import datetime
+import unittest
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import test_util
+from apitools.base.protorpclite import util
+
+
+class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
+                          test_util.TestCase):
+
+    # Module whose public interface the shared ModuleInterfaceTest
+    # mixin verifies.
+    MODULE = message_types
+
+
+class DateTimeFieldTest(test_util.TestCase):
+    """Tests for message_types.DateTimeField value conversions."""
+
+    def testValueToMessage(self):
+        # A naive datetime encodes as milliseconds since the epoch with
+        # no time_zone_offset set.
+        field = message_types.DateTimeField(1)
+        message = field.value_to_message(
+            datetime.datetime(2033, 2, 4, 11, 22, 10))
+        self.assertEqual(
+            message_types.DateTimeMessage(milliseconds=1991128930000), message)
+
+    def testValueToMessageBadValue(self):
+        # Non-datetime values are rejected with EncodeError.
+        field = message_types.DateTimeField(1)
+        self.assertRaisesWithRegexpMatch(
+            messages.EncodeError,
+            'Expected type datetime, got int: 20',
+            field.value_to_message, 20)
+
+    def testValueToMessageWithTimeZone(self):
+        # An aware datetime additionally carries its UTC offset in
+        # minutes (here +10 hours -> 600).
+        time_zone = util.TimeZoneOffset(60 * 10)
+        field = message_types.DateTimeField(1)
+        message = field.value_to_message(
+            datetime.datetime(2033, 2, 4, 11, 22, 10, tzinfo=time_zone))
+        self.assertEqual(
+            message_types.DateTimeMessage(milliseconds=1991128930000,
+                                          time_zone_offset=600),
+            message)
+
+    def testValueFromMessage(self):
+        # A milliseconds-only message decodes to a naive datetime.
+        message = message_types.DateTimeMessage(milliseconds=1991128000000)
+        field = message_types.DateTimeField(1)
+        timestamp = field.value_from_message(message)
+        self.assertEqual(datetime.datetime(2033, 2, 4, 11, 6, 40),
+                         timestamp)
+
+    def testValueFromMessageBadValue(self):
+        # Only DateTimeMessage instances may be decoded.
+        field = message_types.DateTimeField(1)
+        self.assertRaisesWithRegexpMatch(
+            messages.DecodeError,
+            'Expected type DateTimeMessage, got VoidMessage: <VoidMessage>',
+            field.value_from_message, message_types.VoidMessage())
+
+    def testValueFromMessageWithTimeZone(self):
+        # A message with time_zone_offset decodes to an aware datetime
+        # in that zone.
+        message = message_types.DateTimeMessage(milliseconds=1991128000000,
+                                                time_zone_offset=300)
+        field = message_types.DateTimeField(1)
+        timestamp = field.value_from_message(message)
+        time_zone = util.TimeZoneOffset(60 * 5)
+        self.assertEqual(
+            datetime.datetime(2033, 2, 4, 11, 6, 40, tzinfo=time_zone),
+            timestamp)
+
+
+# Allow running the tests in this module directly.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/apitools/base/protorpclite/messages.py b/apitools/base/protorpclite/messages.py
new file mode 100644
index 0000000..df59d18
--- /dev/null
+++ b/apitools/base/protorpclite/messages.py
@@ -0,0 +1,2009 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# pylint: disable=too-many-lines
+
+"""Stand-alone implementation of in memory protocol messages.
+
+Public Classes:
+  Enum: Represents an enumerated type.
+  Variant: Hint for wire format to determine how to serialize.
+  Message: Base class for user defined messages.
+  IntegerField: Field for integer values.
+  FloatField: Field for float values.
+  BooleanField: Field for boolean values.
+  BytesField: Field for binary string values.
+  StringField: Field for UTF-8 string values.
+  MessageField: Field for other message type values.
+  EnumField: Field for enumerated type values.
+
+Public Exceptions (indentation indicates class hierarchy):
+  EnumDefinitionError: Raised when enumeration is incorrectly defined.
+  FieldDefinitionError: Raised when field is incorrectly defined.
+    InvalidVariantError: Raised when variant is not compatible with field type.
+    InvalidDefaultError: Raised when default is not compatible with field.
+    InvalidNumberError: Raised when field number is out of range or reserved.
+  MessageDefinitionError: Raised when message is incorrectly defined.
+    DuplicateNumberError: Raised when field has duplicate number with another.
+  ValidationError: Raised when a message or field is not valid.
+  DefinitionNotFoundError: Raised when definition not found.
+"""
+import types
+import weakref
+
+import six
+
+from apitools.base.protorpclite import util
+
+__all__ = [
+    'MAX_ENUM_VALUE',
+    'MAX_FIELD_NUMBER',
+    'FIRST_RESERVED_FIELD_NUMBER',
+    'LAST_RESERVED_FIELD_NUMBER',
+
+    'Enum',
+    'Field',
+    'FieldList',
+    'Variant',
+    'Message',
+    'IntegerField',
+    'FloatField',
+    'BooleanField',
+    'BytesField',
+    'StringField',
+    'MessageField',
+    'EnumField',
+    'find_definition',
+
+    'Error',
+    'DecodeError',
+    'EncodeError',
+    'EnumDefinitionError',
+    'FieldDefinitionError',
+    'InvalidVariantError',
+    'InvalidDefaultError',
+    'InvalidNumberError',
+    'MessageDefinitionError',
+    'DuplicateNumberError',
+    'ValidationError',
+    'DefinitionNotFoundError',
+]
+
+# pylint:disable=attribute-defined-outside-init
+# pylint:disable=protected-access
+
+
+# TODO(rafek): Add extended module test to ensure all exceptions
+# in services extends Error.
+Error = util.Error
+
+
+class EnumDefinitionError(Error):
+    """Enumeration definition error."""
+
+
+class FieldDefinitionError(Error):
+    """Field definition error."""
+
+
+class InvalidVariantError(FieldDefinitionError):
+    """Invalid variant provided to field."""
+
+
+class InvalidDefaultError(FieldDefinitionError):
+    """Invalid default provided to field."""
+
+
+class InvalidNumberError(FieldDefinitionError):
+    """Invalid number provided to field."""
+
+
+class MessageDefinitionError(Error):
+    """Message definition error."""
+
+
+# NOTE(review): the module docstring nests this under
+# MessageDefinitionError, but it subclasses Error directly -- confirm
+# which hierarchy is intended.
+class DuplicateNumberError(Error):
+    """Duplicate number assigned to field."""
+
+
+class DefinitionNotFoundError(Error):
+    """Raised when definition is not found."""
+
+
+class DecodeError(Error):
+    """Error found decoding message from encoded form."""
+
+
+class EncodeError(Error):
+    """Error found when encoding message."""
+
+
+class ValidationError(Error):
+    """Invalid value for message error."""
+
+    def __str__(self):
+        """Returns the base Error string representation.
+
+        Attributes such as message_name may be attached to instances
+        elsewhere (see Message.check_initialized), but this
+        implementation simply delegates to Error.__str__.
+        """
+        return Error.__str__(self)
+
+
+# Attributes that are reserved by a class definition that
+# may not be used by either Enum or Message class definitions.
+_RESERVED_ATTRIBUTE_NAMES = frozenset(
+    ['__module__', '__doc__', '__qualname__'])
+
+_POST_INIT_FIELD_ATTRIBUTE_NAMES = frozenset(
+    ['name',
+     '_message_definition',
+     '_MessageField__type',
+     '_EnumField__type',
+     '_EnumField__resolved_default'])
+
+_POST_INIT_ATTRIBUTE_NAMES = frozenset(
+    ['_message_definition'])
+
+# Maximum enumeration value as defined by the protocol buffers standard.
+# All enum values must be less than or equal to this value.
+MAX_ENUM_VALUE = (2 ** 29) - 1
+
+# Maximum field number as defined by the protocol buffers standard.
+# All field numbers must be less than or equal to this value.
+MAX_FIELD_NUMBER = (2 ** 29) - 1
+
+# Field numbers between 19000 and 19999 inclusive are reserved by the
+# protobuf protocol and may not be used by fields.
+FIRST_RESERVED_FIELD_NUMBER = 19000
+LAST_RESERVED_FIELD_NUMBER = 19999
+
+
+# pylint: disable=no-value-for-parameter
+class _DefinitionClass(type):
+    """Base meta-class used for definition meta-classes.
+
+    The Enum and Message definition classes share some basic functionality.
+    Both of these classes may be contained by a Message definition.  After
+    initialization, neither class may have attributes changed
+    except for the protected _message_definition attribute, and that attribute
+    may change only once.
+    """
+
+    __initialized = False  # pylint:disable=invalid-name
+
+    def __init__(cls, name, bases, dct):
+        """Constructor."""
+        type.__init__(cls, name, bases, dct)
+        # Base classes may never be initialized.
+        if cls.__bases__ != (object,):
+            cls.__initialized = True
+
+    def message_definition(cls):
+        """Get outer Message definition that contains this definition.
+
+        Returns:
+          Containing Message definition if definition is contained within one,
+          else None.
+        """
+        try:
+            # _message_definition is stored as a weakref (see
+            # _MessageClass.__init__); calling it dereferences the ref.
+            return cls._message_definition()
+        except AttributeError:
+            return None
+
+    def __setattr__(cls, name, value):
+        """Overridden to avoid setting variables after init.
+
+        Setting attributes on a class must work during the period of
+        initialization to set the enumeration value class variables and
+        build the name/number maps. Once __init__ has set the
+        __initialized flag to True, setting any further values on the
+        class is prohibited. The class is in effect frozen.
+
+        Args:
+          name: Name of value to set.
+          value: Value to set.
+
+        Raises:
+          AttributeError: If the class is already initialized and name
+            is not one of the allowed post-init attributes.
+        """
+        if cls.__initialized and name not in _POST_INIT_ATTRIBUTE_NAMES:
+            raise AttributeError('May not change values: %s' % name)
+        else:
+            type.__setattr__(cls, name, value)
+
+    def __delattr__(cls, name):
+        """Overridden so that cannot delete variables on definition classes."""
+        raise TypeError('May not delete attributes on definition class')
+
+    def definition_name(cls):
+        """Helper method for creating definition name.
+
+        Names will be generated to include the classes package name,
+        scope (if the class is nested in another definition) and class
+        name.
+
+        By default, the package name for a definition is derived from
+        its module name. However, this value can be overridden by
+        placing a 'package' attribute in the module that contains the
+        definition class. For example:
+
+          package = 'some.alternate.package'
+
+          class MyMessage(Message):
+            ...
+
+          >>> MyMessage.definition_name()
+          some.alternate.package.MyMessage
+
+        Returns:
+          Dot-separated fully qualified name of definition.
+
+        """
+        outer_definition_name = cls.outer_definition_name()
+        if outer_definition_name is None:
+            return six.text_type(cls.__name__)
+        return u'%s.%s' % (outer_definition_name, cls.__name__)
+
+    def outer_definition_name(cls):
+        """Helper method for creating outer definition name.
+
+        Returns:
+          If definition is nested, will return the outer definitions
+          name, else the package name.
+
+        """
+        outer_definition = cls.message_definition()
+        if not outer_definition:
+            return util.get_package_for_module(cls.__module__)
+        return outer_definition.definition_name()
+
+    def definition_package(cls):
+        """Helper method for creating the package of a definition.
+
+        Returns:
+          Name of package that definition belongs to.
+        """
+        outer_definition = cls.message_definition()
+        if not outer_definition:
+            return util.get_package_for_module(cls.__module__)
+        return outer_definition.definition_package()
+
+
+class _EnumClass(_DefinitionClass):
+    """Meta-class used for defining the Enum base class.
+
+    Meta-class enables very specific behavior for any defined Enum
+    class.  All attributes defined on an Enum sub-class must be integers.
+    Each attribute defined on an Enum sub-class is translated
+    into an instance of that sub-class, with the name of the attribute
+    as its name, and the number provided as its value.  It also ensures
+    that only one level of Enum class hierarchy is possible.  In other
+    words it is not possible to declare sub-classes of sub-classes of
+    Enum.
+
+    This class also defines some functions in order to restrict the
+    behavior of the Enum class and its sub-classes.  It is not possible
+    to change the behavior of the Enum class in later classes since
+    any new classes may be defined with only integer values, and no methods.
+    """
+
+    def __init__(cls, name, bases, dct):
+        """Validate and build the new Enum sub-class.
+
+        Each integer-valued class attribute is replaced by an instance
+        of the new class and recorded in the name and number maps.
+
+        Raises:
+          EnumDefinitionError: If the class hierarchy, attribute types,
+            or value ranges are invalid, or a value is duplicated.
+        """
+        # Can only define one level of sub-classes below Enum.
+        if not (bases == (object,) or bases == (Enum,)):
+            raise EnumDefinitionError(
+                'Enum type %s may only inherit from Enum' % name)
+
+        cls.__by_number = {}
+        cls.__by_name = {}
+
+        # Enum base class does not need to be initialized or locked.
+        if bases != (object,):
+            # Replace integer with number.
+            for attribute, value in dct.items():
+
+                # Module will be in every enum class.
+                if attribute in _RESERVED_ATTRIBUTE_NAMES:
+                    continue
+
+                # Reject anything that is not an int.
+                if not isinstance(value, six.integer_types):
+                    raise EnumDefinitionError(
+                        'May only use integers in Enum definitions.  '
+                        'Found: %s = %s' %
+                        (attribute, value))
+
+                # Protocol buffer standard recommends non-negative values.
+                # Reject negative values.
+                if value < 0:
+                    raise EnumDefinitionError(
+                        'Must use non-negative enum values.  Found: %s = %d' %
+                        (attribute, value))
+
+                if value > MAX_ENUM_VALUE:
+                    raise EnumDefinitionError(
+                        'Must use enum values less than or equal %d.  '
+                        'Found: %s = %d' %
+                        (MAX_ENUM_VALUE, attribute, value))
+
+                if value in cls.__by_number:
+                    raise EnumDefinitionError(
+                        'Value for %s = %d is already defined: %s' %
+                        (attribute, value, cls.__by_number[value].name))
+
+                # Create enum instance and list in new Enum type.
+                # Uses object.__new__ directly so Enum.__new__'s lookup
+                # behavior is bypassed during class construction.
+                instance = object.__new__(cls)
+                # pylint:disable=non-parent-init-called
+                cls.__init__(instance, attribute, value)
+                cls.__by_name[instance.name] = instance
+                cls.__by_number[instance.number] = instance
+                setattr(cls, attribute, instance)
+
+        _DefinitionClass.__init__(cls, name, bases, dct)
+
+    def __iter__(cls):
+        """Iterate over all values of enum.
+
+        Yields:
+          Enumeration instances of the Enum class in arbitrary order.
+        """
+        return iter(cls.__by_number.values())
+
+    def names(cls):
+        """Get all names for Enum.
+
+        Returns:
+          An iterator for names of the enumeration in arbitrary order.
+        """
+        return cls.__by_name.keys()
+
+    def numbers(cls):
+        """Get all numbers for Enum.
+
+        Returns:
+          An iterator for all numbers of the enumeration in arbitrary order.
+        """
+        return cls.__by_number.keys()
+
+    def lookup_by_name(cls, name):
+        """Look up Enum by name.
+
+        Args:
+          name: Name of enum to find.
+
+        Returns:
+          Enum sub-class instance of that value.
+
+        Raises:
+          KeyError: If no value with that name is defined.
+        """
+        return cls.__by_name[name]
+
+    def lookup_by_number(cls, number):
+        """Look up Enum by number.
+
+        Args:
+          number: Number of enum to find.
+
+        Returns:
+          Enum sub-class instance of that value.
+
+        Raises:
+          KeyError: If no value with that number is defined.
+        """
+        return cls.__by_number[number]
+
+    def __len__(cls):
+        # Number of distinct values defined on the enum.
+        return len(cls.__by_name)
+
+
+class Enum(six.with_metaclass(_EnumClass, object)):
+    """Base class for all enumerated types."""
+
+    # Instances carry only their symbolic name and numeric value.
+    __slots__ = set(('name', 'number'))
+
+    def __new__(cls, index):
+        """Acts as look-up routine after class is initialized.
+
+        The purpose of overriding __new__ is to provide a way to treat
+        Enum subclasses as casting types, similar to how the int type
+        functions.  A program can pass a string or an integer and this
+        method with "convert" that value in to an appropriate Enum instance.
+
+        Args:
+          index: Name or number to look up.  During initialization
+            this is always the name of the new enum value.
+
+        Returns:
+          The existing Enum instance matching index.
+
+        Raises:
+          TypeError: When an inappropriate index value is passed provided.
+        """
+        # If is enum type of this class, return it.
+        if isinstance(index, cls):
+            return index
+
+        # If number, look up by number.
+        if isinstance(index, six.integer_types):
+            try:
+                return cls.lookup_by_number(index)
+            except KeyError:
+                pass
+
+        # If name, look up by name.
+        if isinstance(index, six.string_types):
+            try:
+                return cls.lookup_by_name(index)
+            except KeyError:
+                pass
+
+        raise TypeError('No such value for %s in Enum %s' %
+                        (index, cls.__name__))
+
+    def __init__(self, name, number=None):
+        """Initialize new Enum instance.
+
+        Since this should only be called during class initialization any
+        calls that happen after the class is frozen raises an exception.
+        """
+        # Immediately return if __init__ was called after _Enum.__init__().
+        # It means that casting operator version of the class constructor
+        # is being used.
+        if getattr(type(self), '_DefinitionClass__initialized'):
+            return
+        # object.__setattr__ bypasses the __setattr__ override below.
+        object.__setattr__(self, 'name', name)
+        object.__setattr__(self, 'number', number)
+
+    def __setattr__(self, name, value):
+        # Enum instances are immutable once created.
+        raise TypeError('May not change enum values')
+
+    def __str__(self):
+        return self.name
+
+    def __int__(self):
+        return self.number
+
+    def __repr__(self):
+        return '%s(%s, %d)' % (type(self).__name__, self.name, self.number)
+
+    def __reduce__(self):
+        """Enable pickling.
+
+        Returns:
+          A 2-tuple containing the class and __new__ args to be used
+          for restoring a pickled instance.
+
+        """
+        return self.__class__, (self.number,)
+
+    def __cmp__(self, other):
+        """Order is by number.
+
+        NOTE: cmp() is a Python 2 builtin; Python 3 ignores __cmp__ and
+        uses the rich comparison methods below instead.
+        """
+        if isinstance(other, type(self)):
+            return cmp(self.number, other.number)
+        return NotImplemented
+
+    def __lt__(self, other):
+        """Order is by number."""
+        if isinstance(other, type(self)):
+            return self.number < other.number
+        return NotImplemented
+
+    def __le__(self, other):
+        """Order is by number."""
+        if isinstance(other, type(self)):
+            return self.number <= other.number
+        return NotImplemented
+
+    def __eq__(self, other):
+        """Order is by number."""
+        if isinstance(other, type(self)):
+            return self.number == other.number
+        return NotImplemented
+
+    def __ne__(self, other):
+        """Order is by number."""
+        if isinstance(other, type(self)):
+            return self.number != other.number
+        return NotImplemented
+
+    def __ge__(self, other):
+        """Order is by number."""
+        if isinstance(other, type(self)):
+            return self.number >= other.number
+        return NotImplemented
+
+    def __gt__(self, other):
+        """Order is by number."""
+        if isinstance(other, type(self)):
+            return self.number > other.number
+        return NotImplemented
+
+    def __hash__(self):
+        """Hash by number."""
+        return hash(self.number)
+
+    @classmethod
+    def to_dict(cls):
+        """Make dictionary version of enumerated class.
+
+        Dictionary created this way can be used with def_num.
+
+        Returns:
+          A dict (name) -> number
+        """
+        return dict((item.name, item.number) for item in iter(cls))
+
+    @staticmethod
+    def def_enum(dct, name):
+        """Define enum class from dictionary.
+
+        Args:
+          dct: Dictionary of enumerated values for type.
+          name: Name of enum.
+
+        Returns:
+          A new Enum sub-class built from dct.
+        """
+        return type(name, (Enum,), dct)
+
+
+# TODO(rafek): Determine to what degree this enumeration should be compatible
+# with FieldDescriptor.Type in https://github.com/google/protobuf.
+class Variant(Enum):
+    """Wire format variant.
+
+    Used by the 'protobuf' wire format to determine how to transmit
+    a single piece of data.  May be used by other formats.
+
+    See: http://code.google.com/apis/protocolbuffers/docs/encoding.html
+
+    Values:
+      DOUBLE: 64-bit floating point number.
+      FLOAT: 32-bit floating point number.
+      INT64: 64-bit signed integer.
+      UINT64: 64-bit unsigned integer.
+      INT32: 32-bit signed integer.
+      BOOL: Boolean value (True or False).
+      STRING: String of UTF-8 encoded text.
+      MESSAGE: Embedded message as byte string.
+      BYTES: String of 8-bit bytes.
+      UINT32: 32-bit unsigned integer.
+      ENUM: Enum value as integer.
+      SINT32: 32-bit signed integer.  Uses "zig-zag" encoding.
+      SINT64: 64-bit signed integer.  Uses "zig-zag" encoding.
+    """
+    # Gaps in the numbering (6, 7, 10, 15, 16) presumably correspond to
+    # protobuf descriptor types not supported here -- see TODO above.
+    DOUBLE = 1
+    FLOAT = 2
+    INT64 = 3
+    UINT64 = 4
+    INT32 = 5
+    BOOL = 8
+    STRING = 9
+    MESSAGE = 11
+    BYTES = 12
+    UINT32 = 13
+    ENUM = 14
+    SINT32 = 17
+    SINT64 = 18
+
+
+class _MessageClass(_DefinitionClass):
+    """Meta-class used for defining the Message base class.
+
+    For more details about Message classes, see the Message class docstring.
+    Information contained there may help understanding this class.
+
+    Meta-class enables very specific behavior for any defined Message
+    class. All attributes defined on an Message sub-class must be
+    field instances, Enum class definitions or other Message class
+    definitions. Each field attribute defined on an Message sub-class
+    is added to the set of field definitions and the attribute is
+    translated in to a slot. It also ensures that only one level of
+    Message class hierarchy is possible. In other words it is not
+    possible to declare sub-classes of sub-classes of Message.
+
+    This class also defines some functions in order to restrict the
+    behavior of the Message class and its sub-classes. It is not
+    possible to change the behavior of the Message class in later
+    classes since any new classes may be defined with only field,
+    Enums and Messages, and no methods.
+
+    """
+
+    # pylint:disable=bad-mcs-classmethod-argument
+    def __new__(cls, name, bases, dct):
+        """Create new Message class instance.
+
+        The __new__ method of the _MessageClass type is overridden so as to
+        allow the translation of Field instances to slots.
+
+        Raises:
+          MessageDefinitionError: If the class hierarchy is invalid or a
+            non-field attribute is present.
+          DuplicateNumberError: If two fields share a field number.
+        """
+        by_number = {}
+        by_name = {}
+
+        variant_map = {}  # pylint:disable=unused-variable
+
+        if bases != (object,):
+            # Can only define one level of sub-classes below Message.
+            if bases != (Message,):
+                raise MessageDefinitionError(
+                    'Message types may only inherit from Message')
+
+            enums = []
+            messages = []
+            # Must not use iteritems because this loop will change the state of
+            # dct.
+            for key, field in dct.items():
+
+                if key in _RESERVED_ATTRIBUTE_NAMES:
+                    continue
+
+                if isinstance(field, type) and issubclass(field, Enum):
+                    enums.append(key)
+                    continue
+
+                if (isinstance(field, type) and
+                        issubclass(field, Message) and
+                        field is not Message):
+                    messages.append(key)
+                    continue
+
+                # Reject anything that is not a field.  A bare Field
+                # instance (rather than a concrete sub-class) is also
+                # rejected here.
+                # pylint:disable=unidiomatic-typecheck
+                if type(field) is Field or not isinstance(field, Field):
+                    raise MessageDefinitionError(
+                        'May only use fields in message definitions.  '
+                        'Found: %s = %s' %
+                        (key, field))
+
+                if field.number in by_number:
+                    raise DuplicateNumberError(
+                        'Field with number %d declared more than once in %s' %
+                        (field.number, name))
+
+                field.name = key
+
+                # Place in name and number maps.
+                by_name[key] = field
+                by_number[field.number] = field
+
+            # Add enums if any exist.
+            if enums:
+                dct['__enums__'] = sorted(enums)
+
+            # Add messages if any exist.
+            if messages:
+                dct['__messages__'] = sorted(messages)
+
+        # Stored under the name-mangled keys that Message's methods
+        # (all_fields, field_by_name, field_by_number) read.
+        dct['_Message__by_number'] = by_number
+        dct['_Message__by_name'] = by_name
+
+        return _DefinitionClass.__new__(cls, name, bases, dct)
+
+    def __init__(cls, name, bases, dct):
+        """Initializer required to assign references to new class."""
+        if bases != (object,):
+            # Back-references to the containing message are stored as
+            # weakrefs; _DefinitionClass.message_definition dereferences
+            # them.
+            for v in dct.values():
+                if isinstance(v, _DefinitionClass) and v is not Message:
+                    v._message_definition = weakref.ref(cls)
+
+            for field in cls.all_fields():
+                field._message_definition = weakref.ref(cls)
+
+        _DefinitionClass.__init__(cls, name, bases, dct)
+
+
+class Message(six.with_metaclass(_MessageClass, object)):
+    """Base class for user defined message objects.
+
+    Used to define messages for efficient transmission across network or
+    process space.  Messages are defined using the field classes (IntegerField,
+    FloatField, EnumField, etc.).
+
+    Messages are more restricted than normal classes in that they may
+    only contain field attributes and other Message and Enum
+    definitions. These restrictions are in place because the structure
+    of the Message class is intended to itself be transmitted
+    across network or process space and used directly by clients or
+    even other servers. As such methods and non-field attributes could
+    not be transmitted with the structural information causing
+    discrepancies between different languages and implementations.
+
+    Initialization and validation:
+
+      A Message object is considered to be initialized if it has all required
+      fields and any nested messages are also initialized.
+
+      Calling 'check_initialized' will raise a ValidationException if it is not
+      initialized; 'is_initialized' returns a boolean value indicating if it is
+      valid.
+
+      Validation automatically occurs when Message objects are created
+      and populated.  Validation that a given value will be compatible with
+      a field that it is assigned to can be done through the Field instances
+      validate() method.  The validate method used on a message will check that
+      all values of a message and its sub-messages are valid.  Assigning an
+      invalid value to a field will raise a ValidationException.
+
+    Example:
+
+      # Trade type.
+      class TradeType(Enum):
+        BUY = 1
+        SELL = 2
+        SHORT = 3
+        CALL = 4
+
+      class Lot(Message):
+        price = IntegerField(1, required=True)
+        quantity = IntegerField(2, required=True)
+
+      class Order(Message):
+        symbol = StringField(1, required=True)
+        total_quantity = IntegerField(2, required=True)
+        trade_type = EnumField(TradeType, 3, required=True)
+        lots = MessageField(Lot, 4, repeated=True)
+        limit = IntegerField(5)
+
+      order = Order(symbol='GOOG',
+                    total_quantity=10,
+                    trade_type=TradeType.BUY)
+
+      lot1 = Lot(price=304,
+                 quantity=7)
+
+      lot2 = Lot(price = 305,
+                 quantity=3)
+
+      order.lots = [lot1, lot2]
+
+      # Now object is initialized!
+      order.check_initialized()
+
+    """
+
+    def __init__(self, **kwargs):
+        """Initialize internal messages state.
+
+        Args:
+          A message can be initialized via the constructor by passing
+          in keyword arguments corresponding to fields. For example:
+
+            class Date(Message):
+              day = IntegerField(1)
+              month = IntegerField(2)
+              year = IntegerField(3)
+
+          Invoking:
+
+            date = Date(day=6, month=6, year=1911)
+
+          is the same as doing:
+
+            date = Date()
+            date.day = 6
+            date.month = 6
+            date.year = 1911
+
+        """
+        # Tag being an essential implementation detail must be private.
+        # Maps field number -> assigned value (see get_assigned_value).
+        self.__tags = {}
+        # Holds values for fields not defined on this message type;
+        # presumably populated by protocol decoders -- TODO confirm.
+        self.__unrecognized_fields = {}
+
+        # Assign keyword values through setattr so field validation runs.
+        assigned = set()
+        for name, value in kwargs.items():
+            setattr(self, name, value)
+            assigned.add(name)
+
+        # initialize repeated fields.
+        for field in self.all_fields():
+            if field.repeated and field.name not in assigned:
+                setattr(self, field.name, [])
+
+    def check_initialized(self):
+        """Check class for initialization status.
+
+        Check that all required fields are initialized
+
+        Raises:
+          ValidationError: If message is not initialized.
+        """
+        for name, field in self.__by_name.items():
+            value = getattr(self, name)
+            if value is None:
+                if field.required:
+                    raise ValidationError(
+                        "Message %s is missing required field %s" %
+                        (type(self).__name__, name))
+            else:
+                try:
+                    # Nested messages are validated recursively via their
+                    # own check_initialized.
+                    if (isinstance(field, MessageField) and
+                            issubclass(field.message_type, Message)):
+                        if field.repeated:
+                            for item in value:
+                                item_message_value = field.value_to_message(
+                                    item)
+                                item_message_value.check_initialized()
+                        else:
+                            message_value = field.value_to_message(value)
+                            message_value.check_initialized()
+                except ValidationError as err:
+                    # Tag the error with the message type closest to the
+                    # failure; outer frames leave an existing tag alone.
+                    if not hasattr(err, 'message_name'):
+                        err.message_name = type(self).__name__
+                    raise
+
+    def is_initialized(self):
+        """Get initialization status.
+
+        Returns:
+          True if message is valid, else False.
+        """
+        try:
+            self.check_initialized()
+        except ValidationError:
+            return False
+        else:
+            return True
+
+    @classmethod
+    def all_fields(cls):
+        """Get all field definition objects.
+
+        Ordering is arbitrary.
+
+        Returns:
+          Iterator over all values in arbitrary order.
+        """
+        return cls.__by_name.values()
+
+    @classmethod
+    def field_by_name(cls, name):
+        """Get field by name.
+
+        Returns:
+          Field object associated with name.
+
+        Raises:
+          KeyError if no field found by that name.
+        """
+        return cls.__by_name[name]
+
+    @classmethod
+    def field_by_number(cls, number):
+        """Get field by number.
+
+        Returns:
+          Field object associated with number.
+
+        Raises:
+          KeyError if no field found by that number.
+        """
+        return cls.__by_number[number]
+
+    def get_assigned_value(self, name):
+        """Get the assigned value of an attribute.
+
+        Get the underlying value of an attribute. If value has not
+        been set, will not return the default for the field.
+
+        Args:
+          name: Name of attribute to get.
+
+        Returns:
+          Value of attribute, None if it has not been set.
+
+        """
+        message_type = type(self)
+        try:
+            field = message_type.field_by_name(name)
+        except KeyError:
+            raise AttributeError('Message %s has no field %s' % (
+                message_type.__name__, name))
+        return self.__tags.get(field.number)
+
+    def reset(self, name):
+        """Reset assigned value for field.
+
+        Resetting a field will return it to its default value or None.
+
+        Args:
+          name: Name of field to reset.
+        """
+        message_type = type(self)
+        try:
+            field = message_type.field_by_name(name)
+        except KeyError:
+            if name not in message_type.__by_name:
+                raise AttributeError('Message %s has no field %s' % (
+                    message_type.__name__, name))
+        if field.repeated:
+            self.__tags[field.number] = FieldList(field, [])
+        else:
+            self.__tags.pop(field.number, None)
+
+    def all_unrecognized_fields(self):
+        """Get the names of all unrecognized fields in this message."""
+        return list(self.__unrecognized_fields.keys())
+
+    def get_unrecognized_field_info(self, key, value_default=None,
+                                    variant_default=None):
+        """Get the value and variant of an unknown field in this message.
+
+        Args:
+          key: The name or number of the field to retrieve.
+          value_default: Value to be returned if the key isn't found.
+          variant_default: Value to be returned as variant if the key isn't
+            found.
+
+        Returns:
+          (value, variant), where value and variant are whatever was passed
+          to set_unrecognized_field.
+        """
+        value, variant = self.__unrecognized_fields.get(key, (value_default,
+                                                              variant_default))
+        return value, variant
+
+    def set_unrecognized_field(self, key, value, variant):
+        """Set an unrecognized field, used when decoding a message.
+
+        Args:
+          key: The name or number used to refer to this unknown value.
+          value: The value of the field.
+          variant: Type information needed to interpret the value or re-encode
+            it.
+
+        Raises:
+          TypeError: If the variant is not an instance of messages.Variant.
+        """
+        if not isinstance(variant, Variant):
+            raise TypeError('Variant type %s is not valid.' % variant)
+        self.__unrecognized_fields[key] = value, variant
+
+    def __setattr__(self, name, value):
+        """Change set behavior for messages.
+
+        Messages may only be assigned values that are fields.
+
+        Does not try to validate field when set.
+
+        Args:
+          name: Name of field to assign to.
+          value: Value to assign to field.
+
+        Raises:
+          AttributeError when trying to assign value that is not a field.
+        """
+        if name in self.__by_name or name.startswith('_Message__'):
+            object.__setattr__(self, name, value)
+        else:
+            raise AttributeError("May not assign arbitrary value %s "
+                                 "to message %s" % (name, type(self).__name__))
+
+    def __repr__(self):
+        """Make string representation of message.
+
+        Example:
+
+          class MyMessage(messages.Message):
+            integer_value = messages.IntegerField(1)
+            string_value = messages.StringField(2)
+
+          my_message = MyMessage()
+          my_message.integer_value = 42
+          my_message.string_value = u'A string'
+
+          print my_message
+          >>> <MyMessage
+          ...  integer_value: 42
+          ...  string_value: u'A string'>
+
+        Returns:
+          String representation of message, including the values
+          of all fields and repr of all sub-messages.
+        """
+        body = ['<', type(self).__name__]
+        for field in sorted(self.all_fields(),
+                            key=lambda f: f.number):
+            attribute = field.name
+            value = self.get_assigned_value(field.name)
+            if value is not None:
+                body.append('\n %s: %s' % (attribute, repr(value)))
+        body.append('>')
+        return ''.join(body)
+
+    def __eq__(self, other):
+        """Equality operator.
+
+        Does field by field comparison with other message.  For
+        equality, must be same type and values of all fields must be
+        equal.
+
+        Messages not required to be initialized for comparison.
+
+        Does not attempt to determine equality for values that have
+        default values that are not set.  In other words:
+
+          class HasDefault(Message):
+
+            attr1 = StringField(1, default='default value')
+
+          message1 = HasDefault()
+          message2 = HasDefault()
+          message2.attr1 = 'default value'
+
+          message1 != message2
+
+        Does not compare unknown values.
+
+        Args:
+          other: Other message to compare with.
+        """
+        # TODO(rafek): Implement "equivalent" which does comparisons
+        # taking default values in to consideration.
+        if self is other:
+            return True
+
+        if type(self) is not type(other):
+            return False
+
+        return self.__tags == other.__tags
+
+    def __ne__(self, other):
+        """Not equals operator.
+
+        Does field by field comparison with other message.  For
+        non-equality, must be different type or any value of a field must be
+        non-equal to the same field in the other instance.
+
+        Messages not required to be initialized for comparison.
+
+        Args:
+          other: Other message to compare with.
+        """
+        return not self.__eq__(other)
+
+
class FieldList(list):
    """List subclass that validates values against a repeated field.

    Every operation that introduces new elements (construction, item and
    slice assignment, append, extend, insert) routes the incoming values
    through the owning field's validation.  Attempting to add or set
    values of the wrong type raises ValidationError.
    """

    def __init__(self, field_instance, sequence):
        """Constructor.

        Args:
          field_instance: Instance of field that validates the list.
          sequence: List or tuple to construct list from.

        Raises:
          FieldDefinitionError: If field_instance is not a repeated field.
        """
        if not field_instance.repeated:
            raise FieldDefinitionError(
                'FieldList may only accept repeated fields')
        self.__field = field_instance
        self.__field.validate(sequence)
        list.__init__(self, sequence)

    def __getstate__(self):
        """Enable pickling.

        A field that belongs to a Message definition holds a weakref to
        it and so cannot be pickled directly; in that case the owning
        Message class and the field number are stored instead so the
        field can be re-resolved on unpickling.

        Returns:
          A 3-tuple containing:
            - The field instance, or None if it belongs to a Message class.
            - The Message class that the field instance belongs to, or None.
            - The field instance number of the Message class it belongs to, or
                None.
        """
        owner = self.__field.message_definition()
        if owner is not None:
            return None, owner, self.__field.number
        return self.__field, None, None

    def __setstate__(self, state):
        """Enable unpickling.

        Args:
          state: A 3-tuple as produced by __getstate__.
        """
        field_instance, message_class, number = state
        if field_instance is not None:
            self.__field = field_instance
        else:
            # Re-resolve the field from its owning Message class.
            self.__field = message_class.field_by_number(number)

    @property
    def field(self):
        """Field that validates list."""
        return self.__field

    def __setslice__(self, i, j, sequence):
        """Validate slice assignment to list (Python 2 only)."""
        self.__field.validate(sequence)
        list.__setslice__(self, i, j, sequence)

    def __setitem__(self, index, value):
        """Validate item (or, on Python 3, slice) assignment to list."""
        if isinstance(index, slice):
            self.__field.validate(value)
        else:
            self.__field.validate_element(value)
        list.__setitem__(self, index, value)

    def append(self, value):
        """Validate item appending to list."""
        self.__field.validate_element(value)
        return list.append(self, value)

    def extend(self, sequence):
        """Validate extension of list."""
        self.__field.validate(sequence)
        return list.extend(self, sequence)

    def insert(self, index, value):
        """Validate item insertion to list."""
        self.__field.validate_element(value)
        return list.insert(self, index, value)
+
+
+class _FieldMeta(type):
+
+    def __init__(cls, name, bases, dct):
+        getattr(cls, '_Field__variant_to_type').update(
+            (variant, cls) for variant in dct.get('VARIANTS', []))
+        type.__init__(cls, name, bases, dct)
+
+
+# TODO(rafek): Prevent additional field subclasses.
class Field(six.with_metaclass(_FieldMeta, object)):
    """Definition for message field.

    Instances act as descriptors on Message classes: __get__/__set__
    store assigned values in the owning message's private tag map.
    After construction a Field instance is read-only (see __setattr__),
    except for the post-init attributes set during class creation.
    """

    # Flipped to True at the end of __init__; once True, __setattr__
    # rejects further assignment, making instances effectively immutable.
    __initialized = False  # pylint:disable=invalid-name
    # Variant -> Field-subclass registry; populated by _FieldMeta (which
    # reaches in via the mangled name '_Field__variant_to_type').
    __variant_to_type = {}  # pylint:disable=invalid-name

    # TODO(craigcitro): Remove this alias.
    #
    # We add an alias here for backwards compatibility; note that in
    # python3, this attribute will silently be ignored.
    __metaclass__ = _FieldMeta

    @util.positional(2)
    def __init__(self,
                 number,
                 required=False,
                 repeated=False,
                 variant=None,
                 default=None):
        """Constructor.

        The required and repeated parameters are mutually exclusive.
        Setting both to True will raise a FieldDefinitionError.

        Sub-class Attributes:
          Each sub-class of Field must define the following:
            VARIANTS: Set of variant types accepted by that field.
            DEFAULT_VARIANT: Default variant type if not specified in
              constructor.

        Args:
          number: Number of field.  Must be unique per message class.
          required: Whether or not field is required.  Mutually exclusive with
            'repeated'.
          repeated: Whether or not field is repeated.  Mutually exclusive with
            'required'.
          variant: Wire-format variant hint.
          default: Default value for field if not found in stream.

        Raises:
          InvalidVariantError when invalid variant for field is provided.
          InvalidDefaultError when invalid default for field is provided.
          FieldDefinitionError when invalid number provided or mutually
            exclusive fields are used.
          InvalidNumberError when the field number is out of range or reserved.

        """
        if not isinstance(number, int) or not 1 <= number <= MAX_FIELD_NUMBER:
            raise InvalidNumberError(
                'Invalid number for field: %s\n'
                'Number must be 1 or greater and %d or less' %
                (number, MAX_FIELD_NUMBER))

        if FIRST_RESERVED_FIELD_NUMBER <= number <= LAST_RESERVED_FIELD_NUMBER:
            raise InvalidNumberError('Tag number %d is a reserved number.\n'
                                     'Numbers %d to %d are reserved' %
                                     (number, FIRST_RESERVED_FIELD_NUMBER,
                                      LAST_RESERVED_FIELD_NUMBER))

        if repeated and required:
            raise FieldDefinitionError('Cannot set both repeated and required')

        if variant is None:
            variant = self.DEFAULT_VARIANT

        if repeated and default is not None:
            raise FieldDefinitionError('Repeated fields may not have defaults')

        if variant not in self.VARIANTS:
            raise InvalidVariantError(
                'Invalid variant: %s\nValid variants for %s are %r' %
                (variant, type(self).__name__, sorted(self.VARIANTS)))

        self.number = number
        self.required = required
        self.repeated = repeated
        self.variant = variant

        if default is not None:
            try:
                self.validate_default(default)
            except ValidationError as err:
                try:
                    name = self.name
                except AttributeError:
                    # For when raising error before name initialization.
                    raise InvalidDefaultError(
                        'Invalid default value for %s: %r: %s' %
                        (self.__class__.__name__, default, err))
                else:
                    raise InvalidDefaultError(
                        'Invalid default value for field %s: '
                        '%r: %s' % (name, default, err))

        self.__default = default
        # From here on, __setattr__ rejects ordinary assignment.
        self.__initialized = True

    def __setattr__(self, name, value):
        """Setter overidden to prevent assignment to fields after creation.

        Args:
          name: Name of attribute to set.
          value: Value to assign.

        Raises:
          AttributeError: If the field is already initialized and the
            attribute is not a post-init attribute.
        """
        # Special case post-init names.  They need to be set after constructor.
        if name in _POST_INIT_FIELD_ATTRIBUTE_NAMES:
            object.__setattr__(self, name, value)
            return

        # All other attributes must be set before __initialized.
        if not self.__initialized:
            # Not initialized yet, allow assignment.
            object.__setattr__(self, name, value)
        else:
            raise AttributeError('Field objects are read-only')

    def __set__(self, message_instance, value):
        """Set value on message (descriptor protocol).

        Args:
          message_instance: Message instance to set value on.
          value: Value to set on message.  None clears a non-repeated
            field; repeated values are wrapped in a validating FieldList.

        Raises:
          ValidationError: If None is assigned to a repeated field, or
            the value fails field validation.
        """
        # Reaches in to message instance directly to assign to private tags.
        if value is None:
            if self.repeated:
                raise ValidationError(
                    'May not assign None to repeated field %s' % self.name)
            else:
                message_instance._Message__tags.pop(self.number, None)
        else:
            if self.repeated:
                value = FieldList(self, value)
            else:
                value = self.validate(value)
            message_instance._Message__tags[self.number] = value

    def __get__(self, message_instance, message_class):
        """Get value from message (descriptor protocol).

        Accessed on the class itself (message_instance is None), returns
        this Field object.  On an instance, returns the assigned value
        or, if unset, the field's default.
        """
        if message_instance is None:
            return self

        result = message_instance._Message__tags.get(self.number)
        if result is None:
            return self.default
        return result

    def validate_element(self, value):
        """Validate single element of field.

        This is different from validate in that it is used on individual
        values of repeated fields.

        Args:
          value: Value to validate.

        Returns:
          The value casted in the expected type.

        Raises:
          ValidationError if value is not expected type.
        """
        if not isinstance(value, self.type):

            # Authorize int values as float.
            if isinstance(value, six.integer_types) and self.type == float:
                return float(value)

            if value is None:
                if self.required:
                    raise ValidationError('Required field is missing')
            else:
                try:
                    name = self.name
                except AttributeError:
                    # Field not yet bound to a message class; report the
                    # field class name instead of the attribute name.
                    raise ValidationError('Expected type %s for %s, '
                                          'found %s (type %s)' %
                                          (self.type, self.__class__.__name__,
                                           value, type(value)))
                else:
                    raise ValidationError(
                        'Expected type %s for field %s, found %s (type %s)' %
                        (self.type, name, value, type(value)))
        return value

    def __validate(self, value, validate_element):
        """Internal validation function.

        Validate an internal value using a function to validate
        individual elements.

        Args:
          value: Value to validate.
          validate_element: Function to use to validate individual elements.

        Raises:
          ValidationError if value is not expected type.

        """
        if not self.repeated:
            return validate_element(value)
        else:
            # Must be a list or tuple, may not be a string.
            if isinstance(value, (list, tuple)):
                result = []
                for element in value:
                    if element is None:
                        try:
                            name = self.name
                        except AttributeError:
                            raise ValidationError(
                                'Repeated values for %s '
                                'may not be None' % self.__class__.__name__)
                        else:
                            raise ValidationError(
                                'Repeated values for field %s '
                                'may not be None' % name)
                    result.append(validate_element(element))
                return result
            elif value is not None:
                try:
                    name = self.name
                except AttributeError:
                    raise ValidationError('%s is repeated. Found: %s' % (
                        self.__class__.__name__, value))
                else:
                    raise ValidationError(
                        'Field %s is repeated. Found: %s' % (name, value))
        # Reached only for a repeated field with value None: passes through.
        return value

    def validate(self, value):
        """Validate value assigned to field.

        Args:
          value: Value to validate.

        Returns:
          the value in casted in the correct type.

        Raises:
          ValidationError if value is not expected type.
        """
        return self.__validate(value, self.validate_element)

    def validate_default_element(self, value):
        """Validate value as assigned to field default field.

        Some fields may allow for delayed resolution of default types
        necessary in the case of circular definition references. In
        this case, the default value might be a place holder that is
        resolved when needed after all the message classes are
        defined.

        Args:
          value: Default value to validate.

        Returns:
          the value in casted in the correct type.

        Raises:
          ValidationError if value is not expected type.

        """
        return self.validate_element(value)

    def validate_default(self, value):
        """Validate default value assigned to field.

        Args:
          value: Value to validate.

        Returns:
          the value in casted in the correct type.

        Raises:
          ValidationError if value is not expected type.
        """
        return self.__validate(value, self.validate_default_element)

    def message_definition(self):
        """Get Message definition that contains this Field definition.

        Returns:
          Containing Message definition for Field. Will return None if
          for some reason Field is defined outside of a Message class.

        """
        try:
            # _message_definition is a weakref set when the field is bound
            # to a message class; calling it dereferences the weakref.
            return self._message_definition()
        except AttributeError:
            return None

    @property
    def default(self):
        """Get default value for field."""
        return self.__default

    @classmethod
    def lookup_field_type_by_variant(cls, variant):
        """Get the Field subclass registered for a wire variant."""
        return cls.__variant_to_type[variant]
+
+
class IntegerField(Field):
    """Field definition for integer values."""

    # Every integer wire variant is acceptable for this field type.
    VARIANTS = frozenset((
        Variant.INT32,
        Variant.INT64,
        Variant.UINT32,
        Variant.UINT64,
        Variant.SINT32,
        Variant.SINT64,
    ))

    DEFAULT_VARIANT = Variant.INT64

    # Accepts both int and long on Python 2.
    type = six.integer_types
+
+
class FloatField(Field):
    """Field definition for float values."""

    VARIANTS = frozenset((
        Variant.FLOAT,
        Variant.DOUBLE,
    ))

    DEFAULT_VARIANT = Variant.DOUBLE

    type = float
+
+
class BooleanField(Field):
    """Field definition for boolean values."""

    VARIANTS = frozenset((Variant.BOOL,))

    DEFAULT_VARIANT = Variant.BOOL

    type = bool
+
+
class BytesField(Field):
    """Field definition for byte string values."""

    VARIANTS = frozenset((Variant.BYTES,))

    DEFAULT_VARIANT = Variant.BYTES

    type = bytes
+
+
class StringField(Field):
    """Field definition for unicode string values."""

    VARIANTS = frozenset((Variant.STRING,))

    DEFAULT_VARIANT = Variant.STRING

    type = six.text_type

    def validate_element(self, value):
        """Validate StringField allowing for str and unicode.

        Byte strings are accepted only when they decode as 7-bit ASCII;
        anything that is not a byte string is validated as unicode text
        by the base class.

        Raises:
          ValidationError if a str value is not 7-bit ascii.
        """
        if not isinstance(value, bytes):
            return super(StringField, self).validate_element(value)
        # A bytes value satisfies "required=True" as long as it is ASCII.
        try:
            six.text_type(value, 'ascii')
        except UnicodeDecodeError as err:
            # Build the error first, then raise outside the inner handler
            # so the AttributeError probe does not pollute the chain.
            try:
                _ = self.name
            except AttributeError:
                validation_error = ValidationError(
                    'Field encountered non-ASCII string %r: %s' % (value,
                                                                   err))
            else:
                validation_error = ValidationError(
                    'Field %s encountered non-ASCII string %r: %s' % (
                        self.name, value, err))
                validation_error.field_name = self.name
            raise validation_error
        return value
+
+
class MessageField(Field):
    """Field definition for sub-message values.

    Message fields contain instance of other messages.  Instances stored
    on messages stored on message fields  are considered to be owned by
    the containing message instance and should not be shared between
    owning instances.

    Message fields must be defined to reference a single type of message.
    Normally message field are defined by passing the referenced message
    class in to the constructor.

    It is possible to define a message field for a type that does not
    yet exist by passing the name of the message in to the constructor
    instead of a message class. Resolution of the actual type of the
    message is deferred until it is needed, for example, during
    message verification. Names provided to the constructor must refer
    to a class within the same python module as the class that is
    using it. Names refer to messages relative to the containing
    messages scope. For example, the two fields of OuterMessage refer
    to the same message type:

      class Outer(Message):

        inner_relative = MessageField('Inner', 1)
        inner_absolute = MessageField('Outer.Inner', 2)

        class Inner(Message):
          ...

    When resolving an actual type, MessageField will traverse the
    entire scope of nested messages to match a message name. This
    makes it easy for siblings to reference siblings:

      class Outer(Message):

        class Inner(Message):

          sibling = MessageField('Sibling', 1)

        class Sibling(Message):
          ...

    """

    VARIANTS = frozenset([Variant.MESSAGE])

    DEFAULT_VARIANT = Variant.MESSAGE

    @util.positional(3)
    def __init__(self,
                 message_type,
                 number,
                 required=False,
                 repeated=False,
                 variant=None):
        """Constructor.

        Args:
          message_type: Message type for field.  Must be subclass of Message,
            or a string naming one for delayed resolution.
          number: Number of field.  Must be unique per message class.
          required: Whether or not field is required.  Mutually exclusive to
            'repeated'.
          repeated: Whether or not field is repeated.  Mutually exclusive to
            'required'.
          variant: Wire-format variant hint.

        Raises:
          FieldDefinitionError when invalid message_type is provided.
        """
        valid_type = (isinstance(message_type, six.string_types) or
                      (message_type is not Message and
                       isinstance(message_type, type) and
                       issubclass(message_type, Message)))

        if not valid_type:
            raise FieldDefinitionError(
                'Invalid message class: %s' % message_type)

        if isinstance(message_type, six.string_types):
            # Delayed resolution: remember the name, resolve in 'type'.
            self.__type_name = message_type
            self.__type = None
        else:
            self.__type = message_type

        super(MessageField, self).__init__(number,
                                           required=required,
                                           repeated=repeated,
                                           variant=variant)

    def __set__(self, message_instance, value):
        """Set value on message.

        Dict values (or dicts inside a list/tuple for repeated fields)
        are coerced into instances of the field's message type before
        normal Field validation/assignment.

        Args:
          message_instance: Message instance to set value on.
          value: Value to set on message.
        """
        t = self.type
        if isinstance(t, type) and issubclass(t, Message):
            if self.repeated:
                if value and isinstance(value, (list, tuple)):
                    value = [(t(**v) if isinstance(v, dict) else v)
                             for v in value]
            elif isinstance(value, dict):
                value = t(**value)
        super(MessageField, self).__set__(message_instance, value)

    @property
    def type(self):
        """Message type used for field.

        Resolves a name-based (delayed) message type on first access and
        caches the result.

        Raises:
          FieldDefinitionError: If the resolved definition is not a
            proper Message subclass.
        """
        if self.__type is None:
            message_type = find_definition(
                self.__type_name, self.message_definition())
            if not (message_type is not Message and
                    isinstance(message_type, type) and
                    issubclass(message_type, Message)):
                raise FieldDefinitionError(
                    'Invalid message class: %s' % message_type)
            self.__type = message_type
        return self.__type

    @property
    def message_type(self):
        """Underlying message type used for serialization.

        Will always be a sub-class of Message.  This is different from type
        which represents the python value that message_type is mapped to for
        use by the user.
        """
        return self.type

    def value_from_message(self, message):
        """Convert a message to a value instance.

        Used by deserializers to convert from underlying messages to
        value of expected user type.

        Args:
          message: A message instance of type self.message_type.

        Returns:
          Value of self.message_type.

        Raises:
          DecodeError: If message is not an instance of message_type.
        """
        if not isinstance(message, self.message_type):
            raise DecodeError('Expected type %s, got %s: %r' %
                              (self.message_type.__name__,
                               type(message).__name__,
                               message))
        return message

    def value_to_message(self, value):
        """Convert a value instance to a message.

        Used by serializers to convert Python user types to underlying
        messages for transmission.

        Args:
          value: A value of type self.type.

        Returns:
          An instance of type self.message_type.

        Raises:
          EncodeError: If value is not an instance of self.type.
        """
        if not isinstance(value, self.type):
            raise EncodeError('Expected type %s, got %s: %r' %
                              (self.type.__name__,
                               type(value).__name__,
                               value))
        return value
+
+
+class EnumField(Field):
+    """Field definition for enum values.
+
+    Enum fields may have default values that are delayed until the
+    associated enum type is resolved. This is necessary to support
+    certain circular references.
+
+    For example:
+
+      class Message1(Message):
+
+        class Color(Enum):
+
+          RED = 1
+          GREEN = 2
+          BLUE = 3
+
+        # This field default value  will be validated when default is accessed.
+        animal = EnumField('Message2.Animal', 1, default='HORSE')
+
+      class Message2(Message):
+
+        class Animal(Enum):
+
+          DOG = 1
+          CAT = 2
+          HORSE = 3
+
+        # This fields default value will be validated right away since Color
+        # is already fully resolved.
+        color = EnumField(Message1.Color, 1, default='RED')
+    """
+
+    VARIANTS = frozenset([Variant.ENUM])
+
+    DEFAULT_VARIANT = Variant.ENUM
+
+    def __init__(self, enum_type, number, **kwargs):
+        """Constructor.
+
+        Args:
+          enum_type: Enum type for field.  Must be subclass of Enum.
+          number: Number of field.  Must be unique per message class.
+          required: Whether or not field is required.  Mutually exclusive to
+            'repeated'.
+          repeated: Whether or not field is repeated.  Mutually exclusive to
+            'required'.
+          variant: Wire-format variant hint.
+          default: Default value for field if not found in stream.
+
+        Raises:
+          FieldDefinitionError when invalid enum_type is provided.
+        """
+        valid_type = (isinstance(enum_type, six.string_types) or
+                      (enum_type is not Enum and
+                       isinstance(enum_type, type) and
+                       issubclass(enum_type, Enum)))
+
+        if not valid_type:
+            raise FieldDefinitionError('Invalid enum type: %s' % enum_type)
+
+        if isinstance(enum_type, six.string_types):
+            self.__type_name = enum_type
+            self.__type = None
+        else:
+            self.__type = enum_type
+
+        super(EnumField, self).__init__(number, **kwargs)
+
+    def validate_default_element(self, value):
+        """Validate default element of Enum field.
+
+        Enum fields allow for delayed resolution of default values
+        when the type of the field has not been resolved. The default
+        value of a field may be a string or an integer. If the Enum
+        type of the field has been resolved, the default value is
+        validated against that type.
+
+        Args:
+          value: Value to validate.
+
+        Raises:
+          ValidationError if value is not expected message type.
+
+        """
+        if isinstance(value, (six.string_types, six.integer_types)):
+            # Validation of the value does not happen for delayed resolution
+            # enumerated types.  Ignore if type is not yet resolved.
+            if self.__type:
+                self.__type(value)
+            return value
+
+        return super(EnumField, self).validate_default_element(value)
+
+    @property
+    def type(self):
+        """Enum type used for field."""
+        if self.__type is None:
+            found_type = find_definition(
+                self.__type_name, self.message_definition())
+            if not (found_type is not Enum and
+                    isinstance(found_type, type) and
+                    issubclass(found_type, Enum)):
+                raise FieldDefinitionError(
+                    'Invalid enum type: %s' % found_type)
+
+            self.__type = found_type
+        return self.__type
+
+    @property
+    def default(self):
+        """Default for enum field.
+
+        Will cause resolution of Enum type and unresolved default value.
+        """
+        try:
+            return self.__resolved_default
+        except AttributeError:
+            resolved_default = super(EnumField, self).default
+            if isinstance(resolved_default, (six.string_types,
+                                             six.integer_types)):
+                # pylint:disable=not-callable
+                resolved_default = self.type(resolved_default)
+            self.__resolved_default = resolved_default
+            return self.__resolved_default
+
+
+@util.positional(2)
+def find_definition(name, relative_to=None, importer=__import__):
+    """Find definition by name in module-space.
+
+    The find algorithm will look for definitions by name relative to a
+    message definition or by fully qualified name. If no definition is
+    found relative to the relative_to parameter it will do the same
+    search against the container of relative_to. If relative_to is a
+    nested Message, it will search its message_definition(). If that
+    message has no message_definition() it will search its module. If
+    relative_to is a module, it will attempt to look for the
+    containing module and search relative to it. If the module is a
+    top-level module, it will look for a message using a fully
+    qualified name. If no message is found, then the search fails and
+    DefinitionNotFoundError is raised.
+
+    For example, when looking for any definition 'foo.bar.ADefinition'
+    relative to an actual message definition abc.xyz.SomeMessage:
+
+      find_definition('foo.bar.ADefinition', SomeMessage)
+
+    It is like looking for the following fully qualified names:
+
+      abc.xyz.SomeMessage. foo.bar.ADefinition
+      abc.xyz. foo.bar.ADefinition
+      abc. foo.bar.ADefinition
+      foo.bar.ADefinition
+
+    When resolving the name relative to Message definitions and modules, the
+    algorithm searches any Messages or sub-modules found in its path.
+    Non-Message values are not searched.
+
+    A name that begins with '.' is considered to be a fully qualified
+    name. The name is always searched for from the topmost package.
+    For example, assume two message types:
+
+      abc.xyz.SomeMessage
+      xyz.SomeMessage
+
+    Searching for '.xyz.SomeMessage' relative to 'abc' will resolve to
+    'xyz.SomeMessage' and not 'abc.xyz.SomeMessage'.  For this kind of name,
+    the relative_to parameter is effectively ignored and always set to None.
+
+    For more information about package name resolution, please see:
+
+      http://code.google.com/apis/protocolbuffers/docs/proto.html#packages
+
+    Args:
+      name: Name of definition to find.  May be fully qualified or relative
+        name.
+      relative_to: Search for definition relative to message definition or
+        module. None will cause a fully qualified name search.
+      importer: Import function to use for resolving modules.
+
+    Returns:
+      Enum or Message class definition associated with name.
+
+    Raises:
+      DefinitionNotFoundError if no definition is found in any search path.
+
+    """
+    # Check parameters.
+    if not (relative_to is None or
+            isinstance(relative_to, types.ModuleType) or
+            isinstance(relative_to, type) and
+            issubclass(relative_to, Message)):
+        raise TypeError(
+            'relative_to must be None, Message definition or module.'
+            '  Found: %s' % relative_to)
+
+    name_path = name.split('.')
+
+    # Handle absolute path reference.
+    if not name_path[0]:
+        relative_to = None
+        name_path = name_path[1:]
+
+    def search_path():
+        """Performs a single iteration searching the path from relative_to.
+
+        This is the function that searches up the path from a relative object.
+
+          fully.qualified.object . relative.or.nested.Definition
+                                   ---------------------------->
+                                                      ^
+                                                      |
+                                this part of search --+
+
+        Returns:
+          Message or Enum at the end of name_path, else None.
+        """
+        next_part = relative_to
+        for node in name_path:
+            # Look for attribute first.
+            attribute = getattr(next_part, node, None)
+
+            if attribute is not None:
+                next_part = attribute
+            else:
+                # If module, look for sub-module.
+                if (next_part is None or
+                        isinstance(next_part, types.ModuleType)):
+                    if next_part is None:
+                        module_name = node
+                    else:
+                        module_name = '%s.%s' % (next_part.__name__, node)
+
+                    try:
+                        fromitem = module_name.split('.')[-1]
+                        next_part = importer(module_name, '', '',
+                                             [str(fromitem)])
+                    except ImportError:
+                        return None
+                else:
+                    return None
+
+            if not isinstance(next_part, types.ModuleType):
+                if not (isinstance(next_part, type) and
+                        issubclass(next_part, (Message, Enum))):
+                    return None
+
+        return next_part
+
+    while True:
+        found = search_path()
+        if isinstance(found, type) and issubclass(found, (Enum, Message)):
+            return found
+        else:
+            # Find next relative_to to search against.
+            #
+            #   fully.qualified.object . relative.or.nested.Definition
+            #   <---------------------
+            #           ^
+            #           |
+            #   does this part of search
+            if relative_to is None:
+                # Fully qualified search was done.  Nothing found.  Fail.
+                raise DefinitionNotFoundError(
+                    'Could not find definition for %s' % name)
+            else:
+                if isinstance(relative_to, types.ModuleType):
+                    # Find parent module.
+                    module_path = relative_to.__name__.split('.')[:-1]
+                    if not module_path:
+                        relative_to = None
+                    else:
+                        # Should not raise ImportError. If it does...
+                        # weird and unexpected. Propagate.
+                        relative_to = importer(
+                            '.'.join(module_path), '', '', [module_path[-1]])
+                elif (isinstance(relative_to, type) and
+                      issubclass(relative_to, Message)):
+                    parent = relative_to.message_definition()
+                    if parent is None:
+                        last_module_name = relative_to.__module__.split(
+                            '.')[-1]
+                        relative_to = importer(
+                            relative_to.__module__, '', '', [last_module_name])
+                    else:
+                        relative_to = parent
diff --git a/apitools/base/protorpclite/messages_test.py b/apitools/base/protorpclite/messages_test.py
new file mode 100644
index 0000000..78fe76e
--- /dev/null
+++ b/apitools/base/protorpclite/messages_test.py
@@ -0,0 +1,2157 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Tests for apitools.base.protorpclite.messages."""
+import pickle
+import re
+import sys
+import types
+import unittest
+
+import six
+
+from apitools.base.protorpclite import descriptor
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import test_util
+
+# This package plays lots of games with modifying global variables inside
+# test cases. Hence:
+# pylint:disable=function-redefined
+# pylint:disable=global-variable-not-assigned
+# pylint:disable=global-variable-undefined
+# pylint:disable=redefined-outer-name
+# pylint:disable=undefined-variable
+# pylint:disable=unused-variable
+# pylint:disable=too-many-lines
+
+
+class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
+                          test_util.TestCase):
+
+    MODULE = messages
+
+
+class ValidationErrorTest(test_util.TestCase):
+
+    def testStr_NoFieldName(self):
+        """Test string version of ValidationError when no name provided."""
+        self.assertEquals('Validation error',
+                          str(messages.ValidationError('Validation error')))
+
+    def testStr_FieldName(self):
+        """Test string version of ValidationError when no name provided."""
+        validation_error = messages.ValidationError('Validation error')
+        validation_error.field_name = 'a_field'
+        self.assertEquals('Validation error', str(validation_error))
+
+
+class EnumTest(test_util.TestCase):
+
+    def setUp(self):
+        """Set up tests."""
+        # Redefine Color class in case so that changes to it (an
+        # error) in one test does not affect other tests.
+        global Color  # pylint:disable=global-variable-not-assigned
+
+        # pylint:disable=unused-variable
+        class Color(messages.Enum):
+            RED = 20
+            ORANGE = 2
+            YELLOW = 40
+            GREEN = 4
+            BLUE = 50
+            INDIGO = 5
+            VIOLET = 80
+
+    def testNames(self):
+        """Test that names iterates over enum names."""
+        self.assertEquals(
+            set(['BLUE', 'GREEN', 'INDIGO', 'ORANGE', 'RED',
+                 'VIOLET', 'YELLOW']),
+            set(Color.names()))
+
+    def testNumbers(self):
+        """Tests that numbers iterates of enum numbers."""
+        self.assertEquals(set([2, 4, 5, 20, 40, 50, 80]), set(Color.numbers()))
+
+    def testIterate(self):
+        """Test that __iter__ iterates over all enum values."""
+        self.assertEquals(set(Color),
+                          set([Color.RED,
+                               Color.ORANGE,
+                               Color.YELLOW,
+                               Color.GREEN,
+                               Color.BLUE,
+                               Color.INDIGO,
+                               Color.VIOLET]))
+
+    def testNaturalOrder(self):
+        """Test that natural order enumeration is in numeric order."""
+        self.assertEquals([Color.ORANGE,
+                           Color.GREEN,
+                           Color.INDIGO,
+                           Color.RED,
+                           Color.YELLOW,
+                           Color.BLUE,
+                           Color.VIOLET],
+                          sorted(Color))
+
+    def testByName(self):
+        """Test look-up by name."""
+        self.assertEquals(Color.RED, Color.lookup_by_name('RED'))
+        self.assertRaises(KeyError, Color.lookup_by_name, 20)
+        self.assertRaises(KeyError, Color.lookup_by_name, Color.RED)
+
+    def testByNumber(self):
+        """Test look-up by number."""
+        self.assertRaises(KeyError, Color.lookup_by_number, 'RED')
+        self.assertEquals(Color.RED, Color.lookup_by_number(20))
+        self.assertRaises(KeyError, Color.lookup_by_number, Color.RED)
+
+    def testConstructor(self):
+        """Test that constructor look-up by name or number."""
+        self.assertEquals(Color.RED, Color('RED'))
+        self.assertEquals(Color.RED, Color(u'RED'))
+        self.assertEquals(Color.RED, Color(20))
+        if six.PY2:
+            self.assertEquals(Color.RED, Color(long(20)))
+        self.assertEquals(Color.RED, Color(Color.RED))
+        self.assertRaises(TypeError, Color, 'Not exists')
+        self.assertRaises(TypeError, Color, 'Red')
+        self.assertRaises(TypeError, Color, 100)
+        self.assertRaises(TypeError, Color, 10.0)
+
+    def testLen(self):
+        """Test that len function works to count enums."""
+        self.assertEquals(7, len(Color))
+
+    def testNoSubclasses(self):
+        """Test that it is not possible to sub-class enum classes."""
+        def declare_subclass():
+            class MoreColor(Color):
+                pass
+        self.assertRaises(messages.EnumDefinitionError,
+                          declare_subclass)
+
+    def testClassNotMutable(self):
+        """Test that enum classes themselves are not mutable."""
+        self.assertRaises(AttributeError,
+                          setattr,
+                          Color,
+                          'something_new',
+                          10)
+
+    def testInstancesMutable(self):
+        """Test that enum instances are not mutable."""
+        self.assertRaises(TypeError,
+                          setattr,
+                          Color.RED,
+                          'something_new',
+                          10)
+
+    def testDefEnum(self):
+        """Test def_enum works by building enum class from dict."""
+        WeekDay = messages.Enum.def_enum({'Monday': 1,
+                                          'Tuesday': 2,
+                                          'Wednesday': 3,
+                                          'Thursday': 4,
+                                          'Friday': 6,
+                                          'Saturday': 7,
+                                          'Sunday': 8},
+                                         'WeekDay')
+        self.assertEquals('Wednesday', WeekDay(3).name)
+        self.assertEquals(6, WeekDay('Friday').number)
+        self.assertEquals(WeekDay.Sunday, WeekDay('Sunday'))
+
+    def testNonInt(self):
+        """Test that non-integer values rejection by enum def."""
+        self.assertRaises(messages.EnumDefinitionError,
+                          messages.Enum.def_enum,
+                          {'Bad': '1'},
+                          'BadEnum')
+
+    def testNegativeInt(self):
+        """Test that negative numbers rejection by enum def."""
+        self.assertRaises(messages.EnumDefinitionError,
+                          messages.Enum.def_enum,
+                          {'Bad': -1},
+                          'BadEnum')
+
+    def testLowerBound(self):
+        """Test that zero is accepted by enum def."""
+        class NotImportant(messages.Enum):
+            """Testing for value zero"""
+            VALUE = 0
+
+        self.assertEquals(0, int(NotImportant.VALUE))
+
+    def testTooLargeInt(self):
+        """Test that numbers too large are rejected."""
+        self.assertRaises(messages.EnumDefinitionError,
+                          messages.Enum.def_enum,
+                          {'Bad': (2 ** 29)},
+                          'BadEnum')
+
+    def testRepeatedInt(self):
+        """Test duplicated numbers are forbidden."""
+        self.assertRaises(messages.EnumDefinitionError,
+                          messages.Enum.def_enum,
+                          {'Ok': 1, 'Repeated': 1},
+                          'BadEnum')
+
+    def testStr(self):
+        """Test converting to string."""
+        self.assertEquals('RED', str(Color.RED))
+        self.assertEquals('ORANGE', str(Color.ORANGE))
+
+    def testInt(self):
+        """Test converting to int."""
+        self.assertEquals(20, int(Color.RED))
+        self.assertEquals(2, int(Color.ORANGE))
+
+    def testRepr(self):
+        """Test enum representation."""
+        self.assertEquals('Color(RED, 20)', repr(Color.RED))
+        self.assertEquals('Color(YELLOW, 40)', repr(Color.YELLOW))
+
+    def testDocstring(self):
+        """Test that docstring is supported ok."""
+        class NotImportant(messages.Enum):
+            """I have a docstring."""
+
+            VALUE1 = 1
+
+        self.assertEquals('I have a docstring.', NotImportant.__doc__)
+
+    def testDeleteEnumValue(self):
+        """Test that enum values cannot be deleted."""
+        self.assertRaises(TypeError, delattr, Color, 'RED')
+
+    def testEnumName(self):
+        """Test enum name."""
+        module_name = test_util.get_module_name(EnumTest)
+        self.assertEquals('%s.Color' % module_name, Color.definition_name())
+        self.assertEquals(module_name, Color.outer_definition_name())
+        self.assertEquals(module_name, Color.definition_package())
+
+    def testDefinitionName_OverrideModule(self):
+        """Test enum module is overriden by module package name."""
+        global package
+        try:
+            package = 'my.package'
+            self.assertEquals('my.package.Color', Color.definition_name())
+            self.assertEquals('my.package', Color.outer_definition_name())
+            self.assertEquals('my.package', Color.definition_package())
+        finally:
+            del package
+
+    def testDefinitionName_NoModule(self):
+        """Test what happens when there is no module for enum."""
+        class Enum1(messages.Enum):
+            pass
+
+        original_modules = sys.modules
+        sys.modules = dict(sys.modules)
+        try:
+            del sys.modules[__name__]
+            self.assertEquals('Enum1', Enum1.definition_name())
+            self.assertEquals(None, Enum1.outer_definition_name())
+            self.assertEquals(None, Enum1.definition_package())
+            self.assertEquals(six.text_type, type(Enum1.definition_name()))
+        finally:
+            sys.modules = original_modules
+
+    def testDefinitionName_Nested(self):
+        """Test nested Enum names."""
+        class MyMessage(messages.Message):
+
+            class NestedEnum(messages.Enum):
+
+                pass
+
+            class NestedMessage(messages.Message):
+
+                class NestedEnum(messages.Enum):
+
+                    pass
+
+        module_name = test_util.get_module_name(EnumTest)
+        self.assertEquals('%s.MyMessage.NestedEnum' % module_name,
+                          MyMessage.NestedEnum.definition_name())
+        self.assertEquals('%s.MyMessage' % module_name,
+                          MyMessage.NestedEnum.outer_definition_name())
+        self.assertEquals(module_name,
+                          MyMessage.NestedEnum.definition_package())
+
+        self.assertEquals(
+            '%s.MyMessage.NestedMessage.NestedEnum' % module_name,
+            MyMessage.NestedMessage.NestedEnum.definition_name())
+        self.assertEquals(
+            '%s.MyMessage.NestedMessage' % module_name,
+            MyMessage.NestedMessage.NestedEnum.outer_definition_name())
+        self.assertEquals(
+            module_name,
+            MyMessage.NestedMessage.NestedEnum.definition_package())
+
+    def testMessageDefinition(self):
+        """Test that enumeration knows its enclosing message definition."""
+        class OuterEnum(messages.Enum):
+            pass
+
+        self.assertEquals(None, OuterEnum.message_definition())
+
+        class OuterMessage(messages.Message):
+
+            class InnerEnum(messages.Enum):
+                pass
+
+        self.assertEquals(
+            OuterMessage, OuterMessage.InnerEnum.message_definition())
+
+    def testComparison(self):
+        """Test comparing various enums to different types."""
+        class Enum1(messages.Enum):
+            VAL1 = 1
+            VAL2 = 2
+
+        class Enum2(messages.Enum):
+            VAL1 = 1
+
+        self.assertEquals(Enum1.VAL1, Enum1.VAL1)
+        self.assertNotEquals(Enum1.VAL1, Enum1.VAL2)
+        self.assertNotEquals(Enum1.VAL1, Enum2.VAL1)
+        self.assertNotEquals(Enum1.VAL1, 'VAL1')
+        self.assertNotEquals(Enum1.VAL1, 1)
+        self.assertNotEquals(Enum1.VAL1, 2)
+        self.assertNotEquals(Enum1.VAL1, None)
+        self.assertNotEquals(Enum1.VAL1, Enum2.VAL1)
+
+        self.assertTrue(Enum1.VAL1 < Enum1.VAL2)
+        self.assertTrue(Enum1.VAL2 > Enum1.VAL1)
+
+        self.assertNotEquals(1, Enum2.VAL1)
+
+    def testPickle(self):
+        """Testing pickling and unpickling of Enum instances."""
+        colors = list(Color)
+        unpickled = pickle.loads(pickle.dumps(colors))
+        self.assertEquals(colors, unpickled)
+        # Unpickling shouldn't create new enum instances.
+        for i, color in enumerate(colors):
+            self.assertTrue(color is unpickled[i])
+
+
+class FieldListTest(test_util.TestCase):
+
+    def setUp(self):
+        self.integer_field = messages.IntegerField(1, repeated=True)
+
+    def testConstructor(self):
+        self.assertEquals([1, 2, 3],
+                          messages.FieldList(self.integer_field, [1, 2, 3]))
+        self.assertEquals([1, 2, 3],
+                          messages.FieldList(self.integer_field, (1, 2, 3)))
+        self.assertEquals([], messages.FieldList(self.integer_field, []))
+
+    def testNone(self):
+        self.assertRaises(TypeError, messages.FieldList,
+                          self.integer_field, None)
+
+    def testDoNotAutoConvertString(self):
+        string_field = messages.StringField(1, repeated=True)
+        self.assertRaises(messages.ValidationError,
+                          messages.FieldList, string_field, 'abc')
+
+    def testConstructorCopies(self):
+        a_list = [1, 3, 6]
+        field_list = messages.FieldList(self.integer_field, a_list)
+        self.assertFalse(a_list is field_list)
+        self.assertFalse(field_list is
+                         messages.FieldList(self.integer_field, field_list))
+
+    def testNonRepeatedField(self):
+        self.assertRaisesWithRegexpMatch(
+            messages.FieldDefinitionError,
+            'FieldList may only accept repeated fields',
+            messages.FieldList,
+            messages.IntegerField(1),
+            [])
+
+    def testConstructor_InvalidValues(self):
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            re.escape("Expected type %r "
+                      "for IntegerField, found 1 (type %r)"
+                      % (six.integer_types, str)),
+            messages.FieldList, self.integer_field, ["1", "2", "3"])
+
+    def testConstructor_Scalars(self):
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            "IntegerField is repeated. Found: 3",
+            messages.FieldList, self.integer_field, 3)
+
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            ("IntegerField is repeated. Found: "
+             "<(list[_]?|sequence)iterator object"),
+            messages.FieldList, self.integer_field, iter([1, 2, 3]))
+
+    def testSetSlice(self):
+        field_list = messages.FieldList(self.integer_field, [1, 2, 3, 4, 5])
+        field_list[1:3] = [10, 20]
+        self.assertEquals([1, 10, 20, 4, 5], field_list)
+
+    def testSetSlice_InvalidValues(self):
+        field_list = messages.FieldList(self.integer_field, [1, 2, 3, 4, 5])
+
+        def setslice():
+            field_list[1:3] = ['10', '20']
+
+        msg_re = re.escape("Expected type %r "
+                           "for IntegerField, found 10 (type %r)"
+                           % (six.integer_types, str))
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            msg_re,
+            setslice)
+
+    def testSetItem(self):
+        field_list = messages.FieldList(self.integer_field, [2])
+        field_list[0] = 10
+        self.assertEquals([10], field_list)
+
+    def testSetItem_InvalidValues(self):
+        field_list = messages.FieldList(self.integer_field, [2])
+
+        def setitem():
+            field_list[0] = '10'
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            re.escape("Expected type %r "
+                      "for IntegerField, found 10 (type %r)"
+                      % (six.integer_types, str)),
+            setitem)
+
+    def testAppend(self):
+        field_list = messages.FieldList(self.integer_field, [2])
+        field_list.append(10)
+        self.assertEquals([2, 10], field_list)
+
+    def testAppend_InvalidValues(self):
+        field_list = messages.FieldList(self.integer_field, [2])
+        field_list.name = 'a_field'
+
+        def append():
+            field_list.append('10')
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            re.escape("Expected type %r "
+                      "for IntegerField, found 10 (type %r)"
+                      % (six.integer_types, str)),
+            append)
+
+    def testExtend(self):
+        field_list = messages.FieldList(self.integer_field, [2])
+        field_list.extend([10])
+        self.assertEquals([2, 10], field_list)
+
+    def testExtend_InvalidValues(self):
+        field_list = messages.FieldList(self.integer_field, [2])
+
+        def extend():
+            field_list.extend(['10'])
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            re.escape("Expected type %r "
+                      "for IntegerField, found 10 (type %r)"
+                      % (six.integer_types, str)),
+            extend)
+
+    def testInsert(self):
+        field_list = messages.FieldList(self.integer_field, [2, 3])
+        field_list.insert(1, 10)
+        self.assertEquals([2, 10, 3], field_list)
+
+    def testInsert_InvalidValues(self):
+        field_list = messages.FieldList(self.integer_field, [2, 3])
+
+        def insert():
+            field_list.insert(1, '10')
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            re.escape("Expected type %r "
+                      "for IntegerField, found 10 (type %r)"
+                      % (six.integer_types, str)),
+            insert)
+
+    def testPickle(self):
+        """Testing pickling and unpickling of FieldList instances."""
+        field_list = messages.FieldList(self.integer_field, [1, 2, 3, 4, 5])
+        unpickled = pickle.loads(pickle.dumps(field_list))
+        self.assertEquals(field_list, unpickled)
+        self.assertIsInstance(unpickled.field, messages.IntegerField)
+        self.assertEquals(1, unpickled.field.number)
+        self.assertTrue(unpickled.field.repeated)
+
+
+class FieldTest(test_util.TestCase):
+
+    def ActionOnAllFieldClasses(self, action):
+        """Test all field classes except Message and Enum.
+
+        Message and Enum require separate tests.
+
+        Args:
+          action: Callable that takes the field class as a parameter.
+        """
+        classes = (messages.IntegerField,
+                   messages.FloatField,
+                   messages.BooleanField,
+                   messages.BytesField,
+                   messages.StringField)
+        for field_class in classes:
+            action(field_class)
+
+    def testNumberAttribute(self):
+        """Test setting the number attribute."""
+        def action(field_class):
+            # Check range.
+            self.assertRaises(messages.InvalidNumberError,
+                              field_class,
+                              0)
+            self.assertRaises(messages.InvalidNumberError,
+                              field_class,
+                              -1)
+            self.assertRaises(messages.InvalidNumberError,
+                              field_class,
+                              messages.MAX_FIELD_NUMBER + 1)
+
+            # Check reserved.
+            self.assertRaises(messages.InvalidNumberError,
+                              field_class,
+                              messages.FIRST_RESERVED_FIELD_NUMBER)
+            self.assertRaises(messages.InvalidNumberError,
+                              field_class,
+                              messages.LAST_RESERVED_FIELD_NUMBER)
+            self.assertRaises(messages.InvalidNumberError,
+                              field_class,
+                              '1')
+
+            # This one should work.
+            field_class(number=1)
+        self.ActionOnAllFieldClasses(action)
+
+    def testRequiredAndRepeated(self):
+        """Test setting the required and repeated fields."""
+        def action(field_class):
+            field_class(1, required=True)
+            field_class(1, repeated=True)
+            self.assertRaises(messages.FieldDefinitionError,
+                              field_class,
+                              1,
+                              required=True,
+                              repeated=True)
+        self.ActionOnAllFieldClasses(action)
+
+    def testInvalidVariant(self):
+        """Test field with invalid variants."""
+        def action(field_class):
+            if field_class is not message_types.DateTimeField:
+                self.assertRaises(messages.InvalidVariantError,
+                                  field_class,
+                                  1,
+                                  variant=messages.Variant.ENUM)
+        self.ActionOnAllFieldClasses(action)
+
+    def testDefaultVariant(self):
+        """Test that default variant is used when not set."""
+        def action(field_class):
+            field = field_class(1)
+            self.assertEquals(field_class.DEFAULT_VARIANT, field.variant)
+
+        self.ActionOnAllFieldClasses(action)
+
    def testAlternateVariant(self):
        """Test that an explicitly requested variant overrides the default."""
        field = messages.IntegerField(1, variant=messages.Variant.UINT32)
        self.assertEquals(messages.Variant.UINT32, field.variant)
+
    def testDefaultFields_Single(self):
        """Test default field is correct type (single)."""
        # One representative valid default per scalar field class.
        defaults = {
            messages.IntegerField: 10,
            messages.FloatField: 1.5,
            messages.BooleanField: False,
            messages.BytesField: b'abc',
            messages.StringField: u'abc',
        }

        def action(field_class):
            field_class(1, default=defaults[field_class])
        self.ActionOnAllFieldClasses(action)

        # Run defaults test again checking for str/unicode compatibility.
        defaults[messages.StringField] = 'abc'
        self.ActionOnAllFieldClasses(action)
+
    def testStringField_BadUnicodeInDefault(self):
        """Test binary values in string field."""
        # b'\x89' cannot be decoded as ASCII, so it must be rejected as a
        # StringField default with a descriptive error message.
        self.assertRaisesWithRegexpMatch(
            messages.InvalidDefaultError,
            r"Invalid default value for StringField:.*: "
            r"Field encountered non-ASCII string .*: "
            r"'ascii' codec can't decode byte 0x89 in position 0: "
            r"ordinal not in range",
            messages.StringField, 1, default=b'\x89')
+
+    def testDefaultFields_InvalidSingle(self):
+        """Test default field is correct type (invalid single)."""
+        def action(field_class):
+            self.assertRaises(messages.InvalidDefaultError,
+                              field_class,
+                              1,
+                              default=object())
+        self.ActionOnAllFieldClasses(action)
+
    def testDefaultFields_InvalidRepeated(self):
        """Test that repeated fields may not be given default values."""
        self.assertRaisesWithRegexpMatch(
            messages.FieldDefinitionError,
            'Repeated fields may not have defaults',
            messages.StringField, 1, repeated=True, default=[1, 2, 3])
+
+    def testDefaultFields_None(self):
+        """Test none is always acceptable."""
+        def action(field_class):
+            field_class(1, default=None)
+            field_class(1, required=True, default=None)
+            field_class(1, repeated=True, default=None)
+        self.ActionOnAllFieldClasses(action)
+
    def testDefaultFields_Enum(self):
        """Test the default for enum fields."""
        class Symbol(messages.Enum):

            ALPHA = 1
            BETA = 2
            GAMMA = 3

        # An enum value of the field's own type is a valid default.
        field = messages.EnumField(Symbol, 1, default=Symbol.ALPHA)

        self.assertEquals(Symbol.ALPHA, field.default)
+
    def testDefaultFields_EnumStringDelayedResolution(self):
        """Test that enum fields resolve default strings."""
        # The enum type is referenced by dotted name, so the string default
        # can only be resolved lazily, once the type itself is resolved.
        field = messages.EnumField(
            'apitools.base.protorpclite.descriptor.FieldDescriptor.Label',
            1,
            default='OPTIONAL')

        self.assertEquals(
            descriptor.FieldDescriptor.Label.OPTIONAL, field.default)
+
    def testDefaultFields_EnumIntDelayedResolution(self):
        """Test that enum fields resolve default integers."""
        # The numeric default 2 resolves to the enum value with that number
        # (Label.REQUIRED) once the forward-referenced type is resolved.
        field = messages.EnumField(
            'apitools.base.protorpclite.descriptor.FieldDescriptor.Label',
            1,
            default=2)

        self.assertEquals(
            descriptor.FieldDescriptor.Label.REQUIRED, field.default)
+
    def testDefaultFields_EnumOkIfTypeKnown(self):
        """Test enum fields accept valid default values when type is known."""
        # With a concrete enum type (no forward reference), a string default
        # is resolved immediately.
        field = messages.EnumField(descriptor.FieldDescriptor.Label,
                                   1,
                                   default='REPEATED')

        self.assertEquals(
            descriptor.FieldDescriptor.Label.REPEATED, field.default)
+
    def testDefaultFields_EnumForceCheckIfTypeKnown(self):
        """Test that enum fields validate default values if type is known."""
        # Unlike the delayed-resolution case, a bad default fails already at
        # field construction when the enum type is concrete.
        self.assertRaisesWithRegexpMatch(TypeError,
                                         'No such value for NOT_A_LABEL in '
                                         'Enum Label',
                                         messages.EnumField,
                                         descriptor.FieldDescriptor.Label,
                                         1,
                                         default='NOT_A_LABEL')
+
    def testDefaultFields_EnumInvalidDelayedResolution(self):
        """Test that enum fields raise errors upon delayed resolution error."""
        # Construction succeeds because resolution of the forward-referenced
        # type (and hence of the default) is deferred...
        field = messages.EnumField(
            'apitools.base.protorpclite.descriptor.FieldDescriptor.Label',
            1,
            default=200)

        # ...but reading .default forces resolution, which then fails.
        self.assertRaisesWithRegexpMatch(TypeError,
                                         'No such value for 200 in Enum Label',
                                         getattr,
                                         field,
                                         'default')
+
    def testValidate_Valid(self):
        """Test validation of valid values."""
        # One representative valid value per scalar field class.
        values = {
            messages.IntegerField: 10,
            messages.FloatField: 1.5,
            messages.BooleanField: False,
            messages.BytesField: b'abc',
            messages.StringField: u'abc',
        }

        def action(field_class):
            # Optional.
            field = field_class(1)
            field.validate(values[field_class])

            # Required.
            field = field_class(1, required=True)
            field.validate(values[field_class])

            # Repeated.
            field = field_class(1, repeated=True)
            field.validate([])
            field.validate(())
            field.validate([values[field_class]])
            field.validate((values[field_class],))

            # Right value, but not repeated.
            # NOTE(review): the two assertions below are byte-identical; the
            # second looks like a copy-paste leftover — confirm intent.
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              values[field_class])
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              values[field_class])

        self.ActionOnAllFieldClasses(action)
+
    def testValidate_Invalid(self):
        """Test validation of invalid values."""
        # One representative *invalid* value per scalar field class.
        values = {
            messages.IntegerField: "10",
            messages.FloatField: "blah",
            messages.BooleanField: 0,
            messages.BytesField: 10.20,
            messages.StringField: 42,
        }

        def action(field_class):
            # Optional.
            field = field_class(1)
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              values[field_class])

            # Required.
            field = field_class(1, required=True)
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              values[field_class])

            # Repeated.
            field = field_class(1, repeated=True)
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              [values[field_class]])
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              (values[field_class],))
        self.ActionOnAllFieldClasses(action)
+
    def testValidate_None(self):
        """Test that None is valid for non-required fields."""
        def action(field_class):
            # Optional: None is fine.
            field = field_class(1)
            field.validate(None)

            # Required: None means "missing" and is rejected.
            field = field_class(1, required=True)
            self.assertRaisesWithRegexpMatch(messages.ValidationError,
                                             'Required field is missing',
                                             field.validate,
                                             None)

            # Repeated: the whole field may be None, but individual
            # elements may not.
            field = field_class(1, repeated=True)
            field.validate(None)
            self.assertRaisesWithRegexpMatch(
                messages.ValidationError,
                'Repeated values for %s may '
                'not be None' % field_class.__name__,
                field.validate,
                [None])
            self.assertRaises(messages.ValidationError,
                              field.validate,
                              (None,))
        self.ActionOnAllFieldClasses(action)
+
    def testValidateElement(self):
        """Test validate_element accepts single values but never sequences."""
        values = {
            messages.IntegerField: (10, -1, 0),
            messages.FloatField: (1.5, -1.5, 3),  # for json it is all a number
            messages.BooleanField: (True, False),
            messages.BytesField: (b'abc',),
            messages.StringField: (u'abc',),
        }

        def action(field_class):
            # Optional.
            field = field_class(1)
            for value in values[field_class]:
                field.validate_element(value)

            # Required.
            field = field_class(1, required=True)
            for value in values[field_class]:
                field.validate_element(value)

            # Repeated: even on a repeated field, validate_element checks a
            # single element — sequences are rejected.
            field = field_class(1, repeated=True)
            self.assertRaises(messages.ValidationError,
                              field.validate_element,
                              [])
            self.assertRaises(messages.ValidationError,
                              field.validate_element,
                              ())
            for value in values[field_class]:
                field.validate_element(value)

            # Right value, but repeated.
            self.assertRaises(messages.ValidationError,
                              field.validate_element,
                              list(values[field_class]))  # testing list
            self.assertRaises(messages.ValidationError,
                              field.validate_element,
                              values[field_class])  # testing tuple

        self.ActionOnAllFieldClasses(action)
+
    def testValidateCastingElement(self):
        """Test numeric casting behavior of validate_element."""
        # FloatField widens ints to float...
        field = messages.FloatField(1)
        self.assertEquals(type(field.validate_element(12)), float)
        self.assertEquals(type(field.validate_element(12.0)), float)
        # ...but IntegerField does not narrow floats to int.
        field = messages.IntegerField(1)
        self.assertEquals(type(field.validate_element(12)), int)
        self.assertRaises(messages.ValidationError,
                          field.validate_element,
                          12.0)  # should fail: float is not cast to int
+
+    def testReadOnly(self):
+        """Test that objects are all read-only."""
+        def action(field_class):
+            field = field_class(10)
+            self.assertRaises(AttributeError,
+                              setattr,
+                              field,
+                              'number',
+                              20)
+            self.assertRaises(AttributeError,
+                              setattr,
+                              field,
+                              'anything_else',
+                              'whatever')
+        self.ActionOnAllFieldClasses(action)
+
    def testMessageField(self):
        """Test the construction of message fields."""
        # Non-Message types are rejected...
        self.assertRaises(messages.FieldDefinitionError,
                          messages.MessageField,
                          str,
                          10)

        # ...and so is the abstract Message base class itself.
        self.assertRaises(messages.FieldDefinitionError,
                          messages.MessageField,
                          messages.Message,
                          10)

        class MyMessage(messages.Message):
            pass

        # A concrete Message subclass is accepted and exposed via .type.
        field = messages.MessageField(MyMessage, 10)
        self.assertEquals(MyMessage, field.type)
+
    def testMessageField_ForwardReference(self):
        """Test the construction of forward reference message fields."""
        # The classes are installed as module globals so the string
        # references below can be resolved by name; the finally block
        # removes them again to avoid leaking state between tests.
        global MyMessage
        global ForwardMessage
        try:
            class MyMessage(messages.Message):

                self_reference = messages.MessageField('MyMessage', 1)
                forward = messages.MessageField('ForwardMessage', 2)
                nested = messages.MessageField(
                    'ForwardMessage.NestedMessage', 3)
                inner = messages.MessageField('Inner', 4)

                class Inner(messages.Message):

                    sibling = messages.MessageField('Sibling', 1)

                class Sibling(messages.Message):

                    pass

            class ForwardMessage(messages.Message):

                class NestedMessage(messages.Message):

                    pass

            # Self reference, plain forward reference, dotted nested
            # reference, and sibling-scope references all resolve.
            self.assertEquals(MyMessage,
                              MyMessage.field_by_name('self_reference').type)

            self.assertEquals(ForwardMessage,
                              MyMessage.field_by_name('forward').type)

            self.assertEquals(ForwardMessage.NestedMessage,
                              MyMessage.field_by_name('nested').type)

            self.assertEquals(MyMessage.Inner,
                              MyMessage.field_by_name('inner').type)

            self.assertEquals(MyMessage.Sibling,
                              MyMessage.Inner.field_by_name('sibling').type)
        finally:
            # Best-effort cleanup of the module globals created above.
            try:
                del MyMessage
                del ForwardMessage
            except:  # pylint:disable=bare-except
                pass
+
    def testMessageField_WrongType(self):
        """Test that forward referencing the wrong type raises an error."""
        # AnEnum must be a module global so the string reference resolves;
        # cleaned up in the finally block.
        global AnEnum
        try:
            class AnEnum(messages.Enum):
                pass

            class AnotherMessage(messages.Message):

                a_field = messages.MessageField('AnEnum', 1)

            # Resolution happens lazily when .type is read, so the error
            # surfaces on attribute access, not at class definition.
            self.assertRaises(messages.FieldDefinitionError,
                              getattr,
                              AnotherMessage.field_by_name('a_field'),
                              'type')
        finally:
            del AnEnum
+
    def testMessageFieldValidate(self):
        """Test validation on message field."""
        class MyMessage(messages.Message):
            pass

        class AnotherMessage(messages.Message):
            pass

        field = messages.MessageField(MyMessage, 10)
        # An instance of the declared type validates...
        field.validate(MyMessage())

        # ...an instance of any other message type does not.
        self.assertRaises(messages.ValidationError,
                          field.validate,
                          AnotherMessage())
+
    def testMessageFieldMessageType(self):
        """Test message_type property."""
        class MyMessage(messages.Message):
            pass

        class HasMessage(messages.Message):
            field = messages.MessageField(MyMessage, 1)

        # For a plain MessageField, message_type is the same as type.
        self.assertEqual(HasMessage.field.type, HasMessage.field.message_type)
+
    def testMessageFieldValueFromMessage(self):
        """Test value_from_message passes matching instances through as-is."""
        class MyMessage(messages.Message):
            pass

        class HasMessage(messages.Message):
            field = messages.MessageField(MyMessage, 1)

        instance = MyMessage()

        # Identity, not just equality: no copy is made.
        self.assertTrue(
            instance is HasMessage.field.value_from_message(instance))
+
    def testMessageFieldValueFromMessageWrongType(self):
        """Test value_from_message rejects values of the wrong type."""
        class MyMessage(messages.Message):
            pass

        class HasMessage(messages.Message):
            field = messages.MessageField(MyMessage, 1)

        self.assertRaisesWithRegexpMatch(
            messages.DecodeError,
            'Expected type MyMessage, got int: 10',
            HasMessage.field.value_from_message, 10)
+
    def testMessageFieldValueToMessage(self):
        """Test value_to_message passes matching instances through as-is."""
        class MyMessage(messages.Message):
            pass

        class HasMessage(messages.Message):
            field = messages.MessageField(MyMessage, 1)

        instance = MyMessage()

        # Identity, not just equality: no copy is made.
        self.assertTrue(
            instance is HasMessage.field.value_to_message(instance))
+
    def testMessageFieldValueToMessageWrongType(self):
        """Test value_to_message rejects values of the wrong message type."""
        class MyMessage(messages.Message):
            pass

        class MyOtherMessage(messages.Message):
            pass

        class HasMessage(messages.Message):
            field = messages.MessageField(MyMessage, 1)

        instance = MyOtherMessage()

        self.assertRaisesWithRegexpMatch(
            messages.EncodeError,
            'Expected type MyMessage, got MyOtherMessage: <MyOtherMessage>',
            HasMessage.field.value_to_message, instance)
+
    def testIntegerField_AllowLong(self):
        """Test that the integer field allows for longs."""
        # Python 2 only: long does not exist on Python 3, where int already
        # has arbitrary precision, so the check is skipped there.
        if six.PY2:
            messages.IntegerField(10, default=long(10))
+
    def testMessageFieldValidate_Initialized(self):
        """Test message field validation is independent of initialization."""
        class MyMessage(messages.Message):
            field1 = messages.IntegerField(1, required=True)

        field = messages.MessageField(MyMessage, 10)

        # Will validate messages where is_initialized() is False.
        message = MyMessage()
        field.validate(message)
        message.field1 = 20
        field.validate(message)
+
    def testEnumField(self):
        """Test the construction of enum fields."""
        # Non-Enum types are rejected...
        self.assertRaises(messages.FieldDefinitionError,
                          messages.EnumField,
                          str,
                          10)

        # ...and so is the abstract Enum base class itself.
        self.assertRaises(messages.FieldDefinitionError,
                          messages.EnumField,
                          messages.Enum,
                          10)

        class Color(messages.Enum):
            RED = 1
            GREEN = 2
            BLUE = 3

        field = messages.EnumField(Color, 10)
        self.assertEquals(Color, field.type)

        class Another(messages.Enum):
            VALUE = 1

        # A default from a *different* enum type is rejected.
        self.assertRaises(messages.InvalidDefaultError,
                          messages.EnumField,
                          Color,
                          10,
                          default=Another.VALUE)
+
    def testEnumField_ForwardReference(self):
        """Test the construction of forward reference enum fields."""
        # The classes are installed as module globals so the string
        # references below can be resolved by name; the finally block
        # removes them again to avoid leaking state between tests.
        global MyMessage
        global ForwardEnum
        global ForwardMessage
        try:
            class MyMessage(messages.Message):

                forward = messages.EnumField('ForwardEnum', 1)
                nested = messages.EnumField('ForwardMessage.NestedEnum', 2)
                inner = messages.EnumField('Inner', 3)

                class Inner(messages.Enum):
                    pass

            class ForwardEnum(messages.Enum):
                pass

            class ForwardMessage(messages.Message):

                class NestedEnum(messages.Enum):
                    pass

            # Plain forward reference, dotted nested reference, and
            # same-scope inner reference all resolve.
            self.assertEquals(ForwardEnum,
                              MyMessage.field_by_name('forward').type)

            self.assertEquals(ForwardMessage.NestedEnum,
                              MyMessage.field_by_name('nested').type)

            self.assertEquals(MyMessage.Inner,
                              MyMessage.field_by_name('inner').type)
        finally:
            # Best-effort cleanup of the module globals created above.
            try:
                del MyMessage
                del ForwardEnum
                del ForwardMessage
            except:  # pylint:disable=bare-except
                pass
+
    def testEnumField_WrongType(self):
        """Test that forward referencing the wrong type raises an error."""
        # AMessage must be a module global so the string reference resolves;
        # cleaned up in the finally block.
        global AMessage
        try:
            class AMessage(messages.Message):
                pass

            class AnotherMessage(messages.Message):

                a_field = messages.EnumField('AMessage', 1)

            # Resolution happens lazily when .type is read, so the error
            # surfaces on attribute access, not at class definition.
            self.assertRaises(messages.FieldDefinitionError,
                              getattr,
                              AnotherMessage.field_by_name('a_field'),
                              'type')
        finally:
            del AMessage
+
    def testMessageDefinition(self):
        """Test that message definition is set on fields."""
        class MyMessage(messages.Message):

            my_field = messages.StringField(1)

        # Each field can report the message class it was declared on.
        self.assertEquals(
            MyMessage,
            MyMessage.field_by_name('my_field').message_definition())
+
    def testNoneAssignment(self):
        """Test that assigning None does not change comparison."""
        class MyMessage(messages.Message):

            my_field = messages.StringField(1)

        # Explicitly assigning None is equivalent to never assigning.
        m1 = MyMessage()
        m2 = MyMessage()
        m2.my_field = None
        self.assertEquals(m1, m2)
+
    def testNonAsciiStr(self):
        """Test validation fails for non-ascii StringField values."""
        class Thing(messages.Message):
            string_field = messages.StringField(2)

        thing = Thing()
        # test_util.BINARY is presumably a non-ASCII byte-string fixture —
        # see test_util for the exact value.
        self.assertRaisesWithRegexpMatch(
            messages.ValidationError,
            'Field string_field encountered non-ASCII string',
            setattr, thing, 'string_field', test_util.BINARY)
+
+
+class MessageTest(test_util.TestCase):
+    """Tests for message class."""
+
    def CreateMessageClass(self):
        """Creates a simple message class with 3 fields.

        Fields are defined in alphabetical order but with conflicting numeric
        order.

        Returns:
            A new ComplexMessage subclass of messages.Message.
        """
        class ComplexMessage(messages.Message):
            a3 = messages.IntegerField(3)
            b1 = messages.StringField(1)
            c2 = messages.StringField(2)

        return ComplexMessage
+
+    def testSameNumbers(self):
+        """Test that cannot assign two fields with same numbers."""
+
+        def action():
+            class BadMessage(messages.Message):
+                f1 = messages.IntegerField(1)
+                f2 = messages.IntegerField(1)
+        self.assertRaises(messages.DuplicateNumberError,
+                          action)
+
    def testStrictAssignment(self):
        """Tests that cannot assign to unknown or non-reserved attributes."""
        class SimpleMessage(messages.Message):
            field = messages.IntegerField(1)

        # Instances only accept attributes corresponding to declared fields.
        simple_message = SimpleMessage()
        self.assertRaises(AttributeError,
                          setattr,
                          simple_message,
                          'does_not_exist',
                          10)
+
+    def testListAssignmentDoesNotCopy(self):
+        class SimpleMessage(messages.Message):
+            repeated = messages.IntegerField(1, repeated=True)
+
+        message = SimpleMessage()
+        original = message.repeated
+        message.repeated = []
+        self.assertFalse(original is message.repeated)
+
    def testValidate_Optional(self):
        """Tests validation of optional fields."""
        class SimpleMessage(messages.Message):
            non_required = messages.IntegerField(1)

        # An optional field passes check_initialized() both unset and set.
        simple_message = SimpleMessage()
        simple_message.check_initialized()
        simple_message.non_required = 10
        simple_message.check_initialized()
+
    def testValidate_Required(self):
        """Tests validation of required fields."""
        class SimpleMessage(messages.Message):
            required = messages.IntegerField(1, required=True)

        # Unset required field fails check_initialized()...
        simple_message = SimpleMessage()
        self.assertRaises(messages.ValidationError,
                          simple_message.check_initialized)
        # ...and passes once the field is assigned.
        simple_message.required = 10
        simple_message.check_initialized()
+
    def testValidate_Repeated(self):
        """Tests validation of repeated fields."""
        class SimpleMessage(messages.Message):
            repeated = messages.IntegerField(1, repeated=True)

        simple_message = SimpleMessage()

        # Check valid values: lists and tuples of ints, including empty.
        for valid_value in [], [10], [10, 20], (), (10,), (10, 20):
            simple_message.repeated = valid_value
            simple_message.check_initialized()

        # Check cleared.
        simple_message.repeated = []
        simple_message.check_initialized()

        # Check invalid values: non-sequence, wrong element type, and None
        # elements are all rejected at assignment time.
        for invalid_value in 10, ['10', '20'], [None], (None,):
            self.assertRaises(
                messages.ValidationError,
                setattr, simple_message, 'repeated', invalid_value)
+
+    def testIsInitialized(self):
+        """Tests is_initialized."""
+        class SimpleMessage(messages.Message):
+            required = messages.IntegerField(1, required=True)
+
+        simple_message = SimpleMessage()
+        self.assertFalse(simple_message.is_initialized())
+
+        simple_message.required = 10
+
+        self.assertTrue(simple_message.is_initialized())
+
    def testIsInitializedNestedField(self):
        """Tests is_initialized for nested fields."""
        class SimpleMessage(messages.Message):
            required = messages.IntegerField(1, required=True)

        class NestedMessage(messages.Message):
            simple = messages.MessageField(SimpleMessage, 1)

        # An uninitialized nested message makes the outer message
        # uninitialized too.
        simple_message = SimpleMessage()
        self.assertFalse(simple_message.is_initialized())
        nested_message = NestedMessage(simple=simple_message)
        self.assertFalse(nested_message.is_initialized())

        # Initializing the inner message propagates outward — the outer
        # message holds a reference, not a copy.
        simple_message.required = 10

        self.assertTrue(simple_message.is_initialized())
        self.assertTrue(nested_message.is_initialized())
+
    def testInitializeNestedFieldFromDict(self):
        """Tests initializing nested fields from dict."""
        class SimpleMessage(messages.Message):
            required = messages.IntegerField(1, required=True)

        class NestedMessage(messages.Message):
            simple = messages.MessageField(SimpleMessage, 1)

        class RepeatedMessage(messages.Message):
            simple = messages.MessageField(SimpleMessage, 1, repeated=True)

        # A dict is converted to the nested message type, both via the
        # constructor and via attribute assignment.
        nested_message1 = NestedMessage(simple={'required': 10})
        self.assertTrue(nested_message1.is_initialized())
        self.assertTrue(nested_message1.simple.is_initialized())

        nested_message2 = NestedMessage()
        nested_message2.simple = {'required': 10}
        self.assertTrue(nested_message2.is_initialized())
        self.assertTrue(nested_message2.simple.is_initialized())

        # Repeated fields accept a mix of dicts and message instances; the
        # empty dict yields an uninitialized element.
        repeated_values = [{}, {'required': 10}, SimpleMessage(required=20)]

        repeated_message1 = RepeatedMessage(simple=repeated_values)
        self.assertEquals(3, len(repeated_message1.simple))
        self.assertFalse(repeated_message1.is_initialized())

        repeated_message1.simple[0].required = 0
        self.assertTrue(repeated_message1.is_initialized())

        repeated_message2 = RepeatedMessage()
        repeated_message2.simple = repeated_values
        self.assertEquals(3, len(repeated_message2.simple))
        self.assertFalse(repeated_message2.is_initialized())

        repeated_message2.simple[0].required = 0
        self.assertTrue(repeated_message2.is_initialized())
+
    def testNestedMethodsNotAllowed(self):
        """Test that method definitions on Message classes are not allowed."""
        def action():
            # The metaclass rejects any callable attribute in the body.
            class WithMethods(messages.Message):

                def not_allowed(self):
                    pass

        self.assertRaises(messages.MessageDefinitionError,
                          action)
+
    def testNestedAttributesNotAllowed(self):
        """Test attribute assignment on Message classes is not allowed."""
        # NOTE(review): enum_attribute relies on a module-level Color enum
        # that is not defined in this method — verify it exists earlier in
        # the module, otherwise a NameError would mask the intended error.
        def int_attribute():
            class WithMethods(messages.Message):
                not_allowed = 1

        def string_attribute():
            class WithMethods(messages.Message):
                not_allowed = 'not allowed'

        def enum_attribute():
            class WithMethods(messages.Message):
                not_allowed = Color.RED

        for action in (int_attribute, string_attribute, enum_attribute):
            self.assertRaises(messages.MessageDefinitionError,
                              action)
+
+    def testNameIsSetOnFields(self):
+        """Make sure name is set on fields after Message class init."""
+        class HasNamedFields(messages.Message):
+            field = messages.StringField(1)
+
+        self.assertEquals('field', HasNamedFields.field_by_number(1).name)
+
    def testSubclassingMessageDisallowed(self):
        """Not permitted to create sub-classes of message classes."""
        class SuperClass(messages.Message):
            pass

        def action():
            # Only direct subclasses of messages.Message are allowed.
            class SubClass(SuperClass):
                pass

        self.assertRaises(messages.MessageDefinitionError,
                          action)
+
+    def testAllFields(self):
+        """Test all_fields method."""
+        ComplexMessage = self.CreateMessageClass()
+        fields = list(ComplexMessage.all_fields())
+
+        # Order does not matter, so sort now.
+        fields = sorted(fields, key=lambda f: f.name)
+
+        self.assertEquals(3, len(fields))
+        self.assertEquals('a3', fields[0].name)
+        self.assertEquals('b1', fields[1].name)
+        self.assertEquals('c2', fields[2].name)
+
    def testFieldByName(self):
        """Test getting field by name."""
        ComplexMessage = self.CreateMessageClass()

        self.assertEquals(3, ComplexMessage.field_by_name('a3').number)
        self.assertEquals(1, ComplexMessage.field_by_name('b1').number)
        self.assertEquals(2, ComplexMessage.field_by_name('c2').number)

        # Unknown names raise KeyError rather than returning None.
        self.assertRaises(KeyError,
                          ComplexMessage.field_by_name,
                          'unknown')
+
    def testFieldByNumber(self):
        """Test getting field by number."""
        ComplexMessage = self.CreateMessageClass()

        self.assertEquals('a3', ComplexMessage.field_by_number(3).name)
        self.assertEquals('b1', ComplexMessage.field_by_number(1).name)
        self.assertEquals('c2', ComplexMessage.field_by_number(2).name)

        # Unknown numbers raise KeyError rather than returning None.
        self.assertRaises(KeyError,
                          ComplexMessage.field_by_number,
                          4)
+
    def testGetAssignedValue(self):
        """Test getting the assigned value of a field."""
        class SomeMessage(messages.Message):
            a_value = messages.StringField(1, default=u'a default')

        # Unassigned fields report None, not the declared default.
        message = SomeMessage()
        self.assertEquals(None, message.get_assigned_value('a_value'))

        message.a_value = u'a string'
        self.assertEquals(u'a string', message.get_assigned_value('a_value'))

        # Explicitly assigning the default value still counts as assigned.
        message.a_value = u'a default'
        self.assertEquals(u'a default', message.get_assigned_value('a_value'))

        self.assertRaisesWithRegexpMatch(
            AttributeError,
            'Message SomeMessage has no field no_such_field',
            message.get_assigned_value,
            'no_such_field')
+
    def testReset(self):
        """Test resetting a field value."""
        class SomeMessage(messages.Message):
            a_value = messages.StringField(1, default=u'a default')
            repeated = messages.IntegerField(2, repeated=True)

        message = SomeMessage()

        # Unknown field names are rejected.
        self.assertRaises(AttributeError, message.reset, 'unknown')

        # Resetting an unassigned field is a no-op: default still applies.
        self.assertEquals(u'a default', message.a_value)
        message.reset('a_value')
        self.assertEquals(u'a default', message.a_value)

        # Resetting an assigned field restores the declared default.
        message.a_value = u'a new value'
        self.assertEquals(u'a new value', message.a_value)
        message.reset('a_value')
        self.assertEquals(u'a default', message.a_value)

        # Resetting a repeated field rebinds to a new empty FieldList and
        # leaves the previously held list untouched.
        message.repeated = [1, 2, 3]
        self.assertEquals([1, 2, 3], message.repeated)
        saved = message.repeated
        message.reset('repeated')
        self.assertEquals([], message.repeated)
        self.assertIsInstance(message.repeated, messages.FieldList)
        self.assertEquals([1, 2, 3], saved)
+
+    def testAllowNestedEnums(self):
+        """Test allowing nested enums in a message definition."""
+        class Trade(messages.Message):
+
+            class Duration(messages.Enum):
+                GTC = 1
+                DAY = 2
+
+            class Currency(messages.Enum):
+                USD = 1
+                GBP = 2
+                INR = 3
+
+        # Sorted by name order seems to be the only feasible option.
+        self.assertEquals(['Currency', 'Duration'], Trade.__enums__)
+
+        # Message definition will now be set on Enumerated objects.
+        self.assertEquals(Trade, Trade.Duration.message_definition())
+
+    def testAllowNestedMessages(self):
+        """Test allowing nested messages in a message definition."""
+        class Trade(messages.Message):
+
+            class Lot(messages.Message):
+                pass
+
+            class Agent(messages.Message):
+                pass
+
+        # Sorted by name order seems to be the only feasible option.
+        self.assertEquals(['Agent', 'Lot'], Trade.__messages__)
+        self.assertEquals(Trade, Trade.Agent.message_definition())
+        self.assertEquals(Trade, Trade.Lot.message_definition())
+
+        # But not Message itself.
+        def action():
+            class Trade(messages.Message):
+                NiceTry = messages.Message
+        self.assertRaises(messages.MessageDefinitionError, action)
+
+    def testDisallowClassAssignments(self):
+        """Test setting class attributes may not happen."""
+        class MyMessage(messages.Message):
+            pass
+
+        self.assertRaises(AttributeError,
+                          setattr,
+                          MyMessage,
+                          'x',
+                          'do not assign')
+
+    def testEquality(self):
+        """Test message class equality."""
+        # Comparison against enums must work.
+        class MyEnum(messages.Enum):
+            val1 = 1
+            val2 = 2
+
+        # Comparisons against nested messages must work.
+        class AnotherMessage(messages.Message):
+            string = messages.StringField(1)
+
+        class MyMessage(messages.Message):
+            field1 = messages.IntegerField(1)
+            field2 = messages.EnumField(MyEnum, 2)
+            field3 = messages.MessageField(AnotherMessage, 3)
+
+        message1 = MyMessage()
+
+        self.assertNotEquals('hi', message1)
+        self.assertNotEquals(AnotherMessage(), message1)
+        self.assertEquals(message1, message1)
+
+        message2 = MyMessage()
+
+        self.assertEquals(message1, message2)
+
+        message1.field1 = 10
+        self.assertNotEquals(message1, message2)
+
+        message2.field1 = 20
+        self.assertNotEquals(message1, message2)
+
+        message2.field1 = 10
+        self.assertEquals(message1, message2)
+
+        message1.field2 = MyEnum.val1
+        self.assertNotEquals(message1, message2)
+
+        message2.field2 = MyEnum.val2
+        self.assertNotEquals(message1, message2)
+
+        message2.field2 = MyEnum.val1
+        self.assertEquals(message1, message2)
+
+        message1.field3 = AnotherMessage()
+        message1.field3.string = 'value1'
+        self.assertNotEquals(message1, message2)
+
+        message2.field3 = AnotherMessage()
+        message2.field3.string = 'value2'
+        self.assertNotEquals(message1, message2)
+
+        message2.field3.string = 'value1'
+        self.assertEquals(message1, message2)
+
+    def testEqualityWithUnknowns(self):
+        """Test message class equality with unknown fields."""
+
+        class MyMessage(messages.Message):
+            field1 = messages.IntegerField(1)
+
+        message1 = MyMessage()
+        message2 = MyMessage()
+        self.assertEquals(message1, message2)
+        message1.set_unrecognized_field('unknown1', 'value1',
+                                        messages.Variant.STRING)
+        self.assertEquals(message1, message2)
+
+        message1.set_unrecognized_field('unknown2', ['asdf', 3],
+                                        messages.Variant.STRING)
+        message1.set_unrecognized_field('unknown3', 4.7,
+                                        messages.Variant.DOUBLE)
+        self.assertEquals(message1, message2)
+
+    def testUnrecognizedFieldInvalidVariant(self):
+        class MyMessage(messages.Message):
+            field1 = messages.IntegerField(1)
+
+        message1 = MyMessage()
+        self.assertRaises(
+            TypeError, message1.set_unrecognized_field, 'unknown4',
+            {'unhandled': 'type'}, None)
+        self.assertRaises(
+            TypeError, message1.set_unrecognized_field, 'unknown4',
+            {'unhandled': 'type'}, 123)
+
+    def testRepr(self):
+        """Test represtation of Message object."""
+        class MyMessage(messages.Message):
+            integer_value = messages.IntegerField(1)
+            string_value = messages.StringField(2)
+            unassigned = messages.StringField(3)
+            unassigned_with_default = messages.StringField(
+                4, default=u'a default')
+
+        my_message = MyMessage()
+        my_message.integer_value = 42
+        my_message.string_value = u'A string'
+
+        pat = re.compile(r"<MyMessage\n integer_value: 42\n"
+                         " string_value: [u]?'A string'>")
+        self.assertTrue(pat.match(repr(my_message)) is not None)
+
+    def testValidation(self):
+        """Test validation of message values."""
+        # Test optional.
+        class SubMessage(messages.Message):
+            pass
+
+        class Message(messages.Message):
+            val = messages.MessageField(SubMessage, 1)
+
+        message = Message()
+
+        message_field = messages.MessageField(Message, 1)
+        message_field.validate(message)
+        message.val = SubMessage()
+        message_field.validate(message)
+        self.assertRaises(messages.ValidationError,
+                          setattr, message, 'val', [SubMessage()])
+
+        # Test required.
+        class Message(messages.Message):
+            val = messages.MessageField(SubMessage, 1, required=True)
+
+        message = Message()
+
+        message_field = messages.MessageField(Message, 1)
+        message_field.validate(message)
+        message.val = SubMessage()
+        message_field.validate(message)
+        self.assertRaises(messages.ValidationError,
+                          setattr, message, 'val', [SubMessage()])
+
+        # Test repeated.
+        class Message(messages.Message):
+            val = messages.MessageField(SubMessage, 1, repeated=True)
+
+        message = Message()
+
+        message_field = messages.MessageField(Message, 1)
+        message_field.validate(message)
+        self.assertRaisesWithRegexpMatch(
+            messages.ValidationError,
+            "Field val is repeated. Found: <SubMessage>",
+            setattr, message, 'val', SubMessage())
+        message.val = [SubMessage()]
+        message_field.validate(message)
+
+    def testDefinitionName(self):
+        """Test message name."""
+        class MyMessage(messages.Message):
+            pass
+
+        module_name = test_util.get_module_name(FieldTest)
+        self.assertEquals('%s.MyMessage' % module_name,
+                          MyMessage.definition_name())
+        self.assertEquals(module_name, MyMessage.outer_definition_name())
+        self.assertEquals(module_name, MyMessage.definition_package())
+
+        self.assertEquals(six.text_type, type(MyMessage.definition_name()))
+        self.assertEquals(six.text_type, type(
+            MyMessage.outer_definition_name()))
+        self.assertEquals(six.text_type, type(MyMessage.definition_package()))
+
+    def testDefinitionName_OverrideModule(self):
+        """Test message module is overriden by module package name."""
+        class MyMessage(messages.Message):
+            pass
+
+        global package
+        package = 'my.package'
+
+        try:
+            self.assertEquals('my.package.MyMessage',
+                              MyMessage.definition_name())
+            self.assertEquals('my.package', MyMessage.outer_definition_name())
+            self.assertEquals('my.package', MyMessage.definition_package())
+
+            self.assertEquals(six.text_type, type(MyMessage.definition_name()))
+            self.assertEquals(six.text_type, type(
+                MyMessage.outer_definition_name()))
+            self.assertEquals(six.text_type, type(
+                MyMessage.definition_package()))
+        finally:
+            del package
+
+    def testDefinitionName_NoModule(self):
+        """Test what happens when there is no module for message."""
+        class MyMessage(messages.Message):
+            pass
+
+        original_modules = sys.modules
+        sys.modules = dict(sys.modules)
+        try:
+            del sys.modules[__name__]
+            self.assertEquals('MyMessage', MyMessage.definition_name())
+            self.assertEquals(None, MyMessage.outer_definition_name())
+            self.assertEquals(None, MyMessage.definition_package())
+
+            self.assertEquals(six.text_type, type(MyMessage.definition_name()))
+        finally:
+            sys.modules = original_modules
+
+    def testDefinitionName_Nested(self):
+        """Test nested message names."""
+        class MyMessage(messages.Message):
+
+            class NestedMessage(messages.Message):
+
+                class NestedMessage(messages.Message):
+
+                    pass
+
+        module_name = test_util.get_module_name(MessageTest)
+        self.assertEquals('%s.MyMessage.NestedMessage' % module_name,
+                          MyMessage.NestedMessage.definition_name())
+        self.assertEquals('%s.MyMessage' % module_name,
+                          MyMessage.NestedMessage.outer_definition_name())
+        self.assertEquals(module_name,
+                          MyMessage.NestedMessage.definition_package())
+
+        self.assertEquals(
+            '%s.MyMessage.NestedMessage.NestedMessage' % module_name,
+            MyMessage.NestedMessage.NestedMessage.definition_name())
+        self.assertEquals(
+            '%s.MyMessage.NestedMessage' % module_name,
+            MyMessage.NestedMessage.NestedMessage.outer_definition_name())
+        self.assertEquals(
+            module_name,
+            MyMessage.NestedMessage.NestedMessage.definition_package())
+
+    def testMessageDefinition(self):
+        """Test that enumeration knows its enclosing message definition."""
+        class OuterMessage(messages.Message):
+
+            class InnerMessage(messages.Message):
+                pass
+
+        self.assertEquals(None, OuterMessage.message_definition())
+        self.assertEquals(OuterMessage,
+                          OuterMessage.InnerMessage.message_definition())
+
+    def testConstructorKwargs(self):
+        """Test kwargs via constructor."""
+        class SomeMessage(messages.Message):
+            name = messages.StringField(1)
+            number = messages.IntegerField(2)
+
+        expected = SomeMessage()
+        expected.name = 'my name'
+        expected.number = 200
+        self.assertEquals(expected, SomeMessage(name='my name', number=200))
+
+    def testConstructorNotAField(self):
+        """Test kwargs via constructor with wrong names."""
+        class SomeMessage(messages.Message):
+            pass
+
+        self.assertRaisesWithRegexpMatch(
+            AttributeError,
+            ('May not assign arbitrary value does_not_exist to message '
+             'SomeMessage'),
+            SomeMessage,
+            does_not_exist=10)
+
+    def testGetUnsetRepeatedValue(self):
+        class SomeMessage(messages.Message):
+            repeated = messages.IntegerField(1, repeated=True)
+
+        instance = SomeMessage()
+        self.assertEquals([], instance.repeated)
+        self.assertTrue(isinstance(instance.repeated, messages.FieldList))
+
+    def testCompareAutoInitializedRepeatedFields(self):
+        class SomeMessage(messages.Message):
+            repeated = messages.IntegerField(1, repeated=True)
+
+        message1 = SomeMessage(repeated=[])
+        message2 = SomeMessage()
+        self.assertEquals(message1, message2)
+
+    def testUnknownValues(self):
+        """Test message class equality with unknown fields."""
+        class MyMessage(messages.Message):
+            field1 = messages.IntegerField(1)
+
+        message = MyMessage()
+        self.assertEquals([], message.all_unrecognized_fields())
+        self.assertEquals((None, None),
+                          message.get_unrecognized_field_info('doesntexist'))
+        self.assertEquals((None, None),
+                          message.get_unrecognized_field_info(
+                              'doesntexist', None, None))
+        self.assertEquals(('defaultvalue', 'defaultwire'),
+                          message.get_unrecognized_field_info(
+                              'doesntexist', 'defaultvalue', 'defaultwire'))
+        self.assertEquals((3, None),
+                          message.get_unrecognized_field_info(
+                              'doesntexist', value_default=3))
+
+        message.set_unrecognized_field('exists', 9.5, messages.Variant.DOUBLE)
+        self.assertEquals(1, len(message.all_unrecognized_fields()))
+        self.assertTrue('exists' in message.all_unrecognized_fields())
+        self.assertEquals((9.5, messages.Variant.DOUBLE),
+                          message.get_unrecognized_field_info('exists'))
+        self.assertEquals((9.5, messages.Variant.DOUBLE),
+                          message.get_unrecognized_field_info('exists', 'type',
+                                                              1234))
+        self.assertEquals(
+            (1234, None),
+            message.get_unrecognized_field_info('doesntexist', 1234))
+
+        message.set_unrecognized_field(
+            'another', 'value', messages.Variant.STRING)
+        self.assertEquals(2, len(message.all_unrecognized_fields()))
+        self.assertTrue('exists' in message.all_unrecognized_fields())
+        self.assertTrue('another' in message.all_unrecognized_fields())
+        self.assertEquals((9.5, messages.Variant.DOUBLE),
+                          message.get_unrecognized_field_info('exists'))
+        self.assertEquals(('value', messages.Variant.STRING),
+                          message.get_unrecognized_field_info('another'))
+
+        message.set_unrecognized_field('typetest1', ['list', 0, ('test',)],
+                                       messages.Variant.STRING)
+        self.assertEquals((['list', 0, ('test',)], messages.Variant.STRING),
+                          message.get_unrecognized_field_info('typetest1'))
+        message.set_unrecognized_field(
+            'typetest2', '', messages.Variant.STRING)
+        self.assertEquals(('', messages.Variant.STRING),
+                          message.get_unrecognized_field_info('typetest2'))
+
+    def testPickle(self):
+        """Testing pickling and unpickling of Message instances."""
+        global MyEnum
+        global AnotherMessage
+        global MyMessage
+
+        class MyEnum(messages.Enum):
+            val1 = 1
+            val2 = 2
+
+        class AnotherMessage(messages.Message):
+            string = messages.StringField(1, repeated=True)
+
+        class MyMessage(messages.Message):
+            field1 = messages.IntegerField(1)
+            field2 = messages.EnumField(MyEnum, 2)
+            field3 = messages.MessageField(AnotherMessage, 3)
+
+        message = MyMessage(field1=1, field2=MyEnum.val2,
+                            field3=AnotherMessage(string=['a', 'b', 'c']))
+        message.set_unrecognized_field(
+            'exists', 'value', messages.Variant.STRING)
+        message.set_unrecognized_field('repeated', ['list', 0, ('test',)],
+                                       messages.Variant.STRING)
+        unpickled = pickle.loads(pickle.dumps(message))
+        self.assertEquals(message, unpickled)
+        self.assertTrue(AnotherMessage.string is unpickled.field3.string.field)
+        self.assertTrue('exists' in message.all_unrecognized_fields())
+        self.assertEquals(('value', messages.Variant.STRING),
+                          message.get_unrecognized_field_info('exists'))
+        self.assertEquals((['list', 0, ('test',)], messages.Variant.STRING),
+                          message.get_unrecognized_field_info('repeated'))
+
+
+class FindDefinitionTest(test_util.TestCase):
+    """Test finding definitions relative to various definitions and modules."""
+
+    def setUp(self):
+        """Set up module-space.  Starts off empty."""
+        self.modules = {}
+
+    def DefineModule(self, name):
+        """Define a module and its parents in module space.
+
+        Modules that are already defined in self.modules are not re-created.
+
+        Args:
+          name: Fully qualified name of modules to create.
+
+        Returns:
+          Deepest nested module.  For example:
+
+            DefineModule('a.b.c')  # Returns c.
+        """
+        name_path = name.split('.')
+        full_path = []
+        for node in name_path:
+            full_path.append(node)
+            full_name = '.'.join(full_path)
+            self.modules.setdefault(full_name, types.ModuleType(full_name))
+        return self.modules[name]
+
+    def DefineMessage(self, module, name, children=None, add_to_module=True):
+        """Define a new Message class in the context of a module.
+
+        Used for easily describing complex Message hierarchy. Message
+        is defined including all child definitions.
+
+        Args:
+          module: Fully qualified name of module to place Message class in.
+          name: Name of Message to define within module.
+          children: Define any level of nesting of children
+            definitions. To define a message, map the name to another
+            dictionary. The dictionary can itself contain additional
+            definitions, and so on. To map to an Enum, define the Enum
+            class separately and map it by name.
+          add_to_module: If True, new Message class is added to
+            module. If False, new Message is not added.
+
+        """
+        children = children or {}
+        # Make sure module exists.
+        module_instance = self.DefineModule(module)
+
+        # Recursively define all child messages.
+        for attribute, value in children.items():
+            if isinstance(value, dict):
+                children[attribute] = self.DefineMessage(
+                    module, attribute, value, False)
+
+        # Override default __module__ variable.
+        children['__module__'] = module
+
+        # Instantiate and possibly add to module.
+        message_class = type(name, (messages.Message,), dict(children))
+        if add_to_module:
+            setattr(module_instance, name, message_class)
+        return message_class
+
+    # pylint:disable=unused-argument
+    # pylint:disable=redefined-builtin
+    def Importer(self, module, globals='', locals='', fromlist=None):
+        """Importer function.
+
+        Acts like __import__. Only loads modules from self.modules.
+        Does not try to load real modules defined elsewhere. Does not
+        try to handle relative imports.
+
+        Args:
+          module: Fully qualified name of module to load from self.modules.
+
+        """
+        if fromlist is None:
+            module = module.split('.')[0]
+        try:
+            return self.modules[module]
+        except KeyError:
+            raise ImportError()
+    # pylint:disable=unused-argument
+
+    def testNoSuchModule(self):
+        """Test searching for definitions that do no exist."""
+        self.assertRaises(messages.DefinitionNotFoundError,
+                          messages.find_definition,
+                          'does.not.exist',
+                          importer=self.Importer)
+
+    def testRefersToModule(self):
+        """Test that referring to a module does not return that module."""
+        self.DefineModule('i.am.a.module')
+        self.assertRaises(messages.DefinitionNotFoundError,
+                          messages.find_definition,
+                          'i.am.a.module',
+                          importer=self.Importer)
+
+    def testNoDefinition(self):
+        """Test not finding a definition in an existing module."""
+        self.DefineModule('i.am.a.module')
+        self.assertRaises(messages.DefinitionNotFoundError,
+                          messages.find_definition,
+                          'i.am.a.module.MyMessage',
+                          importer=self.Importer)
+
+    def testNotADefinition(self):
+        """Test trying to fetch something that is not a definition."""
+        module = self.DefineModule('i.am.a.module')
+        setattr(module, 'A', 'a string')
+        self.assertRaises(messages.DefinitionNotFoundError,
+                          messages.find_definition,
+                          'i.am.a.module.A',
+                          importer=self.Importer)
+
+    def testGlobalFind(self):
+        """Test finding definitions from fully qualified module names."""
+        A = self.DefineMessage('a.b.c', 'A', {})
+        self.assertEquals(A, messages.find_definition('a.b.c.A',
+                                                      importer=self.Importer))
+        B = self.DefineMessage('a.b.c', 'B', {'C': {}})
+        self.assertEquals(
+            B.C,
+            messages.find_definition('a.b.c.B.C', importer=self.Importer))
+
+    def testRelativeToModule(self):
+        """Test finding definitions relative to modules."""
+        # Define modules.
+        a = self.DefineModule('a')
+        b = self.DefineModule('a.b')
+        c = self.DefineModule('a.b.c')
+
+        # Define messages.
+        A = self.DefineMessage('a', 'A')
+        B = self.DefineMessage('a.b', 'B')
+        C = self.DefineMessage('a.b.c', 'C')
+        D = self.DefineMessage('a.b.d', 'D')
+
+        # Find A, B, C and D relative to a.
+        self.assertEquals(A, messages.find_definition(
+            'A', a, importer=self.Importer))
+        self.assertEquals(B, messages.find_definition(
+            'b.B', a, importer=self.Importer))
+        self.assertEquals(C, messages.find_definition(
+            'b.c.C', a, importer=self.Importer))
+        self.assertEquals(D, messages.find_definition(
+            'b.d.D', a, importer=self.Importer))
+
+        # Find A, B, C and D relative to b.
+        self.assertEquals(A, messages.find_definition(
+            'A', b, importer=self.Importer))
+        self.assertEquals(B, messages.find_definition(
+            'B', b, importer=self.Importer))
+        self.assertEquals(C, messages.find_definition(
+            'c.C', b, importer=self.Importer))
+        self.assertEquals(D, messages.find_definition(
+            'd.D', b, importer=self.Importer))
+
+        # Find A, B, C and D relative to c.  Module d is the same case as c.
+        self.assertEquals(A, messages.find_definition(
+            'A', c, importer=self.Importer))
+        self.assertEquals(B, messages.find_definition(
+            'B', c, importer=self.Importer))
+        self.assertEquals(C, messages.find_definition(
+            'C', c, importer=self.Importer))
+        self.assertEquals(D, messages.find_definition(
+            'd.D', c, importer=self.Importer))
+
+    def testRelativeToMessages(self):
+        """Test finding definitions relative to Message definitions."""
+        A = self.DefineMessage('a.b', 'A', {'B': {'C': {}, 'D': {}}})
+        B = A.B
+        C = A.B.C
+        D = A.B.D
+
+        # Find relative to A.
+        self.assertEquals(A, messages.find_definition(
+            'A', A, importer=self.Importer))
+        self.assertEquals(B, messages.find_definition(
+            'B', A, importer=self.Importer))
+        self.assertEquals(C, messages.find_definition(
+            'B.C', A, importer=self.Importer))
+        self.assertEquals(D, messages.find_definition(
+            'B.D', A, importer=self.Importer))
+
+        # Find relative to B.
+        self.assertEquals(A, messages.find_definition(
+            'A', B, importer=self.Importer))
+        self.assertEquals(B, messages.find_definition(
+            'B', B, importer=self.Importer))
+        self.assertEquals(C, messages.find_definition(
+            'C', B, importer=self.Importer))
+        self.assertEquals(D, messages.find_definition(
+            'D', B, importer=self.Importer))
+
+        # Find relative to C.
+        self.assertEquals(A, messages.find_definition(
+            'A', C, importer=self.Importer))
+        self.assertEquals(B, messages.find_definition(
+            'B', C, importer=self.Importer))
+        self.assertEquals(C, messages.find_definition(
+            'C', C, importer=self.Importer))
+        self.assertEquals(D, messages.find_definition(
+            'D', C, importer=self.Importer))
+
+        # Find relative to C searching from c.
+        self.assertEquals(A, messages.find_definition(
+            'b.A', C, importer=self.Importer))
+        self.assertEquals(B, messages.find_definition(
+            'b.A.B', C, importer=self.Importer))
+        self.assertEquals(C, messages.find_definition(
+            'b.A.B.C', C, importer=self.Importer))
+        self.assertEquals(D, messages.find_definition(
+            'b.A.B.D', C, importer=self.Importer))
+
+    def testAbsoluteReference(self):
+        """Test finding absolute definition names."""
+        # Define modules.
+        a = self.DefineModule('a')
+        b = self.DefineModule('a.a')
+
+        # Define messages.
+        aA = self.DefineMessage('a', 'A')
+        aaA = self.DefineMessage('a.a', 'A')
+
+        # Always find a.A.
+        self.assertEquals(aA, messages.find_definition('.a.A', None,
+                                                       importer=self.Importer))
+        self.assertEquals(aA, messages.find_definition('.a.A', a,
+                                                       importer=self.Importer))
+        self.assertEquals(aA, messages.find_definition('.a.A', aA,
+                                                       importer=self.Importer))
+        self.assertEquals(aA, messages.find_definition('.a.A', aaA,
+                                                       importer=self.Importer))
+
+    def testFindEnum(self):
+        """Test that Enums are found."""
+        class Color(messages.Enum):
+            pass
+        A = self.DefineMessage('a', 'A', {'Color': Color})
+
+        self.assertEquals(
+            Color,
+            messages.find_definition('Color', A, importer=self.Importer))
+
+    def testFalseScope(self):
+        """Test Message definitions nested in strange objects are hidden."""
+        global X
+
+        class X(object):
+
+            class A(messages.Message):
+                pass
+
+        self.assertRaises(TypeError, messages.find_definition, 'A', X)
+        self.assertRaises(messages.DefinitionNotFoundError,
+                          messages.find_definition,
+                          'X.A', sys.modules[__name__])
+
+    def testSearchAttributeFirst(self):
+        """Make sure not faked out by module, but continues searching."""
+        A = self.DefineMessage('a', 'A')
+        module_A = self.DefineModule('a.A')
+
+        self.assertEquals(A, messages.find_definition(
+            'a.A', None, importer=self.Importer))
+
+
+def main():
+    unittest.main()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/apitools/base/protorpclite/protojson.py b/apitools/base/protorpclite/protojson.py
new file mode 100644
index 0000000..4c87cf4
--- /dev/null
+++ b/apitools/base/protorpclite/protojson.py
@@ -0,0 +1,367 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""JSON support for message types.
+
+Public classes:
+  MessageJSONEncoder: JSON encoder for message objects.
+
+Public functions:
+  encode_message: Encodes a message in to a JSON string.
+  decode_message: Merge from a JSON string in to a message.
+"""
+import base64
+import binascii
+import logging
+
+import six
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import util
+
+__all__ = [
+    'ALTERNATIVE_CONTENT_TYPES',
+    'CONTENT_TYPE',
+    'MessageJSONEncoder',
+    'encode_message',
+    'decode_message',
+    'ProtoJson',
+]
+
+
+def _load_json_module():
+    """Try to load a valid json module.
+
+    There are more than one json modules that might be installed.  They are
+    mostly compatible with one another but some versions may be different.
+    This function attempts to load various json modules in a preferred order.
+    It does a basic check to guess if a loaded version of json is compatible.
+
+    Returns:
+      Compatible json module.
+
+    Raises:
+      ImportError if there are no json modules or the loaded json module is
+        not compatible with ProtoRPC.
+    """
+    first_import_error = None
+    for module_name in ['json',
+                        'simplejson']:
+        try:
+            module = __import__(module_name, {}, {}, 'json')
+            if not hasattr(module, 'JSONEncoder'):
+                message = (
+                    'json library "%s" is not compatible with ProtoRPC' %
+                    module_name)
+                logging.warning(message)
+                raise ImportError(message)
+            else:
+                return module
+        except ImportError as err:
+            if not first_import_error:
+                first_import_error = err
+
+    logging.error('Must use valid json library (json or simplejson)')
+    raise first_import_error  # pylint:disable=raising-bad-type
+json = _load_json_module()
+
+
+# TODO: Rename this to MessageJsonEncoder.
+class MessageJSONEncoder(json.JSONEncoder):
+    """Message JSON encoder class.
+
+    Extension of JSONEncoder that can build JSON from a message object.
+    """
+
+    def __init__(self, protojson_protocol=None, **kwargs):
+        """Constructor.
+
+        Args:
+          protojson_protocol: ProtoJson instance.
+        """
+        super(MessageJSONEncoder, self).__init__(**kwargs)
+        self.__protojson_protocol = (
+            protojson_protocol or ProtoJson.get_default())
+
+    def default(self, value):
+        """Return dictionary instance from a message object.
+
+        Args:
+        value: Value to get dictionary for.  If not encodable, will
+          call superclasses default method.
+        """
+        if isinstance(value, messages.Enum):
+            return str(value)
+
+        if six.PY3 and isinstance(value, bytes):
+            return value.decode('utf8')
+
+        if isinstance(value, messages.Message):
+            result = {}
+            for field in value.all_fields():
+                item = value.get_assigned_value(field.name)
+                if item not in (None, [], ()):
+                    result[field.name] = (
+                        self.__protojson_protocol.encode_field(field, item))
+            # Handle unrecognized fields, so they're included when a message is
+            # decoded then encoded.
+            for unknown_key in value.all_unrecognized_fields():
+                unrecognized_field, _ = value.get_unrecognized_field_info(
+                    unknown_key)
+                result[unknown_key] = unrecognized_field
+            return result
+
+        return super(MessageJSONEncoder, self).default(value)
+
+
+class ProtoJson(object):
+    """ProtoRPC JSON implementation class.
+
+    Implementation of JSON based protocol used for serializing and
+    deserializing message objects. Instances of remote.ProtocolConfig
+    constructor or used with remote.Protocols.add_protocol. See the
+    remote.py module for more details.
+
+    """
+
+    CONTENT_TYPE = 'application/json'
+    ALTERNATIVE_CONTENT_TYPES = [
+        'application/x-javascript',
+        'text/javascript',
+        'text/x-javascript',
+        'text/x-json',
+        'text/json',
+    ]
+
+    def encode_field(self, field, value):
+        """Encode a python field value to a JSON value.
+
+        Args:
+          field: A ProtoRPC field instance.
+          value: A python value supported by field.
+
+        Returns:
+          A JSON serializable value appropriate for field.
+        """
+        if isinstance(field, messages.BytesField):
+            if field.repeated:
+                value = [base64.b64encode(byte) for byte in value]
+            else:
+                value = base64.b64encode(value)
+        elif isinstance(field, message_types.DateTimeField):
+            # DateTimeField stores its data as a RFC 3339 compliant string.
+            if field.repeated:
+                value = [i.isoformat() for i in value]
+            else:
+                value = value.isoformat()
+        return value
+
+    def encode_message(self, message):
+        """Encode Message instance to JSON string.
+
+        Args:
+          Message instance to encode in to JSON string.
+
+        Returns:
+          String encoding of Message instance in protocol JSON format.
+
+        Raises:
+          messages.ValidationError if message is not initialized.
+        """
+        message.check_initialized()
+
+        return json.dumps(message, cls=MessageJSONEncoder,
+                          protojson_protocol=self)
+
+    def decode_message(self, message_type, encoded_message):
+        """Merge JSON structure to Message instance.
+
+        Args:
+          message_type: Message to decode data to.
+          encoded_message: JSON encoded version of message.
+
+        Returns:
+          Decoded instance of message_type.
+
+        Raises:
+          ValueError: If encoded_message is not valid JSON.
+          messages.ValidationError if merged message is not initialized.
+        """
+        if not encoded_message.strip():
+            return message_type()
+
+        dictionary = json.loads(encoded_message)
+        message = self.__decode_dictionary(message_type, dictionary)
+        message.check_initialized()
+        return message
+
+    def __find_variant(self, value):
+        """Find the messages.Variant type that describes this value.
+
+        Args:
+          value: The value whose variant type is being determined.
+
+        Returns:
+          The messages.Variant value that best describes value's type,
+          or None if it's a type we don't know how to handle.
+
+        """
+        if isinstance(value, bool):
+            return messages.Variant.BOOL
+        elif isinstance(value, six.integer_types):
+            return messages.Variant.INT64
+        elif isinstance(value, float):
+            return messages.Variant.DOUBLE
+        elif isinstance(value, six.string_types):
+            return messages.Variant.STRING
+        elif isinstance(value, (list, tuple)):
+            # Find the most specific variant that covers all elements.
+            variant_priority = [None,
+                                messages.Variant.INT64,
+                                messages.Variant.DOUBLE,
+                                messages.Variant.STRING]
+            chosen_priority = 0
+            for v in value:
+                variant = self.__find_variant(v)
+                try:
+                    priority = variant_priority.index(variant)
+                except IndexError:
+                    priority = -1
+                if priority > chosen_priority:
+                    chosen_priority = priority
+            return variant_priority[chosen_priority]
+        # Unrecognized type.
+        return None
+
+    def __decode_dictionary(self, message_type, dictionary):
+        """Merge dictionary in to message.
+
+        Args:
+          message: Message to merge dictionary in to.
+          dictionary: Dictionary to extract information from.  Dictionary
+            is as parsed from JSON.  Nested objects will also be dictionaries.
+        """
+        message = message_type()
+        for key, value in six.iteritems(dictionary):
+            if value is None:
+                try:
+                    message.reset(key)
+                except AttributeError:
+                    pass  # This is an unrecognized field, skip it.
+                continue
+
+            try:
+                field = message.field_by_name(key)
+            except KeyError:
+                # Save unknown values.
+                variant = self.__find_variant(value)
+                if variant:
+                    message.set_unrecognized_field(key, value, variant)
+                continue
+
+            if field.repeated:
+                # This should be unnecessary? Or in fact become an error.
+                if not isinstance(value, list):
+                    value = [value]
+                valid_value = [self.decode_field(field, item)
+                               for item in value]
+                setattr(message, field.name, valid_value)
+            else:
+                # This is just for consistency with the old behavior.
+                if value == []:
+                    continue
+                setattr(message, field.name, self.decode_field(field, value))
+
+        return message
+
+    def decode_field(self, field, value):
+        """Decode a JSON value to a python value.
+
+        Args:
+          field: A ProtoRPC field instance.
+          value: A serialized JSON value.
+
+        Return:
+          A Python value compatible with field.
+        """
+        if isinstance(field, messages.EnumField):
+            try:
+                return field.type(value)
+            except TypeError:
+                raise messages.DecodeError(
+                    'Invalid enum value "%s"' % (value or ''))
+
+        elif isinstance(field, messages.BytesField):
+            try:
+                return base64.b64decode(value)
+            except (binascii.Error, TypeError) as err:
+                raise messages.DecodeError('Base64 decoding error: %s' % err)
+
+        elif isinstance(field, message_types.DateTimeField):
+            try:
+                return util.decode_datetime(value)
+            except ValueError as err:
+                raise messages.DecodeError(err)
+
+        elif (isinstance(field, messages.MessageField) and
+              issubclass(field.type, messages.Message)):
+            return self.__decode_dictionary(field.type, value)
+
+        elif (isinstance(field, messages.FloatField) and
+              isinstance(value, (six.integer_types, six.string_types))):
+            try:
+                return float(value)
+            except:  # pylint:disable=bare-except
+                pass
+
+        elif (isinstance(field, messages.IntegerField) and
+              isinstance(value, six.string_types)):
+            try:
+                return int(value)
+            except:  # pylint:disable=bare-except
+                pass
+
+        return value
+
+    @staticmethod
+    def get_default():
+        """Get default instanceof ProtoJson."""
+        try:
+            return ProtoJson.__default
+        except AttributeError:
+            ProtoJson.__default = ProtoJson()
+            return ProtoJson.__default
+
+    @staticmethod
+    def set_default(protocol):
+        """Set the default instance of ProtoJson.
+
+        Args:
+          protocol: A ProtoJson instance.
+        """
+        if not isinstance(protocol, ProtoJson):
+            raise TypeError('Expected protocol of type ProtoJson')
+        ProtoJson.__default = protocol
+
+# Module-level aliases preserved for compatibility with the protorpc API.
+CONTENT_TYPE = ProtoJson.CONTENT_TYPE
+
+ALTERNATIVE_CONTENT_TYPES = ProtoJson.ALTERNATIVE_CONTENT_TYPES
+
+# NOTE: these bind the methods of whichever instance is default at import
+# time; a later ProtoJson.set_default() does not rebind these aliases.
+encode_message = ProtoJson.get_default().encode_message
+
+decode_message = ProtoJson.get_default().decode_message
diff --git a/apitools/base/protorpclite/protojson_test.py b/apitools/base/protorpclite/protojson_test.py
new file mode 100644
index 0000000..4e4702a
--- /dev/null
+++ b/apitools/base/protorpclite/protojson_test.py
@@ -0,0 +1,464 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Tests for apitools.base.protorpclite.protojson."""
+import datetime
+import json
+import unittest
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import protojson
+from apitools.base.protorpclite import test_util
+
+
+class CustomField(messages.MessageField):
+    """Custom MessageField class."""
+
+    # Accepts plain ints as values while serializing as a (void) message,
+    # exercising the custom value<->message conversion path.
+    type = int
+    message_type = message_types.VoidMessage
+
+    def __init__(self, number, **kwargs):
+        # Only the field number varies; the message type is fixed above.
+        super(CustomField, self).__init__(self.message_type, number, **kwargs)
+
+    def value_to_message(self, value):
+        # The int value itself is discarded; an empty VoidMessage results.
+        return self.message_type()  # pylint:disable=not-callable
+
+
+class MyMessage(messages.Message):
+    """Test message containing various types."""
+
+    class Color(messages.Enum):
+        # Simple enum exercised by the an_enum field below.
+
+        RED = 1
+        GREEN = 2
+        BLUE = 3
+
+    class Nested(messages.Message):
+        # Minimal nested message for MessageField round-trips.
+
+        nested_value = messages.StringField(1)
+
+    # One field per field kind: scalar, enum, nested, repeated, datetime
+    # and custom variants.  Field numbers must stay stable.
+    a_string = messages.StringField(2)
+    an_integer = messages.IntegerField(3)
+    a_float = messages.FloatField(4)
+    a_boolean = messages.BooleanField(5)
+    an_enum = messages.EnumField(Color, 6)
+    a_nested = messages.MessageField(Nested, 7)
+    a_repeated = messages.IntegerField(8, repeated=True)
+    a_repeated_float = messages.FloatField(9, repeated=True)
+    a_datetime = message_types.DateTimeField(10)
+    a_repeated_datetime = message_types.DateTimeField(11, repeated=True)
+    a_custom = CustomField(12)
+    a_repeated_custom = CustomField(13, repeated=True)
+
+
+class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
+                          test_util.TestCase):
+    # Verifies protojson's public interface matches its __all__ declaration.
+
+    MODULE = protojson
+
+
+# TODO(rafek): Convert this test to the compliance test in test_util.
+class ProtojsonTest(test_util.TestCase,
+                    test_util.ProtoConformanceTestBase):
+    """Test JSON encoding and decoding."""
+
+    # Protocol module under test, read by the conformance base class.
+    PROTOLIB = protojson
+
+    def CompareEncoded(self, expected_encoded, actual_encoded):
+        """JSON encoding will be laundered to remove string differences."""
+        self.assertEquals(json.loads(expected_encoded),
+                          json.loads(actual_encoded))
+
+    # Canned encodings consumed by the conformance tests in the base class.
+    encoded_empty_message = '{}'
+
+    encoded_partial = """{
+    "double_value": 1.23,
+    "int64_value": -100000000000,
+    "int32_value": 1020,
+    "string_value": "a string",
+    "enum_value": "VAL2"
+    }
+    """
+
+    # pylint:disable=anomalous-unicode-escape-in-string
+    encoded_full = """{
+    "double_value": 1.23,
+    "float_value": -2.5,
+    "int64_value": -100000000000,
+    "uint64_value": 102020202020,
+    "int32_value": 1020,
+    "bool_value": true,
+    "string_value": "a string\u044f",
+    "bytes_value": "YSBieXRlc//+",
+    "enum_value": "VAL2"
+    }
+    """
+
+    encoded_repeated = """{
+    "double_value": [1.23, 2.3],
+    "float_value": [-2.5, 0.5],
+    "int64_value": [-100000000000, 20],
+    "uint64_value": [102020202020, 10],
+    "int32_value": [1020, 718],
+    "bool_value": [true, false],
+    "string_value": ["a string\u044f", "another string"],
+    "bytes_value": ["YSBieXRlc//+", "YW5vdGhlciBieXRlcw=="],
+    "enum_value": ["VAL2", "VAL1"]
+    }
+    """
+
+    encoded_nested = """{
+    "nested": {
+      "a_value": "a string"
+    }
+    }
+    """
+
+    encoded_repeated_nested = """{
+    "repeated_nested": [{"a_value": "a string"},
+                        {"a_value": "another string"}]
+    }
+    """
+
+    unexpected_tag_message = '{"unknown": "value"}'
+
+    encoded_default_assigned = '{"a_value": "a default"}'
+
+    encoded_nested_empty = '{"nested": {}}'
+
+    encoded_repeated_nested_empty = '{"repeated_nested": [{}, {}]}'
+
+    encoded_extend_message = '{"int64_value": [400, 50, 6000]}'
+
+    encoded_string_types = '{"string_value": "Latin"}'
+
+    encoded_invalid_enum = '{"enum_value": "undefined"}'
+
+    def testConvertIntegerToFloat(self):
+        """Test that integers passed in to float fields are converted.
+
+        This is necessary because JSON outputs integers for numbers
+        with 0 decimals.
+
+        """
+        message = protojson.decode_message(MyMessage, '{"a_float": 10}')
+
+        self.assertTrue(isinstance(message.a_float, float))
+        self.assertEquals(10.0, message.a_float)
+
+    def testConvertStringToNumbers(self):
+        """Test that strings passed to integer fields are converted."""
+        message = protojson.decode_message(MyMessage,
+                                           """{"an_integer": "10",
+                                           "a_float": "3.5",
+                                           "a_repeated": ["1", "2"],
+                                           "a_repeated_float": ["1.5", "2", 10]
+                                           }""")
+
+        self.assertEquals(MyMessage(an_integer=10,
+                                    a_float=3.5,
+                                    a_repeated=[1, 2],
+                                    a_repeated_float=[1.5, 2.0, 10.0]),
+                          message)
+
+    def testWrongTypeAssignment(self):
+        """Test when wrong type is assigned to a field."""
+        self.assertRaises(messages.ValidationError,
+                          protojson.decode_message,
+                          MyMessage, '{"a_string": 10}')
+        self.assertRaises(messages.ValidationError,
+                          protojson.decode_message,
+                          MyMessage, '{"an_integer": 10.2}')
+        self.assertRaises(messages.ValidationError,
+                          protojson.decode_message,
+                          MyMessage, '{"an_integer": "10.2"}')
+
+    def testNumericEnumeration(self):
+        """Test that numbers work for enum values."""
+        message = protojson.decode_message(MyMessage, '{"an_enum": 2}')
+
+        expected_message = MyMessage()
+        expected_message.an_enum = MyMessage.Color.GREEN
+
+        self.assertEquals(expected_message, message)
+
+    def testNumericEnumerationNegativeTest(self):
+        """Test with an invalid number for the enum value."""
+        self.assertRaisesRegexp(
+            messages.DecodeError,
+            'Invalid enum value "89"',
+            protojson.decode_message,
+            MyMessage,
+            '{"an_enum": 89}')
+
+    def testAlphaEnumeration(self):
+        """Test that alpha enum values work."""
+        message = protojson.decode_message(MyMessage, '{"an_enum": "RED"}')
+
+        expected_message = MyMessage()
+        expected_message.an_enum = MyMessage.Color.RED
+
+        self.assertEquals(expected_message, message)
+
+    def testAlphaEnumerationNegativeTest(self):
+        """The alpha enum value is invalid."""
+        self.assertRaisesRegexp(
+            messages.DecodeError,
+            'Invalid enum value "IAMINVALID"',
+            protojson.decode_message,
+            MyMessage,
+            '{"an_enum": "IAMINVALID"}')
+
+    def testEnumerationNegativeTestWithEmptyString(self):
+        """The enum value is an empty string."""
+        self.assertRaisesRegexp(
+            messages.DecodeError,
+            'Invalid enum value ""',
+            protojson.decode_message,
+            MyMessage,
+            '{"an_enum": ""}')
+
+    def testNullValues(self):
+        """Test that null values overwrite existing values."""
+        self.assertEquals(MyMessage(),
+                          protojson.decode_message(MyMessage,
+                                                   ('{"an_integer": null,'
+                                                    ' "a_nested": null,'
+                                                    ' "an_enum": null'
+                                                    '}')))
+
+    def testEmptyList(self):
+        """Test that empty lists are ignored."""
+        self.assertEquals(MyMessage(),
+                          protojson.decode_message(MyMessage,
+                                                   '{"a_repeated": []}'))
+
+    def testNotJSON(self):
+        """Test error when string is not valid JSON."""
+        self.assertRaises(
+            ValueError,
+            protojson.decode_message, MyMessage,
+            '{this is not json}')
+
+    def testDoNotEncodeStrangeObjects(self):
+        """Test trying to encode a strange object.
+
+        The main purpose of this test is to complete coverage. It
+        ensures that the default behavior of the JSON encoder is
+        preserved when someone tries to serialized an unexpected type.
+
+        """
+        class BogusObject(object):
+
+            def check_initialized(self):
+                pass
+
+        self.assertRaises(TypeError,
+                          protojson.encode_message,
+                          BogusObject())
+
+    def testMergeEmptyString(self):
+        """Test merging the empty or space only string."""
+        message = protojson.decode_message(test_util.OptionalMessage, '')
+        self.assertEquals(test_util.OptionalMessage(), message)
+
+        message = protojson.decode_message(test_util.OptionalMessage, ' ')
+        self.assertEquals(test_util.OptionalMessage(), message)
+
+    def testProtojsonUnrecognizedFieldName(self):
+        """Test that unrecognized fields are saved and can be accessed."""
+        decoded = protojson.decode_message(
+            MyMessage,
+            ('{"an_integer": 1, "unknown_val": 2}'))
+        self.assertEquals(decoded.an_integer, 1)
+        self.assertEquals(1, len(decoded.all_unrecognized_fields()))
+        self.assertEquals('unknown_val', decoded.all_unrecognized_fields()[0])
+        self.assertEquals((2, messages.Variant.INT64),
+                          decoded.get_unrecognized_field_info('unknown_val'))
+
+    def testProtojsonUnrecognizedFieldNumber(self):
+        """Test that unrecognized fields are saved and can be accessed."""
+        # Numeric-looking keys must be kept as strings, not converted.
+        decoded = protojson.decode_message(
+            MyMessage,
+            '{"an_integer": 1, "1001": "unknown", "-123": "negative", '
+            '"456_mixed": 2}')
+        self.assertEquals(decoded.an_integer, 1)
+        self.assertEquals(3, len(decoded.all_unrecognized_fields()))
+        self.assertFalse(1001 in decoded.all_unrecognized_fields())
+        self.assertTrue('1001' in decoded.all_unrecognized_fields())
+        self.assertEquals(('unknown', messages.Variant.STRING),
+                          decoded.get_unrecognized_field_info('1001'))
+        self.assertTrue('-123' in decoded.all_unrecognized_fields())
+        self.assertEquals(('negative', messages.Variant.STRING),
+                          decoded.get_unrecognized_field_info('-123'))
+        self.assertTrue('456_mixed' in decoded.all_unrecognized_fields())
+        self.assertEquals((2, messages.Variant.INT64),
+                          decoded.get_unrecognized_field_info('456_mixed'))
+
+    def testProtojsonUnrecognizedNull(self):
+        """Test that unrecognized fields that are None are skipped."""
+        decoded = protojson.decode_message(
+            MyMessage,
+            '{"an_integer": 1, "unrecognized_null": null}')
+        self.assertEquals(decoded.an_integer, 1)
+        self.assertEquals(decoded.all_unrecognized_fields(), [])
+
+    def testUnrecognizedFieldVariants(self):
+        """Test that unrecognized fields are mapped to the right variants."""
+        for encoded, expected_variant in (
+                ('{"an_integer": 1, "unknown_val": 2}',
+                 messages.Variant.INT64),
+                ('{"an_integer": 1, "unknown_val": 2.0}',
+                 messages.Variant.DOUBLE),
+                ('{"an_integer": 1, "unknown_val": "string value"}',
+                 messages.Variant.STRING),
+                ('{"an_integer": 1, "unknown_val": [1, 2, 3]}',
+                 messages.Variant.INT64),
+                ('{"an_integer": 1, "unknown_val": [1, 2.0, 3]}',
+                 messages.Variant.DOUBLE),
+                ('{"an_integer": 1, "unknown_val": [1, "foo", 3]}',
+                 messages.Variant.STRING),
+                ('{"an_integer": 1, "unknown_val": true}',
+                 messages.Variant.BOOL)):
+            decoded = protojson.decode_message(MyMessage, encoded)
+            self.assertEquals(decoded.an_integer, 1)
+            self.assertEquals(1, len(decoded.all_unrecognized_fields()))
+            self.assertEquals(
+                'unknown_val', decoded.all_unrecognized_fields()[0])
+            _, decoded_variant = decoded.get_unrecognized_field_info(
+                'unknown_val')
+            self.assertEquals(expected_variant, decoded_variant)
+
+    def testDecodeDateTime(self):
+        # Both fractional-second and whole-second forms must parse.
+        for datetime_string, datetime_vals in (
+                ('2012-09-30T15:31:50.262', (2012, 9, 30, 15, 31, 50, 262000)),
+                ('2012-09-30T15:31:50', (2012, 9, 30, 15, 31, 50, 0))):
+            message = protojson.decode_message(
+                MyMessage, '{"a_datetime": "%s"}' % datetime_string)
+            expected_message = MyMessage(
+                a_datetime=datetime.datetime(*datetime_vals))
+
+            self.assertEquals(expected_message, message)
+
+    def testDecodeInvalidDateTime(self):
+        self.assertRaises(messages.DecodeError, protojson.decode_message,
+                          MyMessage, '{"a_datetime": "invalid"}')
+
+    def testEncodeDateTime(self):
+        for datetime_string, datetime_vals in (
+                ('2012-09-30T15:31:50.262000',
+                 (2012, 9, 30, 15, 31, 50, 262000)),
+                ('2012-09-30T15:31:50.262123',
+                 (2012, 9, 30, 15, 31, 50, 262123)),
+                ('2012-09-30T15:31:50',
+                 (2012, 9, 30, 15, 31, 50, 0))):
+            decoded_message = protojson.encode_message(
+                MyMessage(a_datetime=datetime.datetime(*datetime_vals)))
+            expected_decoding = '{"a_datetime": "%s"}' % datetime_string
+            self.CompareEncoded(expected_decoding, decoded_message)
+
+    def testDecodeRepeatedDateTime(self):
+        message = protojson.decode_message(
+            MyMessage,
+            '{"a_repeated_datetime": ["2012-09-30T15:31:50.262", '
+            '"2010-01-21T09:52:00", "2000-01-01T01:00:59.999999"]}')
+        expected_message = MyMessage(
+            a_repeated_datetime=[
+                datetime.datetime(2012, 9, 30, 15, 31, 50, 262000),
+                datetime.datetime(2010, 1, 21, 9, 52),
+                datetime.datetime(2000, 1, 1, 1, 0, 59, 999999)])
+
+        self.assertEquals(expected_message, message)
+
+    def testDecodeCustom(self):
+        message = protojson.decode_message(MyMessage, '{"a_custom": 1}')
+        self.assertEquals(MyMessage(a_custom=1), message)
+
+    def testDecodeInvalidCustom(self):
+        self.assertRaises(messages.ValidationError, protojson.decode_message,
+                          MyMessage, '{"a_custom": "invalid"}')
+
+    def testEncodeCustom(self):
+        decoded_message = protojson.encode_message(MyMessage(a_custom=1))
+        self.CompareEncoded('{"a_custom": 1}', decoded_message)
+
+    def testDecodeRepeatedCustom(self):
+        message = protojson.decode_message(
+            MyMessage, '{"a_repeated_custom": [1, 2, 3]}')
+        self.assertEquals(MyMessage(a_repeated_custom=[1, 2, 3]), message)
+
+    def testDecodeRepeatedEmpty(self):
+        message = protojson.decode_message(
+            MyMessage, '{"a_repeated": []}')
+        self.assertEquals(MyMessage(a_repeated=[]), message)
+
+    def testDecodeNone(self):
+        # An empty list on a scalar field is ignored, leaving it unset.
+        message = protojson.decode_message(
+            MyMessage, '{"an_integer": []}')
+        self.assertEquals(MyMessage(an_integer=None), message)
+
+    def testDecodeBadBase64BytesField(self):
+        """Test decoding improperly encoded base64 bytes value."""
+        self.assertRaisesWithRegexpMatch(
+            messages.DecodeError,
+            'Base64 decoding error: Incorrect padding',
+            protojson.decode_message,
+            test_util.OptionalMessage,
+            '{"bytes_value": "abcdefghijklmnopq"}')
+
+
+class CustomProtoJson(protojson.ProtoJson):
+    # Tags field values on both paths so tests can tell the overridden
+    # encode_field/decode_field hooks were actually invoked.
+
+    def encode_field(self, field, value):
+        return '{encoded}' + value
+
+    def decode_field(self, field, value):
+        return '{decoded}' + value
+
+
+class CustomProtoJsonTest(test_util.TestCase):
+    """Tests for serialization overriding functionality."""
+
+    def setUp(self):
+        self.protojson = CustomProtoJson()
+
+    def testEncode(self):
+        self.assertEqual(
+            '{"a_string": "{encoded}xyz"}',
+            self.protojson.encode_message(MyMessage(a_string='xyz')))
+
+    def testDecode(self):
+        self.assertEqual(
+            MyMessage(a_string='{decoded}xyz'),
+            self.protojson.decode_message(MyMessage, '{"a_string": "xyz"}'))
+
+    def testDecodeEmptyMessage(self):
+        self.assertEqual(
+            MyMessage(a_string='{decoded}'),
+            self.protojson.decode_message(MyMessage, '{"a_string": ""}'))
+
+    def testDefault(self):
+        self.assertTrue(protojson.ProtoJson.get_default(),
+                        protojson.ProtoJson.get_default())
+
+        instance = CustomProtoJson()
+        protojson.ProtoJson.set_default(instance)
+        self.assertTrue(instance is protojson.ProtoJson.get_default())
+
+
+if __name__ == '__main__':
+    # Allow running this test module directly.
+    unittest.main()
diff --git a/apitools/base/protorpclite/test_util.py b/apitools/base/protorpclite/test_util.py
new file mode 100644
index 0000000..a86cfc7
--- /dev/null
+++ b/apitools/base/protorpclite/test_util.py
@@ -0,0 +1,644 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Test utilities for message testing.
+
+Includes module interface test to ensure that public parts of module are
+correctly declared in __all__.
+
+Includes message types that correspond to those defined in
+services_test.proto.
+
+Includes additional test utilities to make sure encoding/decoding libraries
+conform.
+"""
+import cgi
+import datetime
+import inspect
+import os
+import re
+import socket
+import types
+
+import six
+from six.moves import range  # pylint: disable=redefined-builtin
+import unittest2 as unittest
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import util
+
+# Unicode of the word "Russian" in cyrillic.
+RUSSIAN = u'\u0440\u0443\u0441\u0441\u043a\u0438\u0439'
+
+# All characters binary value interspersed with nulls.
+BINARY = b''.join(six.int2byte(value) + b'\0' for value in range(256))
+
+
+class TestCase(unittest.TestCase):
+
+    def assertRaisesWithRegexpMatch(self,
+                                    exception,
+                                    regexp,
+                                    function,
+                                    *params,
+                                    **kwargs):
+        """Check that exception is raised and text matches regular expression.
+
+        Args:
+          exception: Exception type that is expected.
+          regexp: String regular expression that is expected in error message.
+          function: Callable to test.
+          params: Parameters to forward to function.
+          kwargs: Keyword arguments to forward to function.
+        """
+        try:
+            function(*params, **kwargs)
+            self.fail('Expected exception %s was not raised' %
+                      exception.__name__)
+        except exception as err:
+            match = bool(re.match(regexp, str(err)))
+            self.assertTrue(match, 'Expected match "%s", found "%s"' % (regexp,
+                                                                        err))
+
+    def assertHeaderSame(self, header1, header2):
+        """Check that two HTTP headers are the same.
+
+        Args:
+          header1: Header value string 1.
+          header2: header value string 2.
+        """
+        value1, params1 = cgi.parse_header(header1)
+        value2, params2 = cgi.parse_header(header2)
+        self.assertEqual(value1, value2)
+        self.assertEqual(params1, params2)
+
+    def assertIterEqual(self, iter1, iter2):
+        """Check that two iterators or iterables are equal independent of order.
+
+        Similar to Python 2.7 assertItemsEqual.  Named differently in order to
+        avoid potential conflict.
+
+        Args:
+          iter1: An iterator or iterable.
+          iter2: An iterator or iterable.
+        """
+        list1 = list(iter1)
+        list2 = list(iter2)
+
+        unmatched1 = list()
+
+        while list1:
+            item1 = list1[0]
+            del list1[0]
+            for index in range(len(list2)):
+                if item1 == list2[index]:
+                    del list2[index]
+                    break
+            else:
+                unmatched1.append(item1)
+
+        error_message = []
+        for item in unmatched1:
+            error_message.append(
+                '  Item from iter1 not found in iter2: %r' % item)
+        for item in list2:
+            error_message.append(
+                '  Item from iter2 not found in iter1: %r' % item)
+        if error_message:
+            self.fail('Collections not equivalent:\n' +
+                      '\n'.join(error_message))
+
+
+class ModuleInterfaceTest(object):
+    """Test to ensure module interface is carefully constructed.
+
+    A module interface is the set of public objects listed in the
+    module __all__ attribute. Modules that are considered public
+    should have this interface carefully declared. At all times, the
+    __all__ attribute should have objects intended to be publicly
+    used and all other objects in the module should be considered
+    unused.
+
+    Protected attributes (those beginning with '_') and other imported
+    modules should not be part of this set of variables. An exception
+    is for variables that begin and end with '__' which are implicitly
+    part of the interface (eg. __name__, __file__, __all__ itself,
+    etc.).
+
+    Modules that are imported into the tested modules are an
+    exception and may be left out of the __all__ definition. The test
+    is done by checking the value of what would otherwise be a public
+    name and not allowing it to be exported if it is an instance of a
+    module. Modules that are explicitly exported are for the time
+    being not permitted.
+
+    To use this test class a module should define a new class that
+    inherits first from ModuleInterfaceTest and then from
+    test_util.TestCase. No other tests should be added to this test
+    case, making the order of inheritance less important, but if setUp
+    for some reason is overridden, it is important that
+    ModuleInterfaceTest is first in the list so that its setUp method
+    is invoked.
+
+    Multiple inheritance is required so that ModuleInterfaceTest is
+    not itself a test, and is not itself executed as one.
+
+    The test class is expected to have the following class attributes
+    defined:
+
+      MODULE: A reference to the module that is being validated for interface
+        correctness.
+
+    Example:
+      Module definition (hello.py):
+
+        import sys
+
+        __all__ = ['hello']
+
+        def _get_outputter():
+          return sys.stdout
+
+        def hello():
+          _get_outputter().write('Hello\n')
+
+      Test definition:
+
+        import unittest
+        from protorpc import test_util
+
+        import hello
+
+        class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
+                                  test_util.TestCase):
+
+          MODULE = hello
+
+
+        class HelloTest(test_util.TestCase):
+          ... Test 'hello' module ...
+
+
+        if __name__ == '__main__':
+          unittest.main()
+
+    """
+
+    def setUp(self):
+        """Set up makes sure that MODULE and IMPORTED_MODULES is defined.
+
+        This is a basic configuration test for the test itself so does not
+        get its own test case.
+        """
+        if not hasattr(self, 'MODULE'):
+            self.fail(
+                "You must define 'MODULE' on ModuleInterfaceTest sub-class "
+                "%s." % type(self).__name__)
+
+    def testAllExist(self):
+        """Test that all attributes defined in __all__ exist."""
+        missing_attributes = []
+        for attribute in self.MODULE.__all__:
+            if not hasattr(self.MODULE, attribute):
+                missing_attributes.append(attribute)
+        if missing_attributes:
+            self.fail('%s of __all__ are not defined in module.' %
+                      missing_attributes)
+
+    def testAllExported(self):
+        """Test that all public attributes not imported are in __all__."""
+        missing_attributes = []
+        for attribute in dir(self.MODULE):
+            if not attribute.startswith('_'):
+                if (attribute not in self.MODULE.__all__ and
+                        not isinstance(getattr(self.MODULE, attribute),
+                                       types.ModuleType) and
+                        attribute != 'with_statement'):
+                    missing_attributes.append(attribute)
+        if missing_attributes:
+            self.fail('%s are not modules and not defined in __all__.' %
+                      missing_attributes)
+
+    def testNoExportedProtectedVariables(self):
+        """Test that there are no protected variables listed in __all__."""
+        protected_variables = []
+        for attribute in self.MODULE.__all__:
+            if attribute.startswith('_'):
+                protected_variables.append(attribute)
+        if protected_variables:
+            self.fail('%s are protected variables and may not be exported.' %
+                      protected_variables)
+
+    def testNoExportedModules(self):
+        """Test that no modules exist in __all__."""
+        exported_modules = []
+        for attribute in self.MODULE.__all__:
+            try:
+                value = getattr(self.MODULE, attribute)
+            except AttributeError:
+                # This is a different error case tested for in testAllExist.
+                pass
+            else:
+                if isinstance(value, types.ModuleType):
+                    exported_modules.append(attribute)
+        if exported_modules:
+            self.fail('%s are modules and may not be exported.' %
+                      exported_modules)
+
+
+class NestedMessage(messages.Message):
+    """Simple message that gets nested in another message."""
+
+    a_value = messages.StringField(1, required=True)
+
+
+class HasNestedMessage(messages.Message):
+    """Message that has another message nested in it."""
+
+    nested = messages.MessageField(NestedMessage, 1)
+    repeated_nested = messages.MessageField(NestedMessage, 2, repeated=True)
+
+
+class HasDefault(messages.Message):
+    """Has a default value."""
+
+    a_value = messages.StringField(1, default=u'a default')
+
+
+class OptionalMessage(messages.Message):
+    """Contains all message types."""
+
+    class SimpleEnum(messages.Enum):
+        """Simple enumeration type."""
+        VAL1 = 1
+        VAL2 = 2
+
+    double_value = messages.FloatField(1, variant=messages.Variant.DOUBLE)
+    float_value = messages.FloatField(2, variant=messages.Variant.FLOAT)
+    int64_value = messages.IntegerField(3, variant=messages.Variant.INT64)
+    uint64_value = messages.IntegerField(4, variant=messages.Variant.UINT64)
+    int32_value = messages.IntegerField(5, variant=messages.Variant.INT32)
+    bool_value = messages.BooleanField(6, variant=messages.Variant.BOOL)
+    string_value = messages.StringField(7, variant=messages.Variant.STRING)
+    bytes_value = messages.BytesField(8, variant=messages.Variant.BYTES)
+    enum_value = messages.EnumField(SimpleEnum, 10)
+
+
+class RepeatedMessage(messages.Message):
+    """Contains all message types as repeated fields."""
+
+    class SimpleEnum(messages.Enum):
+        """Simple enumeration type."""
+        VAL1 = 1
+        VAL2 = 2
+
+    double_value = messages.FloatField(1,
+                                       variant=messages.Variant.DOUBLE,
+                                       repeated=True)
+    float_value = messages.FloatField(2,
+                                      variant=messages.Variant.FLOAT,
+                                      repeated=True)
+    int64_value = messages.IntegerField(3,
+                                        variant=messages.Variant.INT64,
+                                        repeated=True)
+    uint64_value = messages.IntegerField(4,
+                                         variant=messages.Variant.UINT64,
+                                         repeated=True)
+    int32_value = messages.IntegerField(5,
+                                        variant=messages.Variant.INT32,
+                                        repeated=True)
+    bool_value = messages.BooleanField(6,
+                                       variant=messages.Variant.BOOL,
+                                       repeated=True)
+    string_value = messages.StringField(7,
+                                        variant=messages.Variant.STRING,
+                                        repeated=True)
+    bytes_value = messages.BytesField(8,
+                                      variant=messages.Variant.BYTES,
+                                      repeated=True)
+    enum_value = messages.EnumField(SimpleEnum,
+                                    10,
+                                    repeated=True)
+
+
+class HasOptionalNestedMessage(messages.Message):
+
+    nested = messages.MessageField(OptionalMessage, 1)
+    repeated_nested = messages.MessageField(OptionalMessage, 2, repeated=True)
+
+
+# pylint:disable=anomalous-unicode-escape-in-string
+class ProtoConformanceTestBase(object):
+    """Protocol conformance test base class.
+
+    Each supported protocol should implement two methods that support encoding
+    and decoding of Message objects in that format:
+
+      encode_message(message) - Serialize to encoding.
+      decode_message(message_type, encoded_message) - Deserialize from encoding.
+
+    Tests for the modules where these functions are implemented should extend
+    this class in order to support basic behavioral expectations.  This ensures
+    that protocols correctly encode and decode message transparently to the
+    caller.
+
+    In order to support these tests, the base class should also extend
+    the TestCase class and implement the following class attributes
+    which define the encoded version of certain protocol buffers:
+
+      encoded_partial:
+        <OptionalMessage
+          double_value: 1.23
+          int64_value: -100000000000
+          string_value: u"a string"
+          enum_value: OptionalMessage.SimpleEnum.VAL2
+          >
+
+      encoded_full:
+        <OptionalMessage
+          double_value: 1.23
+          float_value: -2.5
+          int64_value: -100000000000
+          uint64_value: 102020202020
+          int32_value: 1020
+          bool_value: true
+          string_value: u"a string\u044f"
+          bytes_value: b"a bytes\xff\xfe"
+          enum_value: OptionalMessage.SimpleEnum.VAL2
+          >
+
+      encoded_repeated:
+        <RepeatedMessage
+          double_value: [1.23, 2.3]
+          float_value: [-2.5, 0.5]
+          int64_value: [-100000000000, 20]
+          uint64_value: [102020202020, 10]
+          int32_value: [1020, 718]
+          bool_value: [true, false]
+          string_value: [u"a string\u044f", u"another string"]
+          bytes_value: [b"a bytes\xff\xfe", b"another bytes"]
+          enum_value: [OptionalMessage.SimpleEnum.VAL2,
+                       OptionalMessage.SimpleEnum.VAL1]
+          >
+
+      encoded_nested:
+        <HasNestedMessage
+          nested: <NestedMessage
+            a_value: "a string"
+            >
+          >
+
+      encoded_repeated_nested:
+        <HasNestedMessage
+          repeated_nested: [
+              <NestedMessage a_value: "a string">,
+              <NestedMessage a_value: "another string">
+            ]
+          >
+
+      unexpected_tag_message:
+        An encoded message that has an undefined tag or number in the stream.
+
+      encoded_default_assigned:
+        <HasDefault
+          a_value: "a default"
+          >
+
+      encoded_nested_empty:
+        <HasOptionalNestedMessage
+          nested: <OptionalMessage>
+          >
+
+      encoded_invalid_enum:
+        <OptionalMessage
+          enum_value: (invalid value for serialization type)
+          >
+    """
+
+    encoded_empty_message = ''
+
+    def testEncodeInvalidMessage(self):
+        message = NestedMessage()
+        self.assertRaises(messages.ValidationError,
+                          self.PROTOLIB.encode_message, message)
+
+    def CompareEncoded(self, expected_encoded, actual_encoded):
+        """Compare two encoded protocol values.
+
+        Can be overridden by sub-classes to special case comparison.
+        For example, to eliminate white space from output that is not
+        relevant to encoding.
+
+        Args:
+          expected_encoded: Expected string encoded value.
+          actual_encoded: Actual string encoded value.
+        """
+        self.assertEquals(expected_encoded, actual_encoded)
+
+    def EncodeDecode(self, encoded, expected_message):
+        message = self.PROTOLIB.decode_message(type(expected_message), encoded)
+        self.assertEquals(expected_message, message)
+        self.CompareEncoded(encoded, self.PROTOLIB.encode_message(message))
+
+    def testEmptyMessage(self):
+        self.EncodeDecode(self.encoded_empty_message, OptionalMessage())
+
+    def testPartial(self):
+        """Test message with a few values set."""
+        message = OptionalMessage()
+        message.double_value = 1.23
+        message.int64_value = -100000000000
+        message.int32_value = 1020
+        message.string_value = u'a string'
+        message.enum_value = OptionalMessage.SimpleEnum.VAL2
+
+        self.EncodeDecode(self.encoded_partial, message)
+
+    def testFull(self):
+        """Test all types."""
+        message = OptionalMessage()
+        message.double_value = 1.23
+        message.float_value = -2.5
+        message.int64_value = -100000000000
+        message.uint64_value = 102020202020
+        message.int32_value = 1020
+        message.bool_value = True
+        message.string_value = u'a string\u044f'
+        message.bytes_value = b'a bytes\xff\xfe'
+        message.enum_value = OptionalMessage.SimpleEnum.VAL2
+
+        self.EncodeDecode(self.encoded_full, message)
+
+    def testRepeated(self):
+        """Test repeated fields."""
+        message = RepeatedMessage()
+        message.double_value = [1.23, 2.3]
+        message.float_value = [-2.5, 0.5]
+        message.int64_value = [-100000000000, 20]
+        message.uint64_value = [102020202020, 10]
+        message.int32_value = [1020, 718]
+        message.bool_value = [True, False]
+        message.string_value = [u'a string\u044f', u'another string']
+        message.bytes_value = [b'a bytes\xff\xfe', b'another bytes']
+        message.enum_value = [RepeatedMessage.SimpleEnum.VAL2,
+                              RepeatedMessage.SimpleEnum.VAL1]
+
+        self.EncodeDecode(self.encoded_repeated, message)
+
+    def testNested(self):
+        """Test nested messages."""
+        nested_message = NestedMessage()
+        nested_message.a_value = u'a string'
+
+        message = HasNestedMessage()
+        message.nested = nested_message
+
+        self.EncodeDecode(self.encoded_nested, message)
+
+    def testRepeatedNested(self):
+        """Test repeated nested messages."""
+        nested_message1 = NestedMessage()
+        nested_message1.a_value = u'a string'
+        nested_message2 = NestedMessage()
+        nested_message2.a_value = u'another string'
+
+        message = HasNestedMessage()
+        message.repeated_nested = [nested_message1, nested_message2]
+
+        self.EncodeDecode(self.encoded_repeated_nested, message)
+
+    def testStringTypes(self):
+        """Test that encoding str on StringField works."""
+        message = OptionalMessage()
+        message.string_value = 'Latin'
+        self.EncodeDecode(self.encoded_string_types, message)
+
+    def testEncodeUninitialized(self):
+        """Test that cannot encode uninitialized message."""
+        required = NestedMessage()
+        self.assertRaisesWithRegexpMatch(messages.ValidationError,
+                                         "Message NestedMessage is missing "
+                                         "required field a_value",
+                                         self.PROTOLIB.encode_message,
+                                         required)
+
+    def testUnexpectedField(self):
+        """Test decoding and encoding unexpected fields."""
+        loaded_message = self.PROTOLIB.decode_message(
+            OptionalMessage, self.unexpected_tag_message)
+        # Message should be equal to an empty message, since unknown
+        # values aren't included in equality.
+        self.assertEquals(OptionalMessage(), loaded_message)
+        # Verify that the encoded message matches the source, including the
+        # unknown value.
+        self.assertEquals(self.unexpected_tag_message,
+                          self.PROTOLIB.encode_message(loaded_message))
+
+    def testDoNotSendDefault(self):
+        """Test that default is not sent when nothing is assigned."""
+        self.EncodeDecode(self.encoded_empty_message, HasDefault())
+
+    def testSendDefaultExplicitlyAssigned(self):
+        """Test that default is sent when explicitly assigned."""
+        message = HasDefault()
+
+        message.a_value = HasDefault.a_value.default
+
+        self.EncodeDecode(self.encoded_default_assigned, message)
+
+    def testEncodingNestedEmptyMessage(self):
+        """Test encoding a nested empty message."""
+        message = HasOptionalNestedMessage()
+        message.nested = OptionalMessage()
+
+        self.EncodeDecode(self.encoded_nested_empty, message)
+
+    def testEncodingRepeatedNestedEmptyMessage(self):
+        """Test encoding a nested empty message."""
+        message = HasOptionalNestedMessage()
+        message.repeated_nested = [OptionalMessage(), OptionalMessage()]
+
+        self.EncodeDecode(self.encoded_repeated_nested_empty, message)
+
+    def testContentType(self):
+        self.assertTrue(isinstance(self.PROTOLIB.CONTENT_TYPE, str))
+
+    def testDecodeInvalidEnumType(self):
+        self.assertRaisesWithRegexpMatch(messages.DecodeError,
+                                         'Invalid enum value ',
+                                         self.PROTOLIB.decode_message,
+                                         OptionalMessage,
+                                         self.encoded_invalid_enum)
+
+    def testDateTimeNoTimeZone(self):
+        """Test that DateTimeFields are encoded/decoded correctly."""
+
+        class MyMessage(messages.Message):
+            value = message_types.DateTimeField(1)
+
+        value = datetime.datetime(2013, 1, 3, 11, 36, 30, 123000)
+        message = MyMessage(value=value)
+        decoded = self.PROTOLIB.decode_message(
+            MyMessage, self.PROTOLIB.encode_message(message))
+        self.assertEquals(decoded.value, value)
+
+    def testDateTimeWithTimeZone(self):
+        """Test DateTimeFields with time zones."""
+
+        class MyMessage(messages.Message):
+            value = message_types.DateTimeField(1)
+
+        value = datetime.datetime(2013, 1, 3, 11, 36, 30, 123000,
+                                  util.TimeZoneOffset(8 * 60))
+        message = MyMessage(value=value)
+        decoded = self.PROTOLIB.decode_message(
+            MyMessage, self.PROTOLIB.encode_message(message))
+        self.assertEquals(decoded.value, value)
+
+
+def pick_unused_port():
+    """Find an unused port to use in tests.
+
+      Derived from Damon Kohler's example:
+
+        http://code.activestate.com/recipes/531822-pick-unused-port
+    """
+    temp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    try:
+        temp.bind(('localhost', 0))
+        port = temp.getsockname()[1]
+    finally:
+        temp.close()
+    return port
+
+
+def get_module_name(module_attribute):
+    """Get the module name.
+
+    Args:
+      module_attribute: An attribute of the module.
+
+    Returns:
+      The fully qualified module name or simple module name where
+      'module_attribute' is defined if the module name is "__main__".
+    """
+    if module_attribute.__module__ == '__main__':
+        module_file = inspect.getfile(module_attribute)
+        default = os.path.basename(module_file).split('.')[0]
+        return default
+    return module_attribute.__module__
diff --git a/apitools/base/protorpclite/util.py b/apitools/base/protorpclite/util.py
new file mode 100644
index 0000000..7a7797d
--- /dev/null
+++ b/apitools/base/protorpclite/util.py
@@ -0,0 +1,292 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Common utility library."""
+from __future__ import with_statement
+
+import datetime
+import functools
+import inspect
+import os
+import re
+import sys
+
+import six
+
+__all__ = [
+    'Error',
+    'decode_datetime',
+    'get_package_for_module',
+    'positional',
+    'TimeZoneOffset',
+    'total_seconds',
+]
+
+
+class Error(Exception):
+    """Base class for protorpc exceptions."""
+
+
+_TIME_ZONE_RE_STRING = r"""
+  # Examples:
+  #   +01:00
+  #   -05:30
+  #   Z
+  ((?P<z>Z) | (?P<sign>[-+])
+   (?P<hours>\d\d) :
+   (?P<minutes>\d\d))$
+"""
+_TIME_ZONE_RE = re.compile(_TIME_ZONE_RE_STRING, re.IGNORECASE | re.VERBOSE)
+
+
+def positional(max_positional_args):
+    """A decorator to declare that only the first N arguments may be positional.
+
+    This decorator makes it easy to support Python 3 style keyword-only
+    parameters. For example, in Python 3 it is possible to write:
+
+      def fn(pos1, *, kwonly1=None, kwonly2=None):
+        ...
+
+    All named parameters after * must be a keyword:
+
+      fn(10, 'kw1', 'kw2')  # Raises exception.
+      fn(10, kwonly1='kw1')  # Ok.
+
+    Example:
+      To define a function like above, do:
+
+        @positional(1)
+        def fn(pos1, kwonly1=None, kwonly2=None):
+          ...
+
+      If no default value is provided to a keyword argument, it
+      becomes a required keyword argument:
+
+        @positional(0)
+        def fn(required_kw):
+          ...
+
+      This must be called with the keyword parameter:
+
+        fn()  # Raises exception.
+        fn(10)  # Raises exception.
+        fn(required_kw=10)  # Ok.
+
+      When defining instance or class methods always remember to account for
+      'self' and 'cls':
+
+        class MyClass(object):
+
+          @positional(2)
+          def my_method(self, pos1, kwonly1=None):
+            ...
+
+          @classmethod
+          @positional(2)
+          def my_method(cls, pos1, kwonly1=None):
+            ...
+
+      One can omit the argument to 'positional' altogether, and then no
+      arguments with default values may be passed positionally. This
+      would be equivalent to placing a '*' before the first argument
+      with a default value in Python 3. If there are no arguments with
+      default values, and no argument is given to 'positional', an error
+      is raised.
+
+        @positional
+        def fn(arg1, arg2, required_kw1=None, required_kw2=0):
+          ...
+
+        fn(1, 3, 5)  # Raises exception.
+        fn(1, 3)  # Ok.
+        fn(1, 3, required_kw1=5)  # Ok.
+
+    Args:
+      max_positional_args: Maximum number of positional arguments.  All
+        parameters after this index must be keyword only.
+
+    Returns:
+      A decorator that prevents using arguments after max_positional_args from
+      being used as positional parameters.
+
+    Raises:
+      TypeError if a keyword-only argument is provided as a positional
+        parameter.
+      ValueError if no maximum number of arguments is provided and the function
+        has no arguments with default values.
+    """
+    def positional_decorator(wrapped):
+        """Creates a function wrapper to enforce number of arguments."""
+        @functools.wraps(wrapped)
+        def positional_wrapper(*args, **kwargs):
+            if len(args) > max_positional_args:
+                plural_s = ''
+                if max_positional_args != 1:
+                    plural_s = 's'
+                raise TypeError('%s() takes at most %d positional argument%s '
+                                '(%d given)' % (wrapped.__name__,
+                                                max_positional_args,
+                                                plural_s, len(args)))
+            return wrapped(*args, **kwargs)
+        return positional_wrapper
+
+    if isinstance(max_positional_args, six.integer_types):
+        return positional_decorator
+    else:
+        args, _, _, defaults = inspect.getargspec(max_positional_args)
+        if defaults is None:
+            raise ValueError(
+                'Functions with no keyword arguments must specify '
+                'max_positional_args')
+        return positional(len(args) - len(defaults))(max_positional_args)
+
+
+@positional(1)
+def get_package_for_module(module):
+    """Get package name for a module.
+
+    Helper calculates the package name of a module.
+
+    Args:
+      module: Module to get name for.  If module is a string, try to find
+        module in sys.modules.
+
+    Returns:
+      If module contains 'package' attribute, uses that as package name.
+      Else, if module is not the '__main__' module, the module __name__.
+      Else, the base name of the module file name.  Else None.
+    """
+    if isinstance(module, six.string_types):
+        try:
+            module = sys.modules[module]
+        except KeyError:
+            return None
+
+    try:
+        return six.text_type(module.package)
+    except AttributeError:
+        if module.__name__ == '__main__':
+            try:
+                file_name = module.__file__
+            except AttributeError:
+                pass
+            else:
+                base_name = os.path.basename(file_name)
+                split_name = os.path.splitext(base_name)
+                if len(split_name) == 1:
+                    return six.text_type(base_name)
+                return u'.'.join(split_name[:-1])
+
+        return six.text_type(module.__name__)
+
+
+def total_seconds(offset):
+    """Backport of offset.total_seconds() from python 2.7+."""
+    seconds = offset.days * 24 * 60 * 60 + offset.seconds
+    microseconds = seconds * 10**6 + offset.microseconds
+    return microseconds / (10**6 * 1.0)
+
+
+class TimeZoneOffset(datetime.tzinfo):
+    """Time zone information as encoded/decoded for DateTimeFields."""
+
+    def __init__(self, offset):
+        """Initialize a time zone offset.
+
+        Args:
+          offset: Integer or timedelta time zone offset, in minutes from UTC.
+            This can be negative.
+        """
+        super(TimeZoneOffset, self).__init__()
+        if isinstance(offset, datetime.timedelta):
+            offset = total_seconds(offset) / 60
+        self.__offset = offset
+
+    def utcoffset(self, _):
+        """Get a timedelta with the time zone's offset from UTC.
+
+        Returns:
+          The time zone offset from UTC, as a timedelta.
+        """
+        return datetime.timedelta(minutes=self.__offset)
+
+    def dst(self, _):
+        """Get the daylight savings time offset.
+
+        The formats that ProtoRPC uses to encode/decode time zone
+        information don't contain any information about daylight
+        savings time. So this always returns a timedelta of 0.
+
+        Returns:
+          A timedelta of 0.
+
+        """
+        return datetime.timedelta(0)
+
+
+def decode_datetime(encoded_datetime):
+    """Decode a DateTimeField parameter from a string to a python datetime.
+
+    Args:
+      encoded_datetime: A string in RFC 3339 format.
+
+    Returns:
+      A datetime object with the date and time specified in encoded_datetime.
+
+    Raises:
+      ValueError: If the string is not in a recognized format.
+    """
+    # Check if the string includes a time zone offset.  Break out the
+    # part that doesn't include time zone info.  Convert to uppercase
+    # because all our comparisons should be case-insensitive.
+    time_zone_match = _TIME_ZONE_RE.search(encoded_datetime)
+    if time_zone_match:
+        time_string = encoded_datetime[:time_zone_match.start(1)].upper()
+    else:
+        time_string = encoded_datetime.upper()
+
+    if '.' in time_string:
+        format_string = '%Y-%m-%dT%H:%M:%S.%f'
+    else:
+        format_string = '%Y-%m-%dT%H:%M:%S'
+
+    decoded_datetime = datetime.datetime.strptime(time_string, format_string)
+
+    if not time_zone_match:
+        return decoded_datetime
+
+    # Time zone info was included in the parameter.  Add a tzinfo
+    # object to the datetime.  Datetimes can't be changed after they're
+    # created, so we'll need to create a new one.
+    if time_zone_match.group('z'):
+        offset_minutes = 0
+    else:
+        sign = time_zone_match.group('sign')
+        hours, minutes = [int(value) for value in
+                          time_zone_match.group('hours', 'minutes')]
+        offset_minutes = hours * 60 + minutes
+        if sign == '-':
+            offset_minutes *= -1
+
+    return datetime.datetime(decoded_datetime.year,
+                             decoded_datetime.month,
+                             decoded_datetime.day,
+                             decoded_datetime.hour,
+                             decoded_datetime.minute,
+                             decoded_datetime.second,
+                             decoded_datetime.microsecond,
+                             TimeZoneOffset(offset_minutes))
diff --git a/apitools/base/protorpclite/util_test.py b/apitools/base/protorpclite/util_test.py
new file mode 100644
index 0000000..14e7f7e
--- /dev/null
+++ b/apitools/base/protorpclite/util_test.py
@@ -0,0 +1,237 @@
+#!/usr/bin/env python
+#
+# Copyright 2010 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Tests for apitools.base.protorpclite.util."""
+import datetime
+import sys
+import types
+import unittest
+
+import six
+
+from apitools.base.protorpclite import test_util
+from apitools.base.protorpclite import util
+
+
+class ModuleInterfaceTest(test_util.ModuleInterfaceTest,
+                          test_util.TestCase):
+
+    MODULE = util
+
+
+class UtilTest(test_util.TestCase):
+
+    def testDecoratedFunction_LengthZero(self):
+        @util.positional(0)
+        def fn(kwonly=1):
+            return [kwonly]
+        self.assertEquals([1], fn())
+        self.assertEquals([2], fn(kwonly=2))
+        self.assertRaisesWithRegexpMatch(TypeError,
+                                         r'fn\(\) takes at most 0 positional '
+                                         r'arguments \(1 given\)',
+                                         fn, 1)
+
+    def testDecoratedFunction_LengthOne(self):
+        @util.positional(1)
+        def fn(pos, kwonly=1):
+            return [pos, kwonly]
+        self.assertEquals([1, 1], fn(1))
+        self.assertEquals([2, 2], fn(2, kwonly=2))
+        self.assertRaisesWithRegexpMatch(TypeError,
+                                         r'fn\(\) takes at most 1 positional '
+                                         r'argument \(2 given\)',
+                                         fn, 2, 3)
+
+    def testDecoratedFunction_LengthTwoWithDefault(self):
+        @util.positional(2)
+        def fn(pos1, pos2=1, kwonly=1):
+            return [pos1, pos2, kwonly]
+        self.assertEquals([1, 1, 1], fn(1))
+        self.assertEquals([2, 2, 1], fn(2, 2))
+        self.assertEquals([2, 3, 4], fn(2, 3, kwonly=4))
+        self.assertRaisesWithRegexpMatch(TypeError,
+                                         r'fn\(\) takes at most 2 positional '
+                                         r'arguments \(3 given\)',
+                                         fn, 2, 3, 4)
+
+    def testDecoratedMethod(self):
+        class MyClass(object):
+
+            @util.positional(2)
+            def meth(self, pos1, kwonly=1):
+                return [pos1, kwonly]
+        self.assertEquals([1, 1], MyClass().meth(1))
+        self.assertEquals([2, 2], MyClass().meth(2, kwonly=2))
+        self.assertRaisesWithRegexpMatch(
+            TypeError,
+            r'meth\(\) takes at most 2 positional arguments \(3 given\)',
+            MyClass().meth, 2, 3)
+
+    def testDefaultDecoration(self):
+        @util.positional
+        def fn(a, b, c=None):
+            return a, b, c
+        self.assertEquals((1, 2, 3), fn(1, 2, c=3))
+        self.assertEquals((3, 4, None), fn(3, b=4))
+        self.assertRaisesWithRegexpMatch(TypeError,
+                                         r'fn\(\) takes at most 2 positional '
+                                         r'arguments \(3 given\)',
+                                         fn, 2, 3, 4)
+
+    def testDefaultDecorationNoKwdsFails(self):
+        def fn(a):
+            return a
+        self.assertRaisesRegexp(
+            ValueError,
+            ('Functions with no keyword arguments must specify '
+             'max_positional_args'),
+            util.positional, fn)
+
+    def testDecoratedFunctionDocstring(self):
+        @util.positional(0)
+        def fn(kwonly=1):
+            """fn docstring."""
+            return [kwonly]
+        self.assertEquals('fn docstring.', fn.__doc__)
+
+
+class GetPackageForModuleTest(test_util.TestCase):
+
+    def setUp(self):
+        self.original_modules = dict(sys.modules)
+
+    def tearDown(self):
+        sys.modules.clear()
+        sys.modules.update(self.original_modules)
+
+    def CreateModule(self, name, file_name=None):
+        if file_name is None:
+            file_name = '%s.py' % name
+        module = types.ModuleType(name)
+        sys.modules[name] = module
+        return module
+
+    def assertPackageEquals(self, expected, actual):
+        self.assertEquals(expected, actual)
+        if actual is not None:
+            self.assertTrue(isinstance(actual, six.text_type))
+
+    def testByString(self):
+        module = self.CreateModule('service_module')
+        module.package = 'my_package'
+        self.assertPackageEquals('my_package',
+                                 util.get_package_for_module('service_module'))
+
+    def testModuleNameNotInSys(self):
+        self.assertPackageEquals(None,
+                                 util.get_package_for_module('service_module'))
+
+    def testHasPackage(self):
+        module = self.CreateModule('service_module')
+        module.package = 'my_package'
+        self.assertPackageEquals(
+            'my_package', util.get_package_for_module(module))
+
+    def testHasModuleName(self):
+        module = self.CreateModule('service_module')
+        self.assertPackageEquals('service_module',
+                                 util.get_package_for_module(module))
+
+    def testIsMain(self):
+        module = self.CreateModule('__main__')
+        module.__file__ = '/bing/blam/bloom/blarm/my_file.py'
+        self.assertPackageEquals(
+            'my_file', util.get_package_for_module(module))
+
+    def testIsMainCompiled(self):
+        module = self.CreateModule('__main__')
+        module.__file__ = '/bing/blam/bloom/blarm/my_file.pyc'
+        self.assertPackageEquals(
+            'my_file', util.get_package_for_module(module))
+
+    def testNoExtension(self):
+        module = self.CreateModule('__main__')
+        module.__file__ = '/bing/blam/bloom/blarm/my_file'
+        self.assertPackageEquals(
+            'my_file', util.get_package_for_module(module))
+
+    def testNoPackageAtAll(self):
+        module = self.CreateModule('__main__')
+        self.assertPackageEquals(
+            '__main__', util.get_package_for_module(module))
+
+
+class DateTimeTests(test_util.TestCase):
+
+    def testDecodeDateTime(self):
+        """Test that a RFC 3339 datetime string is decoded properly."""
+        for datetime_string, datetime_vals in (
+                ('2012-09-30T15:31:50.262', (2012, 9, 30, 15, 31, 50, 262000)),
+                ('2012-09-30T15:31:50', (2012, 9, 30, 15, 31, 50, 0))):
+            decoded = util.decode_datetime(datetime_string)
+            expected = datetime.datetime(*datetime_vals)
+            self.assertEquals(expected, decoded)
+
+    def testDateTimeTimeZones(self):
+        """Test that a datetime string with a timezone is decoded correctly."""
+        tests = (
+            ('2012-09-30T15:31:50.262-06:00',
+             (2012, 9, 30, 15, 31, 50, 262000, util.TimeZoneOffset(-360))),
+            ('2012-09-30T15:31:50.262+01:30',
+             (2012, 9, 30, 15, 31, 50, 262000, util.TimeZoneOffset(90))),
+            ('2012-09-30T15:31:50+00:05',
+             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(5))),
+            ('2012-09-30T15:31:50+00:00',
+             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(0))),
+            ('2012-09-30t15:31:50-00:00',
+             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(0))),
+            ('2012-09-30t15:31:50z',
+             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(0))),
+            ('2012-09-30T15:31:50-23:00',
+             (2012, 9, 30, 15, 31, 50, 0, util.TimeZoneOffset(-1380))))
+        for datetime_string, datetime_vals in tests:
+            decoded = util.decode_datetime(datetime_string)
+            expected = datetime.datetime(*datetime_vals)
+            self.assertEquals(expected, decoded)
+
+    def testDecodeDateTimeInvalid(self):
+        """Test that decoding malformed datetime strings raises execptions."""
+        for datetime_string in ('invalid',
+                                '2012-09-30T15:31:50.',
+                                '-08:00 2012-09-30T15:31:50.262',
+                                '2012-09-30T15:31',
+                                '2012-09-30T15:31Z',
+                                '2012-09-30T15:31:50ZZ',
+                                '2012-09-30T15:31:50.262 blah blah -08:00',
+                                '1000-99-99T25:99:99.999-99:99'):
+            self.assertRaises(
+                ValueError, util.decode_datetime, datetime_string)
+
+    def testTimeZoneOffsetDelta(self):
+        """Test that delta works with TimeZoneOffset."""
+        time_zone = util.TimeZoneOffset(datetime.timedelta(minutes=3))
+        epoch = time_zone.utcoffset(datetime.datetime.utcfromtimestamp(0))
+        self.assertEqual(180, util.total_seconds(epoch))
+
+
+def main():
+    unittest.main()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/apitools/base/py/__init__.py b/apitools/base/py/__init__.py
new file mode 100644
index 0000000..f0003e2
--- /dev/null
+++ b/apitools/base/py/__init__.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Top-level imports for apitools base files."""
+
+# pylint:disable=wildcard-import
+# pylint:disable=redefined-builtin
+from apitools.base.py.base_api import *
+from apitools.base.py.batch import *
+from apitools.base.py.credentials_lib import *
+from apitools.base.py.encoding import *
+from apitools.base.py.exceptions import *
+from apitools.base.py.extra_types import *
+from apitools.base.py.http_wrapper import *
+from apitools.base.py.list_pager import *
+from apitools.base.py.transfer import *
+from apitools.base.py.util import *
+
+try:
+    # pylint:disable=no-name-in-module
+    from apitools.base.py.internal import *
+except ImportError:
+    pass
diff --git a/apitools/base/py/app2.py b/apitools/base/py/app2.py
new file mode 100644
index 0000000..c0ea9e0
--- /dev/null
+++ b/apitools/base/py/app2.py
@@ -0,0 +1,373 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Appcommands-compatible command class with extra fixins."""
+from __future__ import absolute_import
+from __future__ import print_function
+
+import cmd
+import inspect
+import pdb
+import shlex
+import sys
+import traceback
+import types
+
+import gflags as flags
+from google.apputils import app
+from google.apputils import appcommands
+import six
+
+
+__all__ = [
+    'NewCmd',
+    'Repl',
+]
+
+flags.DEFINE_boolean(
+    'debug_mode', False,
+    'Show tracebacks on Python exceptions.')
+flags.DEFINE_boolean(
+    'headless', False,
+    'Assume no user is at the controlling console.')
+FLAGS = flags.FLAGS
+
+
+def _SafeMakeAscii(s):
+    if isinstance(s, six.text_type):
+        return s.encode('ascii')
+    elif isinstance(s, str):
+        return s.decode('ascii')
+    return six.text_type(s).encode('ascii', 'backslashreplace')
+
+
+class NewCmd(appcommands.Cmd):
+
+    """Featureful extension of appcommands.Cmd."""
+
+    def __init__(self, name, flag_values):
+        super(NewCmd, self).__init__(name, flag_values)
+        run_with_args = getattr(self, 'RunWithArgs', None)
+        self._new_style = isinstance(run_with_args, types.MethodType)
+        if self._new_style:
+            func = run_with_args.__func__
+
+            argspec = inspect.getargspec(func)
+            if argspec.args and argspec.args[0] == 'self':
+                argspec = argspec._replace(  # pylint: disable=protected-access
+                    args=argspec.args[1:])
+            self._argspec = argspec
+            # TODO(craigcitro): Do we really want to support all this
+            # nonsense?
+            self._star_args = self._argspec.varargs is not None
+            self._star_kwds = self._argspec.keywords is not None
+            self._max_args = len(self._argspec.args or ())
+            self._min_args = self._max_args - len(self._argspec.defaults or ())
+            if self._star_args:
+                self._max_args = sys.maxsize
+
+            self._debug_mode = FLAGS.debug_mode
+            self.surface_in_shell = True
+            self.__doc__ = self.RunWithArgs.__doc__
+
+    def __getattr__(self, name):
+        if name in self._command_flags:
+            return self._command_flags[name].value
+        return super(NewCmd, self).__getattribute__(name)
+
+    def _GetFlag(self, flagname):
+        if flagname in self._command_flags:
+            return self._command_flags[flagname]
+        return None
+
+    def Run(self, argv):
+        """Run this command.
+
+        If self is a new-style command, we set up arguments and call
+        self.RunWithArgs, gracefully handling exceptions. If not, we
+        simply call self.Run(argv).
+
+        Args:
+          argv: List of arguments as strings.
+
+        Returns:
+          0 on success, nonzero on failure.
+        """
+        if not self._new_style:
+            return super(NewCmd, self).Run(argv)
+
+        # TODO(craigcitro): We need to save and restore flags each time so
+        # that we can use per-command flags in the REPL.
+        args = argv[1:]
+        fail = None
+        fail_template = '%s positional args, found %d, expected at %s %d'
+        if len(args) < self._min_args:
+            fail = fail_template % ('Not enough', len(args),
+                                    'least', self._min_args)
+        if len(args) > self._max_args:
+            fail = fail_template % ('Too many', len(args),
+                                    'most', self._max_args)
+        if fail:
+            print(fail)
+            if self.usage:
+                print('Usage: %s' % (self.usage,))
+            return 1
+
+        if self._debug_mode:
+            return self.RunDebug(args, {})
+        return self.RunSafely(args, {})
+
+    def RunCmdLoop(self, argv):
+        """Hook for use in cmd.Cmd-based command shells."""
+        try:
+            args = shlex.split(argv)
+        except ValueError as e:
+            raise SyntaxError(self.EncodeForPrinting(e))
+        return self.Run([self._command_name] + args)
+
+    @staticmethod
+    def EncodeForPrinting(s):
+        """Safely encode a string as the encoding for sys.stdout."""
+        encoding = sys.stdout.encoding or 'ascii'
+        return six.text_type(s).encode(encoding, 'backslashreplace')
+
+    def _FormatError(self, e):
+        """Hook for subclasses to modify how error messages are printed."""
+        return _SafeMakeAscii(e)
+
+    def _HandleError(self, e):
+        message = self._FormatError(e)
+        print('Exception raised in %s operation: %s' % (
+            self._command_name, message))
+        return 1
+
+    def _IsDebuggableException(self, e):
+        """Hook for subclasses to skip debugging on certain exceptions."""
+        return not isinstance(e, app.UsageError)
+
+    def RunDebug(self, args, kwds):
+        """Run this command in debug mode."""
+        try:
+            return_value = self.RunWithArgs(*args, **kwds)
+        except BaseException as e:
+            # Don't break into the debugger for expected exceptions.
+            if not self._IsDebuggableException(e):
+                return self._HandleError(e)
+            print()
+            print('****************************************************')
+            print('**   Unexpected Exception raised in execution!    **')
+            if FLAGS.headless:
+                print('**  --headless mode enabled, exiting.             **')
+                print('**  See STDERR for traceback.                     **')
+            else:
+                print('**  --debug_mode enabled, starting pdb.           **')
+            print('****************************************************')
+            print()
+            traceback.print_exc()
+            print()
+            if not FLAGS.headless:
+                pdb.post_mortem()
+            return 1
+        return return_value
+
+    def RunSafely(self, args, kwds):
+        """Run this command, turning exceptions into print statements."""
+        try:
+            return_value = self.RunWithArgs(*args, **kwds)
+        except BaseException as e:
+            return self._HandleError(e)
+        return return_value
+
+
+class CommandLoop(cmd.Cmd):
+
+    """Instance of cmd.Cmd built to work with NewCmd."""
+
+    class TerminateSignal(Exception):
+
+        """Exception type used for signaling loop completion."""
+
+    def __init__(self, commands, prompt):
+        cmd.Cmd.__init__(self)
+        self._commands = {'help': commands['help']}
+        self._special_command_names = ['help', 'repl', 'EOF']
+        for name, command in commands.items():
+            if (name not in self._special_command_names and
+                    isinstance(command, NewCmd) and
+                    command.surface_in_shell):
+                self._commands[name] = command
+                setattr(self, 'do_%s' % (name,), command.RunCmdLoop)
+        self._default_prompt = prompt
+        self._set_prompt()
+        self._last_return_code = 0
+
+    @property
+    def last_return_code(self):
+        return self._last_return_code
+
+    def _set_prompt(self):  # pylint: disable=invalid-name
+        self.prompt = self._default_prompt
+
+    def do_EOF(self, *unused_args):  # pylint: disable=invalid-name
+        """Terminate the running command loop.
+
+        This function raises an exception to avoid the need to do
+        potentially-error-prone string parsing inside onecmd.
+
+        Args:
+          *unused_args: unused.
+
+        Returns:
+          Never returns.
+
+        Raises:
+          CommandLoop.TerminateSignal: always.
+        """
+        raise CommandLoop.TerminateSignal()
+
+    def postloop(self):
+        print('Goodbye.')
+
+    # pylint: disable=arguments-differ
+    def completedefault(self, unused_text, line, unused_begidx, unused_endidx):
+        if not line:
+            return []
+        else:
+            command_name = line.partition(' ')[0].lower()
+            usage = ''
+            if command_name in self._commands:
+                usage = self._commands[command_name].usage
+            if usage:
+                print()
+                print(usage)
+                print('%s%s' % (self.prompt, line), end=' ')
+            return []
+    # pylint: enable=arguments-differ
+
+    def emptyline(self):
+        print('Available commands:', end=' ')
+        print(' '.join(list(self._commands)))
+
+    def precmd(self, line):
+        """Preprocess the shell input."""
+        if line == 'EOF':
+            return line
+        if line.startswith('exit') or line.startswith('quit'):
+            return 'EOF'
+        words = line.strip().split()
+        if len(words) == 1 and words[0] not in ['help', 'ls', 'version']:
+            return 'help %s' % (line.strip(),)
+        return line
+
+    def onecmd(self, line):
+        """Process a single command.
+
+        Runs a single command, and stores the return code in
+        self._last_return_code. Always returns False unless the command
+        was EOF.
+
+        Args:
+          line: (str) Command line to process.
+
+        Returns:
+          A bool signaling whether or not the command loop should terminate.
+        """
+        try:
+            self._last_return_code = cmd.Cmd.onecmd(self, line)
+        except CommandLoop.TerminateSignal:
+            return True
+        except BaseException as e:
+            name = line.split(' ')[0]
+            print('Error running %s:' % name)
+            print(e)
+            self._last_return_code = 1
+        return False
+
+    def get_names(self):
+        names = dir(self)
+        commands = (name for name in self._commands
+                    if name not in self._special_command_names)
+        names.extend('do_%s' % (name,) for name in commands)
+        names.remove('do_EOF')
+        return names
+
+    def do_help(self, arg):
+        """Print the help for command_name (if present) or general help."""
+
+        command_name = arg
+
+        # TODO(craigcitro): Add command-specific flags.
+        def FormatOneCmd(name, command, command_names):
+            """Format one command."""
+            indent_size = appcommands.GetMaxCommandLength() + 3
+            if len(command_names) > 1:
+                indent = ' ' * indent_size
+                command_help = flags.TextWrap(
+                    command.CommandGetHelp('', cmd_names=command_names),
+                    indent=indent,
+                    firstline_indent='')
+                first_help_line, _, rest = command_help.partition('\n')
+                first_line = '%-*s%s' % (indent_size,
+                                         name + ':', first_help_line)
+                return '\n'.join((first_line, rest))
+            default_indent = '  '
+            return '\n' + flags.TextWrap(
+                command.CommandGetHelp('', cmd_names=command_names),
+                indent=default_indent,
+                firstline_indent=default_indent) + '\n'
+
+        if not command_name:
+            print('\nHelp for commands:\n')
+            command_names = list(self._commands)
+            print('\n\n'.join(
+                FormatOneCmd(name, command, command_names)
+                for name, command in self._commands.items()
+                if name not in self._special_command_names))
+            print()
+        elif command_name in self._commands:
+            print(FormatOneCmd(command_name, self._commands[command_name],
+                               command_names=[command_name]))
+        return 0
+
+    def postcmd(self, stop, line):
+        return bool(stop) or line == 'EOF'
+
+
+class Repl(NewCmd):
+
+    """Start an interactive session."""
+    PROMPT = '> '
+
+    def __init__(self, name, fv):
+        super(Repl, self).__init__(name, fv)
+        self.surface_in_shell = False
+        flags.DEFINE_string(
+            'prompt', '',
+            'Prompt to use for interactive shell.',
+            flag_values=fv)
+
+    def RunWithArgs(self):
+        """Start an interactive session."""
+        prompt = FLAGS.prompt or self.PROMPT
+        repl = CommandLoop(appcommands.GetCommandList(), prompt=prompt)
+        print('Welcome! (Type help for more information.)')
+        while True:
+            try:
+                repl.cmdloop()
+                break
+            except KeyboardInterrupt:
+                print()
+        return repl.last_return_code
diff --git a/apitools/base/py/base_api.py b/apitools/base/py/base_api.py
new file mode 100644
index 0000000..98836c9
--- /dev/null
+++ b/apitools/base/py/base_api.py
@@ -0,0 +1,729 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Base class for api services."""
+
+import base64
+import contextlib
+import datetime
+import logging
+import pprint
+
+
+import six
+from six.moves import http_client
+from six.moves import urllib
+
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+from apitools.base.py import http_wrapper
+from apitools.base.py import util
+
+__all__ = [
+    'ApiMethodInfo',
+    'ApiUploadInfo',
+    'BaseApiClient',
+    'BaseApiService',
+    'NormalizeApiEndpoint',
+]
+
+# TODO(craigcitro): Remove this once we quiet the spurious logging in
+# oauth2client (or drop oauth2client).
+logging.getLogger('oauth2client.util').setLevel(logging.ERROR)
+
+_MAX_URL_LENGTH = 2048
+
+
+class ApiUploadInfo(messages.Message):
+
+    """Media upload information for a method.
+
+    Fields:
+      accept: (repeated) MIME Media Ranges for acceptable media uploads
+          to this method.
+      max_size: (integer) Maximum size of a media upload, such as 3MB
+          or 1TB (converted to an integer).
+      resumable_path: Path to use for resumable uploads.
+      resumable_multipart: (boolean) Whether or not the resumable endpoint
+          supports multipart uploads.
+      simple_path: Path to use for simple uploads.
+      simple_multipart: (boolean) Whether or not the simple endpoint
+          supports multipart uploads.
+    """
+    accept = messages.StringField(1, repeated=True)
+    max_size = messages.IntegerField(2)
+    resumable_path = messages.StringField(3)
+    resumable_multipart = messages.BooleanField(4)
+    simple_path = messages.StringField(5)
+    simple_multipart = messages.BooleanField(6)
+
+
+class ApiMethodInfo(messages.Message):
+
+    """Configuration info for an API method.
+
+    All fields are strings unless noted otherwise.
+
+    Fields:
+      relative_path: Relative path for this method.
+      flat_path: Expanded version (if any) of relative_path.
+      method_id: ID for this method.
+      http_method: HTTP verb to use for this method.
+      path_params: (repeated) path parameters for this method.
+      query_params: (repeated) query parameters for this method.
+      ordered_params: (repeated) ordered list of parameters for
+          this method.
+      description: description of this method.
+      request_type_name: name of the request type.
+      response_type_name: name of the response type.
+      request_field: if not null, the field to pass as the body
+          of this POST request. may also be the REQUEST_IS_BODY
+          value below to indicate the whole message is the body.
+      upload_config: (ApiUploadInfo) Information about the upload
+          configuration supported by this method.
+      supports_download: (boolean) If True, this method supports
+          downloading the request via the `alt=media` query
+          parameter.
+    """
+
+    relative_path = messages.StringField(1)
+    flat_path = messages.StringField(2)
+    method_id = messages.StringField(3)
+    http_method = messages.StringField(4)
+    path_params = messages.StringField(5, repeated=True)
+    query_params = messages.StringField(6, repeated=True)
+    ordered_params = messages.StringField(7, repeated=True)
+    description = messages.StringField(8)
+    request_type_name = messages.StringField(9)
+    response_type_name = messages.StringField(10)
+    request_field = messages.StringField(11, default='')
+    upload_config = messages.MessageField(ApiUploadInfo, 12)
+    supports_download = messages.BooleanField(13, default=False)
+REQUEST_IS_BODY = '<request>'
+
+
+def _LoadClass(name, messages_module):
+    if name.startswith('message_types.'):
+        _, _, classname = name.partition('.')
+        return getattr(message_types, classname)
+    elif '.' not in name:
+        return getattr(messages_module, name)
+    else:
+        raise exceptions.GeneratedClientError('Unknown class %s' % name)
+
+
+def _RequireClassAttrs(obj, attrs):
+    for attr in attrs:
+        attr_name = attr.upper()
+        if not hasattr(obj, '%s' % attr_name) or not getattr(obj, attr_name):
+            msg = 'No %s specified for object of class %s.' % (
+                attr_name, type(obj).__name__)
+            raise exceptions.GeneratedClientError(msg)
+
+
+def NormalizeApiEndpoint(api_endpoint):
+    if not api_endpoint.endswith('/'):
+        api_endpoint += '/'
+    return api_endpoint
+
+
+def _urljoin(base, url):  # pylint: disable=invalid-name
+    """Custom urljoin replacement supporting : before / in url."""
+    # In general, it's unsafe to simply join base and url. However, for
+    # the case of discovery documents, we know:
+    #  * base will never contain params, query, or fragment
+    #  * url will never contain a scheme or net_loc.
+    # In general, this means we can safely join on /; we just need to
+    # ensure we end up with precisely one / joining base and url. The
+    # exception here is the case of media uploads, where url will be an
+    # absolute url.
+    if url.startswith('http://') or url.startswith('https://'):
+        return urllib.parse.urljoin(base, url)
+    new_base = base if base.endswith('/') else base + '/'
+    new_url = url[1:] if url.startswith('/') else url
+    return new_base + new_url
+
+
+class _UrlBuilder(object):
+
+    """Convenient container for url data."""
+
+    def __init__(self, base_url, relative_path=None, query_params=None):
+        """Split base_url (joined with relative_path) into components.
+
+        Args:
+          base_url: Scheme/netloc (optionally with a path) for the url.
+          relative_path: Optional path joined onto base_url.
+          query_params: Optional dict of query parameters, merged over any
+              parameters already present in the url.
+
+        Raises:
+          exceptions.ConfigurationValueError: If the url has a fragment.
+        """
+        components = urllib.parse.urlsplit(_urljoin(
+            base_url, relative_path or ''))
+        # A fragment would be silently dropped when rebuilding the url, so
+        # reject it up front as a configuration error.
+        if components.fragment:
+            raise exceptions.ConfigurationValueError(
+                'Unexpected url fragment: %s' % components.fragment)
+        # parse_qs maps each key to a list of values.
+        self.query_params = urllib.parse.parse_qs(components.query or '')
+        if query_params is not None:
+            self.query_params.update(query_params)
+        self.__scheme = components.scheme
+        self.__netloc = components.netloc
+        self.relative_path = components.path or ''
+
+    @classmethod
+    def FromUrl(cls, url):
+        """Build a _UrlBuilder from a complete url string."""
+        urlparts = urllib.parse.urlsplit(url)
+        query_params = urllib.parse.parse_qs(urlparts.query)
+        base_url = urllib.parse.urlunsplit((
+            urlparts.scheme, urlparts.netloc, '', None, None))
+        relative_path = urlparts.path or ''
+        return cls(
+            base_url, relative_path=relative_path, query_params=query_params)
+
+    @property
+    def base_url(self):
+        # Scheme and netloc only; path and query are tracked separately.
+        return urllib.parse.urlunsplit(
+            (self.__scheme, self.__netloc, '', '', ''))
+
+    @base_url.setter
+    def base_url(self, value):
+        components = urllib.parse.urlsplit(value)
+        # Any path/query/fragment here would be discarded, so reject it
+        # rather than lose data silently.
+        if components.path or components.query or components.fragment:
+            raise exceptions.ConfigurationValueError(
+                'Invalid base url: %s' % value)
+        self.__scheme = components.scheme
+        self.__netloc = components.netloc
+
+    @property
+    def query(self):
+        # TODO(craigcitro): In the case that some of the query params are
+        # non-ASCII, we may silently fail to encode correctly. We should
+        # figure out who is responsible for owning the object -> str
+        # conversion.
+        # The second argument (doseq=True) expands list values into
+        # repeated key=value pairs.
+        return urllib.parse.urlencode(self.query_params, True)
+
+    @property
+    def url(self):
+        # Refuse to build a url from an unexpanded uritemplate path.
+        if '{' in self.relative_path or '}' in self.relative_path:
+            raise exceptions.ConfigurationValueError(
+                'Cannot create url with relative path %s' % self.relative_path)
+        return urllib.parse.urlunsplit((
+            self.__scheme, self.__netloc, self.relative_path, self.query, ''))
+
+
+def _SkipGetCredentials():
+    """Hook for skipping credentials. For internal use."""
+    # Always False here; presumably a monkey-patch point for internal
+    # builds — confirm before relying on it.
+    return False
+
+
+class BaseApiClient(object):
+
+    """Base class for client libraries."""
+    MESSAGES_MODULE = None
+
+    _API_KEY = ''
+    _CLIENT_ID = ''
+    _CLIENT_SECRET = ''
+    _PACKAGE = ''
+    _SCOPES = []
+    _USER_AGENT = ''
+
+    def __init__(self, url, credentials=None, get_credentials=True, http=None,
+                 model=None, log_request=False, log_response=False,
+                 num_retries=5, max_retry_wait=60, credentials_args=None,
+                 default_global_params=None, additional_http_headers=None,
+                 check_response_func=None, retry_func=None):
+        _RequireClassAttrs(self, ('_package', '_scopes', 'messages_module'))
+        if default_global_params is not None:
+            util.Typecheck(default_global_params, self.params_type)
+        self.__default_global_params = default_global_params
+        self.log_request = log_request
+        self.log_response = log_response
+        self.__num_retries = 5
+        self.__max_retry_wait = 60
+        # We let the @property machinery below do our validation.
+        self.num_retries = num_retries
+        self.max_retry_wait = max_retry_wait
+        self._credentials = credentials
+        get_credentials = get_credentials and not _SkipGetCredentials()
+        if get_credentials and not credentials:
+            credentials_args = credentials_args or {}
+            self._SetCredentials(**credentials_args)
+        self._url = NormalizeApiEndpoint(url)
+        self._http = http or http_wrapper.GetHttp()
+        # Note that "no credentials" is totally possible.
+        if self._credentials is not None:
+            self._http = self._credentials.authorize(self._http)
+        # TODO(craigcitro): Remove this field when we switch to proto2.
+        self.__include_fields = None
+
+        self.additional_http_headers = additional_http_headers or {}
+        self.check_response_func = check_response_func
+        self.retry_func = retry_func
+
+        # TODO(craigcitro): Finish deprecating these fields.
+        _ = model
+
+        self.__response_type_model = 'proto'
+
+    def _SetCredentials(self, **kwds):
+        """Fetch credentials, and set them for this client.
+
+        Note that we can't simply return credentials, since creating them
+        may involve side-effecting self.
+
+        Args:
+          **kwds: Additional keyword arguments are passed on to GetCredentials.
+
+        Returns:
+          None. Sets self._credentials.
+        """
+        args = {
+            'api_key': self._API_KEY,
+            'client': self,
+            'client_id': self._CLIENT_ID,
+            'client_secret': self._CLIENT_SECRET,
+            'package_name': self._PACKAGE,
+            'scopes': self._SCOPES,
+            'user_agent': self._USER_AGENT,
+        }
+        args.update(kwds)
+        # credentials_lib can be expensive to import so do it only if needed.
+        from apitools.base.py import credentials_lib
+        # TODO(craigcitro): It's a bit dangerous to pass this
+        # still-half-initialized self into this method, but we might need
+        # to set attributes on it associated with our credentials.
+        # Consider another way around this (maybe a callback?) and whether
+        # or not it's worth it.
+        self._credentials = credentials_lib.GetCredentials(**args)
+
+    @classmethod
+    def ClientInfo(cls):
+        return {
+            'client_id': cls._CLIENT_ID,
+            'client_secret': cls._CLIENT_SECRET,
+            'scope': ' '.join(sorted(util.NormalizeScopes(cls._SCOPES))),
+            'user_agent': cls._USER_AGENT,
+        }
+
+    @property
+    def base_model_class(self):
+        return None
+
+    @property
+    def http(self):
+        return self._http
+
+    @property
+    def url(self):
+        return self._url
+
+    @classmethod
+    def GetScopes(cls):
+        return cls._SCOPES
+
+    @property
+    def params_type(self):
+        return _LoadClass('StandardQueryParameters', self.MESSAGES_MODULE)
+
+    @property
+    def user_agent(self):
+        return self._USER_AGENT
+
+    @property
+    def _default_global_params(self):
+        if self.__default_global_params is None:
+            # pylint: disable=not-callable
+            self.__default_global_params = self.params_type()
+        return self.__default_global_params
+
+    def AddGlobalParam(self, name, value):
+        params = self._default_global_params
+        setattr(params, name, value)
+
+    @property
+    def global_params(self):
+        return encoding.CopyProtoMessage(self._default_global_params)
+
+    @contextlib.contextmanager
+    def IncludeFields(self, include_fields):
+        self.__include_fields = include_fields
+        yield
+        self.__include_fields = None
+
+    @property
+    def response_type_model(self):
+        return self.__response_type_model
+
+    @contextlib.contextmanager
+    def JsonResponseModel(self):
+        """In this context, return raw JSON instead of proto."""
+        old_model = self.response_type_model
+        self.__response_type_model = 'json'
+        yield
+        self.__response_type_model = old_model
+
+    @property
+    def num_retries(self):
+        return self.__num_retries
+
+    @num_retries.setter
+    def num_retries(self, value):
+        util.Typecheck(value, six.integer_types)
+        if value < 0:
+            raise exceptions.InvalidDataError(
+                'Cannot have negative value for num_retries')
+        self.__num_retries = value
+
+    @property
+    def max_retry_wait(self):
+        return self.__max_retry_wait
+
+    @max_retry_wait.setter
+    def max_retry_wait(self, value):
+        util.Typecheck(value, six.integer_types)
+        if value <= 0:
+            raise exceptions.InvalidDataError(
+                'max_retry_wait must be a postiive integer')
+        self.__max_retry_wait = value
+
+    @contextlib.contextmanager
+    def WithRetries(self, num_retries):
+        old_num_retries = self.num_retries
+        self.num_retries = num_retries
+        yield
+        self.num_retries = old_num_retries
+
+    def ProcessRequest(self, method_config, request):
+        """Hook for pre-processing of requests."""
+        if self.log_request:
+            logging.info(
+                'Calling method %s with %s: %s', method_config.method_id,
+                method_config.request_type_name, request)
+        return request
+
+    def ProcessHttpRequest(self, http_request):
+        """Hook for pre-processing of http requests."""
+        http_request.headers.update(self.additional_http_headers)
+        if self.log_request:
+            logging.info('Making http %s to %s',
+                         http_request.http_method, http_request.url)
+            logging.info('Headers: %s', pprint.pformat(http_request.headers))
+            if http_request.body:
+                # TODO(craigcitro): Make this safe to print in the case of
+                # non-printable body characters.
+                logging.info('Body:\n%s',
+                             http_request.loggable_body or http_request.body)
+            else:
+                logging.info('Body: (none)')
+        return http_request
+
+    def ProcessResponse(self, method_config, response):
+        if self.log_response:
+            logging.info('Response of type %s: %s',
+                         method_config.response_type_name, response)
+        return response
+
+    # TODO(craigcitro): Decide where these two functions should live.
+    def SerializeMessage(self, message):
+        return encoding.MessageToJson(
+            message, include_fields=self.__include_fields)
+
+    def DeserializeMessage(self, response_type, data):
+        """Deserialize the given data as method_config.response_type."""
+        try:
+            message = encoding.JsonToMessage(response_type, data)
+        except (exceptions.InvalidDataFromServerError,
+                messages.ValidationError, ValueError) as e:
+            raise exceptions.InvalidDataFromServerError(
+                'Error decoding response "%s" as type %s: %s' % (
+                    data, response_type.__name__, e))
+        return message
+
+    def FinalizeTransferUrl(self, url):
+        """Modify the url for a given transfer, based on auth and version."""
+        url_builder = _UrlBuilder.FromUrl(url)
+        if self.global_params.key:
+            url_builder.query_params['key'] = self.global_params.key
+        return url_builder.url
+
+
+class BaseApiService(object):
+
+    """Base class for generated API services."""
+
+    def __init__(self, client):
+        """Create a service bound to the given client."""
+        self.__client = client
+        # Caches of method/upload configs; method configs are filled in
+        # lazily by GetMethodConfig.
+        self._method_configs = {}
+        self._upload_configs = {}
+
+    @property
+    def _client(self):
+        return self.__client
+
+    @property
+    def client(self):
+        return self.__client
+
+    def GetMethodConfig(self, method):
+        """Returns service cached method config for given method."""
+        method_config = self._method_configs.get(method)
+        if method_config:
+            return method_config
+        func = getattr(self, method, None)
+        if func is None:
+            raise KeyError(method)
+        method_config = getattr(func, 'method_config', None)
+        if method_config is None:
+            raise KeyError(method)
+        # Build the config once and cache it for subsequent lookups.
+        self._method_configs[method] = config = method_config()
+        return config
+
+    @classmethod
+    def GetMethodsList(cls):
+        """Names of service methods that carry a method_config."""
+        return [f.__name__ for f in six.itervalues(cls.__dict__)
+                if getattr(f, 'method_config', None)]
+
+    def GetUploadConfig(self, method):
+        """The cached upload config for method, or None."""
+        return self._upload_configs.get(method)
+
+    def GetRequestType(self, method):
+        """The request message class for the named method."""
+        method_config = self.GetMethodConfig(method)
+        return getattr(self.client.MESSAGES_MODULE,
+                       method_config.request_type_name)
+
+    def GetResponseType(self, method):
+        """The response message class for the named method."""
+        method_config = self.GetMethodConfig(method)
+        return getattr(self.client.MESSAGES_MODULE,
+                       method_config.response_type_name)
+
+    def __CombineGlobalParams(self, global_params, default_params):
+        """Combine the given params with the defaults."""
+        util.Typecheck(global_params, (type(None), self.__client.params_type))
+        result = self.__client.params_type()
+        global_params = global_params or self.__client.params_type()
+        for field in result.all_fields():
+            # Explicitly-passed params win over the client defaults.
+            value = global_params.get_assigned_value(field.name)
+            if value is None:
+                value = default_params.get_assigned_value(field.name)
+            if value not in (None, [], ()):
+                setattr(result, field.name, value)
+        return result
+
+    def __EncodePrettyPrint(self, query_info):
+        # The prettyPrint flag needs custom encoding: it should be encoded
+        # as 0 if False, and ignored otherwise (True is the default).
+        if not query_info.pop('prettyPrint', True):
+            query_info['prettyPrint'] = 0
+        # The One Platform equivalent of prettyPrint is pp, which also needs
+        # custom encoding.
+        if not query_info.pop('pp', True):
+            query_info['pp'] = 0
+        return query_info
+
+    def __FinalUrlValue(self, value, field):
+        """Encode value for the URL, using field to skip encoding for bytes."""
+        # Bytes-typed fields are base64-encoded rather than utf8-decoded.
+        if isinstance(field, messages.BytesField) and value is not None:
+            return base64.urlsafe_b64encode(value)
+        elif isinstance(value, six.text_type):
+            return value.encode('utf8')
+        elif isinstance(value, six.binary_type):
+            return value.decode('utf8')
+        elif isinstance(value, datetime.datetime):
+            return value.isoformat()
+        return value
+
+    def __ConstructQueryParams(self, query_params, request, global_params):
+        """Construct a dictionary of query parameters for this request."""
+        # First, handle the global params.
+        global_params = self.__CombineGlobalParams(
+            global_params, self.__client.global_params)
+        global_param_names = util.MapParamNames(
+            [x.name for x in self.__client.params_type.all_fields()],
+            self.__client.params_type)
+        global_params_type = type(global_params)
+        query_info = dict(
+            (param,
+             self.__FinalUrlValue(getattr(global_params, param),
+                                  getattr(global_params_type, param)))
+            for param in global_param_names)
+        # Next, add the query params.
+        query_param_names = util.MapParamNames(query_params, type(request))
+        request_type = type(request)
+        query_info.update(
+            (param,
+             self.__FinalUrlValue(getattr(request, param, None),
+                                  getattr(request_type, param)))
+            for param in query_param_names)
+        # Drop unset params, then apply the special prettyPrint/pp encoding
+        # and remap python names to their wire names.
+        query_info = dict((k, v) for k, v in query_info.items()
+                          if v is not None)
+        query_info = self.__EncodePrettyPrint(query_info)
+        query_info = util.MapRequestParams(query_info, type(request))
+        return query_info
+
+    def __ConstructRelativePath(self, method_config, request,
+                                relative_path=None):
+        """Determine the relative path for request."""
+        python_param_names = util.MapParamNames(
+            method_config.path_params, type(request))
+        params = dict([(param, getattr(request, param, None))
+                       for param in python_param_names])
+        params = util.MapRequestParams(params, type(request))
+        return util.ExpandRelativePath(method_config, params,
+                                       relative_path=relative_path)
+
+    def __FinalizeRequest(self, http_request, url_builder):
+        """Make any final general adjustments to the request."""
+        # Overlong GET urls are tunneled as POST with the
+        # x-http-method-override header, moving the query into the body.
+        if (http_request.http_method == 'GET' and
+                len(http_request.url) > _MAX_URL_LENGTH):
+            http_request.http_method = 'POST'
+            http_request.headers['x-http-method-override'] = 'GET'
+            http_request.headers[
+                'content-type'] = 'application/x-www-form-urlencoded'
+            http_request.body = url_builder.query
+            url_builder.query_params = {}
+        http_request.url = url_builder.url
+
+    def __ProcessHttpResponse(self, method_config, http_response, request):
+        """Process the given http response."""
+        if http_response.status_code not in (http_client.OK,
+                                             http_client.NO_CONTENT):
+            raise exceptions.HttpError(
+                http_response.info, http_response.content,
+                http_response.request_url, method_config, request)
+        if http_response.status_code == http_client.NO_CONTENT:
+            # TODO(craigcitro): Find out why _replace doesn't seem to work
+            # here.
+            # Substitute an empty JSON object so deserialization below
+            # yields an empty message rather than failing.
+            http_response = http_wrapper.Response(
+                info=http_response.info, content='{}',
+                request_url=http_response.request_url)
+        if self.__client.response_type_model == 'json':
+            # Raw JSON was requested (see BaseApiClient.JsonResponseModel).
+            return http_response.content
+        response_type = _LoadClass(method_config.response_type_name,
+                                   self.__client.MESSAGES_MODULE)
+        return self.__client.DeserializeMessage(
+            response_type, http_response.content)
+
+    def __SetBaseHeaders(self, http_request, client):
+        """Fill in the basic headers on http_request."""
+        # TODO(craigcitro): Make the default a little better here, and
+        # include the apitools version.
+        user_agent = client.user_agent or 'apitools-client/1.0'
+        http_request.headers['user-agent'] = user_agent
+        http_request.headers['accept'] = 'application/json'
+        http_request.headers['accept-encoding'] = 'gzip, deflate'
+
+    def __SetBody(self, http_request, method_config, request, upload):
+        """Fill in the body on http_request."""
+        if not method_config.request_field:
+            return
+
+        request_type = _LoadClass(
+            method_config.request_type_name, self.__client.MESSAGES_MODULE)
+        if method_config.request_field == REQUEST_IS_BODY:
+            body_value = request
+            body_type = request_type
+        else:
+            body_value = getattr(request, method_config.request_field)
+            body_field = request_type.field_by_name(
+                method_config.request_field)
+            util.Typecheck(body_field, messages.MessageField)
+            body_type = body_field.type
+
+        # If there was no body provided, we use an empty message of the
+        # appropriate type.
+        body_value = body_value or body_type()
+        if upload and not body_value:
+            # We're going to fill in the body later.
+            return
+        util.Typecheck(body_value, body_type)
+        http_request.headers['content-type'] = 'application/json'
+        http_request.body = self.__client.SerializeMessage(body_value)
+
+    def PrepareHttpRequest(self, method_config, request, global_params=None,
+                           upload=None, upload_config=None, download=None):
+        """Prepares an HTTP request to be sent."""
+        request_type = _LoadClass(
+            method_config.request_type_name, self.__client.MESSAGES_MODULE)
+        util.Typecheck(request, request_type)
+        request = self.__client.ProcessRequest(method_config, request)
+
+        http_request = http_wrapper.Request(
+            http_method=method_config.http_method)
+        self.__SetBaseHeaders(http_request, self.__client)
+        self.__SetBody(http_request, method_config, request, upload)
+
+        url_builder = _UrlBuilder(
+            self.__client.url, relative_path=method_config.relative_path)
+        url_builder.query_params = self.__ConstructQueryParams(
+            method_config.query_params, request, global_params)
+
+        # It's important that upload and download go before we fill in the
+        # relative path, so that they can replace it.
+        if upload is not None:
+            upload.ConfigureRequest(upload_config, http_request, url_builder)
+        if download is not None:
+            download.ConfigureRequest(http_request, url_builder)
+
+        url_builder.relative_path = self.__ConstructRelativePath(
+            method_config, request, relative_path=url_builder.relative_path)
+        self.__FinalizeRequest(http_request, url_builder)
+
+        return self.__client.ProcessHttpRequest(http_request)
+
+    def _RunMethod(self, method_config, request, global_params=None,
+                   upload=None, upload_config=None, download=None):
+        """Call this method with request."""
+        if upload is not None and download is not None:
+            # TODO(craigcitro): This just involves refactoring the logic
+            # below into callbacks that we can pass around; in particular,
+            # the order should be that the upload gets the initial request,
+            # and then passes its reply to a download if one exists, and
+            # then that goes to ProcessResponse and is returned.
+            raise exceptions.NotYetImplementedError(
+                'Cannot yet use both upload and download at once')
+
+        http_request = self.PrepareHttpRequest(
+            method_config, request, global_params, upload, upload_config,
+            download)
+
+        # TODO(craigcitro): Make num_retries customizable on Transfer
+        # objects, and pass in self.__client.num_retries when initializing
+        # an upload or download.
+        if download is not None:
+            download.InitializeDownload(http_request, client=self.client)
+            return
+
+        http_response = None
+        if upload is not None:
+            http_response = upload.InitializeUpload(
+                http_request, client=self.client)
+        if http_response is None:
+            http = self.__client.http
+            # Resumable uploads may carry their own http client for bytes.
+            if upload and upload.bytes_http:
+                http = upload.bytes_http
+            opts = {
+                'retries': self.__client.num_retries,
+                'max_retry_wait': self.__client.max_retry_wait,
+            }
+            if self.__client.check_response_func:
+                opts['check_response_func'] = self.__client.check_response_func
+            if self.__client.retry_func:
+                opts['retry_func'] = self.__client.retry_func
+            http_response = http_wrapper.MakeRequest(
+                http, http_request, **opts)
+
+        return self.ProcessHttpResponse(method_config, http_response, request)
+
+    def ProcessHttpResponse(self, method_config, http_response, request=None):
+        """Convert an HTTP response to the expected message type."""
+        return self.__client.ProcessResponse(
+            method_config,
+            self.__ProcessHttpResponse(method_config, http_response, request))
diff --git a/apitools/base/py/base_api_test.py b/apitools/base/py/base_api_test.py
new file mode 100644
index 0000000..7b23fa6
--- /dev/null
+++ b/apitools/base/py/base_api_test.py
@@ -0,0 +1,316 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import datetime
+import sys
+import contextlib
+
+import six
+from six.moves import http_client
+from six.moves import urllib_parse
+import unittest2
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.py import base_api
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+from apitools.base.py import http_wrapper
+
+
+@contextlib.contextmanager
+def mock(module, fn_name, patch):
+    """Temporarily replace module.fn_name with patch inside the with block."""
+    unpatch = getattr(module, fn_name)
+    setattr(module, fn_name, patch)
+    try:
+        yield
+    finally:
+        # Restore the original attribute even if the body raised.
+        setattr(module, fn_name, unpatch)
+
+
+class SimpleMessage(messages.Message):
+    """Test message with one string field and one bytes field."""
+    field = messages.StringField(1)
+    bytes_field = messages.BytesField(2)
+
+
+class MessageWithTime(messages.Message):
+    """Test message with a single DateTimeField."""
+    timestamp = message_types.DateTimeField(1)
+
+
+class MessageWithRemappings(messages.Message):
+    """Test message whose field and enum names get custom JSON mappings."""
+
+    class AnEnum(messages.Enum):
+        value_one = 1
+        value_two = 2
+
+    str_field = messages.StringField(1)
+    enum_field = messages.EnumField('AnEnum', 2)
+
+
+encoding.AddCustomJsonFieldMapping(
+    MessageWithRemappings, 'str_field', 'remapped_field')
+encoding.AddCustomJsonEnumMapping(
+    MessageWithRemappings.AnEnum, 'value_one', 'ONE/TWO')
+
+
+class StandardQueryParameters(messages.Message):
+    """Stand-in for an API's standard query parameters message."""
+    field = messages.StringField(1)
+    prettyPrint = messages.BooleanField(
+        5, default=True)  # pylint: disable=invalid-name
+    pp = messages.BooleanField(6, default=True)
+    nextPageToken = messages.BytesField(7)  # pylint:disable=invalid-name
+
+
+class FakeCredentials(object):
+    """Minimal credentials double; authorize is a no-op returning None."""
+
+    def authorize(self, _):  # pylint: disable=invalid-name
+        return None
+
+
+class FakeClient(base_api.BaseApiClient):
+    """Concrete BaseApiClient using this test module as messages module."""
+    MESSAGES_MODULE = sys.modules[__name__]
+    _PACKAGE = 'package'
+    _SCOPES = ['scope1']
+    _CLIENT_ID = 'client_id'
+    _CLIENT_SECRET = 'client_secret'
+
+
+class FakeService(base_api.BaseApiService):
+    """Service bound to a FakeClient with fake credentials by default."""
+
+    def __init__(self, client=None):
+        client = client or FakeClient(
+            'http://www.example.com/', credentials=FakeCredentials())
+        super(FakeService, self).__init__(client)
+
+
+class BaseApiTest(unittest2.TestCase):
+
+    def __GetFakeClient(self):
+        return FakeClient('', credentials=FakeCredentials())
+
+    def testUrlNormalization(self):
+        client = FakeClient('http://www.googleapis.com', get_credentials=False)
+        self.assertTrue(client.url.endswith('/'))
+
+    def testNoCredentials(self):
+        client = FakeClient('', get_credentials=False)
+        self.assertIsNotNone(client)
+        self.assertIsNone(client._credentials)
+
+    def testIncludeEmptyFieldsClient(self):
+        msg = SimpleMessage()
+        client = self.__GetFakeClient()
+        self.assertEqual('{}', client.SerializeMessage(msg))
+        with client.IncludeFields(('field',)):
+            self.assertEqual('{"field": null}', client.SerializeMessage(msg))
+
+    def testJsonResponse(self):
+        method_config = base_api.ApiMethodInfo(
+            response_type_name='SimpleMessage')
+        service = FakeService()
+        http_response = http_wrapper.Response(
+            info={'status': '200'}, content='{"field": "abc"}',
+            request_url='http://www.google.com')
+        response_message = SimpleMessage(field='abc')
+        self.assertEqual(response_message, service.ProcessHttpResponse(
+            method_config, http_response))
+        with service.client.JsonResponseModel():
+            self.assertEqual(
+                http_response.content,
+                service.ProcessHttpResponse(method_config, http_response))
+
+    def testAdditionalHeaders(self):
+        additional_headers = {'Request-Is-Awesome': '1'}
+        client = self.__GetFakeClient()
+
+        # No headers to start
+        http_request = http_wrapper.Request('http://www.example.com')
+        new_request = client.ProcessHttpRequest(http_request)
+        self.assertFalse('Request-Is-Awesome' in new_request.headers)
+
+        # Add a new header and ensure it's added to the request.
+        client.additional_http_headers = additional_headers
+        http_request = http_wrapper.Request('http://www.example.com')
+        new_request = client.ProcessHttpRequest(http_request)
+        self.assertTrue('Request-Is-Awesome' in new_request.headers)
+
+    def testCustomCheckResponse(self):
+        def check_response():
+            pass
+
+        def fakeMakeRequest(*_, **kwargs):
+            self.assertEqual(check_response, kwargs['check_response_func'])
+            return http_wrapper.Response(
+                info={'status': '200'}, content='{"field": "abc"}',
+                request_url='http://www.google.com')
+        method_config = base_api.ApiMethodInfo(
+            request_type_name='SimpleMessage',
+            response_type_name='SimpleMessage')
+        client = self.__GetFakeClient()
+        client.check_response_func = check_response
+        service = FakeService(client=client)
+        request = SimpleMessage()
+        with mock(base_api.http_wrapper, 'MakeRequest', fakeMakeRequest):
+            service._RunMethod(method_config, request)
+
+    def testCustomRetryFunc(self):
+        def retry_func():
+            pass
+
+        def fakeMakeRequest(*_, **kwargs):
+            self.assertEqual(retry_func, kwargs['retry_func'])
+            return http_wrapper.Response(
+                info={'status': '200'}, content='{"field": "abc"}',
+                request_url='http://www.google.com')
+        method_config = base_api.ApiMethodInfo(
+            request_type_name='SimpleMessage',
+            response_type_name='SimpleMessage')
+        client = self.__GetFakeClient()
+        client.retry_func = retry_func
+        service = FakeService(client=client)
+        request = SimpleMessage()
+        with mock(base_api.http_wrapper, 'MakeRequest', fakeMakeRequest):
+            service._RunMethod(method_config, request)
+
+    def testHttpError(self):
+        def fakeMakeRequest(*unused_args, **unused_kwargs):
+            return http_wrapper.Response(
+                info={'status': http_client.BAD_REQUEST},
+                content='{"field": "abc"}',
+                request_url='http://www.google.com')
+        method_config = base_api.ApiMethodInfo(
+            request_type_name='SimpleMessage',
+            response_type_name='SimpleMessage')
+        client = self.__GetFakeClient()
+        service = FakeService(client=client)
+        request = SimpleMessage()
+        with mock(base_api.http_wrapper, 'MakeRequest', fakeMakeRequest):
+            with self.assertRaises(exceptions.HttpError) as error_context:
+                service._RunMethod(method_config, request)
+        http_error = error_context.exception
+        self.assertEquals(400, http_error.status_code)
+        self.assertEquals('http://www.google.com', http_error.url)
+        self.assertEquals('{"field": "abc"}', http_error.content)
+        self.assertEquals(method_config, http_error.method_config)
+        self.assertEquals(request, http_error.request)
+
+    def testQueryEncoding(self):
+        method_config = base_api.ApiMethodInfo(
+            request_type_name='MessageWithTime', query_params=['timestamp'])
+        service = FakeService()
+        request = MessageWithTime(
+            timestamp=datetime.datetime(2014, 10, 0o7, 12, 53, 13))
+        http_request = service.PrepareHttpRequest(method_config, request)
+
+        url_timestamp = urllib_parse.quote(request.timestamp.isoformat())
+        self.assertTrue(http_request.url.endswith(url_timestamp))
+
+    def testPrettyPrintEncoding(self):
+        method_config = base_api.ApiMethodInfo(
+            request_type_name='MessageWithTime', query_params=['timestamp'])
+        service = FakeService()
+        request = MessageWithTime(
+            timestamp=datetime.datetime(2014, 10, 0o7, 12, 53, 13))
+
+        global_params = StandardQueryParameters()
+        http_request = service.PrepareHttpRequest(method_config, request,
+                                                  global_params=global_params)
+        self.assertFalse('prettyPrint' in http_request.url)
+        self.assertFalse('pp' in http_request.url)
+
+        global_params.prettyPrint = False  # pylint: disable=invalid-name
+        global_params.pp = False
+
+        http_request = service.PrepareHttpRequest(method_config, request,
+                                                  global_params=global_params)
+        self.assertTrue('prettyPrint=0' in http_request.url)
+        self.assertTrue('pp=0' in http_request.url)
+
+    def testQueryBytesRequest(self):
+        method_config = base_api.ApiMethodInfo(
+            request_type_name='SimpleMessage', query_params=['bytes_field'])
+        service = FakeService()
+        non_unicode_message = b''.join((six.int2byte(100),
+                                        six.int2byte(200)))
+        request = SimpleMessage(bytes_field=non_unicode_message)
+        global_params = StandardQueryParameters()
+        http_request = service.PrepareHttpRequest(method_config, request,
+                                                  global_params=global_params)
+        want = urllib_parse.urlencode({
+            'bytes_field': base64.urlsafe_b64encode(non_unicode_message),
+        })
+        self.assertIn(want, http_request.url)
+
+    def testQueryBytesGlobalParams(self):
+        method_config = base_api.ApiMethodInfo(
+            request_type_name='SimpleMessage', query_params=['bytes_field'])
+        service = FakeService()
+        non_unicode_message = b''.join((six.int2byte(100),
+                                        six.int2byte(200)))
+        request = SimpleMessage()
+        global_params = StandardQueryParameters(
+            nextPageToken=non_unicode_message)
+        http_request = service.PrepareHttpRequest(method_config, request,
+                                                  global_params=global_params)
+        want = urllib_parse.urlencode({
+            'nextPageToken': base64.urlsafe_b64encode(non_unicode_message),
+        })
+        self.assertIn(want, http_request.url)
+
+    def testQueryRemapping(self):
+        method_config = base_api.ApiMethodInfo(
+            request_type_name='MessageWithRemappings',
+            query_params=['remapped_field', 'enum_field'])
+        request = MessageWithRemappings(
+            str_field='foo', enum_field=MessageWithRemappings.AnEnum.value_one)
+        http_request = FakeService().PrepareHttpRequest(method_config, request)
+        result_params = urllib_parse.parse_qs(
+            urllib_parse.urlparse(http_request.url).query)
+        expected_params = {'enum_field': 'ONE%2FTWO', 'remapped_field': 'foo'}
+        self.assertTrue(expected_params, result_params)
+
+    def testPathRemapping(self):
+        method_config = base_api.ApiMethodInfo(
+            relative_path='parameters/{remapped_field}/remap/{enum_field}',
+            request_type_name='MessageWithRemappings',
+            path_params=['remapped_field', 'enum_field'])
+        request = MessageWithRemappings(
+            str_field='gonna',
+            enum_field=MessageWithRemappings.AnEnum.value_one)
+        service = FakeService()
+        expected_url = service.client.url + 'parameters/gonna/remap/ONE%2FTWO'
+        http_request = service.PrepareHttpRequest(method_config, request)
+        self.assertEqual(expected_url, http_request.url)
+
+        method_config.relative_path = (
+            'parameters/{+remapped_field}/remap/{+enum_field}')
+        expected_url = service.client.url + 'parameters/gonna/remap/ONE/TWO'
+        http_request = service.PrepareHttpRequest(method_config, request)
+        self.assertEqual(expected_url, http_request.url)
+
+    def testColonInRelativePath(self):
+        method_config = base_api.ApiMethodInfo(
+            relative_path='path:withJustColon',
+            request_type_name='SimpleMessage')
+        service = FakeService()
+        request = SimpleMessage()
+        http_request = service.PrepareHttpRequest(method_config, request)
+        self.assertEqual('http://www.example.com/path:withJustColon',
+                         http_request.url)
+
+
+# Allow running this test module directly.
+if __name__ == '__main__':
+    unittest2.main()
diff --git a/apitools/base/py/base_cli.py b/apitools/base/py/base_cli.py
new file mode 100644
index 0000000..2527e64
--- /dev/null
+++ b/apitools/base/py/base_cli.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Base script for generated CLI."""
+
+from __future__ import absolute_import
+
+import atexit
+import code
+import logging
+import os
+import readline
+import rlcompleter
+import sys
+
+import gflags as flags
+from google.apputils import appcommands
+
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+
+__all__ = [
+    'ConsoleWithReadline',
+    'DeclareBaseFlags',
+    'FormatOutput',
+    'SetupLogger',
+    'run_main',
+]
+
+
+# TODO(craigcitro): We should move all the flags for the
+# StandardQueryParameters into this file, so that they can be used
+# elsewhere easily.
+
+_BASE_FLAGS_DECLARED = False
+_OUTPUT_FORMATTER_MAP = {
+    'protorpc': lambda x: x,
+    'json': encoding.MessageToJson,
+}
+
+
def DeclareBaseFlags():
    """Declare the flags shared by all generated CLIs.

    Safe to call more than once; only the first call registers flags.
    """
    # TODO(craigcitro): FlagValidators?
    global _BASE_FLAGS_DECLARED  # pylint: disable=global-statement
    if _BASE_FLAGS_DECLARED:
        return
    # Register the three boolean logging flags in a single pass.
    for flag_name, help_text in (
            ('log_request', 'Log requests.'),
            ('log_response', 'Log responses.'),
            ('log_request_response', 'Log requests and responses.')):
        flags.DEFINE_boolean(flag_name, False, help_text)
    flags.DEFINE_enum(
        'output_format',
        'protorpc',
        _OUTPUT_FORMATTER_MAP.keys(),
        'Display format for results.')

    _BASE_FLAGS_DECLARED = True
+
+FLAGS = flags.FLAGS
+
+
def SetupLogger():
    """Enable INFO-level logging when any request/response logging flag is set."""
    should_log = (FLAGS.log_request or
                  FLAGS.log_response or
                  FLAGS.log_request_response)
    if should_log:
        logging.basicConfig()
        logging.getLogger().setLevel(logging.INFO)
+
+
def FormatOutput(message, output_format=None):
    """Convert the output to the user-specified format.

    Args:
      message: The result message to format.
      output_format: Optional format name (a key of _OUTPUT_FORMATTER_MAP).
          Defaults to the value of the --output_format flag.

    Returns:
      The message converted by the selected formatter.

    Raises:
      exceptions.UserError: If the resolved format is unknown.
    """
    output_format = output_format or FLAGS.output_format
    # Bug fix: look up the *resolved* format rather than always reading the
    # flag, so an explicit output_format argument is actually honored.
    formatter = _OUTPUT_FORMATTER_MAP.get(output_format)
    if formatter is None:
        raise exceptions.UserError('Unknown output format: %s' % output_format)
    return formatter(message)
+
+
+class _SmartCompleter(rlcompleter.Completer):
+
+    def _callable_postfix(self, val, word):
+        if ('(' in readline.get_line_buffer() or
+                not callable(val)):
+            return word
+        return word + '('
+
+    def complete(self, text, state):
+        if not readline.get_line_buffer().strip():
+            if not state:
+                return '  '
+            return None
+        return rlcompleter.Completer.complete(self, text, state)
+
+
class ConsoleWithReadline(code.InteractiveConsole):

    """InteractiveConsole with readline, tab completion, and history."""

    def __init__(self, env, filename='<console>', histfile=None):
        # Expose the completer machinery inside the console namespace too.
        console_locals = dict(env)
        console_locals['_SmartCompleter'] = _SmartCompleter
        console_locals['readline'] = readline
        console_locals['rlcompleter'] = rlcompleter
        code.InteractiveConsole.__init__(self, console_locals, filename)
        readline.parse_and_bind('tab: complete')
        readline.set_completer(_SmartCompleter(console_locals).complete)
        if histfile is None:
            return
        histfile = os.path.expanduser(histfile)
        # Load prior history if present, and persist it on exit.
        if os.path.exists(histfile):
            readline.read_history_file(histfile)
        atexit.register(lambda: readline.write_history_file(histfile))
+
+
def run_main():  # pylint: disable=invalid-name
    """Function to be used as setuptools script entry point.

    Appcommands assumes that it always runs as __main__, but launching
    via a setuptools-generated entry_point breaks this rule. We do some
    trickery here to make sure that appcommands and flags find their
    state where they expect to by faking ourselves as __main__.
    """

    # Put the flags for this module somewhere the flags module will look
    # for them.
    # pylint: disable=protected-access
    new_name = flags._GetMainModule()
    sys.modules[new_name] = sys.modules['__main__']
    for flag in FLAGS.FlagsByModuleDict().get(__name__, []):
        FLAGS._RegisterFlagByModule(new_name, flag)
        # NOTE(review): this key-flag loop is nested inside the flag loop,
        # so key flags are re-registered once per flag — looks redundant;
        # confirm whether it was meant to be a sibling loop.
        for key_flag in FLAGS.KeyFlagsByModuleDict().get(__name__, []):
            FLAGS._RegisterKeyFlagForModule(new_name, key_flag)
    # pylint: enable=protected-access

    # Now set __main__ appropriately so that appcommands will be
    # happy.
    sys.modules['__main__'] = sys.modules[__name__]
    appcommands.Run()
    # Restore the real __main__ and drop the alias we installed above.
    sys.modules['__main__'] = sys.modules.pop(new_name)
+
+
+if __name__ == '__main__':
+    appcommands.Run()
diff --git a/apitools/base/py/batch.py b/apitools/base/py/batch.py
new file mode 100644
index 0000000..f925ccf
--- /dev/null
+++ b/apitools/base/py/batch.py
@@ -0,0 +1,492 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Library for handling batch HTTP requests for apitools."""
+
+import collections
+import email.generator as generator
+import email.mime.multipart as mime_multipart
+import email.mime.nonmultipart as mime_nonmultipart
+import email.parser as email_parser
+import itertools
+import time
+import uuid
+
+import six
+from six.moves import http_client
+from six.moves import urllib_parse
+from six.moves import range  # pylint: disable=redefined-builtin
+
+from apitools.base.py import exceptions
+from apitools.base.py import http_wrapper
+
+__all__ = [
+    'BatchApiRequest',
+]
+
+
class RequestResponseAndHandler(collections.namedtuple(
        'RequestResponseAndHandler', ['request', 'response', 'handler'])):

    """Container for data related to completing an HTTP request.

    This contains an HTTP request, its response, and a callback for handling
    the response from the server.

    Attributes:
      request: An http_wrapper.Request object representing the HTTP request.
      response: The http_wrapper.Response object returned from the server,
        or None if the request has not completed yet.
      handler: A callback function accepting two arguments, response
        and exception. Response is an http_wrapper.Response object, and
        exception is an apiclient.errors.HttpError object if an error
        occurred, or otherwise None. May itself be None when no per-request
        callback was supplied.
    """
+
+
class BatchApiRequest(object):

    """Batches multiple api requests into a single request."""

    class ApiCall(object):

        """Holds request and response information for each request.

        ApiCalls are ultimately exposed to the client once the HTTP
        batch request has been completed.

        Attributes:
          http_request: A client-supplied http_wrapper.Request to be
              submitted to the server.
          response: A http_wrapper.Response object given by the server as a
              response to the user request, or None if an error occurred.
          exception: An apiclient.errors.HttpError object if an error
              occurred, or None.

        """

        def __init__(self, request, retryable_codes, service, method_config):
            """Initialize an individual API request.

            Args:
              request: An http_wrapper.Request object.
              retryable_codes: A list of integer HTTP codes that can
                  be retried.
              service: A service inheriting from base_api.BaseApiService.
              method_config: Method config for the desired API request.

            """
            # UNAUTHORIZED is always retryable: Execute() refreshes
            # credentials and retries on auth failures.
            self.__retryable_codes = list(
                set(retryable_codes + [http_client.UNAUTHORIZED]))
            self.__http_response = None
            self.__service = service
            self.__method_config = method_config

            self.http_request = request
            # TODO(user): Add some validation to these fields.
            self.__response = None
            self.__exception = None

        @property
        def is_error(self):
            return self.exception is not None

        @property
        def response(self):
            return self.__response

        @property
        def exception(self):
            return self.__exception

        @property
        def authorization_failed(self):
            return (self.__http_response and (
                self.__http_response.status_code == http_client.UNAUTHORIZED))

        @property
        def terminal_state(self):
            # A request with no response yet is never terminal.
            if self.__http_response is None:
                return False
            response_code = self.__http_response.status_code
            return response_code not in self.__retryable_codes

        def HandleResponse(self, http_response, exception):
            """Handles an incoming http response to the request in http_request.

            This is intended to be used as a callback function for
            BatchHttpRequest.Add.

            Args:
              http_response: Deserialized http_wrapper.Response object.
              exception: apiclient.errors.HttpError object if an error
                  occurred.

            """
            self.__http_response = http_response
            self.__exception = exception
            if self.terminal_state and not self.__exception:
                self.__response = self.__service.ProcessHttpResponse(
                    self.__method_config, self.__http_response)

    def __init__(self, batch_url=None, retryable_codes=None):
        """Initialize a batch API request object.

        Args:
          batch_url: Base URL for batch API calls.
          retryable_codes: A list of integer HTTP codes that can be retried.
        """
        self.api_requests = []
        self.retryable_codes = retryable_codes or []
        self.batch_url = batch_url or 'https://www.googleapis.com/batch'

    def Add(self, service, method, request, global_params=None):
        """Add a request to the batch.

        Args:
          service: A class inheriting base_api.BaseApiService.
          method: A string indicated desired method from the service. See
              the example in the class docstring.
          request: An input message appropriate for the specified
              service.method.
          global_params: Optional additional parameters to pass into
              method.PrepareHttpRequest.

        Returns:
          None

        """
        # Retrieve the configs for the desired method and service.
        method_config = service.GetMethodConfig(method)
        upload_config = service.GetUploadConfig(method)

        # Prepare the HTTP Request.
        http_request = service.PrepareHttpRequest(
            method_config, request, global_params=global_params,
            upload_config=upload_config)

        # Create the request and add it to our master list.
        api_request = self.ApiCall(
            http_request, self.retryable_codes, service, method_config)
        self.api_requests.append(api_request)

    def Execute(self, http, sleep_between_polls=5, max_retries=5,
                max_batch_size=None, batch_request_callback=None):
        """Execute all of the requests in the batch.

        Args:
          http: httplib2.Http object for use in the request.
          sleep_between_polls: Integer number of seconds to sleep between
              polls.
          max_retries: Max retries. Any requests that have not succeeded by
              this number of retries simply report the last response or
              exception, whatever it happened to be.
          max_batch_size: int, if specified requests will be split in batches
              of given size.
          batch_request_callback: function of (http_response, exception) passed
              to BatchHttpRequest which will be run on any given results.

        Returns:
          List of ApiCalls.
        """
        requests = [request for request in self.api_requests
                    if not request.terminal_state]
        if not requests:
            # Bug fix: with no pending requests and max_batch_size=None,
            # batch_size would be 0 and range(0, 0, 0) raises ValueError.
            # There is nothing to send, so return immediately.
            return self.api_requests
        batch_size = max_batch_size or len(requests)

        for attempt in range(max_retries):
            if attempt:
                time.sleep(sleep_between_polls)

            for i in range(0, len(requests), batch_size):
                # Create a batch_http_request object and populate it with
                # incomplete requests.
                batch_http_request = BatchHttpRequest(
                    batch_url=self.batch_url,
                    callback=batch_request_callback
                )
                for request in itertools.islice(requests,
                                                i, i + batch_size):
                    batch_http_request.Add(
                        request.http_request, request.HandleResponse)
                batch_http_request.Execute(http)

                # Refresh credentials (once per sub-batch) if any request
                # in this sub-batch failed with UNAUTHORIZED.
                if hasattr(http.request, 'credentials'):
                    if any(request.authorization_failed
                           for request in itertools.islice(requests,
                                                           i, i + batch_size)):
                        http.request.credentials.refresh(http)

            # Collect retryable requests.
            requests = [request for request in self.api_requests if not
                        request.terminal_state]
            if not requests:
                break

        return self.api_requests
+
+
class BatchHttpRequest(object):

    """Batches multiple http_wrapper.Request objects into a single request."""

    def __init__(self, batch_url, callback=None):
        """Constructor for a BatchHttpRequest.

        Args:
          batch_url: URL to send batch requests to.
          callback: A callback to be called for each response, of the
              form callback(response, exception). The first parameter is
              the deserialized Response object. The second is an
              apiclient.errors.HttpError exception object if an HTTP error
              occurred while processing the request, or None if no error
              occurred.
        """
        # Endpoint to which these requests are sent.
        self.__batch_url = batch_url

        # Global callback to be called for each individual response in the
        # batch.
        self.__callback = callback

        # List of requests, responses and handlers.
        self.__request_response_handlers = {}

        # The last auto generated id.
        self.__last_auto_id = itertools.count()

        # Unique ID on which to base the Content-ID headers.
        self.__base_id = uuid.uuid4()

    def _ConvertIdToHeader(self, request_id):
        """Convert an id to a Content-ID header value.

        Args:
          request_id: String identifier for a individual request.

        Returns:
          A Content-ID header with the id_ encoded into it. A UUID is
          prepended to the value because Content-ID headers are
          supposed to be universally unique.

        """
        return '<%s+%s>' % (self.__base_id, urllib_parse.quote(request_id))

    @staticmethod
    def _ConvertHeaderToId(header):
        """Convert a Content-ID header value to an id.

        Presumes the Content-ID header conforms to the format that
        _ConvertIdToHeader() returns.

        Args:
          header: A string indicating the Content-ID header value.

        Returns:
          The extracted id value.

        Raises:
          BatchError if the header is not in the expected format.
        """
        # Bug fix: require BOTH the leading '<' and trailing '>'. The
        # previous 'or' only rejected headers that were missing both
        # brackets, letting e.g. '<foo+1' through.
        if not (header.startswith('<') and header.endswith('>')):
            raise exceptions.BatchError(
                'Invalid value for Content-ID: %s' % header)
        if '+' not in header:
            raise exceptions.BatchError(
                'Invalid value for Content-ID: %s' % header)
        _, request_id = header[1:-1].rsplit('+', 1)

        return urllib_parse.unquote(request_id)

    def _SerializeRequest(self, request):
        """Convert a http_wrapper.Request object into a string.

        Args:
          request: A http_wrapper.Request to serialize.

        Returns:
          The request as a string in application/http format.
        """
        # Construct status line
        parsed = urllib_parse.urlsplit(request.url)
        request_line = urllib_parse.urlunsplit(
            (None, None, parsed.path, parsed.query, None))
        # Bug fix: on Python 3 urlunsplit returns text, which has no
        # .decode(); only decode when we actually received bytes.
        if not isinstance(request_line, six.text_type):
            request_line = request_line.decode('utf-8')
        status_line = u' '.join((
            request.http_method,
            request_line,
            u'HTTP/1.1\n'
        ))
        major, minor = request.headers.get(
            'content-type', 'application/json').split('/')
        msg = mime_nonmultipart.MIMENonMultipart(major, minor)

        # MIMENonMultipart adds its own Content-Type header.
        # Keep all of the other headers in `request.headers`.
        for key, value in request.headers.items():
            if key == 'content-type':
                continue
            msg[key] = value

        msg['Host'] = parsed.netloc
        msg.set_unixfrom(None)

        if request.body is not None:
            msg.set_payload(request.body)

        # Serialize the mime message.
        str_io = six.StringIO()
        # maxheaderlen=0 means don't line wrap headers.
        gen = generator.Generator(str_io, maxheaderlen=0)
        gen.flatten(msg, unixfrom=False)
        body = str_io.getvalue()

        return status_line + body

    def _DeserializeResponse(self, payload):
        """Convert string into Response and content.

        Args:
          payload: Header and body string to be deserialized.

        Returns:
          A Response object
        """
        # Strip off the status line.
        status_line, payload = payload.split('\n', 1)
        _, status, _ = status_line.split(' ', 2)

        # Parse the rest of the response.
        parser = email_parser.Parser()
        msg = parser.parsestr(payload)

        # Get the headers.
        info = dict(msg)
        info['status'] = status

        # Create Response from the parsed headers.
        content = msg.get_payload()

        return http_wrapper.Response(info, content, self.__batch_url)

    def _NewId(self):
        """Create a new id.

        Auto incrementing number that avoids conflicts with ids already used.

        Returns:
           A new unique id string.
        """
        return str(next(self.__last_auto_id))

    def Add(self, request, callback=None):
        """Add a new request.

        Args:
          request: A http_wrapper.Request to add to the batch.
          callback: A callback to be called for this response, of the
              form callback(response, exception). The first parameter is the
              deserialized response object. The second is an
              apiclient.errors.HttpError exception object if an HTTP error
              occurred while processing the request, or None if no errors
              occurred.

        Returns:
          None
        """
        handler = RequestResponseAndHandler(request, None, callback)
        self.__request_response_handlers[self._NewId()] = handler

    def _Execute(self, http):
        """Serialize batch request, send to server, process response.

        Args:
          http: A httplib2.Http object to be used to make the request with.

        Raises:
          httplib2.HttpLib2Error if a transport error has occured.
          apiclient.errors.BatchError if the response is the wrong format.
        """
        message = mime_multipart.MIMEMultipart('mixed')
        # Message should not write out its own headers.
        setattr(message, '_write_headers', lambda self: None)

        # Add all the individual requests.
        for key in self.__request_response_handlers:
            msg = mime_nonmultipart.MIMENonMultipart('application', 'http')
            msg['Content-Transfer-Encoding'] = 'binary'
            msg['Content-ID'] = self._ConvertIdToHeader(key)

            body = self._SerializeRequest(
                self.__request_response_handlers[key].request)
            msg.set_payload(body)
            message.attach(msg)

        request = http_wrapper.Request(self.__batch_url, 'POST')
        request.body = message.as_string()
        request.headers['content-type'] = (
            'multipart/mixed; boundary="%s"') % message.get_boundary()

        response = http_wrapper.MakeRequest(http, request)

        if response.status_code >= 300:
            raise exceptions.HttpError.FromResponse(response)

        # Prepend with a content-type header so Parser can handle it.
        header = 'content-type: %s\r\n\r\n' % response.info['content-type']

        parser = email_parser.Parser()
        mime_response = parser.parsestr(header + response.content)

        if not mime_response.is_multipart():
            raise exceptions.BatchError(
                'Response not in multipart/mixed format.')

        for part in mime_response.get_payload():
            request_id = self._ConvertHeaderToId(part['Content-ID'])
            response = self._DeserializeResponse(part.get_payload())

            # Disable protected access because namedtuple._replace(...)
            # is not actually meant to be protected.
            # pylint: disable=protected-access
            self.__request_response_handlers[request_id] = (
                self.__request_response_handlers[request_id]._replace(
                    response=response))

    def Execute(self, http):
        """Execute all the requests as a single batched HTTP request.

        Args:
          http: A httplib2.Http object to be used with the request.

        Returns:
          None

        Raises:
          BatchError if the response is the wrong format.
        """

        self._Execute(http)

        # Dispatch per-request and global callbacks for every response.
        for key in self.__request_response_handlers:
            response = self.__request_response_handlers[key].response
            callback = self.__request_response_handlers[key].handler

            exception = None

            if response.status_code >= 300:
                exception = exceptions.HttpError.FromResponse(response)

            if callback is not None:
                callback(response, exception)
            if self.__callback is not None:
                self.__callback(response, exception)
diff --git a/apitools/base/py/batch_test.py b/apitools/base/py/batch_test.py
new file mode 100644
index 0000000..9bf9dd0
--- /dev/null
+++ b/apitools/base/py/batch_test.py
@@ -0,0 +1,599 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for apitools.base.py.batch."""
+
+import textwrap
+
+import mock
+from six.moves import http_client
+from six.moves import range  # pylint:disable=redefined-builtin
+from six.moves.urllib import parse
+import unittest2
+
+from apitools.base.py import batch
+from apitools.base.py import exceptions
+from apitools.base.py import http_wrapper
+
+
class FakeCredentials(object):

    """Credentials stub that counts how many times it gets refreshed."""

    def __init__(self):
        # Number of refresh() calls observed so far.
        self.num_refreshes = 0

    def refresh(self, _):
        self.num_refreshes = self.num_refreshes + 1
+
+
class FakeHttp(object):

    """Minimal stand-in for an httplib2.Http object used by batch tests."""

    class FakeRequest(object):

        """Request stub that optionally carries credentials."""

        def __init__(self, credentials=None):
            # Only expose a .credentials attribute when one was supplied,
            # mirroring how the batch code probes with hasattr().
            if credentials is not None:
                self.credentials = credentials

    def __init__(self, credentials=None):
        self.request = FakeHttp.FakeRequest(credentials=credentials)
+
+
class FakeService(object):

    """A service for testing."""

    def GetMethodConfig(self, _):
        # Batch code only needs some config object here; empty dict suffices.
        return {}

    def GetUploadConfig(self, _):
        return {}

    # pylint: disable=unused-argument
    def PrepareHttpRequest(
            self, method_config, request, global_params, upload_config):
        # Hand back whatever request the test stashed in global_params.
        return global_params['desired_request']
    # pylint: enable=unused-argument

    def ProcessHttpResponse(self, _, http_response):
        # Identity: tests inspect the raw response.
        return http_response
+
+
+class BatchTest(unittest2.TestCase):
+
+    def assertUrlEqual(self, expected_url, provided_url):
+
+        def parse_components(url):
+            parsed = parse.urlsplit(url)
+            query = parse.parse_qs(parsed.query)
+            return parsed._replace(query=''), query
+
+        expected_parse, expected_query = parse_components(expected_url)
+        provided_parse, provided_query = parse_components(provided_url)
+
+        self.assertEqual(expected_parse, provided_parse)
+        self.assertEqual(expected_query, provided_query)
+
+    def __ConfigureMock(self, mock_request, expected_request, response):
+
+        if isinstance(response, list):
+            response = list(response)
+
+        def CheckRequest(_, request, **unused_kwds):
+            self.assertUrlEqual(expected_request.url, request.url)
+            self.assertEqual(expected_request.http_method, request.http_method)
+            if isinstance(response, list):
+                return response.pop(0)
+            return response
+
+        mock_request.side_effect = CheckRequest
+
    def testRequestServiceUnavailable(self):
        """A 503 with no retryable codes surfaces as an error without retry."""
        mock_service = FakeService()

        desired_url = 'https://www.example.com'
        batch_api_request = batch.BatchApiRequest(batch_url=desired_url,
                                                  retryable_codes=[])
        # The request to be added. The actual request sent will be somewhat
        # larger, as this is added to a batch.
        desired_request = http_wrapper.Request(desired_url, 'POST', {
            'content-type': 'multipart/mixed; boundary="None"',
            'content-length': 80,
        }, 'x' * 80)

        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as mock_request:
            # Expected outer batch request (419 bytes) and a canned
            # multipart response whose single part carries a 503.
            self.__ConfigureMock(
                mock_request,
                http_wrapper.Request(desired_url, 'POST', {
                    'content-type': 'multipart/mixed; boundary="None"',
                    'content-length': 419,
                }, 'x' * 419),
                http_wrapper.Response({
                    'status': '200',
                    'content-type': 'multipart/mixed; boundary="boundary"',
                }, textwrap.dedent("""\
                --boundary
                content-type: text/plain
                content-id: <id+0>

                HTTP/1.1 503 SERVICE UNAVAILABLE
                nope
                --boundary--"""), None))

            batch_api_request.Add(
                mock_service, 'unused', None,
                global_params={'desired_request': desired_request})

            api_request_responses = batch_api_request.Execute(
                FakeHttp(), sleep_between_polls=0)

            self.assertEqual(1, len(api_request_responses))

            # Make sure we didn't retry non-retryable code 503.
            self.assertEqual(1, mock_request.call_count)

            self.assertTrue(api_request_responses[0].is_error)
            self.assertIsNone(api_request_responses[0].response)
            self.assertIsInstance(api_request_responses[0].exception,
                                  exceptions.HttpError)
+
+    def testSingleRequestInBatch(self):
+        desired_url = 'https://www.example.com'
+
+        callback_was_called = []
+
+        def _Callback(response, exception):
+            self.assertEqual({'status': '200'}, response.info)
+            self.assertEqual('content', response.content)
+            self.assertEqual(desired_url, response.request_url)
+            self.assertIsNone(exception)
+            callback_was_called.append(1)
+
+        mock_service = FakeService()
+
+        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
+        # The request to be added. The actual request sent will be somewhat
+        # larger, as this is added to a batch.
+        desired_request = http_wrapper.Request(desired_url, 'POST', {
+            'content-type': 'multipart/mixed; boundary="None"',
+            'content-length': 80,
+        }, 'x' * 80)
+
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as mock_request:
+            self.__ConfigureMock(
+                mock_request,
+                http_wrapper.Request(desired_url, 'POST', {
+                    'content-type': 'multipart/mixed; boundary="None"',
+                    'content-length': 419,
+                }, 'x' * 419),
+                http_wrapper.Response({
+                    'status': '200',
+                    'content-type': 'multipart/mixed; boundary="boundary"',
+                }, textwrap.dedent("""\
+                --boundary
+                content-type: text/plain
+                content-id: <id+0>
+
+                HTTP/1.1 200 OK
+                content
+                --boundary--"""), None))
+
+            batch_api_request.Add(mock_service, 'unused', None, {
+                'desired_request': desired_request,
+            })
+
+            api_request_responses = batch_api_request.Execute(
+                FakeHttp(), batch_request_callback=_Callback)
+
+            self.assertEqual(1, len(api_request_responses))
+            self.assertEqual(1, mock_request.call_count)
+
+            self.assertFalse(api_request_responses[0].is_error)
+
+            response = api_request_responses[0].response
+            self.assertEqual({'status': '200'}, response.info)
+            self.assertEqual('content', response.content)
+            self.assertEqual(desired_url, response.request_url)
+        self.assertEquals(1, len(callback_was_called))
+
+    def _MakeResponse(self, number_of_parts):
+        return http_wrapper.Response(
+            info={
+                'status': '200',
+                'content-type': 'multipart/mixed; boundary="boundary"',
+            },
+            content='--boundary\n' + '--boundary\n'.join(
+                textwrap.dedent("""\
+                    content-type: text/plain
+                    content-id: <id+{0}>
+
+                    HTTP/1.1 200 OK
+                    response {0} content
+
+                    """)
+                .format(i) for i in range(number_of_parts)) + '--boundary--',
+            request_url=None,
+        )
+
+    def _MakeSampleRequest(self, url, name):
+        return http_wrapper.Request(url, 'POST', {
+            'content-type': 'multipart/mixed; boundary="None"',
+            'content-length': 80,
+        }, '{0} {1}'.format(name, 'x' * (79 - len(name))))
+
+    def testMultipleRequestInBatchWithMax(self):
+        mock_service = FakeService()
+
+        desired_url = 'https://www.example.com'
+        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
+
+        number_of_requests = 10
+        max_batch_size = 3
+        for i in range(number_of_requests):
+            batch_api_request.Add(
+                mock_service, 'unused', None,
+                {'desired_request': self._MakeSampleRequest(
+                    desired_url, 'Sample-{0}'.format(i))})
+
+        responses = []
+        for i in range(0, number_of_requests, max_batch_size):
+            responses.append(
+                self._MakeResponse(
+                    min(number_of_requests - i, max_batch_size)))
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as mock_request:
+            self.__ConfigureMock(
+                mock_request,
+                expected_request=http_wrapper.Request(desired_url, 'POST', {
+                    'content-type': 'multipart/mixed; boundary="None"',
+                    'content-length': 1142,
+                }, 'x' * 1142),
+                response=responses)
+            api_request_responses = batch_api_request.Execute(
+                FakeHttp(), max_batch_size=max_batch_size)
+
+        self.assertEqual(number_of_requests, len(api_request_responses))
+        self.assertEqual(
+            -(-number_of_requests // max_batch_size),
+            mock_request.call_count)
+
+    def testRefreshOnAuthFailure(self):
+        mock_service = FakeService()
+
+        desired_url = 'https://www.example.com'
+        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
+        # The request to be added. The actual request sent will be somewhat
+        # larger, as this is added to a batch.
+        desired_request = http_wrapper.Request(desired_url, 'POST', {
+            'content-type': 'multipart/mixed; boundary="None"',
+            'content-length': 80,
+        }, 'x' * 80)
+
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as mock_request:
+            self.__ConfigureMock(
+                mock_request,
+                http_wrapper.Request(desired_url, 'POST', {
+                    'content-type': 'multipart/mixed; boundary="None"',
+                    'content-length': 419,
+                }, 'x' * 419), [
+                    http_wrapper.Response({
+                        'status': '200',
+                        'content-type': 'multipart/mixed; boundary="boundary"',
+                    }, textwrap.dedent("""\
+                    --boundary
+                    content-type: text/plain
+                    content-id: <id+0>
+
+                    HTTP/1.1 401 UNAUTHORIZED
+                    Invalid grant
+
+                    --boundary--"""), None),
+                    http_wrapper.Response({
+                        'status': '200',
+                        'content-type': 'multipart/mixed; boundary="boundary"',
+                    }, textwrap.dedent("""\
+                    --boundary
+                    content-type: text/plain
+                    content-id: <id+0>
+
+                    HTTP/1.1 200 OK
+                    content
+                    --boundary--"""), None)
+                ])
+
+            batch_api_request.Add(mock_service, 'unused', None, {
+                'desired_request': desired_request,
+            })
+
+            credentials = FakeCredentials()
+            api_request_responses = batch_api_request.Execute(
+                FakeHttp(credentials=credentials), sleep_between_polls=0)
+
+            self.assertEqual(1, len(api_request_responses))
+            self.assertEqual(2, mock_request.call_count)
+            self.assertEqual(1, credentials.num_refreshes)
+
+            self.assertFalse(api_request_responses[0].is_error)
+
+            response = api_request_responses[0].response
+            self.assertEqual({'status': '200'}, response.info)
+            self.assertEqual('content', response.content)
+            self.assertEqual(desired_url, response.request_url)
+
+    def testNoAttempts(self):
+        desired_url = 'https://www.example.com'
+        batch_api_request = batch.BatchApiRequest(batch_url=desired_url)
+        batch_api_request.Add(FakeService(), 'unused', None, {
+            'desired_request': http_wrapper.Request(desired_url, 'POST', {
+                'content-type': 'multipart/mixed; boundary="None"',
+                'content-length': 80,
+            }, 'x' * 80),
+        })
+        api_request_responses = batch_api_request.Execute(None, max_retries=0)
+        self.assertEqual(1, len(api_request_responses))
+        self.assertIsNone(api_request_responses[0].response)
+        self.assertIsNone(api_request_responses[0].exception)
+
+    def _DoTestConvertIdToHeader(self, test_id, expected_result):
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+        self.assertEqual(
+            expected_result % batch_request._BatchHttpRequest__base_id,
+            batch_request._ConvertIdToHeader(test_id))
+
+    def testConvertIdSimple(self):
+        self._DoTestConvertIdToHeader('blah', '<%s+blah>')
+
+    def testConvertIdThatNeedsEscaping(self):
+        self._DoTestConvertIdToHeader('~tilde1', '<%s+%%7Etilde1>')
+
+    def _DoTestConvertHeaderToId(self, header, expected_id):
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+        self.assertEqual(expected_id,
+                         batch_request._ConvertHeaderToId(header))
+
+    def testConvertHeaderToIdSimple(self):
+        self._DoTestConvertHeaderToId('<hello+blah>', 'blah')
+
+    def testConvertHeaderToIdWithLotsOfPlus(self):
+        self._DoTestConvertHeaderToId('<a+++++plus>', 'plus')
+
+    def _DoTestConvertInvalidHeaderToId(self, invalid_header):
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+        self.assertRaises(exceptions.BatchError,
+                          batch_request._ConvertHeaderToId, invalid_header)
+
+    def testHeaderWithoutAngleBrackets(self):
+        self._DoTestConvertInvalidHeaderToId('1+1')
+
+    def testHeaderWithoutPlus(self):
+        self._DoTestConvertInvalidHeaderToId('<HEADER>')
+
+    def testSerializeRequest(self):
+        request = http_wrapper.Request(body='Hello World', headers={
+            'content-type': 'protocol/version',
+        })
+        expected_serialized_request = '\n'.join([
+            'GET  HTTP/1.1',
+            'Content-Type: protocol/version',
+            'MIME-Version: 1.0',
+            'content-length: 11',
+            'Host: ',
+            '',
+            'Hello World',
+        ])
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+        self.assertEqual(expected_serialized_request,
+                         batch_request._SerializeRequest(request))
+
+    def testSerializeRequestPreservesHeaders(self):
+        # Now confirm that if an additional, arbitrary header is added
+        # that it is successfully serialized to the request. Merely
+        # check that it is included, because the order of the headers
+        # in the request is arbitrary.
+        request = http_wrapper.Request(body='Hello World', headers={
+            'content-type': 'protocol/version',
+            'key': 'value',
+        })
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+        self.assertTrue(
+            'key: value\n' in batch_request._SerializeRequest(request))
+
+    def testSerializeRequestNoBody(self):
+        request = http_wrapper.Request(body=None, headers={
+            'content-type': 'protocol/version',
+        })
+        expected_serialized_request = '\n'.join([
+            'GET  HTTP/1.1',
+            'Content-Type: protocol/version',
+            'MIME-Version: 1.0',
+            'Host: ',
+            '',
+            '',
+        ])
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+        self.assertEqual(expected_serialized_request,
+                         batch_request._SerializeRequest(request))
+
+    def testDeserializeRequest(self):
+        serialized_payload = '\n'.join([
+            'GET  HTTP/1.1',
+            'Content-Type: protocol/version',
+            'MIME-Version: 1.0',
+            'content-length: 11',
+            'key: value',
+            'Host: ',
+            '',
+            'Hello World',
+        ])
+        example_url = 'https://www.example.com'
+        expected_response = http_wrapper.Response({
+            'content-length': str(len('Hello World')),
+            'Content-Type': 'protocol/version',
+            'key': 'value',
+            'MIME-Version': '1.0',
+            'status': '',
+            'Host': ''
+        }, 'Hello World', example_url)
+
+        batch_request = batch.BatchHttpRequest(example_url)
+        self.assertEqual(
+            expected_response,
+            batch_request._DeserializeResponse(serialized_payload))
+
+    def testNewId(self):
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+
+        for i in range(100):
+            self.assertEqual(str(i), batch_request._NewId())
+
+    def testAdd(self):
+        batch_request = batch.BatchHttpRequest('https://www.example.com')
+
+        for x in range(100):
+            batch_request.Add(http_wrapper.Request(body=str(x)))
+
+        for key in batch_request._BatchHttpRequest__request_response_handlers:
+            value = batch_request._BatchHttpRequest__request_response_handlers[
+                key]
+            self.assertEqual(key, value.request.body)
+            self.assertFalse(value.request.url)
+            self.assertEqual('GET', value.request.http_method)
+            self.assertIsNone(value.response)
+            self.assertIsNone(value.handler)
+
+    def testInternalExecuteWithFailedRequest(self):
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as mock_request:
+            self.__ConfigureMock(
+                mock_request,
+                http_wrapper.Request('https://www.example.com', 'POST', {
+                    'content-type': 'multipart/mixed; boundary="None"',
+                    'content-length': 80,
+                }, 'x' * 80),
+                http_wrapper.Response({'status': '300'}, None, None))
+
+            batch_request = batch.BatchHttpRequest('https://www.example.com')
+
+            self.assertRaises(
+                exceptions.HttpError, batch_request._Execute, None)
+
+    def testInternalExecuteWithNonMultipartResponse(self):
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as mock_request:
+            self.__ConfigureMock(
+                mock_request,
+                http_wrapper.Request('https://www.example.com', 'POST', {
+                    'content-type': 'multipart/mixed; boundary="None"',
+                    'content-length': 80,
+                }, 'x' * 80),
+                http_wrapper.Response({
+                    'status': '200',
+                    'content-type': 'blah/blah'
+                }, '', None))
+
+            batch_request = batch.BatchHttpRequest('https://www.example.com')
+
+            self.assertRaises(
+                exceptions.BatchError, batch_request._Execute, None)
+
+    def testInternalExecute(self):
+        with mock.patch.object(http_wrapper, 'MakeRequest',
+                               autospec=True) as mock_request:
+            self.__ConfigureMock(
+                mock_request,
+                http_wrapper.Request('https://www.example.com', 'POST', {
+                    'content-type': 'multipart/mixed; boundary="None"',
+                    'content-length': 583,
+                }, 'x' * 583),
+                http_wrapper.Response({
+                    'status': '200',
+                    'content-type': 'multipart/mixed; boundary="boundary"',
+                }, textwrap.dedent("""\
+                --boundary
+                content-type: text/plain
+                content-id: <id+2>
+
+                HTTP/1.1 200 OK
+                Second response
+
+                --boundary
+                content-type: text/plain
+                content-id: <id+1>
+
+                HTTP/1.1 401 UNAUTHORIZED
+                First response
+
+                --boundary--"""), None))
+
+            test_requests = {
+                '1': batch.RequestResponseAndHandler(
+                    http_wrapper.Request(body='first'), None, None),
+                '2': batch.RequestResponseAndHandler(
+                    http_wrapper.Request(body='second'), None, None),
+            }
+
+            batch_request = batch.BatchHttpRequest('https://www.example.com')
+            batch_request._BatchHttpRequest__request_response_handlers = (
+                test_requests)
+
+            batch_request._Execute(FakeHttp())
+
+            test_responses = (
+                batch_request._BatchHttpRequest__request_response_handlers)
+
+            self.assertEqual(http_client.UNAUTHORIZED,
+                             test_responses['1'].response.status_code)
+            self.assertEqual(http_client.OK,
+                             test_responses['2'].response.status_code)
+
+            self.assertIn(
+                'First response', test_responses['1'].response.content)
+            self.assertIn(
+                'Second response', test_responses['2'].response.content)
+
+    def testPublicExecute(self):
+
+        def LocalCallback(response, exception):
+            self.assertEqual({'status': '418'}, response.info)
+            self.assertEqual('Teapot', response.content)
+            self.assertIsNone(response.request_url)
+            self.assertIsInstance(exception, exceptions.HttpError)
+
+        global_callback = mock.Mock()
+        batch_request = batch.BatchHttpRequest(
+            'https://www.example.com', global_callback)
+
+        with mock.patch.object(batch.BatchHttpRequest, '_Execute',
+                               autospec=True) as mock_execute:
+            mock_execute.return_value = None
+
+            test_requests = {
+                '0': batch.RequestResponseAndHandler(
+                    None,
+                    http_wrapper.Response({'status': '200'}, 'Hello!', None),
+                    None),
+                '1': batch.RequestResponseAndHandler(
+                    None,
+                    http_wrapper.Response({'status': '418'}, 'Teapot', None),
+                    LocalCallback),
+            }
+
+            batch_request._BatchHttpRequest__request_response_handlers = (
+                test_requests)
+            batch_request.Execute(None)
+
+            # Global callback was called once per handler.
+            self.assertEqual(len(test_requests), global_callback.call_count)
diff --git a/apitools/base/py/buffered_stream.py b/apitools/base/py/buffered_stream.py
new file mode 100644
index 0000000..a170c86
--- /dev/null
+++ b/apitools/base/py/buffered_stream.py
@@ -0,0 +1,74 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Small helper class to provide a small slice of a stream.
+
+This class reads ahead to detect if we are at the end of the stream.
+"""
+
+from apitools.base.py import exceptions
+
+
+# TODO(user): Consider replacing this with a StringIO.
+class BufferedStream(object):
+
+    """Buffers a stream, reading ahead to determine if we're at the end."""
+
+    def __init__(self, stream, start, size):
+        self.__stream = stream
+        self.__start_pos = start
+        self.__buffer_pos = 0
+        self.__buffered_data = self.__stream.read(size)
+        self.__stream_at_end = len(self.__buffered_data) < size
+        self.__end_pos = self.__start_pos + len(self.__buffered_data)
+
+    def __str__(self):
+        return ('Buffered stream %s from position %s-%s with %s '
+                'bytes remaining' % (self.__stream, self.__start_pos,
+                                     self.__end_pos, self._bytes_remaining))
+
+    def __len__(self):
+        return len(self.__buffered_data)
+
+    @property
+    def stream_exhausted(self):
+        return self.__stream_at_end
+
+    @property
+    def stream_end_position(self):
+        return self.__end_pos
+
+    @property
+    def _bytes_remaining(self):
+        return len(self.__buffered_data) - self.__buffer_pos
+
+    def read(self, size=None):  # pylint: disable=invalid-name
+        """Reads from the buffer."""
+        if size is None or size < 0:
+            raise exceptions.NotYetImplementedError(
+                'Illegal read of size %s requested on BufferedStream. '
+                'Wrapped stream %s is at position %s-%s, '
+                '%s bytes remaining.' %
+                (size, self.__stream, self.__start_pos, self.__end_pos,
+                 self._bytes_remaining))
+
+        data = ''
+        if self._bytes_remaining:
+            size = min(size, self._bytes_remaining)
+            data = self.__buffered_data[
+                self.__buffer_pos:self.__buffer_pos + size]
+            self.__buffer_pos += size
+        return data
diff --git a/apitools/base/py/buffered_stream_test.py b/apitools/base/py/buffered_stream_test.py
new file mode 100644
index 0000000..2098fb1
--- /dev/null
+++ b/apitools/base/py/buffered_stream_test.py
@@ -0,0 +1,67 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for buffered_stream."""
+
+import string
+
+import six
+import unittest2
+
+from apitools.base.py import buffered_stream
+from apitools.base.py import exceptions
+
+
+class BufferedStreamTest(unittest2.TestCase):
+
+    def setUp(self):
+        self.stream = six.StringIO(string.ascii_letters)
+        self.value = self.stream.getvalue()
+        self.stream.seek(0)
+
+    def testEmptyBuffer(self):
+        bs = buffered_stream.BufferedStream(self.stream, 0, 0)
+        self.assertEqual('', bs.read(0))
+        self.assertEqual(0, bs.stream_end_position)
+
+    def testOffsetStream(self):
+        bs = buffered_stream.BufferedStream(self.stream, 50, 100)
+        self.assertEqual(len(self.value), len(bs))
+        self.assertEqual(self.value, bs.read(len(self.value)))
+        self.assertEqual(50 + len(self.value), bs.stream_end_position)
+
+    def testUnexhaustedStream(self):
+        bs = buffered_stream.BufferedStream(self.stream, 0, 50)
+        self.assertEqual(50, bs.stream_end_position)
+        self.assertEqual(False, bs.stream_exhausted)
+        self.assertEqual(self.value[0:50], bs.read(50))
+        self.assertEqual(False, bs.stream_exhausted)
+        self.assertEqual('', bs.read(0))
+        self.assertEqual('', bs.read(100))
+
+    def testExhaustedStream(self):
+        bs = buffered_stream.BufferedStream(self.stream, 0, 100)
+        self.assertEqual(len(self.value), bs.stream_end_position)
+        self.assertEqual(True, bs.stream_exhausted)
+        self.assertEqual(self.value, bs.read(100))
+        self.assertEqual('', bs.read(0))
+        self.assertEqual('', bs.read(100))
+
+    def testArbitraryLengthRead(self):
+        bs = buffered_stream.BufferedStream(self.stream, 0, 20)
+        with self.assertRaises(exceptions.NotYetImplementedError):
+            bs.read()
+        with self.assertRaises(exceptions.NotYetImplementedError):
+            bs.read(size=-1)
diff --git a/apitools/base/py/cli.py b/apitools/base/py/cli.py
new file mode 100644
index 0000000..920cfc5
--- /dev/null
+++ b/apitools/base/py/cli.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Top-level import for all CLI-related functionality in apitools.
+
+Note that importing this file will ultimately have side-effects, and
+may require imports not available in all environments (such as App
+Engine). In particular, picking up some readline-related imports can
+cause pain.
+"""
+
+# pylint:disable=wildcard-import
+# pylint:disable=unused-wildcard-import
+
+from apitools.base.py.app2 import *
+from apitools.base.py.base_cli import *
+
+try:
+    # pylint:disable=no-name-in-module
+    from apitools.base.py.internal.cli import *
+except ImportError:
+    pass
diff --git a/apitools/base/py/credentials_lib.py b/apitools/base/py/credentials_lib.py
new file mode 100644
index 0000000..913c144
--- /dev/null
+++ b/apitools/base/py/credentials_lib.py
@@ -0,0 +1,664 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common credentials classes and constructors."""
+from __future__ import print_function
+
+import datetime
+import json
+import os
+import threading
+import warnings
+
+import httplib2
+import oauth2client
+import oauth2client.client
+from oauth2client import service_account
+from oauth2client import tools  # for gflags declarations
+from six.moves import http_client
+from six.moves import urllib
+
+from apitools.base.py import exceptions
+from apitools.base.py import util
+
+# Note: we try the oauth2client imports two ways, to accommodate layout
+# changes in oauth2client 2.0+. We can remove these once we no longer
+# support oauth2client < 2.0.
+#
+# pylint: disable=wrong-import-order,ungrouped-imports
+try:
+    from oauth2client.contrib import gce
+except ImportError:
+    from oauth2client import gce
+
+try:
+    from oauth2client.contrib import locked_file
+except ImportError:
+    from oauth2client import locked_file
+
+try:
+    from oauth2client.contrib import multistore_file
+except ImportError:
+    from oauth2client import multistore_file
+
+try:
+    import gflags
+    FLAGS = gflags.FLAGS
+except ImportError:
+    FLAGS = None
+
+
+__all__ = [
+    'CredentialsFromFile',
+    'GaeAssertionCredentials',
+    'GceAssertionCredentials',
+    'GetCredentials',
+    'GetUserinfo',
+    'ServiceAccountCredentialsFromFile',
+]
+
+
+# Lock when accessing the cache file to avoid resource contention.
+cache_file_lock = threading.Lock()
+
+
+def SetCredentialsCacheFileLock(lock):
+    global cache_file_lock  # pylint: disable=global-statement
+    cache_file_lock = lock
+
+
+# List of additional methods we use when attempting to construct
+# credentials. Users can register their own methods here, which we try
+# before the defaults.
+_CREDENTIALS_METHODS = []
+
+
+def _RegisterCredentialsMethod(method, position=None):
+    """Register a new method for fetching credentials.
+
+    This new method should be a function with signature:
+      client_info, **kwds -> Credentials or None
+    This method can be used as a decorator, unless position needs to
+    be supplied.
+
+    Note that method must *always* accept arbitrary keyword arguments.
+
+    Args:
+      method: New credential-fetching method.
+      position: (default: None) Where in the list of methods to
+        add this; if None, we append. In all but rare cases,
+        this should be either 0 or None.
+    Returns:
+      method, for use as a decorator.
+
+    """
+    if position is None:
+        position = len(_CREDENTIALS_METHODS)
+    else:
+        position = min(position, len(_CREDENTIALS_METHODS))
+    _CREDENTIALS_METHODS.insert(position, method)
+    return method
+
+
+def GetCredentials(package_name, scopes, client_id, client_secret, user_agent,
+                   credentials_filename=None,
+                   api_key=None,  # pylint: disable=unused-argument
+                   client=None,  # pylint: disable=unused-argument
+                   oauth2client_args=None,
+                   **kwds):
+    """Attempt to get credentials, using an oauth dance as the last resort."""
+    scopes = util.NormalizeScopes(scopes)
+    client_info = {
+        'client_id': client_id,
+        'client_secret': client_secret,
+        'scope': ' '.join(sorted(scopes)),
+        'user_agent': user_agent or '%s-generated/0.1' % package_name,
+    }
+    for method in _CREDENTIALS_METHODS:
+        credentials = method(client_info, **kwds)
+        if credentials is not None:
+            return credentials
+    credentials_filename = credentials_filename or os.path.expanduser(
+        '~/.apitools.token')
+    credentials = CredentialsFromFile(credentials_filename, client_info,
+                                      oauth2client_args=oauth2client_args)
+    if credentials is not None:
+        return credentials
+    raise exceptions.CredentialsError('Could not create valid credentials')
+
+
+def ServiceAccountCredentialsFromFile(filename, scopes, user_agent=None):
+    """Use the credentials in filename to create a token for scopes."""
+    filename = os.path.expanduser(filename)
+    # We have two options, based on our version of oauth2client.
+    if oauth2client.__version__ > '1.5.2':
+        # oauth2client >= 2.0.0
+        credentials = (
+            service_account.ServiceAccountCredentials.from_json_keyfile_name(
+                filename, scopes=scopes))
+        if credentials is not None:
+            if user_agent is not None:
+                credentials.user_agent = user_agent
+        return credentials
+    else:
+        # oauth2client < 2.0.0
+        with open(filename) as keyfile:
+            service_account_info = json.load(keyfile)
+        account_type = service_account_info.get('type')
+        if account_type != oauth2client.client.SERVICE_ACCOUNT:
+            raise exceptions.CredentialsError(
+                'Invalid service account credentials: %s' % (filename,))
+        # pylint: disable=protected-access
+        credentials = service_account._ServiceAccountCredentials(
+            service_account_id=service_account_info['client_id'],
+            service_account_email=service_account_info['client_email'],
+            private_key_id=service_account_info['private_key_id'],
+            private_key_pkcs8_text=service_account_info['private_key'],
+            scopes=scopes, user_agent=user_agent)
+        # pylint: enable=protected-access
+        return credentials
+
+
+def ServiceAccountCredentialsFromP12File(
+        service_account_name, private_key_filename, scopes, user_agent):
+    """Create a new credential from the named .p12 keyfile."""
+    private_key_filename = os.path.expanduser(private_key_filename)
+    scopes = util.NormalizeScopes(scopes)
+    if oauth2client.__version__ > '1.5.2':
+        # oauth2client >= 2.0.0
+        credentials = (
+            service_account.ServiceAccountCredentials.from_p12_keyfile(
+                service_account_name, private_key_filename, scopes=scopes))
+        if credentials is not None:
+            credentials.user_agent = user_agent
+        return credentials
+    else:
+        # oauth2client < 2.0.0
+        with open(private_key_filename) as key_file:
+            return oauth2client.client.SignedJwtAssertionCredentials(
+                service_account_name, key_file.read(), scopes,
+                user_agent=user_agent)
+
+
+def _EnsureFileExists(filename):
+    """Touches a file; returns False on error, True on success."""
+    if not os.path.exists(filename):
+        old_umask = os.umask(0o177)
+        try:
+            open(filename, 'a+b').close()
+        except (OSError, IOError):
+            return False
+        finally:
+            os.umask(old_umask)
+    return True
+
+
+def _GceMetadataRequest(relative_url, use_metadata_ip=False):
+    """Request the given url from the GCE metadata service."""
+    if use_metadata_ip:
+        base_url = os.environ.get('GCE_METADATA_IP', '169.254.169.254')
+    else:
+        base_url = os.environ.get(
+            'GCE_METADATA_ROOT', 'metadata.google.internal')
+    url = 'http://' + base_url + '/computeMetadata/v1/' + relative_url
+    # Extra header requirement can be found here:
+    # https://developers.google.com/compute/docs/metadata
+    headers = {'Metadata-Flavor': 'Google'}
+    request = urllib.request.Request(url, headers=headers)
+    opener = urllib.request.build_opener(urllib.request.ProxyHandler({}))
+    try:
+        response = opener.open(request)
+    except urllib.error.URLError as e:
+        raise exceptions.CommunicationError(
+            'Could not reach metadata service: %s' % e.reason)
+    return response
+
+
+class GceAssertionCredentials(gce.AppAssertionCredentials):
+
+    """Assertion credentials for GCE instances."""
+
+    def __init__(self, scopes=None, service_account_name='default', **kwds):
+        """Initializes the credentials instance.
+
+        Args:
+          scopes: The scopes to get. If None, whatever scopes that are
+              available to the instance are used.
+          service_account_name: The service account to retrieve the scopes
+              from.
+          **kwds: Additional keyword args.
+
+        """
+        # If there is a connectivity issue with the metadata server,
+        # detection calls may fail even if we've already successfully
+        # identified these scopes in the same execution. However, the
+        # available scopes don't change once an instance is created,
+        # so there is no reason to perform more than one query.
+        self.__service_account_name = service_account_name
+        cached_scopes = None
+        cache_filename = kwds.get('cache_filename')
+        if cache_filename:
+            cached_scopes = self._CheckCacheFileForMatch(
+                cache_filename, scopes)
+
+        # On a cache miss, fall back to querying the metadata server.
+        scopes = cached_scopes or self._ScopesFromMetadataServer(scopes)
+
+        # Populate the cache so later constructions can skip the query.
+        if cache_filename and not cached_scopes:
+            self._WriteCacheFile(cache_filename, scopes)
+
+        # We check the scopes above, but don't need them again after
+        # this point. Newer versions of oauth2client let us drop them
+        # here, but since we support older versions as well, we just
+        # catch and squelch the warning.
+        with warnings.catch_warnings():
+            warnings.simplefilter('ignore')
+            super(GceAssertionCredentials, self).__init__(scopes, **kwds)
+
+    @classmethod
+    def Get(cls, *args, **kwds):
+        """Returns a credentials instance, or None on any apitools error."""
+        try:
+            return cls(*args, **kwds)
+        except exceptions.Error:
+            return None
+
+    def _CheckCacheFileForMatch(self, cache_filename, scopes):
+        """Checks the cache file to see if it matches the given credentials.
+
+        Args:
+          cache_filename: Cache filename to check.
+          scopes: Scopes for the desired credentials.
+
+        Returns:
+          The cached list of scopes on a match; otherwise the `scopes`
+          argument is returned unchanged.
+        """
+        # NOTE(review): on a cache miss (missing file, mismatched service
+        # account, or parse failure) this returns the caller-supplied
+        # `scopes` unchanged; the caller in __init__ treats any truthy
+        # return as a cache hit — confirm this is intended.
+        creds = {  # Credentials metadata dict.
+            'scopes': sorted(list(scopes)) if scopes else None,
+            'svc_acct_name': self.__service_account_name,
+        }
+        with cache_file_lock:
+            if _EnsureFileExists(cache_filename):
+                cache_file = locked_file.LockedFile(
+                    cache_filename, 'r+b', 'rb')
+                try:
+                    cache_file.open_and_lock()
+                    cached_creds_str = cache_file.file_handle().read()
+                    if cached_creds_str:
+                        # Cached credentials metadata dict.
+                        cached_creds = json.loads(cached_creds_str)
+                        # Match requires the same service account; scopes
+                        # match when none were requested or the (sorted)
+                        # requested list equals the cached list.
+                        if (creds['svc_acct_name'] ==
+                                cached_creds['svc_acct_name']):
+                            if (creds['scopes'] in
+                                    (None, cached_creds['scopes'])):
+                                scopes = cached_creds['scopes']
+                except KeyboardInterrupt:
+                    raise
+                except:  # pylint: disable=bare-except
+                    # Treat exceptions as a cache miss.
+                    pass
+                finally:
+                    cache_file.unlock_and_close()
+        return scopes
+
+    def _WriteCacheFile(self, cache_filename, scopes):
+        """Writes the credential metadata to the cache file.
+
+        This does not save the credentials themselves (CredentialStore class
+        optionally handles that after this class is initialized).
+
+        Args:
+          cache_filename: Cache filename to check.
+          scopes: Scopes for the desired credentials.
+        """
+        with cache_file_lock:
+            if _EnsureFileExists(cache_filename):
+                cache_file = locked_file.LockedFile(
+                    cache_filename, 'r+b', 'rb')
+                try:
+                    cache_file.open_and_lock()
+                    if cache_file.is_locked():
+                        creds = {  # Credentials metadata dict.
+                            'scopes': sorted(list(scopes)),
+                            'svc_acct_name': self.__service_account_name}
+                        cache_file.file_handle().write(
+                            json.dumps(creds, encoding='ascii'))
+                        # If it's not locked, the locking process will
+                        # write the same data to the file, so just
+                        # continue.
+                except KeyboardInterrupt:
+                    raise
+                except:  # pylint: disable=bare-except
+                    # Treat exceptions as a cache miss.
+                    pass
+                finally:
+                    cache_file.unlock_and_close()
+
+    def _ScopesFromMetadataServer(self, scopes):
+        """Returns instance scopes based on GCE metadata server."""
+        if not util.DetectGce():
+            raise exceptions.ResourceUnavailableError(
+                'GCE credentials requested outside a GCE instance')
+        if not self.GetServiceAccount(self.__service_account_name):
+            raise exceptions.ResourceUnavailableError(
+                'GCE credentials requested but service account '
+                '%s does not exist.' % self.__service_account_name)
+        if scopes:
+            scope_ls = util.NormalizeScopes(scopes)
+            instance_scopes = self.GetInstanceScopes()
+            if scope_ls > instance_scopes:
+                raise exceptions.CredentialsError(
+                    'Instance did not have access to scopes %s' % (
+                        sorted(list(scope_ls - instance_scopes)),))
+        else:
+            scopes = self.GetInstanceScopes()
+        return scopes
+
+    def GetServiceAccount(self, account):
+        """Returns True if `account` is listed by the metadata server."""
+        relative_url = 'instance/service-accounts'
+        response = _GceMetadataRequest(relative_url)
+        # The listing is one account per line; strip trailing '/' and EOLs.
+        response_lines = [line.rstrip('/\n\r')
+                          for line in response.readlines()]
+        return account in response_lines
+
+    def GetInstanceScopes(self):
+        """Returns the normalized set of scopes for this service account."""
+        relative_url = 'instance/service-accounts/{0}/scopes'.format(
+            self.__service_account_name)
+        response = _GceMetadataRequest(relative_url)
+        # The metadata server returns one scope per line.
+        return util.NormalizeScopes(scope.strip()
+                                    for scope in response.readlines())
+
+    # pylint: disable=arguments-differ
+    def _refresh(self, do_request):
+        """Refresh self.access_token.
+
+        This function replaces AppAssertionCredentials._refresh, which
+        does not use the credential store and is therefore poorly
+        suited for multi-threaded scenarios.
+
+        Args:
+          do_request: A function matching httplib2.Http.request's signature.
+
+        """
+        # Delegate to the grandparent implementation, which consults the
+        # credential store.
+        # pylint: disable=protected-access
+        oauth2client.client.OAuth2Credentials._refresh(self, do_request)
+        # pylint: enable=protected-access
+
+    def _do_refresh_request(self, unused_http_request):
+        """Refresh self.access_token by querying the metadata server.
+
+        If self.store is initialized, store acquired credentials there.
+        """
+        relative_url = 'instance/service-accounts/{0}/token'.format(
+            self.__service_account_name)
+        try:
+            response = _GceMetadataRequest(relative_url)
+        except exceptions.CommunicationError:
+            # Mark the credentials invalid and persist that state before
+            # re-raising.
+            self.invalid = True
+            if self.store:
+                self.store.locked_put(self)
+            raise
+        content = response.read()
+        try:
+            credential_info = json.loads(content)
+        except ValueError:
+            raise exceptions.CredentialsError(
+                'Could not parse response as JSON: %s' % content)
+
+        self.access_token = credential_info['access_token']
+        if 'expires_in' in credential_info:
+            # Convert the relative lifetime to an absolute UTC expiry.
+            expires_in = int(credential_info['expires_in'])
+            self.token_expiry = (
+                datetime.timedelta(seconds=expires_in) +
+                datetime.datetime.utcnow())
+        else:
+            self.token_expiry = None
+        self.invalid = False
+        if self.store:
+            self.store.locked_put(self)
+
+    @classmethod
+    def from_json(cls, json_data):
+        """Reconstructs GceAssertionCredentials from serialized JSON."""
+        data = json.loads(json_data)
+        kwargs = {}
+        # `in` works whether data['kwargs'] is present (dict) or the
+        # default empty list is used.
+        if 'cache_filename' in data.get('kwargs', []):
+            kwargs['cache_filename'] = data['kwargs']['cache_filename']
+        credentials = GceAssertionCredentials(scopes=[data['scope']],
+                                              **kwargs)
+        if 'access_token' in data:
+            credentials.access_token = data['access_token']
+        if 'token_expiry' in data:
+            credentials.token_expiry = datetime.datetime.strptime(
+                data['token_expiry'], oauth2client.client.EXPIRY_FORMAT)
+        if 'invalid' in data:
+            credentials.invalid = data['invalid']
+        return credentials
+
+    @property
+    def serialization_data(self):
+        """Unsupported for GCE credentials; always raises."""
+        raise NotImplementedError(
+            'Cannot serialize credentials for GCE service accounts.')
+
+
+# TODO(craigcitro): Currently, we can't even *load*
+# `oauth2client.appengine` without being on appengine, because of how
+# it handles imports. Fix that by splitting that module into
+# GAE-specific and GAE-independent bits, and guarding imports.
+class GaeAssertionCredentials(oauth2client.client.AssertionCredentials):
+
+    """Assertion credentials for Google App Engine apps."""
+
+    def __init__(self, scopes, **kwds):
+        if not util.DetectGae():
+            raise exceptions.ResourceUnavailableError(
+                'GCE credentials requested outside a GCE instance')
+        self._scopes = list(util.NormalizeScopes(scopes))
+        super(GaeAssertionCredentials, self).__init__(None, **kwds)
+
+    @classmethod
+    def Get(cls, *args, **kwds):
+        """Returns a credentials instance, or None on any apitools error."""
+        try:
+            return cls(*args, **kwds)
+        except exceptions.Error:
+            return None
+
+    @classmethod
+    def from_json(cls, json_data):
+        """Reconstructs GaeAssertionCredentials from serialized JSON."""
+        data = json.loads(json_data)
+        return GaeAssertionCredentials(data['_scopes'])
+
+    def _refresh(self, _):
+        """Refresh self.access_token.
+
+        Args:
+          _: (ignored) A function matching httplib2.Http.request's signature.
+
+        Raises:
+          exceptions.CredentialsError: If the app_identity API errors.
+        """
+        # Imported lazily: this module is only importable on App Engine.
+        # pylint: disable=import-error
+        from google.appengine.api import app_identity
+        try:
+            # The second element (expiry) is intentionally discarded.
+            token, _ = app_identity.get_access_token(self._scopes)
+        except app_identity.Error as e:
+            raise exceptions.CredentialsError(str(e))
+        self.access_token = token
+
+    def sign_blob(self, blob):
+        """Cryptographically sign a blob (of bytes).
+
+        This method is provided to support a common interface, but
+        the actual key used for a Google Compute Engine service account
+        is not available, so it can't be used to sign content.
+
+        Args:
+            blob: bytes, Message to be signed.
+
+        Raises:
+            NotImplementedError, always.
+        """
+        raise NotImplementedError(
+            'Compute Engine service accounts cannot sign blobs')
+
+
+def _GetRunFlowFlags(args=None):
+    """Retrieves command line flags based on gflags module.
+
+    Args:
+      args: Optional list of argv-style strings to parse; defaults to
+          the process arguments.
+
+    Returns:
+      An argparse namespace suitable for oauth2client's tools.run_flow.
+    """
+    # There's one rare situation where gsutil will not have argparse
+    # available, but doesn't need anything depending on argparse anyway,
+    # since they're bringing their own credentials. So we just allow this
+    # to fail with an ImportError in those cases.
+    #
+    # TODO(craigcitro): Move this import back to the top when we drop
+    # python 2.6 support (eg when gsutil does).
+    import argparse
+
+    parser = argparse.ArgumentParser(parents=[tools.argparser])
+    # Get command line argparse flags.
+    flags, _ = parser.parse_known_args(args=args)
+
+    # Allow `gflags` and `argparse` to be used side-by-side.
+    # Values set on the module-level FLAGS object take precedence.
+    if hasattr(FLAGS, 'auth_host_name'):
+        flags.auth_host_name = FLAGS.auth_host_name
+    if hasattr(FLAGS, 'auth_host_port'):
+        flags.auth_host_port = FLAGS.auth_host_port
+    if hasattr(FLAGS, 'auth_local_webserver'):
+        flags.noauth_local_webserver = (not FLAGS.auth_local_webserver)
+    return flags
+
+
+# TODO(craigcitro): Switch this from taking a path to taking a stream.
+def CredentialsFromFile(path, client_info, oauth2client_args=None):
+    """Read credentials from a file.
+
+    Args:
+      path: Path to the multistore credential file.
+      client_info: Dict with client_id, user_agent and scope keys, also
+          used as kwargs for the OAuth2 web-server flow.
+      oauth2client_args: Optional argv-style list forwarded to
+          _GetRunFlowFlags.
+
+    Returns:
+      The stored credentials, or freshly-minted ones after running the
+      OAuth flow.
+    """
+    credential_store = multistore_file.get_credential_storage(
+        path,
+        client_info['client_id'],
+        client_info['user_agent'],
+        client_info['scope'])
+    if hasattr(FLAGS, 'auth_local_webserver'):
+        FLAGS.auth_local_webserver = False
+    credentials = credential_store.get()
+    if credentials is None or credentials.invalid:
+        print('Generating new OAuth credentials ...')
+        # Up to 20 attempts before giving up and returning whatever we have.
+        for _ in range(20):
+            # If authorization fails, we want to retry, rather than let this
+            # cascade up and get caught elsewhere. If users want out of the
+            # retry loop, they can ^C.
+            try:
+                flow = oauth2client.client.OAuth2WebServerFlow(**client_info)
+                flags = _GetRunFlowFlags(args=oauth2client_args)
+                credentials = tools.run_flow(flow, credential_store, flags)
+                break
+            except (oauth2client.client.FlowExchangeError, SystemExit) as e:
+                # Here SystemExit is "no credential at all", and the
+                # FlowExchangeError is "invalid" -- usually because
+                # you reused a token.
+                print('Invalid authorization: %s' % (e,))
+            except httplib2.HttpLib2Error as e:
+                print('Communication error: %s' % (e,))
+                raise exceptions.CredentialsError(
+                    'Communication error creating credentials: %s' % e)
+    return credentials
+
+
+# TODO(craigcitro): Push this into oauth2client.
+def GetUserinfo(credentials, http=None):  # pylint: disable=invalid-name
+    """Get the userinfo associated with the given credentials.
+
+    This is dependent on the token having either the userinfo.email or
+    userinfo.profile scope for the given token.
+
+    Args:
+      credentials: (oauth2client.client.Credentials) incoming credentials
+      http: (httplib2.Http, optional) http instance to use
+
+    Returns:
+      The email address for this token, or None if the required scopes
+      aren't available.
+    """
+    http = http or httplib2.Http()
+    url = _GetUserinfoUrl(credentials)
+    # We ignore communication woes here (i.e. SSL errors, socket
+    # timeout), as handling these should be done in a common location.
+    response, content = http.request(url)
+    if response.status == http_client.BAD_REQUEST:
+        # A 400 likely means an expired token: refresh once and retry.
+        credentials.refresh(http)
+        url = _GetUserinfoUrl(credentials)
+        response, content = http.request(url)
+    return json.loads(content or '{}')  # Save ourselves from an empty reply.
+
+
+def _GetUserinfoUrl(credentials):
+    """Builds the tokeninfo URL for the credentials' access token."""
+    url_root = 'https://www.googleapis.com/oauth2/v2/tokeninfo'
+    query_args = {'access_token': credentials.access_token}
+    return '?'.join((url_root, urllib.parse.urlencode(query_args)))
+
+
+@_RegisterCredentialsMethod
+def _GetServiceAccountCredentials(
+        client_info, service_account_name=None, service_account_keyfile=None,
+        service_account_json_keyfile=None, **unused_kwds):
+    """Returns ServiceAccountCredentials from given file.
+
+    Returns None (implicitly) when no service account info is provided,
+    so the next registered credentials method can be tried.
+
+    Raises:
+      exceptions.CredentialsError: If only one of service_account_name
+          and service_account_keyfile is provided.
+    """
+    if ((service_account_name and not service_account_keyfile) or
+            (service_account_keyfile and not service_account_name)):
+        raise exceptions.CredentialsError(
+            'Service account name or keyfile provided without the other')
+    scopes = client_info['scope'].split()
+    user_agent = client_info['user_agent']
+    # Use the .json credentials, if provided.
+    if service_account_json_keyfile:
+        return ServiceAccountCredentialsFromFile(
+            service_account_json_keyfile, scopes, user_agent=user_agent)
+    # Fall back to .p12 if there's no .json credentials.
+    if service_account_name is not None:
+        return ServiceAccountCredentialsFromP12File(
+            service_account_name, service_account_keyfile, scopes, user_agent)
+
+
+@_RegisterCredentialsMethod
+def _GetGaeServiceAccount(client_info, **unused_kwds):
+    scopes = client_info['scope'].split(' ')
+    return GaeAssertionCredentials.Get(scopes=scopes)
+
+
+@_RegisterCredentialsMethod
+def _GetGceServiceAccount(client_info, **unused_kwds):
+    scopes = client_info['scope'].split(' ')
+    return GceAssertionCredentials.Get(scopes=scopes)
+
+
+@_RegisterCredentialsMethod
+def _GetApplicationDefaultCredentials(
+        client_info, skip_application_default_credentials=False,
+        **unused_kwds):
+    """Returns ADC with right scopes, or None when unusable."""
+    scopes = client_info['scope'].split()
+    if skip_application_default_credentials:
+        return None
+    gc = oauth2client.client.GoogleCredentials
+    with cache_file_lock:
+        try:
+            # pylint: disable=protected-access
+            # We've already done our own check for GAE/GCE
+            # credentials, we don't want to pay for checking again.
+            credentials = gc._implicit_credentials_from_files()
+        except oauth2client.client.ApplicationDefaultCredentialsError:
+            return None
+    # If we got back a non-service account credential, we need to use
+    # a heuristic to decide whether or not the application default
+    # credential will work for us. We assume that if we're requesting
+    # cloud-platform, our scopes are a subset of cloud scopes, and the
+    # ADC will work.
+    cp = 'https://www.googleapis.com/auth/cloud-platform'
+    if credentials is None:
+        return None
+    if not isinstance(credentials, gc) or cp in scopes:
+        return credentials.create_scoped(scopes)
+    return None
diff --git a/apitools/base/py/credentials_lib_test.py b/apitools/base/py/credentials_lib_test.py
new file mode 100644
index 0000000..1bf5aa7
--- /dev/null
+++ b/apitools/base/py/credentials_lib_test.py
@@ -0,0 +1,125 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import six
+import unittest2
+
+from apitools.base.py import credentials_lib
+from apitools.base.py import util
+
+
+class CredentialsLibTest(unittest2.TestCase):
+
+    def _GetServiceCreds(self, service_account_name=None, scopes=None):
+        """Builds GceAssertionCredentials against a mocked metadata server."""
+        kwargs = {}
+        if service_account_name is not None:
+            kwargs['service_account_name'] = service_account_name
+        service_account_name = service_account_name or 'default'
+
+        def MockMetadataCalls(request_url):
+            # Dispatch on the URL suffix to mimic the metadata endpoints.
+            default_scopes = scopes or ['scope1']
+            if request_url.endswith('scopes'):
+                return six.StringIO(''.join(default_scopes))
+            elif request_url.endswith('service-accounts'):
+                return six.StringIO(service_account_name)
+            elif request_url.endswith(
+                    '/service-accounts/%s/token' % service_account_name):
+                return six.StringIO('{"access_token": "token"}')
+            self.fail('Unexpected HTTP request to %s' % request_url)
+
+        with mock.patch.object(credentials_lib, '_GceMetadataRequest',
+                               side_effect=MockMetadataCalls,
+                               autospec=True) as opener_mock:
+            with mock.patch.object(util, 'DetectGce',
+                                   autospec=True) as mock_detect:
+                mock_detect.return_value = True
+                credentials = credentials_lib.GceAssertionCredentials(
+                    scopes, **kwargs)
+                self.assertIsNone(credentials._refresh(None))
+            # Expect exactly three metadata hits: the service-accounts
+            # listing, the scopes listing, and the token fetch.
+            self.assertEqual(3, opener_mock.call_count)
+        return credentials
+
+    def testGceServiceAccounts(self):
+        """Covers default, scoped, and named-account construction."""
+        scopes = ['scope1']
+        self._GetServiceCreds()
+        self._GetServiceCreds(scopes=scopes)
+        self._GetServiceCreds(service_account_name='my_service_account',
+                              scopes=scopes)
+
+    def testGetServiceAccount(self):
+        # We'd also like to test the metadata calls, which requires
+        # having some knowledge about how HTTP calls are made (so that
+        # we can mock them). It's unfortunate, but there's no way
+        # around it.
+        creds = self._GetServiceCreds()
+        opener = mock.MagicMock()
+        opener.open = mock.MagicMock()
+        opener.open.return_value = six.StringIO('default/\nanother')
+        with mock.patch.object(six.moves.urllib.request, 'build_opener',
+                               return_value=opener,
+                               autospec=True) as build_opener:
+            creds.GetServiceAccount('default')
+            self.assertEqual(1, build_opener.call_count)
+            self.assertEqual(1, opener.open.call_count)
+            req = opener.open.call_args[0][0]
+            self.assertTrue(req.get_full_url().startswith(
+                'http://metadata.google.internal/'))
+            # The urllib module does weird things with header case.
+            self.assertEqual('Google', req.get_header('Metadata-flavor'))
+
+    def testGetAdcNone(self):
+        # Tests that we correctly return None when ADC aren't present in
+        # the well-known file.
+        creds = credentials_lib._GetApplicationDefaultCredentials(
+            client_info={'scope': ''})
+        self.assertIsNone(creds)
+
+
+class TestGetRunFlowFlags(unittest2.TestCase):
+
+    def setUp(self):
+        # Save the real FLAGS object so tests can replace it safely.
+        self._flags_actual = credentials_lib.FLAGS
+
+    def tearDown(self):
+        credentials_lib.FLAGS = self._flags_actual
+
+    def test_with_gflags(self):
+        """gflags values must override argparse-provided values."""
+        HOST = 'myhostname'
+        PORT = '144169'
+
+        class MockFlags(object):
+            auth_host_name = HOST
+            auth_host_port = PORT
+            auth_local_webserver = False
+
+        credentials_lib.FLAGS = MockFlags
+        flags = credentials_lib._GetRunFlowFlags([
+            '--auth_host_name=%s' % HOST,
+            '--auth_host_port=%s' % PORT,
+            '--noauth_local_webserver',
+        ])
+        self.assertEqual(flags.auth_host_name, HOST)
+        self.assertEqual(flags.auth_host_port, PORT)
+        self.assertEqual(flags.logging_level, 'ERROR')
+        self.assertEqual(flags.noauth_local_webserver, True)
+
+    def test_without_gflags(self):
+        """Without gflags, argparse defaults must win."""
+        credentials_lib.FLAGS = None
+        flags = credentials_lib._GetRunFlowFlags([])
+        self.assertEqual(flags.auth_host_name, 'localhost')
+        self.assertEqual(flags.auth_host_port, [8080, 8090])
+        self.assertEqual(flags.logging_level, 'ERROR')
+        self.assertEqual(flags.noauth_local_webserver, False)
diff --git a/apitools/base/py/encoding.py b/apitools/base/py/encoding.py
new file mode 100644
index 0000000..598f6e6
--- /dev/null
+++ b/apitools/base/py/encoding.py
@@ -0,0 +1,731 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Common code for converting proto to other formats, such as JSON."""
+
+import base64
+import collections
+import datetime
+import json
+import os
+import sys
+
+import six
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import protojson
+from apitools.base.py import exceptions
+
+__all__ = [
+    'CopyProtoMessage',
+    'JsonToMessage',
+    'MessageToJson',
+    'DictToMessage',
+    'MessageToDict',
+    'PyValueToMessage',
+    'MessageToPyValue',
+    'MessageToRepr',
+    'GetCustomJsonFieldMapping',
+    'AddCustomJsonFieldMapping',
+    'GetCustomJsonEnumMapping',
+    'AddCustomJsonEnumMapping',
+]
+
+
+_Codec = collections.namedtuple('_Codec', ['encoder', 'decoder'])
+CodecResult = collections.namedtuple('CodecResult', ['value', 'complete'])
+
+
+# TODO(craigcitro): Make these non-global.
+_UNRECOGNIZED_FIELD_MAPPINGS = {}
+_CUSTOM_MESSAGE_CODECS = {}
+_CUSTOM_FIELD_CODECS = {}
+_FIELD_TYPE_CODECS = {}
+
+
+def MapUnrecognizedFields(field_name):
+    """Register field_name as a container for unrecognized fields.
+
+    Use as a decorator on a message class.
+    """
+    def Register(cls):
+        _UNRECOGNIZED_FIELD_MAPPINGS[cls] = field_name
+        return cls
+    return Register
+
+
+def RegisterCustomMessageCodec(encoder, decoder):
+    """Register a custom encoder/decoder for this message class.
+
+    Use as a decorator on a message class.
+    """
+    def Register(cls):
+        _CUSTOM_MESSAGE_CODECS[cls] = _Codec(encoder=encoder, decoder=decoder)
+        return cls
+    return Register
+
+
+def RegisterCustomFieldCodec(encoder, decoder):
+    """Register a custom encoder/decoder for this field.
+
+    Use as a decorator on a field instance.
+    """
+    def Register(field):
+        _CUSTOM_FIELD_CODECS[field] = _Codec(encoder=encoder, decoder=decoder)
+        return field
+    return Register
+
+
+def RegisterFieldTypeCodec(encoder, decoder):
+    """Register a custom encoder/decoder for all fields of this type.
+
+    Use as a decorator on a field type.
+    """
+    def Register(field_type):
+        _FIELD_TYPE_CODECS[field_type] = _Codec(
+            encoder=encoder, decoder=decoder)
+        return field_type
+    return Register
+
+
+# TODO(craigcitro): Delete this function with the switch to proto2.
+def CopyProtoMessage(message):
+    """Return a copy of message, via a JSON encode/decode round-trip."""
+    codec = protojson.ProtoJson()
+    return codec.decode_message(type(message), codec.encode_message(message))
+
+
+def MessageToJson(message, include_fields=None):
+    """Convert the given message to JSON.
+
+    Args:
+      message: The message to encode.
+      include_fields: Optional list of dotted field paths forced into the
+          output (as null / empty list) even when unset.
+
+    Returns:
+      A JSON string.
+    """
+    result = _ProtoJsonApiTools.Get().encode_message(message)
+    return _IncludeFields(result, message, include_fields)
+
+
+def JsonToMessage(message_type, message):
+    """Convert the given JSON string to a message of type message_type."""
+    return _ProtoJsonApiTools.Get().decode_message(message_type, message)
+
+
+# TODO(craigcitro): Do this directly, instead of via JSON.
+def DictToMessage(d, message_type):
+    """Convert the given dictionary to a message of type message_type."""
+    return JsonToMessage(message_type, json.dumps(d))
+
+
+def MessageToDict(message):
+    """Convert the given message to a dictionary (via its JSON form)."""
+    return json.loads(MessageToJson(message))
+
+
+def DictToProtoMap(properties, additional_property_type, sort_items=False):
+    """Convert the given dictionary to an AdditionalProperty message.
+
+    Args:
+      properties: Dict to convert.
+      additional_property_type: Message type exposing an
+          `AdditionalProperty` nested type and an `additional_properties`
+          field.
+      sort_items: If True, emit entries in sorted order.
+
+    Returns:
+      An instance of additional_property_type.
+    """
+    items = properties.items()
+    if sort_items:
+        items = sorted(items)
+    map_ = []
+    for key, value in items:
+        map_.append(additional_property_type.AdditionalProperty(
+            key=key, value=value))
+    return additional_property_type(additional_properties=map_)
+
+
+def PyValueToMessage(message_type, value):
+    """Convert the given python value to a message of type message_type."""
+    return JsonToMessage(message_type, json.dumps(value))
+
+
+def MessageToPyValue(message):
+    """Convert the given message to a python value (via its JSON form)."""
+    return json.loads(MessageToJson(message))
+
+
+def MessageToRepr(msg, multiline=False, **kwargs):
+    """Return a repr-style string for a protorpc message.
+
+    protorpc.Message.__repr__ does not return anything that could be considered
+    python code. Adding this function lets us print a protorpc message in such
+    a way that it could be pasted into code later, and used to compare against
+    other things.
+
+    Args:
+      msg: protorpc.Message, the message to be repr'd.
+      multiline: bool, True if the returned string should have each field
+          assignment on its own line.
+      **kwargs: {str:str}, Additional flags for how to format the string.
+
+    Known **kwargs:
+      shortstrings: bool, True if all string values should be
+          truncated at 100 characters, since when mocking the contents
+          typically don't matter except for IDs, and IDs are usually
+          less than 100 characters.
+      no_modules: bool, True if the long module name should not be printed with
+          each type.
+
+    Returns:
+      str, A string of valid python (assuming the right imports have been made)
+      that recreates the message passed into this function.
+
+    """
+
+    # TODO(jasmuth): craigcitro suggests a pretty-printer from apitools/gen.
+
+    indent = kwargs.get('indent', 0)
+
+    def IndentKwargs(kwargs):
+        # Push nested elements four spaces deeper.
+        kwargs = dict(kwargs)
+        kwargs['indent'] = kwargs.get('indent', 0) + 4
+        return kwargs
+
+    # Lists render as list literals, recursing per element.
+    if isinstance(msg, list):
+        s = '['
+        for item in msg:
+            if multiline:
+                s += '\n' + ' ' * (indent + 4)
+            s += MessageToRepr(
+                item, multiline=multiline, **IndentKwargs(kwargs)) + ','
+        if multiline:
+            s += '\n' + ' ' * indent
+        s += ']'
+        return s
+
+    # Messages render as constructor calls with keyword arguments in
+    # sorted field order.
+    if isinstance(msg, messages.Message):
+        s = type(msg).__name__ + '('
+        if not kwargs.get('no_modules'):
+            s = msg.__module__ + '.' + s
+        names = sorted([field.name for field in msg.all_fields()])
+        for name in names:
+            field = msg.field_by_name(name)
+            if multiline:
+                s += '\n' + ' ' * (indent + 4)
+            value = getattr(msg, field.name)
+            s += field.name + '=' + MessageToRepr(
+                value, multiline=multiline, **IndentKwargs(kwargs)) + ','
+        if multiline:
+            s += '\n' + ' ' * indent
+        s += ')'
+        return s
+
+    if isinstance(msg, six.string_types):
+        if kwargs.get('shortstrings') and len(msg) > 100:
+            msg = msg[:100]
+
+    # NOTE(review): this branch dereferences msg.tzinfo, so it assumes an
+    # aware datetime — a naive datetime would raise AttributeError; confirm
+    # callers never pass naive values.
+    if isinstance(msg, datetime.datetime):
+
+        class SpecialTZInfo(datetime.tzinfo):
+
+            def __init__(self, offset):
+                super(SpecialTZInfo, self).__init__()
+                self.offset = offset
+
+            def __repr__(self):
+                s = 'TimeZoneOffset(' + repr(self.offset) + ')'
+                if not kwargs.get('no_modules'):
+                    s = 'apitools.base.protorpclite.util.' + s
+                return s
+
+        msg = datetime.datetime(
+            msg.year, msg.month, msg.day, msg.hour, msg.minute, msg.second,
+            msg.microsecond, SpecialTZInfo(msg.tzinfo.utcoffset(0)))
+
+    return repr(msg)
+
+
+def _GetField(message, field_path):
+    """Walk field_path (a list of attribute names) down into message.
+
+    Raises:
+      KeyError: If any path component is not an attribute of the
+          current object.
+    """
+    for field in field_path:
+        if field not in dir(message):
+            raise KeyError('no field "%s"' % field)
+        message = getattr(message, field)
+    return message
+
+
+def _SetField(dictblob, field_path, value):
+    """Set value at the nested field_path in dictblob, creating dicts."""
+    for field in field_path[:-1]:
+        dictblob = dictblob.setdefault(field, {})
+    dictblob[field_path[-1]] = value
+
+
+def _IncludeFields(encoded_message, message, include_fields):
+    """Add the requested fields to the encoded message.
+
+    Raises:
+      exceptions.InvalidDataError: If a requested field does not exist
+          on the message.
+    """
+    if include_fields is None:
+        return encoded_message
+    result = json.loads(encoded_message)
+    for field_name in include_fields:
+        try:
+            value = _GetField(message, field_name.split('.'))
+            nullvalue = None
+            # Repeated fields are forced to an empty list, not null.
+            if isinstance(value, list):
+                nullvalue = []
+        except KeyError:
+            raise exceptions.InvalidDataError(
+                'No field named %s in message of type %s' % (
+                    field_name, type(message)))
+        _SetField(result, field_name.split('.'), nullvalue)
+    return json.dumps(result)
+
+
+def _GetFieldCodecs(field, attr):
+    """Return registered codec attrs (per-field, then per-type) for field."""
+    result = [
+        getattr(_CUSTOM_FIELD_CODECS.get(field), attr, None),
+        getattr(_FIELD_TYPE_CODECS.get(type(field)), attr, None),
+    ]
+    return [x for x in result if x is not None]
+
+
+class _ProtoJsonApiTools(protojson.ProtoJson):
+
+    """JSON encoder used by apitools clients."""
+    _INSTANCE = None
+
+    @classmethod
+    def Get(cls):
+        """Returns the singleton instance, creating it on first use."""
+        if cls._INSTANCE is None:
+            cls._INSTANCE = cls()
+        return cls._INSTANCE
+
+    def decode_message(self, message_type, encoded_message):
+        """Decode the JSON string encoded_message into a message_type."""
+        # A registered custom codec takes precedence over everything else.
+        if message_type in _CUSTOM_MESSAGE_CODECS:
+            return _CUSTOM_MESSAGE_CODECS[
+                message_type].decoder(encoded_message)
+        # Standard decode, bracketed by module helpers that handle custom
+        # field names and unknown enums/messages/fields from the raw JSON.
+        result = _DecodeCustomFieldNames(message_type, encoded_message)
+        result = super(_ProtoJsonApiTools, self).decode_message(
+            message_type, result)
+        result = _ProcessUnknownEnums(result, encoded_message)
+        result = _ProcessUnknownMessages(result, encoded_message)
+        return _DecodeUnknownFields(result, encoded_message)
+
+    def decode_field(self, field, value):
+        """Decode the given JSON value.
+
+        Args:
+          field: a messages.Field for the field we're decoding.
+          value: a python value we'd like to decode.
+
+        Returns:
+          A value suitable for assignment to field.
+        """
+        for decoder in _GetFieldCodecs(field, 'decoder'):
+            result = decoder(field, value)
+            value = result.value
+            if result.complete:
+                return value
+        if isinstance(field, messages.MessageField):
+            field_value = self.decode_message(
+                field.message_type, json.dumps(value))
+        elif isinstance(field, messages.EnumField):
+            value = GetCustomJsonEnumMapping(
+                field.type, json_name=value) or value
+            try:
+                field_value = super(
+                    _ProtoJsonApiTools, self).decode_field(field, value)
+            except messages.DecodeError:
+                if not isinstance(value, six.string_types):
+                    raise
+                field_value = None
+        else:
+            field_value = super(
+                _ProtoJsonApiTools, self).decode_field(field, value)
+        return field_value
+
+    def encode_message(self, message):
+        if isinstance(message, messages.FieldList):
+            return '[%s]' % (', '.join(self.encode_message(x)
+                                       for x in message))
+
+        # pylint: disable=unidiomatic-typecheck
+        if type(message) in _CUSTOM_MESSAGE_CODECS:
+            return _CUSTOM_MESSAGE_CODECS[type(message)].encoder(message)
+
+        message = _EncodeUnknownFields(message)
+        result = super(_ProtoJsonApiTools, self).encode_message(message)
+        result = _EncodeCustomFieldNames(message, result)
+        return json.dumps(json.loads(result), sort_keys=True)
+
+    def encode_field(self, field, value):
+        """Encode the given value as JSON.
+
+        Args:
+          field: a messages.Field for the field we're encoding.
+          value: a value for field.
+
+        Returns:
+          A python value suitable for json.dumps.
+        """
+        for encoder in _GetFieldCodecs(field, 'encoder'):
+            result = encoder(field, value)
+            value = result.value
+            if result.complete:
+                return value
+        if isinstance(field, messages.EnumField):
+            if field.repeated:
+                remapped_value = [GetCustomJsonEnumMapping(
+                    field.type, python_name=e.name) or e.name for e in value]
+            else:
+                remapped_value = GetCustomJsonEnumMapping(
+                    field.type, python_name=value.name)
+            if remapped_value:
+                return remapped_value
+        if (isinstance(field, messages.MessageField) and
+                not isinstance(field, message_types.DateTimeField)):
+            value = json.loads(self.encode_message(value))
+        return super(_ProtoJsonApiTools, self).encode_field(field, value)
+
+
# TODO(craigcitro): Fold this and _IncludeFields in as codecs.
def _DecodeUnknownFields(message, encoded_message):
    """Rewrite unknown fields in message into message.destination."""
    destination = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
    if destination is None:
        # No pair-field mapping registered for this type; nothing to do.
        return message
    pair_field = message.field_by_name(destination)
    if not isinstance(pair_field, messages.MessageField):
        raise exceptions.InvalidDataFromServerError(
            'Unrecognized fields must be mapped to a compound '
            'message type.')
    pair_type = pair_field.message_type
    # TODO(craigcitro): Add more error checking around the pair
    # type being exactly what we suspect (field names, etc).
    if isinstance(pair_type.value, messages.MessageField):
        # Message-valued pairs need the raw JSON dict to rebuild values.
        new_values = _DecodeUnknownMessages(
            message, json.loads(encoded_message), pair_type)
    else:
        new_values = _DecodeUnrecognizedFields(message, pair_type)
    setattr(message, destination, new_values)
    # We could probably get away with not setting this, but
    # why not clear it?
    setattr(message, '_Message__unrecognized_fields', {})
    return message
+
+
def _DecodeUnknownMessages(message, encoded_message, pair_type):
    """Process unknown fields in encoded_message of a message type.

    Args:
      message: the partially-decoded message.
      encoded_message: dict of JSON field names to values (the caller
          has already run json.loads).
      pair_type: the key/value pair message type to build.

    Returns:
      List of pair_type instances for every JSON key that is not a
      declared field of message.
    """
    value_type = pair_type.value.type
    known_names = set(f.name for f in message.all_fields())
    pairs = []
    for key, value_dict in encoded_message.items():
        if key in known_names:
            continue
        decoded = PyValueToMessage(value_type, value_dict)
        if pair_type.value.repeated:
            decoded = _AsMessageList(decoded)
        pairs.append(pair_type(key=key, value=decoded))
    return pairs
+
+
def _DecodeUnrecognizedFields(message, pair_type):
    """Process unrecognized fields in message.

    Each unrecognized field stored on message is decoded into a
    pair_type(key, value) entry.

    Returns:
      List of decoded pair_type instances.
    """
    new_values = []
    # The 'value' field descriptor is the same for every pair; look it
    # up once instead of on each loop iteration.
    value_type = pair_type.field_by_name('value')
    for unknown_field in message.all_unrecognized_fields():
        # TODO(craigcitro): Consider validating the variant if
        # the assignment below doesn't take care of it. It may
        # also be necessary to check it in the case that the
        # type has multiple encodings.
        value, _ = message.get_unrecognized_field_info(unknown_field)
        if isinstance(value_type, messages.MessageField):
            decoded_value = DictToMessage(value, pair_type.value.message_type)
        else:
            decoded_value = protojson.ProtoJson().decode_field(
                pair_type.value, value)
        new_pair = pair_type(key=str(unknown_field), value=decoded_value)
        new_values.append(new_pair)
    return new_values
+
+
def _EncodeUnknownFields(message):
    """Remap unknown fields in message out of message.source."""
    source = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
    if source is None:
        # No pair-field mapping registered for this type; nothing to do.
        return message
    # Work on a copy so the caller's message is left untouched.
    result = CopyProtoMessage(message)
    pairs_field = message.field_by_name(source)
    if not isinstance(pairs_field, messages.MessageField):
        raise exceptions.InvalidUserInputError(
            'Invalid pairs field %s' % pairs_field)
    pairs_type = pairs_field.message_type
    value_variant = pairs_type.field_by_name('value').variant
    pairs = getattr(message, source)
    for pair in pairs:
        if value_variant == messages.Variant.MESSAGE:
            encoded_value = MessageToDict(pair.value)
        else:
            encoded_value = pair.value
        result.set_unrecognized_field(pair.key, encoded_value, value_variant)
    # The pairs now live as unrecognized fields; clear the source field.
    setattr(result, source, [])
    return result
+
+
def _SafeEncodeBytes(field, value):
    """Encode the bytes in value as urlsafe base64."""
    try:
        if field.repeated:
            encoded = [base64.urlsafe_b64encode(item) for item in value]
        else:
            encoded = base64.urlsafe_b64encode(value)
    except TypeError:
        # Leave non-bytes input untouched for later stages to handle.
        return CodecResult(value=value, complete=False)
    return CodecResult(value=encoded, complete=True)
+
+
def _SafeDecodeBytes(unused_field, value):
    """Decode the urlsafe base64 value into bytes."""
    try:
        decoded = base64.urlsafe_b64decode(str(value))
    except TypeError:
        # Leave undecodable input untouched for later stages to handle.
        return CodecResult(value=value, complete=False)
    return CodecResult(value=decoded, complete=True)
+
+
def _ProcessUnknownEnums(message, encoded_message):
    """Add unknown enum values from encoded_message as unknown fields.

    ProtoRPC diverges from the usual protocol buffer behavior here and
    doesn't allow unknown fields. Throwing on unknown fields makes it
    impossible to let servers add new enum values and stay compatible
    with older clients, which isn't reasonable for us. We simply store
    unrecognized enum values as unknown fields, and all is well.

    Args:
      message: Proto message we've decoded thus far.
      encoded_message: JSON string we're decoding.

    Returns:
      message, with any unknown enums stored as unrecognized fields.
    """
    if not encoded_message:
        return message
    decoded = json.loads(encoded_message)
    for field in message.all_fields():
        if not isinstance(field, messages.EnumField):
            continue
        # Only enum fields present in the JSON but left unassigned by
        # the decoder (i.e. unrecognized values) are preserved.
        if field.name in decoded and (
                message.get_assigned_value(field.name) is None):
            message.set_unrecognized_field(
                field.name, decoded[field.name], messages.Variant.ENUM)
    return message
+
+
def _ProcessUnknownMessages(message, encoded_message):
    """Store any remaining unknown fields as strings.

    ProtoRPC currently ignores unknown values for which no type can be
    determined (and logs a "No variant found" message). For the purposes
    of reserializing, this is quite harmful (since it throws away
    information). Here we simply add those as unknown fields of type
    string (so that they can easily be reserialized).

    Args:
      message: Proto message we've decoded thus far.
      encoded_message: JSON string we're decoding.

    Returns:
      message, with any remaining unrecognized fields saved.
    """
    if not encoded_message:
        return message
    decoded = json.loads(encoded_message)
    # Names already accounted for: declared fields plus anything the
    # earlier passes stored as unrecognized.
    known_names = set(f.name for f in message.all_fields())
    known_names.update(message.all_unrecognized_fields())
    for name in decoded.keys():
        if name in known_names:
            continue
        message.set_unrecognized_field(name, decoded[name],
                                       messages.Variant.STRING)
    return message
+
+
# Install the urlsafe-base64 codecs for every BytesField.
RegisterFieldTypeCodec(_SafeEncodeBytes, _SafeDecodeBytes)(messages.BytesField)


# Note that these could share a dictionary, since they're keyed by
# distinct types, but it's not really worth it.
# Each maps a type key (see _GetTypeKey) -> {python_name: json_name}.
_JSON_ENUM_MAPPINGS = {}
_JSON_FIELD_MAPPINGS = {}
+
+
+def _GetTypeKey(message_type, package):
+    """Get the prefix for this message type in mapping dicts."""
+    key = message_type.definition_name()
+    if package and key.startswith(package + '.'):
+        module_name = message_type.__module__
+        # We normalize '__main__' to something unique, if possible.
+        if module_name == '__main__':
+            try:
+                file_name = sys.modules[module_name].__file__
+            except (AttributeError, KeyError):
+                pass
+            else:
+                base_name = os.path.basename(file_name)
+                split_name = os.path.splitext(base_name)
+                if len(split_name) == 1:
+                    module_name = unicode(base_name)
+                else:
+                    module_name = u'.'.join(split_name[:-1])
+        key = module_name + '.' + key.partition('.')[2]
+    return key
+
+
def AddCustomJsonEnumMapping(enum_type, python_name, json_name,
                             package=''):
    """Register a custom wire encoding for one enum value.

    Primarily used by generated code to cope with enum values that
    collide with Python keywords.

    Args:
      enum_type: (messages.Enum) An enum type
      python_name: (basestring) Python name for this value.
      json_name: (basestring) JSON name to be used on the wire.
      package: (basestring, optional) Package prefix for this enum, if
          present. We strip this off the enum name in order to generate
          unique keys.

    Raises:
      exceptions.TypecheckError: if enum_type is not an Enum subclass.
      exceptions.InvalidDataError: if python_name is not a value of
          enum_type, or a conflicting mapping already exists.
    """
    if not issubclass(enum_type, messages.Enum):
        raise exceptions.TypecheckError(
            'Cannot set JSON enum mapping for non-enum "%s"' % enum_type)
    enum_name = _GetTypeKey(enum_type, package)
    if python_name not in enum_type.names():
        raise exceptions.InvalidDataError(
            'Enum value %s not a value for type %s' % (python_name, enum_type))
    mapping = _JSON_ENUM_MAPPINGS.setdefault(enum_name, {})
    _CheckForExistingMappings('enum', enum_type, python_name, json_name)
    mapping[python_name] = json_name
+
+
def AddCustomJsonFieldMapping(message_type, python_name, json_name,
                              package=''):
    """Register a custom wire encoding for one message field.

    Primarily used by generated code to cope with field names that
    collide with Python keywords.

    Args:
      message_type: (messages.Message) A message type
      python_name: (basestring) Python name for this value.
      json_name: (basestring) JSON name to be used on the wire.
      package: (basestring, optional) Package prefix for this message, if
          present. We strip this off the message name in order to generate
          unique keys.

    Raises:
      exceptions.TypecheckError: if message_type is not a Message
          subclass.
      exceptions.InvalidDataError: if python_name is not a field of
          message_type, or a conflicting mapping already exists.
    """
    if not issubclass(message_type, messages.Message):
        raise exceptions.TypecheckError(
            'Cannot set JSON field mapping for '
            'non-message "%s"' % message_type)
    message_name = _GetTypeKey(message_type, package)
    try:
        # Raises KeyError if python_name is not a declared field.
        message_type.field_by_name(python_name)
    except KeyError:
        raise exceptions.InvalidDataError(
            'Field %s not recognized for type %s' % (
                python_name, message_type))
    mapping = _JSON_FIELD_MAPPINGS.setdefault(message_name, {})
    _CheckForExistingMappings('field', message_type, python_name, json_name)
    mapping[python_name] = json_name
+
+
def GetCustomJsonEnumMapping(enum_type, python_name=None, json_name=None):
    """Return the appropriate remapping for the given enum, or None."""
    type_name = enum_type.definition_name()
    return _FetchRemapping(type_name, 'enum', python_name=python_name,
                           json_name=json_name, mappings=_JSON_ENUM_MAPPINGS)
+
+
def GetCustomJsonFieldMapping(message_type, python_name=None, json_name=None):
    """Return the appropriate remapping for the given field, or None."""
    type_name = message_type.definition_name()
    return _FetchRemapping(type_name, 'field', python_name=python_name,
                           json_name=json_name, mappings=_JSON_FIELD_MAPPINGS)
+
+
+def _FetchRemapping(type_name, mapping_type, python_name=None, json_name=None,
+                    mappings=None):
+    """Common code for fetching a key or value from a remapping dict."""
+    if python_name and json_name:
+        raise exceptions.InvalidDataError(
+            'Cannot specify both python_name and json_name '
+            'for %s remapping' % mapping_type)
+    if not (python_name or json_name):
+        raise exceptions.InvalidDataError(
+            'Must specify either python_name or json_name for %s remapping' % (
+                mapping_type,))
+    field_remappings = mappings.get(type_name, {})
+    if field_remappings:
+        if python_name:
+            return field_remappings.get(python_name)
+        elif json_name:
+            if json_name in list(field_remappings.values()):
+                return [k for k in field_remappings
+                        if field_remappings[k] == json_name][0]
+    return None
+
+
def _CheckForExistingMappings(mapping_type, message_type,
                              python_name, json_name):
    """Validate that no conflicting mappings exist for the given names.

    mapping_type is 'field' or 'enum'; raises InvalidDataError when
    either name is already mapped to something different.
    """
    if mapping_type == 'field':
        getter = GetCustomJsonFieldMapping
    elif mapping_type == 'enum':
        getter = GetCustomJsonEnumMapping
    existing_json = getter(message_type, python_name=python_name)
    if existing_json is not None and existing_json != json_name:
        raise exceptions.InvalidDataError(
            'Cannot add mapping for %s "%s", already mapped to "%s"' % (
                mapping_type, python_name, existing_json))
    existing_python = getter(message_type, json_name=json_name)
    if existing_python is not None and existing_python != python_name:
        raise exceptions.InvalidDataError(
            'Cannot add mapping for %s "%s", already mapped to "%s"' % (
                mapping_type, json_name, existing_python))
+
+
def _EncodeCustomFieldNames(message, encoded_value):
    """Rename remapped fields in encoded_value to their wire names.

    Args:
      message: the message being encoded (its type selects which
          registered field-name remappings apply).
      encoded_value: (string) JSON-serialized message using python
          field names.

    Returns:
      JSON string with remapped fields renamed to their json names.
    """
    message_name = type(message).definition_name()
    field_remappings = list(_JSON_FIELD_MAPPINGS.get(message_name, {}).items())
    if field_remappings:
        decoded_value = json.loads(encoded_value)
        for python_name, json_name in field_remappings:
            # Test key membership on the decoded dict. The previous
            # substring check against the JSON *string* could match a
            # name appearing inside a value and then KeyError on pop.
            if python_name in decoded_value:
                decoded_value[json_name] = decoded_value.pop(python_name)
        encoded_value = json.dumps(decoded_value)
    return encoded_value
+
+
def _DecodeCustomFieldNames(message_type, encoded_message):
    """Rename wire-format json names in encoded_message back to the
    python field names registered for message_type."""
    remappings = _JSON_FIELD_MAPPINGS.get(message_type.definition_name(), {})
    if not remappings:
        return encoded_message
    decoded = json.loads(encoded_message)
    for python_name, json_name in list(remappings.items()):
        if json_name in decoded:
            decoded[python_name] = decoded.pop(json_name)
    return json.dumps(decoded)
+
+
def _AsMessageList(msg):
    """Convert the provided list-as-JsonValue to a list."""
    # This really needs to live in extra_types, but extra_types needs
    # to import this file to be able to register codecs.
    # TODO(craigcitro): Split out a codecs module and fix this ugly
    # import.
    from apitools.base.py import extra_types

    is_repeated = (
        isinstance(msg, extra_types.JsonArray) or
        (isinstance(msg, extra_types.JsonValue) and bool(msg.array_value)))
    if not is_repeated:
        raise ValueError('invalid argument to _AsMessageList')
    # Unwrap JsonValue -> JsonArray -> plain list of entries.
    if isinstance(msg, extra_types.JsonValue):
        msg = msg.array_value
    if isinstance(msg, extra_types.JsonArray):
        msg = msg.entries
    return msg
diff --git a/apitools/base/py/encoding_test.py b/apitools/base/py/encoding_test.py
new file mode 100644
index 0000000..cb6bfe5
--- /dev/null
+++ b/apitools/base/py/encoding_test.py
@@ -0,0 +1,504 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import base64
+import datetime
+import json
+import sys
+
+import unittest2
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import util
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+from apitools.base.py import extra_types
+
+
class SimpleMessage(messages.Message):
    """Fixture: one singular and one repeated string field."""
    field = messages.StringField(1)
    repfield = messages.StringField(2, repeated=True)
+
+
class BytesMessage(messages.Message):
    """Fixture: one singular and one repeated bytes field."""
    field = messages.BytesField(1)
    repfield = messages.BytesField(2, repeated=True)
+
+
class TimeMessage(messages.Message):
    """Fixture: a single DateTimeField."""
    timefield = message_types.DateTimeField(3)
+
+
@encoding.MapUnrecognizedFields('additional_properties')
class AdditionalPropertiesMessage(messages.Message):
    """Fixture: string-valued map carried via additional_properties."""

    class AdditionalProperty(messages.Message):
        key = messages.StringField(1)
        value = messages.StringField(2)

    additional_properties = messages.MessageField(
        'AdditionalProperty', 1, repeated=True)
+
+
@encoding.MapUnrecognizedFields('additional_properties')
class AdditionalIntPropertiesMessage(messages.Message):
    """Fixture: integer-valued map carried via additional_properties."""

    class AdditionalProperty(messages.Message):
        key = messages.StringField(1)
        value = messages.IntegerField(2)

    additional_properties = messages.MessageField(
        'AdditionalProperty', 1, repeated=True)
+
+
@encoding.MapUnrecognizedFields('additional_properties')
class UnrecognizedEnumMessage(messages.Message):
    """Fixture: enum-valued map carried via additional_properties."""

    class ThisEnum(messages.Enum):
        VALUE_ONE = 1
        VALUE_TWO = 2

    class AdditionalProperty(messages.Message):
        key = messages.StringField(1)
        value = messages.EnumField('UnrecognizedEnumMessage.ThisEnum', 2)

    additional_properties = messages.MessageField(
        AdditionalProperty, 1, repeated=True)
+
+
class CompoundPropertyType(messages.Message):
    """Fixture: a small compound value used as a map value type."""
    index = messages.IntegerField(1)
    name = messages.StringField(2)
+
+
class MessageWithEnum(messages.Message):
    """Fixture: enum fields with and without a default value."""

    class ThisEnum(messages.Enum):
        VALUE_ONE = 1
        VALUE_TWO = 2

    field_one = messages.EnumField(ThisEnum, 1)
    # field_two has a default, so a bad wire value leaves the default.
    field_two = messages.EnumField(ThisEnum, 2, default=ThisEnum.VALUE_TWO)
    ignored_field = messages.EnumField(ThisEnum, 3)
+
+
@encoding.MapUnrecognizedFields('additional_properties')
class AdditionalMessagePropertiesMessage(messages.Message):
    """Fixture: message-valued map carried via additional_properties."""

    class AdditionalProperty(messages.Message):
        key = messages.StringField(1)
        value = messages.MessageField(CompoundPropertyType, 2)

    additional_properties = messages.MessageField(
        'AdditionalProperty', 1, repeated=True)
+
+
class HasNestedMessage(messages.Message):
    """Fixture: nests an additional-properties message."""
    nested = messages.MessageField(AdditionalPropertiesMessage, 1)
    nested_list = messages.StringField(2, repeated=True)
+
+
class ExtraNestedMessage(messages.Message):
    """Fixture: one more level of nesting around HasNestedMessage."""
    nested = messages.MessageField(HasNestedMessage, 1)
+
+
class MessageWithRemappings(messages.Message):
    """Fixture: fields/enum values with custom JSON names (registered
    below via AddCustomJson*Mapping)."""

    class SomeEnum(messages.Enum):
        enum_value = 1
        second_value = 2

    enum_field = messages.EnumField(SomeEnum, 1)
    double_encoding = messages.EnumField(SomeEnum, 2)
    another_field = messages.StringField(3)
    repeated_enum = messages.EnumField(SomeEnum, 4, repeated=True)
    repeated_field = messages.StringField(5, repeated=True)
+
+
@encoding.MapUnrecognizedFields('additional_properties')
class RepeatedJsonValueMessage(messages.Message):
    """Fixture: map whose values are repeated JsonValue messages."""

    class AdditionalProperty(messages.Message):
        key = messages.StringField(1)
        value = messages.MessageField(extra_types.JsonValue, 2, repeated=True)

    additional_properties = messages.MessageField('AdditionalProperty', 1,
                                                  repeated=True)
+
+
+encoding.AddCustomJsonEnumMapping(MessageWithRemappings.SomeEnum,
+                                  'enum_value', 'wire_name')
+encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
+                                   'double_encoding', 'doubleEncoding')
+encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
+                                   'another_field', 'anotherField')
+encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
+                                   'repeated_field', 'repeatedField')
+
+
+class EncodingTest(unittest2.TestCase):
+
    def testCopyProtoMessage(self):
        # Copies are independent: mutating the source must not change
        # the copy.
        msg = SimpleMessage(field='abc')
        new_msg = encoding.CopyProtoMessage(msg)
        self.assertEqual(msg.field, new_msg.field)
        msg.field = 'def'
        self.assertNotEqual(msg.field, new_msg.field)
+
    def testBytesEncoding(self):
        # Decoding accepts both standard and urlsafe base64; encoding
        # always emits the urlsafe alphabet.
        b64_str = 'AAc+'
        b64_msg = '{"field": "%s"}' % b64_str
        urlsafe_b64_str = 'AAc-'
        urlsafe_b64_msg = '{"field": "%s"}' % urlsafe_b64_str
        data = base64.b64decode(b64_str)
        msg = BytesMessage(field=data)
        self.assertEqual(
            msg, encoding.JsonToMessage(BytesMessage, urlsafe_b64_msg))
        self.assertEqual(msg, encoding.JsonToMessage(BytesMessage, b64_msg))
        self.assertEqual(urlsafe_b64_msg, encoding.MessageToJson(msg))

        enc_rep_msg = '{"repfield": ["%(b)s", "%(b)s"]}' % {
            'b': urlsafe_b64_str}
        rep_msg = BytesMessage(repfield=[data, data])
        self.assertEqual(
            rep_msg, encoding.JsonToMessage(BytesMessage, enc_rep_msg))
        self.assertEqual(enc_rep_msg, encoding.MessageToJson(rep_msg))
+
    def testIncludeFields(self):
        # include_fields forces unset fields into the JSON: null for
        # singular, [] for repeated.
        msg = SimpleMessage()
        self.assertEqual('{}', encoding.MessageToJson(msg))
        self.assertEqual(
            '{"field": null}',
            encoding.MessageToJson(msg, include_fields=['field']))
        self.assertEqual(
            '{"repfield": []}',
            encoding.MessageToJson(msg, include_fields=['repfield']))
+
    def testNestedIncludeFields(self):
        # Dotted include_fields paths reach into nested messages.
        msg = HasNestedMessage(
            nested=AdditionalPropertiesMessage(
                additional_properties=[]))
        self.assertEqual(
            '{"nested": null}',
            encoding.MessageToJson(msg, include_fields=['nested']))
        self.assertEqual(
            '{"nested": {"additional_properties": []}}',
            encoding.MessageToJson(
                msg, include_fields=['nested.additional_properties']))
        msg = ExtraNestedMessage(nested=msg)
        self.assertEqual(
            '{"nested": {"nested": null}}',
            encoding.MessageToJson(msg, include_fields=['nested.nested']))
        # When clearing 'nested.nested_list', its sibling ('nested.nested')
        # should remain unaffected.
        self.assertIn(
            encoding.MessageToJson(msg, include_fields=['nested.nested_list']),
            ['{"nested": {"nested": {}, "nested_list": []}}',
             '{"nested": {"nested_list": [], "nested": {}}}'])
        self.assertEqual(
            '{"nested": {"nested": {"additional_properties": []}}}',
            encoding.MessageToJson(
                msg, include_fields=['nested.nested.additional_properties']))
+
    def testAdditionalPropertyMapping(self):
        # additional_properties pairs flatten to top-level JSON keys
        # and round-trip back into independent pair messages.
        msg = AdditionalPropertiesMessage()
        msg.additional_properties = [
            AdditionalPropertiesMessage.AdditionalProperty(
                key='key_one', value='value_one'),
            AdditionalPropertiesMessage.AdditionalProperty(
                key='key_two', value='value_two'),
        ]

        encoded_msg = encoding.MessageToJson(msg)
        self.assertEqual(
            {'key_one': 'value_one', 'key_two': 'value_two'},
            json.loads(encoded_msg))

        new_msg = encoding.JsonToMessage(type(msg), encoded_msg)
        self.assertEqual(
            set(('key_one', 'key_two')),
            set([x.key for x in new_msg.additional_properties]))
        self.assertIsNot(msg, new_msg)

        # Mutating the round-tripped message must not affect the original.
        new_msg.additional_properties.pop()
        self.assertEqual(1, len(new_msg.additional_properties))
        self.assertEqual(2, len(msg.additional_properties))
+
    def testNumericPropertyName(self):
        # Keys that look like numbers still decode as map entries.
        json_msg = '{"nested": {"123": "def"}}'
        msg = encoding.JsonToMessage(HasNestedMessage, json_msg)
        self.assertEqual(1, len(msg.nested.additional_properties))
+
    def testNumericPropertyValue(self):
        # String-encoded numbers decode into IntegerField map values.
        json_msg = '{"key_one": "123"}'
        msg = encoding.JsonToMessage(AdditionalIntPropertiesMessage, json_msg)
        self.assertEqual(
            AdditionalIntPropertiesMessage(
                additional_properties=[
                    AdditionalIntPropertiesMessage.AdditionalProperty(
                        key='key_one', value=123)]),
            msg)
+
    def testAdditionalMessageProperties(self):
        # Unknown keys with message-typed values decode into pairs.
        json_msg = '{"input": {"index": 0, "name": "output"}}'
        result = encoding.JsonToMessage(
            AdditionalMessagePropertiesMessage, json_msg)
        self.assertEqual(1, len(result.additional_properties))
        self.assertEqual(0, result.additional_properties[0].value.index)
+
    def testUnrecognizedEnum(self):
        # Unknown keys with enum-typed values decode into pairs.
        json_msg = '{"input": "VALUE_ONE"}'
        result = encoding.JsonToMessage(
            UnrecognizedEnumMessage, json_msg)
        self.assertEqual(1, len(result.additional_properties))
        self.assertEqual(UnrecognizedEnumMessage.ThisEnum.VALUE_ONE,
                         result.additional_properties[0].value)
+
    def testNestedFieldMapping(self):
        # additional_properties flattening also applies when the map
        # message is nested inside another message.
        nested_msg = AdditionalPropertiesMessage()
        nested_msg.additional_properties = [
            AdditionalPropertiesMessage.AdditionalProperty(
                key='key_one', value='value_one'),
            AdditionalPropertiesMessage.AdditionalProperty(
                key='key_two', value='value_two'),
        ]
        msg = HasNestedMessage(nested=nested_msg)

        encoded_msg = encoding.MessageToJson(msg)
        self.assertEqual(
            {'nested': {'key_one': 'value_one', 'key_two': 'value_two'}},
            json.loads(encoded_msg))

        new_msg = encoding.JsonToMessage(type(msg), encoded_msg)
        self.assertEqual(
            set(('key_one', 'key_two')),
            set([x.key for x in new_msg.nested.additional_properties]))

        # Round-tripped message is independent of the original.
        new_msg.nested.additional_properties.pop()
        self.assertEqual(1, len(new_msg.nested.additional_properties))
        self.assertEqual(2, len(msg.nested.additional_properties))
+
    def testValidEnums(self):
        # Known enum names decode to values; defaults fill unset fields.
        message_json = '{"field_one": "VALUE_ONE"}'
        message = encoding.JsonToMessage(MessageWithEnum, message_json)
        self.assertEqual(MessageWithEnum.ThisEnum.VALUE_ONE, message.field_one)
        self.assertEqual(MessageWithEnum.ThisEnum.VALUE_TWO, message.field_two)
        self.assertEqual(json.loads(message_json),
                         json.loads(encoding.MessageToJson(message)))
+
    def testIgnoredEnums(self):
        # Unknown enum strings are preserved as unrecognized fields and
        # survive re-encoding instead of raising.
        json_with_typo = '{"field_one": "VALUE_OEN"}'
        message = encoding.JsonToMessage(MessageWithEnum, json_with_typo)
        self.assertEqual(None, message.field_one)
        self.assertEqual(('VALUE_OEN', messages.Variant.ENUM),
                         message.get_unrecognized_field_info('field_one'))
        self.assertEqual(json.loads(json_with_typo),
                         json.loads(encoding.MessageToJson(message)))

        # Empty input decodes to an empty message.
        empty_json = ''
        message = encoding.JsonToMessage(MessageWithEnum, empty_json)
        self.assertEqual(None, message.field_one)
+
    def testIgnoredEnumsWithDefaults(self):
        # A bad wire value on a defaulted enum field keeps the default
        # but still round-trips the original string.
        json_with_typo = '{"field_two": "VALUE_OEN"}'
        message = encoding.JsonToMessage(MessageWithEnum, json_with_typo)
        self.assertEqual(MessageWithEnum.ThisEnum.VALUE_TWO, message.field_two)
        self.assertEqual(json.loads(json_with_typo),
                         json.loads(encoding.MessageToJson(message)))
+
    def testUnknownNestedRoundtrip(self):
        # Wholly-unknown nested structures survive a decode/encode cycle.
        json_message = '{"field": "abc", "submessage": {"a": 1, "b": "foo"}}'
        message = encoding.JsonToMessage(SimpleMessage, json_message)
        self.assertEqual(json.loads(json_message),
                         json.loads(encoding.MessageToJson(message)))
+
    def testJsonDatetime(self):
        # DateTimeFields serialize as ISO 8601 with UTC offset.
        msg = TimeMessage(timefield=datetime.datetime(
            2014, 7, 2, 23, 33, 25, 541000,
            tzinfo=util.TimeZoneOffset(datetime.timedelta(0))))
        self.assertEqual(
            '{"timefield": "2014-07-02T23:33:25.541000+00:00"}',
            encoding.MessageToJson(msg))
+
+    def testEnumRemapping(self):
+        msg = MessageWithRemappings(
+            enum_field=MessageWithRemappings.SomeEnum.enum_value)
+        json_message = encoding.MessageToJson(msg)
+        self.assertEqual('{"enum_field": "wire_name"}', json_message)
+        self.assertEqual(
+            msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
+
+    def testRepeatedEnumRemapping(self):
+        msg = MessageWithRemappings(
+            repeated_enum=[
+                MessageWithRemappings.SomeEnum.enum_value,
+                MessageWithRemappings.SomeEnum.second_value,
+            ])
+        json_message = encoding.MessageToJson(msg)
+        self.assertEqual('{"repeated_enum": ["wire_name", "second_value"]}',
+                         json_message)
+        self.assertEqual(
+            msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
+
+    def testFieldRemapping(self):
+        msg = MessageWithRemappings(another_field='abc')
+        json_message = encoding.MessageToJson(msg)
+        self.assertEqual('{"anotherField": "abc"}', json_message)
+        self.assertEqual(
+            msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
+
+    def testRepeatedFieldRemapping(self):
+        msg = MessageWithRemappings(repeated_field=['abc', 'def'])
+        json_message = encoding.MessageToJson(msg)
+        self.assertEqual('{"repeatedField": ["abc", "def"]}', json_message)
+        self.assertEqual(
+            msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
+
+    def testMultipleRemapping(self):
+        msg = MessageWithRemappings(
+            double_encoding=MessageWithRemappings.SomeEnum.enum_value)
+        json_message = encoding.MessageToJson(msg)
+        self.assertEqual('{"doubleEncoding": "wire_name"}', json_message)
+        self.assertEqual(
+            msg, encoding.JsonToMessage(MessageWithRemappings, json_message))
+
+    def testRepeatedRemapping(self):
+        # Should allow remapping if the mapping remains the same.
+        encoding.AddCustomJsonEnumMapping(MessageWithRemappings.SomeEnum,
+                                          'enum_value', 'wire_name')
+        encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
+                                           'double_encoding', 'doubleEncoding')
+        encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
+                                           'another_field', 'anotherField')
+        encoding.AddCustomJsonFieldMapping(MessageWithRemappings,
+                                           'repeated_field', 'repeatedField')
+
+        # Should raise errors if the remapping changes the mapping.
+        self.assertRaises(
+            exceptions.InvalidDataError,
+            encoding.AddCustomJsonFieldMapping,
+            MessageWithRemappings, 'double_encoding', 'something_else')
+        self.assertRaises(
+            exceptions.InvalidDataError,
+            encoding.AddCustomJsonFieldMapping,
+            MessageWithRemappings, 'enum_field', 'anotherField')
+        self.assertRaises(
+            exceptions.InvalidDataError,
+            encoding.AddCustomJsonEnumMapping,
+            MessageWithRemappings.SomeEnum, 'enum_value', 'another_name')
+        self.assertRaises(
+            exceptions.InvalidDataError,
+            encoding.AddCustomJsonEnumMapping,
+            MessageWithRemappings.SomeEnum, 'second_value', 'wire_name')
+
+    def testMessageToRepr(self):
+        # Using the same string returned by MessageToRepr, with the
+        # module names fixed.
+        # pylint: disable=bad-whitespace
+        msg = SimpleMessage(field='field', repfield=['field', 'field', ],)
+        # pylint: enable=bad-whitespace
+        self.assertEqual(
+            encoding.MessageToRepr(msg),
+            r"%s.SimpleMessage(field='field',repfield=['field','field',],)" % (
+                __name__,))
+        self.assertEqual(
+            encoding.MessageToRepr(msg, no_modules=True),
+            r"SimpleMessage(field='field',repfield=['field','field',],)")
+
+    def testMessageToReprWithTime(self):
+        msg = TimeMessage(timefield=datetime.datetime(
+            2014, 7, 2, 23, 33, 25, 541000,
+            tzinfo=util.TimeZoneOffset(datetime.timedelta(0))))
+        self.assertEqual(
+            encoding.MessageToRepr(msg, multiline=True),
+            ('%s.TimeMessage(\n    '
+             'timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, '
+             'tzinfo=apitools.base.protorpclite.util.TimeZoneOffset('
+             'datetime.timedelta(0))),\n)') % __name__)
+        self.assertEqual(
+            encoding.MessageToRepr(msg, multiline=True, no_modules=True),
+            'TimeMessage(\n    '
+            'timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, '
+            'tzinfo=TimeZoneOffset(datetime.timedelta(0))),\n)')
+
+    def testPackageMappingsNoPackage(self):
+        this_module_name = util.get_package_for_module(__name__)
+        full_type_name = 'MessageWithEnum.ThisEnum'
+        full_key = '%s.%s' % (this_module_name, full_type_name)
+        self.assertEqual(full_key,
+                         encoding._GetTypeKey(MessageWithEnum.ThisEnum, ''))
+
+    def testPackageMappingsWithPackage(self):
+        this_module_name = util.get_package_for_module(__name__)
+        full_type_name = 'MessageWithEnum.ThisEnum'
+        full_key = '%s.%s' % (this_module_name, full_type_name)
+        this_module = sys.modules[__name__]
+        new_package = 'new_package'
+        try:
+            setattr(this_module, 'package', new_package)
+            new_key = '%s.%s' % (new_package, full_type_name)
+            self.assertEqual(
+                new_key,
+                encoding._GetTypeKey(MessageWithEnum.ThisEnum, ''))
+            self.assertEqual(
+                full_key,
+                encoding._GetTypeKey(MessageWithEnum.ThisEnum, new_package))
+        finally:
+            delattr(this_module, 'package')
+
+    def testRepeatedJsonValuesAsRepeatedProperty(self):
+        encoded_msg = '{"a": [{"one": 1}]}'
+        msg = encoding.JsonToMessage(RepeatedJsonValueMessage, encoded_msg)
+        self.assertEqual(encoded_msg, encoding.MessageToJson(msg))
+
+    def testDictToProtoMap(self):
+        dict_ = {'key': 'value'}
+
+        encoded_msg = encoding.DictToProtoMap(dict_,
+                                              AdditionalPropertiesMessage)
+        expected_msg = AdditionalPropertiesMessage()
+        expected_msg.additional_properties = [
+            AdditionalPropertiesMessage.AdditionalProperty(
+                key='key', value='value')
+        ]
+        self.assertEqual(encoded_msg, expected_msg)
+
+    def testDictToProtoMapSorted(self):
+        tuples = [('key{0:02}'.format(i), 'value') for i in range(100)]
+        dict_ = dict(tuples)
+
+        encoded_msg = encoding.DictToProtoMap(dict_,
+                                              AdditionalPropertiesMessage,
+                                              sort_items=True)
+        expected_msg = AdditionalPropertiesMessage()
+        expected_msg.additional_properties = [
+            AdditionalPropertiesMessage.AdditionalProperty(
+                key=key, value=value)
+            for key, value in tuples
+        ]
+        self.assertEqual(encoded_msg, expected_msg)
+
+    def testDictToProtoMapNumeric(self):
+        dict_ = {'key': 1}
+
+        encoded_msg = encoding.DictToProtoMap(dict_,
+                                              AdditionalIntPropertiesMessage)
+        expected_msg = AdditionalIntPropertiesMessage()
+        expected_msg.additional_properties = [
+            AdditionalIntPropertiesMessage.AdditionalProperty(
+                key='key', value=1)
+        ]
+        self.assertEqual(encoded_msg, expected_msg)
diff --git a/apitools/base/py/exceptions.py b/apitools/base/py/exceptions.py
new file mode 100644
index 0000000..e63b893
--- /dev/null
+++ b/apitools/base/py/exceptions.py
@@ -0,0 +1,168 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Exceptions for generated client libraries."""
+
+
+class Error(Exception):
+
+    """Base class for all exceptions."""
+
+
+class TypecheckError(Error, TypeError):
+
+    """An object of an incorrect type is provided."""
+
+
+class NotFoundError(Error):
+
+    """A specified resource could not be found."""
+
+
+class UserError(Error):
+
+    """Base class for errors related to user input."""
+
+
+class InvalidDataError(Error):
+
+    """Base class for any invalid data error."""
+
+
+class CommunicationError(Error):
+
+    """Any communication error talking to an API server."""
+
+
+class HttpError(CommunicationError):
+
+    """Error making a request."""
+
+    def __init__(self, response, content, url,
+                 method_config=None, request=None):
+        super(HttpError, self).__init__()
+        self.response = response
+        self.content = content
+        self.url = url
+        self.method_config = method_config
+        self.request = request
+
+    def __str__(self):
+        content = self.content
+        if isinstance(content, bytes):
+            content = self.content.decode('ascii', 'replace')
+        return 'HttpError accessing <%s>: response: <%s>, content <%s>' % (
+            self.url, self.response, content)
+
+    @property
+    def status_code(self):
+        # TODO(craigcitro): Turn this into something better than a
+        # KeyError if there is no status.
+        return int(self.response['status'])
+
+    @classmethod
+    def FromResponse(cls, http_response):
+        return cls(http_response.info, http_response.content,
+                   http_response.request_url)
+
+
+class InvalidUserInputError(InvalidDataError):
+
+    """User-provided input is invalid."""
+
+
+class InvalidDataFromServerError(InvalidDataError, CommunicationError):
+
+    """Data received from the server is malformed."""
+
+
+class BatchError(Error):
+
+    """Error generated while constructing a batch request."""
+
+
+class ConfigurationError(Error):
+
+    """Base class for configuration errors."""
+
+
+class GeneratedClientError(Error):
+
+    """The generated client configuration is invalid."""
+
+
+class ConfigurationValueError(UserError):
+
+    """Some part of the user-specified client configuration is invalid."""
+
+
+class ResourceUnavailableError(Error):
+
+    """User requested an unavailable resource."""
+
+
+class CredentialsError(Error):
+
+    """Errors related to invalid credentials."""
+
+
+class TransferError(CommunicationError):
+
+    """Errors related to transfers."""
+
+
+class TransferRetryError(TransferError):
+
+    """Retryable errors related to transfers."""
+
+
+class TransferInvalidError(TransferError):
+
+    """The given transfer is invalid."""
+
+
+class RequestError(CommunicationError):
+
+    """The request was not successful."""
+
+
+class RetryAfterError(HttpError):
+
+    """The response contained a retry-after header."""
+
+    def __init__(self, response, content, url, retry_after):
+        super(RetryAfterError, self).__init__(response, content, url)
+        self.retry_after = int(retry_after)
+
+    @classmethod
+    def FromResponse(cls, http_response):
+        return cls(http_response.info, http_response.content,
+                   http_response.request_url, http_response.retry_after)
+
+
+class BadStatusCodeError(HttpError):
+
+    """The request completed but returned a bad status code."""
+
+
+class NotYetImplementedError(GeneratedClientError):
+
+    """This functionality is not yet implemented."""
+
+
+class StreamExhausted(Error):
+
+    """Attempted to read more bytes from a stream than were available."""
diff --git a/apitools/base/py/extra_types.py b/apitools/base/py/extra_types.py
new file mode 100644
index 0000000..79a4900
--- /dev/null
+++ b/apitools/base/py/extra_types.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Extra types understood by apitools."""
+
+import collections
+import datetime
+import json
+import numbers
+
+import six
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+from apitools.base.protorpclite import protojson
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+from apitools.base.py import util
+
+__all__ = [
+    'DateField',
+    'DateTimeMessage',
+    'JsonArray',
+    'JsonObject',
+    'JsonValue',
+    'JsonProtoEncoder',
+    'JsonProtoDecoder',
+]
+
+# pylint:disable=invalid-name
+DateTimeMessage = message_types.DateTimeMessage
+# pylint:enable=invalid-name
+
+
+# We insert our own metaclass here to avoid letting ProtoRPC
+# register this as the default field type for strings.
+#  * since ProtoRPC does this via metaclasses, we don't have any
+#    choice but to use one ourselves
+#  * since a subclass's metaclass must inherit from its superclass's
+#    metaclass, we're forced to have this hard-to-read inheritance.
+#
+# pylint: disable=protected-access
+class _FieldMeta(messages._FieldMeta):
+
+    def __init__(cls, name, bases, dct):  # pylint: disable=no-self-argument
+        # pylint: disable=super-init-not-called,non-parent-init-called
+        type.__init__(cls, name, bases, dct)
+# pylint: enable=protected-access
+
+
+class DateField(six.with_metaclass(_FieldMeta, messages.Field)):
+
+    """Field definition for Date values."""
+
+    VARIANTS = frozenset([messages.Variant.STRING])
+    DEFAULT_VARIANT = messages.Variant.STRING
+    type = datetime.date
+
+
+def _ValidateJsonValue(json_value):
+    entries = [(f, json_value.get_assigned_value(f.name))
+               for f in json_value.all_fields()]
+    assigned_entries = [(f, value)
+                        for f, value in entries if value is not None]
+    if len(assigned_entries) != 1:
+        raise exceptions.InvalidDataError(
+            'Malformed JsonValue: %s' % json_value)
+
+
+def _JsonValueToPythonValue(json_value):
+    """Convert the given JsonValue to an equivalent Python value."""
+    util.Typecheck(json_value, JsonValue)
+    _ValidateJsonValue(json_value)
+    if json_value.is_null:
+        return None
+    entries = [(f, json_value.get_assigned_value(f.name))
+               for f in json_value.all_fields()]
+    assigned_entries = [(f, value)
+                        for f, value in entries if value is not None]
+    field, value = assigned_entries[0]
+    if not isinstance(field, messages.MessageField):
+        return value
+    elif field.message_type is JsonObject:
+        return _JsonObjectToPythonValue(value)
+    elif field.message_type is JsonArray:
+        return _JsonArrayToPythonValue(value)
+
+
+def _JsonObjectToPythonValue(json_value):
+    util.Typecheck(json_value, JsonObject)
+    return dict([(prop.key, _JsonValueToPythonValue(prop.value)) for prop
+                 in json_value.properties])
+
+
+def _JsonArrayToPythonValue(json_value):
+    util.Typecheck(json_value, JsonArray)
+    return [_JsonValueToPythonValue(e) for e in json_value.entries]
+
+
+_MAXINT64 = 2 ** 63 - 1
+_MININT64 = -(2 ** 63)
+
+
+def _PythonValueToJsonValue(py_value):
+    """Convert the given python value to a JsonValue."""
+    if py_value is None:
+        return JsonValue(is_null=True)
+    if isinstance(py_value, bool):
+        return JsonValue(boolean_value=py_value)
+    if isinstance(py_value, six.string_types):
+        return JsonValue(string_value=py_value)
+    if isinstance(py_value, numbers.Number):
+        if isinstance(py_value, six.integer_types):
+            if _MININT64 <= py_value <= _MAXINT64:
+                return JsonValue(integer_value=py_value)
+        return JsonValue(double_value=float(py_value))
+    if isinstance(py_value, dict):
+        return JsonValue(object_value=_PythonValueToJsonObject(py_value))
+    if isinstance(py_value, collections.Iterable):
+        return JsonValue(array_value=_PythonValueToJsonArray(py_value))
+    raise exceptions.InvalidDataError(
+        'Cannot convert "%s" to JsonValue' % py_value)
+
+
+def _PythonValueToJsonObject(py_value):
+    util.Typecheck(py_value, dict)
+    return JsonObject(
+        properties=[
+            JsonObject.Property(key=key, value=_PythonValueToJsonValue(value))
+            for key, value in py_value.items()])
+
+
+def _PythonValueToJsonArray(py_value):
+    return JsonArray(entries=list(map(_PythonValueToJsonValue, py_value)))
+
+
+class JsonValue(messages.Message):
+
+    """Any valid JSON value."""
+    # Is this JSON object `null`?
+    is_null = messages.BooleanField(1, default=False)
+
+    # Exactly one of the following is provided if is_null is False; none
+    # should be provided if is_null is True.
+    boolean_value = messages.BooleanField(2)
+    string_value = messages.StringField(3)
+    # We keep two numeric fields to keep int64 round-trips exact.
+    double_value = messages.FloatField(4, variant=messages.Variant.DOUBLE)
+    integer_value = messages.IntegerField(5, variant=messages.Variant.INT64)
+    # Compound types
+    object_value = messages.MessageField('JsonObject', 6)
+    array_value = messages.MessageField('JsonArray', 7)
+
+
+class JsonObject(messages.Message):
+
+    """A JSON object value.
+
+    Messages:
+      Property: A property of a JsonObject.
+
+    Fields:
+      properties: A list of properties of a JsonObject.
+    """
+
+    class Property(messages.Message):
+
+        """A property of a JSON object.
+
+        Fields:
+          key: Name of the property.
+          value: A JsonValue attribute.
+        """
+        key = messages.StringField(1)
+        value = messages.MessageField(JsonValue, 2)
+
+    properties = messages.MessageField(Property, 1, repeated=True)
+
+
+class JsonArray(messages.Message):
+
+    """A JSON array value."""
+    entries = messages.MessageField(JsonValue, 1, repeated=True)
+
+
+_JSON_PROTO_TO_PYTHON_MAP = {
+    JsonArray: _JsonArrayToPythonValue,
+    JsonObject: _JsonObjectToPythonValue,
+    JsonValue: _JsonValueToPythonValue,
+}
+_JSON_PROTO_TYPES = tuple(_JSON_PROTO_TO_PYTHON_MAP.keys())
+
+
+def _JsonProtoToPythonValue(json_proto):
+    util.Typecheck(json_proto, _JSON_PROTO_TYPES)
+    return _JSON_PROTO_TO_PYTHON_MAP[type(json_proto)](json_proto)
+
+
+def _PythonValueToJsonProto(py_value):
+    if isinstance(py_value, dict):
+        return _PythonValueToJsonObject(py_value)
+    if (isinstance(py_value, collections.Iterable) and
+            not isinstance(py_value, six.string_types)):
+        return _PythonValueToJsonArray(py_value)
+    return _PythonValueToJsonValue(py_value)
+
+
+def _JsonProtoToJson(json_proto, unused_encoder=None):
+    return json.dumps(_JsonProtoToPythonValue(json_proto))
+
+
+def _JsonToJsonProto(json_data, unused_decoder=None):
+    return _PythonValueToJsonProto(json.loads(json_data))
+
+
+def _JsonToJsonValue(json_data, unused_decoder=None):
+    result = _PythonValueToJsonProto(json.loads(json_data))
+    if isinstance(result, JsonValue):
+        return result
+    elif isinstance(result, JsonObject):
+        return JsonValue(object_value=result)
+    elif isinstance(result, JsonArray):
+        return JsonValue(array_value=result)
+    else:
+        raise exceptions.InvalidDataError(
+            'Malformed JsonValue: %s' % json_data)
+
+
+# pylint:disable=invalid-name
+JsonProtoEncoder = _JsonProtoToJson
+JsonProtoDecoder = _JsonToJsonProto
+# pylint:enable=invalid-name
+encoding.RegisterCustomMessageCodec(
+    encoder=JsonProtoEncoder, decoder=_JsonToJsonValue)(JsonValue)
+encoding.RegisterCustomMessageCodec(
+    encoder=JsonProtoEncoder, decoder=JsonProtoDecoder)(JsonObject)
+encoding.RegisterCustomMessageCodec(
+    encoder=JsonProtoEncoder, decoder=JsonProtoDecoder)(JsonArray)
+
+
+def _EncodeDateTimeField(field, value):
+    result = protojson.ProtoJson().encode_field(field, value)
+    return encoding.CodecResult(value=result, complete=True)
+
+
+def _DecodeDateTimeField(unused_field, value):
+    result = protojson.ProtoJson().decode_field(
+        message_types.DateTimeField(1), value)
+    return encoding.CodecResult(value=result, complete=True)
+
+
+encoding.RegisterFieldTypeCodec(_EncodeDateTimeField, _DecodeDateTimeField)(
+    message_types.DateTimeField)
+
+
+def _EncodeInt64Field(field, value):
+    """Handle the special case of int64 as a string."""
+    capabilities = [
+        messages.Variant.INT64,
+        messages.Variant.UINT64,
+    ]
+    if field.variant not in capabilities:
+        return encoding.CodecResult(value=value, complete=False)
+
+    if field.repeated:
+        result = [str(x) for x in value]
+    else:
+        result = str(value)
+    return encoding.CodecResult(value=result, complete=True)
+
+
+def _DecodeInt64Field(unused_field, value):
+    # Don't need to do anything special, they're decoded just fine
+    return encoding.CodecResult(value=value, complete=False)
+
+encoding.RegisterFieldTypeCodec(_EncodeInt64Field, _DecodeInt64Field)(
+    messages.IntegerField)
+
+
+def _EncodeDateField(field, value):
+    """Encoder for datetime.date objects."""
+    if field.repeated:
+        result = [d.isoformat() for d in value]
+    else:
+        result = value.isoformat()
+    return encoding.CodecResult(value=result, complete=True)
+
+
+def _DecodeDateField(unused_field, value):
+    date = datetime.datetime.strptime(value, '%Y-%m-%d').date()
+    return encoding.CodecResult(value=date, complete=True)
+
+encoding.RegisterFieldTypeCodec(_EncodeDateField, _DecodeDateField)(DateField)
diff --git a/apitools/base/py/extra_types_test.py b/apitools/base/py/extra_types_test.py
new file mode 100644
index 0000000..7e37f7c
--- /dev/null
+++ b/apitools/base/py/extra_types_test.py
@@ -0,0 +1,201 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import datetime
+import json
+import math
+
+import unittest2
+
+from apitools.base.protorpclite import messages
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+from apitools.base.py import extra_types
+
+
+class ExtraTypesTest(unittest2.TestCase):
+
+    def assertRoundTrip(self, value):
+        if isinstance(value, extra_types._JSON_PROTO_TYPES):
+            self.assertEqual(
+                value,
+                extra_types._PythonValueToJsonProto(
+                    extra_types._JsonProtoToPythonValue(value)))
+        else:
+            self.assertEqual(
+                value,
+                extra_types._JsonProtoToPythonValue(
+                    extra_types._PythonValueToJsonProto(value)))
+
+    def assertTranslations(self, py_value, json_proto):
+        self.assertEqual(
+            py_value, extra_types._JsonProtoToPythonValue(json_proto))
+        self.assertEqual(
+            json_proto, extra_types._PythonValueToJsonProto(py_value))
+
+    def testInvalidProtos(self):
+        with self.assertRaises(exceptions.InvalidDataError):
+            extra_types._ValidateJsonValue(extra_types.JsonValue())
+        with self.assertRaises(exceptions.InvalidDataError):
+            extra_types._ValidateJsonValue(
+                extra_types.JsonValue(is_null=True, string_value='a'))
+        with self.assertRaises(exceptions.InvalidDataError):
+            extra_types._ValidateJsonValue(
+                extra_types.JsonValue(integer_value=3, string_value='a'))
+
+    def testNullEncoding(self):
+        self.assertTranslations(None, extra_types.JsonValue(is_null=True))
+
+    def testJsonNumberEncoding(self):
+        seventeen = extra_types.JsonValue(integer_value=17)
+        self.assertRoundTrip(17)
+        self.assertRoundTrip(seventeen)
+        self.assertTranslations(17, seventeen)
+
+        json_pi = extra_types.JsonValue(double_value=math.pi)
+        self.assertRoundTrip(math.pi)
+        self.assertRoundTrip(json_pi)
+        self.assertTranslations(math.pi, json_pi)
+
+    def testArrayEncoding(self):
+        array = [3, 'four', False]
+        json_array = extra_types.JsonArray(entries=[
+            extra_types.JsonValue(integer_value=3),
+            extra_types.JsonValue(string_value='four'),
+            extra_types.JsonValue(boolean_value=False),
+        ])
+        self.assertRoundTrip(array)
+        self.assertRoundTrip(json_array)
+        self.assertTranslations(array, json_array)
+
+    def testArrayAsValue(self):
+        array_json = '[3, "four", false]'
+        array = [3, 'four', False]
+        value = encoding.JsonToMessage(extra_types.JsonValue, array_json)
+        self.assertTrue(isinstance(value, extra_types.JsonValue))
+        self.assertEqual(array, encoding.MessageToPyValue(value))
+
+    def testObjectAsValue(self):
+        obj_json = '{"works": true}'
+        obj = {'works': True}
+        value = encoding.JsonToMessage(extra_types.JsonValue, obj_json)
+        self.assertTrue(isinstance(value, extra_types.JsonValue))
+        self.assertEqual(obj, encoding.MessageToPyValue(value))
+
+    def testDictEncoding(self):
+        d = {'a': 6, 'b': 'eleventeen'}
+        json_d = extra_types.JsonObject(properties=[
+            extra_types.JsonObject.Property(
+                key='a', value=extra_types.JsonValue(integer_value=6)),
+            extra_types.JsonObject.Property(
+                key='b',
+                value=extra_types.JsonValue(string_value='eleventeen')),
+        ])
+        self.assertRoundTrip(d)
+        # We don't know json_d will round-trip, because of randomness in
+        # python dictionary iteration ordering. We also need to force
+        # comparison as lists, since hashing protos isn't helpful.
+        translated_properties = extra_types._PythonValueToJsonProto(
+            d).properties
+        for p in json_d.properties:
+            self.assertIn(p, translated_properties)
+        for p in translated_properties:
+            self.assertIn(p, json_d.properties)
+
+    def testJsonObjectPropertyTranslation(self):
+        value = extra_types.JsonValue(string_value='abc')
+        obj = extra_types.JsonObject(properties=[
+            extra_types.JsonObject.Property(key='attr_name', value=value)])
+        json_value = '"abc"'
+        json_obj = '{"attr_name": "abc"}'
+
+        self.assertRoundTrip(value)
+        self.assertRoundTrip(obj)
+        self.assertRoundTrip(json_value)
+        self.assertRoundTrip(json_obj)
+
+        self.assertEqual(json_value, encoding.MessageToJson(value))
+        self.assertEqual(json_obj, encoding.MessageToJson(obj))
+
+    def testJsonValueAsFieldTranslation(self):
+        class HasJsonValueMsg(messages.Message):
+            some_value = messages.MessageField(extra_types.JsonValue, 1)
+
+        msg_json = '{"some_value": [1, 2, 3]}'
+        msg = HasJsonValueMsg(
+            some_value=encoding.PyValueToMessage(
+                extra_types.JsonValue, [1, 2, 3]))
+        self.assertEqual(msg,
+                         encoding.JsonToMessage(HasJsonValueMsg, msg_json))
+        self.assertEqual(msg_json, encoding.MessageToJson(msg))
+
+    def testDateField(self):
+
+        class DateMsg(messages.Message):
+            start_date = extra_types.DateField(1)
+            all_dates = extra_types.DateField(2, repeated=True)
+
+        msg = DateMsg(
+            start_date=datetime.date(1752, 9, 9), all_dates=[
+                datetime.date(1979, 5, 6),
+                datetime.date(1980, 10, 24),
+                datetime.date(1981, 1, 19),
+            ])
+        msg_dict = {
+            'start_date': '1752-09-09',
+            'all_dates': ['1979-05-06', '1980-10-24', '1981-01-19'],
+        }
+        self.assertEqual(msg_dict, json.loads(encoding.MessageToJson(msg)))
+        self.assertEqual(
+            msg, encoding.JsonToMessage(DateMsg, json.dumps(msg_dict)))
+
+    def testInt64(self):
+        # Testing roundtrip of type 'long'
+
+        class DogeMsg(messages.Message):
+            such_string = messages.StringField(1)
+            wow = messages.IntegerField(2, variant=messages.Variant.INT64)
+            very_unsigned = messages.IntegerField(
+                3, variant=messages.Variant.UINT64)
+            much_repeated = messages.IntegerField(
+                4, variant=messages.Variant.INT64, repeated=True)
+
+        def MtoJ(msg):
+            return encoding.MessageToJson(msg)
+
+        def JtoM(class_type, json_str):
+            return encoding.JsonToMessage(class_type, json_str)
+
+        def DoRoundtrip(class_type, json_msg=None, message=None, times=4):
+            if json_msg:
+                json_msg = MtoJ(JtoM(class_type, json_msg))
+            if message:
+                message = JtoM(class_type, MtoJ(message))
+            if times == 0:
+                result = json_msg if json_msg else message
+                return result
+            return DoRoundtrip(class_type=class_type, json_msg=json_msg,
+                               message=message, times=times - 1)
+
+        # Single
+        json_msg = ('{"such_string": "poot", "wow": "-1234", '
+                    '"very_unsigned": "999", "much_repeated": ["123", "456"]}')
+        out_json = MtoJ(JtoM(DogeMsg, json_msg))
+        self.assertEqual(json.loads(out_json)['wow'], '-1234')
+
+        # Repeated test case
+        msg = DogeMsg(such_string='wow', wow=-1234,
+                      very_unsigned=800, much_repeated=[123, 456])
+        self.assertEqual(msg, DoRoundtrip(DogeMsg, message=msg))
diff --git a/apitools/base/py/http_wrapper.py b/apitools/base/py/http_wrapper.py
new file mode 100644
index 0000000..7baf09f
--- /dev/null
+++ b/apitools/base/py/http_wrapper.py
@@ -0,0 +1,413 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""HTTP wrapper for apitools.
+
+This library wraps the underlying http library we use, which is
+currently httplib2.
+"""
+
+import collections
+import contextlib
+import logging
+import socket
+import time
+
+import httplib2
+import oauth2client
+import six
+from six.moves import http_client
+from six.moves.urllib import parse
+
+from apitools.base.py import exceptions
+from apitools.base.py import util
+
# Public API of this module.
__all__ = [
    'CheckResponse',
    'GetHttp',
    'HandleExceptionsAndRebuildHttpConnections',
    'MakeRequest',
    'RebuildHttpConnections',
    'Request',
    'Response',
    'RethrowExceptionHandler',
]


# 308 and 429 don't have names in httplib.
RESUME_INCOMPLETE = 308  # Used by resumable uploads (see Response.is_redirect)
TOO_MANY_REQUESTS = 429  # Treated as retryable by CheckResponse below.
# Status codes for which Response.is_redirect may return True.
_REDIRECT_STATUS_CODES = (
    http_client.MOVED_PERMANENTLY,
    http_client.FOUND,
    http_client.SEE_OTHER,
    http_client.TEMPORARY_REDIRECT,
    RESUME_INCOMPLETE,
)

# Bundle of arguments passed to a retry_func by MakeRequest.
# http: An httplib2.Http instance.
# http_request: A http_wrapper.Request.
# exc: Exception being raised.
# num_retries: Number of retries consumed; used for exponential backoff.
# max_retry_wait: Cap (seconds) on a single retry sleep.
# total_wait_sec: Seconds elapsed since the first request attempt.
ExceptionRetryArgs = collections.namedtuple(
    'ExceptionRetryArgs', ['http', 'http_request', 'exc', 'num_retries',
                           'max_retry_wait', 'total_wait_sec'])
+
+
@contextlib.contextmanager
def _Httplib2Debuglevel(http_request, level, http=None):
    """Temporarily change the value of httplib2.debuglevel, if necessary.

    If http_request has a `loggable_body` distinct from `body`, then we
    need to prevent httplib2 from logging the full body. This sets
    httplib2.debuglevel for the duration of the `with` block; however,
    that alone won't change the value of existing HTTP connections. If
    an httplib2.Http object is provided, we'll also change the level on
    any cached connections attached to it.

    Args:
      http_request: a Request we're logging.
      level: (int) the debuglevel for logging.
      http: (optional) an httplib2.Http whose connections we should
        set the debuglevel on.

    Yields:
      None.
    """
    if http_request.loggable_body is None:
        # Nothing sensitive/oversized to hide; leave debuglevel alone.
        yield
        return
    old_level = httplib2.debuglevel
    http_levels = {}
    httplib2.debuglevel = level
    if http is not None:
        for connection_key, connection in http.connections.items():
            # httplib2 stores two kinds of values in this dict, connection
            # classes and instances. Since the connection types are all
            # old-style classes, we can't easily distinguish by connection
            # type -- so instead we use the key pattern.
            if ':' not in connection_key:
                continue
            http_levels[connection_key] = connection.debuglevel
            connection.set_debuglevel(level)
    try:
        yield
    finally:
        # Restore levels even if the body of the `with` block raised;
        # otherwise an exception would leave the global debuglevel (and
        # every cached connection) permanently altered.
        httplib2.debuglevel = old_level
        if http is not None:
            for connection_key, saved_level in http_levels.items():
                if connection_key in http.connections:
                    http.connections[connection_key].set_debuglevel(
                        saved_level)
+
+
class Request(object):

    """Class encapsulating the data for an HTTP request."""

    def __init__(self, url='', http_method='GET', headers=None, body=''):
        """Creates a Request; assigning body keeps content-length in sync."""
        self.url = url
        self.http_method = http_method
        self.headers = headers or {}
        # Backing fields for the body/loggable_body properties below.
        self.__body = None
        self.__loggable_body = None
        # Route through the property setter so headers get updated.
        self.body = body

    @property
    def loggable_body(self):
        """A representation of the body that is safe to log."""
        return self.__loggable_body

    @loggable_body.setter
    def loggable_body(self, value):
        # A loggable body only makes sense when there is a body at all.
        if self.body is None:
            raise exceptions.RequestError(
                'Cannot set loggable body on request with no body')
        self.__loggable_body = value

    @property
    def body(self):
        """The payload to send with this request."""
        return self.__body

    @body.setter
    def body(self, value):
        """Sets the request body; handles logging and length measurement."""
        self.__body = value
        if value is None:
            # No body: ensure no stale content-length header survives.
            self.headers.pop('content-length', None)
        else:
            # Avoid calling len() which cannot exceed 4GiB in 32-bit python.
            size = getattr(self.__body, 'length', None) or len(self.__body)
            self.headers['content-length'] = str(size)
        # Non-string bodies (e.g. streams) are assumed to be media payloads
        # too large to print; substitute a placeholder for logging.
        if not isinstance(value, (type(None), six.string_types)):
            self.loggable_body = '<media body>'
+
+
# Note: currently the order of fields here is important, since we want
# to be able to pass in the result from httplib2.request.
class Response(collections.namedtuple(
        'HttpResponse', ['info', 'content', 'request_url'])):

    """Class encapsulating data for an HTTP response."""
    __slots__ = ()

    def __len__(self):
        return self.length

    @property
    def length(self):
        """Return the length of this response.

        We expose this as an attribute since using len() directly can fail
        for responses larger than sys.maxint.

        Returns:
          Response length (as int or long)
        """
        def _LengthFromContentRange(content_range):
            # e.g. "bytes 0-4/10" -> end 4, start 0 -> length 5.
            _, _, range_spec = content_range.partition(' ')
            byte_range = range_spec.partition('/')[0]
            start, _, end = byte_range.partition('-')
            return int(end) - int(start) + 1

        info = self.info
        if '-content-encoding' in info and 'content-range' in info:
            # httplib2 rewrites content-length in the case of a compressed
            # transfer; we can't trust the content-length header in that
            # case, but we *can* trust content-range, if it's present.
            return _LengthFromContentRange(info['content-range'])
        if 'content-length' in info:
            return int(info['content-length'])
        if 'content-range' in info:
            return _LengthFromContentRange(info['content-range'])
        return len(self.content)

    @property
    def status_code(self):
        """The HTTP status as an int."""
        return int(self.info['status'])

    @property
    def retry_after(self):
        """Value of the retry-after header (int), or None when absent."""
        if 'retry-after' not in self.info:
            return None
        return int(self.info['retry-after'])

    @property
    def is_redirect(self):
        """True when the status is a redirect and a location is supplied."""
        return ('location' in self.info and
                self.status_code in _REDIRECT_STATUS_CODES)
+
+
def CheckResponse(response):
    """Raise an exception if response indicates a failure needing retry.

    Args:
      response: A Response object, or None.

    Raises:
      exceptions.RequestError: if response is None.
      exceptions.BadStatusCodeError: for a 5XX or 429 status.
      exceptions.RetryAfterError: when a retry-after header is present.
    """
    if response is None:
        # Caller shouldn't call us if the response is None, but handle anyway.
        # Note: the message must not reference response.request_url -- doing
        # so would raise AttributeError on None instead of the intended
        # RequestError.
        raise exceptions.RequestError(
            'Request did not return a response.')
    elif (response.status_code >= 500 or
          response.status_code == TOO_MANY_REQUESTS):
        raise exceptions.BadStatusCodeError.FromResponse(response)
    elif response.retry_after:
        raise exceptions.RetryAfterError.FromResponse(response)
+
+
def RebuildHttpConnections(http):
    """Rebuilds all http connections in the httplib2.Http instance.

    httplib2 overloads the map in http.connections to contain two different
    types of values:
    { scheme string:  connection class } and
    { scheme + authority string : actual http connection }
    Here we remove all of the entries for actual connections so that on the
    next request httplib2 will rebuild them from the connection types.

    Args:
      http: An httplib2.Http instance.
    """
    connections = getattr(http, 'connections', None)
    if not connections:
        return
    # Keys containing ':' ("scheme:authority") name live connections;
    # plain scheme keys name connection classes and must be preserved.
    for key in [k for k in connections if ':' in k]:
        del connections[key]
+
+
def RethrowExceptionHandler(*unused_args):
    """Retry handler that re-raises the exception currently being handled.

    Suitable as the retry_func for MakeRequest when the caller wants
    failures propagated instead of retried. Relies on being invoked from
    within an `except` block, where a bare raise re-raises the active
    exception.
    """
    # pylint: disable=misplaced-bare-raise
    raise
+
+
def HandleExceptionsAndRebuildHttpConnections(retry_args):
    """Exception handler for http failures.

    This catches known failures and rebuilds the underlying HTTP connections,
    then sleeps before returning so the caller can retry. Unknown exceptions
    are re-raised unchanged.

    Args:
      retry_args: An ExceptionRetryArgs tuple.

    Raises:
      The exception in retry_args.exc, when it is not a known-retryable type.
    """
    # If the server indicates how long to wait, use that value.  Otherwise,
    # calculate the wait time on our own.
    retry_after = None

    # Transport failures
    if isinstance(retry_args.exc, (http_client.BadStatusLine,
                                   http_client.IncompleteRead,
                                   http_client.ResponseNotReady)):
        logging.debug('Caught HTTP error %s, retrying: %s',
                      type(retry_args.exc).__name__, retry_args.exc)
    # gaierror and timeout are subclasses of socket.error, so they must be
    # checked *before* socket.error or their branches are unreachable.
    elif isinstance(retry_args.exc, socket.gaierror):
        logging.debug(
            'Caught socket address error, retrying: %s', retry_args.exc)
    elif isinstance(retry_args.exc, socket.timeout):
        logging.debug(
            'Caught socket timeout error, retrying: %s', retry_args.exc)
    elif isinstance(retry_args.exc, socket.error):
        logging.debug('Caught socket error, retrying: %s', retry_args.exc)
    elif isinstance(retry_args.exc, httplib2.ServerNotFoundError):
        logging.debug(
            'Caught server not found error, retrying: %s', retry_args.exc)
    elif isinstance(retry_args.exc, ValueError):
        # oauth2client tries to JSON-decode the response, which can result
        # in a ValueError if the response was invalid. Until that is fixed in
        # oauth2client, need to handle it here.
        logging.debug('Response content was invalid (%s), retrying',
                      retry_args.exc)
    elif (isinstance(retry_args.exc,
                     oauth2client.client.HttpAccessTokenRefreshError) and
          (retry_args.exc.status == TOO_MANY_REQUESTS or
           retry_args.exc.status >= 500)):
        logging.debug(
            'Caught transient credential refresh error (%s), retrying',
            retry_args.exc)
    elif isinstance(retry_args.exc, exceptions.RequestError):
        logging.debug('Request returned no response, retrying')
    # API-level failures
    elif isinstance(retry_args.exc, exceptions.BadStatusCodeError):
        logging.debug('Response returned status %s, retrying',
                      retry_args.exc.status_code)
    elif isinstance(retry_args.exc, exceptions.RetryAfterError):
        logging.debug('Response returned a retry-after header, retrying')
        retry_after = retry_args.exc.retry_after
    else:
        # Not a known-retryable failure; propagate to the caller.
        raise  # pylint: disable=misplaced-bare-raise
    RebuildHttpConnections(retry_args.http)
    logging.debug('Retrying request to url %s after exception %s',
                  retry_args.http_request.url, retry_args.exc)
    time.sleep(
        retry_after or util.CalculateWaitForRetry(
            retry_args.num_retries, max_wait=retry_args.max_retry_wait))
+
+
def MakeRequest(http, http_request, retries=7, max_retry_wait=60,
                redirections=5,
                retry_func=HandleExceptionsAndRebuildHttpConnections,
                check_response_func=CheckResponse):
    """Send http_request via the given http, performing error/retry handling.

    Args:
      http: An httplib2.Http instance, or a http multiplexer that delegates to
          an underlying http, for example, HTTPMultiplexer.
      http_request: A Request to send.
      retries: (int, default 7) Number of retries to attempt on retryable
          replies (such as 429 or 5XX).
      max_retry_wait: (int, default 60) Maximum number of seconds to wait
          when retrying.
      redirections: (int, default 5) Number of redirects to follow.
      retry_func: Function to handle retries on exceptions. Argument is an
          ExceptionRetryArgs tuple.
      check_response_func: Function to validate the HTTP response.
          Arguments are (Response, response content, url).

    Raises:
      InvalidDataFromServerError: if there is no response after retries.

    Returns:
      A Response object.

    """
    attempt = 0
    first_req_time = time.time()
    while True:
        try:
            return _MakeRequestNoRetry(
                http, http_request, redirections=redirections,
                check_response_func=check_response_func)
        # retry_func will consume the exception types it handles and raise.
        # pylint: disable=broad-except
        except Exception as e:
            attempt += 1
            if attempt >= retries:
                raise
            retry_func(ExceptionRetryArgs(
                http, http_request, e, attempt, max_retry_wait,
                time.time() - first_req_time))
+
+
def _MakeRequestNoRetry(http, http_request, redirections=5,
                        check_response_func=CheckResponse):
    """Send http_request via the given http.

    This wrapper exists to handle translation between the plain httplib2
    request/response types and the Request and Response types above.

    Args:
      http: An httplib2.Http instance, or a http multiplexer that delegates to
          an underlying http, for example, HTTPMultiplexer.
      http_request: A Request to send.
      redirections: (int, default 5) Number of redirects to follow.
      check_response_func: Function to validate the HTTP response.
          Arguments are (Response, response content, url).

    Returns:
      A Response object.

    Raises:
      RequestError if no response could be parsed.

    """
    connection_type = None
    # Handle overrides for connection types.  This is used if the caller
    # wants control over the underlying connection for managing callbacks
    # or hash digestion.
    if getattr(http, 'connections', None):
        url_scheme = parse.urlsplit(http_request.url).scheme
        if url_scheme and url_scheme in http.connections:
            # A plain-scheme key holds a connection *class* override (see
            # the key conventions described in RebuildHttpConnections).
            connection_type = http.connections[url_scheme]

    # Custom printing only at debuglevel 4
    new_debuglevel = 4 if httplib2.debuglevel == 4 else 0
    with _Httplib2Debuglevel(http_request, new_debuglevel, http=http):
        # str() on url/method -- presumably to normalize unicode values
        # for httplib2 on python2; TODO confirm.
        info, content = http.request(
            str(http_request.url), method=str(http_request.http_method),
            body=http_request.body, headers=http_request.headers,
            redirections=redirections, connection_type=connection_type)

    if info is None:
        # httplib2 produced nothing usable; surface as RequestError so the
        # retry machinery in MakeRequest can react.
        raise exceptions.RequestError()

    response = Response(info, content, http_request.url)
    # May raise (e.g. BadStatusCodeError / RetryAfterError) to drive retries.
    check_response_func(response)
    return response
+
+
# Registered factories consulted, in registration order, by GetHttp().
_HTTP_FACTORIES = []


def _RegisterHttpFactory(factory):
    """Register a callable(**kwds) that may produce an http client.

    A factory returning None is skipped by GetHttp.
    """
    _HTTP_FACTORIES.append(factory)
+
+
def GetHttp(**kwds):
    """Return an http client: the first registered factory's product,
    falling back to a plain httplib2.Http."""
    for factory in _HTTP_FACTORIES:
        candidate = factory(**kwds)
        if candidate is not None:
            return candidate
    return httplib2.Http(**kwds)
diff --git a/apitools/base/py/http_wrapper_test.py b/apitools/base/py/http_wrapper_test.py
new file mode 100644
index 0000000..5df107f
--- /dev/null
+++ b/apitools/base/py/http_wrapper_test.py
@@ -0,0 +1,87 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for http_wrapper."""
+import socket
+
+import httplib2
+import oauth2client
+from six.moves import http_client
+import unittest2
+
+from mock import patch
+
+from apitools.base.py import exceptions
+from apitools.base.py import http_wrapper
+
+
class _MockHttpRequest(object):

    """Minimal stand-in for http_wrapper.Request used by the retry tests."""

    # Read (for logging) by HandleExceptionsAndRebuildHttpConnections.
    url = None
+
+
class _MockHttpResponse(object):

    """Minimal stand-in for an http response carrying only a status."""

    def __init__(self, status_code):
        # Mirrors the httplib2 info-dict shape: {'status': <code>}.
        self.response = {'status': status_code}
+
+
class RaisesExceptionOnLen(object):

    """Supports length property but raises if __len__ is used."""

    def __len__(self):
        # Any len() call is a bug in the code under test (len() can't
        # exceed 4GiB in 32-bit python); fail loudly.
        raise Exception('len() called unnecessarily')

    @property
    def length(self):
        # A property (not a plain method) so that getattr(obj, 'length',
        # None) yields an int, matching real stream objects such as
        # StreamSlice -- previously this was a bare method and the getattr
        # returned a bound method instead of a size.
        return 1
+
+
class HttpWrapperTest(unittest2.TestCase):

    """Tests for Request body handling and the default retry handler."""

    def testRequestBodyUsesLengthProperty(self):
        # Constructing the Request must consult the .length attribute
        # rather than len(); RaisesExceptionOnLen would raise otherwise.
        http_wrapper.Request(body=RaisesExceptionOnLen())

    def testRequestBodyWithLen(self):
        # Plain strings have no .length attribute, so len() is used.
        http_wrapper.Request(body='burrito')

    def testDefaultExceptionHandler(self):
        """Ensures the default exception handler swallows (retries) errors."""
        mock_http_content = 'content'.encode('utf8')
        # Every exception type the handler documents as retryable; none
        # of these should propagate out of the handler.
        for exception_arg in (
                http_client.BadStatusLine('line'),
                http_client.IncompleteRead('partial'),
                http_client.ResponseNotReady(),
                socket.error(),
                socket.gaierror(),
                httplib2.ServerNotFoundError(),
                ValueError(),
                oauth2client.client.HttpAccessTokenRefreshError(status=503),
                exceptions.RequestError(),
                exceptions.BadStatusCodeError(
                    {'status': 503}, mock_http_content, 'url'),
                exceptions.RetryAfterError(
                    {'status': 429}, mock_http_content, 'url', 0)):

            retry_args = http_wrapper.ExceptionRetryArgs(
                http={'connections': {}}, http_request=_MockHttpRequest(),
                exc=exception_arg, num_retries=0, max_retry_wait=0,
                total_wait_sec=0)

            # Disable time.sleep for this handler as it is called with
            # a minimum value of 1 second.
            with patch('time.sleep', return_value=None):
                http_wrapper.HandleExceptionsAndRebuildHttpConnections(
                    retry_args)
diff --git a/apitools/base/py/list_pager.py b/apitools/base/py/list_pager.py
new file mode 100644
index 0000000..13b1cba
--- /dev/null
+++ b/apitools/base/py/list_pager.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A helper function that executes a series of List queries for many APIs."""
+
+from apitools.base.py import encoding
+
+__all__ = [
+    'YieldFromList',
+]
+
+
def YieldFromList(
        service, request, global_params=None, limit=None, batch_size=100,
        method='List', field='items', predicate=None,
        current_token_attribute='pageToken',
        next_token_attribute='nextPageToken',
        batch_size_attribute='maxResults'):
    """Make a series of List requests, keeping track of page tokens.

    Args:
      service: apitools_base.BaseApiService, A service with a .List() method.
      request: protorpc.messages.Message, The request message
          corresponding to the service's .List() method, with all the
          attributes populated except the .maxResults and .pageToken
          attributes.
      global_params: protorpc.messages.Message, The global query parameters to
           provide when calling the given method.
      limit: int, The maximum number of records to yield. None if all available
          records should be yielded.
      batch_size: int, The number of items to retrieve per request.
      method: str, The name of the method used to fetch resources.
      field: str, The field in the response that will be a list of items.
      predicate: lambda, A function that returns true for items to be yielded.
      current_token_attribute: str, The name of the attribute in a
          request message holding the page token for the page being
          requested.
      next_token_attribute: str, The name of the attribute in a
          response message holding the page token for the next page.
      batch_size_attribute: str, The name of the attribute in a
          response message holding the maximum number of results to be
          returned. None if caller-specified batch size is unsupported.

    Yields:
      protorpc.message.Message, The resources listed by the service.

    """
    # Work on a copy so the caller's request message is left untouched.
    request = encoding.CopyProtoMessage(request)
    if batch_size_attribute:
        setattr(request, batch_size_attribute, batch_size)
    setattr(request, current_token_attribute, None)
    # Note: limit=0 (or False) yields nothing; limit=None means unbounded.
    while limit is None or limit:
        response = getattr(service, method)(request,
                                            global_params=global_params)
        items = getattr(response, field)
        if predicate:
            items = [item for item in items if predicate(item)]
        for item in items:
            yield item
            if limit is not None:
                limit -= 1
                if limit == 0:
                    return
        next_token = getattr(response, next_token_attribute)
        if not next_token:
            # Last page reached.
            return
        setattr(request, current_token_attribute, next_token)
diff --git a/apitools/base/py/list_pager_test.py b/apitools/base/py/list_pager_test.py
new file mode 100644
index 0000000..3aafede
--- /dev/null
+++ b/apitools/base/py/list_pager_test.py
@@ -0,0 +1,246 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for list_pager."""
+
+import unittest2
+
+from apitools.base.py import list_pager
+from apitools.base.py.testing import mock
+from samples.fusiontables_sample.fusiontables_v1 \
+    import fusiontables_v1_client as fusiontables
+from samples.fusiontables_sample.fusiontables_v1 \
+    import fusiontables_v1_messages as messages
+from samples.iam_sample.iam_v1 import iam_v1_client as iam_client
+from samples.iam_sample.iam_v1 import iam_v1_messages as iam_messages
+
+
class ListPagerTest(unittest2.TestCase):

    """Tests for YieldFromList against a mocked fusiontables client."""

    def _AssertInstanceSequence(self, results, n):
        # Consumes the generator, checking items are named 'c0', 'c1', ...
        # in order and that exactly n items were yielded.
        counter = 0
        for instance in results:
            self.assertEqual(instance.name, 'c' + str(counter))
            counter += 1

        self.assertEqual(counter, n)

    def setUp(self):
        # Each test declares the exact sequence of List calls it expects
        # on this mocked client via .Expect().
        self.mocked_client = mock.Client(fusiontables.FusiontablesV1)
        self.mocked_client.Mock()
        self.addCleanup(self.mocked_client.Unmock)

    def testYieldFromList(self):
        # Two full pages (4 + 4 items); the pager must follow nextPageToken.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0'),
                    messages.Column(name='c1'),
                    messages.Column(name='c2'),
                    messages.Column(name='c3'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken='x',
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c4'),
                    messages.Column(name='c5'),
                    messages.Column(name='c6'),
                    messages.Column(name='c7'),
                ],
            ))

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')
        results = list_pager.YieldFromList(client.column, request)

        self._AssertInstanceSequence(results, 8)

    def testYieldNoRecords(self):
        # limit=False yields nothing -- no List call is even expected.
        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')
        results = list_pager.YieldFromList(client.column, request, limit=False)
        self.assertEqual(0, len(list(results)))

    def testYieldFromListPartial(self):
        # limit=6 stops iteration midway through the second page.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0'),
                    messages.Column(name='c1'),
                    messages.Column(name='c2'),
                    messages.Column(name='c3'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken='x',
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c4'),
                    messages.Column(name='c5'),
                    messages.Column(name='c6'),
                    messages.Column(name='c7'),
                ],
            ))

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')
        results = list_pager.YieldFromList(client.column, request, limit=6)

        self._AssertInstanceSequence(results, 6)

    def testYieldFromListEmpty(self):
        # A response with no items ends iteration immediately.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList())

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')
        results = list_pager.YieldFromList(client.column, request, limit=6)

        self._AssertInstanceSequence(results, 0)

    def testYieldFromListWithPredicate(self):
        # Only items whose name contains 'c' should be yielded.
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken=None,
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c0'),
                    messages.Column(name='bad0'),
                    messages.Column(name='c1'),
                    messages.Column(name='bad1'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.column.List.Expect(
            messages.FusiontablesColumnListRequest(
                maxResults=100,
                pageToken='x',
                tableId='mytable',
            ),
            messages.ColumnList(
                items=[
                    messages.Column(name='c2'),
                ],
            ))

        client = fusiontables.FusiontablesV1(get_credentials=False)
        request = messages.FusiontablesColumnListRequest(tableId='mytable')
        results = list_pager.YieldFromList(
            client.column, request, predicate=lambda x: 'c' in x.name)

        self._AssertInstanceSequence(results, 3)
+
+
class ListPagerAttributeTest(unittest2.TestCase):

    """Tests for YieldFromList's configurable attribute names."""

    def setUp(self):
        # Mock the IAM client, whose list-style method uses pageSize
        # instead of the default maxResults attribute.
        self.mocked_client = mock.Client(iam_client.IamV1)
        self.mocked_client.Mock()
        self.addCleanup(self.mocked_client.Unmock)

    def testYieldFromListWithAttributes(self):
        """Paging works with a non-default batch_size_attribute."""
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageSize=100,
                pageToken=None,
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c0'),
                    iam_messages.PolicyDetail(fullResourcePath='c1'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageSize=100,
                pageToken='x',
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c2'),
                ],
            ))

        client = iam_client.IamV1(get_credentials=False)
        request = iam_messages.GetPolicyDetailsRequest(
            fullResourcePath='myresource')
        results = list_pager.YieldFromList(
            client.iamPolicies, request,
            batch_size_attribute='pageSize',
            method='GetPolicyDetails', field='policies')

        i = 0
        for i, instance in enumerate(results):
            # assertEqual, not the deprecated assertEquals alias.
            self.assertEqual('c{0}'.format(i), instance.fullResourcePath)
        self.assertEqual(2, i)

    def testYieldFromListWithNoBatchSizeAttribute(self):
        """batch_size_attribute=None omits the page-size field entirely."""
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageToken=None,
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c0'),
                    iam_messages.PolicyDetail(fullResourcePath='c1'),
                ],
            ))

        client = iam_client.IamV1(get_credentials=False)
        request = iam_messages.GetPolicyDetailsRequest(
            fullResourcePath='myresource')
        results = list_pager.YieldFromList(
            client.iamPolicies, request,
            batch_size_attribute=None,
            method='GetPolicyDetails', field='policies')

        i = 0
        for i, instance in enumerate(results):
            self.assertEqual('c{0}'.format(i), instance.fullResourcePath)
        self.assertEqual(1, i)
diff --git a/apitools/base/py/stream_slice.py b/apitools/base/py/stream_slice.py
new file mode 100644
index 0000000..8574be8
--- /dev/null
+++ b/apitools/base/py/stream_slice.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Small helper class to provide a small slice of a stream."""
+
+from apitools.base.py import exceptions
+
+
+class StreamSlice(object):
+
+    """Provides a slice-like object for streams."""
+
+    def __init__(self, stream, max_bytes):
+        self.__stream = stream
+        self.__remaining_bytes = max_bytes
+        self.__max_bytes = max_bytes
+
+    def __str__(self):
+        return 'Slice of stream %s with %s/%s bytes not yet read' % (
+            self.__stream, self.__remaining_bytes, self.__max_bytes)
+
+    def __len__(self):
+        return self.__max_bytes
+
+    def __nonzero__(self):
+        # For 32-bit python2.x, len() cannot exceed a 32-bit number; avoid
+        # accidental len() calls from httplib in the form of "if this_object:".
+        return bool(self.__max_bytes)
+
+    @property
+    def length(self):
+        # For 32-bit python2.x, len() cannot exceed a 32-bit number.
+        return self.__max_bytes
+
+    def read(self, size=None):  # pylint: disable=missing-docstring
+        """Read at most size bytes from this slice.
+
+        Compared to other streams, there is one case where we may
+        unexpectedly raise an exception on read: if the underlying stream
+        is exhausted (i.e. returns no bytes on read), and the size of this
+        slice indicates we should still be able to read more bytes, we
+        raise exceptions.StreamExhausted.
+
+        Args:
+          size: If provided, read no more than size bytes from the stream.
+
+        Returns:
+          The bytes read from this slice.
+
+        Raises:
+          exceptions.StreamExhausted
+
+        """
+        if size is not None:
+            read_size = min(size, self.__remaining_bytes)
+        else:
+            read_size = self.__remaining_bytes
+        data = self.__stream.read(read_size)
+        if read_size > 0 and not data:
+            raise exceptions.StreamExhausted(
+                'Not enough bytes in stream; expected %d, exhausted '
+                'after %d' % (
+                    self.__max_bytes,
+                    self.__max_bytes - self.__remaining_bytes))
+        self.__remaining_bytes -= len(data)
+        return data
diff --git a/apitools/base/py/stream_slice_test.py b/apitools/base/py/stream_slice_test.py
new file mode 100644
index 0000000..4d5cdfb
--- /dev/null
+++ b/apitools/base/py/stream_slice_test.py
@@ -0,0 +1,65 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for stream_slice."""
+
+import string
+
+import six
+import unittest2
+
+from apitools.base.py import exceptions
+from apitools.base.py import stream_slice
+
+
+class StreamSliceTest(unittest2.TestCase):
+
+    def setUp(self):
+        self.stream = six.StringIO(string.ascii_letters)
+        self.value = self.stream.getvalue()
+        self.stream.seek(0)
+
+    def testSimpleSlice(self):
+        ss = stream_slice.StreamSlice(self.stream, 10)
+        self.assertEqual('', ss.read(0))
+        self.assertEqual(self.value[0:3], ss.read(3))
+        self.assertIn('7/10', str(ss))
+        self.assertEqual(self.value[3:10], ss.read())
+        self.assertEqual('', ss.read())
+        self.assertEqual('', ss.read(10))
+        self.assertEqual(10, self.stream.tell())
+
+    def testEmptySlice(self):
+        ss = stream_slice.StreamSlice(self.stream, 0)
+        self.assertEqual('', ss.read(5))
+        self.assertEqual('', ss.read())
+        self.assertEqual(0, self.stream.tell())
+
+    def testOffsetStream(self):
+        self.stream.seek(26)
+        ss = stream_slice.StreamSlice(self.stream, 26)
+        self.assertEqual(self.value[26:36], ss.read(10))
+        self.assertEqual(self.value[36:], ss.read())
+        self.assertEqual('', ss.read())
+
+    def testTooShortStream(self):
+        ss = stream_slice.StreamSlice(self.stream, 1000)
+        self.assertEqual(self.value, ss.read())
+        self.assertEqual('', ss.read(0))
+        with self.assertRaises(exceptions.StreamExhausted) as e:
+            ss.read()
+        with self.assertRaises(exceptions.StreamExhausted) as e:
+            ss.read(10)
+        self.assertIn('exhausted after %d' % len(self.value), str(e.exception))
diff --git a/apitools/base/py/testing/__init__.py b/apitools/base/py/testing/__init__.py
new file mode 100644
index 0000000..d47d726
--- /dev/null
+++ b/apitools/base/py/testing/__init__.py
@@ -0,0 +1,16 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Package marker file."""
diff --git a/apitools/base/py/testing/mock.py b/apitools/base/py/testing/mock.py
new file mode 100644
index 0000000..89adca5
--- /dev/null
+++ b/apitools/base/py/testing/mock.py
@@ -0,0 +1,363 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""The mock module allows easy mocking of apitools clients.
+
+This module allows you to mock out the constructor of a particular apitools
+client, for a specific API and version. Then, when the client is created, it
+will be run against an expected session that you define. This way code that is
+not aware of the testing framework can construct new clients as normal, as long
+as it's all done within the context of a mock.
+"""
+
+import difflib
+import sys
+
+import six
+
+from apitools.base.protorpclite import messages
+from apitools.base.py import base_api
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+
+
+class Error(Exception):
+
+    """Exceptions for this module."""
+
+
+def _MessagesEqual(msg1, msg2):
+    """Compare two protorpc messages for equality.
+
+    Using python's == operator does not work in all cases, specifically when
+    there is a list involved.
+
+    Args:
+      msg1: protorpc.messages.Message or [protorpc.messages.Message] or number
+          or string, One of the messages to compare.
+      msg2: protorpc.messages.Message or [protorpc.messages.Message] or number
+          or string, One of the messages to compare.
+
+    Returns:
+      True if the messages are isomorphic, False otherwise.
+    """
+    if isinstance(msg1, list) and isinstance(msg2, list):
+        if len(msg1) != len(msg2):
+            return False
+        return all(_MessagesEqual(x, y) for x, y in zip(msg1, msg2))
+
+    if (not isinstance(msg1, messages.Message) or
+            not isinstance(msg2, messages.Message)):
+        return msg1 == msg2
+    for field in msg1.all_fields():
+        field1 = getattr(msg1, field.name)
+        field2 = getattr(msg2, field.name)
+        if not _MessagesEqual(field1, field2):
+            return False
+    return True
+
+
+class UnexpectedRequestException(Error):
+
+    def __init__(self, received_call, expected_call):
+        expected_key, expected_request = expected_call
+        received_key, received_request = received_call
+
+        expected_repr = encoding.MessageToRepr(
+            expected_request, multiline=True)
+        received_repr = encoding.MessageToRepr(
+            received_request, multiline=True)
+
+        expected_lines = expected_repr.splitlines()
+        received_lines = received_repr.splitlines()
+
+        diff_lines = difflib.unified_diff(expected_lines, received_lines)
+        diff = '\n'.join(diff_lines)
+
+        if expected_key != received_key:
+            msg = '\n'.join((
+                'expected: {expected_key}({expected_request})',
+                'received: {received_key}({received_request})',
+                '',
+            )).format(
+                expected_key=expected_key,
+                expected_request=expected_repr,
+                received_key=received_key,
+                received_request=received_repr)
+            super(UnexpectedRequestException, self).__init__(msg)
+        else:
+            msg = '\n'.join((
+                'for request to {key},',
+                'expected: {expected_request}',
+                'received: {received_request}',
+                'diff: {diff}',
+                '',
+            )).format(
+                key=expected_key,
+                expected_request=expected_repr,
+                received_request=received_repr,
+                diff=diff)
+            super(UnexpectedRequestException, self).__init__(msg)
+
+
+class ExpectedRequestsException(Error):
+
+    def __init__(self, expected_calls):
+        msg = 'expected:\n'
+        for (key, request) in expected_calls:
+            msg += '{key}({request})\n'.format(
+                key=key,
+                request=encoding.MessageToRepr(request, multiline=True))
+        super(ExpectedRequestsException, self).__init__(msg)
+
+
+class _ExpectedRequestResponse(object):
+
+    """Encapsulation of an expected request and corresponding response."""
+
+    def __init__(self, key, request, response=None, exception=None):
+        self.__key = key
+        self.__request = request
+
+        if response and exception:
+            raise exceptions.ConfigurationValueError(
+                'Should specify at most one of response and exception')
+        if response and isinstance(response, exceptions.Error):
+            raise exceptions.ConfigurationValueError(
+                'Responses should not be an instance of Error')
+        if exception and not isinstance(exception, exceptions.Error):
+            raise exceptions.ConfigurationValueError(
+                'Exceptions must be instances of Error')
+
+        self.__response = response
+        self.__exception = exception
+
+    @property
+    def key(self):
+        return self.__key
+
+    @property
+    def request(self):
+        return self.__request
+
+    def ValidateAndRespond(self, key, request):
+        """Validate that key and request match expectations, and respond if so.
+
+        Args:
+          key: str, Actual key to compare against expectations.
+          request: protorpc.messages.Message or [protorpc.messages.Message]
+            or number or string, Actual request to compare against expectations.
+
+        Raises:
+          UnexpectedRequestException: If key or request don't match
+              expectations.
+          apitools_base.Error: If a non-None exception is specified to
+              be thrown.
+
+        Returns:
+          The response that was specified to be returned.
+
+        """
+        if key != self.__key or not _MessagesEqual(request, self.__request):
+            raise UnexpectedRequestException((key, request),
+                                             (self.__key, self.__request))
+
+        if self.__exception:
+            # Can only throw apitools_base.Error.
+            raise self.__exception  # pylint: disable=raising-bad-type
+
+        return self.__response
+
+
+class _MockedMethod(object):
+
+    """A mocked API service method."""
+
+    def __init__(self, key, mocked_client, real_method):
+        self.__name__ = real_method.__name__
+        self.__key = key
+        self.__mocked_client = mocked_client
+        self.__real_method = real_method
+        self.method_config = real_method.method_config
+
+    def Expect(self, request, response=None, exception=None, **unused_kwargs):
+        """Add an expectation on the mocked method.
+
+        Exactly one of response and exception should be specified.
+
+        Args:
+          request: The request that should be expected
+          response: The response that should be returned or None if
+              exception is provided.
+          exception: An exception that should be thrown, or None.
+
+        """
+        # TODO(jasmuth): the unused_kwargs provides a placeholder for
+        # future things that can be passed to Expect(), like special
+        # params to the method call.
+
+        # pylint: disable=protected-access
+        # Class in same module.
+        self.__mocked_client._request_responses.append(
+            _ExpectedRequestResponse(self.__key,
+                                     request,
+                                     response=response,
+                                     exception=exception))
+        # pylint: enable=protected-access
+
+    def __call__(self, request, **unused_kwargs):
+        # TODO(jasmuth): allow the testing code to expect certain
+        # values in these currently unused_kwargs, especially the
+        # upload parameter used by media-heavy services like bigquery
+        # or bigstore.
+
+        # pylint: disable=protected-access
+        # Class in same module.
+        if self.__mocked_client._request_responses:
+            request_response = self.__mocked_client._request_responses.pop(0)
+        else:
+            raise UnexpectedRequestException(
+                (self.__key, request), (None, None))
+        # pylint: enable=protected-access
+
+        response = request_response.ValidateAndRespond(self.__key, request)
+
+        if response is None and self.__real_method:
+            response = self.__real_method(request)
+            print(encoding.MessageToRepr(
+                response, multiline=True, shortstrings=True))
+            return response
+
+        return response
+
+
+def _MakeMockedService(api_name, collection_name,
+                       mock_client, service, real_service):
+    class MockedService(base_api.BaseApiService):
+        pass
+
+    for method in service.GetMethodsList():
+        real_method = None
+        if real_service:
+            real_method = getattr(real_service, method)
+        setattr(MockedService,
+                method,
+                _MockedMethod(api_name + '.' + collection_name + '.' + method,
+                              mock_client,
+                              real_method))
+    return MockedService
+
+
+class Client(object):
+
+    """Mock an apitools client."""
+
+    def __init__(self, client_class, real_client=None):
+        """Mock an apitools API, given its class.
+
+        Args:
+          client_class: The class for the API. eg, if you
+                from apis.sqladmin import v1beta3
+              then you can pass v1beta3.SqladminV1beta3 to this class
+              and anything within its context will use your mocked
+              version.
+          real_client: apitools Client, The client to make requests
+              against when the expected response is None.
+
+        """
+
+        if not real_client:
+            real_client = client_class(get_credentials=False)
+
+        self.__orig_class = self.__class__
+        self.__client_class = client_class
+        self.__real_service_classes = {}
+        self.__real_client = real_client
+
+        self._request_responses = []
+        self.__real_include_fields = None
+
+    def __enter__(self):
+        return self.Mock()
+
+    def Mock(self):
+        """Stub out the client class with mocked services."""
+        client = self.__real_client or self.__client_class(
+            get_credentials=False)
+
+        class Patched(self.__class__, self.__client_class):
+            pass
+        self.__class__ = Patched
+
+        for name in dir(self.__client_class):
+            service_class = getattr(self.__client_class, name)
+            if not isinstance(service_class, type):
+                continue
+            if not issubclass(service_class, base_api.BaseApiService):
+                continue
+            self.__real_service_classes[name] = service_class
+            # pylint: disable=protected-access
+            collection_name = service_class._NAME
+            # pylint: enable=protected-access
+            api_name = '%s_%s' % (self.__client_class._PACKAGE,
+                                  self.__client_class._URL_VERSION)
+            mocked_service_class = _MakeMockedService(
+                api_name, collection_name, self,
+                service_class,
+                service_class(client) if self.__real_client else None)
+
+            setattr(self.__client_class, name, mocked_service_class)
+
+            setattr(self, collection_name, mocked_service_class(self))
+
+        self.__real_include_fields = self.__client_class.IncludeFields
+        self.__client_class.IncludeFields = self.IncludeFields
+
+        # pylint: disable=attribute-defined-outside-init
+        self._url = client._url
+        self._http = client._http
+
+        return self
+
+    def __exit__(self, exc_type, value, traceback):
+        is_active_exception = value is not None
+        self.Unmock(suppress=is_active_exception)
+        if is_active_exception:
+            six.reraise(exc_type, value, traceback)
+        return True
+
+    def Unmock(self, suppress=False):
+        self.__class__ = self.__orig_class
+        for name, service_class in self.__real_service_classes.items():
+            setattr(self.__client_class, name, service_class)
+            delattr(self, service_class._NAME)
+        self.__real_service_classes = {}
+        del self._url
+        del self._http
+
+        self.__client_class.IncludeFields = self.__real_include_fields
+        self.__real_include_fields = None
+
+        requests = [(rq_rs.key, rq_rs.request)
+                    for rq_rs in self._request_responses]
+        self._request_responses = []
+
+        if requests and not suppress and sys.exc_info()[1] is None:
+            raise ExpectedRequestsException(requests)
+
+    def IncludeFields(self, include_fields):
+        if self.__real_client:
+            return self.__real_include_fields(self.__real_client,
+                                              include_fields)
diff --git a/apitools/base/py/testing/mock_test.py b/apitools/base/py/testing/mock_test.py
new file mode 100644
index 0000000..d295f21
--- /dev/null
+++ b/apitools/base/py/testing/mock_test.py
@@ -0,0 +1,224 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for apitools.base.py.testing.mock."""
+
+import httplib2
+import unittest2
+import six
+
+from apitools.base.protorpclite import messages
+
+import apitools.base.py as apitools_base
+from apitools.base.py.testing import mock
+from samples.fusiontables_sample.fusiontables_v1 import \
+    fusiontables_v1_client as fusiontables
+from samples.fusiontables_sample.fusiontables_v1 import \
+    fusiontables_v1_messages as fusiontables_messages
+
+
+def _GetApiServices(api_client_class):
+    return dict(
+        (name, potential_service)
+        for name, potential_service in six.iteritems(api_client_class.__dict__)
+        if (isinstance(potential_service, type) and
+            issubclass(potential_service, apitools_base.BaseApiService)))
+
+
+class CustomException(Exception):
+    pass
+
+
+class MockTest(unittest2.TestCase):
+
+    def testMockFusionBasic(self):
+        with mock.Client(fusiontables.FusiontablesV1) as client_class:
+            client_class.column.List.Expect(request=1, response=2)
+            client = fusiontables.FusiontablesV1(get_credentials=False)
+            self.assertEqual(client.column.List(1), 2)
+            with self.assertRaises(mock.UnexpectedRequestException):
+                client.column.List(3)
+
+    def testMockFusionException(self):
+        with mock.Client(fusiontables.FusiontablesV1) as client_class:
+            client_class.column.List.Expect(
+                request=1,
+                exception=apitools_base.HttpError({'status': 404}, '', ''))
+            client = fusiontables.FusiontablesV1(get_credentials=False)
+            with self.assertRaises(apitools_base.HttpError):
+                client.column.List(1)
+
+    def testMockIfAnotherException(self):
+        with self.assertRaises(CustomException):
+            with mock.Client(fusiontables.FusiontablesV1) as client_class:
+                client_class.column.List.Expect(request=1, response=2)
+                raise CustomException('Something when wrong')
+
+    def testMockFusionOrder(self):
+        with mock.Client(fusiontables.FusiontablesV1) as client_class:
+            client_class.column.List.Expect(request=1, response=2)
+            client_class.column.List.Expect(request=2, response=1)
+            client = fusiontables.FusiontablesV1(get_credentials=False)
+            self.assertEqual(client.column.List(1), 2)
+            self.assertEqual(client.column.List(2), 1)
+
+    def testMockFusionWrongOrder(self):
+        with mock.Client(fusiontables.FusiontablesV1) as client_class:
+            client_class.column.List.Expect(request=1, response=2)
+            client_class.column.List.Expect(request=2, response=1)
+            client = fusiontables.FusiontablesV1(get_credentials=False)
+            with self.assertRaises(mock.UnexpectedRequestException):
+                self.assertEqual(client.column.List(2), 1)
+            with self.assertRaises(mock.UnexpectedRequestException):
+                self.assertEqual(client.column.List(1), 2)
+
+    def testMockFusionTooMany(self):
+        with mock.Client(fusiontables.FusiontablesV1) as client_class:
+            client_class.column.List.Expect(request=1, response=2)
+            client = fusiontables.FusiontablesV1(get_credentials=False)
+            self.assertEqual(client.column.List(1), 2)
+            with self.assertRaises(mock.UnexpectedRequestException):
+                self.assertEqual(client.column.List(2), 1)
+
+    def testMockFusionTooFew(self):
+        with self.assertRaises(mock.ExpectedRequestsException):
+            with mock.Client(fusiontables.FusiontablesV1) as client_class:
+                client_class.column.List.Expect(request=1, response=2)
+                client_class.column.List.Expect(request=2, response=1)
+                client = fusiontables.FusiontablesV1(get_credentials=False)
+                self.assertEqual(client.column.List(1), 2)
+
+    def testFusionUnmock(self):
+        with mock.Client(fusiontables.FusiontablesV1):
+            client = fusiontables.FusiontablesV1(get_credentials=False)
+            mocked_service_type = type(client.column)
+        client = fusiontables.FusiontablesV1(get_credentials=False)
+        self.assertNotEqual(type(client.column), mocked_service_type)
+
+    def testClientUnmock(self):
+        mock_client = mock.Client(fusiontables.FusiontablesV1)
+        self.assertFalse(isinstance(mock_client, fusiontables.FusiontablesV1))
+        attributes = set(mock_client.__dict__.keys())
+        mock_client = mock_client.Mock()
+        self.assertTrue(isinstance(mock_client, fusiontables.FusiontablesV1))
+        self.assertTrue(set(mock_client.__dict__.keys()) - attributes)
+        mock_client.Unmock()
+        self.assertFalse(isinstance(mock_client, fusiontables.FusiontablesV1))
+        self.assertEqual(attributes, set(mock_client.__dict__.keys()))
+
+    def testMockHasMessagesModule(self):
+        with mock.Client(fusiontables.FusiontablesV1) as mock_client:
+            self.assertEquals(fusiontables_messages,
+                              mock_client.MESSAGES_MODULE)
+
+    def testMockHasUrlProperty(self):
+        with mock.Client(fusiontables.FusiontablesV1) as mock_client:
+            self.assertEquals(fusiontables.FusiontablesV1.BASE_URL,
+                              mock_client.url)
+        self.assertFalse(hasattr(mock_client, 'url'))
+
+    def testMockHasOverrideUrlProperty(self):
+        real_client = fusiontables.FusiontablesV1(url='http://localhost:8080',
+                                                  get_credentials=False)
+        with mock.Client(fusiontables.FusiontablesV1,
+                         real_client) as mock_client:
+            self.assertEquals('http://localhost:8080/', mock_client.url)
+
+    def testMockHasHttpProperty(self):
+        with mock.Client(fusiontables.FusiontablesV1) as mock_client:
+            self.assertIsInstance(mock_client.http, httplib2.Http)
+        self.assertFalse(hasattr(mock_client, 'http'))
+
+    def testMockHasOverrideHttpProperty(self):
+        real_client = fusiontables.FusiontablesV1(url='http://localhost:8080',
+                                                  http='SomeHttpObject',
+                                                  get_credentials=False)
+        with mock.Client(fusiontables.FusiontablesV1,
+                         real_client) as mock_client:
+            self.assertEquals('SomeHttpObject', mock_client.http)
+
+    def testMockPreservesServiceMethods(self):
+        services = _GetApiServices(fusiontables.FusiontablesV1)
+        with mock.Client(fusiontables.FusiontablesV1):
+            mocked_services = _GetApiServices(fusiontables.FusiontablesV1)
+            self.assertEquals(services.keys(), mocked_services.keys())
+            for name, service in six.iteritems(services):
+                mocked_service = mocked_services[name]
+                methods = service.GetMethodsList()
+                for method in methods:
+                    mocked_method = getattr(mocked_service, method)
+                    mocked_method_config = mocked_method.method_config()
+                    method_config = getattr(service, method).method_config()
+                    self.assertEquals(method_config, mocked_method_config)
+
+
+class _NestedMessage(messages.Message):
+    nested = messages.StringField(1)
+
+
+class _NestedListMessage(messages.Message):
+    nested_list = messages.MessageField(_NestedMessage, 1, repeated=True)
+
+
+class _NestedNestedMessage(messages.Message):
+    nested = messages.MessageField(_NestedMessage, 1)
+
+
+class UtilTest(unittest2.TestCase):
+
+    def testMessagesEqual(self):
+        self.assertFalse(mock._MessagesEqual(
+            _NestedNestedMessage(
+                nested=_NestedMessage(
+                    nested='foo')),
+            _NestedNestedMessage(
+                nested=_NestedMessage(
+                    nested='bar'))))
+
+        self.assertTrue(mock._MessagesEqual(
+            _NestedNestedMessage(
+                nested=_NestedMessage(
+                    nested='foo')),
+            _NestedNestedMessage(
+                nested=_NestedMessage(
+                    nested='foo'))))
+
+    def testListedMessagesEqual(self):
+        self.assertTrue(mock._MessagesEqual(
+            _NestedListMessage(
+                nested_list=[_NestedMessage(nested='foo')]),
+            _NestedListMessage(
+                nested_list=[_NestedMessage(nested='foo')])))
+
+        self.assertTrue(mock._MessagesEqual(
+            _NestedListMessage(
+                nested_list=[_NestedMessage(nested='foo'),
+                             _NestedMessage(nested='foo2')]),
+            _NestedListMessage(
+                nested_list=[_NestedMessage(nested='foo'),
+                             _NestedMessage(nested='foo2')])))
+
+        self.assertFalse(mock._MessagesEqual(
+            _NestedListMessage(
+                nested_list=[_NestedMessage(nested='foo')]),
+            _NestedListMessage(
+                nested_list=[_NestedMessage(nested='bar')])))
+
+        self.assertFalse(mock._MessagesEqual(
+            _NestedListMessage(
+                nested_list=[_NestedMessage(nested='foo')]),
+            _NestedListMessage(
+                nested_list=[_NestedMessage(nested='foo'),
+                             _NestedMessage(nested='foo')])))
diff --git a/apitools/base/py/transfer.py b/apitools/base/py/transfer.py
new file mode 100644
index 0000000..9fb63a8
--- /dev/null
+++ b/apitools/base/py/transfer.py
@@ -0,0 +1,1020 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Upload and download support for apitools."""
+from __future__ import print_function
+
+import email.generator as email_generator
+import email.mime.multipart as mime_multipart
+import email.mime.nonmultipart as mime_nonmultipart
+import io
+import json
+import mimetypes
+import os
+import threading
+
+import six
+from six.moves import http_client
+
+from apitools.base.py import buffered_stream
+from apitools.base.py import exceptions
+from apitools.base.py import http_wrapper
+from apitools.base.py import stream_slice
+from apitools.base.py import util
+
# Public interface of this module.
__all__ = [
    'Download',
    'Upload',
    'RESUMABLE_UPLOAD',
    'SIMPLE_UPLOAD',
    'DownloadProgressPrinter',
    'DownloadCompletePrinter',
    'UploadProgressPrinter',
    'UploadCompletePrinter',
]

# Uploads larger than this many bytes (5 MiB) default to the resumable
# strategy (see Upload.__SetDefaultUploadStrategy).
_RESUMABLE_UPLOAD_THRESHOLD = 5 << 20
# Valid values for Upload.strategy.
SIMPLE_UPLOAD = 'simple'
RESUMABLE_UPLOAD = 'resumable'
+
+
def DownloadProgressPrinter(response, unused_download):
    """Print how much of the download has arrived so far.

    Prefers the server-supplied content-range header; when the response
    carries none, falls back to the byte count of this response.
    """
    if 'content-range' not in response.info:
        print('Received %d bytes' % response.length)
    else:
        print('Received %s' % response.info['content-range'])
+
+
def DownloadCompletePrinter(unused_response, unused_download):
    """Announce that the download has finished."""
    message = 'Download complete'
    print(message)
+
+
def UploadProgressPrinter(response, unused_upload):
    """Print upload progress as the byte range acknowledged by the server."""
    acknowledged_range = response.info['range']
    print('Sent %s' % acknowledged_range)
+
+
def UploadCompletePrinter(unused_response, unused_upload):
    """Announce that the upload has finished."""
    message = 'Upload complete'
    print(message)
+
+
class _Transfer(object):

    """State and behavior shared by Upload and Download objects."""

    def __init__(self, stream, close_stream=False, chunksize=None,
                 auto_transfer=True, http=None, num_retries=5):
        # Private state; the public view is exposed through properties.
        self.__bytes_http = None
        self.__close_stream = close_stream
        self.__http = http
        self.__stream = stream
        self.__url = None

        # Seed with a safe default, then route the caller's value through
        # the num_retries property so it gets validated.
        self.__num_retries = 5
        self.num_retries = num_retries

        self.retry_func = (
            http_wrapper.HandleExceptionsAndRebuildHttpConnections)
        self.auto_transfer = auto_transfer
        self.chunksize = chunksize if chunksize else 1048576

    def __repr__(self):
        return str(self)

    @property
    def close_stream(self):
        """Whether this transfer owns (and should close) its stream."""
        return self.__close_stream

    @property
    def http(self):
        """The httplib2.Http instance for this transfer, if set."""
        return self.__http

    @property
    def bytes_http(self):
        """The http client used for byte transfers; defaults to self.http."""
        return self.__bytes_http if self.__bytes_http else self.http

    @bytes_http.setter
    def bytes_http(self, value):
        self.__bytes_http = value

    @property
    def num_retries(self):
        """How many times a failed request will be retried."""
        return self.__num_retries

    @num_retries.setter
    def num_retries(self, value):
        # Must be a non-negative integer.
        util.Typecheck(value, six.integer_types)
        if value < 0:
            raise exceptions.InvalidDataError(
                'Cannot have negative value for num_retries')
        self.__num_retries = value

    @property
    def stream(self):
        """The byte stream this transfer reads from or writes to."""
        return self.__stream

    @property
    def url(self):
        """The url for this transfer, once initialized."""
        return self.__url

    def _Initialize(self, http, url):
        """Record the http client and url, marking self initialized.

        We want the user to be able to override self.http by having set
        the value in the constructor; in that case, we ignore the provided
        http.

        Args:
          http: An httplib2.Http instance or None.
          url: The url for this transfer.

        Returns:
          None. Initializes self.
        """
        self.EnsureUninitialized()
        if self.http is None:
            self.__http = http or http_wrapper.GetHttp()
        self.__url = url

    @property
    def initialized(self):
        """True once both a url and an http client have been set."""
        return self.url is not None and self.http is not None

    @property
    def _type_name(self):
        """The concrete subclass name, for use in error messages."""
        return type(self).__name__

    def EnsureInitialized(self):
        """Raise TransferInvalidError unless this transfer is initialized."""
        if not self.initialized:
            raise exceptions.TransferInvalidError(
                'Cannot use uninitialized %s', self._type_name)

    def EnsureUninitialized(self):
        """Raise TransferInvalidError if this transfer is initialized."""
        if self.initialized:
            raise exceptions.TransferInvalidError(
                'Cannot re-initialize %s', self._type_name)

    def __del__(self):
        # Only close streams this transfer was told it owns.
        if self.__close_stream:
            self.__stream.close()

    def _ExecuteCallback(self, callback, response):
        """Run callback(response, self) on its own thread, if one was given."""
        # TODO(craigcitro): Push these into a queue.
        if callback is not None:
            threading.Thread(target=callback, args=(response, self)).start()
+
+
class Download(_Transfer):

    """Data for a single download.

    Public attributes:
      chunksize: default chunksize to use for transfers.
    """
    # Status codes a chunk request may legitimately return.
    _ACCEPTABLE_STATUSES = set((
        http_client.OK,
        http_client.NO_CONTENT,
        http_client.PARTIAL_CONTENT,
        http_client.REQUESTED_RANGE_NOT_SATISFIABLE,
    ))
    # Keys required in serialized download state (see FromData).
    _REQUIRED_SERIALIZATION_KEYS = set((
        'auto_transfer', 'progress', 'total_size', 'url'))

    def __init__(self, stream, progress_callback=None, finish_callback=None,
                 **kwds):
        total_size = kwds.pop('total_size', None)
        super(Download, self).__init__(stream, **kwds)
        self.__initial_response = None
        self.__progress = 0
        self.__total_size = total_size
        self.__encoding = None

        self.progress_callback = progress_callback
        self.finish_callback = finish_callback

    @property
    def progress(self):
        """Number of bytes written to self.stream so far."""
        return self.__progress

    @property
    def encoding(self):
        """The content-encoding reported by the server, if any."""
        return self.__encoding

    @classmethod
    def FromFile(cls, filename, overwrite=False, auto_transfer=True, **kwds):
        """Create a new download object from a filename."""
        path = os.path.expanduser(filename)
        if os.path.exists(path) and not overwrite:
            raise exceptions.InvalidUserInputError(
                'File %s exists and overwrite not specified' % path)
        # Note: binary mode — __ProcessResponse writes bytes to the stream.
        return cls(open(path, 'wb'), close_stream=True,
                   auto_transfer=auto_transfer, **kwds)

    @classmethod
    def FromStream(cls, stream, auto_transfer=True, total_size=None, **kwds):
        """Create a new Download object from a stream."""
        return cls(stream, auto_transfer=auto_transfer, total_size=total_size,
                   **kwds)

    @classmethod
    def FromData(cls, stream, json_data, http=None, auto_transfer=None,
                 **kwds):
        """Create a new Download object from a stream and serialized data."""
        info = json.loads(json_data)
        missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
        if missing_keys:
            raise exceptions.InvalidDataError(
                'Invalid serialization data, missing keys: %s' % (
                    ', '.join(missing_keys)))
        download = cls.FromStream(stream, **kwds)
        if auto_transfer is not None:
            download.auto_transfer = auto_transfer
        else:
            download.auto_transfer = info['auto_transfer']
        # Restore private state via the mangled names.
        setattr(download, '_Download__progress', info['progress'])
        setattr(download, '_Download__total_size', info['total_size'])
        download._Initialize(  # pylint: disable=protected-access
            http, info['url'])
        return download

    @property
    def serialization_data(self):
        """Serializable state sufficient to restore this download later."""
        self.EnsureInitialized()
        return {
            'auto_transfer': self.auto_transfer,
            'progress': self.progress,
            'total_size': self.total_size,
            'url': self.url,
        }

    @property
    def total_size(self):
        """Total download size in bytes, or None if not yet known."""
        return self.__total_size

    def __str__(self):
        if not self.initialized:
            return 'Download (uninitialized)'
        return 'Download with %d/%s bytes transferred from url %s' % (
            self.progress, self.total_size, self.url)

    def ConfigureRequest(self, http_request, url_builder):
        """Configure http_request to fetch media for this download."""
        url_builder.query_params['alt'] = 'media'
        # TODO(craigcitro): We need to send range requests because by
        # default httplib2 stores entire reponses in memory. Override
        # httplib2's download method (as gsutil does) so that this is not
        # necessary.
        http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,)

    def __SetTotal(self, info):
        """Update self.__total_size from a content-range header, else 0."""
        if 'content-range' in info:
            _, _, total = info['content-range'].rpartition('/')
            if total != '*':
                self.__total_size = int(total)
        # Note "total_size is None" means we don't know it; if no size
        # info was returned on our initial range request, that means we
        # have a 0-byte file. (That last statement has been verified
        # empirically, but is not clearly documented anywhere.)
        if self.total_size is None:
            self.__total_size = 0

    def InitializeDownload(self, http_request, http=None, client=None):
        """Initialize this download by making a request.

        Args:
          http_request: The HttpRequest to use to initialize this download.
          http: The httplib2.Http instance for this request.
          client: If provided, let this client process the final URL before
              sending any additional requests. If client is provided and
              http is not, client.http will be used instead.
        """
        self.EnsureUninitialized()
        if http is None and client is None:
            raise exceptions.UserError('Must provide client or http.')
        http = http or client.http
        if client is not None:
            http_request.url = client.FinalizeTransferUrl(http_request.url)
        url = http_request.url
        if self.auto_transfer:
            end_byte = self.__ComputeEndByte(0)
            self.__SetRangeHeader(http_request, 0, end_byte)
            # self.bytes_http may still be None before _Initialize; fall
            # back to the http we were just given.
            response = http_wrapper.MakeRequest(
                self.bytes_http or http, http_request)
            if response.status_code not in self._ACCEPTABLE_STATUSES:
                raise exceptions.HttpError.FromResponse(response)
            self.__initial_response = response
            self.__SetTotal(response.info)
            url = response.info.get('content-location', response.request_url)
        if client is not None:
            url = client.FinalizeTransferUrl(url)
        self._Initialize(http, url)
        # Unless the user has requested otherwise, we want to just
        # go ahead and pump the bytes now.
        if self.auto_transfer:
            self.StreamInChunks()

    def __NormalizeStartEnd(self, start, end=None):
        """Validate a byte range and clamp it to the known total size."""
        if end is not None:
            if start < 0:
                raise exceptions.TransferInvalidError(
                    'Cannot have end index with negative start index')
            elif start >= self.total_size:
                raise exceptions.TransferInvalidError(
                    'Cannot have start index greater than total size')
            end = min(end, self.total_size - 1)
            if end < start:
                raise exceptions.TransferInvalidError(
                    'Range requested with end[%s] < start[%s]' % (end, start))
            return start, end
        else:
            if start < 0:
                # Negative start means "the last -start bytes of the file".
                start = max(0, start + self.total_size)
            return start, self.total_size - 1

    def __SetRangeHeader(self, request, start, end=None):
        """Set an HTTP range header covering [start, end] (or a suffix)."""
        if start < 0:
            request.headers['range'] = 'bytes=%d' % start
        elif end is None:
            request.headers['range'] = 'bytes=%d-' % start
        else:
            request.headers['range'] = 'bytes=%d-%d' % (start, end)

    def __ComputeEndByte(self, start, end=None, use_chunks=True):
        """Compute the last byte to fetch for this request.

        This is all based on the HTTP spec for Range and
        Content-Range.

        Note that this is potentially confusing in several ways:
          * the value for the last byte is 0-based, eg "fetch 10 bytes
            from the beginning" would return 9 here.
          * if we have no information about size, and don't want to
            use the chunksize, we'll return None.
        See the tests for more examples.

        Args:
          start: byte to start at.
          end: (int or None, default: None) Suggested last byte.
          use_chunks: (bool, default: True) If False, ignore self.chunksize.

        Returns:
          Last byte to use in a Range header, or None.

        """
        end_byte = end

        if start < 0 and not self.total_size:
            return end_byte

        if use_chunks:
            alternate = start + self.chunksize - 1
            if end_byte is not None:
                end_byte = min(end_byte, alternate)
            else:
                end_byte = alternate

        if self.total_size:
            alternate = self.total_size - 1
            if end_byte is not None:
                end_byte = min(end_byte, alternate)
            else:
                end_byte = alternate

        return end_byte

    def __GetChunk(self, start, end, additional_headers=None):
        """Retrieve a chunk, and return the full response."""
        self.EnsureInitialized()
        request = http_wrapper.Request(url=self.url)
        self.__SetRangeHeader(request, start, end=end)
        if additional_headers is not None:
            request.headers.update(additional_headers)
        return http_wrapper.MakeRequest(
            self.bytes_http, request, retry_func=self.retry_func,
            retries=self.num_retries)

    def __ProcessResponse(self, response):
        """Process response (by updating self and writing to self.stream)."""
        if response.status_code not in self._ACCEPTABLE_STATUSES:
            # We distinguish errors that mean we made a mistake in setting
            # up the transfer versus something we should attempt again.
            if response.status_code in (http_client.FORBIDDEN,
                                        http_client.NOT_FOUND):
                raise exceptions.HttpError.FromResponse(response)
            else:
                raise exceptions.TransferRetryError(response.content)
        if response.status_code in (http_client.OK,
                                    http_client.PARTIAL_CONTENT):
            self.stream.write(response.content)
            self.__progress += response.length
            if response.info and 'content-encoding' in response.info:
                # TODO(craigcitro): Handle the case where this changes over a
                # download.
                self.__encoding = response.info['content-encoding']
        elif response.status_code == http_client.NO_CONTENT:
            # It's important to write something to the stream for the case
            # of a 0-byte download to a file, as otherwise python won't
            # create the file.
            #
            # Write empty *bytes*, not an empty (unicode) str: FromFile
            # opens its stream in binary mode, and writing str to a binary
            # stream raises TypeError on Python 3.
            self.stream.write(six.binary_type())
        return response

    def GetRange(self, start, end=None, additional_headers=None,
                 use_chunks=True):
        """Retrieve a given byte range from this download, inclusive.

        Range must be of one of these three forms:
        * 0 <= start, end = None: Fetch from start to the end of the file.
        * 0 <= start <= end: Fetch the bytes from start to end.
        * start < 0, end = None: Fetch the last -start bytes of the file.

        (These variations correspond to those described in the HTTP 1.1
        protocol for range headers in RFC 2616, sec. 14.35.1.)

        Args:
          start: (int) Where to start fetching bytes. (See above.)
          end: (int, optional) Where to stop fetching bytes. (See above.)
          additional_headers: (dict, optional) Any additional headers to
              pass with the request.
          use_chunks: (bool, default: True) If False, ignore self.chunksize
              and fetch this range in a single request.

        Returns:
          None. Streams bytes into self.stream.
        """
        self.EnsureInitialized()
        progress_end_normalized = False
        if self.total_size is not None:
            progress, end_byte = self.__NormalizeStartEnd(start, end)
            progress_end_normalized = True
        else:
            progress = start
            end_byte = end
        while (not progress_end_normalized or end_byte is None or
               progress <= end_byte):
            end_byte = self.__ComputeEndByte(progress, end=end_byte,
                                             use_chunks=use_chunks)
            response = self.__GetChunk(progress, end_byte,
                                       additional_headers=additional_headers)
            if not progress_end_normalized:
                # The first response tells us the total size, after which
                # we can normalize the requested range against it.
                self.__SetTotal(response.info)
                progress, end_byte = self.__NormalizeStartEnd(start, end)
                progress_end_normalized = True
            response = self.__ProcessResponse(response)
            progress += response.length
            if response.length == 0:
                raise exceptions.TransferRetryError(
                    'Zero bytes unexpectedly returned in download response')

    def StreamInChunks(self, callback=None, finish_callback=None,
                       additional_headers=None):
        """Stream the entire download in chunks."""
        self.StreamMedia(callback=callback, finish_callback=finish_callback,
                         additional_headers=additional_headers,
                         use_chunks=True)

    def StreamMedia(self, callback=None, finish_callback=None,
                    additional_headers=None, use_chunks=True):
        """Stream the entire download.

        Args:
          callback: (default: None) Callback to call as each chunk is
              completed.
          finish_callback: (default: None) Callback to call when the
              download is complete.
          additional_headers: (default: None) Additional headers to
              include in fetching bytes.
          use_chunks: (bool, default: True) If False, ignore self.chunksize
              and stream this download in a single request.

        Returns:
            None. Streams bytes into self.stream.
        """
        callback = callback or self.progress_callback
        finish_callback = finish_callback or self.finish_callback

        self.EnsureInitialized()
        while True:
            if self.__initial_response is not None:
                # Use (and clear) the response cached by InitializeDownload.
                response = self.__initial_response
                self.__initial_response = None
            else:
                end_byte = self.__ComputeEndByte(self.progress,
                                                 use_chunks=use_chunks)
                response = self.__GetChunk(
                    self.progress, end_byte,
                    additional_headers=additional_headers)
            if self.total_size is None:
                self.__SetTotal(response.info)
            response = self.__ProcessResponse(response)
            self._ExecuteCallback(callback, response)
            if (response.status_code == http_client.OK or
                    self.progress >= self.total_size):
                break
        self._ExecuteCallback(finish_callback, response)
+
+
class Upload(_Transfer):

    """Data for a single Upload.

    Fields:
      stream: The stream to upload.
      mime_type: MIME type of the upload.
      total_size: (optional) Total upload size for the stream.
      close_stream: (default: False) Whether or not we should close the
          stream when finished with the upload.
      auto_transfer: (default: True) If True, stream all bytes as soon as
          the upload is created.
    """
    # Keys required in serialized upload state (see FromData).
    _REQUIRED_SERIALIZATION_KEYS = set((
        'auto_transfer', 'mime_type', 'total_size', 'url'))
+
+    def __init__(self, stream, mime_type, total_size=None, http=None,
+                 close_stream=False, chunksize=None, auto_transfer=True,
+                 progress_callback=None, finish_callback=None,
+                 **kwds):
+        super(Upload, self).__init__(
+            stream, close_stream=close_stream, chunksize=chunksize,
+            auto_transfer=auto_transfer, http=http, **kwds)
+        self.__complete = False
+        self.__final_response = None
+        self.__mime_type = mime_type
+        self.__progress = 0
+        self.__server_chunk_granularity = None
+        self.__strategy = None
+        self.__total_size = None
+
+        self.progress_callback = progress_callback
+        self.finish_callback = finish_callback
+        self.total_size = total_size
+
    @property
    def progress(self):
        """Number of bytes of the stream sent to the server so far."""
        return self.__progress
+
+    @classmethod
+    def FromFile(cls, filename, mime_type=None, auto_transfer=True, **kwds):
+        """Create a new Upload object from a filename."""
+        path = os.path.expanduser(filename)
+        if not os.path.exists(path):
+            raise exceptions.NotFoundError('Could not find file %s' % path)
+        if not mime_type:
+            mime_type, _ = mimetypes.guess_type(path)
+            if mime_type is None:
+                raise exceptions.InvalidUserInputError(
+                    'Could not guess mime type for %s' % path)
+        size = os.stat(path).st_size
+        return cls(open(path, 'rb'), mime_type, total_size=size,
+                   close_stream=True, auto_transfer=auto_transfer, **kwds)
+
+    @classmethod
+    def FromStream(cls, stream, mime_type, total_size=None, auto_transfer=True,
+                   **kwds):
+        """Create a new Upload object from a stream."""
+        if mime_type is None:
+            raise exceptions.InvalidUserInputError(
+                'No mime_type specified for stream')
+        return cls(stream, mime_type, total_size=total_size,
+                   close_stream=False, auto_transfer=auto_transfer, **kwds)
+
    @classmethod
    def FromData(cls, stream, json_data, http, auto_transfer=None, **kwds):
        """Create a new Upload of stream from serialized json_data and http.

        Restores a previously-serialized resumable upload: validates the
        serialized fields, rebuilds the Upload, then asks the server how
        many bytes it already holds so the stream can resume in place.

        Args:
          stream: A seekable stream holding the bytes to upload.
          json_data: JSON produced by serialization_data on a prior Upload.
          http: An httplib2.Http instance for this upload's requests.
          auto_transfer: If not None, overrides the serialized
              auto_transfer setting.
          **kwds: Additional arguments passed through to FromStream.
              total_size is forbidden here, since it comes from json_data.

        Returns:
          The restored (and, with auto_transfer, fully streamed) Upload.

        Raises:
          InvalidDataError: if required serialization keys are missing.
          InvalidUserInputError: if total_size is passed in kwds, or the
              stream is not seekable.
        """
        info = json.loads(json_data)
        missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
        if missing_keys:
            raise exceptions.InvalidDataError(
                'Invalid serialization data, missing keys: %s' % (
                    ', '.join(missing_keys)))
        if 'total_size' in kwds:
            raise exceptions.InvalidUserInputError(
                'Cannot override total_size on serialized Upload')
        upload = cls.FromStream(stream, info['mime_type'],
                                total_size=info.get('total_size'), **kwds)
        # Resuming requires seeking to the server-reported offset.
        if isinstance(stream, io.IOBase) and not stream.seekable():
            raise exceptions.InvalidUserInputError(
                'Cannot restart resumable upload on non-seekable stream')
        if auto_transfer is not None:
            upload.auto_transfer = auto_transfer
        else:
            upload.auto_transfer = info['auto_transfer']
        # Only resumable uploads can be restored mid-transfer.
        upload.strategy = RESUMABLE_UPLOAD
        upload._Initialize(  # pylint: disable=protected-access
            http, info['url'])
        # Ask the server for the current upload offset before streaming.
        upload.RefreshResumableUploadState()
        upload.EnsureInitialized()
        if upload.auto_transfer:
            upload.StreamInChunks()
        return upload
+
+    @property
+    def serialization_data(self):
+        self.EnsureInitialized()
+        if self.strategy != RESUMABLE_UPLOAD:
+            raise exceptions.InvalidDataError(
+                'Serialization only supported for resumable uploads')
+        return {
+            'auto_transfer': self.auto_transfer,
+            'mime_type': self.mime_type,
+            'total_size': self.total_size,
+            'url': self.url,
+        }
+
    @property
    def complete(self):
        """True once the server has acknowledged the entire upload."""
        return self.__complete
+
    @property
    def mime_type(self):
        """The MIME type sent for this upload's media."""
        return self.__mime_type
+
+    def __str__(self):
+        if not self.initialized:
+            return 'Upload (uninitialized)'
+        return 'Upload with %d/%s bytes transferred for url %s' % (
+            self.progress, self.total_size or '???', self.url)
+
    @property
    def strategy(self):
        """The upload strategy: SIMPLE_UPLOAD, RESUMABLE_UPLOAD, or None."""
        return self.__strategy

    @strategy.setter
    def strategy(self, value):
        # Only the two module-level strategy constants are accepted.
        if value not in (SIMPLE_UPLOAD, RESUMABLE_UPLOAD):
            raise exceptions.UserError((
                'Invalid value "%s" for upload strategy, must be one of '
                '"simple" or "resumable".') % value)
        self.__strategy = value
+
    @property
    def total_size(self):
        """Total number of bytes in the upload stream, or None if unknown."""
        return self.__total_size

    @total_size.setter
    def total_size(self, value):
        # The size may only change before the upload has been initialized.
        self.EnsureUninitialized()
        self.__total_size = value
+
+    def __SetDefaultUploadStrategy(self, upload_config, http_request):
+        """Determine and set the default upload strategy for this upload.
+
+        We generally prefer simple or multipart, unless we're forced to
+        use resumable. This happens when any of (1) the upload is too
+        large, (2) the simple endpoint doesn't support multipart requests
+        and we have metadata, or (3) there is no simple upload endpoint.
+
+        Args:
+          upload_config: Configuration for the upload endpoint.
+          http_request: The associated http request.
+
+        Returns:
+          None.
+        """
+        if upload_config.resumable_path is None:
+            self.strategy = SIMPLE_UPLOAD
+        if self.strategy is not None:
+            return
+        strategy = SIMPLE_UPLOAD
+        if (self.total_size is not None and
+                self.total_size > _RESUMABLE_UPLOAD_THRESHOLD):
+            strategy = RESUMABLE_UPLOAD
+        if http_request.body and not upload_config.simple_multipart:
+            strategy = RESUMABLE_UPLOAD
+        if not upload_config.simple_path:
+            strategy = RESUMABLE_UPLOAD
+        self.strategy = strategy
+
+    def ConfigureRequest(self, upload_config, http_request, url_builder):
+        """Configure the request and url for this upload."""
+        # Validate total_size vs. max_size
+        if (self.total_size and upload_config.max_size and
+                self.total_size > upload_config.max_size):
+            raise exceptions.InvalidUserInputError(
+                'Upload too big: %s larger than max size %s' % (
+                    self.total_size, upload_config.max_size))
+        # Validate mime type
+        if not util.AcceptableMimeType(upload_config.accept, self.mime_type):
+            raise exceptions.InvalidUserInputError(
+                'MIME type %s does not match any accepted MIME ranges %s' % (
+                    self.mime_type, upload_config.accept))
+
+        self.__SetDefaultUploadStrategy(upload_config, http_request)
+        if self.strategy == SIMPLE_UPLOAD:
+            url_builder.relative_path = upload_config.simple_path
+            if http_request.body:
+                url_builder.query_params['uploadType'] = 'multipart'
+                self.__ConfigureMultipartRequest(http_request)
+            else:
+                url_builder.query_params['uploadType'] = 'media'
+                self.__ConfigureMediaRequest(http_request)
+        else:
+            url_builder.relative_path = upload_config.resumable_path
+            url_builder.query_params['uploadType'] = 'resumable'
+            self.__ConfigureResumableRequest(http_request)
+
    def __ConfigureMediaRequest(self, http_request):
        """Configure http_request as a simple request for this upload."""
        http_request.headers['content-type'] = self.mime_type
        # The entire stream is read into memory and sent as one body.
        http_request.body = self.stream.read()
        # Keep raw media bytes out of the logs.
        http_request.loggable_body = '<media body>'
+
    def __ConfigureMultipartRequest(self, http_request):
        """Configure http_request as a multipart request for this upload.

        Builds a multipart/related body whose first part is the existing
        request metadata (http_request.body) and whose second part is the
        media read from self.stream, then installs that body and a
        matching content-type header on http_request.
        """
        # This is a multipart/related upload.
        msg_root = mime_multipart.MIMEMultipart('related')
        # msg_root should not write out its own headers
        setattr(msg_root, '_write_headers', lambda self: None)

        # attach the body as one part
        msg = mime_nonmultipart.MIMENonMultipart(
            *http_request.headers['content-type'].split('/'))
        msg.set_payload(http_request.body)
        msg_root.attach(msg)

        # attach the media as the second part
        msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/'))
        msg['Content-Transfer-Encoding'] = 'binary'
        msg.set_payload(self.stream.read())
        msg_root.attach(msg)

        # NOTE: We encode the body, but can't use
        #       `email.message.Message.as_string` because it prepends
        #       `> ` to `From ` lines.
        fp = six.BytesIO()
        if six.PY3:
            # BytesGenerator keeps the binary media payload intact.
            generator_class = email_generator.BytesGenerator
        else:
            generator_class = email_generator.Generator
        g = generator_class(fp, mangle_from_=False)
        g.flatten(msg_root, unixfrom=False)
        http_request.body = fp.getvalue()

        multipart_boundary = msg_root.get_boundary()
        http_request.headers['content-type'] = (
            'multipart/related; boundary=%r' % multipart_boundary)
        # The boundary must be bytes to split the (bytes) body below.
        if isinstance(multipart_boundary, six.text_type):
            multipart_boundary = multipart_boundary.encode('ascii')

        # Replace the media payload in the loggable copy of the body with
        # a placeholder, so raw media bytes never reach the logs.
        body_components = http_request.body.split(multipart_boundary)
        headers, _, _ = body_components[-2].partition(b'\n\n')
        body_components[-2] = b'\n\n'.join([headers, b'<media body>\n\n--'])
        http_request.loggable_body = multipart_boundary.join(body_components)
+
    def __ConfigureResumableRequest(self, http_request):
        """Add resumable-session headers describing the upcoming upload."""
        http_request.headers['X-Upload-Content-Type'] = self.mime_type
        if self.total_size is not None:
            # Advertise the full size so the server can validate it.
            http_request.headers[
                'X-Upload-Content-Length'] = str(self.total_size)
+
    def RefreshResumableUploadState(self):
        """Talk to the server and refresh the state of this resumable upload.

        Sends a 'bytes */*' Content-Range probe (the standard resumable
        status query) and updates progress/completion from the reply.

        Returns:
          None. On completion the server's response is cached in
          self.__final_response for later use by StreamInChunks.
        """
        if self.strategy != RESUMABLE_UPLOAD:
            return
        self.EnsureInitialized()
        refresh_request = http_wrapper.Request(
            url=self.url, http_method='PUT',
            headers={'Content-Range': 'bytes */*'})
        refresh_response = http_wrapper.MakeRequest(
            self.http, refresh_request, redirections=0,
            retries=self.num_retries)
        range_header = self._GetRangeHeaderFromResponse(refresh_response)
        if refresh_response.status_code in (http_client.OK,
                                            http_client.CREATED):
            # 200/201: the server already holds the complete upload.
            self.__complete = True
            # NOTE(review): assumes total_size is known here; for an upload
            # created with total_size=None this seek would fail — confirm
            # callers always have a size by this point.
            self.__progress = self.total_size
            self.stream.seek(self.progress)
            # If we're finished, the refresh response will contain the metadata
            # originally requested. Cache it so it can be returned in
            # StreamInChunks.
            self.__final_response = refresh_response
        elif refresh_response.status_code == http_wrapper.RESUME_INCOMPLETE:
            # 308: the Range header (when present) names the last byte the
            # server holds; resume immediately after it.
            if range_header is None:
                self.__progress = 0
            else:
                self.__progress = self.__GetLastByte(range_header) + 1
            self.stream.seek(self.progress)
        else:
            raise exceptions.HttpError.FromResponse(refresh_response)
+
+    def _GetRangeHeaderFromResponse(self, response):
+        return response.info.get('Range', response.info.get('range'))
+
    def InitializeUpload(self, http_request, http=None, client=None):
        """Initialize this upload from the given http_request.

        For resumable uploads, sends the initiation request, records the
        upload URL from the response's 'location' header, and (unless
        auto_transfer is disabled) immediately streams the bytes.

        Args:
          http_request: http_wrapper.Request for the initiation call.
          http: http instance to use for requests; at least one of http
              or client must be provided.
          client: optional API client, used to finalize transfer URLs and
              as the fallback source of http.

        Returns:
          The streaming response when auto_transfer kicks in, the
          initiation response for a non-auto resumable upload, or None
          for non-resumable strategies.

        Raises:
          exceptions.UserError: if no strategy has been set, or neither
              http nor client is given.
          exceptions.HttpError: if the initiation request is not a 200.
        """
        if self.strategy is None:
            raise exceptions.UserError(
                'No upload strategy set; did you call ConfigureRequest?')
        if http is None and client is None:
            raise exceptions.UserError('Must provide client or http.')
        # Only resumable uploads need an explicit initiation round-trip.
        if self.strategy != RESUMABLE_UPLOAD:
            return
        http = http or client.http
        if client is not None:
            http_request.url = client.FinalizeTransferUrl(http_request.url)
        self.EnsureUninitialized()
        http_response = http_wrapper.MakeRequest(http, http_request,
                                                 retries=self.num_retries)
        if http_response.status_code != http_client.OK:
            raise exceptions.HttpError.FromResponse(http_response)

        # NOTE(review): stored as returned by the header lookup (presumably
        # a str or None); __ValidateChunksize uses it as a modulus — confirm
        # the expected type.
        self.__server_chunk_granularity = http_response.info.get(
            'X-Goog-Upload-Chunk-Granularity')
        url = http_response.info['location']
        if client is not None:
            url = client.FinalizeTransferUrl(url)
        self._Initialize(http, url)

        # Unless the user has requested otherwise, we want to just
        # go ahead and pump the bytes now.
        if self.auto_transfer:
            return self.StreamInChunks()
        return http_response
+
+    def __GetLastByte(self, range_header):
+        _, _, end = range_header.partition('-')
+        # TODO(craigcitro): Validate start == 0?
+        return int(end)
+
+    def __ValidateChunksize(self, chunksize=None):
+        if self.__server_chunk_granularity is None:
+            return
+        chunksize = chunksize or self.chunksize
+        if chunksize % self.__server_chunk_granularity:
+            raise exceptions.ConfigurationValueError(
+                'Server requires chunksize to be a multiple of %d',
+                self.__server_chunk_granularity)
+
    def __StreamMedia(self, callback=None, finish_callback=None,
                      additional_headers=None, use_chunks=True):
        """Helper function for StreamMedia / StreamInChunks.

        Drives a resumable upload to completion, either chunk-by-chunk
        (use_chunks=True) or as a single full-body request, invoking
        callback after each successful request and finish_callback once
        the upload completes.

        Args:
          callback: optional per-request progress callback; defaults to
              self.progress_callback.
          finish_callback: optional completion callback; defaults to
              self.finish_callback.
          additional_headers: optional dict of extra headers for each
              upload request.
          use_chunks: whether to send the media in chunksize pieces.

        Returns:
          The final http_wrapper.Response.

        Raises:
          exceptions.InvalidUserInputError: if this upload is not
              resumable.
          exceptions.CommunicationError: if the server acknowledged fewer
              bytes than were sent in a chunk.
          exceptions.TransferInvalidError: if the stream still has bytes
              left after the server marked the upload complete.
        """
        if self.strategy != RESUMABLE_UPLOAD:
            raise exceptions.InvalidUserInputError(
                'Cannot stream non-resumable upload')
        callback = callback or self.progress_callback
        finish_callback = finish_callback or self.finish_callback
        # final_response is set if we resumed an already-completed upload.
        response = self.__final_response
        send_func = self.__SendChunk if use_chunks else self.__SendMediaBody
        if use_chunks:
            self.__ValidateChunksize(self.chunksize)
        self.EnsureInitialized()
        while not self.complete:
            response = send_func(self.stream.tell(),
                                 additional_headers=additional_headers)
            if response.status_code in (http_client.OK, http_client.CREATED):
                self.__complete = True
                break
            # The server reports the last byte it has via the range header.
            self.__progress = self.__GetLastByte(response.info['range'])
            if self.progress + 1 != self.stream.tell():
                # TODO(craigcitro): Add a better way to recover here.
                raise exceptions.CommunicationError(
                    'Failed to transfer all bytes in chunk, upload paused at '
                    'byte %d' % self.progress)
            self._ExecuteCallback(callback, response)
        if self.__complete and hasattr(self.stream, 'seek'):
            # Sanity-check that the whole stream was consumed: compare the
            # current position against the end of the stream, restoring the
            # position afterwards.
            current_pos = self.stream.tell()
            self.stream.seek(0, os.SEEK_END)
            end_pos = self.stream.tell()
            self.stream.seek(current_pos)
            if current_pos != end_pos:
                raise exceptions.TransferInvalidError(
                    'Upload complete with %s additional bytes left in stream' %
                    (int(end_pos) - int(current_pos)))
        self._ExecuteCallback(finish_callback, response)
        return response
+
+    def StreamMedia(self, callback=None, finish_callback=None,
+                    additional_headers=None):
+        """Send this resumable upload in a single request.
+
+        Args:
+          callback: Progress callback function with inputs
+              (http_wrapper.Response, transfer.Upload)
+          finish_callback: Final callback function with inputs
+              (http_wrapper.Response, transfer.Upload)
+          additional_headers: Dict of headers to include with the upload
+              http_wrapper.Request.
+
+        Returns:
+          http_wrapper.Response of final response.
+        """
+        return self.__StreamMedia(
+            callback=callback, finish_callback=finish_callback,
+            additional_headers=additional_headers, use_chunks=False)
+
+    def StreamInChunks(self, callback=None, finish_callback=None,
+                       additional_headers=None):
+        """Send this (resumable) upload in chunks."""
+        return self.__StreamMedia(
+            callback=callback, finish_callback=finish_callback,
+            additional_headers=additional_headers)
+
    def __SendMediaRequest(self, request, end):
        """Request helper function for SendMediaBody & SendChunk.

        Args:
          request: http_wrapper.Request carrying the media bytes.
          end: exclusive end-byte offset this request is expected to
              bring the server to.

        Returns:
          The http_wrapper.Response (a completion status or
          RESUME_INCOMPLETE).

        Raises:
          exceptions.HttpError: on any other status, after re-syncing
              local upload state with the server.
        """
        response = http_wrapper.MakeRequest(
            self.bytes_http, request, retry_func=self.retry_func,
            retries=self.num_retries)
        if response.status_code not in (http_client.OK, http_client.CREATED,
                                        http_wrapper.RESUME_INCOMPLETE):
            # We want to reset our state to wherever the server left us
            # before this failed request, and then raise.
            self.RefreshResumableUploadState()
            raise exceptions.HttpError.FromResponse(response)
        if response.status_code == http_wrapper.RESUME_INCOMPLETE:
            last_byte = self.__GetLastByte(
                self._GetRangeHeaderFromResponse(response))
            if last_byte + 1 != end:
                # The server took fewer bytes than we sent; rewind so the
                # next request resumes from the server's position.
                # NOTE(review): this seeks to last_byte, whereas
                # RefreshResumableUploadState resumes at last_byte + 1 —
                # confirm whether resending one byte here is intentional.
                self.stream.seek(last_byte)
        return response
+
    def __SendMediaBody(self, start, additional_headers=None):
        """Send the entire media stream in a single request.

        Args:
          start: byte offset at which the stream is currently positioned.
          additional_headers: optional dict of extra request headers.

        Returns:
          The http_wrapper.Response from __SendMediaRequest.

        Raises:
          exceptions.TransferInvalidError: if total_size is unknown — a
              single-shot upload must declare its full Content-Range.
        """
        self.EnsureInitialized()
        if self.total_size is None:
            raise exceptions.TransferInvalidError(
                'Total size must be known for SendMediaBody')
        # Wrap the remainder of the stream without copying it.
        body_stream = stream_slice.StreamSlice(
            self.stream, self.total_size - start)

        request = http_wrapper.Request(url=self.url, http_method='PUT',
                                       body=body_stream)
        request.headers['Content-Type'] = self.mime_type
        if start == self.total_size:
            # End of an upload with 0 bytes left to send; just finalize.
            range_string = 'bytes */%s' % self.total_size
        else:
            range_string = 'bytes %s-%s/%s' % (start, self.total_size - 1,
                                               self.total_size)

        request.headers['Content-Range'] = range_string
        if additional_headers:
            request.headers.update(additional_headers)

        return self.__SendMediaRequest(request, self.total_size)
+
    def __SendChunk(self, start, additional_headers=None):
        """Send the specified chunk.

        Handles both the known-size case (a StreamSlice over the next
        chunk) and the streaming case with unknown total size (a
        BufferedStream used to detect end-of-stream).

        Args:
          start: byte offset of the start of this chunk.
          additional_headers: optional dict of extra request headers.

        Returns:
          The http_wrapper.Response from __SendMediaRequest.
        """
        self.EnsureInitialized()
        no_log_body = self.total_size is None
        if self.total_size is None:
            # For the streaming resumable case, we need to detect when
            # we're at the end of the stream.
            body_stream = buffered_stream.BufferedStream(
                self.stream, start, self.chunksize)
            end = body_stream.stream_end_position
            if body_stream.stream_exhausted:
                self.__total_size = end
            # TODO: Here, change body_stream from a stream to a string object,
            # which means reading a chunk into memory.  This works around
            # https://code.google.com/p/httplib2/issues/detail?id=176 which can
            # cause httplib2 to skip bytes on 401's for file objects.
            # Rework this solution to be more general.
            body_stream = body_stream.read(self.chunksize)
        else:
            end = min(start + self.chunksize, self.total_size)
            body_stream = stream_slice.StreamSlice(self.stream, end - start)
        # TODO(craigcitro): Think about clearer errors on "no data in
        # stream".
        request = http_wrapper.Request(url=self.url, http_method='PUT',
                                       body=body_stream)
        request.headers['Content-Type'] = self.mime_type
        if no_log_body:
            # Disable logging of streaming body.
            # TODO: Remove no_log_body and rework as part of a larger logs
            # refactor.
            request.loggable_body = '<media body>'
        if self.total_size is None:
            # Streaming resumable upload case, unknown total size.
            range_string = 'bytes %s-%s/*' % (start, end - 1)
        elif end == start:
            # End of an upload with 0 bytes left to send; just finalize.
            range_string = 'bytes */%s' % self.total_size
        else:
            # Normal resumable upload case with known sizes.
            range_string = 'bytes %s-%s/%s' % (start, end - 1, self.total_size)

        request.headers['Content-Range'] = range_string
        if additional_headers:
            request.headers.update(additional_headers)

        return self.__SendMediaRequest(request, end)
diff --git a/apitools/base/py/transfer_test.py b/apitools/base/py/transfer_test.py
new file mode 100644
index 0000000..a4c43e7
--- /dev/null
+++ b/apitools/base/py/transfer_test.py
@@ -0,0 +1,268 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for transfer.py."""
+import string
+
+import mock
+import six
+from six.moves import http_client
+import unittest2
+
+from apitools.base.py import base_api
+from apitools.base.py import http_wrapper
+from apitools.base.py import transfer
+
+
class TransferTest(unittest2.TestCase):
    """Tests for transfer.Download / transfer.Upload using mocked HTTP."""

    def assertRangeAndContentRangeCompatible(self, request, response):
        # Asserts that the request's 'range' header ("bytes=a-b") is a
        # prefix of the span the response claims to deliver in its
        # 'content-range' header ("bytes a-b/total").
        request_prefix = 'bytes='
        self.assertIn('range', request.headers)
        self.assertTrue(request.headers['range'].startswith(request_prefix))
        request_range = request.headers['range'][len(request_prefix):]

        response_prefix = 'bytes '
        self.assertIn('content-range', response.info)
        response_header = response.info['content-range']
        self.assertTrue(response_header.startswith(response_prefix))
        response_range = (
            response_header[len(response_prefix):].partition('/')[0])

        msg = ('Request range ({0}) not a prefix of '
               'response_range ({1})').format(
                   request_range, response_range)
        self.assertTrue(response_range.startswith(request_range), msg=msg)

    def testComputeEndByte(self):
        total_size = 100
        chunksize = 10
        download = transfer.Download.FromStream(
            six.StringIO(), chunksize=chunksize, total_size=total_size)
        self.assertEqual(chunksize - 1,
                         download._Download__ComputeEndByte(0, end=50))

    def testComputeEndByteReturnNone(self):
        download = transfer.Download.FromStream(six.StringIO())
        self.assertIsNone(
            download._Download__ComputeEndByte(0, use_chunks=False))

    def testComputeEndByteNoChunks(self):
        total_size = 100
        download = transfer.Download.FromStream(
            six.StringIO(), chunksize=10, total_size=total_size)
        for end in (None, 1000):
            self.assertEqual(
                total_size - 1,
                download._Download__ComputeEndByte(0, end=end,
                                                   use_chunks=False),
                msg='Failed on end={0}'.format(end))

    def testComputeEndByteNoTotal(self):
        download = transfer.Download.FromStream(six.StringIO())
        default_chunksize = download.chunksize
        for chunksize in (100, default_chunksize):
            download.chunksize = chunksize
            for start in (0, 10):
                self.assertEqual(
                    download.chunksize + start - 1,
                    download._Download__ComputeEndByte(start),
                    msg='Failed on start={0}, chunksize={1}'.format(
                        start, chunksize))

    def testComputeEndByteSmallTotal(self):
        total_size = 100
        download = transfer.Download.FromStream(six.StringIO(),
                                                total_size=total_size)
        for start in (0, 10):
            self.assertEqual(total_size - 1,
                             download._Download__ComputeEndByte(start),
                             msg='Failed on start={0}'.format(start))

    def testGetRange(self):
        for (start_byte, end_byte) in [(0, 25), (5, 15), (0, 0), (25, 25)]:
            bytes_http = object()
            http = object()
            download_stream = six.StringIO()
            download = transfer.Download.FromStream(download_stream,
                                                    total_size=26,
                                                    auto_transfer=False)
            download.bytes_http = bytes_http
            base_url = 'https://part.one/'
            with mock.patch.object(http_wrapper, 'MakeRequest',
                                   autospec=True) as make_request:
                make_request.return_value = http_wrapper.Response(
                    info={
                        'content-range': 'bytes %d-%d/26' %
                                         (start_byte, end_byte),
                        'status': http_client.OK,
                    },
                    content=string.ascii_lowercase[start_byte:end_byte+1],
                    request_url=base_url,
                )
                request = http_wrapper.Request(url='https://part.one/')
                download.InitializeDownload(request, http=http)
                download.GetRange(start_byte, end_byte)
                self.assertEqual(1, make_request.call_count)
                received_request = make_request.call_args[0][1]
                self.assertEqual(base_url, received_request.url)
                self.assertRangeAndContentRangeCompatible(
                    received_request, make_request.return_value)

    def testNonChunkedDownload(self):
        bytes_http = object()
        http = object()
        download_stream = six.StringIO()
        download = transfer.Download.FromStream(download_stream, total_size=52)
        download.bytes_http = bytes_http
        base_url = 'https://part.one/'

        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as make_request:
            make_request.return_value = http_wrapper.Response(
                info={
                    'content-range': 'bytes 0-51/52',
                    'status': http_client.OK,
                },
                content=string.ascii_lowercase * 2,
                request_url=base_url,
            )
            request = http_wrapper.Request(url='https://part.one/')
            download.InitializeDownload(request, http=http)
            self.assertEqual(1, make_request.call_count)
            received_request = make_request.call_args[0][1]
            self.assertEqual(base_url, received_request.url)
            self.assertRangeAndContentRangeCompatible(
                received_request, make_request.return_value)
            download_stream.seek(0)
            self.assertEqual(string.ascii_lowercase * 2,
                             download_stream.getvalue())

    def testChunkedDownload(self):
        bytes_http = object()
        http = object()
        download_stream = six.StringIO()
        download = transfer.Download.FromStream(
            download_stream, chunksize=26, total_size=52)
        download.bytes_http = bytes_http

        # Setting autospec on a mock with an iterable side_effect is
        # currently broken (http://bugs.python.org/issue17826), so
        # instead we write a little function.
        def _ReturnBytes(unused_http, http_request,
                         *unused_args, **unused_kwds):
            url = http_request.url
            if url == 'https://part.one/':
                return http_wrapper.Response(
                    info={
                        'content-location': 'https://part.two/',
                        'content-range': 'bytes 0-25/52',
                        'status': http_client.PARTIAL_CONTENT,
                    },
                    content=string.ascii_lowercase,
                    request_url='https://part.one/',
                )
            elif url == 'https://part.two/':
                return http_wrapper.Response(
                    info={
                        'content-range': 'bytes 26-51/52',
                        'status': http_client.OK,
                    },
                    content=string.ascii_uppercase,
                    request_url='https://part.two/',
                )
            else:
                self.fail('Unknown URL requested: %s' % url)

        with mock.patch.object(http_wrapper, 'MakeRequest',
                               autospec=True) as make_request:
            make_request.side_effect = _ReturnBytes
            request = http_wrapper.Request(url='https://part.one/')
            download.InitializeDownload(request, http=http)
            self.assertEqual(2, make_request.call_count)
            for call in make_request.call_args_list:
                self.assertRangeAndContentRangeCompatible(
                    call[0][1], _ReturnBytes(*call[0]))
            download_stream.seek(0)
            self.assertEqual(string.ascii_lowercase + string.ascii_uppercase,
                             download_stream.getvalue())

    def testMultipartEncoding(self):
        # This is really a table test for various issues we've seen in
        # the past; see notes below for particular histories.

        test_cases = [
            # Python's mime module by default encodes lines that start
            # with "From " as ">From ", which we need to make sure we
            # don't run afoul of when sending content that isn't
            # intended to be so encoded. This test calls out that we
            # get this right. We test for both the multipart and
            # non-multipart case.
            'line one\nFrom \nline two',

            # We had originally used a `six.StringIO` to hold the http
            # request body in the case of a multipart upload; for
            # bytes being uploaded in Python3, however, this causes
            # issues like this:
            # https://github.com/GoogleCloudPlatform/gcloud-python/issues/1760
            # We test below to ensure that we don't end up mangling
            # the body before sending.
            u'name,main_ingredient\nRäksmörgås,Räkor\nBaguette,Bröd',
        ]

        for upload_contents in test_cases:
            multipart_body = '{"body_field_one": 7}'
            upload_bytes = upload_contents.encode('ascii', 'backslashreplace')
            upload_config = base_api.ApiUploadInfo(
                accept=['*/*'],
                max_size=None,
                resumable_multipart=True,
                resumable_path=u'/resumable/upload',
                simple_multipart=True,
                simple_path=u'/upload',
            )
            url_builder = base_api._UrlBuilder('http://www.uploads.com')

            # Test multipart: having a body argument in http_request forces
            # multipart here.
            upload = transfer.Upload.FromStream(
                six.BytesIO(upload_bytes),
                'text/plain',
                total_size=len(upload_bytes))
            http_request = http_wrapper.Request(
                'http://www.uploads.com',
                headers={'content-type': 'text/plain'},
                body=multipart_body)
            upload.ConfigureRequest(upload_config, http_request, url_builder)
            self.assertEqual(
                'multipart', url_builder.query_params['uploadType'])
            # The third '--'-delimited segment of the multipart body is the
            # media part; drop its headers and compare the payload.
            rewritten_upload_contents = b'\n'.join(
                http_request.body.split(b'--')[2].splitlines()[1:])
            self.assertTrue(rewritten_upload_contents.endswith(upload_bytes))

            # Test non-multipart (aka media): no body argument means this is
            # sent as media.
            upload = transfer.Upload.FromStream(
                six.BytesIO(upload_bytes),
                'text/plain',
                total_size=len(upload_bytes))
            http_request = http_wrapper.Request(
                'http://www.uploads.com',
                headers={'content-type': 'text/plain'})
            upload.ConfigureRequest(upload_config, http_request, url_builder)
            self.assertEqual(url_builder.query_params['uploadType'], 'media')
            rewritten_upload_contents = http_request.body
            self.assertTrue(rewritten_upload_contents.endswith(upload_bytes))
diff --git a/apitools/base/py/util.py b/apitools/base/py/util.py
new file mode 100644
index 0000000..112259e
--- /dev/null
+++ b/apitools/base/py/util.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Assorted utilities shared between parts of apitools."""
+
+import collections
+import os
+import random
+
+import six
+from six.moves import http_client
+import six.moves.urllib.error as urllib_error
+import six.moves.urllib.parse as urllib_parse
+import six.moves.urllib.request as urllib_request
+
+from apitools.base.protorpclite import messages
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+
+__all__ = [
+    'DetectGae',
+    'DetectGce',
+]
+
+_RESERVED_URI_CHARS = r":/?#[]@!$&'()*+,;="
+
+
def DetectGae():
    """Determine whether or not we're running on GAE.

    This is based on:
      https://developers.google.com/appengine/docs/python/#The_Environment

    Returns:
      True iff we're running on GAE.
    """
    # Both the dev server and production advertise via SERVER_SOFTWARE.
    server_software = os.environ.get('SERVER_SOFTWARE', '')
    return server_software.startswith(('Development/', 'Google App Engine/'))
+
+
def DetectGce():
    """Determine whether or not we're running on GCE.

    This is based on:
      https://cloud.google.com/compute/docs/metadata#runninggce

    Returns:
      True iff we're running on a GCE instance.
    """
    metadata_host = os.environ.get('GCE_METADATA_ROOT',
                                   'metadata.google.internal')
    request = urllib_request.Request(
        'http://{}'.format(metadata_host),
        headers={'Metadata-Flavor': 'Google'})
    # Bypass any configured proxies: the metadata server is link-local.
    opener = urllib_request.build_opener(urllib_request.ProxyHandler({}))
    try:
        response = opener.open(request)
    except urllib_error.URLError:
        return False
    return (response.getcode() == http_client.OK and
            response.headers.get('metadata-flavor') == 'Google')
+
+
def NormalizeScopes(scope_spec):
    """Normalize scope_spec to a set of scope strings.

    Args:
      scope_spec: either a space-separated string of scopes, or an
          iterable of individual scope strings.

    Returns:
      set of scope strings.

    Raises:
      exceptions.TypecheckError: if scope_spec is neither a string nor
          an iterable.
    """
    if isinstance(scope_spec, six.string_types):
        return set(scope_spec.split(' '))
    # collections.Iterable was deprecated in Python 3.3 and removed in
    # 3.10; prefer the collections.abc location, falling back for py2.
    try:
        from collections.abc import Iterable
    except ImportError:  # Python 2
        from collections import Iterable
    if isinstance(scope_spec, Iterable):
        return set(scope_spec)
    raise exceptions.TypecheckError(
        'NormalizeScopes expected string or iterable, found %s' % (
            type(scope_spec),))
+
+
def Typecheck(arg, arg_type, msg=None):
    """Ensure that arg is an instance of arg_type.

    Args:
      arg: value to check.
      arg_type: a type, or tuple of types, that arg must be an
          instance of.
      msg: optional custom error message.

    Returns:
      arg, unchanged, when the check passes.

    Raises:
      exceptions.TypecheckError: if arg is not an instance of arg_type.
    """
    if isinstance(arg, arg_type):
        return arg
    if msg is None:
        if isinstance(arg_type, tuple):
            msg = 'Type of arg is "%s", not one of %r' % (
                type(arg), arg_type)
        else:
            msg = 'Type of arg is "%s", not "%s"' % (type(arg), arg_type)
    raise exceptions.TypecheckError(msg)
+
+
def ExpandRelativePath(method_config, params, relative_path=None):
    """Determine the relative path for request.

    Substitutes each '{param}' (or reserved-expansion '{+param}')
    template in the configured relative path with the URL-quoted value
    from params.

    Args:
      method_config: method config object providing relative_path and
          path_params.
      params: dict mapping path parameter names to values.
      relative_path: optional override for method_config.relative_path.

    Returns:
      The expanded relative path string.

    Raises:
      exceptions.InvalidUserInputError: if a path parameter is absent
          from the template, missing from params, None, or cannot be
          encoded.
    """
    path = relative_path or method_config.relative_path or ''

    for param in method_config.path_params:
        param_template = '{%s}' % param
        # For more details about "reserved word expansion", see:
        #   http://tools.ietf.org/html/rfc6570#section-3.2.2
        reserved_chars = ''
        reserved_template = '{+%s}' % param
        if reserved_template in path:
            # Reserved expansion: leave URI syntax characters unescaped.
            reserved_chars = _RESERVED_URI_CHARS
            path = path.replace(reserved_template, param_template)
        if param_template not in path:
            raise exceptions.InvalidUserInputError(
                'Missing path parameter %s' % param)
        try:
            # TODO(craigcitro): Do we want to support some sophisticated
            # mapping here?
            value = params[param]
        except KeyError:
            raise exceptions.InvalidUserInputError(
                'Request missing required parameter %s' % param)
        if value is None:
            raise exceptions.InvalidUserInputError(
                'Request missing required parameter %s' % param)
        try:
            if not isinstance(value, six.string_types):
                value = str(value)
            path = path.replace(param_template,
                                urllib_parse.quote(value.encode('utf_8'),
                                                   reserved_chars))
        except TypeError as e:
            raise exceptions.InvalidUserInputError(
                'Error setting required parameter %s to value %s: %s' % (
                    param, value, e))
    return path
+
+
def CalculateWaitForRetry(retry_attempt, max_wait=60):
    """Calculates amount of time to wait before a retry attempt.

    Wait time grows exponentially with the number of attempts. A
    random amount of jitter is added to spread out retry attempts from
    different clients.

    Args:
      retry_attempt: Retry attempt counter.
      max_wait: Upper bound for wait time [seconds].

    Returns:
      Number of seconds to wait before retrying request.
    """
    base_wait = 2 ** retry_attempt
    # Jitter by up to +/- 25% of the exponential base wait.
    jitter = random.uniform(-base_wait / 4.0, base_wait / 4.0)
    # Clamp the jittered wait into [1, max_wait].
    return max(1, min(base_wait + jitter, max_wait))
+
+
def AcceptableMimeType(accept_patterns, mime_type):
    """Return True iff mime_type is acceptable for one of accept_patterns.

    Note that this function assumes that all patterns in accept_patterns
    will be simple types of the form "type/subtype", where one or both
    of these can be "*". We do not support parameters (i.e. "; q=") in
    patterns.

    Args:
      accept_patterns: list of acceptable MIME types.
      mime_type: the mime type we would like to match.

    Returns:
      Whether or not mime_type matches (at least) one of these patterns.
    """
    if '/' not in mime_type:
        raise exceptions.InvalidUserInputError(
            'Invalid MIME type: "%s"' % mime_type)
    unsupported_patterns = [p for p in accept_patterns if ';' in p]
    if unsupported_patterns:
        raise exceptions.GeneratedClientError(
            'MIME patterns with parameter unsupported: "%s"' % ', '.join(
                unsupported_patterns))

    provided_parts = mime_type.split('/')

    def _Matches(pattern):
        """Return True iff mime_type is acceptable for this one pattern."""
        # Some systems use a single '*' instead of '*/*'.
        if pattern == '*':
            pattern = '*/*'
        for accept, provided in zip(pattern.split('/'), provided_parts):
            if accept != '*' and accept != provided:
                return False
        return True

    return any(_Matches(pattern) for pattern in accept_patterns)
+
+
def MapParamNames(params, request_type):
    """Reverse parameter remappings for URL construction."""
    mapped = []
    for param in params:
        remapped = encoding.GetCustomJsonFieldMapping(
            request_type, json_name=param)
        # Fall back to the original name when no mapping is registered.
        mapped.append(remapped or param)
    return mapped
+
+
def MapRequestParams(params, request_type):
    """Perform any renames/remappings needed for URL construction.

    Currently, we have several ways to customize JSON encoding, in
    particular of field names and enums. This works fine for JSON
    bodies, but also needs to be applied for path and query parameters
    in the URL.

    This function takes a dictionary from param names to values, and
    performs any registered mappings. We also need the request type (to
    look up the mappings).

    Args:
      params: (dict) Map from param names to values
      request_type: (protorpc.messages.Message) request type for this API call

    Returns:
      A new dict of the same size, with all registered mappings applied.
    """
    new_params = dict(params)
    for param_name, value in params.items():
        field_remapping = encoding.GetCustomJsonFieldMapping(
            request_type, python_name=param_name)
        if field_remapping is not None:
            new_params[field_remapping] = new_params.pop(param_name)
            # Track the remapped key so a following enum remapping updates
            # the new key instead of re-adding the old (popped) one.
            param_name = field_remapping
        if isinstance(value, messages.Enum):
            new_params[param_name] = encoding.GetCustomJsonEnumMapping(
                type(value), python_name=str(value)) or str(value)
    return new_params
diff --git a/apitools/base/py/util_test.py b/apitools/base/py/util_test.py
new file mode 100644
index 0000000..a06d1a9
--- /dev/null
+++ b/apitools/base/py/util_test.py
@@ -0,0 +1,198 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for util.py."""
+import unittest2
+
+from apitools.base.protorpclite import messages
+from apitools.base.py import encoding
+from apitools.base.py import exceptions
+from apitools.base.py import util
+
+
+class MockedMethodConfig(object):
+
+    """Stand-in for a method config when testing util.ExpandRelativePath.
+
+    Carries only the relative_path and path_params attributes used by
+    the tests below.
+    """
+
+    def __init__(self, relative_path, path_params):
+        self.relative_path = relative_path
+        self.path_params = path_params
+
+
+class MessageWithRemappings(messages.Message):
+
+    """Message used to exercise custom JSON field/enum remappings."""
+
+    class AnEnum(messages.Enum):
+        value_one = 1
+        value_two = 2
+
+    str_field = messages.StringField(1)
+    enum_field = messages.EnumField('AnEnum', 2)
+
+
+encoding.AddCustomJsonFieldMapping(
+    MessageWithRemappings, 'str_field', 'path_field')
+encoding.AddCustomJsonEnumMapping(
+    MessageWithRemappings.AnEnum, 'value_one', 'ONE')
+
+
+class UtilTest(unittest2.TestCase):
+
+    def testExpand(self):
+        method_config_xy = MockedMethodConfig(relative_path='{x}/y/{z}',
+                                              path_params=['x', 'z'])
+        self.assertEquals(
+            util.ExpandRelativePath(method_config_xy, {'x': '1', 'z': '2'}),
+            '1/y/2')
+        self.assertEquals(
+            util.ExpandRelativePath(
+                method_config_xy,
+                {'x': '1', 'z': '2'},
+                relative_path='{x}/y/{z}/q'),
+            '1/y/2/q')
+
+    def testReservedExpansion(self):
+        method_config_reserved = MockedMethodConfig(relative_path='{+x}/baz',
+                                                    path_params=['x'])
+        self.assertEquals('foo/:bar:/baz', util.ExpandRelativePath(
+            method_config_reserved, {'x': 'foo/:bar:'}))
+        method_config_no_reserved = MockedMethodConfig(relative_path='{x}/baz',
+                                                       path_params=['x'])
+        self.assertEquals('foo%2F%3Abar%3A/baz', util.ExpandRelativePath(
+            method_config_no_reserved, {'x': 'foo/:bar:'}))
+
+    def testCalculateWaitForRetry(self):
+        try0 = util.CalculateWaitForRetry(0)
+        self.assertTrue(try0 >= 1.0)
+        self.assertTrue(try0 <= 1.5)
+        try1 = util.CalculateWaitForRetry(1)
+        self.assertTrue(try1 >= 1.0)
+        self.assertTrue(try1 <= 3.0)
+        try2 = util.CalculateWaitForRetry(2)
+        self.assertTrue(try2 >= 2.0)
+        self.assertTrue(try2 <= 6.0)
+        try3 = util.CalculateWaitForRetry(3)
+        self.assertTrue(try3 >= 4.0)
+        self.assertTrue(try3 <= 12.0)
+        try4 = util.CalculateWaitForRetry(4)
+        self.assertTrue(try4 >= 8.0)
+        self.assertTrue(try4 <= 24.0)
+
+        self.assertAlmostEqual(10, util.CalculateWaitForRetry(5, max_wait=10))
+
+    def testTypecheck(self):
+
+        class Class1(object):
+            pass
+
+        class Class2(object):
+            pass
+
+        class Class3(object):
+            pass
+
+        instance_of_class1 = Class1()
+
+        self.assertEquals(
+            instance_of_class1, util.Typecheck(instance_of_class1, Class1))
+
+        self.assertEquals(
+            instance_of_class1,
+            util.Typecheck(instance_of_class1, ((Class1, Class2), Class3)))
+
+        self.assertEquals(
+            instance_of_class1,
+            util.Typecheck(instance_of_class1, (Class1, (Class2, Class3))))
+
+        self.assertEquals(
+            instance_of_class1,
+            util.Typecheck(instance_of_class1, Class1, 'message'))
+
+        self.assertEquals(
+            instance_of_class1,
+            util.Typecheck(
+                instance_of_class1, ((Class1, Class2), Class3), 'message'))
+
+        self.assertEquals(
+            instance_of_class1,
+            util.Typecheck(
+                instance_of_class1, (Class1, (Class2, Class3)), 'message'))
+
+        with self.assertRaises(exceptions.TypecheckError):
+            util.Typecheck(instance_of_class1, Class2)
+
+        with self.assertRaises(exceptions.TypecheckError):
+            util.Typecheck(instance_of_class1, (Class2, Class3))
+
+        with self.assertRaises(exceptions.TypecheckError):
+            util.Typecheck(instance_of_class1, Class2, 'message')
+
+        with self.assertRaises(exceptions.TypecheckError):
+            util.Typecheck(instance_of_class1, (Class2, Class3), 'message')
+
+    def testAcceptableMimeType(self):
+        valid_pairs = (
+            ('*', 'text/plain'),
+            ('*/*', 'text/plain'),
+            ('text/*', 'text/plain'),
+            ('*/plain', 'text/plain'),
+            ('text/plain', 'text/plain'),
+        )
+
+        for accept, mime_type in valid_pairs:
+            self.assertTrue(util.AcceptableMimeType([accept], mime_type))
+
+        invalid_pairs = (
+            ('text/*', 'application/json'),
+            ('text/plain', 'application/json'),
+        )
+
+        for accept, mime_type in invalid_pairs:
+            self.assertFalse(util.AcceptableMimeType([accept], mime_type))
+
+        self.assertTrue(util.AcceptableMimeType(['application/json', '*/*'],
+                                                'text/plain'))
+        self.assertFalse(util.AcceptableMimeType(['application/json', 'img/*'],
+                                                 'text/plain'))
+
+    def testMalformedMimeType(self):
+        self.assertRaises(
+            exceptions.InvalidUserInputError,
+            util.AcceptableMimeType, ['*/*'], 'abcd')
+
+    def testUnsupportedMimeType(self):
+        self.assertRaises(
+            exceptions.GeneratedClientError,
+            util.AcceptableMimeType, ['text/html;q=0.9'], 'text/html')
+
+    def testMapRequestParams(self):
+        params = {
+            'str_field': 'foo',
+            'enum_field': MessageWithRemappings.AnEnum.value_one,
+        }
+        remapped_params = {
+            'path_field': 'foo',
+            'enum_field': 'ONE',
+        }
+        self.assertEqual(remapped_params,
+                         util.MapRequestParams(params, MessageWithRemappings))
+
+        params['enum_field'] = MessageWithRemappings.AnEnum.value_two
+        remapped_params['enum_field'] = 'value_two'
+        self.assertEqual(remapped_params,
+                         util.MapRequestParams(params, MessageWithRemappings))
+
+    def testMapParamNames(self):
+        params = ['path_field', 'enum_field']
+        remapped_params = ['str_field', 'enum_field']
+        self.assertEqual(remapped_params,
+                         util.MapParamNames(params, MessageWithRemappings))
diff --git a/apitools/data/__init__.py b/apitools/data/__init__.py
new file mode 100644
index 0000000..463cb42
--- /dev/null
+++ b/apitools/data/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared __init__.py for apitools."""
+
+# Extend __path__ so this package can span multiple directories
+# (namespace-package style, per pkgutil.extend_path).
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/apitools/data/apitools_client_secrets.json b/apitools/data/apitools_client_secrets.json
new file mode 100644
index 0000000..5761d14
--- /dev/null
+++ b/apitools/data/apitools_client_secrets.json
@@ -0,0 +1,15 @@
+{
+  "installed": {
+    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+    "client_email": "",
+    "client_id": "1042881264118.apps.googleusercontent.com",
+    "client_secret": "x_Tw5K8nnjoRAqULM9PFAC2b",
+    "client_x509_cert_url": "",
+    "redirect_uris": [
+      "urn:ietf:wg:oauth:2.0:oob",
+      "oob"
+    ],
+    "token_uri": "https://accounts.google.com/o/oauth2/token"
+  }
+}
diff --git a/apitools/gen/__init__.py b/apitools/gen/__init__.py
new file mode 100644
index 0000000..463cb42
--- /dev/null
+++ b/apitools/gen/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared __init__.py for apitools."""
+
+# Extend __path__ so this package can span multiple directories
+# (namespace-package style, per pkgutil.extend_path).
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/apitools/gen/client_generation_test.py b/apitools/gen/client_generation_test.py
new file mode 100644
index 0000000..5e7932a
--- /dev/null
+++ b/apitools/gen/client_generation_test.py
@@ -0,0 +1,81 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test gen_client against all the APIs we use regularly."""
+
+import logging
+import os
+import subprocess
+import tempfile
+
+import unittest2
+
+from apitools.gen import gen_client
+from apitools.gen import test_utils
+
+
+# 'api.version' discovery identifiers exercised by the generation test.
+_API_LIST = [
+    'drive.v2',
+    'bigquery.v2',
+    'compute.v1',
+    'storage.v1',
+]
+
+
+class ClientGenerationTest(unittest2.TestCase):
+
+    def setUp(self):
+        super(ClientGenerationTest, self).setUp()
+        self.gen_client_binary = 'gen_client'
+
+    @test_utils.SkipOnWindows
+    @test_utils.RunOnlyOnPython27
+    def testGeneration(self):
+        for api in _API_LIST:
+            with test_utils.TempDir(change_to=True):
+                args = [
+                    self.gen_client_binary,
+                    '--client_id=12345',
+                    '--client_secret=67890',
+                    '--discovery_url=%s' % api,
+                    '--outdir=generated',
+                    '--overwrite',
+                    'client',
+                ]
+                logging.info('Testing API %s with command line: %s',
+                             api, ' '.join(args))
+                retcode = gen_client.main(args)
+                if retcode == 128:
+                    logging.error('Failed to fetch discovery doc, continuing.')
+                    continue
+                self.assertEqual(0, retcode)
+
+                with tempfile.NamedTemporaryFile() as out:
+                    with tempfile.NamedTemporaryFile() as err:
+                        cmdline_args = [
+                            os.path.join(
+                                'generated', api.replace('.', '_') + '.py'),
+                            'help',
+                        ]
+                        retcode = subprocess.call(
+                            cmdline_args, stdout=out, stderr=err)
+                        with open(err.name, 'rb') as f:
+                            err_output = f.read()
+                # appcommands returns 1 on help
+                self.assertEqual(1, retcode)
+                if 'Traceback (most recent call last):' in err_output:
+                    err = '\n======\n%s======\n' % err_output
+                    self.fail(
+                        'Error raised in generated client:' + err)
diff --git a/apitools/gen/command_registry.py b/apitools/gen/command_registry.py
new file mode 100644
index 0000000..486934f
--- /dev/null
+++ b/apitools/gen/command_registry.py
@@ -0,0 +1,608 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Command registry for apitools."""
+
+import logging
+import textwrap
+
+from apitools.base.protorpclite import descriptor
+from apitools.base.protorpclite import messages
+from apitools.gen import extended_descriptor
+
+# This is a code generator; we're purposely verbose.
+# pylint:disable=too-many-statements
+
+# Maps each protorpc variant to the flags.DEFINE_* type used to declare it.
+# NOTE(review): INT64/UINT64 map to 'string' flags and are converted back
+# via int(...) in __GetConversion — presumably to avoid flag-level integer
+# range limits; confirm before changing (SINT64 maps to 'integer').
+_VARIANT_TO_FLAG_TYPE_MAP = {
+    messages.Variant.DOUBLE: 'float',
+    messages.Variant.FLOAT: 'float',
+    messages.Variant.INT64: 'string',
+    messages.Variant.UINT64: 'string',
+    messages.Variant.INT32: 'integer',
+    messages.Variant.BOOL: 'boolean',
+    messages.Variant.STRING: 'string',
+    messages.Variant.MESSAGE: 'string',
+    messages.Variant.BYTES: 'string',
+    messages.Variant.UINT32: 'integer',
+    messages.Variant.ENUM: 'enum',
+    messages.Variant.SINT32: 'integer',
+    messages.Variant.SINT64: 'integer',
+}
+
+
+class FlagInfo(messages.Message):
+
+    """Information about a flag and conversion to a message.
+
+    Fields:
+      name: name of this flag.
+      type: type of the flag: a flags.DEFINE_* suffix such as 'string',
+          'boolean', 'integer', 'float', or 'enum' (see __PrintFlag).
+      description: description of the flag.
+      default: default value for this flag.
+      enum_values: if this flag is an enum, the list of possible
+          values.
+      required: whether or not this flag is required.
+      fv: name of the flag_values object where this flag should
+          be registered.
+      conversion: template for type conversion.
+      special: (boolean, default: False) If True, this flag doesn't
+          correspond to an attribute on the request.
+    """
+    name = messages.StringField(1)
+    type = messages.StringField(2)
+    description = messages.StringField(3)
+    default = messages.StringField(4)
+    enum_values = messages.StringField(5, repeated=True)
+    required = messages.BooleanField(6, default=False)
+    fv = messages.StringField(7)
+    conversion = messages.StringField(8)
+    special = messages.BooleanField(9, default=False)
+
+
+class ArgInfo(messages.Message):
+
+    """Information about a single positional command argument.
+
+    Fields:
+      name: argument name.
+      description: description of this argument.
+      conversion: template for type conversion (a %-format string
+          applied to the raw argument, e.g. 'int(%s)'; empty means
+          the raw string is used unchanged).
+    """
+    name = messages.StringField(1)
+    description = messages.StringField(2)
+    conversion = messages.StringField(3)
+
+
+class CommandInfo(messages.Message):
+
+    """Information about a single command.
+
+    Fields:
+      name: name of this command.
+      class_name: name of the apitools_base.NewCmd class for this command.
+      description: description of this command.
+      flags: list of FlagInfo messages for the command-specific flags.
+      args: list of ArgInfo messages for the positional args.
+      request_type: name of the request type for this command.
+      client_method_path: path from the client object to the method
+          this command is wrapping.
+      has_upload: (boolean) whether the wrapped method accepts an
+          upload.
+      has_download: (boolean) whether the wrapped method supports
+          download.
+    """
+    name = messages.StringField(1)
+    class_name = messages.StringField(2)
+    description = messages.StringField(3)
+    flags = messages.MessageField(FlagInfo, 4, repeated=True)
+    args = messages.MessageField(ArgInfo, 5, repeated=True)
+    request_type = messages.StringField(6)
+    client_method_path = messages.StringField(7)
+    has_upload = messages.BooleanField(8, default=False)
+    has_download = messages.BooleanField(9, default=False)
+
+
+class CommandRegistry(object):
+
+    """Registry for CLI commands."""
+
+    def __init__(self, package, version, client_info, message_registry,
+                 root_package, base_files_package, protorpc_package, names):
+        """Create a command registry for the given client.
+
+        Args:
+          package: (str) generated package name; used to derive command
+              names and the api_endpoint flag's short name.
+          version: (str) API version string (used e.g. in the history
+              file name).
+          client_info: client metadata (package, base_url, client class
+              name) consulted when writing the CLI.
+          message_registry: registry used to look up message/enum
+              descriptors.
+          root_package: root package path (stored; usage not visible in
+              the methods shown here).
+          base_files_package: apitools base files package (stored; usage
+              not visible in the methods shown here).
+          protorpc_package: protorpc package path (stored; usage not
+              visible in the methods shown here).
+          names: provides ClassName(), used to name command classes.
+        """
+        self.__package = package
+        self.__version = version
+        self.__client_info = client_info
+        self.__names = names
+        self.__message_registry = message_registry
+        self.__root_package = root_package
+        self.__base_files_package = base_files_package
+        self.__protorpc_package = protorpc_package
+        self.__command_list = []
+        self.__global_flags = []
+
+    def Validate(self):
+        # Validation is delegated entirely to the message registry.
+        self.__message_registry.Validate()
+
+    def AddGlobalParameters(self, schema):
+        """Register one global flag per field of the given schema."""
+        for field in schema.fields:
+            self.__global_flags.append(self.__FlagInfoFromField(field, schema))
+
+    def AddCommandForMethod(self, service_name, method_name, method_info,
+                            request, _):
+        """Add the given method as a command.
+
+        Fields listed in method_info.ordered_params become positional
+        arguments; all remaining request fields become flags. Upload and
+        download support add extra "special" flags that do not
+        correspond to request attributes.
+        """
+        command_name = self.__GetCommandName(method_info.method_id)
+        calling_path = '%s.%s' % (service_name, method_name)
+        request_type = self.__message_registry.LookupDescriptor(request)
+        description = method_info.description
+        if not description:
+            description = 'Call the %s method.' % method_info.method_id
+        field_map = dict((f.name, f) for f in request_type.fields)
+        # Positional arguments, in the order the API declares them.
+        args = []
+        arg_names = []
+        for field_name in method_info.ordered_params:
+            extended_field = field_map[field_name]
+            name = extended_field.name
+            args.append(ArgInfo(
+                name=name,
+                description=extended_field.description,
+                conversion=self.__GetConversion(extended_field, request_type),
+            ))
+            arg_names.append(name)
+        # Remaining fields become flags, sorted by name for stable output.
+        flags = []
+        for extended_field in sorted(request_type.fields,
+                                     key=lambda x: x.name):
+            field = extended_field.field_descriptor
+            if extended_field.name in arg_names:
+                continue
+            if self.__FieldIsRequired(field):
+                # Required fields are expected in ordered_params; warn
+                # but still expose the field as a flag.
+                logging.warning(
+                    'Required field %s not in ordered_params for command %s',
+                    extended_field.name, command_name)
+            flags.append(self.__FlagInfoFromField(
+                extended_field, request_type, fv='fv'))
+        if method_info.upload_config:
+            # TODO(craigcitro): Consider adding additional flags to allow
+            # determining the filename from the object metadata.
+            upload_flag_info = FlagInfo(
+                name='upload_filename', type='string', default='',
+                description='Filename to use for upload.', fv='fv',
+                special=True)
+            flags.append(upload_flag_info)
+            mime_description = (
+                'MIME type to use for the upload. Only needed if '
+                'the extension on --upload_filename does not determine '
+                'the correct (or any) MIME type.')
+            mime_type_flag_info = FlagInfo(
+                name='upload_mime_type', type='string', default='',
+                description=mime_description, fv='fv', special=True)
+            flags.append(mime_type_flag_info)
+        if method_info.supports_download:
+            download_flag_info = FlagInfo(
+                name='download_filename', type='string', default='',
+                description='Filename to use for download.', fv='fv',
+                special=True)
+            flags.append(download_flag_info)
+            overwrite_description = (
+                'If True, overwrite the existing file when downloading.')
+            overwrite_flag_info = FlagInfo(
+                name='overwrite', type='boolean', default='False',
+                description=overwrite_description, fv='fv', special=True)
+            flags.append(overwrite_flag_info)
+        command_info = CommandInfo(
+            name=command_name,
+            class_name=self.__names.ClassName(command_name),
+            description=description,
+            flags=flags,
+            args=args,
+            request_type=request_type.full_name,
+            client_method_path=calling_path,
+            has_upload=bool(method_info.upload_config),
+            has_download=bool(method_info.supports_download)
+        )
+        self.__command_list.append(command_info)
+
+    def __LookupMessage(self, message, field):
+        message_type = self.__message_registry.LookupDescriptor(
+            '%s.%s' % (message.name, field.type_name))
+        if message_type is None:
+            message_type = self.__message_registry.LookupDescriptor(
+                field.type_name)
+        return message_type
+
+    def __GetCommandName(self, method_id):
+        command_name = method_id
+        prefix = '%s.' % self.__package
+        if command_name.startswith(prefix):
+            command_name = command_name[len(prefix):]
+        command_name = command_name.replace('.', '_')
+        return command_name
+
+    def __GetConversion(self, extended_field, extended_message):
+        """Returns a template for field type.
+
+        The template is a %-format string that converts a raw
+        flag/argument string into the field's type; '' means the raw
+        string is used as-is (callers only apply a non-empty template).
+        """
+        field = extended_field.field_descriptor
+
+        # Resolve the referenced type's name for message/enum fields.
+        type_name = ''
+        if field.variant in (messages.Variant.MESSAGE, messages.Variant.ENUM):
+            if field.type_name.startswith('apitools.base.protorpclite.'):
+                type_name = field.type_name
+            else:
+                field_message = self.__LookupMessage(extended_message, field)
+                if field_message is None:
+                    raise ValueError(
+                        'Could not find type for field %s' % field.name)
+                type_name = 'messages.%s' % field_message.full_name
+
+        template = ''
+        if field.variant in (messages.Variant.INT64, messages.Variant.UINT64):
+            # 64-bit ints are declared as string flags; convert back here.
+            template = 'int(%s)'
+        elif field.variant == messages.Variant.MESSAGE:
+            template = 'apitools_base.JsonToMessage(%s, %%s)' % type_name
+        elif field.variant == messages.Variant.ENUM:
+            template = '%s(%%s)' % type_name
+        elif field.variant == messages.Variant.STRING:
+            template = "%s.decode('utf8')"
+
+        # Repeated fields apply the scalar conversion elementwise.
+        if self.__FieldIsRepeated(extended_field.field_descriptor):
+            if template:
+                template = '[%s for x in %%s]' % (template % 'x')
+
+        return template
+
+    def __FieldIsRequired(self, field):
+        # True iff the descriptor labels the field REQUIRED.
+        return field.label == descriptor.FieldDescriptor.Label.REQUIRED
+
+    def __FieldIsRepeated(self, field):
+        # True iff the descriptor labels the field REPEATED.
+        return field.label == descriptor.FieldDescriptor.Label.REPEATED
+
+    def __FlagInfoFromField(self, extended_field, extended_message, fv=''):
+        """Creates FlagInfo object for given field."""
+        field = extended_field.field_descriptor
+        flag_info = FlagInfo()
+        flag_info.name = str(field.name)
+        # TODO(craigcitro): We should key by variant.
+        flag_info.type = _VARIANT_TO_FLAG_TYPE_MAP[field.variant]
+        flag_info.description = extended_field.description
+        if field.default_value:
+            # TODO(craigcitro): Formatting?
+            flag_info.default = field.default_value
+        if flag_info.type == 'enum':
+            # TODO(craigcitro): Does protorpc do this for us?
+            enum_type = self.__LookupMessage(extended_message, field)
+            if enum_type is None:
+                raise ValueError('Cannot find enum type %s', field.type_name)
+            flag_info.enum_values = [x.name for x in enum_type.values]
+            # Note that this choice is completely arbitrary -- but we only
+            # push the value through if the user specifies it, so this
+            # doesn't hurt anything.
+            if flag_info.default is None:
+                flag_info.default = flag_info.enum_values[0]
+        if self.__FieldIsRequired(field):
+            flag_info.required = True
+        flag_info.fv = fv
+        flag_info.conversion = self.__GetConversion(
+            extended_field, extended_message)
+        return flag_info
+
+    def __PrintFlagDeclarations(self, printer):
+        """Writes out command line flag declarations."""
+        package = self.__client_info.package
+        function_name = '_Declare%sFlags' % (package[0].upper() + package[1:])
+        printer()
+        printer()
+        printer('def %s():', function_name)
+        with printer.Indent():
+            printer('"""Declare global flags in an idempotent way."""')
+            # Emitted guard: skip redeclaration if the generated module
+            # is imported more than once.
+            printer("if 'api_endpoint' in flags.FLAGS:")
+            with printer.Indent():
+                printer('return')
+            printer('flags.DEFINE_string(')
+            with printer.Indent('    '):
+                printer("'api_endpoint',")
+                printer('%r,', self.__client_info.base_url)
+                printer("'URL of the API endpoint to use.',")
+                printer("short_name='%s_url')", self.__package)
+            printer('flags.DEFINE_string(')
+            with printer.Indent('    '):
+                printer("'history_file',")
+                printer('%r,', '~/.%s.%s.history' %
+                        (self.__package, self.__version))
+                printer("'File with interactive shell history.')")
+            printer('flags.DEFINE_multistring(')
+            with printer.Indent('    '):
+                printer("'add_header', [],")
+                printer("'Additional http headers (as key=value strings). '")
+                printer("'Can be specified multiple times.')")
+            printer('flags.DEFINE_string(')
+            with printer.Indent('    '):
+                printer("'service_account_json_keyfile', '',")
+                printer("'Filename for a JSON service account key downloaded'")
+                printer("' from the Developer Console.')")
+            # One flag per registered global API parameter.
+            for flag_info in self.__global_flags:
+                self.__PrintFlag(printer, flag_info)
+        printer()
+        printer()
+        printer('FLAGS = flags.FLAGS')
+        printer('apitools_base_cli.DeclareBaseFlags()')
+        printer('%s()', function_name)
+
+    def __PrintGetGlobalParams(self, printer):
+        """Writes out GetGlobalParamsFromFlags function."""
+        printer('def GetGlobalParamsFromFlags():')
+        with printer.Indent():
+            printer('"""Return a StandardQueryParameters based on flags."""')
+            printer('result = messages.StandardQueryParameters()')
+
+            for flag_info in self.__global_flags:
+                rhs = 'FLAGS.%s' % flag_info.name
+                if flag_info.conversion:
+                    rhs = flag_info.conversion % rhs
+                # Emitted code copies only flags the user explicitly set.
+                printer('if FLAGS[%r].present:', flag_info.name)
+                with printer.Indent():
+                    printer('result.%s = %s', flag_info.name, rhs)
+            printer('return result')
+        printer()
+        printer()
+
+    def __PrintGetClient(self, printer):
+        """Writes out GetClientFromFlags function."""
+        printer('def GetClientFromFlags():')
+        with printer.Indent():
+            printer('"""Return a client object, configured from flags."""')
+            printer('log_request = FLAGS.log_request or '
+                    'FLAGS.log_request_response')
+            printer('log_response = FLAGS.log_response or '
+                    'FLAGS.log_request_response')
+            printer('api_endpoint = apitools_base.NormalizeApiEndpoint('
+                    'FLAGS.api_endpoint)')
+            printer("additional_http_headers = dict(x.split('=', 1) for x in "
+                    "FLAGS.add_header)")
+            printer('credentials_args = {')
+            with printer.Indent('    '):
+                printer("'service_account_json_keyfile': os.path.expanduser("
+                        'FLAGS.service_account_json_keyfile)')
+            printer('}')
+            printer('try:')
+            with printer.Indent():
+                printer('client = client_lib.%s(',
+                        self.__client_info.client_class_name)
+                with printer.Indent(indent='    '):
+                    printer('api_endpoint, log_request=log_request,')
+                    printer('log_response=log_response,')
+                    printer('credentials_args=credentials_args,')
+                    printer('additional_http_headers=additional_http_headers)')
+            printer('except apitools_base.CredentialsError as e:')
+            with printer.Indent():
+                # NOTE(review): the emitted code uses a Python 2 print
+                # statement, so the generated CLI targets Python 2 here.
+                printer("print 'Error creating credentials: %%s' %% e")
+                printer('sys.exit(1)')
+            printer('return client')
+        printer()
+        printer()
+
+    def __PrintCommandDocstring(self, printer, command_info):
+        """Write the docstring for a generated command class."""
+        with printer.CommentContext():
+            # Wrap the description to the printer's width, then append
+            # indented Args and Flags sections.
+            for line in textwrap.wrap('"""%s' % command_info.description,
+                                      printer.CalculateWidth()):
+                printer(line)
+            extended_descriptor.PrintIndentedDescriptions(
+                printer, command_info.args, 'Args')
+            extended_descriptor.PrintIndentedDescriptions(
+                printer, command_info.flags, 'Flags')
+            printer('"""')
+
+    def __PrintFlag(self, printer, flag_info):
+        """Writes out given flag definition."""
+        printer('flags.DEFINE_%s(', flag_info.type)
+        with printer.Indent(indent='    '):
+            printer('%r,', flag_info.name)
+            printer('%r,', flag_info.default)
+            if flag_info.type == 'enum':
+                printer('%r,', flag_info.enum_values)
+
+            # TODO(craigcitro): Consider using 'drop_whitespace' elsewhere.
+            description_lines = textwrap.wrap(
+                flag_info.description, 75 - len(printer.indent),
+                drop_whitespace=False)
+            for line in description_lines[:-1]:
+                printer('%r', line)
+            last_line = description_lines[-1] if description_lines else ''
+            # Close the DEFINE_* call here unless a flag_values argument
+            # still needs to follow.
+            printer('%r%s', last_line, ',' if flag_info.fv else ')')
+            if flag_info.fv:
+                printer('flag_values=%s)', flag_info.fv)
+        if flag_info.required:
+            printer('flags.MarkFlagAsRequired(%r)', flag_info.name)
+
+    def __PrintPyShell(self, printer):
+        """Writes out PyShell class."""
+        printer('class PyShell(appcommands.Cmd):')
+        printer()
+        with printer.Indent():
+            printer('def Run(self, _):')
+            with printer.Indent():
+                printer(
+                    '"""Run an interactive python shell with the client."""')
+                printer('client = GetClientFromFlags()')
+                printer('params = GetGlobalParamsFromFlags()')
+                # Emitted code forwards explicitly-set global params to
+                # the client.
+                printer('for field in params.all_fields():')
+                with printer.Indent():
+                    printer('value = params.get_assigned_value(field.name)')
+                    printer('if value != field.default:')
+                    with printer.Indent():
+                        printer('client.AddGlobalParam(field.name, value)')
+                printer('banner = """')
+                printer('       == %s interactive console ==' % (
+                    self.__client_info.package))
+                printer('             client: a %s client' %
+                        self.__client_info.package)
+                printer('      apitools_base: base apitools module')
+                printer('     messages: the generated messages module')
+                printer('"""')
+                printer('local_vars = {')
+                with printer.Indent(indent='    '):
+                    printer("'apitools_base': apitools_base,")
+                    printer("'client': client,")
+                    printer("'client_lib': client_lib,")
+                    printer("'messages': messages,")
+                printer('}')
+                # Prefer a readline-enabled console where available.
+                printer("if platform.system() == 'Linux':")
+                with printer.Indent():
+                    printer('console = apitools_base_cli.ConsoleWithReadline(')
+                    with printer.Indent(indent='    '):
+                        printer('local_vars, histfile=FLAGS.history_file)')
+                printer('else:')
+                with printer.Indent():
+                    printer('console = code.InteractiveConsole(local_vars)')
+                printer('try:')
+                with printer.Indent():
+                    printer('console.interact(banner)')
+                printer('except SystemExit as e:')
+                with printer.Indent():
+                    printer('return e.code')
+        printer()
+        printer()
+
+    def WriteFile(self, printer):
+        """Write a simple CLI (currently just a stub)."""
+        printer('#!/usr/bin/env python')
+        printer('"""CLI for %s, version %s."""',
+                self.__package, self.__version)
+        printer('# NOTE: This file is autogenerated and should not be edited '
+                'by hand.')
+        # TODO(craigcitro): Add a build stamp, along with some other
+        # information.
+        printer()
+        printer('import code')
+        printer('import os')
+        printer('import platform')
+        printer('import sys')
+        printer()
+        printer('from %s import message_types', self.__protorpc_package)
+        printer('from %s import messages', self.__protorpc_package)
+        printer()
+        appcommands_import = 'from google.apputils import appcommands'
+        printer(appcommands_import)
+
+        flags_import = 'import gflags as flags'
+        printer(flags_import)
+        printer()
+        printer('import %s as apitools_base', self.__base_files_package)
+        printer('from %s import cli as apitools_base_cli',
+                self.__base_files_package)
+        import_prefix = ''
+        printer('%simport %s as client_lib',
+                import_prefix, self.__client_info.client_rule_name)
+        printer('%simport %s as messages',
+                import_prefix, self.__client_info.messages_rule_name)
+        self.__PrintFlagDeclarations(printer)
+        printer()
+        printer()
+        self.__PrintGetGlobalParams(printer)
+        self.__PrintGetClient(printer)
+        self.__PrintPyShell(printer)
+        self.__PrintCommands(printer)
+        printer('def main(_):')
+        with printer.Indent():
+            printer("appcommands.AddCmd('pyshell', PyShell)")
+            for command_info in self.__command_list:
+                printer("appcommands.AddCmd('%s', %s)",
+                        command_info.name, command_info.class_name)
+            printer()
+            printer('apitools_base_cli.SetupLogger()')
+            # TODO(craigcitro): Just call SetDefaultCommand as soon as
+            # another appcommands release happens and this exists
+            # externally.
+            printer("if hasattr(appcommands, 'SetDefaultCommand'):")
+            with printer.Indent():
+                printer("appcommands.SetDefaultCommand('pyshell')")
+        printer()
+        printer()
+        printer('run_main = apitools_base_cli.run_main')
+        printer()
+        printer("if __name__ == '__main__':")
+        with printer.Indent():
+            printer('appcommands.Run()')
+
+    def __PrintCommands(self, printer):
+        """Print all commands in this registry using printer."""
+        for command_info in self.__command_list:
+            arg_list = [arg_info.name for arg_info in command_info.args]
+            printer(
+                'class %s(apitools_base_cli.NewCmd):', command_info.class_name)
+            with printer.Indent():
+                printer('"""Command wrapping %s."""',
+                        command_info.client_method_path)
+                printer()
+                printer('usage = """%s%s%s"""',
+                        command_info.name,
+                        ' ' if arg_list else '',
+                        ' '.join('<%s>' % argname for argname in arg_list))
+                printer()
+                printer('def __init__(self, name, fv):')
+                with printer.Indent():
+                    printer('super(%s, self).__init__(name, fv)',
+                            command_info.class_name)
+                    for flag in command_info.flags:
+                        self.__PrintFlag(printer, flag)
+                printer()
+                printer('def RunWithArgs(%s):', ', '.join(['self'] + arg_list))
+                with printer.Indent():
+                    self.__PrintCommandDocstring(printer, command_info)
+                    printer('client = GetClientFromFlags()')
+                    printer('global_params = GetGlobalParamsFromFlags()')
+                    printer(
+                        'request = messages.%s(', command_info.request_type)
+                    with printer.Indent(indent='    '):
+                        for arg in command_info.args:
+                            rhs = arg.name
+                            if arg.conversion:
+                                rhs = arg.conversion % arg.name
+                            printer('%s=%s,', arg.name, rhs)
+                        printer(')')
+                    for flag_info in command_info.flags:
+                        if flag_info.special:
+                            continue
+                        rhs = 'FLAGS.%s' % flag_info.name
+                        if flag_info.conversion:
+                            rhs = flag_info.conversion % rhs
+                        printer('if FLAGS[%r].present:', flag_info.name)
+                        with printer.Indent():
+                            printer('request.%s = %s', flag_info.name, rhs)
+                    call_args = ['request', 'global_params=global_params']
+                    if command_info.has_upload:
+                        call_args.append('upload=upload')
+                        printer('upload = None')
+                        printer('if FLAGS.upload_filename:')
+                        with printer.Indent():
+                            printer('upload = apitools_base.Upload.FromFile(')
+                            printer('    FLAGS.upload_filename, '
+                                    'FLAGS.upload_mime_type,')
+                            printer('    progress_callback='
+                                    'apitools_base.UploadProgressPrinter,')
+                            printer('    finish_callback='
+                                    'apitools_base.UploadCompletePrinter)')
+                    if command_info.has_download:
+                        call_args.append('download=download')
+                        printer('download = None')
+                        printer('if FLAGS.download_filename:')
+                        with printer.Indent():
+                            printer('download = apitools_base.Download.'
+                                    'FromFile(FLAGS.download_filename, '
+                                    'overwrite=FLAGS.overwrite,')
+                            printer('    progress_callback='
+                                    'apitools_base.DownloadProgressPrinter,')
+                            printer('    finish_callback='
+                                    'apitools_base.DownloadCompletePrinter)')
+                    printer(
+                        'result = client.%s(', command_info.client_method_path)
+                    with printer.Indent(indent='    '):
+                        printer('%s)', ', '.join(call_args))
+                    printer('print apitools_base_cli.FormatOutput(result)')
+            printer()
+            printer()
diff --git a/apitools/gen/extended_descriptor.py b/apitools/gen/extended_descriptor.py
new file mode 100644
index 0000000..c5d9909
--- /dev/null
+++ b/apitools/gen/extended_descriptor.py
@@ -0,0 +1,552 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Extended protorpc descriptors.
+
+This takes existing protorpc Descriptor classes and adds extra
+properties not directly supported in proto itself, notably field and
+message descriptions. We need this in order to generate protorpc
+message files with comments.
+
+Note that for most of these classes, we can't simply wrap the existing
+message, since we need to change the type of the subfields. We could
+have a "plain" descriptor attached, but that seems like unnecessary
+bookkeeping. Where possible, we purposely reuse existing tag numbers;
+for new fields, we start numbering at 100.
+"""
+import abc
+import operator
+import textwrap
+
+import six
+
+from apitools.base.protorpclite import descriptor as protorpc_descriptor
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+import apitools.base.py as apitools_base
+
+
+class ExtendedEnumValueDescriptor(messages.Message):
+
+    """Enum value descriptor with additional fields.
+
+    Fields:
+      name: Name of enumeration value.
+      number: Number of enumeration value.
+      description: Description of this enum value.
+    """
+    name = messages.StringField(1)
+    number = messages.IntegerField(2, variant=messages.Variant.INT32)
+
+    description = messages.StringField(100)
+
+
+class ExtendedEnumDescriptor(messages.Message):
+
+    """Enum class descriptor with additional fields.
+
+    Fields:
+      name: Name of Enum without any qualification.
+      values: Values defined by Enum class.
+      description: Description of this enum class.
+      full_name: Fully qualified name of this enum class.
+      enum_mappings: Mappings from python to JSON names for enum values.
+    """
+
+    class JsonEnumMapping(messages.Message):
+
+        """Mapping from a python name to the wire name for an enum."""
+        python_name = messages.StringField(1)
+        json_name = messages.StringField(2)
+
+    name = messages.StringField(1)
+    values = messages.MessageField(
+        ExtendedEnumValueDescriptor, 2, repeated=True)
+
+    description = messages.StringField(100)
+    full_name = messages.StringField(101)
+    enum_mappings = messages.MessageField(
+        'JsonEnumMapping', 102, repeated=True)
+
+
+class ExtendedFieldDescriptor(messages.Message):
+
+    """Field descriptor with additional fields.
+
+    Fields:
+      field_descriptor: The underlying field descriptor.
+      name: The name of this field.
+      description: Description of this field.
+    """
+    field_descriptor = messages.MessageField(
+        protorpc_descriptor.FieldDescriptor, 100)
+    # We duplicate the names for easier bookkeeping.
+    name = messages.StringField(101)
+    description = messages.StringField(102)
+
+
+class ExtendedMessageDescriptor(messages.Message):
+
+    """Message descriptor with additional fields.
+
+    Fields:
+      name: Name of Message without any qualification.
+      fields: Fields defined for message.
+      message_types: Nested Message classes defined on message.
+      enum_types: Nested Enum classes defined on message.
+      description: Description of this message.
+      full_name: Full qualified name of this message.
+      decorators: Decorators to include in the definition when printing.
+          Printed in the given order from top to bottom (so the last entry
+          is the innermost decorator).
+      alias_for: This type is just an alias for the named type.
+      field_mappings: Mappings from python to json field names.
+    """
+
+    class JsonFieldMapping(messages.Message):
+
+        """Mapping from a python name to the wire name for a field."""
+        python_name = messages.StringField(1)
+        json_name = messages.StringField(2)
+
+    name = messages.StringField(1)
+    fields = messages.MessageField(ExtendedFieldDescriptor, 2, repeated=True)
+    message_types = messages.MessageField(
+        'extended_descriptor.ExtendedMessageDescriptor', 3, repeated=True)
+    enum_types = messages.MessageField(
+        ExtendedEnumDescriptor, 4, repeated=True)
+
+    description = messages.StringField(100)
+    full_name = messages.StringField(101)
+    decorators = messages.StringField(102, repeated=True)
+    alias_for = messages.StringField(103)
+    field_mappings = messages.MessageField(
+        'JsonFieldMapping', 104, repeated=True)
+
+
+class ExtendedFileDescriptor(messages.Message):
+
+    """File descriptor with additional fields.
+
+    Fields:
+      package: Fully qualified name of package that definitions belong to.
+      message_types: Message definitions contained in file.
+      enum_types: Enum definitions contained in file.
+      description: Description of this file.
+      additional_imports: Extra imports used in this package.
+    """
+    package = messages.StringField(2)
+
+    message_types = messages.MessageField(
+        ExtendedMessageDescriptor, 4, repeated=True)
+    enum_types = messages.MessageField(
+        ExtendedEnumDescriptor, 5, repeated=True)
+
+    description = messages.StringField(100)
+    additional_imports = messages.StringField(101, repeated=True)
+
+
+def _WriteFile(file_descriptor, package, version, proto_printer):
+    """Write the given extended file descriptor to the printer."""
+    proto_printer.PrintPreamble(package, version, file_descriptor)
+    _PrintEnums(proto_printer, file_descriptor.enum_types)
+    _PrintMessages(proto_printer, file_descriptor.message_types)
+    custom_json_mappings = _FetchCustomMappings(
+        file_descriptor.enum_types, file_descriptor.package)
+    custom_json_mappings.extend(
+        _FetchCustomMappings(
+            file_descriptor.message_types, file_descriptor.package))
+    for mapping in custom_json_mappings:
+        proto_printer.PrintCustomJsonMapping(mapping)
+
+
+def WriteMessagesFile(file_descriptor, package, version, printer):
+    """Write the given extended file descriptor to out as a message file."""
+    _WriteFile(file_descriptor, package, version,
+               _Proto2Printer(printer))
+
+
+def WritePythonFile(file_descriptor, package, version, printer):
+    """Write the given extended file descriptor to out."""
+    _WriteFile(file_descriptor, package, version,
+               _ProtoRpcPrinter(printer))
+
+
+def PrintIndentedDescriptions(printer, ls, name, prefix=''):
+    if ls:
+        with printer.Indent(indent=prefix):
+            with printer.CommentContext():
+                width = printer.CalculateWidth() - len(prefix)
+                printer()
+                printer(name + ':')
+                for x in ls:
+                    description = '%s: %s' % (x.name, x.description)
+                    for line in textwrap.wrap(description, width,
+                                              initial_indent='  ',
+                                              subsequent_indent='    '):
+                        printer(line)
+
+
+def _FetchCustomMappings(descriptor_ls, package):
+    """Find and return all custom mappings for descriptors in descriptor_ls."""
+    custom_mappings = []
+    for descriptor in descriptor_ls:
+        if isinstance(descriptor, ExtendedEnumDescriptor):
+            custom_mappings.extend(
+                _FormatCustomJsonMapping('Enum', m, descriptor, package)
+                for m in descriptor.enum_mappings)
+        elif isinstance(descriptor, ExtendedMessageDescriptor):
+            custom_mappings.extend(
+                _FormatCustomJsonMapping('Field', m, descriptor, package)
+                for m in descriptor.field_mappings)
+            custom_mappings.extend(
+                _FetchCustomMappings(descriptor.enum_types, package))
+            custom_mappings.extend(
+                _FetchCustomMappings(descriptor.message_types, package))
+    return custom_mappings
+
+
+def _FormatCustomJsonMapping(mapping_type, mapping, descriptor, package):
+    return '\n'.join((
+        'encoding.AddCustomJson%sMapping(' % mapping_type,
+        "    %s, '%s', '%s'," % (descriptor.full_name, mapping.python_name,
+                                 mapping.json_name),
+        '    package=%r)' % package,
+    ))
+
+
+def _EmptyMessage(message_type):
+    return not any((message_type.enum_types,
+                    message_type.message_types,
+                    message_type.fields))
+
+
+class ProtoPrinter(six.with_metaclass(abc.ABCMeta, object)):
+
+    """Interface for proto printers."""
+
+    @abc.abstractmethod
+    def PrintPreamble(self, package, version, file_descriptor):
+        """Print the file docstring and import lines."""
+
+    @abc.abstractmethod
+    def PrintEnum(self, enum_type):
+        """Print the given enum declaration."""
+
+    @abc.abstractmethod
+    def PrintMessage(self, message_type):
+        """Print the given message declaration."""
+
+
+class _Proto2Printer(ProtoPrinter):
+
+    """Printer for proto2 definitions."""
+
+    def __init__(self, printer):
+        self.__printer = printer
+
+    def __PrintEnumCommentLines(self, enum_type):
+        description = enum_type.description or '%s enum type.' % enum_type.name
+        for line in textwrap.wrap(description,
+                                  self.__printer.CalculateWidth() - 3):
+            self.__printer('// %s', line)
+        PrintIndentedDescriptions(self.__printer, enum_type.values, 'Values',
+                                  prefix='// ')
+
+    def __PrintEnumValueCommentLines(self, enum_value):
+        if enum_value.description:
+            width = self.__printer.CalculateWidth() - 3
+            for line in textwrap.wrap(enum_value.description, width):
+                self.__printer('// %s', line)
+
+    def PrintEnum(self, enum_type):
+        self.__PrintEnumCommentLines(enum_type)
+        self.__printer('enum %s {', enum_type.name)
+        with self.__printer.Indent():
+            enum_values = sorted(
+                enum_type.values, key=operator.attrgetter('number'))
+            for enum_value in enum_values:
+                self.__printer()
+                self.__PrintEnumValueCommentLines(enum_value)
+                self.__printer('%s = %s;', enum_value.name, enum_value.number)
+        self.__printer('}')
+        self.__printer()
+
+    def PrintPreamble(self, package, version, file_descriptor):
+        self.__printer('// Generated message classes for %s version %s.',
+                       package, version)
+        self.__printer('// NOTE: This file is autogenerated and should not be '
+                       'edited by hand.')
+        description_lines = textwrap.wrap(file_descriptor.description, 75)
+        if description_lines:
+            self.__printer('//')
+            for line in description_lines:
+                self.__printer('// %s', line)
+        self.__printer()
+        self.__printer('syntax = "proto2";')
+        self.__printer('package %s;', file_descriptor.package)
+
+    def __PrintMessageCommentLines(self, message_type):
+        """Print the description of this message."""
+        description = message_type.description or '%s message type.' % (
+            message_type.name)
+        width = self.__printer.CalculateWidth() - 3
+        for line in textwrap.wrap(description, width):
+            self.__printer('// %s', line)
+        PrintIndentedDescriptions(self.__printer, message_type.enum_types,
+                                  'Enums', prefix='// ')
+        PrintIndentedDescriptions(self.__printer, message_type.message_types,
+                                  'Messages', prefix='// ')
+        PrintIndentedDescriptions(self.__printer, message_type.fields,
+                                  'Fields', prefix='// ')
+
+    def __PrintFieldDescription(self, description):
+        for line in textwrap.wrap(description,
+                                  self.__printer.CalculateWidth() - 3):
+            self.__printer('// %s', line)
+
+    def __PrintFields(self, fields):
+        for extended_field in fields:
+            field = extended_field.field_descriptor
+            field_type = messages.Field.lookup_field_type_by_variant(
+                field.variant)
+            self.__printer()
+            self.__PrintFieldDescription(extended_field.description)
+            label = str(field.label).lower()
+            if field_type in (messages.EnumField, messages.MessageField):
+                proto_type = field.type_name
+            else:
+                proto_type = str(field.variant).lower()
+            default_statement = ''
+            if field.default_value:
+                if field_type in [messages.BytesField, messages.StringField]:
+                    default_value = '"%s"' % field.default_value
+                elif field_type is messages.BooleanField:
+                    default_value = str(field.default_value).lower()
+                else:
+                    default_value = str(field.default_value)
+
+                default_statement = ' [default = %s]' % default_value
+            self.__printer(
+                '%s %s %s = %d%s;',
+                label, proto_type, field.name, field.number, default_statement)
+
+    def PrintMessage(self, message_type):
+        self.__printer()
+        self.__PrintMessageCommentLines(message_type)
+        if _EmptyMessage(message_type):
+            self.__printer('message %s {}', message_type.name)
+            return
+        self.__printer('message %s {', message_type.name)
+        with self.__printer.Indent():
+            _PrintEnums(self, message_type.enum_types)
+            _PrintMessages(self, message_type.message_types)
+            self.__PrintFields(message_type.fields)
+        self.__printer('}')
+
+    def PrintCustomJsonMapping(self, mapping_lines):
+        raise NotImplementedError(
+            'Custom JSON encoding not supported for proto2')
+
+
+class _ProtoRpcPrinter(ProtoPrinter):
+
+    """Printer for ProtoRPC definitions."""
+
+    def __init__(self, printer):
+        self.__printer = printer
+
+    def __PrintClassSeparator(self):
+        self.__printer()
+        if not self.__printer.indent:
+            self.__printer()
+
+    def __PrintEnumDocstringLines(self, enum_type):
+        description = enum_type.description or '%s enum type.' % enum_type.name
+        for line in textwrap.wrap('"""%s' % description,
+                                  self.__printer.CalculateWidth()):
+            self.__printer(line)
+        PrintIndentedDescriptions(self.__printer, enum_type.values, 'Values')
+        self.__printer('"""')
+
+    def PrintEnum(self, enum_type):
+        self.__printer('class %s(_messages.Enum):', enum_type.name)
+        with self.__printer.Indent():
+            self.__PrintEnumDocstringLines(enum_type)
+            enum_values = sorted(
+                enum_type.values, key=operator.attrgetter('number'))
+            for enum_value in enum_values:
+                self.__printer('%s = %s', enum_value.name, enum_value.number)
+            if not enum_type.values:
+                self.__printer('pass')
+        self.__PrintClassSeparator()
+
+    def __PrintAdditionalImports(self, imports):
+        """Print additional imports needed for protorpc."""
+        google_imports = [x for x in imports if 'google' in x]
+        other_imports = [x for x in imports if 'google' not in x]
+        if other_imports:
+            for import_ in sorted(other_imports):
+                self.__printer(import_)
+            self.__printer()
+        # Note: If we ever were going to add imports from this package, we'd
+        # need to sort those out and put them at the end.
+        if google_imports:
+            for import_ in sorted(google_imports):
+                self.__printer(import_)
+            self.__printer()
+
+    def PrintPreamble(self, package, version, file_descriptor):
+        self.__printer('"""Generated message classes for %s version %s.',
+                       package, version)
+        self.__printer()
+        for line in textwrap.wrap(file_descriptor.description, 78):
+            self.__printer(line)
+        self.__printer('"""')
+        self.__printer('# NOTE: This file is autogenerated and should not be '
+                       'edited by hand.')
+        self.__printer()
+        self.__PrintAdditionalImports(file_descriptor.additional_imports)
+        self.__printer()
+        self.__printer("package = '%s'", file_descriptor.package)
+        self.__printer()
+        self.__printer()
+
+    def __PrintMessageDocstringLines(self, message_type):
+        """Print the docstring for this message."""
+        description = message_type.description or '%s message type.' % (
+            message_type.name)
+        short_description = (
+            _EmptyMessage(message_type) and
+            len(description) < (self.__printer.CalculateWidth() - 6))
+        with self.__printer.CommentContext():
+            if short_description:
+                # Note that we use explicit string interpolation here since
+                # we're in comment context.
+                self.__printer('"""%s"""' % description)
+                return
+            for line in textwrap.wrap('"""%s' % description,
+                                      self.__printer.CalculateWidth()):
+                self.__printer(line)
+
+            PrintIndentedDescriptions(self.__printer, message_type.enum_types,
+                                      'Enums')
+            PrintIndentedDescriptions(
+                self.__printer, message_type.message_types, 'Messages')
+            PrintIndentedDescriptions(
+                self.__printer, message_type.fields, 'Fields')
+            self.__printer('"""')
+            self.__printer()
+
+    def PrintMessage(self, message_type):
+        if message_type.alias_for:
+            self.__printer(
+                '%s = %s', message_type.name, message_type.alias_for)
+            self.__PrintClassSeparator()
+            return
+        for decorator in message_type.decorators:
+            self.__printer('@%s', decorator)
+        self.__printer('class %s(_messages.Message):', message_type.name)
+        with self.__printer.Indent():
+            self.__PrintMessageDocstringLines(message_type)
+            _PrintEnums(self, message_type.enum_types)
+            _PrintMessages(self, message_type.message_types)
+            _PrintFields(message_type.fields, self.__printer)
+        self.__PrintClassSeparator()
+
+    def PrintCustomJsonMapping(self, mapping):
+        self.__printer(mapping)
+
+
+def _PrintEnums(proto_printer, enum_types):
+    """Print all enums to the given proto_printer."""
+    enum_types = sorted(enum_types, key=operator.attrgetter('name'))
+    for enum_type in enum_types:
+        proto_printer.PrintEnum(enum_type)
+
+
+def _PrintMessages(proto_printer, message_list):
+    message_list = sorted(message_list, key=operator.attrgetter('name'))
+    for message_type in message_list:
+        proto_printer.PrintMessage(message_type)
+
+
+_MESSAGE_FIELD_MAP = {
+    message_types.DateTimeMessage.definition_name(): (
+        message_types.DateTimeField),
+}
+
+
+def _PrintFields(fields, printer):
+    for extended_field in fields:
+        field = extended_field.field_descriptor
+        printed_field_info = {
+            'name': field.name,
+            'module': '_messages',
+            'type_name': '',
+            'type_format': '',
+            'number': field.number,
+            'label_format': '',
+            'variant_format': '',
+            'default_format': '',
+        }
+
+        message_field = _MESSAGE_FIELD_MAP.get(field.type_name)
+        if message_field:
+            printed_field_info['module'] = '_message_types'
+            field_type = message_field
+        elif field.type_name == 'extra_types.DateField':
+            printed_field_info['module'] = 'extra_types'
+            field_type = apitools_base.DateField
+        else:
+            field_type = messages.Field.lookup_field_type_by_variant(
+                field.variant)
+
+        if field_type in (messages.EnumField, messages.MessageField):
+            printed_field_info['type_format'] = "'%s', " % field.type_name
+
+        if field.label == protorpc_descriptor.FieldDescriptor.Label.REQUIRED:
+            printed_field_info['label_format'] = ', required=True'
+        elif field.label == protorpc_descriptor.FieldDescriptor.Label.REPEATED:
+            printed_field_info['label_format'] = ', repeated=True'
+
+        if field_type.DEFAULT_VARIANT != field.variant:
+            printed_field_info['variant_format'] = (
+                ', variant=_messages.Variant.%s' % field.variant)
+
+        if field.default_value:
+            if field_type in [messages.BytesField, messages.StringField]:
+                default_value = repr(field.default_value)
+            elif field_type is messages.EnumField:
+                try:
+                    default_value = str(int(field.default_value))
+                except ValueError:
+                    default_value = repr(field.default_value)
+            else:
+                default_value = field.default_value
+
+            printed_field_info[
+                'default_format'] = ', default=%s' % (default_value,)
+
+        printed_field_info['type_name'] = field_type.__name__
+        args = ''.join('%%(%s)s' % field for field in (
+            'type_format',
+            'number',
+            'label_format',
+            'variant_format',
+            'default_format'))
+        format_str = '%%(name)s = %%(module)s.%%(type_name)s(%s)' % args
+        printer(format_str % printed_field_info)
diff --git a/apitools/gen/gen_client.py b/apitools/gen/gen_client.py
new file mode 100644
index 0000000..c36fbc4
--- /dev/null
+++ b/apitools/gen/gen_client.py
@@ -0,0 +1,350 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Command-line interface to gen_client."""
+
+import argparse
+import contextlib
+import json
+import logging
+import os
+import pkgutil
+import sys
+
+from apitools.base.py import exceptions
+from apitools.gen import gen_client_lib
+from apitools.gen import util
+
+
+def _CopyLocalFile(filename):
+    with contextlib.closing(open(filename, 'w')) as out:
+        src_data = pkgutil.get_data(
+            'apitools.base.py', filename)
+        if src_data is None:
+            raise exceptions.GeneratedClientError(
+                'Could not find file %s' % filename)
+        out.write(src_data)
+
+
+def _GetDiscoveryDocFromFlags(args):
+    """Get the discovery doc from flags."""
+    if args.discovery_url:
+        try:
+            return util.FetchDiscoveryDoc(args.discovery_url)
+        except exceptions.CommunicationError:
+            raise exceptions.GeneratedClientError(
+                'Could not fetch discovery doc')
+
+    infile = os.path.expanduser(args.infile) or '/dev/stdin'
+    with open(infile) as f:
+        return json.load(f)
+
+
+def _GetCodegenFromFlags(args):
+    """Create a codegen object from flags."""
+    discovery_doc = _GetDiscoveryDocFromFlags(args)
+    names = util.Names(
+        args.strip_prefix,
+        args.experimental_name_convention,
+        args.experimental_capitalize_enums)
+
+    if args.client_json:
+        try:
+            with open(args.client_json) as client_json:
+                f = json.loads(client_json.read())
+                web = f.get('installed', f.get('web', {}))
+                client_id = web.get('client_id')
+                client_secret = web.get('client_secret')
+        except IOError:
+            raise exceptions.NotFoundError(
+                'Failed to open client json file: %s' % args.client_json)
+    else:
+        client_id = args.client_id
+        client_secret = args.client_secret
+
+    if not client_id:
+        logging.warning('No client ID supplied')
+        client_id = ''
+
+    if not client_secret:
+        logging.warning('No client secret supplied')
+        client_secret = ''
+
+    client_info = util.ClientInfo.Create(
+        discovery_doc, args.scope, client_id, client_secret,
+        args.user_agent, names, args.api_key)
+    outdir = os.path.expanduser(args.outdir) or client_info.default_directory
+    if os.path.exists(outdir) and not args.overwrite:
+        raise exceptions.ConfigurationValueError(
+            'Output directory exists, pass --overwrite to replace '
+            'the existing files.')
+    if not os.path.exists(outdir):
+        os.makedirs(outdir)
+
+    return gen_client_lib.DescriptorGenerator(
+        discovery_doc, client_info, names, args.root_package, outdir,
+        base_package=args.base_package,
+        protorpc_package=args.protorpc_package,
+        generate_cli=args.generate_cli,
+        init_wildcards_file=(args.init_file == 'wildcards'),
+        use_proto2=args.experimental_proto2_output,
+        unelidable_request_methods=args.unelidable_request_methods,
+        apitools_version=args.apitools_version)
+
+
+# TODO(craigcitro): Delete this if we don't need this functionality.
+def _WriteBaseFiles(codegen):
+    with util.Chdir(codegen.outdir):
+        _CopyLocalFile('app2.py')
+        _CopyLocalFile('base_api.py')
+        _CopyLocalFile('base_cli.py')
+        _CopyLocalFile('credentials_lib.py')
+        _CopyLocalFile('exceptions.py')
+
+
+def _WriteIntermediateInit(codegen):
+    with open('__init__.py', 'w') as out:
+        codegen.WriteIntermediateInit(out)
+
+
+def _WriteProtoFiles(codegen):
+    with util.Chdir(codegen.outdir):
+        with open(codegen.client_info.messages_proto_file_name, 'w') as out:
+            codegen.WriteMessagesProtoFile(out)
+        with open(codegen.client_info.services_proto_file_name, 'w') as out:
+            codegen.WriteServicesProtoFile(out)
+
+
+def _WriteGeneratedFiles(args, codegen):
+    if codegen.use_proto2:
+        _WriteProtoFiles(codegen)
+    with util.Chdir(codegen.outdir):
+        with open(codegen.client_info.messages_file_name, 'w') as out:
+            codegen.WriteMessagesFile(out)
+        with open(codegen.client_info.client_file_name, 'w') as out:
+            codegen.WriteClientLibrary(out)
+        if args.generate_cli:
+            with open(codegen.client_info.cli_file_name, 'w') as out:
+                codegen.WriteCli(out)
+            os.chmod(codegen.client_info.cli_file_name, 0o755)
+
+
+def _WriteInit(codegen):
+    with util.Chdir(codegen.outdir):
+        with open('__init__.py', 'w') as out:
+            codegen.WriteInit(out)
+
+
+def _WriteSetupPy(codegen):
+    with open('setup.py', 'w') as out:
+        codegen.WriteSetupPy(out)
+
+
+def GenerateClient(args):
+
+    """Driver for client code generation."""
+
+    codegen = _GetCodegenFromFlags(args)
+    if codegen is None:
+        logging.error('Failed to create codegen, exiting.')
+        return 128
+    _WriteGeneratedFiles(args, codegen)
+    if args.init_file != 'none':
+        _WriteInit(codegen)
+
+
+def GeneratePipPackage(args):
+
+    """Generate a client as a pip-installable tarball."""
+
+    discovery_doc = _GetDiscoveryDocFromFlags(args)
+    package = discovery_doc['name']
+    original_outdir = os.path.expanduser(args.outdir)
+    args.outdir = os.path.join(
+        args.outdir, 'apitools/clients/%s' % package)
+    args.root_package = 'apitools.clients.%s' % package
+    args.generate_cli = False
+    codegen = _GetCodegenFromFlags(args)
+    if codegen is None:
+        logging.error('Failed to create codegen, exiting.')
+        return 1
+    _WriteGeneratedFiles(args, codegen)
+    _WriteInit(codegen)
+    with util.Chdir(original_outdir):
+        _WriteSetupPy(codegen)
+        with util.Chdir('apitools'):
+            _WriteIntermediateInit(codegen)
+            with util.Chdir('clients'):
+                _WriteIntermediateInit(codegen)
+
+
+def GenerateProto(args):
+    """Generate just the two proto files for a given API."""
+
+    codegen = _GetCodegenFromFlags(args)
+    _WriteProtoFiles(codegen)
+
+
+class _SplitCommaSeparatedList(argparse.Action):
+
+    def __call__(self, parser, namespace, values, option_string=None):
+        setattr(namespace, self.dest, values.split(','))
+
+
+def main(argv=None):
+    if argv is None:
+        argv = sys.argv
+    parser = argparse.ArgumentParser(
+        description='Apitools Client Code Generator')
+
+    discovery_group = parser.add_mutually_exclusive_group()
+    discovery_group.add_argument(
+        '--infile',
+        help=('Filename for the discovery document. Mutually exclusive with '
+              '--discovery_url'))
+
+    discovery_group.add_argument(
+        '--discovery_url',
+        help=('URL (or "name.version") of the discovery document to use. '
+              'Mutually exclusive with --infile.'))
+
+    parser.add_argument(
+        '--base_package',
+        default='apitools.base.py',
+        help='Base package path of apitools (defaults to apitools.base.py)')
+
+    parser.add_argument(
+        '--protorpc_package',
+        default='apitools.base.protorpclite',
+        help=('Base package path of protorpc '
+              '(defaults to apitools.base.protorpclite)'))
+
+    parser.add_argument(
+        '--outdir',
+        default='',
+        help='Directory name for output files. (Defaults to the API name.)')
+
+    parser.add_argument(
+        '--overwrite',
+        default=False, action='store_true',
+        help='Only overwrite the output directory if this flag is specified.')
+
+    parser.add_argument(
+        '--root_package',
+        default='',
+        help=('Python import path for where these modules '
+              'should be imported from.'))
+
+    parser.add_argument(
+        '--strip_prefix', nargs='*',
+        default=[],
+        help=('Prefix to strip from type names in the discovery document. '
+              '(May be specified multiple times.)'))
+
+    parser.add_argument(
+        '--api_key',
+        help=('API key to use for API access.'))
+
+    parser.add_argument(
+        '--client_json',
+        help=('Use the given file downloaded from the dev. console for '
+              'client_id and client_secret.'))
+
+    parser.add_argument(
+        '--client_id',
+        default='1042881264118.apps.googleusercontent.com',
+        help='Client ID to use for the generated client.')
+
+    parser.add_argument(
+        '--client_secret',
+        default='x_Tw5K8nnjoRAqULM9PFAC2b',
+        help='Client secret for the generated client.')
+
+    parser.add_argument(
+        '--scope', nargs='*',
+        default=[],
+        help=('Scopes to request in the generated client. '
+              'May be specified more than once.'))
+
+    parser.add_argument(
+        '--user_agent',
+        default='x_Tw5K8nnjoRAqULM9PFAC2b',
+        help=('User agent for the generated client. '
+              'Defaults to <api>-generated/0.1.'))
+
+    parser.add_argument(
+        '--generate_cli', dest='generate_cli', action='store_true',
+        help='If specified (default), a CLI is also generated.')
+    parser.add_argument(
+        '--nogenerate_cli', dest='generate_cli', action='store_false',
+        help='CLI will not be generated.')
+    parser.set_defaults(generate_cli=True)
+
+    parser.add_argument(
+        '--init-file',
+        choices=['none', 'empty', 'wildcards'],
+        type=lambda s: s.lower(),
+        default='wildcards',
+        help='Controls whether and how to generate package __init__.py file.')
+
+    parser.add_argument(
+        '--unelidable_request_methods',
+        action=_SplitCommaSeparatedList,
+        default=[],
+        help=('Full method IDs of methods for which we should NOT try to '
+              'elide the request type. (Should be a comma-separated list.)'))
+
+    parser.add_argument(
+        '--apitools_version',
+        default='', dest='apitools_version',
+        help=('Apitools version used as a requirement in generated clients. '
+              'Defaults to version of apitools used to generate the clients.'))
+
+    parser.add_argument(
+        '--experimental_capitalize_enums',
+        default=False, action='store_true',
+        help='Dangerous: attempt to rewrite enum values to be uppercase.')
+
+    parser.add_argument(
+        '--experimental_name_convention',
+        choices=util.Names.NAME_CONVENTIONS,
+        default=util.Names.DEFAULT_NAME_CONVENTION,
+        help='Dangerous: use a particular style for generated names.')
+
+    parser.add_argument(
+        '--experimental_proto2_output',
+        default=False, action='store_true',
+        help='Dangerous: also output a proto2 message file.')
+
+    subparsers = parser.add_subparsers(help='Type of generated code')
+
+    client_parser = subparsers.add_parser(
+        'client', help='Generate apitools client in destination folder')
+    client_parser.set_defaults(func=GenerateClient)
+
+    pip_package_parser = subparsers.add_parser(
+        'pip_package', help='Generate apitools client pip package')
+    pip_package_parser.set_defaults(func=GeneratePipPackage)
+
+    proto_parser = subparsers.add_parser(
+        'proto', help='Generate apitools client protos')
+    proto_parser.set_defaults(func=GenerateProto)
+
+    args = parser.parse_args(argv[1:])
+    return args.func(args) or 0
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/apitools/gen/gen_client_lib.py b/apitools/gen/gen_client_lib.py
new file mode 100644
index 0000000..b910f0f
--- /dev/null
+++ b/apitools/gen/gen_client_lib.py
@@ -0,0 +1,286 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Simple tool for generating a client library.
+
+Relevant links:
+  https://developers.google.com/discovery/v1/reference/apis#resource
+"""
+
+import datetime
+
+from apitools.gen import command_registry
+from apitools.gen import message_registry
+from apitools.gen import service_registry
+from apitools.gen import util
+
+
+def _ApitoolsVersion():
+    """Returns version of the currently installed google-apitools package."""
+    try:
+        import pkg_resources
+    except ImportError:
+        return 'X.X.X'
+    try:
+        return pkg_resources.get_distribution('google-apitools').version
+    except pkg_resources.DistributionNotFound:
+        return 'X.X.X'
+
+
+def _StandardQueryParametersSchema(discovery_doc):
+    """Sets up dict of standard query parameters."""
+    standard_query_schema = {
+        'id': 'StandardQueryParameters',
+        'type': 'object',
+        'description': 'Query parameters accepted by all methods.',
+        'properties': discovery_doc.get('parameters', {}),
+    }
+    # We add an entry for the trace, since Discovery doesn't.
+    standard_query_schema['properties']['trace'] = {
+        'type': 'string',
+        'description': ('A tracing token of the form "token:<tokenid>" '
+                        'to include in api requests.'),
+        'location': 'query',
+    }
+    return standard_query_schema
+
+
+class DescriptorGenerator(object):
+
+    """Code generator for a given discovery document."""
+
+    def __init__(self, discovery_doc, client_info, names, root_package, outdir,
+                 base_package, protorpc_package, generate_cli=False,
+                 init_wildcards_file=True,
+                 use_proto2=False, unelidable_request_methods=None,
+                 apitools_version=''):
+        self.__discovery_doc = discovery_doc
+        self.__client_info = client_info
+        self.__outdir = outdir
+        self.__use_proto2 = use_proto2
+        self.__description = util.CleanDescription(
+            self.__discovery_doc.get('description', ''))
+        self.__package = self.__client_info.package
+        self.__version = self.__client_info.version
+        self.__revision = discovery_doc.get('revision', '1')
+        self.__generate_cli = generate_cli
+        self.__init_wildcards_file = init_wildcards_file
+        self.__root_package = root_package
+        self.__base_files_package = base_package
+        self.__protorpc_package = protorpc_package
+        self.__names = names
+
+        # Order is important here: we need the schemas before we can
+        # define the services.
+        self.__message_registry = message_registry.MessageRegistry(
+            self.__client_info, self.__names, self.__description,
+            self.__root_package, self.__base_files_package,
+            self.__protorpc_package)
+        schemas = self.__discovery_doc.get('schemas', {})
+        for schema_name, schema in sorted(schemas.items()):
+            self.__message_registry.AddDescriptorFromSchema(
+                schema_name, schema)
+
+        # We need to add one more message type for the global parameters.
+        standard_query_schema = _StandardQueryParametersSchema(
+            self.__discovery_doc)
+        self.__message_registry.AddDescriptorFromSchema(
+            standard_query_schema['id'], standard_query_schema)
+
+        # Now that we know all the messages, we need to correct some
+        # fields from MessageFields to EnumFields.
+        self.__message_registry.FixupMessageFields()
+
+        self.__command_registry = command_registry.CommandRegistry(
+            self.__package, self.__version, self.__client_info,
+            self.__message_registry, self.__root_package,
+            self.__base_files_package, self.__protorpc_package,
+            self.__names)
+        self.__command_registry.AddGlobalParameters(
+            self.__message_registry.LookupDescriptorOrDie(
+                'StandardQueryParameters'))
+
+        self.__services_registry = service_registry.ServiceRegistry(
+            self.__client_info,
+            self.__message_registry,
+            self.__command_registry,
+            self.__names,
+            self.__root_package,
+            self.__base_files_package,
+            unelidable_request_methods or [])
+        services = self.__discovery_doc.get('resources', {})
+        for service_name, methods in sorted(services.items()):
+            self.__services_registry.AddServiceFromResource(
+                service_name, methods)
+        # We might also have top-level methods.
+        api_methods = self.__discovery_doc.get('methods', [])
+        if api_methods:
+            self.__services_registry.AddServiceFromResource(
+                'api', {'methods': api_methods})
+        # pylint: disable=protected-access
+        self.__client_info = self.__client_info._replace(
+            scopes=self.__services_registry.scopes)
+
+        # The apitools version that will be used in prerequisites for the
+        # generated packages.
+        self.__apitools_version = (
+            apitools_version if apitools_version else _ApitoolsVersion())
+
+    @property
+    def client_info(self):
+        return self.__client_info
+
+    @property
+    def discovery_doc(self):
+        return self.__discovery_doc
+
+    @property
+    def names(self):
+        return self.__names
+
+    @property
+    def outdir(self):
+        return self.__outdir
+
+    @property
+    def package(self):
+        return self.__package
+
+    @property
+    def use_proto2(self):
+        return self.__use_proto2
+
+    @property
+    def apitools_version(self):
+        return self.__apitools_version
+
+    def _GetPrinter(self, out):
+        printer = util.SimplePrettyPrinter(out)
+        return printer
+
+    def WriteInit(self, out):
+        """Write a simple __init__.py for the generated client."""
+        printer = self._GetPrinter(out)
+        if self.__init_wildcards_file:
+            printer('"""Common imports for generated %s client library."""',
+                    self.__client_info.package)
+            printer('# pylint:disable=wildcard-import')
+        else:
+            printer('"""Package marker file."""')
+        printer()
+        printer('import pkgutil')
+        printer()
+        if self.__init_wildcards_file:
+            printer('from %s import *', self.__base_files_package)
+            if self.__root_package == '.':
+                import_prefix = ''
+            else:
+                import_prefix = '%s.' % self.__root_package
+            if self.__generate_cli:
+                printer('from %s%s import *',
+                        import_prefix, self.__client_info.cli_rule_name)
+            printer('from %s%s import *',
+                    import_prefix, self.__client_info.client_rule_name)
+            printer('from %s%s import *',
+                    import_prefix, self.__client_info.messages_rule_name)
+            printer()
+        printer('__path__ = pkgutil.extend_path(__path__, __name__)')
+
+    def WriteIntermediateInit(self, out):
+        """Write a simple __init__.py for an intermediate directory."""
+        printer = self._GetPrinter(out)
+        printer('#!/usr/bin/env python')
+        printer('"""Shared __init__.py for apitools."""')
+        printer()
+        printer('from pkgutil import extend_path')
+        printer('__path__ = extend_path(__path__, __name__)')
+
+    def WriteSetupPy(self, out):
+        """Write a setup.py for upload to PyPI."""
+        printer = self._GetPrinter(out)
+        year = datetime.datetime.now().year
+        printer('# Copyright %s Google Inc. All Rights Reserved.' % year)
+        printer('#')
+        printer('# Licensed under the Apache License, Version 2.0 (the '
+                '"License");')
+        printer('# you may not use this file except in compliance with '
+                'the License.')
+        printer('# You may obtain a copy of the License at')
+        printer('#')
+        printer('#   http://www.apache.org/licenses/LICENSE-2.0')
+        printer('#')
+        printer('# Unless required by applicable law or agreed to in writing, '
+                'software')
+        printer('# distributed under the License is distributed on an "AS IS" '
+                'BASIS,')
+        printer('# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either '
+                'express or implied.')
+        printer('# See the License for the specific language governing '
+                'permissions and')
+        printer('# limitations under the License.')
+        printer()
+        printer('import setuptools')
+        printer('REQUIREMENTS = [')
+        with printer.Indent(indent='    '):
+            parts = self.apitools_version.split('.')
+            major = parts.pop(0)
+            minor = parts.pop(0)
+            printer('"google-apitools>=%s,~=%s.%s",',
+                    self.apitools_version, major, minor)
+            printer('"httplib2>=0.9",')
+            printer('"oauth2client>=1.4.12",')
+        printer(']')
+        printer('_PACKAGE = "apitools.clients.%s"' % self.__package)
+        printer()
+        printer('setuptools.setup(')
+        # TODO(craigcitro): Allow customization of these options.
+        with printer.Indent(indent='    '):
+            printer('name="google-apitools-%s-%s",',
+                    self.__package, self.__version)
+            printer('version="%s.%s",',
+                    self.apitools_version, self.__revision)
+            printer('description="Autogenerated apitools library for %s",' % (
+                self.__package,))
+            printer('url="https://github.com/google/apitools",')
+            printer('author="Craig Citro",')
+            printer('author_email="craigcitro@google.com",')
+            printer('packages=setuptools.find_packages(),')
+            printer('install_requires=REQUIREMENTS,')
+            printer('classifiers=[')
+            with printer.Indent(indent='    '):
+                printer('"Programming Language :: Python :: 2.7",')
+                printer('"License :: OSI Approved :: Apache Software '
+                        'License",')
+            printer('],')
+            printer('license="Apache 2.0",')
+            printer('keywords="apitools apitools-%s %s",' % (
+                self.__package, self.__package))
+        printer(')')
+
+    def WriteMessagesFile(self, out):
+        self.__message_registry.WriteFile(self._GetPrinter(out))
+
+    def WriteMessagesProtoFile(self, out):
+        self.__message_registry.WriteProtoFile(self._GetPrinter(out))
+
+    def WriteServicesProtoFile(self, out):
+        self.__services_registry.WriteProtoFile(self._GetPrinter(out))
+
+    def WriteClientLibrary(self, out):
+        self.__services_registry.WriteFile(self._GetPrinter(out))
+
+    def WriteCli(self, out):
+        self.__command_registry.WriteFile(self._GetPrinter(out))
diff --git a/apitools/gen/gen_client_test.py b/apitools/gen/gen_client_test.py
new file mode 100644
index 0000000..3be3e7a
--- /dev/null
+++ b/apitools/gen/gen_client_test.py
@@ -0,0 +1,147 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test for gen_client module."""
+
+import os
+
+import unittest2
+
+from apitools.gen import gen_client
+from apitools.gen import test_utils
+
+
+def GetTestDataPath(*path):
+    return os.path.join(os.path.dirname(__file__), 'testdata', *path)
+
+
+def _GetContent(file_path):
+    with open(file_path) as f:
+        return f.read()
+
+
+@test_utils.RunOnlyOnPython27
+class ClientGenCliTest(unittest2.TestCase):
+
+    def testHelp_NotEnoughArguments(self):
+        with self.assertRaisesRegexp(SystemExit, '0'):
+            with test_utils.CaptureOutput() as (_, err):
+                gen_client.main([gen_client.__file__, '-h'])
+                err_output = err.getvalue()
+                self.assertIn('usage:', err_output)
+                self.assertIn('error: too few arguments', err_output)
+
+    def testGenClient_SimpleDocNoInit(self):
+        with test_utils.TempDir() as tmp_dir_path:
+            gen_client.main([
+                gen_client.__file__,
+                '--generate_cli',
+                '--init-file', 'none',
+                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
+                '--outdir', tmp_dir_path,
+                '--overwrite',
+                '--root_package', 'google.apis',
+                'client'
+            ])
+            expected_files = (
+                set(['dns_v1.py']) |  # CLI files
+                set(['dns_v1_client.py', 'dns_v1_messages.py']))
+            self.assertEquals(expected_files, set(os.listdir(tmp_dir_path)))
+
+    def testGenClient_SimpleDocEmptyInit(self):
+        with test_utils.TempDir() as tmp_dir_path:
+            gen_client.main([
+                gen_client.__file__,
+                '--generate_cli',
+                '--init-file', 'empty',
+                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
+                '--outdir', tmp_dir_path,
+                '--overwrite',
+                '--root_package', 'google.apis',
+                'client'
+            ])
+            expected_files = (
+                set(['dns_v1.py']) |  # CLI files
+                set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']))
+            self.assertEquals(expected_files, set(os.listdir(tmp_dir_path)))
+            init_file = _GetContent(os.path.join(tmp_dir_path, '__init__.py'))
+            self.assertEqual("""\"""Package marker file.\"""
+
+import pkgutil
+
+__path__ = pkgutil.extend_path(__path__, __name__)
+""", init_file)
+
+    def testGenClient_SimpleDocWithV4(self):
+        with test_utils.TempDir() as tmp_dir_path:
+            gen_client.main([
+                gen_client.__file__,
+                '--nogenerate_cli',
+                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
+                '--outdir', tmp_dir_path,
+                '--overwrite',
+                '--apitools_version', '0.4.12',
+                '--root_package', 'google.apis',
+                'client'
+            ])
+            self.assertEquals(
+                set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']),
+                set(os.listdir(tmp_dir_path)))
+
+    def testGenClient_SimpleDocWithV5(self):
+        with test_utils.TempDir() as tmp_dir_path:
+            gen_client.main([
+                gen_client.__file__,
+                '--nogenerate_cli',
+                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
+                '--outdir', tmp_dir_path,
+                '--overwrite',
+                '--apitools_version', '0.5.0',
+                '--root_package', 'google.apis',
+                'client'
+            ])
+            self.assertEquals(
+                set(['dns_v1_client.py', 'dns_v1_messages.py', '__init__.py']),
+                set(os.listdir(tmp_dir_path)))
+
+    def testGenPipPackage_SimpleDoc(self):
+        with test_utils.TempDir() as tmp_dir_path:
+            gen_client.main([
+                gen_client.__file__,
+                '--nogenerate_cli',
+                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
+                '--outdir', tmp_dir_path,
+                '--overwrite',
+                '--root_package', 'google.apis',
+                'pip_package'
+            ])
+            self.assertEquals(
+                set(['apitools', 'setup.py']),
+                set(os.listdir(tmp_dir_path)))
+
+    def testGenProto_SimpleDoc(self):
+        with test_utils.TempDir() as tmp_dir_path:
+            gen_client.main([
+                gen_client.__file__,
+                '--nogenerate_cli',
+                '--infile', GetTestDataPath('dns', 'dns_v1.json'),
+                '--outdir', tmp_dir_path,
+                '--overwrite',
+                '--root_package', 'google.apis',
+                'proto'
+            ])
+            self.assertEquals(
+                set(['dns_v1_messages.proto', 'dns_v1_services.proto']),
+                set(os.listdir(tmp_dir_path)))
diff --git a/apitools/gen/message_registry.py b/apitools/gen/message_registry.py
new file mode 100644
index 0000000..4f004de
--- /dev/null
+++ b/apitools/gen/message_registry.py
@@ -0,0 +1,477 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Message registry for apitools."""
+
+import collections
+import contextlib
+import json
+
+import six
+
+from apitools.base.protorpclite import descriptor
+from apitools.base.protorpclite import messages
+from apitools.gen import extended_descriptor
+from apitools.gen import util
+
+TypeInfo = collections.namedtuple('TypeInfo', ('type_name', 'variant'))
+
+
+class MessageRegistry(object):
+
+    """Registry for message types.
+
+    This closely mirrors a messages.FileDescriptor, but adds additional
+    attributes (such as message and field descriptions) and some extra
+    code for validation and cycle detection.
+    """
+
+    # Type information from these two maps comes from here:
+    #  https://developers.google.com/discovery/v1/type-format
+    PRIMITIVE_TYPE_INFO_MAP = {
+        'string': TypeInfo(type_name='string',
+                           variant=messages.StringField.DEFAULT_VARIANT),
+        'integer': TypeInfo(type_name='integer',
+                            variant=messages.IntegerField.DEFAULT_VARIANT),
+        'boolean': TypeInfo(type_name='boolean',
+                            variant=messages.BooleanField.DEFAULT_VARIANT),
+        'number': TypeInfo(type_name='number',
+                           variant=messages.FloatField.DEFAULT_VARIANT),
+        'any': TypeInfo(type_name='extra_types.JsonValue',
+                        variant=messages.Variant.MESSAGE),
+    }
+
+    PRIMITIVE_FORMAT_MAP = {
+        'int32': TypeInfo(type_name='integer',
+                          variant=messages.Variant.INT32),
+        'uint32': TypeInfo(type_name='integer',
+                           variant=messages.Variant.UINT32),
+        'int64': TypeInfo(type_name='string',
+                          variant=messages.Variant.INT64),
+        'uint64': TypeInfo(type_name='string',
+                           variant=messages.Variant.UINT64),
+        'double': TypeInfo(type_name='number',
+                           variant=messages.Variant.DOUBLE),
+        'float': TypeInfo(type_name='number',
+                          variant=messages.Variant.FLOAT),
+        'byte': TypeInfo(type_name='byte',
+                         variant=messages.BytesField.DEFAULT_VARIANT),
+        'date': TypeInfo(type_name='extra_types.DateField',
+                         variant=messages.Variant.STRING),
+        'date-time': TypeInfo(
+            type_name=('apitools.base.protorpclite.message_types.'
+                       'DateTimeMessage'),
+            variant=messages.Variant.MESSAGE),
+    }
+
+    def __init__(self, client_info, names, description, root_package_dir,
+                 base_files_package, protorpc_package):
+        self.__names = names
+        self.__client_info = client_info
+        self.__package = client_info.package
+        self.__description = util.CleanDescription(description)
+        self.__root_package_dir = root_package_dir
+        self.__base_files_package = base_files_package
+        self.__protorpc_package = protorpc_package
+        self.__file_descriptor = extended_descriptor.ExtendedFileDescriptor(
+            package=self.__package, description=self.__description)
+        # Add required imports
+        self.__file_descriptor.additional_imports = [
+            'from %s import messages as _messages' % self.__protorpc_package,
+        ]
+        # Map from scoped names (i.e. Foo.Bar) to MessageDescriptors.
+        self.__message_registry = collections.OrderedDict()
+        # A set of types that we're currently adding (for cycle detection).
+        self.__nascent_types = set()
+        # A set of types for which we've seen a reference but no
+        # definition; if this set is nonempty, validation fails.
+        self.__unknown_types = set()
+        # Used for tracking paths during message creation
+        self.__current_path = []
+        # Where to register created messages
+        self.__current_env = self.__file_descriptor
+        # TODO(craigcitro): Add a `Finalize` method.
+
+    @property
+    def file_descriptor(self):
+        self.Validate()
+        return self.__file_descriptor
+
+    def WriteProtoFile(self, printer):
+        """Write the messages file to out as proto."""
+        self.Validate()
+        extended_descriptor.WriteMessagesFile(
+            self.__file_descriptor, self.__package, self.__client_info.version,
+            printer)
+
+    def WriteFile(self, printer):
+        """Write the messages file to out."""
+        self.Validate()
+        extended_descriptor.WritePythonFile(
+            self.__file_descriptor, self.__package, self.__client_info.version,
+            printer)
+
+    def Validate(self):
+        mysteries = self.__nascent_types or self.__unknown_types
+        if mysteries:
+            raise ValueError('Malformed MessageRegistry: %s' % mysteries)
+
+    def __ComputeFullName(self, name):
+        return '.'.join(map(six.text_type, self.__current_path[:] + [name]))
+
+    def __AddImport(self, new_import):
+        if new_import not in self.__file_descriptor.additional_imports:
+            self.__file_descriptor.additional_imports.append(new_import)
+
+    def __DeclareDescriptor(self, name):
+        self.__nascent_types.add(self.__ComputeFullName(name))
+
+    def __RegisterDescriptor(self, new_descriptor):
+        """Register the given descriptor in this registry."""
+        if not isinstance(new_descriptor, (
+                extended_descriptor.ExtendedMessageDescriptor,
+                extended_descriptor.ExtendedEnumDescriptor)):
+            raise ValueError('Cannot add descriptor of type %s' % (
+                type(new_descriptor),))
+        full_name = self.__ComputeFullName(new_descriptor.name)
+        if full_name in self.__message_registry:
+            raise ValueError(
+                'Attempt to re-register descriptor %s' % full_name)
+        if full_name not in self.__nascent_types:
+            raise ValueError('Directly adding types is not supported')
+        new_descriptor.full_name = full_name
+        self.__message_registry[full_name] = new_descriptor
+        if isinstance(new_descriptor,
+                      extended_descriptor.ExtendedMessageDescriptor):
+            self.__current_env.message_types.append(new_descriptor)
+        elif isinstance(new_descriptor,
+                        extended_descriptor.ExtendedEnumDescriptor):
+            self.__current_env.enum_types.append(new_descriptor)
+        self.__unknown_types.discard(full_name)
+        self.__nascent_types.remove(full_name)
+
+    def LookupDescriptor(self, name):
+        return self.__GetDescriptorByName(name)
+
+    def LookupDescriptorOrDie(self, name):
+        message_descriptor = self.LookupDescriptor(name)
+        if message_descriptor is None:
+            raise ValueError('No message descriptor named "%s"' % name)
+        return message_descriptor
+
+    def __GetDescriptor(self, name):
+        return self.__GetDescriptorByName(self.__ComputeFullName(name))
+
+    def __GetDescriptorByName(self, name):
+        if name in self.__message_registry:
+            return self.__message_registry[name]
+        if name in self.__nascent_types:
+            raise ValueError(
+                'Cannot retrieve type currently being created: %s' % name)
+        return None
+
+    @contextlib.contextmanager
+    def __DescriptorEnv(self, message_descriptor):
+        # TODO(craigcitro): Typecheck?
+        previous_env = self.__current_env
+        self.__current_path.append(message_descriptor.name)
+        self.__current_env = message_descriptor
+        yield
+        self.__current_path.pop()
+        self.__current_env = previous_env
+
+    def AddEnumDescriptor(self, name, description,
+                          enum_values, enum_descriptions):
+        """Add a new EnumDescriptor named name with the given enum values."""
+        message = extended_descriptor.ExtendedEnumDescriptor()
+        message.name = self.__names.ClassName(name)
+        message.description = util.CleanDescription(description)
+        self.__DeclareDescriptor(message.name)
+        for index, (enum_name, enum_description) in enumerate(
+                zip(enum_values, enum_descriptions)):
+            enum_value = extended_descriptor.ExtendedEnumValueDescriptor()
+            enum_value.name = self.__names.NormalizeEnumName(enum_name)
+            if enum_value.name != enum_name:
+                message.enum_mappings.append(
+                    extended_descriptor.ExtendedEnumDescriptor.JsonEnumMapping(
+                        python_name=enum_value.name, json_name=enum_name))
+                self.__AddImport('from %s import encoding' %
+                                 self.__base_files_package)
+            enum_value.number = index
+            enum_value.description = util.CleanDescription(
+                enum_description or '<no description>')
+            message.values.append(enum_value)
+        self.__RegisterDescriptor(message)
+
+    def __DeclareMessageAlias(self, schema, alias_for):
+        """Declare schema as an alias for alias_for."""
+        # TODO(craigcitro): This is a hack. Remove it.
+        message = extended_descriptor.ExtendedMessageDescriptor()
+        message.name = self.__names.ClassName(schema['id'])
+        message.alias_for = alias_for
+        self.__DeclareDescriptor(message.name)
+        self.__AddImport('from %s import extra_types' %
+                         self.__base_files_package)
+        self.__RegisterDescriptor(message)
+
+    def __AddAdditionalProperties(self, message, schema, properties):
+        """Add an additionalProperties field to message."""
+        additional_properties_info = schema['additionalProperties']
+        entries_type_name = self.__AddAdditionalPropertyType(
+            message.name, additional_properties_info)
+        description = util.CleanDescription(
+            additional_properties_info.get('description'))
+        if description is None:
+            description = 'Additional properties of type %s' % message.name
+        attrs = {
+            'items': {
+                '$ref': entries_type_name,
+            },
+            'description': description,
+            'type': 'array',
+        }
+        field_name = 'additionalProperties'
+        message.fields.append(self.__FieldDescriptorFromProperties(
+            field_name, len(properties) + 1, attrs))
+        self.__AddImport('from %s import encoding' % self.__base_files_package)
+        message.decorators.append(
+            'encoding.MapUnrecognizedFields(%r)' % field_name)
+
+    def AddDescriptorFromSchema(self, schema_name, schema):
+        """Add a new MessageDescriptor named schema_name based on schema."""
+        # TODO(craigcitro): Is schema_name redundant?
+        if self.__GetDescriptor(schema_name):
+            return
+        if schema.get('enum'):
+            self.__DeclareEnum(schema_name, schema)
+            return
+        if schema.get('type') == 'any':
+            self.__DeclareMessageAlias(schema, 'extra_types.JsonValue')
+            return
+        if schema.get('type') != 'object':
+            raise ValueError('Cannot create message descriptors for type %s' %
+                             schema.get('type'))
+        message = extended_descriptor.ExtendedMessageDescriptor()
+        message.name = self.__names.ClassName(schema['id'])
+        message.description = util.CleanDescription(schema.get(
+            'description', 'A %s object.' % message.name))
+        self.__DeclareDescriptor(message.name)
+        with self.__DescriptorEnv(message):
+            properties = schema.get('properties', {})
+            for index, (name, attrs) in enumerate(sorted(properties.items())):
+                field = self.__FieldDescriptorFromProperties(
+                    name, index + 1, attrs)
+                message.fields.append(field)
+                if field.name != name:
+                    message.field_mappings.append(
+                        type(message).JsonFieldMapping(
+                            python_name=field.name, json_name=name))
+                    self.__AddImport(
+                        'from %s import encoding' % self.__base_files_package)
+            if 'additionalProperties' in schema:
+                self.__AddAdditionalProperties(message, schema, properties)
+        self.__RegisterDescriptor(message)
+
+    def __AddAdditionalPropertyType(self, name, property_schema):
+        """Add a new nested AdditionalProperty message."""
+        new_type_name = 'AdditionalProperty'
+        property_schema = dict(property_schema)
+        # We drop the description here on purpose, so the resulting
+        # messages are less repetitive.
+        property_schema.pop('description', None)
+        description = 'An additional property for a %s object.' % name
+        schema = {
+            'id': new_type_name,
+            'type': 'object',
+            'description': description,
+            'properties': {
+                'key': {
+                    'type': 'string',
+                    'description': 'Name of the additional property.',
+                },
+                'value': property_schema,
+            },
+        }
+        self.AddDescriptorFromSchema(new_type_name, schema)
+        return new_type_name
+
+    def __AddEntryType(self, entry_type_name, entry_schema, parent_name):
+        """Add a type for a list entry."""
+        entry_schema.pop('description', None)
+        description = 'Single entry in a %s.' % parent_name
+        schema = {
+            'id': entry_type_name,
+            'type': 'object',
+            'description': description,
+            'properties': {
+                'entry': {
+                    'type': 'array',
+                    'items': entry_schema,
+                },
+            },
+        }
+        self.AddDescriptorFromSchema(entry_type_name, schema)
+        return entry_type_name
+
+    def __FieldDescriptorFromProperties(self, name, index, attrs):
+        """Create a field descriptor for these attrs."""
+        field = descriptor.FieldDescriptor()
+        field.name = self.__names.CleanName(name)
+        field.number = index
+        field.label = self.__ComputeLabel(attrs)
+        new_type_name_hint = self.__names.ClassName(
+            '%sValue' % self.__names.ClassName(name))
+        type_info = self.__GetTypeInfo(attrs, new_type_name_hint)
+        field.type_name = type_info.type_name
+        field.variant = type_info.variant
+        if 'default' in attrs:
+            # TODO(craigcitro): Correctly handle non-primitive default values.
+            default = attrs['default']
+            if not (field.type_name == 'string' or
+                    field.variant == messages.Variant.ENUM):
+                default = str(json.loads(default))
+            if field.variant == messages.Variant.ENUM:
+                default = self.__names.NormalizeEnumName(default)
+            field.default_value = default
+        extended_field = extended_descriptor.ExtendedFieldDescriptor()
+        extended_field.name = field.name
+        extended_field.description = util.CleanDescription(
+            attrs.get('description', 'A %s attribute.' % field.type_name))
+        extended_field.field_descriptor = field
+        return extended_field
+
+    @staticmethod
+    def __ComputeLabel(attrs):
+        if attrs.get('required', False):
+            return descriptor.FieldDescriptor.Label.REQUIRED
+        elif attrs.get('type') == 'array':
+            return descriptor.FieldDescriptor.Label.REPEATED
+        elif attrs.get('repeated'):
+            return descriptor.FieldDescriptor.Label.REPEATED
+        return descriptor.FieldDescriptor.Label.OPTIONAL
+
+    def __DeclareEnum(self, enum_name, attrs):
+        description = util.CleanDescription(attrs.get('description', ''))
+        enum_values = attrs['enum']
+        enum_descriptions = attrs.get(
+            'enumDescriptions', [''] * len(enum_values))
+        self.AddEnumDescriptor(enum_name, description,
+                               enum_values, enum_descriptions)
+        self.__AddIfUnknown(enum_name)
+        return TypeInfo(type_name=enum_name, variant=messages.Variant.ENUM)
+
+    def __AddIfUnknown(self, type_name):
+        type_name = self.__names.ClassName(type_name)
+        full_type_name = self.__ComputeFullName(type_name)
+        if (full_type_name not in self.__message_registry.keys() and
+                type_name not in self.__message_registry.keys()):
+            self.__unknown_types.add(type_name)
+
+    def __GetTypeInfo(self, attrs, name_hint):
+        """Return a TypeInfo object for attrs, creating one if needed."""
+
+        type_ref = self.__names.ClassName(attrs.get('$ref'))
+        type_name = attrs.get('type')
+        if not (type_ref or type_name):
+            raise ValueError('No type found for %s' % attrs)
+
+        if type_ref:
+            self.__AddIfUnknown(type_ref)
+            # We don't actually know this is a message -- it might be an
+            # enum. However, we can't check that until we've created all the
+            # types, so we come back and fix this up later.
+            return TypeInfo(
+                type_name=type_ref, variant=messages.Variant.MESSAGE)
+
+        if 'enum' in attrs:
+            enum_name = '%sValuesEnum' % name_hint
+            return self.__DeclareEnum(enum_name, attrs)
+
+        if 'format' in attrs:
+            type_info = self.PRIMITIVE_FORMAT_MAP.get(attrs['format'])
+            if type_info is None:
+                # If we don't recognize the format, the spec says we fall back
+                # to just using the type name.
+                if type_name in self.PRIMITIVE_TYPE_INFO_MAP:
+                    return self.PRIMITIVE_TYPE_INFO_MAP[type_name]
+                raise ValueError('Unknown type/format "%s"/"%s"' % (
+                    attrs['format'], type_name))
+            if type_info.type_name.startswith((
+                    'apitools.base.protorpclite.message_types.',
+                    'message_types.')):
+                self.__AddImport(
+                    'from %s import message_types as _message_types' %
+                    self.__protorpc_package)
+            if type_info.type_name.startswith('extra_types.'):
+                self.__AddImport(
+                    'from %s import extra_types' % self.__base_files_package)
+            return type_info
+
+        if type_name in self.PRIMITIVE_TYPE_INFO_MAP:
+            type_info = self.PRIMITIVE_TYPE_INFO_MAP[type_name]
+            if type_info.type_name.startswith('extra_types.'):
+                self.__AddImport(
+                    'from %s import extra_types' % self.__base_files_package)
+            return type_info
+
+        if type_name == 'array':
+            items = attrs.get('items')
+            if not items:
+                raise ValueError('Array type with no item type: %s' % attrs)
+            entry_name_hint = self.__names.ClassName(
+                items.get('title') or '%sListEntry' % name_hint)
+            entry_label = self.__ComputeLabel(items)
+            if entry_label == descriptor.FieldDescriptor.Label.REPEATED:
+                parent_name = self.__names.ClassName(
+                    items.get('title') or name_hint)
+                entry_type_name = self.__AddEntryType(
+                    entry_name_hint, items.get('items'), parent_name)
+                return TypeInfo(type_name=entry_type_name,
+                                variant=messages.Variant.MESSAGE)
+            return self.__GetTypeInfo(items, entry_name_hint)
+        elif type_name == 'any':
+            self.__AddImport('from %s import extra_types' %
+                             self.__base_files_package)
+            return self.PRIMITIVE_TYPE_INFO_MAP['any']
+        elif type_name == 'object':
+            # TODO(craigcitro): Think of a better way to come up with names.
+            if not name_hint:
+                raise ValueError(
+                    'Cannot create subtype without some name hint')
+            schema = dict(attrs)
+            schema['id'] = name_hint
+            self.AddDescriptorFromSchema(name_hint, schema)
+            self.__AddIfUnknown(name_hint)
+            return TypeInfo(
+                type_name=name_hint, variant=messages.Variant.MESSAGE)
+
+        raise ValueError('Unknown type: %s' % type_name)
+
+    def FixupMessageFields(self):
+        for message_type in self.file_descriptor.message_types:
+            self._FixupMessage(message_type)
+
+    def _FixupMessage(self, message_type):
+        with self.__DescriptorEnv(message_type):
+            for field in message_type.fields:
+                if field.field_descriptor.variant == messages.Variant.MESSAGE:
+                    field_type_name = field.field_descriptor.type_name
+                    field_type = self.LookupDescriptor(field_type_name)
+                    if isinstance(field_type,
+                                  extended_descriptor.ExtendedEnumDescriptor):
+                        field.field_descriptor.variant = messages.Variant.ENUM
+            for submessage_type in message_type.message_types:
+                self._FixupMessage(submessage_type)
diff --git a/apitools/gen/service_registry.py b/apitools/gen/service_registry.py
new file mode 100644
index 0000000..9f71592
--- /dev/null
+++ b/apitools/gen/service_registry.py
@@ -0,0 +1,484 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Service registry for apitools."""
+
+import collections
+import logging
+import re
+import textwrap
+
+from apitools.base.py import base_api
+from apitools.gen import util
+
+# We're a code generator. I don't care.
+# pylint:disable=too-many-statements
+
+_MIME_PATTERN_RE = re.compile(r'(?i)[a-z0-9_*-]+/[a-z0-9_*-]+')
+
+
+class ServiceRegistry(object):
+
+    """Registry for service types."""
+
+    def __init__(self, client_info, message_registry, command_registry,
+                 names, root_package, base_files_package,
+                 unelidable_request_methods):
+        self.__client_info = client_info
+        self.__package = client_info.package
+        self.__names = names
+        self.__service_method_info_map = collections.OrderedDict()
+        self.__message_registry = message_registry
+        self.__command_registry = command_registry
+        self.__root_package = root_package
+        self.__base_files_package = base_files_package
+        self.__unelidable_request_methods = unelidable_request_methods
+        self.__all_scopes = set(self.__client_info.scopes)
+
+    def Validate(self):
+        self.__message_registry.Validate()
+
+    @property
+    def scopes(self):
+        return sorted(list(self.__all_scopes))
+
+    def __GetServiceClassName(self, service_name):
+        return self.__names.ClassName(
+            '%sService' % self.__names.ClassName(service_name))
+
+    def __PrintDocstring(self, printer, method_info, method_name, name):
+        """Print a docstring for a service method."""
+        if method_info.description:
+            description = util.CleanDescription(method_info.description)
+            first_line, newline, remaining = method_info.description.partition(
+                '\n')
+            if not first_line.endswith('.'):
+                first_line = '%s.' % first_line
+            description = '%s%s%s' % (first_line, newline, remaining)
+        else:
+            description = '%s method for the %s service.' % (method_name, name)
+        with printer.CommentContext():
+            printer('"""%s' % description)
+        printer()
+        printer('Args:')
+        printer('  request: (%s) input message', method_info.request_type_name)
+        printer('  global_params: (StandardQueryParameters, default: None) '
+                'global arguments')
+        if method_info.upload_config:
+            printer('  upload: (Upload, default: None) If present, upload')
+            printer('      this stream with the request.')
+        if method_info.supports_download:
+            printer(
+                '  download: (Download, default: None) If present, download')
+            printer('      data from the request via this stream.')
+        printer('Returns:')
+        printer('  (%s) The response message.', method_info.response_type_name)
+        printer('"""')
+
+    def __WriteSingleService(
+            self, printer, name, method_info_map, client_class_name):
+        printer()
+        class_name = self.__GetServiceClassName(name)
+        printer('class %s(base_api.BaseApiService):', class_name)
+        with printer.Indent():
+            printer('"""Service class for the %s resource."""', name)
+            printer()
+            printer('_NAME = %s', repr(name))
+
+            # Print the configs for the methods first.
+            printer()
+            printer('def __init__(self, client):')
+            with printer.Indent():
+                printer('super(%s.%s, self).__init__(client)',
+                        client_class_name, class_name)
+                printer('self._upload_configs = {')
+                with printer.Indent(indent='    '):
+                    for method_name, method_info in method_info_map.items():
+                        upload_config = method_info.upload_config
+                        if upload_config is not None:
+                            printer(
+                                "'%s': base_api.ApiUploadInfo(", method_name)
+                            with printer.Indent(indent='    '):
+                                attrs = sorted(
+                                    x.name for x in upload_config.all_fields())
+                                for attr in attrs:
+                                    printer('%s=%r,',
+                                            attr, getattr(upload_config, attr))
+                            printer('),')
+                    printer('}')
+
+            # Now write each method in turn.
+            for method_name, method_info in method_info_map.items():
+                printer()
+                params = ['self', 'request', 'global_params=None']
+                if method_info.upload_config:
+                    params.append('upload=None')
+                if method_info.supports_download:
+                    params.append('download=None')
+                printer('def %s(%s):', method_name, ', '.join(params))
+                with printer.Indent():
+                    self.__PrintDocstring(
+                        printer, method_info, method_name, name)
+                    printer("config = self.GetMethodConfig('%s')", method_name)
+                    upload_config = method_info.upload_config
+                    if upload_config is not None:
+                        printer("upload_config = self.GetUploadConfig('%s')",
+                                method_name)
+                    arg_lines = [
+                        'config, request, global_params=global_params']
+                    if method_info.upload_config:
+                        arg_lines.append(
+                            'upload=upload, upload_config=upload_config')
+                    if method_info.supports_download:
+                        arg_lines.append('download=download')
+                    printer('return self._RunMethod(')
+                    with printer.Indent(indent='    '):
+                        for line in arg_lines[:-1]:
+                            printer('%s,', line)
+                        printer('%s)', arg_lines[-1])
+                printer()
+                printer('{0}.method_config = lambda: base_api.ApiMethodInfo('
+                        .format(method_name))
+                with printer.Indent(indent='    '):
+                    method_info = method_info_map[method_name]
+                    attrs = sorted(
+                        x.name for x in method_info.all_fields())
+                    for attr in attrs:
+                        if attr in ('upload_config', 'description'):
+                            continue
+                        value = getattr(method_info, attr)
+                        if value is not None:
+                            printer('%s=%r,', attr, value)
+                printer(')')
+
+    def __WriteProtoServiceDeclaration(self, printer, name, method_info_map):
+        """Write a single service declaration to a proto file."""
+        printer()
+        printer('service %s {', self.__GetServiceClassName(name))
+        with printer.Indent():
+            for method_name, method_info in method_info_map.items():
+                for line in textwrap.wrap(method_info.description,
+                                          printer.CalculateWidth() - 3):
+                    printer('// %s', line)
+                printer('rpc %s (%s) returns (%s);',
+                        method_name,
+                        method_info.request_type_name,
+                        method_info.response_type_name)
+        printer('}')
+
+    def WriteProtoFile(self, printer):
+        """Write the services in this registry to out as proto."""
+        self.Validate()
+        client_info = self.__client_info
+        printer('// Generated services for %s version %s.',
+                client_info.package, client_info.version)
+        printer()
+        printer('syntax = "proto2";')
+        printer('package %s;', self.__package)
+        printer('import "%s";', client_info.messages_proto_file_name)
+        printer()
+        for name, method_info_map in self.__service_method_info_map.items():
+            self.__WriteProtoServiceDeclaration(printer, name, method_info_map)
+
    def WriteFile(self, printer):
        """Write the services in this registry to out.

        Generates the Python client module: a module docstring, imports,
        and one base_api.BaseApiClient subclass whose __init__ attaches a
        stub object per registered service.

        Args:
          printer: Callable writing one formatted line per call; supports
              Indent() for nested scopes.
        """
        self.Validate()
        client_info = self.__client_info
        # Module header of the generated file.
        printer('"""Generated client library for %s version %s."""',
                client_info.package, client_info.version)
        printer('# NOTE: This file is autogenerated and should not be edited '
                'by hand.')
        printer('from %s import base_api', self.__base_files_package)
        # The messages module may live under a root package; prefix the
        # generated import accordingly.
        if self.__root_package:
            import_prefix = 'from {0} '.format(self.__root_package)
        else:
            import_prefix = ''
        printer('%simport %s as messages', import_prefix,
                client_info.messages_rule_name)
        printer()
        printer()
        printer('class %s(base_api.BaseApiClient):',
                client_info.client_class_name)
        with printer.Indent():
            printer(
                '"""Generated client library for service %s version %s."""',
                client_info.package, client_info.version)
            printer()
            # Class-level constants describing the API endpoint and
            # default credentials for the generated client.
            printer('MESSAGES_MODULE = messages')
            printer('BASE_URL = {0!r}'.format(client_info.base_url))
            printer()
            printer('_PACKAGE = {0!r}'.format(client_info.package))
            # Default to the userinfo.email scope when none are configured.
            printer('_SCOPES = {0!r}'.format(
                client_info.scopes or
                ['https://www.googleapis.com/auth/userinfo.email']))
            printer('_VERSION = {0!r}'.format(client_info.version))
            printer('_CLIENT_ID = {0!r}'.format(client_info.client_id))
            printer('_CLIENT_SECRET = {0!r}'.format(client_info.client_secret))
            printer('_USER_AGENT = {0!r}'.format(client_info.user_agent))
            printer('_CLIENT_CLASS_NAME = {0!r}'.format(
                client_info.client_class_name))
            printer('_URL_VERSION = {0!r}'.format(client_info.url_version))
            printer('_API_KEY = {0!r}'.format(client_info.api_key))
            printer()
            # Emit the generated __init__; the explicit hanging indent
            # lines up continuation args with the open paren above.
            printer("def __init__(self, url='', credentials=None,")
            with printer.Indent(indent='             '):
                printer('get_credentials=True, http=None, model=None,')
                printer('log_request=False, log_response=False,')
                printer('credentials_args=None, default_global_params=None,')
                printer('additional_http_headers=None):')
            with printer.Indent():
                printer('"""Create a new %s handle."""', client_info.package)
                printer('url = url or self.BASE_URL')
                printer(
                    'super(%s, self).__init__(', client_info.client_class_name)
                printer('    url, credentials=credentials,')
                printer('    get_credentials=get_credentials, http=http, '
                        'model=model,')
                printer('    log_request=log_request, '
                        'log_response=log_response,')
                printer('    credentials_args=credentials_args,')
                printer('    default_global_params=default_global_params,')
                printer('    additional_http_headers=additional_http_headers)')
                # One service stub attribute per registered service.
                for name in self.__service_method_info_map.keys():
                    printer('self.%s = self.%s(self)',
                            name, self.__GetServiceClassName(name))
            # Nested per-service classes follow the client __init__.
            for name, method_info in self.__service_method_info_map.items():
                self.__WriteSingleService(
                    printer, name, method_info, client_info.client_class_name)
+
+    def __RegisterService(self, service_name, method_info_map):
+        if service_name in self.__service_method_info_map:
+            raise ValueError(
+                'Attempt to re-register descriptor %s' % service_name)
+        self.__service_method_info_map[service_name] = method_info_map
+
+    def __CreateRequestType(self, method_description, body_type=None):
+        """Create a request type for this method."""
+        schema = {}
+        schema['id'] = self.__names.ClassName('%sRequest' % (
+            self.__names.ClassName(method_description['id'], separator='.'),))
+        schema['type'] = 'object'
+        schema['properties'] = collections.OrderedDict()
+        if 'parameterOrder' not in method_description:
+            ordered_parameters = list(method_description.get('parameters', []))
+        else:
+            ordered_parameters = method_description['parameterOrder'][:]
+            for k in method_description['parameters']:
+                if k not in ordered_parameters:
+                    ordered_parameters.append(k)
+        for parameter_name in ordered_parameters:
+            field_name = self.__names.CleanName(parameter_name)
+            field = dict(method_description['parameters'][parameter_name])
+            if 'type' not in field:
+                raise ValueError('No type found in parameter %s' % field)
+            schema['properties'][field_name] = field
+        if body_type is not None:
+            body_field_name = self.__GetRequestField(
+                method_description, body_type)
+            if body_field_name in schema['properties']:
+                raise ValueError('Failed to normalize request resource name')
+            if 'description' not in body_type:
+                body_type['description'] = (
+                    'A %s resource to be passed as the request body.' % (
+                        self.__GetRequestType(body_type),))
+            schema['properties'][body_field_name] = body_type
+        self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
+        return schema['id']
+
+    def __CreateVoidResponseType(self, method_description):
+        """Create an empty response type."""
+        schema = {}
+        method_name = self.__names.ClassName(
+            method_description['id'], separator='.')
+        schema['id'] = self.__names.ClassName('%sResponse' % method_name)
+        schema['type'] = 'object'
+        schema['description'] = 'An empty %s response.' % method_name
+        self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
+        return schema['id']
+
+    def __NeedRequestType(self, method_description, request_type):
+        """Determine if this method needs a new request type created."""
+        if not request_type:
+            return True
+        method_id = method_description.get('id', '')
+        if method_id in self.__unelidable_request_methods:
+            return True
+        message = self.__message_registry.LookupDescriptorOrDie(request_type)
+        if message is None:
+            return True
+        field_names = [x.name for x in message.fields]
+        parameters = method_description.get('parameters', {})
+        for param_name, param_info in parameters.items():
+            if (param_info.get('location') != 'path' or
+                    self.__names.CleanName(param_name) not in field_names):
+                break
+        else:
+            return False
+        return True
+
+    def __MaxSizeToInt(self, max_size):
+        """Convert max_size to an int."""
+        size_groups = re.match(r'(?P<size>\d+)(?P<unit>.B)?$', max_size)
+        if size_groups is None:
+            raise ValueError('Could not parse maxSize')
+        size, unit = size_groups.group('size', 'unit')
+        shift = 0
+        if unit is not None:
+            unit_dict = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
+            shift = unit_dict.get(unit.upper())
+            if shift is None:
+                raise ValueError('Unknown unit %s' % unit)
+        return int(size) * (1 << shift)
+
+    def __ComputeUploadConfig(self, media_upload_config, method_id):
+        """Fill out the upload config for this method."""
+        config = base_api.ApiUploadInfo()
+        if 'maxSize' in media_upload_config:
+            config.max_size = self.__MaxSizeToInt(
+                media_upload_config['maxSize'])
+        if 'accept' not in media_upload_config:
+            logging.warn(
+                'No accept types found for upload configuration in '
+                'method %s, using */*', method_id)
+        config.accept.extend([
+            str(a) for a in media_upload_config.get('accept', '*/*')])
+
+        for accept_pattern in config.accept:
+            if not _MIME_PATTERN_RE.match(accept_pattern):
+                logging.warn('Unexpected MIME type: %s', accept_pattern)
+        protocols = media_upload_config.get('protocols', {})
+        for protocol in ('simple', 'resumable'):
+            media = protocols.get(protocol, {})
+            for attr in ('multipart', 'path'):
+                if attr in media:
+                    setattr(config, '%s_%s' % (protocol, attr), media[attr])
+        return config
+
    def __ComputeMethodInfo(self, method_description, request, response,
                            request_field):
        """Compute the base_api.ApiMethodInfo for this method.

        Args:
          method_description: (dict) This method's section of the
              discovery document.
          request: (str) Name of the request message type.
          response: (str) Name of the response message type.
          request_field: Field name holding the request body (or a
              sentinel such as base_api.REQUEST_IS_BODY).

        Returns:
          A populated base_api.ApiMethodInfo.

        Raises:
          ValueError: If a parameter's 'location' is neither 'query'
              nor 'path'.
        """
        relative_path = self.__names.NormalizeRelativePath(
            ''.join((self.__client_info.base_path,
                     method_description['path'])))
        method_id = method_description['id']
        # Keep only required parameters, preserving parameterOrder.
        ordered_params = []
        for param_name in method_description.get('parameterOrder', []):
            param_info = method_description['parameters'][param_name]
            if param_info.get('required', False):
                ordered_params.append(param_name)
        method_info = base_api.ApiMethodInfo(
            relative_path=relative_path,
            method_id=method_id,
            http_method=method_description['httpMethod'],
            description=util.CleanDescription(
                method_description.get('description', '')),
            query_params=[],
            path_params=[],
            ordered_params=ordered_params,
            request_type_name=self.__names.ClassName(request),
            response_type_name=self.__names.ClassName(response),
            request_field=request_field,
        )
        # Record flatPath only when it differs from the templated path.
        flat_path = method_description.get('flatPath', None)
        if flat_path is not None:
            flat_path = self.__names.NormalizeRelativePath(
                self.__client_info.base_path + flat_path)
            if flat_path != relative_path:
                method_info.flat_path = flat_path
        if method_description.get('supportsMediaUpload', False):
            method_info.upload_config = self.__ComputeUploadConfig(
                method_description.get('mediaUpload'), method_id)
        method_info.supports_download = method_description.get(
            'supportsMediaDownload', False)
        # Accumulate this method's OAuth scopes into the registry-wide set.
        self.__all_scopes.update(method_description.get('scopes', ()))
        # Partition parameters into query vs. path params by location.
        for param, desc in method_description.get('parameters', {}).items():
            param = self.__names.CleanName(param)
            location = desc['location']
            if location == 'query':
                method_info.query_params.append(param)
            elif location == 'path':
                method_info.path_params.append(param)
            else:
                raise ValueError(
                    'Unknown parameter location %s for parameter %s' % (
                        location, param))
        method_info.path_params.sort()
        method_info.query_params.sort()
        return method_info
+
+    def __BodyFieldName(self, body_type):
+        if body_type is None:
+            return ''
+        return self.__names.FieldName(body_type['$ref'])
+
+    def __GetRequestType(self, body_type):
+        return self.__names.ClassName(body_type.get('$ref'))
+
+    def __GetRequestField(self, method_description, body_type):
+        """Determine the request field for this method."""
+        body_field_name = self.__BodyFieldName(body_type)
+        if body_field_name in method_description.get('parameters', {}):
+            body_field_name = self.__names.FieldName(
+                '%s_resource' % body_field_name)
+        # It's exceedingly unlikely that we'd get two name collisions, which
+        # means it's bound to happen at some point.
+        while body_field_name in method_description.get('parameters', {}):
+            body_field_name = self.__names.FieldName(
+                '%s_body' % body_field_name)
+        return body_field_name
+
+    def AddServiceFromResource(self, service_name, methods):
+        """Add a new service named service_name with the given methods."""
+        method_descriptions = methods.get('methods', {})
+        method_info_map = collections.OrderedDict()
+        items = sorted(method_descriptions.items())
+        for method_name, method_description in items:
+            method_name = self.__names.MethodName(method_name)
+
+            # NOTE: According to the discovery document, if the request or
+            # response is present, it will simply contain a `$ref`.
+            body_type = method_description.get('request')
+            if body_type is None:
+                request_type = None
+            else:
+                request_type = self.__GetRequestType(body_type)
+            if self.__NeedRequestType(method_description, request_type):
+                request = self.__CreateRequestType(
+                    method_description, body_type=body_type)
+                request_field = self.__GetRequestField(
+                    method_description, body_type)
+            else:
+                request = request_type
+                request_field = base_api.REQUEST_IS_BODY
+
+            if 'response' in method_description:
+                response = method_description['response']['$ref']
+            else:
+                response = self.__CreateVoidResponseType(method_description)
+
+            method_info_map[method_name] = self.__ComputeMethodInfo(
+                method_description, request, response, request_field)
+            self.__command_registry.AddCommandForMethod(
+                service_name, method_name, method_info_map[method_name],
+                request, response)
+
+        nested_services = methods.get('resources', {})
+        services = sorted(nested_services.items())
+        for subservice_name, submethods in services:
+            new_service_name = '%s_%s' % (service_name, subservice_name)
+            self.AddServiceFromResource(new_service_name, submethods)
+
+        self.__RegisterService(service_name, method_info_map)
diff --git a/apitools/gen/test_utils.py b/apitools/gen/test_utils.py
new file mode 100644
index 0000000..59eea51
--- /dev/null
+++ b/apitools/gen/test_utils.py
@@ -0,0 +1,58 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Various utilities used in tests."""
+
+import contextlib
+import os
+import tempfile
+import shutil
+import sys
+
+import six
+import unittest2
+
+
# Decorator skipping a test unless running under Python 2.7 exactly.
RunOnlyOnPython27 = unittest2.skipUnless(
    sys.version_info[:2] == (2, 7), 'Only runs in Python 2.7')

# Decorator skipping a test when running on Windows.
SkipOnWindows = unittest2.skipIf(
    os.name == 'nt', 'Does not run on windows')
+
+
@contextlib.contextmanager
def TempDir(change_to=False):
    """Yield a fresh temporary directory, removing it on exit.

    Args:
      change_to: If true, chdir into the new directory for the duration
          of the with-block and restore the old working directory after.

    Yields:
      The path of the temporary directory.
    """
    original_dir = os.getcwd() if change_to else None
    temp_path = tempfile.mkdtemp()
    try:
        if change_to:
            os.chdir(temp_path)
        yield temp_path
    finally:
        if change_to:
            os.chdir(original_dir)
        shutil.rmtree(temp_path)
+
+
@contextlib.contextmanager
def CaptureOutput():
    """Temporarily redirect stdout and stderr into StringIO buffers.

    Yields:
      A (stdout_buffer, stderr_buffer) pair collecting everything written
      inside the with-block; the real streams are restored on exit.
    """
    saved_streams = sys.stdout, sys.stderr
    captured_out = six.StringIO()
    captured_err = six.StringIO()
    try:
        sys.stdout, sys.stderr = captured_out, captured_err
        yield captured_out, captured_err
    finally:
        sys.stdout, sys.stderr = saved_streams
diff --git a/apitools/gen/testdata/dns/dns_v1.json b/apitools/gen/testdata/dns/dns_v1.json
new file mode 100644
index 0000000..77c1553
--- /dev/null
+++ b/apitools/gen/testdata/dns/dns_v1.json
@@ -0,0 +1,707 @@
+{
+ "kind": "discovery#restDescription",
+ "discoveryVersion": "v1",
+ "id": "dns:v1",
+ "name": "dns",
+ "version": "v1",
+ "revision": "20150807",
+ "title": "Google Cloud DNS API",
+ "description": "The Google Cloud DNS API provides services for configuring and serving authoritative DNS records.",
+ "ownerDomain": "google.com",
+ "ownerName": "Google",
+ "icons": {
+  "x16": "http://www.google.com/images/icons/product/search-16.gif",
+  "x32": "http://www.google.com/images/icons/product/search-32.gif"
+ },
+ "documentationLink": "https://developers.google.com/cloud-dns",
+ "protocol": "rest",
+ "baseUrl": "https://www.googleapis.com/dns/v1/projects/",
+ "basePath": "/dns/v1/projects/",
+ "rootUrl": "https://www.googleapis.com/",
+ "servicePath": "dns/v1/projects/",
+ "batchPath": "batch",
+ "parameters": {
+  "alt": {
+   "type": "string",
+   "description": "Data format for the response.",
+   "default": "json",
+   "enum": [
+    "json"
+   ],
+   "enumDescriptions": [
+    "Responses with Content-Type of application/json"
+   ],
+   "location": "query"
+  },
+  "fields": {
+   "type": "string",
+   "description": "Selector specifying which fields to include in a partial response.",
+   "location": "query"
+  },
+  "key": {
+   "type": "string",
+   "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
+   "location": "query"
+  },
+  "oauth_token": {
+   "type": "string",
+   "description": "OAuth 2.0 token for the current user.",
+   "location": "query"
+  },
+  "prettyPrint": {
+   "type": "boolean",
+   "description": "Returns response with indentations and line breaks.",
+   "default": "true",
+   "location": "query"
+  },
+  "quotaUser": {
+   "type": "string",
+   "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.",
+   "location": "query"
+  },
+  "userIp": {
+   "type": "string",
+   "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.",
+   "location": "query"
+  }
+ },
+ "auth": {
+  "oauth2": {
+   "scopes": {
+    "https://www.googleapis.com/auth/cloud-platform": {
+     "description": "View and manage your data across Google Cloud Platform services"
+    },
+    "https://www.googleapis.com/auth/cloud-platform.read-only": {
+     "description": "MESSAGE UNDER CONSTRUCTION View your data across Google Cloud Platform services"
+    },
+    "https://www.googleapis.com/auth/ndev.clouddns.readonly": {
+     "description": "View your DNS records hosted by Google Cloud DNS"
+    },
+    "https://www.googleapis.com/auth/ndev.clouddns.readwrite": {
+     "description": "View and manage your DNS records hosted by Google Cloud DNS"
+    }
+   }
+  }
+ },
+ "schemas": {
+  "Change": {
+   "id": "Change",
+   "type": "object",
+   "description": "An atomic update to a collection of ResourceRecordSets.",
+   "properties": {
+    "additions": {
+     "type": "array",
+     "description": "Which ResourceRecordSets to add?",
+     "items": {
+      "$ref": "ResourceRecordSet"
+     }
+    },
+    "deletions": {
+     "type": "array",
+     "description": "Which ResourceRecordSets to remove? Must match existing data exactly.",
+     "items": {
+      "$ref": "ResourceRecordSet"
+     }
+    },
+    "id": {
+     "type": "string",
+     "description": "Unique identifier for the resource; defined by the server (output only)."
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#change\".",
+     "default": "dns#change"
+    },
+    "startTime": {
+     "type": "string",
+     "description": "The time that this operation was started by the server. This is in RFC3339 text format."
+    },
+    "status": {
+     "type": "string",
+     "description": "Status of the operation (output only).",
+     "enum": [
+      "done",
+      "pending"
+     ],
+     "enumDescriptions": [
+      "",
+      ""
+     ]
+    }
+   }
+  },
+  "ChangesListResponse": {
+   "id": "ChangesListResponse",
+   "type": "object",
+   "description": "The response to a request to enumerate Changes to a ResourceRecordSets collection.",
+   "properties": {
+    "changes": {
+     "type": "array",
+     "description": "The requested changes.",
+     "items": {
+      "$ref": "Change"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type of resource.",
+     "default": "dns#changesListResponse"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The presence of this field indicates that there exist more results following your last page of results in pagination order. To fetch them, make another list request using this value as your pagination token.\n\nIn this way you can retrieve the complete contents of even very large collections one page at a time. However, if the contents of the collection change between the first and last paginated list request, the set of all elements returned will be an inconsistent view of the collection. There is no way to retrieve a \"snapshot\" of collections larger than the maximum page size."
+    }
+   }
+  },
+  "ManagedZone": {
+   "id": "ManagedZone",
+   "type": "object",
+   "description": "A zone is a subtree of the DNS namespace under one administrative responsibility. A ManagedZone is a resource that represents a DNS zone hosted by the Cloud DNS service.",
+   "properties": {
+    "creationTime": {
+     "type": "string",
+     "description": "The time that this resource was created on the server. This is in RFC3339 text format. Output only."
+    },
+    "description": {
+     "type": "string",
+     "description": "A mutable string of at most 1024 characters associated with this resource for the user's convenience. Has no effect on the managed zone's function."
+    },
+    "dnsName": {
+     "type": "string",
+     "description": "The DNS name of this managed zone, for instance \"example.com.\"."
+    },
+    "id": {
+     "type": "string",
+     "description": "Unique identifier for the resource; defined by the server (output only)",
+     "format": "uint64"
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#managedZone\".",
+     "default": "dns#managedZone"
+    },
+    "name": {
+     "type": "string",
+     "description": "User assigned name for this resource. Must be unique within the project. The name must be 1-32 characters long, must begin with a letter, end with a letter or digit, and only contain lowercase letters, digits or dashes."
+    },
+    "nameServerSet": {
+     "type": "string",
+     "description": "Optionally specifies the NameServerSet for this ManagedZone. A NameServerSet is a set of DNS name servers that all host the same ManagedZones. Most users will leave this field unset."
+    },
+    "nameServers": {
+     "type": "array",
+     "description": "Delegate your managed_zone to these virtual name servers; defined by the server (output only)",
+     "items": {
+      "type": "string"
+     }
+    }
+   }
+  },
+  "ManagedZonesListResponse": {
+   "id": "ManagedZonesListResponse",
+   "type": "object",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Type of resource.",
+     "default": "dns#managedZonesListResponse"
+    },
+    "managedZones": {
+     "type": "array",
+     "description": "The managed zone resources.",
+     "items": {
+      "$ref": "ManagedZone"
+     }
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The presence of this field indicates that there exist more results following your last page of results in pagination order. To fetch them, make another list request using this value as your page token.\n\nIn this way you can retrieve the complete contents of even very large collections one page at a time. However, if the contents of the collection change between the first and last paginated list request, the set of all elements returned will be an inconsistent view of the collection. There is no way to retrieve a consistent snapshot of a collection larger than the maximum page size."
+    }
+   }
+  },
+  "Project": {
+   "id": "Project",
+   "type": "object",
+   "description": "A project resource. The project is a top level container for resources including Cloud DNS ManagedZones. Projects can be created only in the APIs console.",
+   "properties": {
+    "id": {
+     "type": "string",
+     "description": "User assigned unique identifier for the resource (output only)."
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#project\".",
+     "default": "dns#project"
+    },
+    "number": {
+     "type": "string",
+     "description": "Unique numeric identifier for the resource; defined by the server (output only).",
+     "format": "uint64"
+    },
+    "quota": {
+     "$ref": "Quota",
+     "description": "Quotas assigned to this project (output only)."
+    }
+   }
+  },
+  "Quota": {
+   "id": "Quota",
+   "type": "object",
+   "description": "Limits associated with a Project.",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#quota\".",
+     "default": "dns#quota"
+    },
+    "managedZones": {
+     "type": "integer",
+     "description": "Maximum allowed number of managed zones in the project.",
+     "format": "int32"
+    },
+    "resourceRecordsPerRrset": {
+     "type": "integer",
+     "description": "Maximum allowed number of ResourceRecords per ResourceRecordSet.",
+     "format": "int32"
+    },
+    "rrsetAdditionsPerChange": {
+     "type": "integer",
+     "description": "Maximum allowed number of ResourceRecordSets to add per ChangesCreateRequest.",
+     "format": "int32"
+    },
+    "rrsetDeletionsPerChange": {
+     "type": "integer",
+     "description": "Maximum allowed number of ResourceRecordSets to delete per ChangesCreateRequest.",
+     "format": "int32"
+    },
+    "rrsetsPerManagedZone": {
+     "type": "integer",
+     "description": "Maximum allowed number of ResourceRecordSets per zone in the project.",
+     "format": "int32"
+    },
+    "totalRrdataSizePerChange": {
+     "type": "integer",
+     "description": "Maximum allowed size for total rrdata in one ChangesCreateRequest in bytes.",
+     "format": "int32"
+    }
+   }
+  },
+  "ResourceRecordSet": {
+   "id": "ResourceRecordSet",
+   "type": "object",
+   "description": "A unit of data that will be returned by the DNS servers.",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#resourceRecordSet\".",
+     "default": "dns#resourceRecordSet"
+    },
+    "name": {
+     "type": "string",
+     "description": "For example, www.example.com."
+    },
+    "rrdatas": {
+     "type": "array",
+     "description": "As defined in RFC 1035 (section 5) and RFC 1034 (section 3.6.1).",
+     "items": {
+      "type": "string"
+     }
+    },
+    "ttl": {
+     "type": "integer",
+     "description": "Number of seconds that this ResourceRecordSet can be cached by resolvers.",
+     "format": "int32"
+    },
+    "type": {
+     "type": "string",
+     "description": "The identifier of a supported record type, for example, A, AAAA, MX, TXT, and so on."
+    }
+   }
+  },
+  "ResourceRecordSetsListResponse": {
+   "id": "ResourceRecordSetsListResponse",
+   "type": "object",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Type of resource.",
+     "default": "dns#resourceRecordSetsListResponse"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The presence of this field indicates that there exist more results following your last page of results in pagination order. To fetch them, make another list request using this value as your pagination token.\n\nIn this way you can retrieve the complete contents of even very large collections one page at a time. However, if the contents of the collection change between the first and last paginated list request, the set of all elements returned will be an inconsistent view of the collection. There is no way to retrieve a consistent snapshot of a collection larger than the maximum page size."
+    },
+    "rrsets": {
+     "type": "array",
+     "description": "The resource record set resources.",
+     "items": {
+      "$ref": "ResourceRecordSet"
+     }
+    }
+   }
+  }
+ },
+ "resources": {
+  "changes": {
+   "methods": {
+    "create": {
+     "id": "dns.changes.create",
+     "path": "{project}/managedZones/{managedZone}/changes",
+     "httpMethod": "POST",
+     "description": "Atomically update the ResourceRecordSet collection.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "request": {
+      "$ref": "Change"
+     },
+     "response": {
+      "$ref": "Change"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "get": {
+     "id": "dns.changes.get",
+     "path": "{project}/managedZones/{managedZone}/changes/{changeId}",
+     "httpMethod": "GET",
+     "description": "Fetch the representation of an existing Change.",
+     "parameters": {
+      "changeId": {
+       "type": "string",
+       "description": "The identifier of the requested change, from a previous ResourceRecordSetsChangeResponse.",
+       "required": true,
+       "location": "path"
+      },
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone",
+      "changeId"
+     ],
+     "response": {
+      "$ref": "Change"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "list": {
+     "id": "dns.changes.list",
+     "path": "{project}/managedZones/{managedZone}/changes",
+     "httpMethod": "GET",
+     "description": "Enumerate Changes to a ResourceRecordSet collection.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Optional. Maximum number of results to be returned. If unspecified, the server will decide how many results to return.",
+       "format": "int32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Optional. A tag returned by a previous list request that was truncated. Use this parameter to continue a previous list request.",
+       "location": "query"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      },
+      "sortBy": {
+       "type": "string",
+       "description": "Sorting criterion. The only supported value is change sequence.",
+       "default": "changeSequence",
+       "enum": [
+        "changeSequence"
+       ],
+       "enumDescriptions": [
+        ""
+       ],
+       "location": "query"
+      },
+      "sortOrder": {
+       "type": "string",
+       "description": "Sorting order direction: 'ascending' or 'descending'.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "response": {
+      "$ref": "ChangesListResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    }
+   }
+  },
+  "managedZones": {
+   "methods": {
+    "create": {
+     "id": "dns.managedZones.create",
+     "path": "{project}/managedZones",
+     "httpMethod": "POST",
+     "description": "Create a new ManagedZone.",
+     "parameters": {
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project"
+     ],
+     "request": {
+      "$ref": "ManagedZone"
+     },
+     "response": {
+      "$ref": "ManagedZone"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "delete": {
+     "id": "dns.managedZones.delete",
+     "path": "{project}/managedZones/{managedZone}",
+     "httpMethod": "DELETE",
+     "description": "Delete a previously created ManagedZone.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "get": {
+     "id": "dns.managedZones.get",
+     "path": "{project}/managedZones/{managedZone}",
+     "httpMethod": "GET",
+     "description": "Fetch the representation of an existing ManagedZone.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "response": {
+      "$ref": "ManagedZone"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "list": {
+     "id": "dns.managedZones.list",
+     "path": "{project}/managedZones",
+     "httpMethod": "GET",
+     "description": "Enumerate ManagedZones that have been created but not yet deleted.",
+     "parameters": {
+      "dnsName": {
+       "type": "string",
+       "description": "Restricts the list to return only zones with this domain name.",
+       "location": "query"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Optional. Maximum number of results to be returned. If unspecified, the server will decide how many results to return.",
+       "format": "int32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Optional. A tag returned by a previous list request that was truncated. Use this parameter to continue a previous list request.",
+       "location": "query"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project"
+     ],
+     "response": {
+      "$ref": "ManagedZonesListResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    }
+   }
+  },
+  "projects": {
+   "methods": {
+    "get": {
+     "id": "dns.projects.get",
+     "path": "{project}",
+     "httpMethod": "GET",
+     "description": "Fetch the representation of an existing Project.",
+     "parameters": {
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project"
+     ],
+     "response": {
+      "$ref": "Project"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    }
+   }
+  },
+  "resourceRecordSets": {
+   "methods": {
+    "list": {
+     "id": "dns.resourceRecordSets.list",
+     "path": "{project}/managedZones/{managedZone}/rrsets",
+     "httpMethod": "GET",
+     "description": "Enumerate ResourceRecordSets that have been created but not yet deleted.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Optional. Maximum number of results to be returned. If unspecified, the server will decide how many results to return.",
+       "format": "int32",
+       "location": "query"
+      },
+      "name": {
+       "type": "string",
+       "description": "Restricts the list to return only records with this fully qualified domain name.",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Optional. A tag returned by a previous list request that was truncated. Use this parameter to continue a previous list request.",
+       "location": "query"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      },
+      "type": {
+       "type": "string",
+       "description": "Restricts the list to return only records of this type. If present, the \"name\" parameter must also be present.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "response": {
+      "$ref": "ResourceRecordSetsListResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    }
+   }
+  }
+ }
+}
diff --git a/apitools/gen/util.py b/apitools/gen/util.py
new file mode 100644
index 0000000..146b452
--- /dev/null
+++ b/apitools/gen/util.py
@@ -0,0 +1,348 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Assorted utilities shared between parts of apitools."""
+from __future__ import print_function
+
+import collections
+import contextlib
+import json
+import keyword
+import logging
+import os
+import re
+
+import six
+from six.moves import urllib_parse
+import six.moves.urllib.error as urllib_error
+import six.moves.urllib.request as urllib_request
+
+
class Error(Exception):

    """Base class for all errors raised by apitools generation code."""
+
+
class CommunicationError(Error):

    """Error in network communication (e.g. fetching a discovery doc)."""
+
+
+def _SortLengthFirstKey(a):
+    return -len(a), a
+
+
class Names(object):

    """Utility class for cleaning and normalizing names in a fixed style.

    Holds the configuration (prefixes to strip, naming convention, enum
    capitalization) used to turn discovery-document names into valid
    Python identifiers.
    """
    DEFAULT_NAME_CONVENTION = 'LOWER_CAMEL'
    NAME_CONVENTIONS = ['LOWER_CAMEL', 'LOWER_WITH_UNDER', 'NONE']

    def __init__(self, strip_prefixes,
                 name_convention=None,
                 capitalize_enums=False):
        # Sort longest-first so the most specific prefix wins in __StripName.
        self.__strip_prefixes = sorted(strip_prefixes, key=_SortLengthFirstKey)
        self.__name_convention = (
            name_convention or self.DEFAULT_NAME_CONVENTION)
        self.__capitalize_enums = capitalize_enums

    @staticmethod
    def __FromCamel(name, separator='_'):
        """Convert a camelCase name to lower_with_under form."""
        name = re.sub(r'([a-z0-9])([A-Z])', r'\1%s\2' % separator, name)
        return name.lower()

    @staticmethod
    def __ToCamel(name, separator='_'):
        """Convert a separator-delimited name to CamelCase."""
        # TODO(craigcitro): Consider what to do about leading or trailing
        # underscores (such as `_refValue` in discovery).
        return ''.join(s[0:1].upper() + s[1:] for s in name.split(separator))

    @staticmethod
    def __ToLowerCamel(name, separator='_'):
        """Convert a separator-delimited name to lowerCamelCase."""
        name = Names.__ToCamel(name, separator=separator)
        return name[0].lower() + name[1:]

    def __StripName(self, name):
        """Strip the first matching strip_prefix entry from name."""
        if not name:
            return name
        for prefix in self.__strip_prefixes:
            if name.startswith(prefix):
                return name[len(prefix):]
        return name

    @staticmethod
    def CleanName(name):
        """Perform generic name cleaning to produce a valid identifier."""
        name = re.sub('[^_A-Za-z0-9]', '_', name)
        # Use name[:1] so an empty name is returned unchanged instead of
        # raising IndexError.
        if name[:1].isdigit():
            name = '_%s' % name
        while keyword.iskeyword(name):
            name = '%s_' % name
        # If we end up with __ as a prefix, we'll run afoul of python
        # field renaming, so we manually correct for it.
        if name.startswith('__'):
            name = 'f%s' % name
        return name

    @staticmethod
    def NormalizeRelativePath(path):
        """Normalize camelCase entries in a URL path template."""
        path_components = path.split('/')
        normalized_components = []
        for component in path_components:
            if re.match(r'{[A-Za-z0-9_]+}$', component):
                normalized_components.append(
                    '{%s}' % Names.CleanName(component[1:-1]))
            else:
                normalized_components.append(component)
        return '/'.join(normalized_components)

    def NormalizeEnumName(self, enum_name):
        """Return a cleaned (and optionally upper-cased) enum value name."""
        if self.__capitalize_enums:
            enum_name = enum_name.upper()
        return self.CleanName(enum_name)

    def ClassName(self, name, separator='_'):
        """Generate a valid class name from name."""
        # TODO(craigcitro): Get rid of this case here and in MethodName.
        if name is None:
            return name
        # TODO(craigcitro): This is a hack to handle the case of specific
        # protorpc class names; clean this up.
        if name.startswith(('protorpc.', 'message_types.',
                            'apitools.base.protorpclite.',
                            'apitools.base.protorpclite.message_types.')):
            return name
        name = self.__StripName(name)
        name = self.__ToCamel(name, separator=separator)
        return self.CleanName(name)

    def MethodName(self, name, separator='_'):
        """Generate a valid method name from name."""
        if name is None:
            return None
        name = Names.__ToCamel(name, separator=separator)
        return Names.CleanName(name)

    def FieldName(self, name):
        """Generate a valid field name from name."""
        # TODO(craigcitro): We shouldn't need to strip this name, but some
        # of the service names here are excessive. Fix the API and then
        # remove this.
        name = self.__StripName(name)
        if self.__name_convention == 'LOWER_CAMEL':
            name = Names.__ToLowerCamel(name)
        elif self.__name_convention == 'LOWER_WITH_UNDER':
            name = Names.__FromCamel(name)
        return Names.CleanName(name)
+
+
@contextlib.contextmanager
def Chdir(dirname, create=True):
    """Context manager running its body with dirname as the cwd.

    Creates dirname when missing and create is True; otherwise raises
    OSError. The previous working directory is restored on exit.
    """
    if not os.path.exists(dirname):
        if not create:
            raise OSError('Cannot find directory %s' % dirname)
        os.mkdir(dirname)
    original_cwd = os.getcwd()
    os.chdir(dirname)
    try:
        yield
    finally:
        os.chdir(original_cwd)
+
+
def NormalizeVersion(version):
    """Return version with identifier-hostile characters replaced.

    Currently '.' is the only character that needs handling.
    """
    return '_'.join(version.split('.'))
+
+
+def _ComputePaths(package, version, discovery_doc):
+    full_path = urllib_parse.urljoin(
+        discovery_doc['rootUrl'], discovery_doc['servicePath'])
+    api_path_component = '/'.join((package, version, ''))
+    if api_path_component not in full_path:
+        return full_path, ''
+    prefix, _, suffix = full_path.rpartition(api_path_component)
+    return prefix + api_path_component, suffix
+
+
class ClientInfo(collections.namedtuple('ClientInfo', (
        'package', 'scopes', 'version', 'client_id', 'client_secret',
        'user_agent', 'client_class_name', 'url_version', 'api_key',
        'base_url', 'base_path'))):

    """Container for client-related info and derived file/rule names."""

    @classmethod
    def Create(cls, discovery_doc,
               scope_ls, client_id, client_secret, user_agent, names, api_key):
        """Create a new ClientInfo object from a discovery document."""
        oauth_scopes = set(
            discovery_doc.get('auth', {}).get('oauth2', {}).get('scopes', {}))
        oauth_scopes.update(scope_ls)
        package = discovery_doc['name']
        url_version = discovery_doc['version']
        base_url, base_path = _ComputePaths(package, url_version,
                                            discovery_doc)
        version = NormalizeVersion(discovery_doc['version'])
        # Client class name is e.g. 'DnsV1' for package 'dns', version 'v1'.
        class_name = '%s%s' % (names.ClassName(package),
                               names.ClassName(version))
        return cls(
            package=package,
            version=version,
            url_version=url_version,
            scopes=sorted(oauth_scopes),
            client_id=client_id,
            client_secret=client_secret,
            user_agent=user_agent,
            api_key=api_key,
            base_url=base_url,
            base_path=base_path,
            client_class_name=class_name,
        )

    @property
    def default_directory(self):
        """Default output directory (the package name)."""
        return self.package

    @property
    def cli_rule_name(self):
        return '%s_%s' % (self.package, self.version)

    @property
    def cli_file_name(self):
        return '%s.py' % self.cli_rule_name

    @property
    def client_rule_name(self):
        return '%s_%s_client' % (self.package, self.version)

    @property
    def client_file_name(self):
        return '%s.py' % self.client_rule_name

    @property
    def messages_rule_name(self):
        return '%s_%s_messages' % (self.package, self.version)

    @property
    def services_rule_name(self):
        return '%s_%s_services' % (self.package, self.version)

    @property
    def messages_file_name(self):
        return '%s.py' % self.messages_rule_name

    @property
    def messages_proto_file_name(self):
        return '%s.proto' % self.messages_rule_name

    @property
    def services_proto_file_name(self):
        return '%s.proto' % self.services_rule_name
+
+
def CleanDescription(description):
    """Return a version of description safe for printing in a docstring."""
    if isinstance(description, six.string_types):
        # Break up triple quotes so they can't terminate the docstring.
        return description.replace('"""', '" " "')
    return description
+
+
class SimplePrettyPrinter(object):

    """Simple pretty-printer that supports an indent contextmanager."""

    def __init__(self, out):
        self.__out = out
        self.__indent = ''
        self.__skip = False
        self.__comment_context = False

    @property
    def indent(self):
        return self.__indent

    def CalculateWidth(self, max_width=78):
        """Return the content width remaining at the current indent."""
        return max_width - len(self.indent)

    @contextlib.contextmanager
    def Indent(self, indent='  '):
        """Increase the indent for the duration of the context."""
        previous_indent = self.__indent
        self.__indent = '%s%s' % (previous_indent, indent)
        try:
            yield
        finally:
            # Restore the indent even if the body raised.
            self.__indent = previous_indent

    @contextlib.contextmanager
    def CommentContext(self):
        """Print without any argument formatting."""
        old_context = self.__comment_context
        self.__comment_context = True
        try:
            yield
        finally:
            # Restore the context even if the body raised.
            self.__comment_context = old_context

    def __call__(self, *args):
        """Print args[0] % args[1:] at the current indent.

        With no (or an empty first) argument, prints a blank line. In
        comment context, interpolation arguments are not allowed.
        """
        if self.__comment_context and args[1:]:
            raise Error('Cannot do string interpolation in comment context')
        if args and args[0]:
            if not self.__comment_context:
                line = (args[0] % args[1:]).rstrip()
            else:
                line = args[0].rstrip()
            # Round-trip through ASCII with backslashreplace so non-ASCII
            # characters are escaped, and decode back so we print text --
            # not the repr of a bytes object -- on Python 3.
            line = line.encode('ascii', 'backslashreplace').decode('ascii')
            print('%s%s' % (self.__indent, line), file=self.__out)
        else:
            print('', file=self.__out)
+
+
+def _NormalizeDiscoveryUrls(discovery_url):
+    """Expands a few abbreviations into full discovery urls."""
+    if discovery_url.startswith('http'):
+        return [discovery_url]
+    elif '.' not in discovery_url:
+        raise ValueError('Unrecognized value "%s" for discovery url')
+    api_name, _, api_version = discovery_url.partition('.')
+    return [
+        'https://www.googleapis.com/discovery/v1/apis/%s/%s/rest' % (
+            api_name, api_version),
+        'https://%s.googleapis.com/$discovery/rest?version=%s' % (
+            api_name, api_version),
+    ]
+
+
def FetchDiscoveryDoc(discovery_url, retries=5):
    """Fetch the discovery document at the given url.

    Tries each candidate URL (see _NormalizeDiscoveryUrls) up to
    `retries` times and returns the first document fetched successfully.

    Raises:
      CommunicationError: if no candidate URL could be fetched.
    """
    discovery_urls = _NormalizeDiscoveryUrls(discovery_url)
    discovery_doc = None
    last_exception = None
    for url in discovery_urls:
        for _ in range(retries):
            try:
                discovery_doc = json.loads(urllib_request.urlopen(url).read())
                break
            except (urllib_error.HTTPError, urllib_error.URLError) as e:
                logging.info(
                    'Attempting to fetch discovery doc again after "%s"', e)
                last_exception = e
        if discovery_doc is not None:
            # Stop after the first successful fetch rather than fetching
            # (and discarding) the doc again from the remaining URLs.
            break
    if discovery_doc is None:
        raise CommunicationError(
            'Could not find discovery doc at any of %s: %s' % (
                discovery_urls, last_exception))
    return discovery_doc
diff --git a/apitools/gen/util_test.py b/apitools/gen/util_test.py
new file mode 100644
index 0000000..7cb0739
--- /dev/null
+++ b/apitools/gen/util_test.py
@@ -0,0 +1,39 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for util."""
+import unittest2
+
+from apitools.gen import util
+
+
class NormalizeVersionTest(unittest2.TestCase):

    def testVersions(self):
        # A version with no '.' comes back unchanged.
        self.assertEqual('v1', util.NormalizeVersion('v1'))
        # Dots are replaced with underscores.
        self.assertEqual('v0_1', util.NormalizeVersion('v0.1'))
+
+
class NamesTest(unittest2.TestCase):

    def testKeywords(self):
        # Python keywords get a trailing underscore appended.
        self.assertEqual('in_', util.Names(['']).CleanName('in'))

    def testNormalizeEnumName(self):
        # Leading digits are prefixed with an underscore.
        self.assertEqual('_0', util.Names(['']).NormalizeEnumName('0'))
diff --git a/apitools/scripts/__init__.py b/apitools/scripts/__init__.py
new file mode 100644
index 0000000..463cb42
--- /dev/null
+++ b/apitools/scripts/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Shared __init__.py for apitools."""
+
# Allow this package to be split across multiple directories on sys.path
# (namespace-package style).
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
diff --git a/apitools/scripts/oauth2l.py b/apitools/scripts/oauth2l.py
new file mode 100644
index 0000000..cddba0a
--- /dev/null
+++ b/apitools/scripts/oauth2l.py
@@ -0,0 +1,339 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Command-line utility for fetching/inspecting credentials.
+
+oauth2l (pronounced "oauthtool") is a small utility for fetching
+credentials, or inspecting existing credentials. Here we demonstrate
+some sample use:
+
+    $ oauth2l fetch userinfo.email bigquery compute
+    Fetched credentials of type:
+      oauth2client.client.OAuth2Credentials
+    Access token:
+      ya29.abcdefghijklmnopqrstuvwxyz123yessirree
+    $ oauth2l header userinfo.email
+    Authorization: Bearer ya29.zyxwvutsrqpnmolkjihgfedcba
+    $ oauth2l validate thisisnotatoken
+    <exit status: 1>
+    $ oauth2l validate ya29.zyxwvutsrqpnmolkjihgfedcba
+    $ oauth2l scopes ya29.abcdefghijklmnopqrstuvwxyz123yessirree
+    https://www.googleapis.com/auth/bigquery
+    https://www.googleapis.com/auth/compute
+    https://www.googleapis.com/auth/userinfo.email
+
+The `header` command is designed to be easy to use with `curl`:
+
+    $ curl -H "$(oauth2l header bigquery)" \\
+      'https://www.googleapis.com/bigquery/v2/projects'
+    ... lists all projects ...
+
+The token can also be printed in other formats, for easy chaining
+into other programs:
+
+    $ oauth2l fetch -f json_compact userinfo.email
+    <one-line JSON object with credential information>
+    $ oauth2l fetch -f bare drive
+    ya29.suchT0kenManyCredentialsW0Wokyougetthepoint
+
+"""
+
+from __future__ import print_function
+
+import argparse
+import json
+import logging
+import os
+import pkgutil
+import sys
+import textwrap
+
+import oauth2client.client
+from six.moves import http_client
+
+import apitools.base.py as apitools_base
+
+# We could use a generated client here, but it's used for precisely
+# one URL, with one parameter and no worries about URL encoding. Let's
+# go with simple.
# str.format template: _GetTokenScopes fills in {access_token}.
_OAUTH2_TOKENINFO_TEMPLATE = (
    'https://www.googleapis.com/oauth2/v2/tokeninfo'
    '?access_token={access_token}'
)
+
+
def GetDefaultClientInfo():
    """Return the client id/secret bundled in apitools package data."""
    raw = pkgutil.get_data(
        'apitools.data', 'apitools_client_secrets.json').decode('utf8')
    secrets = json.loads(raw)['installed']
    return {
        'client_id': secrets['client_id'],
        'client_secret': secrets['client_secret'],
        'user_agent': 'apitools/0.2 oauth2l/0.1',
    }
+
+
def GetClientInfoFromFlags(client_secrets):
    """Fetch client info from args.

    When client_secrets names a client_secrets.json file (as downloaded
    from the Developer Console), the 'installed' section is read from
    it; otherwise the packaged defaults are used.

    Raises:
      ValueError: if the file is missing or not for an installed app.
    """
    if not client_secrets:
        return GetDefaultClientInfo()
    client_secrets_path = os.path.expanduser(client_secrets)
    if not os.path.exists(client_secrets_path):
        raise ValueError(
            'Cannot find file: {0}'.format(client_secrets))
    with open(client_secrets_path) as client_secrets_file:
        contents = json.load(client_secrets_file)
    if 'installed' not in contents:
        raise ValueError('Provided client ID must be for an installed app')
    installed = contents['installed']
    return {
        'client_id': installed['client_id'],
        'client_secret': installed['client_secret'],
        'user_agent': 'apitools/0.2 oauth2l/0.1',
    }
+
+
+def _ExpandScopes(scopes):
+    scope_prefix = 'https://www.googleapis.com/auth/'
+    return [s if s.startswith('https://') else scope_prefix + s
+            for s in scopes]
+
+
+def _PrettyJson(data):
+    return json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))
+
+
+def _CompactJson(data):
+    return json.dumps(data, sort_keys=True, separators=(',', ':'))
+
+
+def _AsText(text_or_bytes):
+    if isinstance(text_or_bytes, bytes):
+        return text_or_bytes.decode('utf8')
+    return text_or_bytes
+
+
+def _Format(fmt, credentials):
+    """Format credentials according to fmt."""
+    if fmt == 'bare':
+        return credentials.access_token
+    elif fmt == 'header':
+        return 'Authorization: Bearer %s' % credentials.access_token
+    elif fmt == 'json':
+        return _PrettyJson(json.loads(_AsText(credentials.to_json())))
+    elif fmt == 'json_compact':
+        return _CompactJson(json.loads(_AsText(credentials.to_json())))
+    elif fmt == 'pretty':
+        format_str = textwrap.dedent('\n'.join([
+            'Fetched credentials of type:',
+            '  {credentials_type.__module__}.{credentials_type.__name__}',
+            'Access token:',
+            '  {credentials.access_token}',
+        ]))
+        return format_str.format(credentials=credentials,
+                                 credentials_type=type(credentials))
+    raise ValueError('Unknown format: {0}'.format(fmt))
+
+_FORMATS = set(('bare', 'header', 'json', 'json_compact', 'pretty'))
+
+
def _GetTokenScopes(access_token):
    """Return the list of valid scopes for the given token as a list."""
    url = _OAUTH2_TOKENINFO_TEMPLATE.format(access_token=access_token)
    response = apitools_base.MakeRequest(
        apitools_base.GetHttp(), apitools_base.Request(url))
    acceptable = (http_client.OK, http_client.BAD_REQUEST)
    if response.status_code not in acceptable:
        # Anything other than OK/BAD_REQUEST is an unexpected failure.
        raise apitools_base.HttpError.FromResponse(response)
    if response.status_code == http_client.BAD_REQUEST:
        # BAD_REQUEST means the token is invalid: no scopes.
        return []
    return json.loads(_AsText(response.content))['scope'].split(' ')
+
+
def _ValidateToken(access_token):
    """Return True iff the provided access token is valid."""
    scopes = _GetTokenScopes(access_token)
    return bool(scopes)
+
+
def _FetchCredentials(args, client_info=None, credentials_filename=None):
    """Fetch a credential for the given client_info and scopes.

    Args:
      args: parsed argparse namespace carrying scope, client_secrets,
        credentials_filename and service_account_json_keyfile.
      client_info: optional dict overriding the flag-derived client info.
      credentials_filename: optional credential-store filename override.

    Returns:
      A credentials object whose access token has been validated (and
      refreshed if it was invalid).

    Raises:
      ValueError: if no scopes were provided.
    """
    client_info = client_info or GetClientInfoFromFlags(args.client_secrets)
    scopes = _ExpandScopes(args.scope)
    if not scopes:
        raise ValueError('No scopes provided')
    credentials_filename = credentials_filename or args.credentials_filename
    # TODO(craigcitro): Remove this logging nonsense once we quiet the
    # spurious logging in oauth2client.
    old_level = logging.getLogger().level
    logging.getLogger().setLevel(logging.ERROR)
    try:
        credentials = apitools_base.GetCredentials(
            'oauth2l', scopes, credentials_filename=credentials_filename,
            service_account_json_keyfile=args.service_account_json_keyfile,
            oauth2client_args='', **client_info)
    finally:
        # Restore the previous log level even if fetching raised.
        logging.getLogger().setLevel(old_level)
    if not _ValidateToken(credentials.access_token):
        credentials.refresh(apitools_base.GetHttp())
    return credentials
+
+
def _Email(args):
    """Print the email address for this token, if possible."""
    credentials = oauth2client.client.AccessTokenCredentials(
        args.access_token, 'oauth2l/1.0')
    userinfo = apitools_base.GetUserinfo(credentials)
    user_email = userinfo.get('email')
    if user_email:
        print(user_email)
+
+
def _Fetch(args):
    """Fetch a valid access token and display it."""
    fmt = args.credentials_format.lower()
    print(_Format(fmt, _FetchCredentials(args)))
+
+
def _Header(args):
    """Fetch an access token and display it formatted as an HTTP header."""
    credentials = _FetchCredentials(args)
    print(_Format('header', credentials))
+
+
def _Scopes(args):
    """Print the list of scopes for a valid token."""
    scopes = _GetTokenScopes(args.access_token)
    if not scopes:
        # No scopes: signal failure via exit code.
        return 1
    print('\n'.join(sorted(scopes)))
+
+
def _Userinfo(args):
    """Print the userinfo for this token, if possible."""
    credentials = oauth2client.client.AccessTokenCredentials(
        args.access_token, 'oauth2l/1.0')
    userinfo = apitools_base.GetUserinfo(credentials)
    formatter = _PrettyJson if args.format == 'json' else _CompactJson
    print(formatter(userinfo))
+
+
def _Validate(args):
    """Validate an access token. Exits with 0 if valid, 1 otherwise."""
    return 0 if _ValidateToken(args.access_token) else 1
+
+
def _GetParser():
    """Returns argparse argument parser.

    One subparser per command (email, fetch, header, scopes, userinfo,
    validate); each also accepts the shared credential flags.
    """
    # Flags shared by every subcommand (attached via parents= below).
    shared_flags = argparse.ArgumentParser(add_help=False)
    shared_flags.add_argument(
        '--client_secrets',
        default='',
        help=('If specified, use the client ID/secret from the named '
              'file, which should be a client_secrets.json file '
              'downloaded from the Developer Console.'))
    shared_flags.add_argument(
        '--credentials_filename',
        default='',
        help='(optional) Filename for fetching/storing credentials.')
    shared_flags.add_argument(
        '--service_account_json_keyfile',
        default='',
        help=('Filename for a JSON service account key downloaded from '
              'the Google Developer Console.'))

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    # The chosen command name lands in args.command; its handler in
    # args.func (via set_defaults on each subparser).
    subparsers = parser.add_subparsers(dest='command')

    # email
    email = subparsers.add_parser('email', help=_Email.__doc__,
                                  parents=[shared_flags])
    email.set_defaults(func=_Email)
    email.add_argument(
        'access_token',
        help=('Access token to print associated email address for. Must have '
              'the userinfo.email scope.'))

    # fetch
    fetch = subparsers.add_parser('fetch', help=_Fetch.__doc__,
                                  parents=[shared_flags])
    fetch.set_defaults(func=_Fetch)
    fetch.add_argument(
        '-f', '--credentials_format',
        default='pretty', choices=sorted(_FORMATS),
        help='Output format for token.')
    fetch.add_argument(
        'scope',
        nargs='*',
        help='Scope to fetch. May be provided multiple times.')

    # header
    header = subparsers.add_parser('header', help=_Header.__doc__,
                                   parents=[shared_flags])
    header.set_defaults(func=_Header)
    header.add_argument(
        'scope',
        nargs='*',
        help='Scope to header. May be provided multiple times.')

    # scopes
    scopes = subparsers.add_parser('scopes', help=_Scopes.__doc__,
                                   parents=[shared_flags])
    scopes.set_defaults(func=_Scopes)
    scopes.add_argument(
        'access_token',
        help=('Scopes associated with this token will be printed.'))

    # userinfo
    userinfo = subparsers.add_parser('userinfo', help=_Userinfo.__doc__,
                                     parents=[shared_flags])
    userinfo.set_defaults(func=_Userinfo)
    userinfo.add_argument(
        '-f', '--format',
        default='json', choices=('json', 'json_compact'),
        help='Output format for userinfo.')
    userinfo.add_argument(
        'access_token',
        help=('Access token to print associated email address for. Must have '
              'the userinfo.email scope.'))

    # validate
    validate = subparsers.add_parser('validate', help=_Validate.__doc__,
                                     parents=[shared_flags])
    validate.set_defaults(func=_Validate)
    validate.add_argument(
        'access_token',
        help='Access token to validate.')

    return parser
+
+
def main(argv=None):
    """CLI entry point: parse argv and dispatch to the chosen subcommand.

    Returns the subcommand's exit code, or 1 if it raised.
    """
    argv = argv or sys.argv
    # Invoke the newly created parser.
    args = _GetParser().parse_args(argv[1:])
    try:
        exit_code = args.func(args)
    except KeyboardInterrupt:
        # Don't swallow Ctrl-C (the original `except BaseException`
        # converted it into exit code 1).
        raise
    except Exception as e:
        print('Error encountered in {0} operation: {1}'.format(
            args.command, e))
        return 1
    return exit_code
+
+
if __name__ == '__main__':
    # Propagate the subcommand's return code as the process exit status.
    sys.exit(main(sys.argv))
diff --git a/apitools/scripts/oauth2l_test.py b/apitools/scripts/oauth2l_test.py
new file mode 100644
index 0000000..157eb3a
--- /dev/null
+++ b/apitools/scripts/oauth2l_test.py
@@ -0,0 +1,348 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tests for oauth2l."""
+
+import json
+import os
+import sys
+
+import mock
+import oauth2client.client
+import six
+from six.moves import http_client
+import unittest2
+
+import apitools.base.py as apitools_base
+from apitools.scripts import oauth2l
+
+_OAUTH2L_MAIN_RUN = False
+
+
+class _FakeResponse(object):
+
+    def __init__(self, status_code, scopes=None):
+        self.status_code = status_code
+        if self.status_code == http_client.OK:
+            self.content = json.dumps({'scope': ' '.join(scopes or [])})
+        else:
+            self.content = 'Error'
+            self.info = str(http_client.responses[self.status_code])
+            self.request_url = 'some-url'
+
+
def _GetCommandOutput(command_name, command_argv):
    """Run an oauth2l subcommand and return its captured stdout, rstripped."""
    saved_streams = (sys.stdout, sys.stderr)
    captured_out = six.StringIO()
    captured_err = six.StringIO()
    sys.stdout, sys.stderr = captured_out, captured_err
    try:
        oauth2l.main(['oauth2l', command_name] + command_argv)
    finally:
        # Always restore the real streams, even if the command raised.
        sys.stdout, sys.stderr = saved_streams
    captured_out.seek(0)
    return captured_out.getvalue().rstrip()
+
+
class InvalidCommandTest(unittest2.TestCase):

    """An unrecognized subcommand should make argparse exit via SystemExit."""

    def testOutput(self):
        with self.assertRaises(SystemExit):
            _GetCommandOutput('foo', [])
+
+
class Oauth2lFormattingTest(unittest2.TestCase):

    """Covers each --credentials_format choice plus the 'header' shortcut."""

    def setUp(self):
        # A canned access token wrapped in real oauth2client credentials.
        self.access_token = 'ya29.abdefghijklmnopqrstuvwxyz'
        self.user_agent = 'oauth2l/1.0'
        self.credentials = oauth2client.client.AccessTokenCredentials(
            self.access_token, self.user_agent)

    def _Args(self, credentials_format):
        return ['--credentials_format=' + credentials_format, 'userinfo.email']

    def _RunCommand(self, command, args):
        # Every formatting test stubs credential fetching the same way and
        # expects exactly one fetch call.
        with mock.patch.object(oauth2l, '_FetchCredentials',
                               return_value=self.credentials,
                               autospec=True) as fetch_mock:
            output = _GetCommandOutput(command, args)
            self.assertEqual(1, fetch_mock.call_count)
        return output

    def testFormatBare(self):
        self.assertEqual(self.access_token,
                         self._RunCommand('fetch', self._Args('bare')))

    def testFormatHeader(self):
        expected = 'Authorization: Bearer %s' % self.access_token
        self.assertEqual(expected,
                         self._RunCommand('fetch', self._Args('header')))

    def testHeaderCommand(self):
        expected = 'Authorization: Bearer %s' % self.access_token
        self.assertEqual(expected,
                         self._RunCommand('header', ['userinfo.email']))

    def testFormatJson(self):
        output = self._RunCommand('fetch', self._Args('json'))
        stripped_lines = [line.strip() for line in output.splitlines()]
        self.assertIn('"_class": "AccessTokenCredentials",', stripped_lines)
        self.assertIn('"access_token": "%s",' % self.access_token,
                      stripped_lines)

    def testFormatJsonCompact(self):
        output = self._RunCommand('fetch', self._Args('json_compact'))
        self.assertIn('"_class":"AccessTokenCredentials",', output)
        self.assertIn('"access_token":"%s",' % self.access_token, output)
        # Compact output must be a single line.
        self.assertEqual(1, len(output.splitlines()))

    def testFormatPretty(self):
        output = self._RunCommand('fetch', self._Args('pretty'))
        self.assertIn('oauth2client.client.AccessTokenCredentials', output)
        self.assertIn(self.access_token, output)

    def testFakeFormat(self):
        self.assertRaises(ValueError,
                          oauth2l._Format, 'xml', self.credentials)
+
+
class TestFetch(unittest2.TestCase):

    """Covers the 'fetch' subcommand and client-secrets handling."""

    def setUp(self):
        # A canned access token wrapped in real oauth2client credentials.
        self.access_token = 'ya29.abdefghijklmnopqrstuvwxyz'
        self.user_agent = 'oauth2l/1.0'
        self.credentials = oauth2client.client.AccessTokenCredentials(
            self.access_token, self.user_agent)

    def testNoScopes(self):
        self.assertEqual(
            'Error encountered in fetch operation: No scopes provided',
            _GetCommandOutput('fetch', []))

    def testScopes(self):
        expected_scopes = [
            'https://www.googleapis.com/auth/userinfo.email',
            'https://www.googleapis.com/auth/cloud-platform',
        ]
        with mock.patch.object(apitools_base, 'GetCredentials',
                               return_value=self.credentials,
                               autospec=True) as fetch_mock:
            with mock.patch.object(oauth2l, '_GetTokenScopes',
                                   return_value=expected_scopes,
                                   autospec=True) as scopes_mock:
                output = _GetCommandOutput(
                    'fetch', ['userinfo.email', 'cloud-platform'])
                self.assertIn(self.access_token, output)
                self.assertEqual(1, fetch_mock.call_count)
                positional, _ = fetch_mock.call_args
                # Short scope names must be expanded to full URLs.
                self.assertEqual(expected_scopes, positional[-1])
                self.assertEqual(1, scopes_mock.call_count)
                self.assertEqual((self.access_token,),
                                 scopes_mock.call_args[0])

    def testCredentialsRefreshed(self):
        with mock.patch.object(apitools_base, 'GetCredentials',
                               return_value=self.credentials,
                               autospec=True) as fetch_mock:
            with mock.patch.object(oauth2l, '_ValidateToken',
                                   return_value=False,
                                   autospec=True) as validate_mock:
                with mock.patch.object(self.credentials, 'refresh',
                                       return_value=None,
                                       autospec=True) as refresh_mock:
                    output = _GetCommandOutput('fetch', ['userinfo.email'])
                    self.assertIn(self.access_token, output)
                    self.assertEqual(1, fetch_mock.call_count)
                    self.assertEqual(1, validate_mock.call_count)
                    # An invalid token must trigger exactly one refresh.
                    self.assertEqual(1, refresh_mock.call_count)

    def testDefaultClientInfo(self):
        with mock.patch.object(apitools_base, 'GetCredentials',
                               return_value=self.credentials,
                               autospec=True) as fetch_mock:
            with mock.patch.object(oauth2l, '_ValidateToken',
                                   return_value=True,
                                   autospec=True) as validate_mock:
                output = _GetCommandOutput('fetch', ['userinfo.email'])
                self.assertIn(self.access_token, output)
                self.assertEqual(1, fetch_mock.call_count)
                _, keyword_args = fetch_mock.call_args
                self.assertEqual(
                    '1042881264118.apps.googleusercontent.com',
                    keyword_args['client_id'])
                self.assertEqual(1, validate_mock.call_count)

    def testMissingClientSecrets(self):
        with self.assertRaises(ValueError):
            oauth2l.GetClientInfoFromFlags('/non/existent/file')

    def testWrongClientSecretsFormat(self):
        secrets_path = os.path.join(
            os.path.dirname(__file__),
            'testdata/noninstalled_client_secrets.json')
        with self.assertRaises(ValueError):
            oauth2l.GetClientInfoFromFlags(secrets_path)

    def testCustomClientInfo(self):
        secrets_path = os.path.join(
            os.path.dirname(__file__), 'testdata/fake_client_secrets.json')
        with mock.patch.object(apitools_base, 'GetCredentials',
                               return_value=self.credentials,
                               autospec=True) as fetch_mock:
            with mock.patch.object(oauth2l, '_ValidateToken',
                                   return_value=True,
                                   autospec=True) as validate_mock:
                output = _GetCommandOutput(
                    'fetch',
                    ['--client_secrets=' + secrets_path, 'userinfo.email'])
                self.assertIn(self.access_token, output)
                self.assertEqual(1, fetch_mock.call_count)
                _, keyword_args = fetch_mock.call_args
                # Client info must come from the secrets file, not defaults.
                self.assertEqual('144169.apps.googleusercontent.com',
                                 keyword_args['client_id'])
                self.assertEqual('awesomesecret',
                                 keyword_args['client_secret'])
                self.assertEqual(1, validate_mock.call_count)
+
+
class TestOtherCommands(unittest2.TestCase):

    """Covers the 'email', 'userinfo', 'scopes', and 'validate' subcommands."""

    def setUp(self):
        # A canned access token wrapped in real oauth2client credentials.
        self.access_token = 'ya29.abdefghijklmnopqrstuvwxyz'
        self.user_agent = 'oauth2l/1.0'
        self.credentials = oauth2client.client.AccessTokenCredentials(
            self.access_token, self.user_agent)

    def _PatchUserinfo(self, user_info):
        # The email/userinfo tests all stub out the same apitools helper.
        return mock.patch.object(apitools_base, 'GetUserinfo',
                                 return_value=user_info,
                                 autospec=True)

    def _PatchRequest(self, response):
        # The scopes/validate tests all stub out the raw HTTP request.
        return mock.patch.object(apitools_base, 'MakeRequest',
                                 return_value=response,
                                 autospec=True)

    def testEmail(self):
        info = {'email': 'foo@example.com'}
        with self._PatchUserinfo(info) as userinfo_mock:
            output = _GetCommandOutput('email', [self.access_token])
            self.assertEqual(info['email'], output)
            self.assertEqual(1, userinfo_mock.call_count)
            self.assertEqual(self.access_token,
                             userinfo_mock.call_args[0][0].access_token)

    def testNoEmail(self):
        with self._PatchUserinfo({}) as userinfo_mock:
            self.assertEqual('',
                             _GetCommandOutput('email', [self.access_token]))
            self.assertEqual(1, userinfo_mock.call_count)

    def testUserinfo(self):
        info = {'email': 'foo@example.com'}
        with self._PatchUserinfo(info) as userinfo_mock:
            output = _GetCommandOutput('userinfo', [self.access_token])
            self.assertEqual(json.dumps(info, indent=4), output)
            self.assertEqual(1, userinfo_mock.call_count)
            self.assertEqual(self.access_token,
                             userinfo_mock.call_args[0][0].access_token)

    def testUserinfoCompact(self):
        info = {'email': 'foo@example.com'}
        with self._PatchUserinfo(info) as userinfo_mock:
            output = _GetCommandOutput(
                'userinfo', ['--format=json_compact', self.access_token])
            self.assertEqual(json.dumps(info, separators=(',', ':')), output)
            self.assertEqual(1, userinfo_mock.call_count)
            self.assertEqual(self.access_token,
                             userinfo_mock.call_args[0][0].access_token)

    def testScopes(self):
        scopes = [u'https://www.googleapis.com/auth/userinfo.email',
                  u'https://www.googleapis.com/auth/cloud-platform']
        response = _FakeResponse(http_client.OK, scopes=scopes)
        with self._PatchRequest(response) as request_mock:
            output = _GetCommandOutput('scopes', [self.access_token])
            # Output is one scope per line, sorted.
            self.assertEqual(sorted(scopes), output.splitlines())
            self.assertEqual(1, request_mock.call_count)

    def testValidate(self):
        scopes = [u'https://www.googleapis.com/auth/userinfo.email',
                  u'https://www.googleapis.com/auth/cloud-platform']
        response = _FakeResponse(http_client.OK, scopes=scopes)
        with self._PatchRequest(response) as request_mock:
            self.assertEqual(
                '', _GetCommandOutput('validate', [self.access_token]))
            self.assertEqual(1, request_mock.call_count)

    def testBadResponseCode(self):
        response = _FakeResponse(http_client.BAD_REQUEST)
        with self._PatchRequest(response) as request_mock:
            self.assertEqual(
                '', _GetCommandOutput('scopes', [self.access_token]))
            self.assertEqual(1, request_mock.call_count)

    def testUnexpectedResponseCode(self):
        response = _FakeResponse(http_client.INTERNAL_SERVER_ERROR)
        with self._PatchRequest(response) as request_mock:
            output = _GetCommandOutput('scopes', [self.access_token])
            self.assertIn(str(http_client.responses[response.status_code]),
                          output)
            self.assertIn('Error encountered in scopes operation: HttpError',
                          output)
            self.assertEqual(1, request_mock.call_count)
diff --git a/apitools/scripts/testdata/fake_client_secrets.json b/apitools/scripts/testdata/fake_client_secrets.json
new file mode 100644
index 0000000..f1fabe6
--- /dev/null
+++ b/apitools/scripts/testdata/fake_client_secrets.json
@@ -0,0 +1,15 @@
+{
+  "installed": {
+    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
+    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
+    "client_email": "",
+    "client_id": "144169.apps.googleusercontent.com",
+    "client_secret": "awesomesecret",
+    "client_x509_cert_url": "",
+    "redirect_uris": [
+      "urn:ietf:wg:oauth:2.0:oob",
+      "oob"
+    ],
+    "token_uri": "https://accounts.google.com/o/oauth2/token"
+  }
+}
diff --git a/apitools/scripts/testdata/noninstalled_client_secrets.json b/apitools/scripts/testdata/noninstalled_client_secrets.json
new file mode 100644
index 0000000..6e67027
--- /dev/null
+++ b/apitools/scripts/testdata/noninstalled_client_secrets.json
@@ -0,0 +1,3 @@
+{
+  "webapp": {}
+}
diff --git a/default.pylintrc b/default.pylintrc
new file mode 100644
index 0000000..7b9c3c4
--- /dev/null
+++ b/default.pylintrc
@@ -0,0 +1,352 @@
+# PyLint config for apitools code.
+#
+# NOTES:
+#
+# - Rules for test / demo code are generated into 'pylintrc_reduced'
+#   as deltas from this configuration by the 'run_pylint.py' script.
+#
+# - 'RATIONALE:  API mapping' as a defense for non-default settings is
+#   based on the fact that this library maps APIs which are outside our
+#   control, and adhering to the out-of-the-box defaults would induce
+#   breakage / complexity in those mappings
+#
+[MASTER]
+
+# Specify a configuration file.
+# DEFAULT:  rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+# DEFAULT: init-hook=
+
+# Profiled execution.
+# DEFAULT:  profile=no
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+# DEFAULT:  ignore=CVS
+# NOTE: This path must be relative due to the use of
+#       os.walk in astroid.modutils.get_module_files.
+
+# Pickle collected data for later comparisons.
+# DEFAULT:  persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+# DEFAULT:  load-plugins=
+
+# DEPRECATED
+# DEFAULT:  include-ids=no
+
+# DEPRECATED
+# DEFAULT:  symbols=no
+
+
+[MESSAGES CONTROL]
+
+# TODO: remove cyclic-import.
+disable =
+    cyclic-import,
+    fixme,
+    import-error,
+    locally-disabled,
+    locally-enabled,
+    no-member,
+    no-name-in-module,
+    no-self-use,
+    super-on-old-class,
+    too-many-arguments,
+    too-many-function-args,
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html. You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+# DEFAULT:  output-format=text
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+# DEFAULT:  files-output=no
+
+# Tells whether to display a full report or only the messages
+# DEFAULT:  reports=yes
+# RATIONALE:  run from Travis / tox, and don't need / want to parse output.
+reports=no
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors, warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+# DEFAULT:  evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Add a comment according to your evaluation note. This is used by the global
+# evaluation report (RP0004).
+# DEFAULT:  comment=no
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+# DEFAULT:  min-similarity-lines=4
+min-similarity-lines=15
+
+# Ignore comments when computing similarities.
+# DEFAULT:  ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+# DEFAULT:  ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+# DEFAULT:  ignore-imports=no
+ignore-imports=yes
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+# DEFAULT:  init-import=no
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=^\*{0,2}(_$|unused_|dummy_)
+
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+# DEFAULT:  additional-builtins=
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+# DEFAULT:  logging-modules=logging
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+# DEFAULT:  max-line-length=80
+
+# Regexp for a line that is allowed to be longer than the limit.
+# DEFAULT:  ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+# DEFAULT:  single-line-if-stmt=no
+
+# List of optional constructs for which whitespace checking is disabled
+# DEFAULT:  no-space-check=trailing-comma,dict-separator
+# RATIONALE: pylint ignores whitespace checks around the
+#            constructs "dict-separator" (cases like {1:2}) and
+#            "trailing-comma" (cases like {1: 2, }).
+#            By setting "no-space-check" to empty whitespace checks will be
+#            enforced around both constructs.
+no-space-check =
+
+# Maximum number of lines in a module
+# DEFAULT:  max-module-lines=1000
+max-module-lines=1500
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+# DEFAULT:  indent-string='    '
+
+# Number of spaces of indent required inside a hanging or continued line.
+# DEFAULT:  indent-after-paren=4
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+# DEFAULT:  notes=FIXME,XXX,TODO
+
+
+[BASIC]
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+# DEFAULT:  no-docstring-rgx=__.*__
+no-docstring-rgx=(__.*__|main)
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+# DEFAULT:  docstring-min-length=-1
+docstring-min-length=10
+
+# Regular expression which should only match correct module names. The
+# leading underscore is sanctioned for private modules by Google's style
+# guide.
+module-rgx=^(_?[a-z][a-z0-9_]*)|__init__$
+
+# Regular expression matching correct constant names
+# DEFAULT:  const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+const-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
+
+# Regular expression matching correct class attribute names
+# DEFAULT:  class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+class-attribute-rgx=^(_?[A-Z][A-Z0-9_]*|__[a-z0-9_]+__|_?[a-z][a-z0-9_]*)$
+
+# Regular expression matching correct class names
+# DEFAULT:  class-rgx=[A-Z_][a-zA-Z0-9]+$
+class-rgx=^_?[A-Z][a-zA-Z0-9]*$
+
+# Regular expression which should only match correct function names.
+# 'camel_case' and 'snake_case' group names are used for consistency of naming
+# styles across functions and methods.
+function-rgx=^(?:(?P<camel_case>_?[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_?[a-z][a-z0-9_]*))$
+
+# Regular expression which should only match correct method names.
+# 'camel_case' and 'snake_case' group names are used for consistency of naming
+# styles across functions and methods. 'exempt' indicates a name which is
+# consistent with all naming styles.
+method-rgx=^(?:(?P<exempt>__[a-z0-9_]+__|next)|(?P<camel_case>_{0,2}[A-Z][a-zA-Z0-9]*)|(?P<snake_case>_{0,2}[a-z][a-z0-9_]*))$
+
+# Regular expression matching correct attribute names
+# DEFAULT:  attr-rgx=[a-z_][a-z0-9_]{2,30}$
+attr-rgx=^_{0,2}[a-z][a-z0-9_]*$
+
+# Regular expression matching correct argument names
+# DEFAULT:  argument-rgx=[a-z_][a-z0-9_]{2,30}$
+argument-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression matching correct variable names
+# DEFAULT:  variable-rgx=[a-z_][a-z0-9_]{2,30}$
+variable-rgx=^[a-z][a-z0-9_]*$
+
+# Regular expression matching correct inline iteration names
+# DEFAULT:  inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+inlinevar-rgx=^[a-z][a-z0-9_]*$
+
+# Good variable names which should always be accepted, separated by a comma
+# DEFAULT:  good-names=i,j,k,ex,Run,_
+good-names=main,_
+
+# Bad variable names which should always be refused, separated by a comma
+# DEFAULT:  bad-names=foo,bar,baz,toto,tutu,tata
+bad-names=
+
+# List of builtins function names that should not be used, separated by a comma
+# <http://go/python-style#Deprecated_Language_Features>
+bad-functions=input,apply,reduce
+
+
+[TYPECHECK]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+# DEFAULT:  ignore-mixin-members=yes
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis
+# DEFAULT:  ignored-modules=
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set).
+# DEFAULT:  ignored-classes=SQLObject
+
+# When zope mode is activated, add a predefined set of Zope acquired attributes
+# to generated-members.
+# DEFAULT:  zope=no
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E0201 when accessed. Python regular
+# expressions are accepted.
+# DEFAULT:  generated-members=REQUEST,acl_users,aq_parent
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+# DEFAULT:  deprecated-modules=regsub,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+# DEFAULT:  import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+# DEFAULT:  ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+# DEFAULT:  int-import-graph=
+
+
+[CLASSES]
+
+# List of interface methods to ignore, separated by a comma. This is used for
+# instance to not check methods defines in Zope's Interface base class.
+# DEFAULT:  ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
+
+# List of method names used to declare (i.e. assign) instance attributes.
+# DEFAULT:  defining-attr-methods=__init__,__new__,setUp
+
+# List of valid names for the first argument in a class method.
+# DEFAULT:  valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+# DEFAULT:  valid-metaclass-classmethod-first-arg=mcs
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+# DEFAULT:  max-args=5
+# RATIONALE:  API-mapping
+max-args = 14
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+# DEFAULT:  ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+# DEFAULT:  max-locals=15
+max-locals=24
+
+# Maximum number of return / yield for function / method body
+# DEFAULT:  max-returns=6
+max-returns=9
+
+# Maximum number of branch for function / method body
+# DEFAULT:  max-branches=12
+max-branches=21
+
+# Maximum number of statements in function / method body
+# DEFAULT:  max-statements=50
+
+# Maximum number of parents for a class (see R0901).
+# DEFAULT:  max-parents=7
+
+# Maximum number of attributes for a class (see R0902).
+# DEFAULT:  max-attributes=7
+# RATIONALE:  API mapping
+max-attributes=19
+
+# Minimum number of public methods for a class (see R0903).
+# DEFAULT:  min-public-methods=2
+# RATIONALE:  context mgrs may have *no* public methods
+min-public-methods=0
+
+# Maximum number of public methods for a class (see R0904).
+# DEFAULT:  max-public-methods=20
+# RATIONALE:  API mapping
+max-public-methods=40
+
+[ELIF]
+max-nested-blocks=6
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+# DEFAULT:  overgeneral-exceptions=Exception
diff --git a/ez_setup.py b/ez_setup.py
new file mode 100755
index 0000000..be314e4
--- /dev/null
+++ b/ez_setup.py
@@ -0,0 +1,266 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Bootstrap setuptools installation
+
+If you want to use setuptools in your package's setup.py, just include this
+file in the same directory with it, and add this to the top of your setup.py::
+
+    from ez_setup import use_setuptools
+    use_setuptools()
+
+If you want to require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, you can do so by supplying
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import sys
+DEFAULT_VERSION = "0.6c11"
+DEFAULT_URL     = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
+
+md5_data = {
+    'setuptools-0.6c10-py2.3.egg': 'ce1e2ab5d3a0256456d9fc13800a7090',
+    'setuptools-0.6c10-py2.4.egg': '57d6d9d6e9b80772c59a53a8433a5dd4',
+    'setuptools-0.6c10-py2.5.egg': 'de46ac8b1c97c895572e5e8596aeb8c7',
+    'setuptools-0.6c10-py2.6.egg': '58ea40aef06da02ce641495523a0b7f5',
+    'setuptools-0.6c11-py2.3.egg': '2baeac6e13d414a9d28e7ba5b5a596de',
+    'setuptools-0.6c11-py2.4.egg': 'bd639f9b0eac4c42497034dec2ec0c2b',
+    'setuptools-0.6c11-py2.5.egg': '64c94f3bf7a72a13ec83e0b24f2749b2',
+    'setuptools-0.6c11-py2.6.egg': 'bfa92100bd772d5a213eedd356d64086',
+    'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
+    'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
+    'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
+    'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
+    'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
+    'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
+    'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
+}
+
+import sys, os
+try: from hashlib import md5
+except ImportError: from md5 import md5
+
+def _validate_md5(egg_name, data):
+    if egg_name in md5_data:
+        digest = md5(data).hexdigest()
+        if digest != md5_data[egg_name]:
+            print >>sys.stderr, (
+                "md5 validation of %s failed!  (Possible download problem?)"
+                % egg_name
+            )
+            sys.exit(2)
+    return data
+
+def use_setuptools(
+    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
+    download_delay=15
+):
+    """Automatically find/download setuptools and make it available on sys.path
+
+    `version` should be a valid setuptools version number that is available
+    as an egg for download under the `download_base` URL (which should end with
+    a '/').  `to_dir` is the directory where setuptools will be downloaded, if
+    it is not already available.  If `download_delay` is specified, it should
+    be the number of seconds that will be paused before initiating a download,
+    should one be required.  If an older version of setuptools is installed,
+    this routine will print a message to ``sys.stderr`` and raise SystemExit in
+    an attempt to abort the calling script.
+    """
+    was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
+    def do_download():
+        egg = download_setuptools(version, download_base, to_dir, download_delay)
+        sys.path.insert(0, egg)
+        import setuptools; setuptools.bootstrap_install_from = egg
+    try:
+        import pkg_resources
+    except ImportError:
+        return do_download()       
+    try:
+        pkg_resources.require("setuptools>="+version); return
+    except pkg_resources.VersionConflict, e:
+        if was_imported:
+            print >>sys.stderr, (
+            "The required version of setuptools (>=%s) is not available, and\n"
+            "can't be installed while this script is running. Please install\n"
+            " a more recent version first, using 'easy_install -U setuptools'."
+            "\n\n(Currently using %r)"
+            ) % (version, e.args[0])
+            sys.exit(2)
+    except pkg_resources.DistributionNotFound:
+        pass
+
+    del pkg_resources, sys.modules['pkg_resources']    # reload ok
+    return do_download()
+
+def download_setuptools(
+    version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
+    delay = 15
+):
+    """Download setuptools from a specified location and return its filename
+
+    `version` should be a valid setuptools version number that is available
+    as an egg for download under the `download_base` URL (which should end
+    with a '/'). `to_dir` is the directory where the egg will be downloaded.
+    `delay` is the number of seconds to pause before an actual download attempt.
+    """
+    import urllib2, shutil
+    egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
+    url = download_base + egg_name
+    saveto = os.path.join(to_dir, egg_name)
+    src = dst = None
+    if not os.path.exists(saveto):  # Avoid repeated downloads
+        try:
+            from distutils import log
+            if delay:
+                log.warn("""
+---------------------------------------------------------------------------
+This script requires setuptools version %s to run (even to display
+help).  I will attempt to download it for you (from
+%s), but
+you may need to enable firewall access for this script first.
+I will start the download in %d seconds.
+
+(Note: if this machine does not have network access, please obtain the file
+
+   %s
+
+and place it in this directory before rerunning this script.)
+---------------------------------------------------------------------------""",
+                    version, download_base, delay, url
+                ); from time import sleep; sleep(delay)
+            log.warn("Downloading %s", url)
+            src = urllib2.urlopen(url)
+            # Read/write all in one block, so we don't create a corrupt file
+            # if the download is interrupted.
+            data = _validate_md5(egg_name, src.read())
+            dst = open(saveto,"wb"); dst.write(data)
+        finally:
+            if src: src.close()
+            if dst: dst.close()
+    return os.path.realpath(saveto)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def main(argv, version=DEFAULT_VERSION):
+    """Install or upgrade setuptools and EasyInstall"""
+    try:
+        import setuptools
+    except ImportError:
+        egg = None
+        try:
+            egg = download_setuptools(version, delay=0)
+            sys.path.insert(0,egg)
+            from setuptools.command.easy_install import main
+            return main(list(argv)+[egg])   # we're done here
+        finally:
+            if egg and os.path.exists(egg):
+                os.unlink(egg)
+    else:
+        if setuptools.__version__ == '0.0.1':
+            print >>sys.stderr, (
+            "You have an obsolete version of setuptools installed.  Please\n"
+            "remove it from your system entirely before rerunning this script."
+            )
+            sys.exit(2)
+
+    req = "setuptools>="+version
+    import pkg_resources
+    try:
+        pkg_resources.require(req)
+    except pkg_resources.VersionConflict:
+        try:
+            from setuptools.command.easy_install import main
+        except ImportError:
+            from easy_install import main
+        main(list(argv)+[download_setuptools(delay=0)])
+        sys.exit(0) # try to force an exit
+    else:
+        if argv:
+            from setuptools.command.easy_install import main
+            main(argv)
+        else:
+            print "Setuptools version",version,"or greater has been installed."
+            print '(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)'
+
+def update_md5(filenames):
+    """Update our built-in md5 registry"""
+
+    import re
+
+    for name in filenames:
+        base = os.path.basename(name)
+        f = open(name,'rb')
+        md5_data[base] = md5(f.read()).hexdigest()
+        f.close()
+
+    data = ["    %r: %r,\n" % it for it in md5_data.items()]
+    data.sort()
+    repl = "".join(data)
+
+    import inspect
+    srcfile = inspect.getsourcefile(sys.modules[__name__])
+    f = open(srcfile, 'rb'); src = f.read(); f.close()
+
+    match = re.search("\nmd5_data = {\n([^}]+)}", src)
+    if not match:
+        print >>sys.stderr, "Internal error!"
+        sys.exit(2)
+
+    src = src[:match.start(1)] + repl + src[match.end(1):]
+    f = open(srcfile,'w')
+    f.write(src)
+    f.close()
+
+
+if __name__=='__main__':
+    if len(sys.argv)>2 and sys.argv[1]=='--md5update':
+        update_md5(sys.argv[2:])
+    else:
+        main(sys.argv[1:])
diff --git a/run_pylint.py b/run_pylint.py
new file mode 100644
index 0000000..c53943f
--- /dev/null
+++ b/run_pylint.py
@@ -0,0 +1,235 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Custom script to run PyLint on apitools codebase.
+
+"Inspired" by the similar script in gcloud-python.
+
+This invokes pylint twice, each time as a subprocess. The first
+run lints the production/library code using the default rc file
+(PRODUCTION_RC). The second run lints the demo/test code using an
+rc file (TEST_RC) which permits more style violations (hence it
+has a reduced number of style checks).
+"""
+
+import ConfigParser
+import copy
+import os
+import subprocess
+import sys
+
+
+IGNORED_DIRECTORIES = [
+    'apitools/gen/testdata',
+    'samples/bigquery_sample/bigquery_v2',
+    'samples/dns_sample/dns_v1',
+    'samples/fusiontables_sample/fusiontables_v1',
+    'samples/iam_sample/iam_v1',
+    'samples/servicemanagement_sample/servicemanagement_v1',
+    'samples/storage_sample/storage_v1',
+    'venv',
+]
+IGNORED_FILES = [
+    'ez_setup.py',
+    'run_pylint.py',
+    'setup.py',
+]
+PRODUCTION_RC = 'default.pylintrc'
+TEST_RC = 'reduced.pylintrc'
+TEST_DISABLED_MESSAGES = [
+    'exec-used',
+    'invalid-name',
+    'missing-docstring',
+    'protected-access',
+]
+TEST_RC_ADDITIONS = {
+    'MESSAGES CONTROL': {
+        'disable': ',\n'.join(TEST_DISABLED_MESSAGES),
+    },
+}
+
+
+def read_config(filename):
+    """Reads pylintrc config onto native ConfigParser object."""
+    config = ConfigParser.ConfigParser()
+    with open(filename, 'r') as file_obj:
+        config.readfp(file_obj)
+    return config
+
+
+def make_test_rc(base_rc_filename, additions_dict, target_filename):
+    """Combines a base rc and test additions into single file."""
+    main_cfg = read_config(base_rc_filename)
+
+    # Create fresh config for test, which must extend production.
+    test_cfg = ConfigParser.ConfigParser()
+    test_cfg._sections = copy.deepcopy(main_cfg._sections)
+
+    for section, opts in additions_dict.items():
+        curr_section = test_cfg._sections.setdefault(
+            section, test_cfg._dict())
+        for opt, opt_val in opts.items():
+            curr_val = curr_section.get(opt)
+            if curr_val is None:
+                raise KeyError('Expected to be adding to existing option.')
+            curr_section[opt] = '%s\n%s' % (curr_val, opt_val)
+
+    with open(target_filename, 'w') as file_obj:
+        test_cfg.write(file_obj)
+
+
+def valid_filename(filename):
+    """Checks if a file is a Python file and is not ignored."""
+    for directory in IGNORED_DIRECTORIES:
+        if filename.startswith(directory):
+            return False
+    return (filename.endswith('.py') and
+            filename not in IGNORED_FILES)
+
+
+def is_production_filename(filename):
+    """Checks if the file contains production code.
+
+    :rtype: boolean
+    :returns: Boolean indicating production status.
+    """
+    return not ('demo' in filename or 'test' in filename or
+                filename.startswith('regression'))
+
+
+def get_files_for_linting(allow_limited=True, diff_base=None):
+    """Gets a list of files in the repository.
+
+    By default, returns all files via ``git ls-files``. However, in some cases
+    uses a specific commit or branch (a so-called diff base) to compare
+    against for changed files. (This requires ``allow_limited=True``.)
+
+    To speed up linting on Travis pull requests against master, we manually
+    set the diff base to origin/master. We don't do this on non-pull requests
+    since origin/master will be equivalent to the currently checked out code.
+    One could potentially use ${TRAVIS_COMMIT_RANGE} to find a diff base but
+    this value is not dependable.
+
+    :type allow_limited: boolean
+    :param allow_limited: Boolean indicating if a reduced set of files can
+                          be used.
+
+    :rtype: pair
+    :returns: Tuple of the list of filenames to be linted and the
+              diff base that was used (``None`` when listing all files).
+    """
+    if os.getenv('TRAVIS') == 'true':
+        # In travis, don't default to master.
+        diff_base = None
+
+    if (os.getenv('TRAVIS_BRANCH') == 'master' and
+            os.getenv('TRAVIS_PULL_REQUEST') != 'false'):
+        # In the case of a pull request into master, we want to
+        # diff against HEAD in master.
+        diff_base = 'origin/master'
+
+    if diff_base is not None and allow_limited:
+        result = subprocess.check_output(['git', 'diff', '--name-only',
+                                          diff_base])
+        print 'Using files changed relative to %s:' % (diff_base,)
+        print '-' * 60
+        print result.rstrip('\n')  # Don't print trailing newlines.
+        print '-' * 60
+    else:
+        print 'Diff base not specified, listing all files in repository.'
+        result = subprocess.check_output(['git', 'ls-files'])
+
+    return result.rstrip('\n').split('\n'), diff_base
+
+
+def get_python_files(all_files=None, diff_base=None):
+    """Gets a list of all Python files in the repository that need linting.
+
+    Relies on :func:`get_files_for_linting()` to determine which files should
+    be considered.
+
+    NOTE: This requires ``git`` to be installed and requires that this
+          is run within the ``git`` repository.
+
+    :type all_files: list or ``NoneType``
+    :param all_files: Optional list of files to be linted.
+
+    :rtype: tuple
+    :returns: A tuple containing two lists and a boolean. The first list
+              contains all production files, the next all test/demo files and
+              the boolean indicates if a restricted fileset was used.
+    """
+    using_restricted = False
+    if all_files is None:
+        all_files, diff_base = get_files_for_linting(diff_base=diff_base)
+        using_restricted = diff_base is not None
+
+    library_files = []
+    non_library_files = []
+    for filename in all_files:
+        if valid_filename(filename):
+            if is_production_filename(filename):
+                library_files.append(filename)
+            else:
+                non_library_files.append(filename)
+
+    return library_files, non_library_files, using_restricted
+
+
+def lint_fileset(filenames, rcfile, description):
+    """Lints a group of files using a given rcfile."""
+    # Only lint filenames that exist. For example, 'git diff --name-only'
+    # could spit out deleted / renamed files. Another alternative could
+    # be to use 'git diff --name-status' and filter out files with a
+    # status of 'D'.
+    filenames = [filename for filename in filenames
+                 if os.path.exists(filename)]
+    if filenames:
+        rc_flag = '--rcfile=%s' % (rcfile,)
+        pylint_shell_command = ['pylint', rc_flag] + filenames
+        status_code = subprocess.call(pylint_shell_command)
+        if status_code != 0:
+            error_message = ('Pylint failed on %s with '
+                             'status %d.' % (description, status_code))
+            print >> sys.stderr, error_message
+            sys.exit(status_code)
+    else:
+        print 'Skipping %s, no files to lint.' % (description,)
+
+
+def main(argv):
+    """Script entry point. Lints both sets of files."""
+    diff_base = argv[1] if len(argv) > 1 else None
+    make_test_rc(PRODUCTION_RC, TEST_RC_ADDITIONS, TEST_RC)
+    library_files, non_library_files, using_restricted = get_python_files(
+        diff_base=diff_base)
+    try:
+        lint_fileset(library_files, PRODUCTION_RC, 'library code')
+        lint_fileset(non_library_files, TEST_RC, 'test and demo code')
+    except SystemExit:
+        if not using_restricted:
+            raise
+
+        message = 'Restricted lint failed, expanding to full fileset.'
+        print >> sys.stderr, message
+        all_files, _ = get_files_for_linting(allow_limited=False)
+        library_files, non_library_files, _ = get_python_files(
+            all_files=all_files)
+        lint_fileset(library_files, PRODUCTION_RC, 'library code')
+        lint_fileset(non_library_files, TEST_RC, 'test and demo code')
+
+
+if __name__ == '__main__':
+    main(sys.argv)
diff --git a/samples/__init__.py b/samples/__init__.py
new file mode 100644
index 0000000..58e0d91
--- /dev/null
+++ b/samples/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/samples/bigquery_sample/bigquery_v2.json b/samples/bigquery_sample/bigquery_v2.json
new file mode 100644
index 0000000..8f4d760
--- /dev/null
+++ b/samples/bigquery_sample/bigquery_v2.json
@@ -0,0 +1,2636 @@
+{
+ "kind": "discovery#restDescription",
+ "discoveryVersion": "v1",
+ "id": "bigquery:v2",
+ "name": "bigquery",
+ "version": "v2",
+ "revision": "20160819",
+ "title": "BigQuery API",
+ "description": "A data platform for customers to create, manage, share and query data.",
+ "ownerDomain": "google.com",
+ "ownerName": "Google",
+ "icons": {
+  "x16": "https://www.google.com/images/icons/product/search-16.gif",
+  "x32": "https://www.google.com/images/icons/product/search-32.gif"
+ },
+ "documentationLink": "https://cloud.google.com/bigquery/",
+ "protocol": "rest",
+ "baseUrl": "https://www.googleapis.com/bigquery/v2/",
+ "basePath": "/bigquery/v2/",
+ "rootUrl": "https://www.googleapis.com/",
+ "servicePath": "bigquery/v2/",
+ "batchPath": "batch",
+ "parameters": {
+  "alt": {
+   "type": "string",
+   "description": "Data format for the response.",
+   "default": "json",
+   "enum": [
+    "json"
+   ],
+   "enumDescriptions": [
+    "Responses with Content-Type of application/json"
+   ],
+   "location": "query"
+  },
+  "fields": {
+   "type": "string",
+   "description": "Selector specifying which fields to include in a partial response.",
+   "location": "query"
+  },
+  "key": {
+   "type": "string",
+   "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
+   "location": "query"
+  },
+  "oauth_token": {
+   "type": "string",
+   "description": "OAuth 2.0 token for the current user.",
+   "location": "query"
+  },
+  "prettyPrint": {
+   "type": "boolean",
+   "description": "Returns response with indentations and line breaks.",
+   "default": "true",
+   "location": "query"
+  },
+  "quotaUser": {
+   "type": "string",
+   "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.",
+   "location": "query"
+  },
+  "userIp": {
+   "type": "string",
+   "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.",
+   "location": "query"
+  }
+ },
+ "auth": {
+  "oauth2": {
+   "scopes": {
+    "https://www.googleapis.com/auth/bigquery": {
+     "description": "View and manage your data in Google BigQuery"
+    },
+    "https://www.googleapis.com/auth/bigquery.insertdata": {
+     "description": "Insert data into Google BigQuery"
+    },
+    "https://www.googleapis.com/auth/cloud-platform": {
+     "description": "View and manage your data across Google Cloud Platform services"
+    },
+    "https://www.googleapis.com/auth/cloud-platform.read-only": {
+     "description": "View your data across Google Cloud Platform services"
+    },
+    "https://www.googleapis.com/auth/devstorage.full_control": {
+     "description": "Manage your data and permissions in Google Cloud Storage"
+    },
+    "https://www.googleapis.com/auth/devstorage.read_only": {
+     "description": "View your data in Google Cloud Storage"
+    },
+    "https://www.googleapis.com/auth/devstorage.read_write": {
+     "description": "Manage your data in Google Cloud Storage"
+    }
+   }
+  }
+ },
+ "schemas": {
+  "BigtableColumn": {
+   "id": "BigtableColumn",
+   "type": "object",
+   "properties": {
+    "encoding": {
+     "type": "string",
+     "description": "[Optional] The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. 'encoding' can also be set at the column family level. However, the setting at this level takes precedence if 'encoding' is set at both levels."
+    },
+    "fieldName": {
+     "type": "string",
+     "description": "[Optional] If the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as the column field name and is used as field name in queries."
+    },
+    "onlyReadLatest": {
+     "type": "boolean",
+     "description": "[Optional] If this is set, only the latest version of value in this column are exposed. 'onlyReadLatest' can also be set at the column family level. However, the setting at this level takes precedence if 'onlyReadLatest' is set at both levels."
+    },
+    "qualifierEncoded": {
+     "type": "string",
+     "description": "[Required] Qualifier of the column. Columns in the parent column family that has this exact qualifier are exposed as . field. If the qualifier is valid UTF-8 string, it can be specified in the qualifier_string field. Otherwise, a base-64 encoded value must be set to qualifier_encoded. The column field name is the same as the column qualifier. However, if the qualifier is not a valid BigQuery field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier must be provided as field_name.",
+     "format": "byte"
+    },
+    "qualifierString": {
+     "type": "string"
+    },
+    "type": {
+     "type": "string",
+     "description": "[Optional] The type to convert the value in cells of this column. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default type is BYTES. 'type' can also be set at the column family level. However, the setting at this level takes precedence if 'type' is set at both levels."
+    }
+   }
+  },
+  "BigtableColumnFamily": {
+   "id": "BigtableColumnFamily",
+   "type": "object",
+   "properties": {
+    "columns": {
+     "type": "array",
+     "description": "[Optional] Lists of columns that should be exposed as individual fields as opposed to a list of (column name, value) pairs. All columns whose qualifier matches a qualifier in this list can be accessed as .. Other columns can be accessed as a list through .Column field.",
+     "items": {
+      "$ref": "BigtableColumn"
+     }
+    },
+    "encoding": {
+     "type": "string",
+     "description": "[Optional] The encoding of the values when the type is not STRING. Acceptable encoding values are: TEXT - indicates values are alphanumeric text strings. BINARY - indicates values are encoded using HBase Bytes.toBytes family of functions. This can be overridden for a specific column by listing that column in 'columns' and specifying an encoding for it."
+    },
+    "familyId": {
+     "type": "string",
+     "description": "Identifier of the column family."
+    },
+    "onlyReadLatest": {
+     "type": "boolean",
+     "description": "[Optional] If this is set only the latest version of value are exposed for all columns in this column family. This can be overridden for a specific column by listing that column in 'columns' and specifying a different setting for that column."
+    },
+    "type": {
+     "type": "string",
+     "description": "[Optional] The type to convert the value in cells of this column family. The values are expected to be encoded using HBase Bytes.toBytes function when using the BINARY encoding value. Following BigQuery types are allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default type is BYTES. This can be overridden for a specific column by listing that column in 'columns' and specifying a type for it."
+    }
+   }
+  },
+  "BigtableOptions": {
+   "id": "BigtableOptions",
+   "type": "object",
+   "properties": {
+    "columnFamilies": {
+     "type": "array",
+     "description": "[Optional] List of column families to expose in the table schema along with their types. This list restricts the column families that can be referenced in queries and specifies their value types. You can use this list to do type conversions - see the 'type' field for more details. If you leave this list empty, all column families are present in the table schema and their values are read as BYTES. During a query only the column families referenced in that query are read from Bigtable.",
+     "items": {
+      "$ref": "BigtableColumnFamily"
+     }
+    },
+    "ignoreUnspecifiedColumnFamilies": {
+     "type": "boolean",
+     "description": "[Optional] If field is true, then the column families that are not specified in columnFamilies list are not exposed in the table schema. Otherwise, they are read with BYTES type values. The default value is false."
+    },
+    "readRowkeyAsString": {
+     "type": "boolean",
+     "description": "[Optional] If field is true, then the rowkey column families will be read and converted to string. Otherwise they are read with BYTES type values and users need to manually cast them with CAST if necessary. The default value is false."
+    }
+   }
+  },
+  "CsvOptions": {
+   "id": "CsvOptions",
+   "type": "object",
+   "properties": {
+    "allowJaggedRows": {
+     "type": "boolean",
+     "description": "[Optional] Indicates if BigQuery should accept rows that are missing trailing optional columns. If true, BigQuery treats missing trailing columns as null values. If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false."
+    },
+    "allowQuotedNewlines": {
+     "type": "boolean",
+     "description": "[Optional] Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false."
+    },
+    "encoding": {
+     "type": "string",
+     "description": "[Optional] The character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split using the values of the quote and fieldDelimiter properties."
+    },
+    "fieldDelimiter": {
+     "type": "string",
+     "description": "[Optional] The separator for fields in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. BigQuery also supports the escape sequence \"\\t\" to specify a tab separator. The default value is a comma (',')."
+    },
+    "quote": {
+     "type": "string",
+     "description": "[Optional] The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. The default value is a double-quote ('\"'). If your data does not contain quoted sections, set the property value to an empty string. If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true.",
+     "default": "\"",
+     "pattern": ".?"
+    },
+    "skipLeadingRows": {
+     "type": "string",
+     "description": "[Optional] The number of rows at the top of a CSV file that BigQuery will skip when reading the data. The default value is 0. This property is useful if you have header rows in the file that should be skipped.",
+     "format": "int64"
+    }
+   }
+  },
+  "Dataset": {
+   "id": "Dataset",
+   "type": "object",
+   "properties": {
+    "access": {
+     "type": "array",
+     "description": "[Optional] An array of objects that define dataset access for one or more entities. You can set this property when inserting or updating a dataset in order to control who is allowed to access the data. If unspecified at dataset creation time, BigQuery adds default dataset access for the following entities: access.specialGroup: projectReaders; access.role: READER; access.specialGroup: projectWriters; access.role: WRITER; access.specialGroup: projectOwners; access.role: OWNER; access.userByEmail: [dataset creator email]; access.role: OWNER;",
+     "items": {
+      "type": "object",
+      "properties": {
+       "domain": {
+        "type": "string",
+        "description": "[Pick one] A domain to grant access to. Any users signed in with the domain specified will be granted the specified access. Example: \"example.com\"."
+       },
+       "groupByEmail": {
+        "type": "string",
+        "description": "[Pick one] An email address of a Google Group to grant access to."
+       },
+       "role": {
+        "type": "string",
+        "description": "[Required] Describes the rights granted to the user specified by the other member of the access object. The following string values are supported: READER, WRITER, OWNER."
+       },
+       "specialGroup": {
+        "type": "string",
+        "description": "[Pick one] A special group to grant access to. Possible values include: projectOwners: Owners of the enclosing project. projectReaders: Readers of the enclosing project. projectWriters: Writers of the enclosing project. allAuthenticatedUsers: All authenticated BigQuery users."
+       },
+       "userByEmail": {
+        "type": "string",
+        "description": "[Pick one] An email address of a user to grant access to. For example: fred@example.com."
+       },
+       "view": {
+        "$ref": "TableReference",
+        "description": "[Pick one] A view from a different dataset to grant access to. Queries executed against that view will have read access to tables in this dataset. The role field is not required when this field is set. If that view is updated by any user, access to the view needs to be granted again via an update operation."
+       }
+      }
+     }
+    },
+    "creationTime": {
+     "type": "string",
+     "description": "[Output-only] The time when this dataset was created, in milliseconds since the epoch.",
+     "format": "int64"
+    },
+    "datasetReference": {
+     "$ref": "DatasetReference",
+     "description": "[Required] A reference that identifies the dataset."
+    },
+    "defaultTableExpirationMs": {
+     "type": "string",
+     "description": "[Optional] The default lifetime of all tables in the dataset, in milliseconds. The minimum value is 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the dataset will have an expirationTime property set to the creation time plus the value in this property, and changing the value will only affect new tables, not existing ones. When the expirationTime for a given table is reached, that table will be deleted automatically. If a table's expirationTime is modified or removed before the table expires, or if you provide an explicit expirationTime when creating a table, that value takes precedence over the default expiration time indicated by this property.",
+     "format": "int64"
+    },
+    "description": {
+     "type": "string",
+     "description": "[Optional] A user-friendly description of the dataset."
+    },
+    "etag": {
+     "type": "string",
+     "description": "[Output-only] A hash of the resource."
+    },
+    "friendlyName": {
+     "type": "string",
+     "description": "[Optional] A descriptive name for the dataset."
+    },
+    "id": {
+     "type": "string",
+     "description": "[Output-only] The fully-qualified unique name of the dataset in the format projectId:datasetId. The dataset name without the project name is given in the datasetId field. When creating a new dataset, leave this field blank, and instead specify the datasetId field."
+    },
+    "kind": {
+     "type": "string",
+     "description": "[Output-only] The resource type.",
+     "default": "bigquery#dataset"
+    },
+    "labels": {
+     "type": "object",
+     "description": "[Experimental] The labels associated with this dataset. You can use these to organize and group your datasets. You can set this property when inserting or updating a dataset. Label keys and values can be no longer than 63 characters, can only contain letters, numeric characters, underscores and dashes. International characters are allowed. Label values are optional. Label keys must start with a letter and must be unique within a dataset. Both keys and values are additionally constrained to be \u003c= 128 bytes in size.",
+     "additionalProperties": {
+      "type": "string"
+     }
+    },
+    "lastModifiedTime": {
+     "type": "string",
+     "description": "[Output-only] The date when this dataset or any of its tables was last modified, in milliseconds since the epoch.",
+     "format": "int64"
+    },
+    "location": {
+     "type": "string",
+     "description": "[Experimental] The geographic location where the dataset should reside. Possible values include EU and US. The default value is US."
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "[Output-only] A URL that can be used to access the resource again. You can use this URL in Get or Update requests to the resource."
+    }
+   }
+  },
+  "DatasetList": {
+   "id": "DatasetList",
+   "type": "object",
+   "properties": {
+    "datasets": {
+     "type": "array",
+     "description": "An array of the dataset resources in the project. Each resource contains basic information. For full information about a particular dataset resource, use the Datasets: get method. This property is omitted when there are no datasets in the project.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "datasetReference": {
+        "$ref": "DatasetReference",
+        "description": "The dataset reference. Use this property to access specific parts of the dataset's ID, such as project ID or dataset ID."
+       },
+       "friendlyName": {
+        "type": "string",
+        "description": "A descriptive name for the dataset, if one exists."
+       },
+       "id": {
+        "type": "string",
+        "description": "The fully-qualified, unique, opaque ID of the dataset."
+       },
+       "kind": {
+        "type": "string",
+        "description": "The resource type. This property always returns the value \"bigquery#dataset\".",
+        "default": "bigquery#dataset"
+       },
+       "labels": {
+        "type": "object",
+        "description": "[Experimental] The labels associated with this dataset. You can use these to organize and group your datasets.",
+        "additionalProperties": {
+         "type": "string"
+        }
+       }
+      }
+     }
+    },
+    "etag": {
+     "type": "string",
+     "description": "A hash value of the results page. You can use this property to determine if the page has changed since the last request."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The list type. This property always returns the value \"bigquery#datasetList\".",
+     "default": "bigquery#datasetList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "A token that can be used to request the next results page. This property is omitted on the final results page."
+    }
+   }
+  },
+  "DatasetReference": {
+   "id": "DatasetReference",
+   "type": "object",
+   "properties": {
+    "datasetId": {
+     "type": "string",
+     "description": "[Required] A unique ID for this dataset, without the project name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.",
+     "annotations": {
+      "required": [
+       "bigquery.datasets.update"
+      ]
+     }
+    },
+    "projectId": {
+     "type": "string",
+     "description": "[Optional] The ID of the project containing this dataset.",
+     "annotations": {
+      "required": [
+       "bigquery.datasets.update"
+      ]
+     }
+    }
+   }
+  },
+  "ErrorProto": {
+   "id": "ErrorProto",
+   "type": "object",
+   "properties": {
+    "debugInfo": {
+     "type": "string",
+     "description": "Debugging information. This property is internal to Google and should not be used."
+    },
+    "location": {
+     "type": "string",
+     "description": "Specifies where the error occurred, if present."
+    },
+    "message": {
+     "type": "string",
+     "description": "A human-readable description of the error."
+    },
+    "reason": {
+     "type": "string",
+     "description": "A short error code that summarizes the error."
+    }
+   }
+  },
+  "ExplainQueryStage": {
+   "id": "ExplainQueryStage",
+   "type": "object",
+   "properties": {
+    "computeRatioAvg": {
+     "type": "number",
+     "description": "Relative amount of time the average shard spent on CPU-bound tasks.",
+     "format": "double"
+    },
+    "computeRatioMax": {
+     "type": "number",
+     "description": "Relative amount of time the slowest shard spent on CPU-bound tasks.",
+     "format": "double"
+    },
+    "id": {
+     "type": "string",
+     "description": "Unique ID for stage within plan.",
+     "format": "int64"
+    },
+    "name": {
+     "type": "string",
+     "description": "Human-readable name for stage."
+    },
+    "readRatioAvg": {
+     "type": "number",
+     "description": "Relative amount of time the average shard spent reading input.",
+     "format": "double"
+    },
+    "readRatioMax": {
+     "type": "number",
+     "description": "Relative amount of time the slowest shard spent reading input.",
+     "format": "double"
+    },
+    "recordsRead": {
+     "type": "string",
+     "description": "Number of records read into the stage.",
+     "format": "int64"
+    },
+    "recordsWritten": {
+     "type": "string",
+     "description": "Number of records written by the stage.",
+     "format": "int64"
+    },
+    "steps": {
+     "type": "array",
+     "description": "List of operations within the stage in dependency order (approximately chronological).",
+     "items": {
+      "$ref": "ExplainQueryStep"
+     }
+    },
+    "waitRatioAvg": {
+     "type": "number",
+     "description": "Relative amount of time the average shard spent waiting to be scheduled.",
+     "format": "double"
+    },
+    "waitRatioMax": {
+     "type": "number",
+     "description": "Relative amount of time the slowest shard spent waiting to be scheduled.",
+     "format": "double"
+    },
+    "writeRatioAvg": {
+     "type": "number",
+     "description": "Relative amount of time the average shard spent on writing output.",
+     "format": "double"
+    },
+    "writeRatioMax": {
+     "type": "number",
+     "description": "Relative amount of time the slowest shard spent on writing output.",
+     "format": "double"
+    }
+   }
+  },
+  "ExplainQueryStep": {
+   "id": "ExplainQueryStep",
+   "type": "object",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Machine-readable operation type."
+    },
+    "substeps": {
+     "type": "array",
+     "description": "Human-readable stage descriptions.",
+     "items": {
+      "type": "string"
+     }
+    }
+   }
+  },
+  "ExternalDataConfiguration": {
+   "id": "ExternalDataConfiguration",
+   "type": "object",
+   "properties": {
+    "autodetect": {
+     "type": "boolean",
+     "description": "[Experimental] Try to detect schema and format options automatically. Any option specified explicitly will be honored."
+    },
+    "bigtableOptions": {
+     "$ref": "BigtableOptions",
+     "description": "[Optional] Additional options if sourceFormat is set to BIGTABLE."
+    },
+    "compression": {
+     "type": "string",
+     "description": "[Optional] The compression type of the data source. Possible values include GZIP and NONE. The default value is NONE. This setting is ignored for Google Cloud Bigtable, Google Cloud Datastore backups and Avro formats."
+    },
+    "csvOptions": {
+     "$ref": "CsvOptions",
+     "description": "Additional properties to set if sourceFormat is set to CSV."
+    },
+    "googleSheetsOptions": {
+     "$ref": "GoogleSheetsOptions",
+     "description": "[Optional] Additional options if sourceFormat is set to GOOGLE_SHEETS."
+    },
+    "ignoreUnknownValues": {
+     "type": "boolean",
+     "description": "[Optional] Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: CSV: Trailing columns JSON: Named values that don't match any column names Google Cloud Bigtable: This setting is ignored. Google Cloud Datastore backups: This setting is ignored. Avro: This setting is ignored."
+    },
+    "maxBadRecords": {
+     "type": "integer",
+     "description": "[Optional] The maximum number of bad records that BigQuery can ignore when reading data. If the number of bad records exceeds this value, an invalid error is returned in the job result. The default value is 0, which requires that all records are valid. This setting is ignored for Google Cloud Bigtable, Google Cloud Datastore backups and Avro formats.",
+     "format": "int32"
+    },
+    "schema": {
+     "$ref": "TableSchema",
+     "description": "[Optional] The schema for the data. Schema is required for CSV and JSON formats. Schema is disallowed for Google Cloud Bigtable, Cloud Datastore backups, and Avro formats."
+    },
+    "sourceFormat": {
+     "type": "string",
+     "description": "[Required] The data format. For CSV files, specify \"CSV\". For Google sheets, specify \"GOOGLE_SHEETS\". For newline-delimited JSON, specify \"NEWLINE_DELIMITED_JSON\". For Avro files, specify \"AVRO\". For Google Cloud Datastore backups, specify \"DATASTORE_BACKUP\". [Experimental] For Google Cloud Bigtable, specify \"BIGTABLE\". Please note that reading from Google Cloud Bigtable is experimental and has to be enabled for your project. Please contact Google Cloud Support to enable this for your project."
+    },
+    "sourceUris": {
+     "type": "array",
+     "description": "[Required] The fully-qualified URIs that point to your data in Google Cloud. For Google Cloud Storage URIs: Each URI can contain one '*' wildcard character and it must come after the 'bucket' name. Size limits related to load jobs apply to external data sources. For Google Cloud Bigtable URIs: Exactly one URI can be specified and it has to be a fully specified and valid HTTPS URL for a Google Cloud Bigtable table. For Google Cloud Datastore backups, exactly one URI can be specified, and it must end with '.backup_info'. Also, the '*' wildcard character is not allowed.",
+     "items": {
+      "type": "string"
+     }
+    }
+   }
+  },
+  "GetQueryResultsResponse": {
+   "id": "GetQueryResultsResponse",
+   "type": "object",
+   "properties": {
+    "cacheHit": {
+     "type": "boolean",
+     "description": "Whether the query result was fetched from the query cache."
+    },
+    "errors": {
+     "type": "array",
+     "description": "[Output-only] All errors and warnings encountered during the running of the job. Errors here do not necessarily mean that the job has completed or was unsuccessful.",
+     "items": {
+      "$ref": "ErrorProto"
+     }
+    },
+    "etag": {
+     "type": "string",
+     "description": "A hash of this response."
+    },
+    "jobComplete": {
+     "type": "boolean",
+     "description": "Whether the query has completed or not. If rows or totalRows are present, this will always be true. If this is false, totalRows will not be available."
+    },
+    "jobReference": {
+     "$ref": "JobReference",
+     "description": "Reference to the BigQuery Job that was created to run the query. This field will be present even if the original request timed out, in which case GetQueryResults can be used to read the results once the query has completed. Since this API only returns the first page of results, subsequent pages can be fetched via the same mechanism (GetQueryResults)."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The resource type of the response.",
+     "default": "bigquery#getQueryResultsResponse"
+    },
+    "numDmlAffectedRows": {
+     "type": "string",
+     "description": "[Output-only, Experimental] The number of rows affected by a DML statement. Present only for DML statements INSERT, UPDATE or DELETE.",
+     "format": "int64"
+    },
+    "pageToken": {
+     "type": "string",
+     "description": "A token used for paging results."
+    },
+    "rows": {
+     "type": "array",
+     "description": "An object with as many results as can be contained within the maximum permitted reply size. To get any additional rows, you can call GetQueryResults and specify the jobReference returned above. Present only when the query completes successfully.",
+     "items": {
+      "$ref": "TableRow"
+     }
+    },
+    "schema": {
+     "$ref": "TableSchema",
+     "description": "The schema of the results. Present only when the query completes successfully."
+    },
+    "totalBytesProcessed": {
+     "type": "string",
+     "description": "The total number of bytes processed for this query.",
+     "format": "int64"
+    },
+    "totalRows": {
+     "type": "string",
+     "description": "The total number of rows in the complete query result set, which can be more than the number of rows in this single page of results. Present only when the query completes successfully.",
+     "format": "uint64"
+    }
+   }
+  },
+  "GoogleSheetsOptions": {
+   "id": "GoogleSheetsOptions",
+   "type": "object",
+   "properties": {
+    "skipLeadingRows": {
+     "type": "string",
+     "description": "[Optional] The number of rows at the top of a sheet that BigQuery will skip when reading the data. The default value is 0. This property is useful if you have header rows that should be skipped. When autodetect is on, behavior is the following: * skipLeadingRows unspecified - Autodetect tries to detect headers in the first row. If they are not detected, the row is read as data. Otherwise data is read starting from the second row. * skipLeadingRows is 0 - Instructs autodetect that there are no headers and data should be read starting from the first row. * skipLeadingRows = N \u003e 0 - Autodetect skips N-1 rows and tries to detect headers in row N. If headers are not detected, row N is just skipped. Otherwise row N is used to extract column names for the detected schema.",
+     "format": "int64"
+    }
+   }
+  },
+  "Job": {
+   "id": "Job",
+   "type": "object",
+   "properties": {
+    "configuration": {
+     "$ref": "JobConfiguration",
+     "description": "[Required] Describes the job configuration."
+    },
+    "etag": {
+     "type": "string",
+     "description": "[Output-only] A hash of this resource."
+    },
+    "id": {
+     "type": "string",
+     "description": "[Output-only] Opaque ID field of the job"
+    },
+    "jobReference": {
+     "$ref": "JobReference",
+     "description": "[Optional] Reference describing the unique-per-user name of the job."
+    },
+    "kind": {
+     "type": "string",
+     "description": "[Output-only] The type of the resource.",
+     "default": "bigquery#job"
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "[Output-only] A URL that can be used to access this resource again."
+    },
+    "statistics": {
+     "$ref": "JobStatistics",
+     "description": "[Output-only] Information about the job, including starting time and ending time of the job."
+    },
+    "status": {
+     "$ref": "JobStatus",
+     "description": "[Output-only] The status of this job. Examine this value when polling an asynchronous job to see if the job is complete."
+    },
+    "user_email": {
+     "type": "string",
+     "description": "[Output-only] Email address of the user who ran the job."
+    }
+   }
+  },
+  "JobCancelResponse": {
+   "id": "JobCancelResponse",
+   "type": "object",
+   "properties": {
+    "job": {
+     "$ref": "Job",
+     "description": "The final state of the job."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The resource type of the response.",
+     "default": "bigquery#jobCancelResponse"
+    }
+   }
+  },
+  "JobConfiguration": {
+   "id": "JobConfiguration",
+   "type": "object",
+   "properties": {
+    "copy": {
+     "$ref": "JobConfigurationTableCopy",
+     "description": "[Pick one] Copies a table."
+    },
+    "dryRun": {
+     "type": "boolean",
+     "description": "[Optional] If set, don't actually run this job. A valid query will return a mostly empty response with some processing statistics, while an invalid query will return the same error it would if it wasn't a dry run. Behavior of non-query jobs is undefined."
+    },
+    "extract": {
+     "$ref": "JobConfigurationExtract",
+     "description": "[Pick one] Configures an extract job."
+    },
+    "load": {
+     "$ref": "JobConfigurationLoad",
+     "description": "[Pick one] Configures a load job."
+    },
+    "query": {
+     "$ref": "JobConfigurationQuery",
+     "description": "[Pick one] Configures a query job."
+    }
+   }
+  },
+  "JobConfigurationExtract": {
+   "id": "JobConfigurationExtract",
+   "type": "object",
+   "properties": {
+    "compression": {
+     "type": "string",
+     "description": "[Optional] The compression type to use for exported files. Possible values include GZIP and NONE. The default value is NONE."
+    },
+    "destinationFormat": {
+     "type": "string",
+     "description": "[Optional] The exported file format. Possible values include CSV, NEWLINE_DELIMITED_JSON and AVRO. The default value is CSV. Tables with nested or repeated fields cannot be exported as CSV."
+    },
+    "destinationUri": {
+     "type": "string",
+     "description": "[Pick one] DEPRECATED: Use destinationUris instead, passing only one URI as necessary. The fully-qualified Google Cloud Storage URI where the extracted table should be written."
+    },
+    "destinationUris": {
+     "type": "array",
+     "description": "[Pick one] A list of fully-qualified Google Cloud Storage URIs where the extracted table should be written.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "fieldDelimiter": {
+     "type": "string",
+     "description": "[Optional] Delimiter to use between fields in the exported data. Default is ','"
+    },
+    "printHeader": {
+     "type": "boolean",
+     "description": "[Optional] Whether to print out a header row in the results. Default is true.",
+     "default": "true"
+    },
+    "sourceTable": {
+     "$ref": "TableReference",
+     "description": "[Required] A reference to the table being exported."
+    }
+   }
+  },
+  "JobConfigurationLoad": {
+   "id": "JobConfigurationLoad",
+   "type": "object",
+   "properties": {
+    "allowJaggedRows": {
+     "type": "boolean",
+     "description": "[Optional] Accept rows that are missing trailing optional columns. The missing values are treated as nulls. If false, records with missing trailing columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. Only applicable to CSV, ignored for other formats."
+    },
+    "allowQuotedNewlines": {
+     "type": "boolean",
+     "description": "Indicates if BigQuery should allow quoted data sections that contain newline characters in a CSV file. The default value is false."
+    },
+    "autodetect": {
+     "type": "boolean",
+     "description": "[Experimental] Indicates if we should automatically infer the options and schema for CSV and JSON sources."
+    },
+    "createDisposition": {
+     "type": "string",
+     "description": "[Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion."
+    },
+    "destinationTable": {
+     "$ref": "TableReference",
+     "description": "[Required] The destination table to load the data into."
+    },
+    "encoding": {
+     "type": "string",
+     "description": "[Optional] The character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split using the values of the quote and fieldDelimiter properties."
+    },
+    "fieldDelimiter": {
+     "type": "string",
+     "description": "[Optional] The separator for fields in a CSV file. The separator can be any ISO-8859-1 single-byte character. To use a character in the range 128-255, you must encode the character as UTF8. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. BigQuery also supports the escape sequence \"\\t\" to specify a tab separator. The default value is a comma (',')."
+    },
+    "ignoreUnknownValues": {
+     "type": "boolean",
+     "description": "[Optional] Indicates if BigQuery should allow extra values that are not represented in the table schema. If true, the extra values are ignored. If false, records with extra columns are treated as bad records, and if there are too many bad records, an invalid error is returned in the job result. The default value is false. The sourceFormat property determines what BigQuery treats as an extra value: CSV: Trailing columns JSON: Named values that don't match any column names"
+    },
+    "maxBadRecords": {
+     "type": "integer",
+     "description": "[Optional] The maximum number of bad records that BigQuery can ignore when running the job. If the number of bad records exceeds this value, an invalid error is returned in the job result. The default value is 0, which requires that all records are valid.",
+     "format": "int32"
+    },
+    "projectionFields": {
+     "type": "array",
+     "description": "[Experimental] If sourceFormat is set to \"DATASTORE_BACKUP\", indicates which entity properties to load into BigQuery from a Cloud Datastore backup. Property names are case sensitive and must be top-level properties. If no properties are specified, BigQuery loads all properties. If any named property isn't found in the Cloud Datastore backup, an invalid error is returned in the job result.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "quote": {
+     "type": "string",
+     "description": "[Optional] The value that is used to quote data sections in a CSV file. BigQuery converts the string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split the data in its raw, binary state. The default value is a double-quote ('\"'). If your data does not contain quoted sections, set the property value to an empty string. If your data contains quoted newline characters, you must also set the allowQuotedNewlines property to true.",
+     "default": "\"",
+     "pattern": ".?"
+    },
+    "schema": {
+     "$ref": "TableSchema",
+     "description": "[Optional] The schema for the destination table. The schema can be omitted if the destination table already exists, or if you're loading data from Google Cloud Datastore."
+    },
+    "schemaInline": {
+     "type": "string",
+     "description": "[Deprecated] The inline schema. For CSV schemas, specify as \"Field1:Type1[,Field2:Type2]*\". For example, \"foo:STRING, bar:INTEGER, baz:FLOAT\"."
+    },
+    "schemaInlineFormat": {
+     "type": "string",
+     "description": "[Deprecated] The format of the schemaInline property."
+    },
+    "schemaUpdateOptions": {
+     "type": "array",
+     "description": "[Experimental] Allows the schema of the destination table to be updated as a side effect of the load job. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "skipLeadingRows": {
+     "type": "integer",
+     "description": "[Optional] The number of rows at the top of a CSV file that BigQuery will skip when loading the data. The default value is 0. This property is useful if you have header rows in the file that should be skipped.",
+     "format": "int32"
+    },
+    "sourceFormat": {
+     "type": "string",
+     "description": "[Optional] The format of the data files. For CSV files, specify \"CSV\". For datastore backups, specify \"DATASTORE_BACKUP\". For newline-delimited JSON, specify \"NEWLINE_DELIMITED_JSON\". For Avro, specify \"AVRO\". The default value is CSV."
+    },
+    "sourceUris": {
+     "type": "array",
+     "description": "[Required] The fully-qualified URIs that point to your data in Google Cloud Storage. Each URI can contain one '*' wildcard character and it must come after the 'bucket' name.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "writeDisposition": {
+     "type": "string",
+     "description": "[Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_APPEND. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion."
+    }
+   }
+  },
+  "JobConfigurationQuery": {
+   "id": "JobConfigurationQuery",
+   "type": "object",
+   "properties": {
+    "allowLargeResults": {
+     "type": "boolean",
+     "description": "If true, allows the query to produce arbitrarily large result tables at a slight cost in performance. Requires destinationTable to be set."
+    },
+    "createDisposition": {
+     "type": "string",
+     "description": "[Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion."
+    },
+    "defaultDataset": {
+     "$ref": "DatasetReference",
+     "description": "[Optional] Specifies the default dataset to use for unqualified table names in the query."
+    },
+    "destinationTable": {
+     "$ref": "TableReference",
+     "description": "[Optional] Describes the table where the query results should be stored. If not present, a new table will be created to store the results."
+    },
+    "flattenResults": {
+     "type": "boolean",
+     "description": "[Optional] Flattens all nested and repeated fields in the query results. The default value is true. allowLargeResults must be true if this is set to false.",
+     "default": "true"
+    },
+    "maximumBillingTier": {
+     "type": "integer",
+     "description": "[Optional] Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge). If unspecified, this will be set to your project default.",
+     "default": "1",
+     "format": "int32"
+    },
+    "maximumBytesBilled": {
+     "type": "string",
+     "description": "[Optional] Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit will fail (without incurring a charge). If unspecified, this will be set to your project default.",
+     "format": "int64"
+    },
+    "preserveNulls": {
+     "type": "boolean",
+     "description": "[Deprecated] This property is deprecated."
+    },
+    "priority": {
+     "type": "string",
+     "description": "[Optional] Specifies a priority for the query. Possible values include INTERACTIVE and BATCH. The default value is INTERACTIVE."
+    },
+    "query": {
+     "type": "string",
+     "description": "[Required] BigQuery SQL query to execute."
+    },
+    "schemaUpdateOptions": {
+     "type": "array",
+     "description": "[Experimental] Allows the schema of the destination table to be updated as a side effect of the query job. Schema update options are supported in two cases: when writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always overwrite the schema. One or more of the following values are specified: ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema. ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original schema to nullable.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "tableDefinitions": {
+     "type": "object",
+     "description": "[Optional] If querying an external data source outside of BigQuery, describes the data format, location and other properties of the data source. By defining these properties, the data source can then be queried as if it were a standard BigQuery table.",
+     "additionalProperties": {
+      "$ref": "ExternalDataConfiguration"
+     }
+    },
+    "useLegacySql": {
+     "type": "boolean",
+     "description": "[Experimental] Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true. If set to false, the query will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the values of allowLargeResults and flattenResults are ignored; query will be run as if allowLargeResults is true and flattenResults is false."
+    },
+    "useQueryCache": {
+     "type": "boolean",
+     "description": "[Optional] Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever tables in the query are modified. Moreover, the query cache is only available when a query does not have a destination table specified. The default value is true.",
+     "default": "true"
+    },
+    "userDefinedFunctionResources": {
+     "type": "array",
+     "description": "[Experimental] Describes user-defined function resources used in the query.",
+     "items": {
+      "$ref": "UserDefinedFunctionResource"
+     }
+    },
+    "writeDisposition": {
+     "type": "string",
+     "description": "[Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion."
+    }
+   }
+  },
+  "JobConfigurationTableCopy": {
+   "id": "JobConfigurationTableCopy",
+   "type": "object",
+   "properties": {
+    "createDisposition": {
+     "type": "string",
+     "description": "[Optional] Specifies whether the job is allowed to create new tables. The following values are supported: CREATE_IF_NEEDED: If the table does not exist, BigQuery creates the table. CREATE_NEVER: The table must already exist. If it does not, a 'notFound' error is returned in the job result. The default value is CREATE_IF_NEEDED. Creation, truncation and append actions occur as one atomic update upon job completion."
+    },
+    "destinationTable": {
+     "$ref": "TableReference",
+     "description": "[Required] The destination table"
+    },
+    "sourceTable": {
+     "$ref": "TableReference",
+     "description": "[Pick one] Source table to copy."
+    },
+    "sourceTables": {
+     "type": "array",
+     "description": "[Pick one] Source tables to copy.",
+     "items": {
+      "$ref": "TableReference"
+     }
+    },
+    "writeDisposition": {
+     "type": "string",
+     "description": "[Optional] Specifies the action that occurs if the destination table already exists. The following values are supported: WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the table data. WRITE_APPEND: If the table already exists, BigQuery appends the data to the table. WRITE_EMPTY: If the table already exists and contains data, a 'duplicate' error is returned in the job result. The default value is WRITE_EMPTY. Each action is atomic and only occurs if BigQuery is able to complete the job successfully. Creation, truncation and append actions occur as one atomic update upon job completion."
+    }
+   }
+  },
+  "JobList": {
+   "id": "JobList",
+   "type": "object",
+   "properties": {
+    "etag": {
+     "type": "string",
+     "description": "A hash of this page of results."
+    },
+    "jobs": {
+     "type": "array",
+     "description": "List of jobs that were requested.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "configuration": {
+        "$ref": "JobConfiguration",
+        "description": "[Full-projection-only] Specifies the job configuration."
+       },
+       "errorResult": {
+        "$ref": "ErrorProto",
+        "description": "A result object that will be present only if the job has failed."
+       },
+       "id": {
+        "type": "string",
+        "description": "Unique opaque ID of the job."
+       },
+       "jobReference": {
+        "$ref": "JobReference",
+        "description": "Job reference uniquely identifying the job."
+       },
+       "kind": {
+        "type": "string",
+        "description": "The resource type.",
+        "default": "bigquery#job"
+       },
+       "state": {
+        "type": "string",
+        "description": "Running state of the job. When the state is DONE, errorResult can be checked to determine whether the job succeeded or failed."
+       },
+       "statistics": {
+        "$ref": "JobStatistics",
+        "description": "[Output-only] Information about the job, including starting time and ending time of the job."
+       },
+       "status": {
+        "$ref": "JobStatus",
+        "description": "[Full-projection-only] Describes the state of the job."
+       },
+       "user_email": {
+        "type": "string",
+        "description": "[Full-projection-only] Email address of the user who ran the job."
+       }
+      }
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "The resource type of the response.",
+     "default": "bigquery#jobList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "A token to request the next page of results."
+    }
+   }
+  },
+  "JobReference": {
+   "id": "JobReference",
+   "type": "object",
+   "properties": {
+    "jobId": {
+     "type": "string",
+     "description": "[Required] The ID of the job. The ID must contain only letters (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum length is 1,024 characters.",
+     "annotations": {
+      "required": [
+       "bigquery.jobs.getQueryResults"
+      ]
+     }
+    },
+    "projectId": {
+     "type": "string",
+     "description": "[Required] The ID of the project containing this job.",
+     "annotations": {
+      "required": [
+       "bigquery.jobs.getQueryResults"
+      ]
+     }
+    }
+   }
+  },
+  "JobStatistics": {
+   "id": "JobStatistics",
+   "type": "object",
+   "properties": {
+    "creationTime": {
+     "type": "string",
+     "description": "[Output-only] Creation time of this job, in milliseconds since the epoch. This field will be present on all jobs.",
+     "format": "int64"
+    },
+    "endTime": {
+     "type": "string",
+     "description": "[Output-only] End time of this job, in milliseconds since the epoch. This field will be present whenever a job is in the DONE state.",
+     "format": "int64"
+    },
+    "extract": {
+     "$ref": "JobStatistics4",
+     "description": "[Output-only] Statistics for an extract job."
+    },
+    "load": {
+     "$ref": "JobStatistics3",
+     "description": "[Output-only] Statistics for a load job."
+    },
+    "query": {
+     "$ref": "JobStatistics2",
+     "description": "[Output-only] Statistics for a query job."
+    },
+    "startTime": {
+     "type": "string",
+     "description": "[Output-only] Start time of this job, in milliseconds since the epoch. This field will be present when the job transitions from the PENDING state to either RUNNING or DONE.",
+     "format": "int64"
+    },
+    "totalBytesProcessed": {
+     "type": "string",
+     "description": "[Output-only] [Deprecated] Use the bytes processed in the query statistics instead.",
+     "format": "int64"
+    }
+   }
+  },
+  "JobStatistics2": {
+   "id": "JobStatistics2",
+   "type": "object",
+   "properties": {
+    "billingTier": {
+     "type": "integer",
+     "description": "[Output-only] Billing tier for the job.",
+     "format": "int32"
+    },
+    "cacheHit": {
+     "type": "boolean",
+     "description": "[Output-only] Whether the query result was fetched from the query cache."
+    },
+    "numDmlAffectedRows": {
+     "type": "string",
+     "description": "[Output-only, Experimental] The number of rows affected by a DML statement. Present only for DML statements INSERT, UPDATE or DELETE.",
+     "format": "int64"
+    },
+    "queryPlan": {
+     "type": "array",
+     "description": "[Output-only, Experimental] Describes execution plan for the query.",
+     "items": {
+      "$ref": "ExplainQueryStage"
+     }
+    },
+    "referencedTables": {
+     "type": "array",
+     "description": "[Output-only, Experimental] Referenced tables for the job. Queries that reference more than 50 tables will not have a complete list.",
+     "items": {
+      "$ref": "TableReference"
+     }
+    },
+    "schema": {
+     "$ref": "TableSchema",
+     "description": "[Output-only, Experimental] The schema of the results. Present only for successful dry run of non-legacy SQL queries."
+    },
+    "totalBytesBilled": {
+     "type": "string",
+     "description": "[Output-only] Total bytes billed for the job.",
+     "format": "int64"
+    },
+    "totalBytesProcessed": {
+     "type": "string",
+     "description": "[Output-only] Total bytes processed for the job.",
+     "format": "int64"
+    }
+   }
+  },
+  "JobStatistics3": {
+   "id": "JobStatistics3",
+   "type": "object",
+   "properties": {
+    "inputFileBytes": {
+     "type": "string",
+     "description": "[Output-only] Number of bytes of source data in a load job.",
+     "format": "int64"
+    },
+    "inputFiles": {
+     "type": "string",
+     "description": "[Output-only] Number of source files in a load job.",
+     "format": "int64"
+    },
+    "outputBytes": {
+     "type": "string",
+     "description": "[Output-only] Size of the loaded data in bytes. Note that while a load job is in the running state, this value may change.",
+     "format": "int64"
+    },
+    "outputRows": {
+     "type": "string",
+     "description": "[Output-only] Number of rows imported in a load job. Note that while an import job is in the running state, this value may change.",
+     "format": "int64"
+    }
+   }
+  },
+  "JobStatistics4": {
+   "id": "JobStatistics4",
+   "type": "object",
+   "properties": {
+    "destinationUriFileCounts": {
+     "type": "array",
+     "description": "[Output-only] Number of files per destination URI or URI pattern specified in the extract configuration. These values will be in the same order as the URIs specified in the 'destinationUris' field.",
+     "items": {
+      "type": "string",
+      "format": "int64"
+     }
+    }
+   }
+  },
+  "JobStatus": {
+   "id": "JobStatus",
+   "type": "object",
+   "properties": {
+    "errorResult": {
+     "$ref": "ErrorProto",
+     "description": "[Output-only] Final error result of the job. If present, indicates that the job has completed and was unsuccessful."
+    },
+    "errors": {
+     "type": "array",
+     "description": "[Output-only] All errors encountered during the running of the job. Errors here do not necessarily mean that the job has completed or was unsuccessful.",
+     "items": {
+      "$ref": "ErrorProto"
+     }
+    },
+    "state": {
+     "type": "string",
+     "description": "[Output-only] Running state of the job."
+    }
+   }
+  },
+  "JsonObject": {
+   "id": "JsonObject",
+   "type": "object",
+   "description": "Represents a single JSON object.",
+   "additionalProperties": {
+    "$ref": "JsonValue"
+   }
+  },
+  "JsonValue": {
+   "id": "JsonValue",
+   "type": "any"
+  },
+  "ProjectList": {
+   "id": "ProjectList",
+   "type": "object",
+   "properties": {
+    "etag": {
+     "type": "string",
+     "description": "A hash of the page of results"
+    },
+    "kind": {
+     "type": "string",
+     "description": "The type of list.",
+     "default": "bigquery#projectList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "A token to request the next page of results."
+    },
+    "projects": {
+     "type": "array",
+     "description": "Projects to which you have at least READ access.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "friendlyName": {
+        "type": "string",
+        "description": "A descriptive name for this project."
+       },
+       "id": {
+        "type": "string",
+        "description": "An opaque ID of this project."
+       },
+       "kind": {
+        "type": "string",
+        "description": "The resource type.",
+        "default": "bigquery#project"
+       },
+       "numericId": {
+        "type": "string",
+        "description": "The numeric ID of this project.",
+        "format": "uint64"
+       },
+       "projectReference": {
+        "$ref": "ProjectReference",
+        "description": "A unique reference to this project."
+       }
+      }
+     }
+    },
+    "totalItems": {
+     "type": "integer",
+     "description": "The total number of projects in the list.",
+     "format": "int32"
+    }
+   }
+  },
+  "ProjectReference": {
+   "id": "ProjectReference",
+   "type": "object",
+   "properties": {
+    "projectId": {
+     "type": "string",
+     "description": "[Required] ID of the project. Can be either the numeric ID or the assigned ID of the project."
+    }
+   }
+  },
+  "QueryRequest": {
+   "id": "QueryRequest",
+   "type": "object",
+   "properties": {
+    "defaultDataset": {
+     "$ref": "DatasetReference",
+     "description": "[Optional] Specifies the default datasetId and projectId to assume for any unqualified table names in the query. If not set, all table names in the query string must be qualified in the format 'datasetId.tableId'."
+    },
+    "dryRun": {
+     "type": "boolean",
+     "description": "[Optional] If set to true, BigQuery doesn't run the job. Instead, if the query is valid, BigQuery returns statistics about the job such as how many bytes would be processed. If the query is invalid, an error returns. The default value is false."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The resource type of the request.",
+     "default": "bigquery#queryRequest"
+    },
+    "maxResults": {
+     "type": "integer",
+     "description": "[Optional] The maximum number of rows of data to return per page of results. Setting this flag to a small value such as 1000 and then paging through results might improve reliability when the query result set is large. In addition to this limit, responses are also limited to 10 MB. By default, there is no maximum row count, and only the byte limit applies.",
+     "format": "uint32"
+    },
+    "preserveNulls": {
+     "type": "boolean",
+     "description": "[Deprecated] This property is deprecated."
+    },
+    "query": {
+     "type": "string",
+     "description": "[Required] A query string, following the BigQuery query syntax, of the query to execute. Example: \"SELECT count(f1) FROM [myProjectId:myDatasetId.myTableId]\".",
+     "annotations": {
+      "required": [
+       "bigquery.jobs.query"
+      ]
+     }
+    },
+    "timeoutMs": {
+     "type": "integer",
+     "description": "[Optional] How long to wait for the query to complete, in milliseconds, before the request times out and returns. Note that this is only a timeout for the request, not the query. If the query takes longer to run than the timeout value, the call returns without any results and with the 'jobComplete' flag set to false. You can call GetQueryResults() to wait for the query to complete and read the results. The default value is 10000 milliseconds (10 seconds).",
+     "format": "uint32"
+    },
+    "useLegacySql": {
+     "type": "boolean",
+     "description": "[Experimental] Specifies whether to use BigQuery's legacy SQL dialect for this query. The default value is true. If set to false, the query will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is set to false, the values of allowLargeResults and flattenResults are ignored; query will be run as if allowLargeResults is true and flattenResults is false."
+    },
+    "useQueryCache": {
+     "type": "boolean",
+     "description": "[Optional] Whether to look for the result in the query cache. The query cache is a best-effort cache that will be flushed whenever tables in the query are modified. The default value is true.",
+     "default": "true"
+    }
+   }
+  },
+  "QueryResponse": {
+   "id": "QueryResponse",
+   "type": "object",
+   "properties": {
+    "cacheHit": {
+     "type": "boolean",
+     "description": "Whether the query result was fetched from the query cache."
+    },
+    "errors": {
+     "type": "array",
+     "description": "[Output-only] All errors and warnings encountered during the running of the job. Errors here do not necessarily mean that the job has completed or was unsuccessful.",
+     "items": {
+      "$ref": "ErrorProto"
+     }
+    },
+    "jobComplete": {
+     "type": "boolean",
+     "description": "Whether the query has completed or not. If rows or totalRows are present, this will always be true. If this is false, totalRows will not be available."
+    },
+    "jobReference": {
+     "$ref": "JobReference",
+     "description": "Reference to the Job that was created to run the query. This field will be present even if the original request timed out, in which case GetQueryResults can be used to read the results once the query has completed. Since this API only returns the first page of results, subsequent pages can be fetched via the same mechanism (GetQueryResults)."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The resource type.",
+     "default": "bigquery#queryResponse"
+    },
+    "numDmlAffectedRows": {
+     "type": "string",
+     "description": "[Output-only, Experimental] The number of rows affected by a DML statement. Present only for DML statements INSERT, UPDATE or DELETE.",
+     "format": "int64"
+    },
+    "pageToken": {
+     "type": "string",
+     "description": "A token used for paging results."
+    },
+    "rows": {
+     "type": "array",
+     "description": "An object with as many results as can be contained within the maximum permitted reply size. To get any additional rows, you can call GetQueryResults and specify the jobReference returned above.",
+     "items": {
+      "$ref": "TableRow"
+     }
+    },
+    "schema": {
+     "$ref": "TableSchema",
+     "description": "The schema of the results. Present only when the query completes successfully."
+    },
+    "totalBytesProcessed": {
+     "type": "string",
+     "description": "The total number of bytes processed for this query. If this query was a dry run, this is the number of bytes that would be processed if the query were run.",
+     "format": "int64"
+    },
+    "totalRows": {
+     "type": "string",
+     "description": "The total number of rows in the complete query result set, which can be more than the number of rows in this single page of results.",
+     "format": "uint64"
+    }
+   }
+  },
+  "Streamingbuffer": {
+   "id": "Streamingbuffer",
+   "type": "object",
+   "properties": {
+    "estimatedBytes": {
+     "type": "string",
+     "description": "[Output-only] A lower-bound estimate of the number of bytes currently in the streaming buffer.",
+     "format": "uint64"
+    },
+    "estimatedRows": {
+     "type": "string",
+     "description": "[Output-only] A lower-bound estimate of the number of rows currently in the streaming buffer.",
+     "format": "uint64"
+    },
+    "oldestEntryTime": {
+     "type": "string",
+     "description": "[Output-only] Contains the timestamp of the oldest entry in the streaming buffer, in milliseconds since the epoch, if the streaming buffer is available.",
+     "format": "uint64"
+    }
+   }
+  },
+  "Table": {
+   "id": "Table",
+   "type": "object",
+   "properties": {
+    "creationTime": {
+     "type": "string",
+     "description": "[Output-only] The time when this table was created, in milliseconds since the epoch.",
+     "format": "int64"
+    },
+    "description": {
+     "type": "string",
+     "description": "[Optional] A user-friendly description of this table."
+    },
+    "etag": {
+     "type": "string",
+     "description": "[Output-only] A hash of this resource."
+    },
+    "expirationTime": {
+     "type": "string",
+     "description": "[Optional] The time when this table expires, in milliseconds since the epoch. If not present, the table will persist indefinitely. Expired tables will be deleted and their storage reclaimed.",
+     "format": "int64"
+    },
+    "externalDataConfiguration": {
+     "$ref": "ExternalDataConfiguration",
+     "description": "[Optional] Describes the data format, location, and other properties of a table stored outside of BigQuery. By defining these properties, the data source can then be queried as if it were a standard BigQuery table."
+    },
+    "friendlyName": {
+     "type": "string",
+     "description": "[Optional] A descriptive name for this table."
+    },
+    "id": {
+     "type": "string",
+     "description": "[Output-only] An opaque ID uniquely identifying the table."
+    },
+    "kind": {
+     "type": "string",
+     "description": "[Output-only] The type of the resource.",
+     "default": "bigquery#table"
+    },
+    "lastModifiedTime": {
+     "type": "string",
+     "description": "[Output-only] The time when this table was last modified, in milliseconds since the epoch.",
+     "format": "uint64"
+    },
+    "location": {
+     "type": "string",
+     "description": "[Output-only] The geographic location where the table resides. This value is inherited from the dataset."
+    },
+    "numBytes": {
+     "type": "string",
+     "description": "[Output-only] The size of this table in bytes, excluding any data in the streaming buffer.",
+     "format": "int64"
+    },
+    "numLongTermBytes": {
+     "type": "string",
+     "description": "[Output-only] The number of bytes in the table that are considered \"long-term storage\".",
+     "format": "int64"
+    },
+    "numRows": {
+     "type": "string",
+     "description": "[Output-only] The number of rows of data in this table, excluding any data in the streaming buffer.",
+     "format": "uint64"
+    },
+    "schema": {
+     "$ref": "TableSchema",
+     "description": "[Optional] Describes the schema of this table."
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "[Output-only] A URL that can be used to access this resource again."
+    },
+    "streamingBuffer": {
+     "$ref": "Streamingbuffer",
+     "description": "[Output-only] Contains information regarding this table's streaming buffer, if one is present. This field will be absent if the table is not being streamed to or if there is no data in the streaming buffer."
+    },
+    "tableReference": {
+     "$ref": "TableReference",
+     "description": "[Required] Reference describing the ID of this table."
+    },
+    "timePartitioning": {
+     "$ref": "TimePartitioning",
+     "description": "[Experimental] If specified, configures time-based partitioning for this table."
+    },
+    "type": {
+     "type": "string",
+     "description": "[Output-only] Describes the table type. The following values are supported: TABLE: A normal BigQuery table. VIEW: A virtual table defined by a SQL query. EXTERNAL: A table that references data stored in an external storage system, such as Google Cloud Storage. The default value is TABLE."
+    },
+    "view": {
+     "$ref": "ViewDefinition",
+     "description": "[Optional] The view definition."
+    }
+   }
+  },
+  "TableCell": {
+   "id": "TableCell",
+   "type": "object",
+   "properties": {
+    "v": {
+     "type": "any"
+    }
+   }
+  },
+  "TableDataInsertAllRequest": {
+   "id": "TableDataInsertAllRequest",
+   "type": "object",
+   "properties": {
+    "ignoreUnknownValues": {
+     "type": "boolean",
+     "description": "[Optional] Accept rows that contain values that do not match the schema. The unknown values are ignored. Default is false, which treats unknown values as errors."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The resource type of the response.",
+     "default": "bigquery#tableDataInsertAllRequest"
+    },
+    "rows": {
+     "type": "array",
+     "description": "The rows to insert.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "insertId": {
+        "type": "string",
+        "description": "[Optional] A unique ID for each row. BigQuery uses this property to detect duplicate insertion requests on a best-effort basis."
+       },
+       "json": {
+        "$ref": "JsonObject",
+        "description": "[Required] A JSON object that contains a row of data. The object's properties and values must match the destination table's schema."
+       }
+      }
+     }
+    },
+    "skipInvalidRows": {
+     "type": "boolean",
+     "description": "[Optional] Insert all valid rows of a request, even if invalid rows exist. The default value is false, which causes the entire request to fail if any invalid rows exist."
+    },
+    "templateSuffix": {
+     "type": "string",
+     "description": "[Experimental] If specified, treats the destination table as a base template, and inserts the rows into an instance table named \"{destination}{templateSuffix}\". BigQuery will manage creation of the instance table, using the schema of the base template table. See https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-tables for considerations when working with template tables."
+    }
+   }
+  },
+  "TableDataInsertAllResponse": {
+   "id": "TableDataInsertAllResponse",
+   "type": "object",
+   "properties": {
+    "insertErrors": {
+     "type": "array",
+     "description": "An array of errors for rows that were not inserted.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "errors": {
+        "type": "array",
+        "description": "Error information for the row indicated by the index property.",
+        "items": {
+         "$ref": "ErrorProto"
+        }
+       },
+       "index": {
+        "type": "integer",
+        "description": "The index of the row that the error applies to.",
+        "format": "uint32"
+       }
+      }
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "The resource type of the response.",
+     "default": "bigquery#tableDataInsertAllResponse"
+    }
+   }
+  },
+  "TableDataList": {
+   "id": "TableDataList",
+   "type": "object",
+   "properties": {
+    "etag": {
+     "type": "string",
+     "description": "A hash of this page of results."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The resource type of the response.",
+     "default": "bigquery#tableDataList"
+    },
+    "pageToken": {
+     "type": "string",
+     "description": "A token used for paging results. Providing this token instead of the startIndex parameter can help you retrieve stable results when an underlying table is changing."
+    },
+    "rows": {
+     "type": "array",
+     "description": "Rows of results.",
+     "items": {
+      "$ref": "TableRow"
+     }
+    },
+    "totalRows": {
+     "type": "string",
+     "description": "The total number of rows in the complete table.",
+     "format": "int64"
+    }
+   }
+  },
+  "TableFieldSchema": {
+   "id": "TableFieldSchema",
+   "type": "object",
+   "properties": {
+    "description": {
+     "type": "string",
+     "description": "[Optional] The field description. The maximum length is 16K characters."
+    },
+    "fields": {
+     "type": "array",
+     "description": "[Optional] Describes the nested schema fields if the type property is set to RECORD.",
+     "items": {
+      "$ref": "TableFieldSchema"
+     }
+    },
+    "mode": {
+     "type": "string",
+     "description": "[Optional] The field mode. Possible values include NULLABLE, REQUIRED and REPEATED. The default value is NULLABLE."
+    },
+    "name": {
+     "type": "string",
+     "description": "[Required] The field name. The name must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_), and must start with a letter or underscore. The maximum length is 128 characters."
+    },
+    "type": {
+     "type": "string",
+     "description": "[Required] The field data type. Possible values include STRING, BYTES, INTEGER, FLOAT, BOOLEAN, TIMESTAMP or RECORD (where RECORD indicates that the field contains a nested schema)."
+    }
+   }
+  },
+  "TableList": {
+   "id": "TableList",
+   "type": "object",
+   "properties": {
+    "etag": {
+     "type": "string",
+     "description": "A hash of this page of results."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The type of list.",
+     "default": "bigquery#tableList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "A token to request the next page of results."
+    },
+    "tables": {
+     "type": "array",
+     "description": "Tables in the requested dataset.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "friendlyName": {
+        "type": "string",
+        "description": "The user-friendly name for this table."
+       },
+       "id": {
+        "type": "string",
+        "description": "An opaque ID of the table"
+       },
+       "kind": {
+        "type": "string",
+        "description": "The resource type.",
+        "default": "bigquery#table"
+       },
+       "tableReference": {
+        "$ref": "TableReference",
+        "description": "A reference uniquely identifying the table."
+       },
+       "type": {
+        "type": "string",
+        "description": "The type of table. Possible values are: TABLE, VIEW."
+       }
+      }
+     }
+    },
+    "totalItems": {
+     "type": "integer",
+     "description": "The total number of tables in the dataset.",
+     "format": "int32"
+    }
+   }
+  },
+  "TableReference": {
+   "id": "TableReference",
+   "type": "object",
+   "properties": {
+    "datasetId": {
+     "type": "string",
+     "description": "[Required] The ID of the dataset containing this table.",
+     "annotations": {
+      "required": [
+       "bigquery.tables.update"
+      ]
+     }
+    },
+    "projectId": {
+     "type": "string",
+     "description": "[Required] The ID of the project containing this table.",
+     "annotations": {
+      "required": [
+       "bigquery.tables.update"
+      ]
+     }
+    },
+    "tableId": {
+     "type": "string",
+     "description": "[Required] The ID of the table. The ID must contain only letters (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is 1,024 characters.",
+     "annotations": {
+      "required": [
+       "bigquery.tables.update"
+      ]
+     }
+    }
+   }
+  },
+  "TableRow": {
+   "id": "TableRow",
+   "type": "object",
+   "properties": {
+    "f": {
+     "type": "array",
+     "description": "Represents a single row in the result set, consisting of one or more fields.",
+     "items": {
+      "$ref": "TableCell"
+     }
+    }
+   }
+  },
+  "TableSchema": {
+   "id": "TableSchema",
+   "type": "object",
+   "properties": {
+    "fields": {
+     "type": "array",
+     "description": "Describes the fields in a table.",
+     "items": {
+      "$ref": "TableFieldSchema"
+     }
+    }
+   }
+  },
+  "TimePartitioning": {
+   "id": "TimePartitioning",
+   "type": "object",
+   "properties": {
+    "expirationMs": {
+     "type": "string",
+     "description": "[Optional] Number of milliseconds for which to keep the storage for a partition.",
+     "format": "int64"
+    },
+    "type": {
+     "type": "string",
+     "description": "[Required] The only type supported is DAY, which will generate one partition per day based on data loading time."
+    }
+   }
+  },
+  "UserDefinedFunctionResource": {
+   "id": "UserDefinedFunctionResource",
+   "type": "object",
+   "properties": {
+    "inlineCode": {
+     "type": "string",
+     "description": "[Pick one] An inline resource that contains code for a user-defined function (UDF). Providing an inline code resource is equivalent to providing a URI for a file containing the same code."
+    },
+    "resourceUri": {
+     "type": "string",
+     "description": "[Pick one] A code resource to load from a Google Cloud Storage URI (gs://bucket/path)."
+    }
+   }
+  },
+  "ViewDefinition": {
+   "id": "ViewDefinition",
+   "type": "object",
+   "properties": {
+    "query": {
+     "type": "string",
+     "description": "[Required] A query that BigQuery executes when the view is referenced."
+    },
+    "useLegacySql": {
+     "type": "boolean",
+     "description": "[Experimental] Specifies whether to use BigQuery's legacy SQL for this view. The default value is true. If set to false, the view will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-reference/ Queries and views that reference this view must use the same flag value."
+    },
+    "userDefinedFunctionResources": {
+     "type": "array",
+     "description": "[Experimental] Describes user-defined function resources used in the query.",
+     "items": {
+      "$ref": "UserDefinedFunctionResource"
+     }
+    }
+   }
+  }
+ },
+ "resources": {
+  "datasets": {
+   "methods": {
+    "delete": {
+     "id": "bigquery.datasets.delete",
+     "path": "projects/{projectId}/datasets/{datasetId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of dataset being deleted",
+       "required": true,
+       "location": "path"
+      },
+      "deleteContents": {
+       "type": "boolean",
+       "description": "If True, delete all the tables in the dataset. If False and the dataset contains tables, the request will fail. Default is False",
+       "location": "query"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the dataset being deleted",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    },
+    "get": {
+     "id": "bigquery.datasets.get",
+     "path": "projects/{projectId}/datasets/{datasetId}",
+     "httpMethod": "GET",
+     "description": "Returns the dataset specified by datasetId.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the requested dataset",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the requested dataset",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId"
+     ],
+     "response": {
+      "$ref": "Dataset"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    },
+    "insert": {
+     "id": "bigquery.datasets.insert",
+     "path": "projects/{projectId}/datasets",
+     "httpMethod": "POST",
+     "description": "Creates a new empty dataset.",
+     "parameters": {
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the new dataset",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId"
+     ],
+     "request": {
+      "$ref": "Dataset"
+     },
+     "response": {
+      "$ref": "Dataset"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    },
+    "list": {
+     "id": "bigquery.datasets.list",
+     "path": "projects/{projectId}/datasets",
+     "httpMethod": "GET",
+     "description": "Lists all datasets in the specified project to which you have been granted the READER dataset role.",
+     "parameters": {
+      "all": {
+       "type": "boolean",
+       "description": "Whether to list all datasets, including hidden ones",
+       "location": "query"
+      },
+      "filter": {
+       "type": "string",
+       "description": "An expression for filtering the results of the request by label. The syntax is \"labels.<name>[:<value>]\". Multiple filters can be ANDed together by connecting with a space. Example: \"labels.department:receiving labels.active\". See https://cloud.google.com/bigquery/docs/labeling-datasets#filtering_datasets_using_labels for details.",
+       "location": "query"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "The maximum number of results to return",
+       "format": "uint32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Page token, returned by a previous call, to request the next page of results",
+       "location": "query"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the datasets to be listed",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId"
+     ],
+     "response": {
+      "$ref": "DatasetList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    },
+    "patch": {
+     "id": "bigquery.datasets.patch",
+     "path": "projects/{projectId}/datasets/{datasetId}",
+     "httpMethod": "PATCH",
+     "description": "Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. This method supports patch semantics.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the dataset being updated",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the dataset being updated",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId"
+     ],
+     "request": {
+      "$ref": "Dataset"
+     },
+     "response": {
+      "$ref": "Dataset"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    },
+    "update": {
+     "id": "bigquery.datasets.update",
+     "path": "projects/{projectId}/datasets/{datasetId}",
+     "httpMethod": "PUT",
+     "description": "Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the dataset being updated",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the dataset being updated",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId"
+     ],
+     "request": {
+      "$ref": "Dataset"
+     },
+     "response": {
+      "$ref": "Dataset"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    }
+   }
+  },
+  "jobs": {
+   "methods": {
+    "cancel": {
+     "id": "bigquery.jobs.cancel",
+     "path": "project/{projectId}/jobs/{jobId}/cancel",
+     "httpMethod": "POST",
+     "description": "Requests that a job be cancelled. This call will return immediately, and the client will need to poll for the job status to see if the cancel completed successfully. Cancelled jobs may still incur costs.",
+     "parameters": {
+      "jobId": {
+       "type": "string",
+       "description": "[Required] Job ID of the job to cancel",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "[Required] Project ID of the job to cancel",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "jobId"
+     ],
+     "response": {
+      "$ref": "JobCancelResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    },
+    "get": {
+     "id": "bigquery.jobs.get",
+     "path": "projects/{projectId}/jobs/{jobId}",
+     "httpMethod": "GET",
+     "description": "Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.",
+     "parameters": {
+      "jobId": {
+       "type": "string",
+       "description": "[Required] Job ID of the requested job",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "[Required] Project ID of the requested job",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "jobId"
+     ],
+     "response": {
+      "$ref": "Job"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    },
+    "getQueryResults": {
+     "id": "bigquery.jobs.getQueryResults",
+     "path": "projects/{projectId}/queries/{jobId}",
+     "httpMethod": "GET",
+     "description": "Retrieves the results of a query job.",
+     "parameters": {
+      "jobId": {
+       "type": "string",
+       "description": "[Required] Job ID of the query job",
+       "required": true,
+       "location": "path"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of results to read",
+       "format": "uint32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Page token, returned by a previous call, to request the next page of results",
+       "location": "query"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "[Required] Project ID of the query job",
+       "required": true,
+       "location": "path"
+      },
+      "startIndex": {
+       "type": "string",
+       "description": "Zero-based index of the starting row",
+       "format": "uint64",
+       "location": "query"
+      },
+      "timeoutMs": {
+       "type": "integer",
+       "description": "How long to wait for the query to complete, in milliseconds, before returning. Default is 10 seconds. If the timeout passes before the job completes, the 'jobComplete' field in the response will be false",
+       "format": "uint32",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "jobId"
+     ],
+     "response": {
+      "$ref": "GetQueryResultsResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    },
+    "insert": {
+     "id": "bigquery.jobs.insert",
+     "path": "projects/{projectId}/jobs",
+     "httpMethod": "POST",
+     "description": "Starts a new asynchronous job. Requires the Can View project role.",
+     "parameters": {
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the project that will be billed for the job",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId"
+     ],
+     "request": {
+      "$ref": "Job"
+     },
+     "response": {
+      "$ref": "Job"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ],
+     "supportsMediaUpload": true,
+     "mediaUpload": {
+      "accept": [
+       "*/*"
+      ],
+      "protocols": {
+       "simple": {
+        "multipart": true,
+        "path": "/upload/bigquery/v2/projects/{projectId}/jobs"
+       },
+       "resumable": {
+        "multipart": true,
+        "path": "/resumable/upload/bigquery/v2/projects/{projectId}/jobs"
+       }
+      }
+     }
+    },
+    "list": {
+     "id": "bigquery.jobs.list",
+     "path": "projects/{projectId}/jobs",
+     "httpMethod": "GET",
+     "description": "Lists all jobs that you started in the specified project. Job information is available for a six month period after creation. The job list is sorted in reverse chronological order, by job creation time. Requires the Can View project role, or the Is Owner project role if you set the allUsers property.",
+     "parameters": {
+      "allUsers": {
+       "type": "boolean",
+       "description": "Whether to display jobs owned by all users in the project. Default false",
+       "location": "query"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of results to return",
+       "format": "uint32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Page token, returned by a previous call, to request the next page of results",
+       "location": "query"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the jobs to list",
+       "required": true,
+       "location": "path"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Restrict information returned to a set of selected fields",
+       "enum": [
+        "full",
+        "minimal"
+       ],
+       "enumDescriptions": [
+        "Includes all job data",
+        "Does not include the job configuration"
+       ],
+       "location": "query"
+      },
+      "stateFilter": {
+       "type": "string",
+       "description": "Filter for job state",
+       "enum": [
+        "done",
+        "pending",
+        "running"
+       ],
+       "enumDescriptions": [
+        "Finished jobs",
+        "Pending jobs",
+        "Running jobs"
+       ],
+       "repeated": true,
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "projectId"
+     ],
+     "response": {
+      "$ref": "JobList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    },
+    "query": {
+     "id": "bigquery.jobs.query",
+     "path": "projects/{projectId}/queries",
+     "httpMethod": "POST",
+     "description": "Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.",
+     "parameters": {
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the project billed for the query",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId"
+     ],
+     "request": {
+      "$ref": "QueryRequest"
+     },
+     "response": {
+      "$ref": "QueryResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    }
+   }
+  },
+  "projects": {
+   "methods": {
+    "list": {
+     "id": "bigquery.projects.list",
+     "path": "projects",
+     "httpMethod": "GET",
+     "description": "Lists all projects to which you have been granted any project role.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of results to return",
+       "format": "uint32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Page token, returned by a previous call, to request the next page of results",
+       "location": "query"
+      }
+     },
+     "response": {
+      "$ref": "ProjectList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    }
+   }
+  },
+  "tabledata": {
+   "methods": {
+    "insertAll": {
+     "id": "bigquery.tabledata.insertAll",
+     "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}/insertAll",
+     "httpMethod": "POST",
+     "description": "Streams data into BigQuery one record at a time without needing to run a load job. Requires the WRITER dataset role.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the destination table.",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the destination table.",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table ID of the destination table.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId",
+      "tableId"
+     ],
+     "request": {
+      "$ref": "TableDataInsertAllRequest"
+     },
+     "response": {
+      "$ref": "TableDataInsertAllResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/bigquery.insertdata",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    },
+    "list": {
+     "id": "bigquery.tabledata.list",
+     "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}/data",
+     "httpMethod": "GET",
+     "description": "Retrieves table data from a specified set of rows. Requires the READER dataset role.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the table to read",
+       "required": true,
+       "location": "path"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of results to return",
+       "format": "uint32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Page token, returned by a previous call, identifying the result set",
+       "location": "query"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the table to read",
+       "required": true,
+       "location": "path"
+      },
+      "startIndex": {
+       "type": "string",
+       "description": "Zero-based index of the starting row to read",
+       "format": "uint64",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table ID of the table to read",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId",
+      "tableId"
+     ],
+     "response": {
+      "$ref": "TableDataList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    }
+   }
+  },
+  "tables": {
+   "methods": {
+    "delete": {
+     "id": "bigquery.tables.delete",
+     "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the table to delete",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the table to delete",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table ID of the table to delete",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId",
+      "tableId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    },
+    "get": {
+     "id": "bigquery.tables.get",
+     "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
+     "httpMethod": "GET",
+     "description": "Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the requested table",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the requested table",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table ID of the requested table",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId",
+      "tableId"
+     ],
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    },
+    "insert": {
+     "id": "bigquery.tables.insert",
+     "path": "projects/{projectId}/datasets/{datasetId}/tables",
+     "httpMethod": "POST",
+     "description": "Creates a new, empty table in the dataset.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the new table",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the new table",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId"
+     ],
+     "request": {
+      "$ref": "Table"
+     },
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    },
+    "list": {
+     "id": "bigquery.tables.list",
+     "path": "projects/{projectId}/datasets/{datasetId}/tables",
+     "httpMethod": "GET",
+     "description": "Lists all tables in the specified dataset. Requires the READER dataset role.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the tables to list",
+       "required": true,
+       "location": "path"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of results to return",
+       "format": "uint32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Page token, returned by a previous call, to request the next page of results",
+       "location": "query"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the tables to list",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId"
+     ],
+     "response": {
+      "$ref": "TableList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only"
+     ]
+    },
+    "patch": {
+     "id": "bigquery.tables.patch",
+     "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
+     "httpMethod": "PATCH",
+     "description": "Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource. This method supports patch semantics.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the table to update",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the table to update",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table ID of the table to update",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId",
+      "tableId"
+     ],
+     "request": {
+      "$ref": "Table"
+     },
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    },
+    "update": {
+     "id": "bigquery.tables.update",
+     "path": "projects/{projectId}/datasets/{datasetId}/tables/{tableId}",
+     "httpMethod": "PUT",
+     "description": "Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource.",
+     "parameters": {
+      "datasetId": {
+       "type": "string",
+       "description": "Dataset ID of the table to update",
+       "required": true,
+       "location": "path"
+      },
+      "projectId": {
+       "type": "string",
+       "description": "Project ID of the table to update",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table ID of the table to update",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "projectId",
+      "datasetId",
+      "tableId"
+     ],
+     "request": {
+      "$ref": "Table"
+     },
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/bigquery",
+      "https://www.googleapis.com/auth/cloud-platform"
+     ]
+    }
+   }
+  }
+ }
+}
diff --git a/samples/bigquery_sample/bigquery_v2/__init__.py b/samples/bigquery_sample/bigquery_v2/__init__.py
new file mode 100644
index 0000000..2816da8
--- /dev/null
+++ b/samples/bigquery_sample/bigquery_v2/__init__.py
@@ -0,0 +1,5 @@
+"""Package marker file."""
+
+import pkgutil
+
+__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/samples/bigquery_sample/bigquery_v2/bigquery_v2.py b/samples/bigquery_sample/bigquery_v2/bigquery_v2.py
new file mode 100644
index 0000000..7cd69b5
--- /dev/null
+++ b/samples/bigquery_sample/bigquery_v2/bigquery_v2.py
@@ -0,0 +1,1096 @@
+#!/usr/bin/env python
+"""CLI for bigquery, version v2."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+import code
+import os
+import platform
+import sys
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+
+from google.apputils import appcommands
+import gflags as flags
+
+import apitools.base.py as apitools_base
+from apitools.base.py import cli as apitools_base_cli
+import bigquery_v2_client as client_lib
+import bigquery_v2_messages as messages
+
+
def _DeclareBigqueryFlags():
  """Declare global flags in an idempotent way.

  Registers the bigquery CLI's global flags. The presence of
  'api_endpoint' is used as a sentinel so a second call (e.g. on module
  re-import) returns without attempting duplicate registration.
  """
  if 'api_endpoint' in flags.FLAGS:
    return
  # --- CLI / client configuration flags ---
  flags.DEFINE_string(
      'api_endpoint',
      u'https://www.googleapis.com/bigquery/v2/',
      'URL of the API endpoint to use.',
      short_name='bigquery_url')
  flags.DEFINE_string(
      'history_file',
      u'~/.bigquery.v2.history',
      'File with interactive shell history.')
  flags.DEFINE_multistring(
      'add_header', [],
      'Additional http headers (as key=value strings). '
      'Can be specified multiple times.')
  flags.DEFINE_string(
      'service_account_json_keyfile', '',
      'Filename for a JSON service account key downloaded'
      ' from the Developer Console.')
  # --- Standard Google API query parameters (forwarded on every request;
  # see GetGlobalParamsFromFlags) ---
  flags.DEFINE_enum(
      'alt',
      u'json',
      [u'json'],
      u'Data format for the response.')
  flags.DEFINE_string(
      'fields',
      None,
      u'Selector specifying which fields to include in a partial response.')
  flags.DEFINE_string(
      'key',
      None,
      u'API key. Your API key identifies your project and provides you with '
      u'API access, quota, and reports. Required unless you provide an OAuth '
      u'2.0 token.')
  flags.DEFINE_string(
      'oauth_token',
      None,
      u'OAuth 2.0 token for the current user.')
  flags.DEFINE_boolean(
      'prettyPrint',
      'True',
      u'Returns response with indentations and line breaks.')
  flags.DEFINE_string(
      'quotaUser',
      None,
      u'Available to use for quota purposes for server-side applications. Can'
      u' be any arbitrary string assigned to a user, but should not exceed 40'
      u' characters. Overrides userIp if both are provided.')
  flags.DEFINE_string(
      'trace',
      None,
      'A tracing token of the form "token:<tokenid>" to include in api '
      'requests.')
  flags.DEFINE_string(
      'userIp',
      None,
      u'IP address of the site where the request originates. Use this if you '
      u'want to enforce per-user limits.')
+
+
# Module-level flag registration: base apitools flags first, then the
# bigquery-specific flags (idempotent, so repeated imports are safe).
FLAGS = flags.FLAGS
apitools_base_cli.DeclareBaseFlags()
_DeclareBigqueryFlags()
+
+
def GetGlobalParamsFromFlags():
  """Build a StandardQueryParameters message from explicitly-set flags.

  Only flags the user actually passed (``.present``) are copied onto the
  message, so unset flags keep the message defaults.

  Returns:
    A messages.StandardQueryParameters instance.
  """
  params = messages.StandardQueryParameters()
  if FLAGS['alt'].present:
    params.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
  if FLAGS['prettyPrint'].present:
    params.prettyPrint = FLAGS.prettyPrint
  # The remaining parameters are plain strings with identical handling:
  # message field name matches the flag name, value decoded as UTF-8.
  for flag_name in ('fields', 'key', 'oauth_token', 'quotaUser', 'trace',
                    'userIp'):
    if FLAGS[flag_name].present:
      setattr(params, flag_name, getattr(FLAGS, flag_name).decode('utf8'))
  return params
+
+
+def GetClientFromFlags():
+  """Return a client object, configured from flags."""
+  log_request = FLAGS.log_request or FLAGS.log_request_response
+  log_response = FLAGS.log_response or FLAGS.log_request_response
+  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
+  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
+  credentials_args = {
+      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
+  }
+  try:
+    client = client_lib.BigqueryV2(
+        api_endpoint, log_request=log_request,
+        log_response=log_response,
+        credentials_args=credentials_args,
+        additional_http_headers=additional_http_headers)
+  except apitools_base.CredentialsError as e:
+    print 'Error creating credentials: %s' % e
+    sys.exit(1)
+  return client
+
+
class PyShell(appcommands.Cmd):
  """Command that drops the user into an interactive Python console."""

  def Run(self, _):
    """Run an interactive python shell with the client."""
    client = GetClientFromFlags()
    params = GetGlobalParamsFromFlags()
    # Copy any explicitly-set standard query parameters onto the client so
    # they apply to every request issued from the shell.
    for field in params.all_fields():
      value = params.get_assigned_value(field.name)
      if value != field.default:
        client.AddGlobalParam(field.name, value)
    banner = """
           == bigquery interactive console ==
                 client: a bigquery client
          apitools_base: base apitools module
         messages: the generated messages module
    """
    local_vars = {
        'apitools_base': apitools_base,
        'client': client,
        'client_lib': client_lib,
        'messages': messages,
    }
    # The readline-backed console (persistent history) is only used on
    # Linux; other platforms get the plain stdlib InteractiveConsole.
    if platform.system() == 'Linux':
      console = apitools_base_cli.ConsoleWithReadline(
          local_vars, histfile=FLAGS.history_file)
    else:
      console = code.InteractiveConsole(local_vars)
    try:
      console.interact(banner)
    except SystemExit as e:
      # Exiting the console raises SystemExit; surface its exit code as the
      # command's result instead of terminating the whole process here.
      return e.code
+
+
+class DatasetsDelete(apitools_base_cli.NewCmd):
+  """Command wrapping datasets.Delete."""
+
+  usage = """datasets_delete <projectId> <datasetId>"""
+
+  def __init__(self, name, fv):
+    super(DatasetsDelete, self).__init__(name, fv)
+    flags.DEFINE_boolean(
+        'deleteContents',
+        None,
+        u'If True, delete all the tables in the dataset. If False and the '
+        u'dataset contains tables, the request will fail. Default is False',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId):
+    """Deletes the dataset specified by the datasetId value. Before you can
+    delete a dataset, you must delete all its tables, either manually or by
+    specifying deleteContents. Immediately after deletion, you can create
+    another dataset with the same name.
+
+    Args:
+      projectId: Project ID of the dataset being deleted
+      datasetId: Dataset ID of dataset being deleted
+
+    Flags:
+      deleteContents: If True, delete all the tables in the dataset. If False
+        and the dataset contains tables, the request will fail. Default is
+        False
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryDatasetsDeleteRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        )
+    if FLAGS['deleteContents'].present:
+      request.deleteContents = FLAGS.deleteContents
+    result = client.datasets.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DatasetsGet(apitools_base_cli.NewCmd):
+  """Command wrapping datasets.Get."""
+
+  usage = """datasets_get <projectId> <datasetId>"""
+
+  def __init__(self, name, fv):
+    super(DatasetsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, projectId, datasetId):
+    """Returns the dataset specified by datasetID.
+
+    Args:
+      projectId: Project ID of the requested dataset
+      datasetId: Dataset ID of the requested dataset
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryDatasetsGetRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        )
+    result = client.datasets.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DatasetsInsert(apitools_base_cli.NewCmd):
+  """Command wrapping datasets.Insert."""
+
+  usage = """datasets_insert <projectId>"""
+
+  def __init__(self, name, fv):
+    super(DatasetsInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'dataset',
+        None,
+        u'A Dataset resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId):
+    """Creates a new empty dataset.
+
+    Args:
+      projectId: Project ID of the new dataset
+
+    Flags:
+      dataset: A Dataset resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryDatasetsInsertRequest(
+        projectId=projectId.decode('utf8'),
+        )
+    if FLAGS['dataset'].present:
+      request.dataset = apitools_base.JsonToMessage(messages.Dataset, FLAGS.dataset)
+    result = client.datasets.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DatasetsList(apitools_base_cli.NewCmd):
+  """Command wrapping datasets.List."""
+
+  usage = """datasets_list <projectId>"""
+
+  def __init__(self, name, fv):
+    super(DatasetsList, self).__init__(name, fv)
+    flags.DEFINE_boolean(
+        'all',
+        None,
+        u'Whether to list all datasets, including hidden ones',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'filter',
+        None,
+        u'An expression for filtering the results of the request by label. '
+        u'The syntax is "labels.[:]". Multiple filters can be ANDed together '
+        u'by connecting with a space. Example: "labels.department:receiving '
+        u'labels.active". See https://cloud.google.com/bigquery/docs'
+        u'/labeling-datasets#filtering_datasets_using_labels for details.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'The maximum number of results to return',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Page token, returned by a previous call, to request the next page '
+        u'of results',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId):
+    """Lists all datasets in the specified project to which you have been
+    granted the READER dataset role.
+
+    Args:
+      projectId: Project ID of the datasets to be listed
+
+    Flags:
+      all: Whether to list all datasets, including hidden ones
+      filter: An expression for filtering the results of the request by label.
+        The syntax is "labels.[:]". Multiple filters can be ANDed together by
+        connecting with a space. Example: "labels.department:receiving
+        labels.active". See https://cloud.google.com/bigquery/docs/labeling-
+        datasets#filtering_datasets_using_labels for details.
+      maxResults: The maximum number of results to return
+      pageToken: Page token, returned by a previous call, to request the next
+        page of results
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryDatasetsListRequest(
+        projectId=projectId.decode('utf8'),
+        )
+    if FLAGS['all'].present:
+      request.all = FLAGS.all
+    if FLAGS['filter'].present:
+      request.filter = FLAGS.filter.decode('utf8')
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.datasets.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DatasetsPatch(apitools_base_cli.NewCmd):
+  """Command wrapping datasets.Patch."""
+
+  usage = """datasets_patch <projectId> <datasetId>"""
+
+  def __init__(self, name, fv):
+    super(DatasetsPatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'dataset',
+        None,
+        u'A Dataset resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId):
+    """Updates information in an existing dataset. The update method replaces
+    the entire dataset resource, whereas the patch method only replaces fields
+    that are provided in the submitted dataset resource. This method supports
+    patch semantics.
+
+    Args:
+      projectId: Project ID of the dataset being updated
+      datasetId: Dataset ID of the dataset being updated
+
+    Flags:
+      dataset: A Dataset resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryDatasetsPatchRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        )
+    if FLAGS['dataset'].present:
+      request.dataset = apitools_base.JsonToMessage(messages.Dataset, FLAGS.dataset)
+    result = client.datasets.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DatasetsUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping datasets.Update."""
+
+  usage = """datasets_update <projectId> <datasetId>"""
+
+  def __init__(self, name, fv):
+    super(DatasetsUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'dataset',
+        None,
+        u'A Dataset resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId):
+    """Updates information in an existing dataset. The update method replaces
+    the entire dataset resource, whereas the patch method only replaces fields
+    that are provided in the submitted dataset resource.
+
+    Args:
+      projectId: Project ID of the dataset being updated
+      datasetId: Dataset ID of the dataset being updated
+
+    Flags:
+      dataset: A Dataset resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryDatasetsUpdateRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        )
+    if FLAGS['dataset'].present:
+      request.dataset = apitools_base.JsonToMessage(messages.Dataset, FLAGS.dataset)
+    result = client.datasets.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class JobsCancel(apitools_base_cli.NewCmd):
+  """Command wrapping jobs.Cancel."""
+
+  usage = """jobs_cancel <projectId> <jobId>"""
+
+  def __init__(self, name, fv):
+    super(JobsCancel, self).__init__(name, fv)
+
+  def RunWithArgs(self, projectId, jobId):
+    """Requests that a job be cancelled. This call will return immediately,
+    and the client will need to poll for the job status to see if the cancel
+    completed successfully. Cancelled jobs may still incur costs.
+
+    Args:
+      projectId: [Required] Project ID of the job to cancel
+      jobId: [Required] Job ID of the job to cancel
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryJobsCancelRequest(
+        projectId=projectId.decode('utf8'),
+        jobId=jobId.decode('utf8'),
+        )
+    result = client.jobs.Cancel(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class JobsGet(apitools_base_cli.NewCmd):
+  """Command wrapping jobs.Get."""
+
+  usage = """jobs_get <projectId> <jobId>"""
+
+  def __init__(self, name, fv):
+    super(JobsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, projectId, jobId):
+    """Returns information about a specific job. Job information is available
+    for a six month period after creation. Requires that you're the person who
+    ran the job, or have the Is Owner project role.
+
+    Args:
+      projectId: [Required] Project ID of the requested job
+      jobId: [Required] Job ID of the requested job
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryJobsGetRequest(
+        projectId=projectId.decode('utf8'),
+        jobId=jobId.decode('utf8'),
+        )
+    result = client.jobs.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class JobsGetQueryResults(apitools_base_cli.NewCmd):
+  """Command wrapping jobs.GetQueryResults."""
+
+  usage = """jobs_getQueryResults <projectId> <jobId>"""
+
+  def __init__(self, name, fv):
+    super(JobsGetQueryResults, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of results to read',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Page token, returned by a previous call, to request the next page '
+        u'of results',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'startIndex',
+        None,
+        u'Zero-based index of the starting row',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'timeoutMs',
+        None,
+        u'How long to wait for the query to complete, in milliseconds, before'
+        u' returning. Default is 10 seconds. If the timeout passes before the'
+        u" job completes, the 'jobComplete' field in the response will be "
+        u'false',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, jobId):
+    """Retrieves the results of a query job.
+
+    Args:
+      projectId: [Required] Project ID of the query job
+      jobId: [Required] Job ID of the query job
+
+    Flags:
+      maxResults: Maximum number of results to read
+      pageToken: Page token, returned by a previous call, to request the next
+        page of results
+      startIndex: Zero-based index of the starting row
+      timeoutMs: How long to wait for the query to complete, in milliseconds,
+        before returning. Default is 10 seconds. If the timeout passes before
+        the job completes, the 'jobComplete' field in the response will be
+        false
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryJobsGetQueryResultsRequest(
+        projectId=projectId.decode('utf8'),
+        jobId=jobId.decode('utf8'),
+        )
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['startIndex'].present:
+      request.startIndex = int(FLAGS.startIndex)
+    if FLAGS['timeoutMs'].present:
+      request.timeoutMs = FLAGS.timeoutMs
+    result = client.jobs.GetQueryResults(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class JobsInsert(apitools_base_cli.NewCmd):
+  """Command wrapping jobs.Insert."""
+
+  usage = """jobs_insert <projectId>"""
+
+  def __init__(self, name, fv):
+    super(JobsInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'job',
+        None,
+        u'A Job resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'upload_filename',
+        '',
+        'Filename to use for upload.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'upload_mime_type',
+        '',
+        'MIME type to use for the upload. Only needed if the extension on '
+        '--upload_filename does not determine the correct (or any) MIME '
+        'type.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId):
+    """Starts a new asynchronous job. Requires the Can View project role.
+
+    Args:
+      projectId: Project ID of the project that will be billed for the job
+
+    Flags:
+      job: A Job resource to be passed as the request body.
+      upload_filename: Filename to use for upload.
+      upload_mime_type: MIME type to use for the upload. Only needed if the
+        extension on --upload_filename does not determine the correct (or any)
+        MIME type.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryJobsInsertRequest(
+        projectId=projectId.decode('utf8'),
+        )
+    if FLAGS['job'].present:
+      request.job = apitools_base.JsonToMessage(messages.Job, FLAGS.job)
+    upload = None
+    if FLAGS.upload_filename:
+      upload = apitools_base.Upload.FromFile(
+          FLAGS.upload_filename, FLAGS.upload_mime_type,
+          progress_callback=apitools_base.UploadProgressPrinter,
+          finish_callback=apitools_base.UploadCompletePrinter)
+    result = client.jobs.Insert(
+        request, global_params=global_params, upload=upload)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class JobsList(apitools_base_cli.NewCmd):
+  """Command wrapping jobs.List."""
+
+  usage = """jobs_list <projectId>"""
+
+  def __init__(self, name, fv):
+    super(JobsList, self).__init__(name, fv)
+    flags.DEFINE_boolean(
+        'allUsers',
+        None,
+        u'Whether to display jobs owned by all users in the project. Default '
+        u'false',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of results to return',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Page token, returned by a previous call, to request the next page '
+        u'of results',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'minimal'],
+        u'Restrict information returned to a set of selected fields',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'stateFilter',
+        u'done',
+        [u'done', u'pending', u'running'],
+        u'Filter for job state',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId):
+    """Lists all jobs that you started in the specified project. Job
+    information is available for a six month period after creation. The job
+    list is sorted in reverse chronological order, by job creation time.
+    Requires the Can View project role, or the Is Owner project role if you
+    set the allUsers property.
+
+    Args:
+      projectId: Project ID of the jobs to list
+
+    Flags:
+      allUsers: Whether to display jobs owned by all users in the project.
+        Default false
+      maxResults: Maximum number of results to return
+      pageToken: Page token, returned by a previous call, to request the next
+        page of results
+      projection: Restrict information returned to a set of selected fields
+      stateFilter: Filter for job state
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryJobsListRequest(
+        projectId=projectId.decode('utf8'),
+        )
+    if FLAGS['allUsers'].present:
+      request.allUsers = FLAGS.allUsers
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['projection'].present:
+      request.projection = messages.BigqueryJobsListRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    if FLAGS['stateFilter'].present:
+      request.stateFilter = [messages.BigqueryJobsListRequest.StateFilterValueValuesEnum(x) for x in FLAGS.stateFilter]
+    result = client.jobs.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class JobsQuery(apitools_base_cli.NewCmd):
+  """Command wrapping jobs.Query."""
+
+  usage = """jobs_query <projectId>"""
+
+  def __init__(self, name, fv):
+    super(JobsQuery, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'queryRequest',
+        None,
+        u'A QueryRequest resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId):
+    """Runs a BigQuery SQL query synchronously and returns query results if
+    the query completes within a specified timeout.
+
+    Args:
+      projectId: Project ID of the project billed for the query
+
+    Flags:
+      queryRequest: A QueryRequest resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryJobsQueryRequest(
+        projectId=projectId.decode('utf8'),
+        )
+    if FLAGS['queryRequest'].present:
+      request.queryRequest = apitools_base.JsonToMessage(messages.QueryRequest, FLAGS.queryRequest)
+    result = client.jobs.Query(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsList(apitools_base_cli.NewCmd):
+  """Command wrapping projects.List."""
+
+  usage = """projects_list"""
+
+  def __init__(self, name, fv):
+    super(ProjectsList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of results to return',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Page token, returned by a previous call, to request the next page '
+        u'of results',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Lists all projects to which you have been granted any project role.
+
+    Flags:
+      maxResults: Maximum number of results to return
+      pageToken: Page token, returned by a previous call, to request the next
+        page of results
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryProjectsListRequest(
+        )
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.projects.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TabledataInsertAll(apitools_base_cli.NewCmd):
+  """Command wrapping tabledata.InsertAll."""
+
+  usage = """tabledata_insertAll <projectId> <datasetId> <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TabledataInsertAll, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'tableDataInsertAllRequest',
+        None,
+        u'A TableDataInsertAllRequest resource to be passed as the request '
+        u'body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId, tableId):
+    """Streams data into BigQuery one record at a time without needing to run
+    a load job. Requires the WRITER dataset role.
+
+    Args:
+      projectId: Project ID of the destination table.
+      datasetId: Dataset ID of the destination table.
+      tableId: Table ID of the destination table.
+
+    Flags:
+      tableDataInsertAllRequest: A TableDataInsertAllRequest resource to be
+        passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryTabledataInsertAllRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        tableId=tableId.decode('utf8'),
+        )
+    if FLAGS['tableDataInsertAllRequest'].present:
+      request.tableDataInsertAllRequest = apitools_base.JsonToMessage(messages.TableDataInsertAllRequest, FLAGS.tableDataInsertAllRequest)
+    result = client.tabledata.InsertAll(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TabledataList(apitools_base_cli.NewCmd):
+  """Command wrapping tabledata.List."""
+
+  usage = """tabledata_list <projectId> <datasetId> <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TabledataList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of results to return',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Page token, returned by a previous call, identifying the result set',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'startIndex',
+        None,
+        u'Zero-based index of the starting row to read',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId, tableId):
+    """Retrieves table data from a specified set of rows. Requires the READER
+    dataset role.
+
+    Args:
+      projectId: Project ID of the table to read
+      datasetId: Dataset ID of the table to read
+      tableId: Table ID of the table to read
+
+    Flags:
+      maxResults: Maximum number of results to return
+      pageToken: Page token, returned by a previous call, identifying the
+        result set
+      startIndex: Zero-based index of the starting row to read
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryTabledataListRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        tableId=tableId.decode('utf8'),
+        )
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['startIndex'].present:
+      request.startIndex = int(FLAGS.startIndex)
+    result = client.tabledata.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TablesDelete(apitools_base_cli.NewCmd):
+  """Command wrapping tables.Delete."""
+
+  usage = """tables_delete <projectId> <datasetId> <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TablesDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, projectId, datasetId, tableId):
+    """Deletes the table specified by tableId from the dataset. If the table
+    contains data, all the data will be deleted.
+
+    Args:
+      projectId: Project ID of the table to delete
+      datasetId: Dataset ID of the table to delete
+      tableId: Table ID of the table to delete
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryTablesDeleteRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        tableId=tableId.decode('utf8'),
+        )
+    result = client.tables.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TablesGet(apitools_base_cli.NewCmd):
+  """Command wrapping tables.Get."""
+
+  usage = """tables_get <projectId> <datasetId> <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TablesGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, projectId, datasetId, tableId):
+    """Gets the specified table resource by table ID. This method does not
+    return the data in the table, it only returns the table resource, which
+    describes the structure of this table.
+
+    Args:
+      projectId: Project ID of the requested table
+      datasetId: Dataset ID of the requested table
+      tableId: Table ID of the requested table
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BigqueryTablesGetRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        tableId=tableId.decode('utf8'),
+        )
+    result = client.tables.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TablesInsert(apitools_base_cli.NewCmd):
+  """Command wrapping tables.Insert."""
+
+  # Usage line shown by the appcommands help output for this subcommand.
+  usage = """tables_insert <projectId> <datasetId>"""
+
+  def __init__(self, name, fv):
+    super(TablesInsert, self).__init__(name, fv)
+    # --table takes the Table resource as a JSON string.
+    flags.DEFINE_string(
+        'table',
+        None,
+        u'A Table resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId):
+    """Creates a new, empty table in the dataset.
+
+    Args:
+      projectId: Project ID of the new table
+      datasetId: Dataset ID of the new table
+
+    Flags:
+      table: A Table resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # NOTE(review): .decode('utf8') assumes byte-string argv (Python 2 only).
+    request = messages.BigqueryTablesInsertRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        )
+    # Only attach a request body when --table was explicitly passed.
+    if FLAGS['table'].present:
+      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
+    result = client.tables.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TablesList(apitools_base_cli.NewCmd):
+  """Command wrapping tables.List."""
+
+  # Usage line shown by the appcommands help output for this subcommand.
+  usage = """tables_list <projectId> <datasetId>"""
+
+  def __init__(self, name, fv):
+    super(TablesList, self).__init__(name, fv)
+    # Optional pagination controls for the list call.
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of results to return',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Page token, returned by a previous call, to request the next page '
+        u'of results',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId):
+    """Lists all tables in the specified dataset. Requires the READER dataset
+    role.
+
+    Args:
+      projectId: Project ID of the tables to list
+      datasetId: Dataset ID of the tables to list
+
+    Flags:
+      maxResults: Maximum number of results to return
+      pageToken: Page token, returned by a previous call, to request the next
+        page of results
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # NOTE(review): .decode('utf8') assumes byte-string argv (Python 2 only).
+    request = messages.BigqueryTablesListRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        )
+    # Flags are only forwarded when explicitly set on the command line.
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.tables.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TablesPatch(apitools_base_cli.NewCmd):
+  """Command wrapping tables.Patch."""
+
+  # Usage line shown by the appcommands help output for this subcommand.
+  usage = """tables_patch <projectId> <datasetId> <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TablesPatch, self).__init__(name, fv)
+    # --table takes the partial Table resource as a JSON string.
+    flags.DEFINE_string(
+        'table',
+        None,
+        u'A Table resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId, tableId):
+    """Updates information in an existing table. The update method replaces
+    the entire table resource, whereas the patch method only replaces fields
+    that are provided in the submitted table resource. This method supports
+    patch semantics.
+
+    Args:
+      projectId: Project ID of the table to update
+      datasetId: Dataset ID of the table to update
+      tableId: Table ID of the table to update
+
+    Flags:
+      table: A Table resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # NOTE(review): .decode('utf8') assumes byte-string argv (Python 2 only).
+    request = messages.BigqueryTablesPatchRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        tableId=tableId.decode('utf8'),
+        )
+    # Only attach a request body when --table was explicitly passed.
+    if FLAGS['table'].present:
+      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
+    result = client.tables.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TablesUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping tables.Update."""
+
+  # Usage line shown by the appcommands help output for this subcommand.
+  usage = """tables_update <projectId> <datasetId> <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TablesUpdate, self).__init__(name, fv)
+    # --table takes the full replacement Table resource as a JSON string.
+    flags.DEFINE_string(
+        'table',
+        None,
+        u'A Table resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, projectId, datasetId, tableId):
+    """Updates information in an existing table. The update method replaces
+    the entire table resource, whereas the patch method only replaces fields
+    that are provided in the submitted table resource.
+
+    Args:
+      projectId: Project ID of the table to update
+      datasetId: Dataset ID of the table to update
+      tableId: Table ID of the table to update
+
+    Flags:
+      table: A Table resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # NOTE(review): .decode('utf8') assumes byte-string argv (Python 2 only).
+    request = messages.BigqueryTablesUpdateRequest(
+        projectId=projectId.decode('utf8'),
+        datasetId=datasetId.decode('utf8'),
+        tableId=tableId.decode('utf8'),
+        )
+    # Only attach a request body when --table was explicitly passed.
+    if FLAGS['table'].present:
+      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
+    result = client.tables.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+def main(_):
+  """Registers every generated subcommand and configures the CLI.
+
+  Args:
+    _: Unused positional argument supplied by appcommands.
+  """
+  appcommands.AddCmd('pyshell', PyShell)
+  appcommands.AddCmd('datasets_delete', DatasetsDelete)
+  appcommands.AddCmd('datasets_get', DatasetsGet)
+  appcommands.AddCmd('datasets_insert', DatasetsInsert)
+  appcommands.AddCmd('datasets_list', DatasetsList)
+  appcommands.AddCmd('datasets_patch', DatasetsPatch)
+  appcommands.AddCmd('datasets_update', DatasetsUpdate)
+  appcommands.AddCmd('jobs_cancel', JobsCancel)
+  appcommands.AddCmd('jobs_get', JobsGet)
+  appcommands.AddCmd('jobs_getQueryResults', JobsGetQueryResults)
+  appcommands.AddCmd('jobs_insert', JobsInsert)
+  appcommands.AddCmd('jobs_list', JobsList)
+  appcommands.AddCmd('jobs_query', JobsQuery)
+  appcommands.AddCmd('projects_list', ProjectsList)
+  appcommands.AddCmd('tabledata_insertAll', TabledataInsertAll)
+  appcommands.AddCmd('tabledata_list', TabledataList)
+  appcommands.AddCmd('tables_delete', TablesDelete)
+  appcommands.AddCmd('tables_get', TablesGet)
+  appcommands.AddCmd('tables_insert', TablesInsert)
+  appcommands.AddCmd('tables_list', TablesList)
+  appcommands.AddCmd('tables_patch', TablesPatch)
+  appcommands.AddCmd('tables_update', TablesUpdate)
+
+  apitools_base_cli.SetupLogger()
+  # hasattr guard: presumably older appcommands releases lack
+  # SetDefaultCommand -- TODO(review): confirm against the pinned version.
+  if hasattr(appcommands, 'SetDefaultCommand'):
+    appcommands.SetDefaultCommand('pyshell')
+
+
+# Re-exported so external entry points (e.g. setuptools console_scripts) can
+# target this module directly -- NOTE(review): presumed purpose, confirm.
+run_main = apitools_base_cli.run_main
+
+if __name__ == '__main__':
+  appcommands.Run()
diff --git a/samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py b/samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py
new file mode 100644
index 0000000..363f470
--- /dev/null
+++ b/samples/bigquery_sample/bigquery_v2/bigquery_v2_client.py
@@ -0,0 +1,649 @@
+"""Generated client library for bigquery version v2."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+from apitools.base.py import base_api
+from samples.bigquery_sample.bigquery_v2 import bigquery_v2_messages as messages
+
+
+class BigqueryV2(base_api.BaseApiClient):
+  """Generated client library for service bigquery version v2."""
+
+  MESSAGES_MODULE = messages
+  BASE_URL = u'https://www.googleapis.com/bigquery/v2/'
+
+  _PACKAGE = u'bigquery'
+  _SCOPES = [u'https://www.googleapis.com/auth/bigquery', u'https://www.googleapis.com/auth/bigquery.insertdata', u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/cloud-platform.read-only', u'https://www.googleapis.com/auth/devstorage.full_control', u'https://www.googleapis.com/auth/devstorage.read_only', u'https://www.googleapis.com/auth/devstorage.read_write']
+  _VERSION = u'v2'
+  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
+  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _CLIENT_CLASS_NAME = u'BigqueryV2'
+  _URL_VERSION = u'v2'
+  _API_KEY = None
+
+  def __init__(self, url='', credentials=None,
+               get_credentials=True, http=None, model=None,
+               log_request=False, log_response=False,
+               credentials_args=None, default_global_params=None,
+               additional_http_headers=None):
+    """Create a new bigquery handle."""
+    url = url or self.BASE_URL
+    super(BigqueryV2, self).__init__(
+        url, credentials=credentials,
+        get_credentials=get_credentials, http=http, model=model,
+        log_request=log_request, log_response=log_response,
+        credentials_args=credentials_args,
+        default_global_params=default_global_params,
+        additional_http_headers=additional_http_headers)
+    self.datasets = self.DatasetsService(self)
+    self.jobs = self.JobsService(self)
+    self.projects = self.ProjectsService(self)
+    self.tabledata = self.TabledataService(self)
+    self.tables = self.TablesService(self)
+
+  class DatasetsService(base_api.BaseApiService):
+    """Service class for the datasets resource."""
+
+    _NAME = u'datasets'
+
+    def __init__(self, client):
+      super(BigqueryV2.DatasetsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name.
+
+      Args:
+        request: (BigqueryDatasetsDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (BigqueryDatasetsDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'bigquery.datasets.delete',
+        ordered_params=[u'projectId', u'datasetId'],
+        path_params=[u'datasetId', u'projectId'],
+        query_params=[u'deleteContents'],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}',
+        request_field='',
+        request_type_name=u'BigqueryDatasetsDeleteRequest',
+        response_type_name=u'BigqueryDatasetsDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Returns the dataset specified by datasetID.
+
+      Args:
+        request: (BigqueryDatasetsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Dataset) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.datasets.get',
+        ordered_params=[u'projectId', u'datasetId'],
+        path_params=[u'datasetId', u'projectId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}',
+        request_field='',
+        request_type_name=u'BigqueryDatasetsGetRequest',
+        response_type_name=u'Dataset',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Creates a new empty dataset.
+
+      Args:
+        request: (BigqueryDatasetsInsertRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Dataset) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'bigquery.datasets.insert',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets',
+        request_field=u'dataset',
+        request_type_name=u'BigqueryDatasetsInsertRequest',
+        response_type_name=u'Dataset',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Lists all datasets in the specified project to which you have been granted the READER dataset role.
+
+      Args:
+        request: (BigqueryDatasetsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (DatasetList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.datasets.list',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[u'all', u'filter', u'maxResults', u'pageToken'],
+        relative_path=u'projects/{projectId}/datasets',
+        request_field='',
+        request_type_name=u'BigqueryDatasetsListRequest',
+        response_type_name=u'DatasetList',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource. This method supports patch semantics.
+
+      Args:
+        request: (BigqueryDatasetsPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Dataset) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'bigquery.datasets.patch',
+        ordered_params=[u'projectId', u'datasetId'],
+        path_params=[u'datasetId', u'projectId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}',
+        request_field=u'dataset',
+        request_type_name=u'BigqueryDatasetsPatchRequest',
+        response_type_name=u'Dataset',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates information in an existing dataset. The update method replaces the entire dataset resource, whereas the patch method only replaces fields that are provided in the submitted dataset resource.
+
+      Args:
+        request: (BigqueryDatasetsUpdateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Dataset) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'bigquery.datasets.update',
+        ordered_params=[u'projectId', u'datasetId'],
+        path_params=[u'datasetId', u'projectId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}',
+        request_field=u'dataset',
+        request_type_name=u'BigqueryDatasetsUpdateRequest',
+        response_type_name=u'Dataset',
+        supports_download=False,
+    )
+
+  class JobsService(base_api.BaseApiService):
+    """Service class for the jobs resource."""
+
+    _NAME = u'jobs'
+
+    def __init__(self, client):
+      super(BigqueryV2.JobsService, self).__init__(client)
+      self._upload_configs = {
+          'Insert': base_api.ApiUploadInfo(
+              accept=['*/*'],
+              max_size=None,
+              resumable_multipart=True,
+              resumable_path=u'/resumable/upload/bigquery/v2/projects/{projectId}/jobs',
+              simple_multipart=True,
+              simple_path=u'/upload/bigquery/v2/projects/{projectId}/jobs',
+          ),
+          }
+
+    def Cancel(self, request, global_params=None):
+      """Requests that a job be cancelled. This call will return immediately, and the client will need to poll for the job status to see if the cancel completed successfully. Cancelled jobs may still incur costs.
+
+      Args:
+        request: (BigqueryJobsCancelRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (JobCancelResponse) The response message.
+      """
+      config = self.GetMethodConfig('Cancel')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Cancel.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'bigquery.jobs.cancel',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[],
+        relative_path=u'project/{projectId}/jobs/{jobId}/cancel',
+        request_field='',
+        request_type_name=u'BigqueryJobsCancelRequest',
+        response_type_name=u'JobCancelResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Returns information about a specific job. Job information is available for a six month period after creation. Requires that you're the person who ran the job, or have the Is Owner project role.
+
+      Args:
+        request: (BigqueryJobsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Job) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.jobs.get',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/jobs/{jobId}',
+        request_field='',
+        request_type_name=u'BigqueryJobsGetRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
+    def GetQueryResults(self, request, global_params=None):
+      """Retrieves the results of a query job.
+
+      Args:
+        request: (BigqueryJobsGetQueryResultsRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (GetQueryResultsResponse) The response message.
+      """
+      config = self.GetMethodConfig('GetQueryResults')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    GetQueryResults.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.jobs.getQueryResults',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[u'maxResults', u'pageToken', u'startIndex', u'timeoutMs'],
+        relative_path=u'projects/{projectId}/queries/{jobId}',
+        request_field='',
+        request_type_name=u'BigqueryJobsGetQueryResultsRequest',
+        response_type_name=u'GetQueryResultsResponse',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None, upload=None):
+      """Starts a new asynchronous job. Requires the Can View project role.
+
+      Args:
+        request: (BigqueryJobsInsertRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        upload: (Upload, default: None) If present, upload
+            this stream with the request.
+      Returns:
+        (Job) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      upload_config = self.GetUploadConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          upload=upload, upload_config=upload_config)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'bigquery.jobs.insert',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/jobs',
+        request_field=u'job',
+        request_type_name=u'BigqueryJobsInsertRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Lists all jobs that you started in the specified project. Job information is available for a six month period after creation. The job list is sorted in reverse chronological order, by job creation time. Requires the Can View project role, or the Is Owner project role if you set the allUsers property.
+
+      Args:
+        request: (BigqueryJobsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (JobList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.jobs.list',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[u'allUsers', u'maxResults', u'pageToken', u'projection', u'stateFilter'],
+        relative_path=u'projects/{projectId}/jobs',
+        request_field='',
+        request_type_name=u'BigqueryJobsListRequest',
+        response_type_name=u'JobList',
+        supports_download=False,
+    )
+
+    def Query(self, request, global_params=None):
+      """Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.
+
+      Args:
+        request: (BigqueryJobsQueryRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (QueryResponse) The response message.
+      """
+      config = self.GetMethodConfig('Query')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Query.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'bigquery.jobs.query',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/queries',
+        request_field=u'queryRequest',
+        request_type_name=u'BigqueryJobsQueryRequest',
+        response_type_name=u'QueryResponse',
+        supports_download=False,
+    )
+
+  class ProjectsService(base_api.BaseApiService):
+    """Service class for the projects resource."""
+
+    _NAME = u'projects'
+
+    def __init__(self, client):
+      super(BigqueryV2.ProjectsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def List(self, request, global_params=None):
+      """Lists all projects to which you have been granted any project role.
+
+      Args:
+        request: (BigqueryProjectsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ProjectList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.projects.list',
+        ordered_params=[],
+        path_params=[],
+        query_params=[u'maxResults', u'pageToken'],
+        relative_path=u'projects',
+        request_field='',
+        request_type_name=u'BigqueryProjectsListRequest',
+        response_type_name=u'ProjectList',
+        supports_download=False,
+    )
+
+  class TabledataService(base_api.BaseApiService):
+    """Service class for the tabledata resource."""
+
+    _NAME = u'tabledata'
+
+    def __init__(self, client):
+      super(BigqueryV2.TabledataService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def InsertAll(self, request, global_params=None):
+      """Streams data into BigQuery one record at a time without needing to run a load job. Requires the WRITER dataset role.
+
+      Args:
+        request: (BigqueryTabledataInsertAllRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TableDataInsertAllResponse) The response message.
+      """
+      config = self.GetMethodConfig('InsertAll')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    InsertAll.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'bigquery.tabledata.insertAll',
+        ordered_params=[u'projectId', u'datasetId', u'tableId'],
+        path_params=[u'datasetId', u'projectId', u'tableId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}/tables/{tableId}/insertAll',
+        request_field=u'tableDataInsertAllRequest',
+        request_type_name=u'BigqueryTabledataInsertAllRequest',
+        response_type_name=u'TableDataInsertAllResponse',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves table data from a specified set of rows. Requires the READER dataset role.
+
+      Args:
+        request: (BigqueryTabledataListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TableDataList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.tabledata.list',
+        ordered_params=[u'projectId', u'datasetId', u'tableId'],
+        path_params=[u'datasetId', u'projectId', u'tableId'],
+        query_params=[u'maxResults', u'pageToken', u'startIndex'],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}/tables/{tableId}/data',
+        request_field='',
+        request_type_name=u'BigqueryTabledataListRequest',
+        response_type_name=u'TableDataList',
+        supports_download=False,
+    )
+
+  class TablesService(base_api.BaseApiService):
+    """Service class for the tables resource."""
+
+    _NAME = u'tables'
+
+    def __init__(self, client):
+      super(BigqueryV2.TablesService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Deletes the table specified by tableId from the dataset. If the table contains data, all the data will be deleted.
+
+      Args:
+        request: (BigqueryTablesDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (BigqueryTablesDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'bigquery.tables.delete',
+        ordered_params=[u'projectId', u'datasetId', u'tableId'],
+        path_params=[u'datasetId', u'projectId', u'tableId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}/tables/{tableId}',
+        request_field='',
+        request_type_name=u'BigqueryTablesDeleteRequest',
+        response_type_name=u'BigqueryTablesDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table.
+
+      Args:
+        request: (BigqueryTablesGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.tables.get',
+        ordered_params=[u'projectId', u'datasetId', u'tableId'],
+        path_params=[u'datasetId', u'projectId', u'tableId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}/tables/{tableId}',
+        request_field='',
+        request_type_name=u'BigqueryTablesGetRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Creates a new, empty table in the dataset.
+
+      Args:
+        request: (BigqueryTablesInsertRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'bigquery.tables.insert',
+        ordered_params=[u'projectId', u'datasetId'],
+        path_params=[u'datasetId', u'projectId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}/tables',
+        request_field=u'table',
+        request_type_name=u'BigqueryTablesInsertRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Lists all tables in the specified dataset. Requires the READER dataset role.
+
+      Args:
+        request: (BigqueryTablesListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TableList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'bigquery.tables.list',
+        ordered_params=[u'projectId', u'datasetId'],
+        path_params=[u'datasetId', u'projectId'],
+        query_params=[u'maxResults', u'pageToken'],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}/tables',
+        request_field='',
+        request_type_name=u'BigqueryTablesListRequest',
+        response_type_name=u'TableList',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource. This method supports patch semantics.
+
+      Args:
+        request: (BigqueryTablesPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'bigquery.tables.patch',
+        ordered_params=[u'projectId', u'datasetId', u'tableId'],
+        path_params=[u'datasetId', u'projectId', u'tableId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}/tables/{tableId}',
+        request_field=u'table',
+        request_type_name=u'BigqueryTablesPatchRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates information in an existing table. The update method replaces the entire table resource, whereas the patch method only replaces fields that are provided in the submitted table resource.
+
+      Args:
+        request: (BigqueryTablesUpdateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'bigquery.tables.update',
+        ordered_params=[u'projectId', u'datasetId', u'tableId'],
+        path_params=[u'datasetId', u'projectId', u'tableId'],
+        query_params=[],
+        relative_path=u'projects/{projectId}/datasets/{datasetId}/tables/{tableId}',
+        request_field=u'table',
+        request_type_name=u'BigqueryTablesUpdateRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
diff --git a/samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py b/samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py
new file mode 100644
index 0000000..9b68f9c
--- /dev/null
+++ b/samples/bigquery_sample/bigquery_v2/bigquery_v2_messages.py
@@ -0,0 +1,2050 @@
+"""Generated message classes for bigquery version v2.
+
+A data platform for customers to create, manage, share and query data.
+"""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+from apitools.base.protorpclite import messages as _messages
+from apitools.base.py import encoding
+from apitools.base.py import extra_types
+
+
+package = 'bigquery'
+
+
+class BigqueryDatasetsDeleteRequest(_messages.Message):
+  """A BigqueryDatasetsDeleteRequest object.
+
+  Fields:
+    datasetId: Dataset ID of dataset being deleted
+    deleteContents: If True, delete all the tables in the dataset. If False
+      and the dataset contains tables, the request will fail. Default is False
+    projectId: Project ID of the dataset being deleted
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  deleteContents = _messages.BooleanField(2)
+  projectId = _messages.StringField(3, required=True)
+
+
+class BigqueryDatasetsDeleteResponse(_messages.Message):
+  """An empty BigqueryDatasetsDelete response."""
+
+
+class BigqueryDatasetsGetRequest(_messages.Message):
+  """A BigqueryDatasetsGetRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the requested dataset
+    projectId: Project ID of the requested dataset
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+
+
+class BigqueryDatasetsInsertRequest(_messages.Message):
+  """A BigqueryDatasetsInsertRequest object.
+
+  Fields:
+    dataset: A Dataset resource to be passed as the request body.
+    projectId: Project ID of the new dataset
+  """
+
+  dataset = _messages.MessageField('Dataset', 1)
+  projectId = _messages.StringField(2, required=True)
+
+
+class BigqueryDatasetsListRequest(_messages.Message):
+  """A BigqueryDatasetsListRequest object.
+
+  Fields:
+    all: Whether to list all datasets, including hidden ones
+    filter: An expression for filtering the results of the request by label.
+      The syntax is "labels.[:]". Multiple filters can be ANDed together by
+      connecting with a space. Example: "labels.department:receiving
+      labels.active". See https://cloud.google.com/bigquery/docs/labeling-
+      datasets#filtering_datasets_using_labels for details.
+    maxResults: The maximum number of results to return
+    pageToken: Page token, returned by a previous call, to request the next
+      page of results
+    projectId: Project ID of the datasets to be listed
+  """
+
+  all = _messages.BooleanField(1)
+  filter = _messages.StringField(2)
+  maxResults = _messages.IntegerField(3, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(4)
+  projectId = _messages.StringField(5, required=True)
+
+
+class BigqueryDatasetsPatchRequest(_messages.Message):
+  """A BigqueryDatasetsPatchRequest object.
+
+  Fields:
+    dataset: A Dataset resource to be passed as the request body.
+    datasetId: Dataset ID of the dataset being updated
+    projectId: Project ID of the dataset being updated
+  """
+
+  dataset = _messages.MessageField('Dataset', 1)
+  datasetId = _messages.StringField(2, required=True)
+  projectId = _messages.StringField(3, required=True)
+
+
+class BigqueryDatasetsUpdateRequest(_messages.Message):
+  """A BigqueryDatasetsUpdateRequest object.
+
+  Fields:
+    dataset: A Dataset resource to be passed as the request body.
+    datasetId: Dataset ID of the dataset being updated
+    projectId: Project ID of the dataset being updated
+  """
+
+  dataset = _messages.MessageField('Dataset', 1)
+  datasetId = _messages.StringField(2, required=True)
+  projectId = _messages.StringField(3, required=True)
+
+
+class BigqueryJobsCancelRequest(_messages.Message):
+  """A BigqueryJobsCancelRequest object.
+
+  Fields:
+    jobId: [Required] Job ID of the job to cancel
+    projectId: [Required] Project ID of the job to cancel
+  """
+
+  jobId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+
+
+class BigqueryJobsGetQueryResultsRequest(_messages.Message):
+  """A BigqueryJobsGetQueryResultsRequest object.
+
+  Fields:
+    jobId: [Required] Job ID of the query job
+    maxResults: Maximum number of results to read
+    pageToken: Page token, returned by a previous call, to request the next
+      page of results
+    projectId: [Required] Project ID of the query job
+    startIndex: Zero-based index of the starting row
+    timeoutMs: How long to wait for the query to complete, in milliseconds,
+      before returning. Default is 10 seconds. If the timeout passes before
+      the job completes, the 'jobComplete' field in the response will be false
+  """
+
+  jobId = _messages.StringField(1, required=True)
+  maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(3)
+  projectId = _messages.StringField(4, required=True)
+  startIndex = _messages.IntegerField(5, variant=_messages.Variant.UINT64)
+  timeoutMs = _messages.IntegerField(6, variant=_messages.Variant.UINT32)
+
+
+class BigqueryJobsGetRequest(_messages.Message):
+  """A BigqueryJobsGetRequest object.
+
+  Fields:
+    jobId: [Required] Job ID of the requested job
+    projectId: [Required] Project ID of the requested job
+  """
+
+  jobId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+
+
+class BigqueryJobsInsertRequest(_messages.Message):
+  """A BigqueryJobsInsertRequest object.
+
+  Fields:
+    job: A Job resource to be passed as the request body.
+    projectId: Project ID of the project that will be billed for the job
+  """
+
+  job = _messages.MessageField('Job', 1)
+  projectId = _messages.StringField(2, required=True)
+
+
+class BigqueryJobsListRequest(_messages.Message):
+  """A BigqueryJobsListRequest object.
+
+  Enums:
+    ProjectionValueValuesEnum: Restrict information returned to a set of
+      selected fields
+    StateFilterValueValuesEnum: Filter for job state
+
+  Fields:
+    allUsers: Whether to display jobs owned by all users in the project.
+      Default false
+    maxResults: Maximum number of results to return
+    pageToken: Page token, returned by a previous call, to request the next
+      page of results
+    projectId: Project ID of the jobs to list
+    projection: Restrict information returned to a set of selected fields
+    stateFilter: Filter for job state
+  """
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Restrict information returned to a set of selected fields
+
+    Values:
+      full: Includes all job data
+      minimal: Does not include the job configuration
+    """
+    full = 0
+    minimal = 1
+
+  class StateFilterValueValuesEnum(_messages.Enum):
+    """Filter for job state
+
+    Values:
+      done: Finished jobs
+      pending: Pending jobs
+      running: Running jobs
+    """
+    done = 0
+    pending = 1
+    running = 2
+
+  allUsers = _messages.BooleanField(1)
+  maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(3)
+  projectId = _messages.StringField(4, required=True)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 5)
+  stateFilter = _messages.EnumField('StateFilterValueValuesEnum', 6, repeated=True)
+
+
+class BigqueryJobsQueryRequest(_messages.Message):
+  """A BigqueryJobsQueryRequest object.
+
+  Fields:
+    projectId: Project ID of the project billed for the query
+    queryRequest: A QueryRequest resource to be passed as the request body.
+  """
+
+  projectId = _messages.StringField(1, required=True)
+  queryRequest = _messages.MessageField('QueryRequest', 2)
+
+
+class BigqueryProjectsListRequest(_messages.Message):
+  """A BigqueryProjectsListRequest object.
+
+  Fields:
+    maxResults: Maximum number of results to return
+    pageToken: Page token, returned by a previous call, to request the next
+      page of results
+  """
+
+  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(2)
+
+
+class BigqueryTabledataInsertAllRequest(_messages.Message):
+  """A BigqueryTabledataInsertAllRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the destination table.
+    projectId: Project ID of the destination table.
+    tableDataInsertAllRequest: A TableDataInsertAllRequest resource to be
+      passed as the request body.
+    tableId: Table ID of the destination table.
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+  tableDataInsertAllRequest = _messages.MessageField('TableDataInsertAllRequest', 3)
+  tableId = _messages.StringField(4, required=True)
+
+
+class BigqueryTabledataListRequest(_messages.Message):
+  """A BigqueryTabledataListRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the table to read
+    maxResults: Maximum number of results to return
+    pageToken: Page token, returned by a previous call, identifying the result
+      set
+    projectId: Project ID of the table to read
+    startIndex: Zero-based index of the starting row to read
+    tableId: Table ID of the table to read
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(3)
+  projectId = _messages.StringField(4, required=True)
+  startIndex = _messages.IntegerField(5, variant=_messages.Variant.UINT64)
+  tableId = _messages.StringField(6, required=True)
+
+
+class BigqueryTablesDeleteRequest(_messages.Message):
+  """A BigqueryTablesDeleteRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the table to delete
+    projectId: Project ID of the table to delete
+    tableId: Table ID of the table to delete
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+  tableId = _messages.StringField(3, required=True)
+
+
+class BigqueryTablesDeleteResponse(_messages.Message):
+  """An empty BigqueryTablesDelete response."""
+
+
+class BigqueryTablesGetRequest(_messages.Message):
+  """A BigqueryTablesGetRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the requested table
+    projectId: Project ID of the requested table
+    tableId: Table ID of the requested table
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+  tableId = _messages.StringField(3, required=True)
+
+
+class BigqueryTablesInsertRequest(_messages.Message):
+  """A BigqueryTablesInsertRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the new table
+    projectId: Project ID of the new table
+    table: A Table resource to be passed as the request body.
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+  table = _messages.MessageField('Table', 3)
+
+
+class BigqueryTablesListRequest(_messages.Message):
+  """A BigqueryTablesListRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the tables to list
+    maxResults: Maximum number of results to return
+    pageToken: Page token, returned by a previous call, to request the next
+      page of results
+    projectId: Project ID of the tables to list
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(3)
+  projectId = _messages.StringField(4, required=True)
+
+
+class BigqueryTablesPatchRequest(_messages.Message):
+  """A BigqueryTablesPatchRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the table to update
+    projectId: Project ID of the table to update
+    table: A Table resource to be passed as the request body.
+    tableId: Table ID of the table to update
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+  table = _messages.MessageField('Table', 3)
+  tableId = _messages.StringField(4, required=True)
+
+
+class BigqueryTablesUpdateRequest(_messages.Message):
+  """A BigqueryTablesUpdateRequest object.
+
+  Fields:
+    datasetId: Dataset ID of the table to update
+    projectId: Project ID of the table to update
+    table: A Table resource to be passed as the request body.
+    tableId: Table ID of the table to update
+  """
+
+  datasetId = _messages.StringField(1, required=True)
+  projectId = _messages.StringField(2, required=True)
+  table = _messages.MessageField('Table', 3)
+  tableId = _messages.StringField(4, required=True)
+
+
+class BigtableColumn(_messages.Message):
+  """A BigtableColumn object.
+
+  Fields:
+    encoding: [Optional] The encoding of the values when the type is not
+      STRING. Acceptable encoding values are: TEXT - indicates values are
+      alphanumeric text strings. BINARY - indicates values are encoded using
+      HBase Bytes.toBytes family of functions. 'encoding' can also be set at
+      the column family level. However, the setting at this level takes
+      precedence if 'encoding' is set at both levels.
+    fieldName: [Optional] If the qualifier is not a valid BigQuery field
+      identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier
+      must be provided as the column field name and is used as field name in
+      queries.
+    onlyReadLatest: [Optional] If this is set, only the latest version of
+      value in this column are exposed. 'onlyReadLatest' can also be set at
+      the column family level. However, the setting at this level takes
+      precedence if 'onlyReadLatest' is set at both levels.
+    qualifierEncoded: [Required] Qualifier of the column. Columns in the
+      parent column family that has this exact qualifier are exposed as .
+      field. If the qualifier is valid UTF-8 string, it can be specified in
+      the qualifier_string field. Otherwise, a base-64 encoded value must be
+      set to qualifier_encoded. The column field name is the same as the
+      column qualifier. However, if the qualifier is not a valid BigQuery
+      field identifier i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid
+      identifier must be provided as field_name.
+    qualifierString: A string attribute.
+    type: [Optional] The type to convert the value in cells of this column.
+      The values are expected to be encoded using HBase Bytes.toBytes function
+      when using the BINARY encoding value. Following BigQuery types are
+      allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN Default
+      type is BYTES. 'type' can also be set at the column family level.
+      However, the setting at this level takes precedence if 'type' is set at
+      both levels.
+  """
+
+  encoding = _messages.StringField(1)
+  fieldName = _messages.StringField(2)
+  onlyReadLatest = _messages.BooleanField(3)
+  qualifierEncoded = _messages.BytesField(4)
+  qualifierString = _messages.StringField(5)
+  type = _messages.StringField(6)
+
+
+class BigtableColumnFamily(_messages.Message):
+  """A BigtableColumnFamily object.
+
+  Fields:
+    columns: [Optional] Lists of columns that should be exposed as individual
+      fields as opposed to a list of (column name, value) pairs. All columns
+      whose qualifier matches a qualifier in this list can be accessed as ..
+      Other columns can be accessed as a list through .Column field.
+    encoding: [Optional] The encoding of the values when the type is not
+      STRING. Acceptable encoding values are: TEXT - indicates values are
+      alphanumeric text strings. BINARY - indicates values are encoded using
+      HBase Bytes.toBytes family of functions. This can be overridden for a
+      specific column by listing that column in 'columns' and specifying an
+      encoding for it.
+    familyId: Identifier of the column family.
+    onlyReadLatest: [Optional] If this is set only the latest version of value
+      are exposed for all columns in this column family. This can be
+      overridden for a specific column by listing that column in 'columns' and
+      specifying a different setting for that column.
+    type: [Optional] The type to convert the value in cells of this column
+      family. The values are expected to be encoded using HBase Bytes.toBytes
+      function when using the BINARY encoding value. Following BigQuery types
+      are allowed (case-sensitive) - BYTES STRING INTEGER FLOAT BOOLEAN
+      Default type is BYTES. This can be overridden for a specific column by
+      listing that column in 'columns' and specifying a type for it.
+  """
+
+  columns = _messages.MessageField('BigtableColumn', 1, repeated=True)
+  encoding = _messages.StringField(2)
+  familyId = _messages.StringField(3)
+  onlyReadLatest = _messages.BooleanField(4)
+  type = _messages.StringField(5)
+
+
+class BigtableOptions(_messages.Message):
+  """A BigtableOptions object.
+
+  Fields:
+    columnFamilies: [Optional] List of column families to expose in the table
+      schema along with their types. This list restricts the column families
+      that can be referenced in queries and specifies their value types. You
+      can use this list to do type conversions - see the 'type' field for more
+      details. If you leave this list empty, all column families are present
+      in the table schema and their values are read as BYTES. During a query
+      only the column families referenced in that query are read from
+      Bigtable.
+    ignoreUnspecifiedColumnFamilies: [Optional] If field is true, then the
+      column families that are not specified in columnFamilies list are not
+      exposed in the table schema. Otherwise, they are read with BYTES type
+      values. The default value is false.
+    readRowkeyAsString: [Optional] If field is true, then the rowkey column
+      families will be read and converted to string. Otherwise they are read
+      with BYTES type values and users need to manually cast them with CAST if
+      necessary. The default value is false.
+  """
+
+  columnFamilies = _messages.MessageField('BigtableColumnFamily', 1, repeated=True)
+  ignoreUnspecifiedColumnFamilies = _messages.BooleanField(2)
+  readRowkeyAsString = _messages.BooleanField(3)
+
+
+class CsvOptions(_messages.Message):
+  """A CsvOptions object.
+
+  Fields:
+    allowJaggedRows: [Optional] Indicates if BigQuery should accept rows that
+      are missing trailing optional columns. If true, BigQuery treats missing
+      trailing columns as null values. If false, records with missing trailing
+      columns are treated as bad records, and if there are too many bad
+      records, an invalid error is returned in the job result. The default
+      value is false.
+    allowQuotedNewlines: [Optional] Indicates if BigQuery should allow quoted
+      data sections that contain newline characters in a CSV file. The default
+      value is false.
+    encoding: [Optional] The character encoding of the data. The supported
+      values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery
+      decodes the data after the raw, binary data has been split using the
+      values of the quote and fieldDelimiter properties.
+    fieldDelimiter: [Optional] The separator for fields in a CSV file.
+      BigQuery converts the string to ISO-8859-1 encoding, and then uses the
+      first byte of the encoded string to split the data in its raw, binary
+      state. BigQuery also supports the escape sequence "\t" to specify a tab
+      separator. The default value is a comma (',').
+    quote: [Optional] The value that is used to quote data sections in a CSV
+      file. BigQuery converts the string to ISO-8859-1 encoding, and then uses
+      the first byte of the encoded string to split the data in its raw,
+      binary state. The default value is a double-quote ('"'). If your data
+      does not contain quoted sections, set the property value to an empty
+      string. If your data contains quoted newline characters, you must also
+      set the allowQuotedNewlines property to true.
+    skipLeadingRows: [Optional] The number of rows at the top of a CSV file
+      that BigQuery will skip when reading the data. The default value is 0.
+      This property is useful if you have header rows in the file that should
+      be skipped.
+  """
+
+  allowJaggedRows = _messages.BooleanField(1)
+  allowQuotedNewlines = _messages.BooleanField(2)
+  encoding = _messages.StringField(3)
+  fieldDelimiter = _messages.StringField(4)
+  quote = _messages.StringField(5, default=u'"')
+  skipLeadingRows = _messages.IntegerField(6)
+
+
+class Dataset(_messages.Message):
+  """A Dataset object.
+
+  Messages:
+    AccessValueListEntry: A AccessValueListEntry object.
+    LabelsValue: [Experimental] The labels associated with this dataset. You
+      can use these to organize and group your datasets. You can set this
+      property when inserting or updating a dataset. Label keys and values can
+      be no longer than 63 characters, can only contain letters, numeric
+      characters, underscores and dashes. International characters are
+      allowed. Label values are optional. Label keys must start with a letter
+      and must be unique within a dataset. Both keys and values are
+      additionally constrained to be <= 128 bytes in size.
+
+  Fields:
+    access: [Optional] An array of objects that define dataset access for one
+      or more entities. You can set this property when inserting or updating a
+      dataset in order to control who is allowed to access the data. If
+      unspecified at dataset creation time, BigQuery adds default dataset
+      access for the following entities: access.specialGroup: projectReaders;
+      access.role: READER; access.specialGroup: projectWriters; access.role:
+      WRITER; access.specialGroup: projectOwners; access.role: OWNER;
+      access.userByEmail: [dataset creator email]; access.role: OWNER;
+    creationTime: [Output-only] The time when this dataset was created, in
+      milliseconds since the epoch.
+    datasetReference: [Required] A reference that identifies the dataset.
+    defaultTableExpirationMs: [Optional] The default lifetime of all tables in
+      the dataset, in milliseconds. The minimum value is 3600000 milliseconds
+      (one hour). Once this property is set, all newly-created tables in the
+      dataset will have an expirationTime property set to the creation time
+      plus the value in this property, and changing the value will only affect
+      new tables, not existing ones. When the expirationTime for a given table
+      is reached, that table will be deleted automatically. If a table's
+      expirationTime is modified or removed before the table expires, or if
+      you provide an explicit expirationTime when creating a table, that value
+      takes precedence over the default expiration time indicated by this
+      property.
+    description: [Optional] A user-friendly description of the dataset.
+    etag: [Output-only] A hash of the resource.
+    friendlyName: [Optional] A descriptive name for the dataset.
+    id: [Output-only] The fully-qualified unique name of the dataset in the
+      format projectId:datasetId. The dataset name without the project name is
+      given in the datasetId field. When creating a new dataset, leave this
+      field blank, and instead specify the datasetId field.
+    kind: [Output-only] The resource type.
+    labels: [Experimental] The labels associated with this dataset. You can
+      use these to organize and group your datasets. You can set this property
+      when inserting or updating a dataset. Label keys and values can be no
+      longer than 63 characters, can only contain letters, numeric characters,
+      underscores and dashes. International characters are allowed. Label
+      values are optional. Label keys must start with a letter and must be
+      unique within a dataset. Both keys and values are additionally
+      constrained to be <= 128 bytes in size.
+    lastModifiedTime: [Output-only] The date when this dataset or any of its
+      tables was last modified, in milliseconds since the epoch.
+    location: [Experimental] The geographic location where the dataset should
+      reside. Possible values include EU and US. The default value is US.
+    selfLink: [Output-only] A URL that can be used to access the resource
+      again. You can use this URL in Get or Update requests to the resource.
+  """
+
+  class AccessValueListEntry(_messages.Message):
+    """A AccessValueListEntry object.
+
+    Fields:
+      domain: [Pick one] A domain to grant access to. Any users signed in with
+        the domain specified will be granted the specified access. Example:
+        "example.com".
+      groupByEmail: [Pick one] An email address of a Google Group to grant
+        access to.
+      role: [Required] Describes the rights granted to the user specified by
+        the other member of the access object. The following string values are
+        supported: READER, WRITER, OWNER.
+      specialGroup: [Pick one] A special group to grant access to. Possible
+        values include: projectOwners: Owners of the enclosing project.
+        projectReaders: Readers of the enclosing project. projectWriters:
+        Writers of the enclosing project. allAuthenticatedUsers: All
+        authenticated BigQuery users.
+      userByEmail: [Pick one] An email address of a user to grant access to.
+        For example: fred@example.com.
+      view: [Pick one] A view from a different dataset to grant access to.
+        Queries executed against that view will have read access to tables in
+        this dataset. The role field is not required when this field is set.
+        If that view is updated by any user, access to the view needs to be
+        granted again via an update operation.
+    """
+
+    domain = _messages.StringField(1)
+    groupByEmail = _messages.StringField(2)
+    role = _messages.StringField(3)
+    specialGroup = _messages.StringField(4)
+    userByEmail = _messages.StringField(5)
+    view = _messages.MessageField('TableReference', 6)
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class LabelsValue(_messages.Message):
+    """[Experimental] The labels associated with this dataset. You can use
+    these to organize and group your datasets. You can set this property when
+    inserting or updating a dataset. Label keys and values can be no longer
+    than 63 characters, can only contain letters, numeric characters,
+    underscores and dashes. International characters are allowed. Label values
+    are optional. Label keys must start with a letter and must be unique
+    within a dataset. Both keys and values are additionally constrained to be
+    <= 128 bytes in size.
+
+    Messages:
+      AdditionalProperty: An additional property for a LabelsValue object.
+
+    Fields:
+      additionalProperties: Additional properties of type LabelsValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a LabelsValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A string attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.StringField(2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  access = _messages.MessageField('AccessValueListEntry', 1, repeated=True)
+  creationTime = _messages.IntegerField(2)
+  datasetReference = _messages.MessageField('DatasetReference', 3)
+  defaultTableExpirationMs = _messages.IntegerField(4)
+  description = _messages.StringField(5)
+  etag = _messages.StringField(6)
+  friendlyName = _messages.StringField(7)
+  id = _messages.StringField(8)
+  kind = _messages.StringField(9, default=u'bigquery#dataset')
+  labels = _messages.MessageField('LabelsValue', 10)
+  lastModifiedTime = _messages.IntegerField(11)
+  location = _messages.StringField(12)
+  selfLink = _messages.StringField(13)
+
+
+class DatasetList(_messages.Message):
+  """A DatasetList object.
+
+  Messages:
+    DatasetsValueListEntry: A DatasetsValueListEntry object.
+
+  Fields:
+    datasets: An array of the dataset resources in the project. Each resource
+      contains basic information. For full information about a particular
+      dataset resource, use the Datasets: get method. This property is omitted
+      when there are no datasets in the project.
+    etag: A hash value of the results page. You can use this property to
+      determine if the page has changed since the last request.
+    kind: The list type. This property always returns the value
+      "bigquery#datasetList".
+    nextPageToken: A token that can be used to request the next results page.
+      This property is omitted on the final results page.
+  """
+
+  class DatasetsValueListEntry(_messages.Message):
+    """A DatasetsValueListEntry object.
+
+    Messages:
+      LabelsValue: [Experimental] The labels associated with this dataset. You
+        can use these to organize and group your datasets.
+
+    Fields:
+      datasetReference: The dataset reference. Use this property to access
+        specific parts of the dataset's ID, such as project ID or dataset ID.
+      friendlyName: A descriptive name for the dataset, if one exists.
+      id: The fully-qualified, unique, opaque ID of the dataset.
+      kind: The resource type. This property always returns the value
+        "bigquery#dataset".
+      labels: [Experimental] The labels associated with this dataset. You can
+        use these to organize and group your datasets.
+    """
+
+    @encoding.MapUnrecognizedFields('additionalProperties')
+    class LabelsValue(_messages.Message):
+      """[Experimental] The labels associated with this dataset. You can use
+      these to organize and group your datasets.
+
+      Messages:
+        AdditionalProperty: An additional property for a LabelsValue object.
+
+      Fields:
+        additionalProperties: Additional properties of type LabelsValue
+      """
+
+      class AdditionalProperty(_messages.Message):
+        """An additional property for a LabelsValue object.
+
+        Fields:
+          key: Name of the additional property.
+          value: A string attribute.
+        """
+
+        key = _messages.StringField(1)
+        value = _messages.StringField(2)
+
+      additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+    datasetReference = _messages.MessageField('DatasetReference', 1)
+    friendlyName = _messages.StringField(2)
+    id = _messages.StringField(3)
+    kind = _messages.StringField(4, default=u'bigquery#dataset')
+    labels = _messages.MessageField('LabelsValue', 5)
+
+  datasets = _messages.MessageField('DatasetsValueListEntry', 1, repeated=True)
+  etag = _messages.StringField(2)
+  kind = _messages.StringField(3, default=u'bigquery#datasetList')
+  nextPageToken = _messages.StringField(4)
+
+
+class DatasetReference(_messages.Message):
+  """A DatasetReference object.
+
+  Fields:
+    datasetId: [Required] A unique ID for this dataset, without the project
+      name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or
+      underscores (_). The maximum length is 1,024 characters.
+    projectId: [Optional] The ID of the project containing this dataset.
+  """
+
+  datasetId = _messages.StringField(1)
+  projectId = _messages.StringField(2)
+
+
+class ErrorProto(_messages.Message):
+  """A ErrorProto object.
+
+  Fields:
+    debugInfo: Debugging information. This property is internal to Google and
+      should not be used.
+    location: Specifies where the error occurred, if present.
+    message: A human-readable description of the error.
+    reason: A short error code that summarizes the error.
+  """
+
+  debugInfo = _messages.StringField(1)
+  location = _messages.StringField(2)
+  message = _messages.StringField(3)
+  reason = _messages.StringField(4)
+
+
+class ExplainQueryStage(_messages.Message):
+  """A ExplainQueryStage object.
+
+  Fields:
+    computeRatioAvg: Relative amount of time the average shard spent on CPU-
+      bound tasks.
+    computeRatioMax: Relative amount of time the slowest shard spent on CPU-
+      bound tasks.
+    id: Unique ID for stage within plan.
+    name: Human-readable name for stage.
+    readRatioAvg: Relative amount of time the average shard spent reading
+      input.
+    readRatioMax: Relative amount of time the slowest shard spent reading
+      input.
+    recordsRead: Number of records read into the stage.
+    recordsWritten: Number of records written by the stage.
+    steps: List of operations within the stage in dependency order
+      (approximately chronological).
+    waitRatioAvg: Relative amount of time the average shard spent waiting to
+      be scheduled.
+    waitRatioMax: Relative amount of time the slowest shard spent waiting to
+      be scheduled.
+    writeRatioAvg: Relative amount of time the average shard spent on writing
+      output.
+    writeRatioMax: Relative amount of time the slowest shard spent on writing
+      output.
+  """
+
+  computeRatioAvg = _messages.FloatField(1)
+  computeRatioMax = _messages.FloatField(2)
+  id = _messages.IntegerField(3)
+  name = _messages.StringField(4)
+  readRatioAvg = _messages.FloatField(5)
+  readRatioMax = _messages.FloatField(6)
+  recordsRead = _messages.IntegerField(7)
+  recordsWritten = _messages.IntegerField(8)
+  steps = _messages.MessageField('ExplainQueryStep', 9, repeated=True)
+  waitRatioAvg = _messages.FloatField(10)
+  waitRatioMax = _messages.FloatField(11)
+  writeRatioAvg = _messages.FloatField(12)
+  writeRatioMax = _messages.FloatField(13)
+
+
+class ExplainQueryStep(_messages.Message):
+  """A ExplainQueryStep object.
+
+  Fields:
+    kind: Machine-readable operation type.
+    substeps: Human-readable stage descriptions.
+  """
+
+  kind = _messages.StringField(1)
+  substeps = _messages.StringField(2, repeated=True)
+
+
+class ExternalDataConfiguration(_messages.Message):
+  """A ExternalDataConfiguration object.
+
+  Fields:
+    autodetect: [Experimental] Try to detect schema and format options
+      automatically. Any option specified explicitly will be honored.
+    bigtableOptions: [Optional] Additional options if sourceFormat is set to
+      BIGTABLE.
+    compression: [Optional] The compression type of the data source. Possible
+      values include GZIP and NONE. The default value is NONE. This setting is
+      ignored for Google Cloud Bigtable, Google Cloud Datastore backups and
+      Avro formats.
+    csvOptions: Additional properties to set if sourceFormat is set to CSV.
+    googleSheetsOptions: [Optional] Additional options if sourceFormat is set
+      to GOOGLE_SHEETS.
+    ignoreUnknownValues: [Optional] Indicates if BigQuery should allow extra
+      values that are not represented in the table schema. If true, the extra
+      values are ignored. If false, records with extra columns are treated as
+      bad records, and if there are too many bad records, an invalid error is
+      returned in the job result. The default value is false. The sourceFormat
+      property determines what BigQuery treats as an extra value: CSV:
+      Trailing columns JSON: Named values that don't match any column names
+      Google Cloud Bigtable: This setting is ignored. Google Cloud Datastore
+      backups: This setting is ignored. Avro: This setting is ignored.
+    maxBadRecords: [Optional] The maximum number of bad records that BigQuery
+      can ignore when reading data. If the number of bad records exceeds this
+      value, an invalid error is returned in the job result. The default value
+      is 0, which requires that all records are valid. This setting is ignored
+      for Google Cloud Bigtable, Google Cloud Datastore backups and Avro
+      formats.
+    schema: [Optional] The schema for the data. Schema is required for CSV and
+      JSON formats. Schema is disallowed for Google Cloud Bigtable, Cloud
+      Datastore backups, and Avro formats.
+    sourceFormat: [Required] The data format. For CSV files, specify "CSV".
+      For Google sheets, specify "GOOGLE_SHEETS". For newline-delimited JSON,
+      specify "NEWLINE_DELIMITED_JSON". For Avro files, specify "AVRO". For
+      Google Cloud Datastore backups, specify "DATASTORE_BACKUP".
+      [Experimental] For Google Cloud Bigtable, specify "BIGTABLE". Please
+      note that reading from Google Cloud Bigtable is experimental and has to
+      be enabled for your project. Please contact Google Cloud Support to
+      enable this for your project.
+    sourceUris: [Required] The fully-qualified URIs that point to your data in
+      Google Cloud. For Google Cloud Storage URIs: Each URI can contain one
+      '*' wildcard character and it must come after the 'bucket' name. Size
+      limits related to load jobs apply to external data sources. For Google
+      Cloud Bigtable URIs: Exactly one URI can be specified and it has to be a
+      fully specified and valid HTTPS URL for a Google Cloud Bigtable table.
+      For Google Cloud Datastore backups, exactly one URI can be specified,
+      and it must end with '.backup_info'. Also, the '*' wildcard character is
+      not allowed.
+  """
+
+  autodetect = _messages.BooleanField(1)
+  bigtableOptions = _messages.MessageField('BigtableOptions', 2)
+  compression = _messages.StringField(3)
+  csvOptions = _messages.MessageField('CsvOptions', 4)
+  googleSheetsOptions = _messages.MessageField('GoogleSheetsOptions', 5)
+  ignoreUnknownValues = _messages.BooleanField(6)
+  maxBadRecords = _messages.IntegerField(7, variant=_messages.Variant.INT32)
+  schema = _messages.MessageField('TableSchema', 8)
+  sourceFormat = _messages.StringField(9)
+  sourceUris = _messages.StringField(10, repeated=True)
+
+
+class GetQueryResultsResponse(_messages.Message):
+  """A GetQueryResultsResponse object.
+
+  Fields:
+    cacheHit: Whether the query result was fetched from the query cache.
+    errors: [Output-only] All errors and warnings encountered during the
+      running of the job. Errors here do not necessarily mean that the job has
+      completed or was unsuccessful.
+    etag: A hash of this response.
+    jobComplete: Whether the query has completed or not. If rows or totalRows
+      are present, this will always be true. If this is false, totalRows will
+      not be available.
+    jobReference: Reference to the BigQuery Job that was created to run the
+      query. This field will be present even if the original request timed
+      out, in which case GetQueryResults can be used to read the results once
+      the query has completed. Since this API only returns the first page of
+      results, subsequent pages can be fetched via the same mechanism
+      (GetQueryResults).
+    kind: The resource type of the response.
+    numDmlAffectedRows: [Output-only, Experimental] The number of rows
+      affected by a DML statement. Present only for DML statements INSERT,
+      UPDATE or DELETE.
+    pageToken: A token used for paging results.
+    rows: An object with as many results as can be contained within the
+      maximum permitted reply size. To get any additional rows, you can call
+      GetQueryResults and specify the jobReference returned above. Present
+      only when the query completes successfully.
+    schema: The schema of the results. Present only when the query completes
+      successfully.
+    totalBytesProcessed: The total number of bytes processed for this query.
+    totalRows: The total number of rows in the complete query result set,
+      which can be more than the number of rows in this single page of
+      results. Present only when the query completes successfully.
+  """
+
+  cacheHit = _messages.BooleanField(1)
+  errors = _messages.MessageField('ErrorProto', 2, repeated=True)
+  etag = _messages.StringField(3)
+  jobComplete = _messages.BooleanField(4)
+  jobReference = _messages.MessageField('JobReference', 5)
+  kind = _messages.StringField(6, default=u'bigquery#getQueryResultsResponse')
+  numDmlAffectedRows = _messages.IntegerField(7)
+  pageToken = _messages.StringField(8)
+  rows = _messages.MessageField('TableRow', 9, repeated=True)
+  schema = _messages.MessageField('TableSchema', 10)
+  totalBytesProcessed = _messages.IntegerField(11)
+  totalRows = _messages.IntegerField(12, variant=_messages.Variant.UINT64)
+
+
+class GoogleSheetsOptions(_messages.Message):
+  """A GoogleSheetsOptions object.
+
+  Fields:
+    skipLeadingRows: [Optional] The number of rows at the top of a sheet that
+      BigQuery will skip when reading the data. The default value is 0. This
+      property is useful if you have header rows that should be skipped. When
+      autodetect is on, behavior is the following: * skipLeadingRows
+      unspecified - Autodetect tries to detect headers in the first row. If
+      they are not detected, the row is read as data. Otherwise data is read
+      starting from the second row. * skipLeadingRows is 0 - Instructs
+      autodetect that there are no headers and data should be read starting
+      from the first row. * skipLeadingRows = N > 0 - Autodetect skips N-1
+      rows and tries to detect headers in row N. If headers are not detected,
+      row N is just skipped. Otherwise row N is used to extract column names
+      for the detected schema.
+  """
+
+  skipLeadingRows = _messages.IntegerField(1)
+
+
+class Job(_messages.Message):
+  """A Job object.
+
+  Fields:
+    configuration: [Required] Describes the job configuration.
+    etag: [Output-only] A hash of this resource.
+    id: [Output-only] Opaque ID field of the job
+    jobReference: [Optional] Reference describing the unique-per-user name of
+      the job.
+    kind: [Output-only] The type of the resource.
+    selfLink: [Output-only] A URL that can be used to access this resource
+      again.
+    statistics: [Output-only] Information about the job, including starting
+      time and ending time of the job.
+    status: [Output-only] The status of this job. Examine this value when
+      polling an asynchronous job to see if the job is complete.
+    user_email: [Output-only] Email address of the user who ran the job.
+  """
+
+  configuration = _messages.MessageField('JobConfiguration', 1)
+  etag = _messages.StringField(2)
+  id = _messages.StringField(3)
+  jobReference = _messages.MessageField('JobReference', 4)
+  kind = _messages.StringField(5, default=u'bigquery#job')
+  selfLink = _messages.StringField(6)
+  statistics = _messages.MessageField('JobStatistics', 7)
+  status = _messages.MessageField('JobStatus', 8)
+  user_email = _messages.StringField(9)
+
+
+class JobCancelResponse(_messages.Message):
+  """A JobCancelResponse object.
+
+  Fields:
+    job: The final state of the job.
+    kind: The resource type of the response.
+  """
+
+  job = _messages.MessageField('Job', 1)
+  kind = _messages.StringField(2, default=u'bigquery#jobCancelResponse')
+
+
+class JobConfiguration(_messages.Message):
+  """A JobConfiguration object.
+
+  Fields:
+    copy: [Pick one] Copies a table.
+    dryRun: [Optional] If set, don't actually run this job. A valid query will
+      return a mostly empty response with some processing statistics, while an
+      invalid query will return the same error it would if it wasn't a dry
+      run. Behavior of non-query jobs is undefined.
+    extract: [Pick one] Configures an extract job.
+    load: [Pick one] Configures a load job.
+    query: [Pick one] Configures a query job.
+  """
+
+  copy = _messages.MessageField('JobConfigurationTableCopy', 1)
+  dryRun = _messages.BooleanField(2)
+  extract = _messages.MessageField('JobConfigurationExtract', 3)
+  load = _messages.MessageField('JobConfigurationLoad', 4)
+  query = _messages.MessageField('JobConfigurationQuery', 5)
+
+
+class JobConfigurationExtract(_messages.Message):
+  """A JobConfigurationExtract object.
+
+  Fields:
+    compression: [Optional] The compression type to use for exported files.
+      Possible values include GZIP and NONE. The default value is NONE.
+    destinationFormat: [Optional] The exported file format. Possible values
+      include CSV, NEWLINE_DELIMITED_JSON and AVRO. The default value is CSV.
+      Tables with nested or repeated fields cannot be exported as CSV.
+    destinationUri: [Pick one] DEPRECATED: Use destinationUris instead,
+      passing only one URI as necessary. The fully-qualified Google Cloud
+      Storage URI where the extracted table should be written.
+    destinationUris: [Pick one] A list of fully-qualified Google Cloud Storage
+      URIs where the extracted table should be written.
+    fieldDelimiter: [Optional] Delimiter to use between fields in the exported
+      data. Default is ','
+    printHeader: [Optional] Whether to print out a header row in the results.
+      Default is true.
+    sourceTable: [Required] A reference to the table being exported.
+  """
+
+  compression = _messages.StringField(1)
+  destinationFormat = _messages.StringField(2)
+  destinationUri = _messages.StringField(3)
+  destinationUris = _messages.StringField(4, repeated=True)
+  fieldDelimiter = _messages.StringField(5)
+  printHeader = _messages.BooleanField(6, default=True)
+  sourceTable = _messages.MessageField('TableReference', 7)
+
+
+class JobConfigurationLoad(_messages.Message):
+  """A JobConfigurationLoad object.
+
+  Fields:
+    allowJaggedRows: [Optional] Accept rows that are missing trailing optional
+      columns. The missing values are treated as nulls. If false, records with
+      missing trailing columns are treated as bad records, and if there are
+      too many bad records, an invalid error is returned in the job result.
+      The default value is false. Only applicable to CSV, ignored for other
+      formats.
+    allowQuotedNewlines: Indicates if BigQuery should allow quoted data
+      sections that contain newline characters in a CSV file. The default
+      value is false.
+    autodetect: [Experimental] Indicates if we should automatically infer the
+      options and schema for CSV and JSON sources.
+    createDisposition: [Optional] Specifies whether the job is allowed to
+      create new tables. The following values are supported: CREATE_IF_NEEDED:
+      If the table does not exist, BigQuery creates the table. CREATE_NEVER:
+      The table must already exist. If it does not, a 'notFound' error is
+      returned in the job result. The default value is CREATE_IF_NEEDED.
+      Creation, truncation and append actions occur as one atomic update upon
+      job completion.
+    destinationTable: [Required] The destination table to load the data into.
+    encoding: [Optional] The character encoding of the data. The supported
+      values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery
+      decodes the data after the raw, binary data has been split using the
+      values of the quote and fieldDelimiter properties.
+    fieldDelimiter: [Optional] The separator for fields in a CSV file. The
+      separator can be any ISO-8859-1 single-byte character. To use a
+      character in the range 128-255, you must encode the character as UTF8.
+      BigQuery converts the string to ISO-8859-1 encoding, and then uses the
+      first byte of the encoded string to split the data in its raw, binary
+      state. BigQuery also supports the escape sequence "\t" to specify a tab
+      separator. The default value is a comma (',').
+    ignoreUnknownValues: [Optional] Indicates if BigQuery should allow extra
+      values that are not represented in the table schema. If true, the extra
+      values are ignored. If false, records with extra columns are treated as
+      bad records, and if there are too many bad records, an invalid error is
+      returned in the job result. The default value is false. The sourceFormat
+      property determines what BigQuery treats as an extra value: CSV:
+      Trailing columns JSON: Named values that don't match any column names
+    maxBadRecords: [Optional] The maximum number of bad records that BigQuery
+      can ignore when running the job. If the number of bad records exceeds
+      this value, an invalid error is returned in the job result. The default
+      value is 0, which requires that all records are valid.
+    projectionFields: [Experimental] If sourceFormat is set to
+      "DATASTORE_BACKUP", indicates which entity properties to load into
+      BigQuery from a Cloud Datastore backup. Property names are case
+      sensitive and must be top-level properties. If no properties are
+      specified, BigQuery loads all properties. If any named property isn't
+      found in the Cloud Datastore backup, an invalid error is returned in the
+      job result.
+    quote: [Optional] The value that is used to quote data sections in a CSV
+      file. BigQuery converts the string to ISO-8859-1 encoding, and then uses
+      the first byte of the encoded string to split the data in its raw,
+      binary state. The default value is a double-quote ('"'). If your data
+      does not contain quoted sections, set the property value to an empty
+      string. If your data contains quoted newline characters, you must also
+      set the allowQuotedNewlines property to true.
+    schema: [Optional] The schema for the destination table. The schema can be
+      omitted if the destination table already exists, or if you're loading
+      data from Google Cloud Datastore.
+    schemaInline: [Deprecated] The inline schema. For CSV schemas, specify as
+      "Field1:Type1[,Field2:Type2]*". For example, "foo:STRING, bar:INTEGER,
+      baz:FLOAT".
+    schemaInlineFormat: [Deprecated] The format of the schemaInline property.
+    schemaUpdateOptions: [Experimental] Allows the schema of the destination
+      table to be updated as a side effect of the load job. Schema update
+      options are supported in two cases: when writeDisposition is
+      WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the
+      destination table is a partition of a table, specified by partition
+      decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
+      schema. One or more of the following values are specified:
+      ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
+      ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original
+      schema to nullable.
+    skipLeadingRows: [Optional] The number of rows at the top of a CSV file
+      that BigQuery will skip when loading the data. The default value is 0.
+      This property is useful if you have header rows in the file that should
+      be skipped.
+    sourceFormat: [Optional] The format of the data files. For CSV files,
+      specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". For
+      newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro,
+      specify "AVRO". The default value is CSV.
+    sourceUris: [Required] The fully-qualified URIs that point to your data in
+      Google Cloud Storage. Each URI can contain one '*' wildcard character
+      and it must come after the 'bucket' name.
+    writeDisposition: [Optional] Specifies the action that occurs if the
+      destination table already exists. The following values are supported:
+      WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
+      table data. WRITE_APPEND: If the table already exists, BigQuery appends
+      the data to the table. WRITE_EMPTY: If the table already exists and
+      contains data, a 'duplicate' error is returned in the job result. The
+      default value is WRITE_APPEND. Each action is atomic and only occurs if
+      BigQuery is able to complete the job successfully. Creation, truncation
+      and append actions occur as one atomic update upon job completion.
+  """
+
+  allowJaggedRows = _messages.BooleanField(1)
+  allowQuotedNewlines = _messages.BooleanField(2)
+  autodetect = _messages.BooleanField(3)
+  createDisposition = _messages.StringField(4)
+  destinationTable = _messages.MessageField('TableReference', 5)
+  encoding = _messages.StringField(6)
+  fieldDelimiter = _messages.StringField(7)
+  ignoreUnknownValues = _messages.BooleanField(8)
+  maxBadRecords = _messages.IntegerField(9, variant=_messages.Variant.INT32)
+  projectionFields = _messages.StringField(10, repeated=True)
+  quote = _messages.StringField(11, default=u'"')
+  schema = _messages.MessageField('TableSchema', 12)
+  schemaInline = _messages.StringField(13)
+  schemaInlineFormat = _messages.StringField(14)
+  schemaUpdateOptions = _messages.StringField(15, repeated=True)
+  skipLeadingRows = _messages.IntegerField(16, variant=_messages.Variant.INT32)
+  sourceFormat = _messages.StringField(17)
+  sourceUris = _messages.StringField(18, repeated=True)
+  writeDisposition = _messages.StringField(19)
+
+
+class JobConfigurationQuery(_messages.Message):
+  """A JobConfigurationQuery object.
+
+  Messages:
+    TableDefinitionsValue: [Optional] If querying an external data source
+      outside of BigQuery, describes the data format, location and other
+      properties of the data source. By defining these properties, the data
+      source can then be queried as if it were a standard BigQuery table.
+
+  Fields:
+    allowLargeResults: If true, allows the query to produce arbitrarily large
+      result tables at a slight cost in performance. Requires destinationTable
+      to be set.
+    createDisposition: [Optional] Specifies whether the job is allowed to
+      create new tables. The following values are supported: CREATE_IF_NEEDED:
+      If the table does not exist, BigQuery creates the table. CREATE_NEVER:
+      The table must already exist. If it does not, a 'notFound' error is
+      returned in the job result. The default value is CREATE_IF_NEEDED.
+      Creation, truncation and append actions occur as one atomic update upon
+      job completion.
+    defaultDataset: [Optional] Specifies the default dataset to use for
+      unqualified table names in the query.
+    destinationTable: [Optional] Describes the table where the query results
+      should be stored. If not present, a new table will be created to store
+      the results.
+    flattenResults: [Optional] Flattens all nested and repeated fields in the
+      query results. The default value is true. allowLargeResults must be true
+      if this is set to false.
+    maximumBillingTier: [Optional] Limits the billing tier for this job.
+      Queries that have resource usage beyond this tier will fail (without
+      incurring a charge). If unspecified, this will be set to your project
+      default.
+    maximumBytesBilled: [Optional] Limits the bytes billed for this job.
+      Queries that will have bytes billed beyond this limit will fail (without
+      incurring a charge). If unspecified, this will be set to your project
+      default.
+    preserveNulls: [Deprecated] This property is deprecated.
+    priority: [Optional] Specifies a priority for the query. Possible values
+      include INTERACTIVE and BATCH. The default value is INTERACTIVE.
+    query: [Required] BigQuery SQL query to execute.
+    schemaUpdateOptions: [Experimental] Allows the schema of the destination
+      table to be updated as a side effect of the query job. Schema update
+      options are supported in two cases: when writeDisposition is
+      WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the
+      destination table is a partition of a table, specified by partition
+      decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
+      schema. One or more of the following values are specified:
+      ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
+      ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original
+      schema to nullable.
+    tableDefinitions: [Optional] If querying an external data source outside
+      of BigQuery, describes the data format, location and other properties of
+      the data source. By defining these properties, the data source can then
+      be queried as if it were a standard BigQuery table.
+    useLegacySql: [Experimental] Specifies whether to use BigQuery's legacy
+      SQL dialect for this query. The default value is true. If set to false,
+      the query will use BigQuery's standard SQL:
+      https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is
+      set to false, the values of allowLargeResults and flattenResults are
+      ignored; query will be run as if allowLargeResults is true and
+      flattenResults is false.
+    useQueryCache: [Optional] Whether to look for the result in the query
+      cache. The query cache is a best-effort cache that will be flushed
+      whenever tables in the query are modified. Moreover, the query cache is
+      only available when a query does not have a destination table specified.
+      The default value is true.
+    userDefinedFunctionResources: [Experimental] Describes user-defined
+      function resources used in the query.
+    writeDisposition: [Optional] Specifies the action that occurs if the
+      destination table already exists. The following values are supported:
+      WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
+      table data. WRITE_APPEND: If the table already exists, BigQuery appends
+      the data to the table. WRITE_EMPTY: If the table already exists and
+      contains data, a 'duplicate' error is returned in the job result. The
+      default value is WRITE_EMPTY. Each action is atomic and only occurs if
+      BigQuery is able to complete the job successfully. Creation, truncation
+      and append actions occur as one atomic update upon job completion.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class TableDefinitionsValue(_messages.Message):
+    """[Optional] If querying an external data source outside of BigQuery,
+    describes the data format, location and other properties of the data
+    source. By defining these properties, the data source can then be queried
+    as if it were a standard BigQuery table.
+
+    Messages:
+      AdditionalProperty: An additional property for a TableDefinitionsValue
+        object.
+
+    Fields:
+      additionalProperties: Additional properties of type
+        TableDefinitionsValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a TableDefinitionsValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A ExternalDataConfiguration attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('ExternalDataConfiguration', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  allowLargeResults = _messages.BooleanField(1)
+  createDisposition = _messages.StringField(2)
+  defaultDataset = _messages.MessageField('DatasetReference', 3)
+  destinationTable = _messages.MessageField('TableReference', 4)
+  flattenResults = _messages.BooleanField(5, default=True)
+  maximumBillingTier = _messages.IntegerField(6, variant=_messages.Variant.INT32, default=1)
+  maximumBytesBilled = _messages.IntegerField(7)
+  preserveNulls = _messages.BooleanField(8)
+  priority = _messages.StringField(9)
+  query = _messages.StringField(10)
+  schemaUpdateOptions = _messages.StringField(11, repeated=True)
+  tableDefinitions = _messages.MessageField('TableDefinitionsValue', 12)
+  useLegacySql = _messages.BooleanField(13)
+  useQueryCache = _messages.BooleanField(14, default=True)
+  userDefinedFunctionResources = _messages.MessageField('UserDefinedFunctionResource', 15, repeated=True)
+  writeDisposition = _messages.StringField(16)
+
+
+class JobConfigurationTableCopy(_messages.Message):
+  """A JobConfigurationTableCopy object.
+
+  Fields:
+    createDisposition: [Optional] Specifies whether the job is allowed to
+      create new tables. The following values are supported: CREATE_IF_NEEDED:
+      If the table does not exist, BigQuery creates the table. CREATE_NEVER:
+      The table must already exist. If it does not, a 'notFound' error is
+      returned in the job result. The default value is CREATE_IF_NEEDED.
+      Creation, truncation and append actions occur as one atomic update upon
+      job completion.
+    destinationTable: [Required] The destination table
+    sourceTable: [Pick one] Source table to copy.
+    sourceTables: [Pick one] Source tables to copy.
+    writeDisposition: [Optional] Specifies the action that occurs if the
+      destination table already exists. The following values are supported:
+      WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
+      table data. WRITE_APPEND: If the table already exists, BigQuery appends
+      the data to the table. WRITE_EMPTY: If the table already exists and
+      contains data, a 'duplicate' error is returned in the job result. The
+      default value is WRITE_EMPTY. Each action is atomic and only occurs if
+      BigQuery is able to complete the job successfully. Creation, truncation
+      and append actions occur as one atomic update upon job completion.
+  """
+
+  createDisposition = _messages.StringField(1)
+  destinationTable = _messages.MessageField('TableReference', 2)
+  sourceTable = _messages.MessageField('TableReference', 3)
+  sourceTables = _messages.MessageField('TableReference', 4, repeated=True)
+  writeDisposition = _messages.StringField(5)
+
+
+class JobList(_messages.Message):
+  """A JobList object.
+
+  Messages:
+    JobsValueListEntry: A JobsValueListEntry object.
+
+  Fields:
+    etag: A hash of this page of results.
+    jobs: List of jobs that were requested.
+    kind: The resource type of the response.
+    nextPageToken: A token to request the next page of results.
+  """
+
+  class JobsValueListEntry(_messages.Message):
+    """A JobsValueListEntry object.
+
+    Fields:
+      configuration: [Full-projection-only] Specifies the job configuration.
+      errorResult: A result object that will be present only if the job has
+        failed.
+      id: Unique opaque ID of the job.
+      jobReference: Job reference uniquely identifying the job.
+      kind: The resource type.
+      state: Running state of the job. When the state is DONE, errorResult can
+        be checked to determine whether the job succeeded or failed.
+      statistics: [Output-only] Information about the job, including starting
+        time and ending time of the job.
+      status: [Full-projection-only] Describes the state of the job.
+      user_email: [Full-projection-only] Email address of the user who ran the
+        job.
+    """
+
+    configuration = _messages.MessageField('JobConfiguration', 1)
+    errorResult = _messages.MessageField('ErrorProto', 2)
+    id = _messages.StringField(3)
+    jobReference = _messages.MessageField('JobReference', 4)
+    kind = _messages.StringField(5, default=u'bigquery#job')
+    state = _messages.StringField(6)
+    statistics = _messages.MessageField('JobStatistics', 7)
+    status = _messages.MessageField('JobStatus', 8)
+    user_email = _messages.StringField(9)
+
+  etag = _messages.StringField(1)
+  jobs = _messages.MessageField('JobsValueListEntry', 2, repeated=True)
+  kind = _messages.StringField(3, default=u'bigquery#jobList')
+  nextPageToken = _messages.StringField(4)
+
+
+class JobReference(_messages.Message):
+  """A JobReference object.
+
+  Fields:
+    jobId: [Required] The ID of the job. The ID must contain only letters
+      (a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum
+      length is 1,024 characters.
+    projectId: [Required] The ID of the project containing this job.
+  """
+
+  jobId = _messages.StringField(1)
+  projectId = _messages.StringField(2)
+
+
+class JobStatistics(_messages.Message):
+  """A JobStatistics object.
+
+  Fields:
+    creationTime: [Output-only] Creation time of this job, in milliseconds
+      since the epoch. This field will be present on all jobs.
+    endTime: [Output-only] End time of this job, in milliseconds since the
+      epoch. This field will be present whenever a job is in the DONE state.
+    extract: [Output-only] Statistics for an extract job.
+    load: [Output-only] Statistics for a load job.
+    query: [Output-only] Statistics for a query job.
+    startTime: [Output-only] Start time of this job, in milliseconds since the
+      epoch. This field will be present when the job transitions from the
+      PENDING state to either RUNNING or DONE.
+    totalBytesProcessed: [Output-only] [Deprecated] Use the bytes processed in
+      the query statistics instead.
+  """
+
+  creationTime = _messages.IntegerField(1)
+  endTime = _messages.IntegerField(2)
+  extract = _messages.MessageField('JobStatistics4', 3)
+  load = _messages.MessageField('JobStatistics3', 4)
+  query = _messages.MessageField('JobStatistics2', 5)
+  startTime = _messages.IntegerField(6)
+  totalBytesProcessed = _messages.IntegerField(7)
+
+
+class JobStatistics2(_messages.Message):
+  """A JobStatistics2 object.
+
+  Fields:
+    billingTier: [Output-only] Billing tier for the job.
+    cacheHit: [Output-only] Whether the query result was fetched from the
+      query cache.
+    numDmlAffectedRows: [Output-only, Experimental] The number of rows
+      affected by a DML statement. Present only for DML statements INSERT,
+      UPDATE or DELETE.
+    queryPlan: [Output-only, Experimental] Describes execution plan for the
+      query.
+    referencedTables: [Output-only, Experimental] Referenced tables for the
+      job. Queries that reference more than 50 tables will not have a complete
+      list.
+    schema: [Output-only, Experimental] The schema of the results. Present
+      only for successful dry run of non-legacy SQL queries.
+    totalBytesBilled: [Output-only] Total bytes billed for the job.
+    totalBytesProcessed: [Output-only] Total bytes processed for the job.
+  """
+
+  billingTier = _messages.IntegerField(1, variant=_messages.Variant.INT32)
+  cacheHit = _messages.BooleanField(2)
+  numDmlAffectedRows = _messages.IntegerField(3)
+  queryPlan = _messages.MessageField('ExplainQueryStage', 4, repeated=True)
+  referencedTables = _messages.MessageField('TableReference', 5, repeated=True)
+  schema = _messages.MessageField('TableSchema', 6)
+  totalBytesBilled = _messages.IntegerField(7)
+  totalBytesProcessed = _messages.IntegerField(8)
+
+
+class JobStatistics3(_messages.Message):
+  """A JobStatistics3 object.
+
+  Fields:
+    inputFileBytes: [Output-only] Number of bytes of source data in a load
+      job.
+    inputFiles: [Output-only] Number of source files in a load job.
+    outputBytes: [Output-only] Size of the loaded data in bytes. Note that
+      while a load job is in the running state, this value may change.
+    outputRows: [Output-only] Number of rows imported in a load job. Note that
+      while an import job is in the running state, this value may change.
+  """
+
+  inputFileBytes = _messages.IntegerField(1)
+  inputFiles = _messages.IntegerField(2)
+  outputBytes = _messages.IntegerField(3)
+  outputRows = _messages.IntegerField(4)
+
+
+class JobStatistics4(_messages.Message):
+  """A JobStatistics4 object.
+
+  Fields:
+    destinationUriFileCounts: [Output-only] Number of files per destination
+      URI or URI pattern specified in the extract configuration. These values
+      will be in the same order as the URIs specified in the 'destinationUris'
+      field.
+  """
+
+  destinationUriFileCounts = _messages.IntegerField(1, repeated=True)
+
+
+class JobStatus(_messages.Message):
+  """A JobStatus object.
+
+  Fields:
+    errorResult: [Output-only] Final error result of the job. If present,
+      indicates that the job has completed and was unsuccessful.
+    errors: [Output-only] All errors encountered during the running of the
+      job. Errors here do not necessarily mean that the job has completed or
+      was unsuccessful.
+    state: [Output-only] Running state of the job.
+  """
+
+  errorResult = _messages.MessageField('ErrorProto', 1)
+  errors = _messages.MessageField('ErrorProto', 2, repeated=True)
+  state = _messages.StringField(3)
+
+
+@encoding.MapUnrecognizedFields('additionalProperties')
+class JsonObject(_messages.Message):
+  """Represents a single JSON object.
+
+  Messages:
+    AdditionalProperty: An additional property for a JsonObject object.
+
+  Fields:
+    additionalProperties: Additional properties of type JsonObject
+  """
+
+  class AdditionalProperty(_messages.Message):
+    """An additional property for a JsonObject object.
+
+    Fields:
+      key: Name of the additional property.
+      value: A JsonValue attribute.
+    """
+
+    key = _messages.StringField(1)
+    value = _messages.MessageField('JsonValue', 2)
+
+  additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+
+JsonValue = extra_types.JsonValue
+
+
+class ProjectList(_messages.Message):
+  """A ProjectList object.
+
+  Messages:
+    ProjectsValueListEntry: A ProjectsValueListEntry object.
+
+  Fields:
+    etag: A hash of the page of results
+    kind: The type of list.
+    nextPageToken: A token to request the next page of results.
+    projects: Projects to which you have at least READ access.
+    totalItems: The total number of projects in the list.
+  """
+
+  class ProjectsValueListEntry(_messages.Message):
+    """A ProjectsValueListEntry object.
+
+    Fields:
+      friendlyName: A descriptive name for this project.
+      id: An opaque ID of this project.
+      kind: The resource type.
+      numericId: The numeric ID of this project.
+      projectReference: A unique reference to this project.
+    """
+
+    friendlyName = _messages.StringField(1)
+    id = _messages.StringField(2)
+    kind = _messages.StringField(3, default=u'bigquery#project')
+    numericId = _messages.IntegerField(4, variant=_messages.Variant.UINT64)
+    projectReference = _messages.MessageField('ProjectReference', 5)
+
+  etag = _messages.StringField(1)
+  kind = _messages.StringField(2, default=u'bigquery#projectList')
+  nextPageToken = _messages.StringField(3)
+  projects = _messages.MessageField('ProjectsValueListEntry', 4, repeated=True)
+  totalItems = _messages.IntegerField(5, variant=_messages.Variant.INT32)
+
+
+class ProjectReference(_messages.Message):
+  """A ProjectReference object.
+
+  Fields:
+    projectId: [Required] ID of the project. Can be either the numeric ID or
+      the assigned ID of the project.
+  """
+
+  projectId = _messages.StringField(1)
+
+
+class QueryRequest(_messages.Message):
+  """A QueryRequest object.
+
+  Fields:
+    defaultDataset: [Optional] Specifies the default datasetId and projectId
+      to assume for any unqualified table names in the query. If not set, all
+      table names in the query string must be qualified in the format
+      'datasetId.tableId'.
+    dryRun: [Optional] If set to true, BigQuery doesn't run the job. Instead,
+      if the query is valid, BigQuery returns statistics about the job such as
+      how many bytes would be processed. If the query is invalid, an error
+      returns. The default value is false.
+    kind: The resource type of the request.
+    maxResults: [Optional] The maximum number of rows of data to return per
+      page of results. Setting this flag to a small value such as 1000 and
+      then paging through results might improve reliability when the query
+      result set is large. In addition to this limit, responses are also
+      limited to 10 MB. By default, there is no maximum row count, and only
+      the byte limit applies.
+    preserveNulls: [Deprecated] This property is deprecated.
+    query: [Required] A query string, following the BigQuery query syntax, of
+      the query to execute. Example: "SELECT count(f1) FROM
+      [myProjectId:myDatasetId.myTableId]".
+    timeoutMs: [Optional] How long to wait for the query to complete, in
+      milliseconds, before the request times out and returns. Note that this
+      is only a timeout for the request, not the query. If the query takes
+      longer to run than the timeout value, the call returns without any
+      results and with the 'jobComplete' flag set to false. You can call
+      GetQueryResults() to wait for the query to complete and read the
+      results. The default value is 10000 milliseconds (10 seconds).
+    useLegacySql: [Experimental] Specifies whether to use BigQuery's legacy
+      SQL dialect for this query. The default value is true. If set to false,
+      the query will use BigQuery's standard SQL:
+      https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is
+      set to false, the values of allowLargeResults and flattenResults are
+      ignored; query will be run as if allowLargeResults is true and
+      flattenResults is false.
+    useQueryCache: [Optional] Whether to look for the result in the query
+      cache. The query cache is a best-effort cache that will be flushed
+      whenever tables in the query are modified. The default value is true.
+  """
+
+  defaultDataset = _messages.MessageField('DatasetReference', 1)
+  dryRun = _messages.BooleanField(2)
+  kind = _messages.StringField(3, default=u'bigquery#queryRequest')
+  maxResults = _messages.IntegerField(4, variant=_messages.Variant.UINT32)
+  preserveNulls = _messages.BooleanField(5)
+  query = _messages.StringField(6)
+  timeoutMs = _messages.IntegerField(7, variant=_messages.Variant.UINT32)
+  useLegacySql = _messages.BooleanField(8)
+  useQueryCache = _messages.BooleanField(9, default=True)
+
+
+class QueryResponse(_messages.Message):
+  """A QueryResponse object.
+
+  Fields:
+    cacheHit: Whether the query result was fetched from the query cache.
+    errors: [Output-only] All errors and warnings encountered during the
+      running of the job. Errors here do not necessarily mean that the job has
+      completed or was unsuccessful.
+    jobComplete: Whether the query has completed or not. If rows or totalRows
+      are present, this will always be true. If this is false, totalRows will
+      not be available.
+    jobReference: Reference to the Job that was created to run the query. This
+      field will be present even if the original request timed out, in which
+      case GetQueryResults can be used to read the results once the query has
+      completed. Since this API only returns the first page of results,
+      subsequent pages can be fetched via the same mechanism
+      (GetQueryResults).
+    kind: The resource type.
+    numDmlAffectedRows: [Output-only, Experimental] The number of rows
+      affected by a DML statement. Present only for DML statements INSERT,
+      UPDATE or DELETE.
+    pageToken: A token used for paging results.
+    rows: An object with as many results as can be contained within the
+      maximum permitted reply size. To get any additional rows, you can call
+      GetQueryResults and specify the jobReference returned above.
+    schema: The schema of the results. Present only when the query completes
+      successfully.
+    totalBytesProcessed: The total number of bytes processed for this query.
+      If this query was a dry run, this is the number of bytes that would be
+      processed if the query were run.
+    totalRows: The total number of rows in the complete query result set,
+      which can be more than the number of rows in this single page of
+      results.
+  """
+
+  cacheHit = _messages.BooleanField(1)
+  errors = _messages.MessageField('ErrorProto', 2, repeated=True)
+  jobComplete = _messages.BooleanField(3)
+  jobReference = _messages.MessageField('JobReference', 4)
+  kind = _messages.StringField(5, default=u'bigquery#queryResponse')
+  numDmlAffectedRows = _messages.IntegerField(6)
+  pageToken = _messages.StringField(7)
+  rows = _messages.MessageField('TableRow', 8, repeated=True)
+  schema = _messages.MessageField('TableSchema', 9)
+  totalBytesProcessed = _messages.IntegerField(10)
+  totalRows = _messages.IntegerField(11, variant=_messages.Variant.UINT64)
+
+
+class StandardQueryParameters(_messages.Message):
+  """Query parameters accepted by all methods.
+
+  Enums:
+    AltValueValuesEnum: Data format for the response.
+
+  Fields:
+    alt: Data format for the response.
+    fields: Selector specifying which fields to include in a partial response.
+    key: API key. Your API key identifies your project and provides you with
+      API access, quota, and reports. Required unless you provide an OAuth 2.0
+      token.
+    oauth_token: OAuth 2.0 token for the current user.
+    prettyPrint: Returns response with indentations and line breaks.
+    quotaUser: Available to use for quota purposes for server-side
+      applications. Can be any arbitrary string assigned to a user, but should
+      not exceed 40 characters. Overrides userIp if both are provided.
+    trace: A tracing token of the form "token:<tokenid>" to include in api
+      requests.
+    userIp: IP address of the site where the request originates. Use this if
+      you want to enforce per-user limits.
+  """
+
+  class AltValueValuesEnum(_messages.Enum):
+    """Data format for the response.
+
+    Values:
+      json: Responses with Content-Type of application/json
+    """
+    json = 0
+
+  alt = _messages.EnumField('AltValueValuesEnum', 1, default=u'json')
+  fields = _messages.StringField(2)
+  key = _messages.StringField(3)
+  oauth_token = _messages.StringField(4)
+  prettyPrint = _messages.BooleanField(5, default=True)
+  quotaUser = _messages.StringField(6)
+  trace = _messages.StringField(7)
+  userIp = _messages.StringField(8)
+
+
+class Streamingbuffer(_messages.Message):
+  """A Streamingbuffer object.
+
+  Fields:
+    estimatedBytes: [Output-only] A lower-bound estimate of the number of
+      bytes currently in the streaming buffer.
+    estimatedRows: [Output-only] A lower-bound estimate of the number of rows
+      currently in the streaming buffer.
+    oldestEntryTime: [Output-only] Contains the timestamp of the oldest entry
+      in the streaming buffer, in milliseconds since the epoch, if the
+      streaming buffer is available.
+  """
+
+  estimatedBytes = _messages.IntegerField(1, variant=_messages.Variant.UINT64)
+  estimatedRows = _messages.IntegerField(2, variant=_messages.Variant.UINT64)
+  oldestEntryTime = _messages.IntegerField(3, variant=_messages.Variant.UINT64)
+
+
+class Table(_messages.Message):
+  """A Table object.
+
+  Fields:
+    creationTime: [Output-only] The time when this table was created, in
+      milliseconds since the epoch.
+    description: [Optional] A user-friendly description of this table.
+    etag: [Output-only] A hash of this resource.
+    expirationTime: [Optional] The time when this table expires, in
+      milliseconds since the epoch. If not present, the table will persist
+      indefinitely. Expired tables will be deleted and their storage
+      reclaimed.
+    externalDataConfiguration: [Optional] Describes the data format, location,
+      and other properties of a table stored outside of BigQuery. By defining
+      these properties, the data source can then be queried as if it were a
+      standard BigQuery table.
+    friendlyName: [Optional] A descriptive name for this table.
+    id: [Output-only] An opaque ID uniquely identifying the table.
+    kind: [Output-only] The type of the resource.
+    lastModifiedTime: [Output-only] The time when this table was last
+      modified, in milliseconds since the epoch.
+    location: [Output-only] The geographic location where the table resides.
+      This value is inherited from the dataset.
+    numBytes: [Output-only] The size of this table in bytes, excluding any
+      data in the streaming buffer.
+    numLongTermBytes: [Output-only] The number of bytes in the table that are
+      considered "long-term storage".
+    numRows: [Output-only] The number of rows of data in this table, excluding
+      any data in the streaming buffer.
+    schema: [Optional] Describes the schema of this table.
+    selfLink: [Output-only] A URL that can be used to access this resource
+      again.
+    streamingBuffer: [Output-only] Contains information regarding this table's
+      streaming buffer, if one is present. This field will be absent if the
+      table is not being streamed to or if there is no data in the streaming
+      buffer.
+    tableReference: [Required] Reference describing the ID of this table.
+    timePartitioning: [Experimental] If specified, configures time-based
+      partitioning for this table.
+    type: [Output-only] Describes the table type. The following values are
+      supported: TABLE: A normal BigQuery table. VIEW: A virtual table defined
+      by a SQL query. EXTERNAL: A table that references data stored in an
+      external storage system, such as Google Cloud Storage. The default value
+      is TABLE.
+    view: [Optional] The view definition.
+  """
+
+  creationTime = _messages.IntegerField(1)
+  description = _messages.StringField(2)
+  etag = _messages.StringField(3)
+  expirationTime = _messages.IntegerField(4)
+  externalDataConfiguration = _messages.MessageField('ExternalDataConfiguration', 5)
+  friendlyName = _messages.StringField(6)
+  id = _messages.StringField(7)
+  kind = _messages.StringField(8, default=u'bigquery#table')
+  lastModifiedTime = _messages.IntegerField(9, variant=_messages.Variant.UINT64)
+  location = _messages.StringField(10)
+  numBytes = _messages.IntegerField(11)
+  numLongTermBytes = _messages.IntegerField(12)
+  numRows = _messages.IntegerField(13, variant=_messages.Variant.UINT64)
+  schema = _messages.MessageField('TableSchema', 14)
+  selfLink = _messages.StringField(15)
+  streamingBuffer = _messages.MessageField('Streamingbuffer', 16)
+  tableReference = _messages.MessageField('TableReference', 17)
+  timePartitioning = _messages.MessageField('TimePartitioning', 18)
+  type = _messages.StringField(19)
+  view = _messages.MessageField('ViewDefinition', 20)
+
+
+class TableCell(_messages.Message):
+  """A TableCell object.
+
+  Fields:
+    v: A extra_types.JsonValue attribute.
+  """
+
+  v = _messages.MessageField('extra_types.JsonValue', 1)
+
+
+class TableDataInsertAllRequest(_messages.Message):
+  """A TableDataInsertAllRequest object.
+
+  Messages:
+    RowsValueListEntry: A RowsValueListEntry object.
+
+  Fields:
+    ignoreUnknownValues: [Optional] Accept rows that contain values that do
+      not match the schema. The unknown values are ignored. Default is false,
+      which treats unknown values as errors.
+    kind: The resource type of the response.
+    rows: The rows to insert.
+    skipInvalidRows: [Optional] Insert all valid rows of a request, even if
+      invalid rows exist. The default value is false, which causes the entire
+      request to fail if any invalid rows exist.
+    templateSuffix: [Experimental] If specified, treats the destination table
+      as a base template, and inserts the rows into an instance table named
+      "{destination}{templateSuffix}". BigQuery will manage creation of the
+      instance table, using the schema of the base template table. See
+      https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-
+      tables for considerations when working with templates tables.
+  """
+
+  class RowsValueListEntry(_messages.Message):
+    """A RowsValueListEntry object.
+
+    Fields:
+      insertId: [Optional] A unique ID for each row. BigQuery uses this
+        property to detect duplicate insertion requests on a best-effort
+        basis.
+      json: [Required] A JSON object that contains a row of data. The object's
+        properties and values must match the destination table's schema.
+    """
+
+    insertId = _messages.StringField(1)
+    json = _messages.MessageField('JsonObject', 2)
+
+  ignoreUnknownValues = _messages.BooleanField(1)
+  kind = _messages.StringField(2, default=u'bigquery#tableDataInsertAllRequest')
+  rows = _messages.MessageField('RowsValueListEntry', 3, repeated=True)
+  skipInvalidRows = _messages.BooleanField(4)
+  templateSuffix = _messages.StringField(5)
+
+
+class TableDataInsertAllResponse(_messages.Message):
+  """A TableDataInsertAllResponse object.
+
+  Messages:
+    InsertErrorsValueListEntry: A InsertErrorsValueListEntry object.
+
+  Fields:
+    insertErrors: An array of errors for rows that were not inserted.
+    kind: The resource type of the response.
+  """
+
+  class InsertErrorsValueListEntry(_messages.Message):
+    """A InsertErrorsValueListEntry object.
+
+    Fields:
+      errors: Error information for the row indicated by the index property.
+      index: The index of the row that error applies to.
+    """
+
+    errors = _messages.MessageField('ErrorProto', 1, repeated=True)
+    index = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
+
+  insertErrors = _messages.MessageField('InsertErrorsValueListEntry', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'bigquery#tableDataInsertAllResponse')
+
+
+class TableDataList(_messages.Message):
+  """A TableDataList object.
+
+  Fields:
+    etag: A hash of this page of results.
+    kind: The resource type of the response.
+    pageToken: A token used for paging results. Providing this token instead
+      of the startIndex parameter can help you retrieve stable results when an
+      underlying table is changing.
+    rows: Rows of results.
+    totalRows: The total number of rows in the complete table.
+  """
+
+  etag = _messages.StringField(1)
+  kind = _messages.StringField(2, default=u'bigquery#tableDataList')
+  pageToken = _messages.StringField(3)
+  rows = _messages.MessageField('TableRow', 4, repeated=True)
+  totalRows = _messages.IntegerField(5)
+
+
+class TableFieldSchema(_messages.Message):
+  """A TableFieldSchema object.
+
+  Fields:
+    description: [Optional] The field description. The maximum length is 16K
+      characters.
+    fields: [Optional] Describes the nested schema fields if the type property
+      is set to RECORD.
+    mode: [Optional] The field mode. Possible values include NULLABLE,
+      REQUIRED and REPEATED. The default value is NULLABLE.
+    name: [Required] The field name. The name must contain only letters (a-z,
+      A-Z), numbers (0-9), or underscores (_), and must start with a letter or
+      underscore. The maximum length is 128 characters.
+    type: [Required] The field data type. Possible values include STRING,
+      BYTES, INTEGER, FLOAT, BOOLEAN, TIMESTAMP or RECORD (where RECORD
+      indicates that the field contains a nested schema).
+  """
+
+  description = _messages.StringField(1)
+  fields = _messages.MessageField('TableFieldSchema', 2, repeated=True)
+  mode = _messages.StringField(3)
+  name = _messages.StringField(4)
+  type = _messages.StringField(5)
+
+
+class TableList(_messages.Message):
+  """A TableList object.
+
+  Messages:
+    TablesValueListEntry: A TablesValueListEntry object.
+
+  Fields:
+    etag: A hash of this page of results.
+    kind: The type of list.
+    nextPageToken: A token to request the next page of results.
+    tables: Tables in the requested dataset.
+    totalItems: The total number of tables in the dataset.
+  """
+
+  class TablesValueListEntry(_messages.Message):
+    """A TablesValueListEntry object.
+
+    Fields:
+      friendlyName: The user-friendly name for this table.
+      id: An opaque ID of the table
+      kind: The resource type.
+      tableReference: A reference uniquely identifying the table.
+      type: The type of table. Possible values are: TABLE, VIEW.
+    """
+
+    friendlyName = _messages.StringField(1)
+    id = _messages.StringField(2)
+    kind = _messages.StringField(3, default=u'bigquery#table')
+    tableReference = _messages.MessageField('TableReference', 4)
+    type = _messages.StringField(5)
+
+  etag = _messages.StringField(1)
+  kind = _messages.StringField(2, default=u'bigquery#tableList')
+  nextPageToken = _messages.StringField(3)
+  tables = _messages.MessageField('TablesValueListEntry', 4, repeated=True)
+  totalItems = _messages.IntegerField(5, variant=_messages.Variant.INT32)
+
+
+class TableReference(_messages.Message):
+  """A TableReference object.
+
+  Fields:
+    datasetId: [Required] The ID of the dataset containing this table.
+    projectId: [Required] The ID of the project containing this table.
+    tableId: [Required] The ID of the table. The ID must contain only letters
+      (a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is
+      1,024 characters.
+  """
+
+  datasetId = _messages.StringField(1)
+  projectId = _messages.StringField(2)
+  tableId = _messages.StringField(3)
+
+
+class TableRow(_messages.Message):
+  """A TableRow object.
+
+  Fields:
+    f: Represents a single row in the result set, consisting of one or more
+      fields.
+  """
+
+  f = _messages.MessageField('TableCell', 1, repeated=True)
+
+
+class TableSchema(_messages.Message):
+  """A TableSchema object.
+
+  Fields:
+    fields: Describes the fields in a table.
+  """
+
+  fields = _messages.MessageField('TableFieldSchema', 1, repeated=True)
+
+
+class TimePartitioning(_messages.Message):
+  """A TimePartitioning object.
+
+  Fields:
+    expirationMs: [Optional] Number of milliseconds for which to keep the
+      storage for a partition.
+    type: [Required] The only type supported is DAY, which will generate one
+      partition per day based on data loading time.
+  """
+
+  expirationMs = _messages.IntegerField(1)
+  type = _messages.StringField(2)
+
+
+class UserDefinedFunctionResource(_messages.Message):
+  """A UserDefinedFunctionResource object.
+
+  Fields:
+    inlineCode: [Pick one] An inline resource that contains code for a user-
+      defined function (UDF). Providing a inline code resource is equivalent
+      to providing a URI for a file containing the same code.
+    resourceUri: [Pick one] A code resource to load from a Google Cloud
+      Storage URI (gs://bucket/path).
+  """
+
+  inlineCode = _messages.StringField(1)
+  resourceUri = _messages.StringField(2)
+
+
+class ViewDefinition(_messages.Message):
+  """A ViewDefinition object.
+
+  Fields:
+    query: [Required] A query that BigQuery executes when the view is
+      referenced.
+    useLegacySql: [Experimental] Specifies whether to use BigQuery's legacy
+      SQL for this view. The default value is true. If set to false, the view
+      will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-
+      reference/ Queries and views that reference this view must use the same
+      flag value.
+    userDefinedFunctionResources: [Experimental] Describes user-defined
+      function resources used in the query.
+  """
+
+  query = _messages.StringField(1)
+  useLegacySql = _messages.BooleanField(2)
+  userDefinedFunctionResources = _messages.MessageField('UserDefinedFunctionResource', 3, repeated=True)
+
+
diff --git a/samples/dns_sample/__init__.py b/samples/dns_sample/__init__.py
new file mode 100644
index 0000000..58e0d91
--- /dev/null
+++ b/samples/dns_sample/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/samples/dns_sample/dns_v1.json b/samples/dns_sample/dns_v1.json
new file mode 100644
index 0000000..77c1553
--- /dev/null
+++ b/samples/dns_sample/dns_v1.json
@@ -0,0 +1,707 @@
+{
+ "kind": "discovery#restDescription",
+ "discoveryVersion": "v1",
+ "id": "dns:v1",
+ "name": "dns",
+ "version": "v1",
+ "revision": "20150807",
+ "title": "Google Cloud DNS API",
+ "description": "The Google Cloud DNS API provides services for configuring and serving authoritative DNS records.",
+ "ownerDomain": "google.com",
+ "ownerName": "Google",
+ "icons": {
+  "x16": "http://www.google.com/images/icons/product/search-16.gif",
+  "x32": "http://www.google.com/images/icons/product/search-32.gif"
+ },
+ "documentationLink": "https://developers.google.com/cloud-dns",
+ "protocol": "rest",
+ "baseUrl": "https://www.googleapis.com/dns/v1/projects/",
+ "basePath": "/dns/v1/projects/",
+ "rootUrl": "https://www.googleapis.com/",
+ "servicePath": "dns/v1/projects/",
+ "batchPath": "batch",
+ "parameters": {
+  "alt": {
+   "type": "string",
+   "description": "Data format for the response.",
+   "default": "json",
+   "enum": [
+    "json"
+   ],
+   "enumDescriptions": [
+    "Responses with Content-Type of application/json"
+   ],
+   "location": "query"
+  },
+  "fields": {
+   "type": "string",
+   "description": "Selector specifying which fields to include in a partial response.",
+   "location": "query"
+  },
+  "key": {
+   "type": "string",
+   "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
+   "location": "query"
+  },
+  "oauth_token": {
+   "type": "string",
+   "description": "OAuth 2.0 token for the current user.",
+   "location": "query"
+  },
+  "prettyPrint": {
+   "type": "boolean",
+   "description": "Returns response with indentations and line breaks.",
+   "default": "true",
+   "location": "query"
+  },
+  "quotaUser": {
+   "type": "string",
+   "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.",
+   "location": "query"
+  },
+  "userIp": {
+   "type": "string",
+   "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.",
+   "location": "query"
+  }
+ },
+ "auth": {
+  "oauth2": {
+   "scopes": {
+    "https://www.googleapis.com/auth/cloud-platform": {
+     "description": "View and manage your data across Google Cloud Platform services"
+    },
+    "https://www.googleapis.com/auth/cloud-platform.read-only": {
+     "description": "MESSAGE UNDER CONSTRUCTION View your data across Google Cloud Platform services"
+    },
+    "https://www.googleapis.com/auth/ndev.clouddns.readonly": {
+     "description": "View your DNS records hosted by Google Cloud DNS"
+    },
+    "https://www.googleapis.com/auth/ndev.clouddns.readwrite": {
+     "description": "View and manage your DNS records hosted by Google Cloud DNS"
+    }
+   }
+  }
+ },
+ "schemas": {
+  "Change": {
+   "id": "Change",
+   "type": "object",
+   "description": "An atomic update to a collection of ResourceRecordSets.",
+   "properties": {
+    "additions": {
+     "type": "array",
+     "description": "Which ResourceRecordSets to add?",
+     "items": {
+      "$ref": "ResourceRecordSet"
+     }
+    },
+    "deletions": {
+     "type": "array",
+     "description": "Which ResourceRecordSets to remove? Must match existing data exactly.",
+     "items": {
+      "$ref": "ResourceRecordSet"
+     }
+    },
+    "id": {
+     "type": "string",
+     "description": "Unique identifier for the resource; defined by the server (output only)."
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#change\".",
+     "default": "dns#change"
+    },
+    "startTime": {
+     "type": "string",
+     "description": "The time that this operation was started by the server. This is in RFC3339 text format."
+    },
+    "status": {
+     "type": "string",
+     "description": "Status of the operation (output only).",
+     "enum": [
+      "done",
+      "pending"
+     ],
+     "enumDescriptions": [
+      "",
+      ""
+     ]
+    }
+   }
+  },
+  "ChangesListResponse": {
+   "id": "ChangesListResponse",
+   "type": "object",
+   "description": "The response to a request to enumerate Changes to a ResourceRecordSets collection.",
+   "properties": {
+    "changes": {
+     "type": "array",
+     "description": "The requested changes.",
+     "items": {
+      "$ref": "Change"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type of resource.",
+     "default": "dns#changesListResponse"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The presence of this field indicates that there exist more results following your last page of results in pagination order. To fetch them, make another list request using this value as your pagination token.\n\nIn this way you can retrieve the complete contents of even very large collections one page at a time. However, if the contents of the collection change between the first and last paginated list request, the set of all elements returned will be an inconsistent view of the collection. There is no way to retrieve a \"snapshot\" of collections larger than the maximum page size."
+    }
+   }
+  },
+  "ManagedZone": {
+   "id": "ManagedZone",
+   "type": "object",
+   "description": "A zone is a subtree of the DNS namespace under one administrative responsibility. A ManagedZone is a resource that represents a DNS zone hosted by the Cloud DNS service.",
+   "properties": {
+    "creationTime": {
+     "type": "string",
+     "description": "The time that this resource was created on the server. This is in RFC3339 text format. Output only."
+    },
+    "description": {
+     "type": "string",
+     "description": "A mutable string of at most 1024 characters associated with this resource for the user's convenience. Has no effect on the managed zone's function."
+    },
+    "dnsName": {
+     "type": "string",
+     "description": "The DNS name of this managed zone, for instance \"example.com.\"."
+    },
+    "id": {
+     "type": "string",
+     "description": "Unique identifier for the resource; defined by the server (output only)",
+     "format": "uint64"
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#managedZone\".",
+     "default": "dns#managedZone"
+    },
+    "name": {
+     "type": "string",
+     "description": "User assigned name for this resource. Must be unique within the project. The name must be 1-32 characters long, must begin with a letter, end with a letter or digit, and only contain lowercase letters, digits or dashes."
+    },
+    "nameServerSet": {
+     "type": "string",
+     "description": "Optionally specifies the NameServerSet for this ManagedZone. A NameServerSet is a set of DNS name servers that all host the same ManagedZones. Most users will leave this field unset."
+    },
+    "nameServers": {
+     "type": "array",
+     "description": "Delegate your managed_zone to these virtual name servers; defined by the server (output only)",
+     "items": {
+      "type": "string"
+     }
+    }
+   }
+  },
+  "ManagedZonesListResponse": {
+   "id": "ManagedZonesListResponse",
+   "type": "object",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Type of resource.",
+     "default": "dns#managedZonesListResponse"
+    },
+    "managedZones": {
+     "type": "array",
+     "description": "The managed zone resources.",
+     "items": {
+      "$ref": "ManagedZone"
+     }
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The presence of this field indicates that there exist more results following your last page of results in pagination order. To fetch them, make another list request using this value as your page token.\n\nIn this way you can retrieve the complete contents of even very large collections one page at a time. However, if the contents of the collection change between the first and last paginated list request, the set of all elements returned will be an inconsistent view of the collection. There is no way to retrieve a consistent snapshot of a collection larger than the maximum page size."
+    }
+   }
+  },
+  "Project": {
+   "id": "Project",
+   "type": "object",
+   "description": "A project resource. The project is a top level container for resources including Cloud DNS ManagedZones. Projects can be created only in the APIs console.",
+   "properties": {
+    "id": {
+     "type": "string",
+     "description": "User assigned unique identifier for the resource (output only)."
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#project\".",
+     "default": "dns#project"
+    },
+    "number": {
+     "type": "string",
+     "description": "Unique numeric identifier for the resource; defined by the server (output only).",
+     "format": "uint64"
+    },
+    "quota": {
+     "$ref": "Quota",
+     "description": "Quotas assigned to this project (output only)."
+    }
+   }
+  },
+  "Quota": {
+   "id": "Quota",
+   "type": "object",
+   "description": "Limits associated with a Project.",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#quota\".",
+     "default": "dns#quota"
+    },
+    "managedZones": {
+     "type": "integer",
+     "description": "Maximum allowed number of managed zones in the project.",
+     "format": "int32"
+    },
+    "resourceRecordsPerRrset": {
+     "type": "integer",
+     "description": "Maximum allowed number of ResourceRecords per ResourceRecordSet.",
+     "format": "int32"
+    },
+    "rrsetAdditionsPerChange": {
+     "type": "integer",
+     "description": "Maximum allowed number of ResourceRecordSets to add per ChangesCreateRequest.",
+     "format": "int32"
+    },
+    "rrsetDeletionsPerChange": {
+     "type": "integer",
+     "description": "Maximum allowed number of ResourceRecordSets to delete per ChangesCreateRequest.",
+     "format": "int32"
+    },
+    "rrsetsPerManagedZone": {
+     "type": "integer",
+     "description": "Maximum allowed number of ResourceRecordSets per zone in the project.",
+     "format": "int32"
+    },
+    "totalRrdataSizePerChange": {
+     "type": "integer",
+     "description": "Maximum allowed size for total rrdata in one ChangesCreateRequest in bytes.",
+     "format": "int32"
+    }
+   }
+  },
+  "ResourceRecordSet": {
+   "id": "ResourceRecordSet",
+   "type": "object",
+   "description": "A unit of data that will be returned by the DNS servers.",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Identifies what kind of resource this is. Value: the fixed string \"dns#resourceRecordSet\".",
+     "default": "dns#resourceRecordSet"
+    },
+    "name": {
+     "type": "string",
+     "description": "For example, www.example.com."
+    },
+    "rrdatas": {
+     "type": "array",
+     "description": "As defined in RFC 1035 (section 5) and RFC 1034 (section 3.6.1).",
+     "items": {
+      "type": "string"
+     }
+    },
+    "ttl": {
+     "type": "integer",
+     "description": "Number of seconds that this ResourceRecordSet can be cached by resolvers.",
+     "format": "int32"
+    },
+    "type": {
+     "type": "string",
+     "description": "The identifier of a supported record type, for example, A, AAAA, MX, TXT, and so on."
+    }
+   }
+  },
+  "ResourceRecordSetsListResponse": {
+   "id": "ResourceRecordSetsListResponse",
+   "type": "object",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Type of resource.",
+     "default": "dns#resourceRecordSetsListResponse"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The presence of this field indicates that there exist more results following your last page of results in pagination order. To fetch them, make another list request using this value as your pagination token.\n\nIn this way you can retrieve the complete contents of even very large collections one page at a time. However, if the contents of the collection change between the first and last paginated list request, the set of all elements returned will be an inconsistent view of the collection. There is no way to retrieve a consistent snapshot of a collection larger than the maximum page size."
+    },
+    "rrsets": {
+     "type": "array",
+     "description": "The resource record set resources.",
+     "items": {
+      "$ref": "ResourceRecordSet"
+     }
+    }
+   }
+  }
+ },
+ "resources": {
+  "changes": {
+   "methods": {
+    "create": {
+     "id": "dns.changes.create",
+     "path": "{project}/managedZones/{managedZone}/changes",
+     "httpMethod": "POST",
+     "description": "Atomically update the ResourceRecordSet collection.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "request": {
+      "$ref": "Change"
+     },
+     "response": {
+      "$ref": "Change"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "get": {
+     "id": "dns.changes.get",
+     "path": "{project}/managedZones/{managedZone}/changes/{changeId}",
+     "httpMethod": "GET",
+     "description": "Fetch the representation of an existing Change.",
+     "parameters": {
+      "changeId": {
+       "type": "string",
+       "description": "The identifier of the requested change, from a previous ResourceRecordSetsChangeResponse.",
+       "required": true,
+       "location": "path"
+      },
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone",
+      "changeId"
+     ],
+     "response": {
+      "$ref": "Change"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "list": {
+     "id": "dns.changes.list",
+     "path": "{project}/managedZones/{managedZone}/changes",
+     "httpMethod": "GET",
+     "description": "Enumerate Changes to a ResourceRecordSet collection.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Optional. Maximum number of results to be returned. If unspecified, the server will decide how many results to return.",
+       "format": "int32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Optional. A tag returned by a previous list request that was truncated. Use this parameter to continue a previous list request.",
+       "location": "query"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      },
+      "sortBy": {
+       "type": "string",
+       "description": "Sorting criterion. The only supported value is change sequence.",
+       "default": "changeSequence",
+       "enum": [
+        "changeSequence"
+       ],
+       "enumDescriptions": [
+        ""
+       ],
+       "location": "query"
+      },
+      "sortOrder": {
+       "type": "string",
+       "description": "Sorting order direction: 'ascending' or 'descending'.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "response": {
+      "$ref": "ChangesListResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    }
+   }
+  },
+  "managedZones": {
+   "methods": {
+    "create": {
+     "id": "dns.managedZones.create",
+     "path": "{project}/managedZones",
+     "httpMethod": "POST",
+     "description": "Create a new ManagedZone.",
+     "parameters": {
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project"
+     ],
+     "request": {
+      "$ref": "ManagedZone"
+     },
+     "response": {
+      "$ref": "ManagedZone"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "delete": {
+     "id": "dns.managedZones.delete",
+     "path": "{project}/managedZones/{managedZone}",
+     "httpMethod": "DELETE",
+     "description": "Delete a previously created ManagedZone.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "get": {
+     "id": "dns.managedZones.get",
+     "path": "{project}/managedZones/{managedZone}",
+     "httpMethod": "GET",
+     "description": "Fetch the representation of an existing ManagedZone.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "response": {
+      "$ref": "ManagedZone"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    },
+    "list": {
+     "id": "dns.managedZones.list",
+     "path": "{project}/managedZones",
+     "httpMethod": "GET",
+     "description": "Enumerate ManagedZones that have been created but not yet deleted.",
+     "parameters": {
+      "dnsName": {
+       "type": "string",
+       "description": "Restricts the list to return only zones with this domain name.",
+       "location": "query"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Optional. Maximum number of results to be returned. If unspecified, the server will decide how many results to return.",
+       "format": "int32",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Optional. A tag returned by a previous list request that was truncated. Use this parameter to continue a previous list request.",
+       "location": "query"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project"
+     ],
+     "response": {
+      "$ref": "ManagedZonesListResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    }
+   }
+  },
+  "projects": {
+   "methods": {
+    "get": {
+     "id": "dns.projects.get",
+     "path": "{project}",
+     "httpMethod": "GET",
+     "description": "Fetch the representation of an existing Project.",
+     "parameters": {
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "project"
+     ],
+     "response": {
+      "$ref": "Project"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    }
+   }
+  },
+  "resourceRecordSets": {
+   "methods": {
+    "list": {
+     "id": "dns.resourceRecordSets.list",
+     "path": "{project}/managedZones/{managedZone}/rrsets",
+     "httpMethod": "GET",
+     "description": "Enumerate ResourceRecordSets that have been created but not yet deleted.",
+     "parameters": {
+      "managedZone": {
+       "type": "string",
+       "description": "Identifies the managed zone addressed by this request. Can be the managed zone name or id.",
+       "required": true,
+       "location": "path"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Optional. Maximum number of results to be returned. If unspecified, the server will decide how many results to return.",
+       "format": "int32",
+       "location": "query"
+      },
+      "name": {
+       "type": "string",
+       "description": "Restricts the list to return only records with this fully qualified domain name.",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Optional. A tag returned by a previous list request that was truncated. Use this parameter to continue a previous list request.",
+       "location": "query"
+      },
+      "project": {
+       "type": "string",
+       "description": "Identifies the project addressed by this request.",
+       "required": true,
+       "location": "path"
+      },
+      "type": {
+       "type": "string",
+       "description": "Restricts the list to return only records of this type. If present, the \"name\" parameter must also be present.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "project",
+      "managedZone"
+     ],
+     "response": {
+      "$ref": "ResourceRecordSetsListResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/ndev.clouddns.readonly",
+      "https://www.googleapis.com/auth/ndev.clouddns.readwrite"
+     ]
+    }
+   }
+  }
+ }
+}
diff --git a/samples/dns_sample/dns_v1/__init__.py b/samples/dns_sample/dns_v1/__init__.py
new file mode 100644
index 0000000..2816da8
--- /dev/null
+++ b/samples/dns_sample/dns_v1/__init__.py
@@ -0,0 +1,5 @@
+"""Package marker file."""
+
+import pkgutil
+
+__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/samples/dns_sample/dns_v1/dns_v1.py b/samples/dns_sample/dns_v1/dns_v1.py
new file mode 100644
index 0000000..56e54f3
--- /dev/null
+++ b/samples/dns_sample/dns_v1/dns_v1.py
@@ -0,0 +1,554 @@
+#!/usr/bin/env python
+"""CLI for dns, version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+import code
+import os
+import platform
+import sys
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+
+from google.apputils import appcommands
+import gflags as flags
+
+import apitools.base.py as apitools_base
+from apitools.base.py import cli as apitools_base_cli
+import dns_v1_client as client_lib
+import dns_v1_messages as messages
+
+
+def _DeclareDnsFlags():
+  """Declare global flags in an idempotent way."""
+  if 'api_endpoint' in flags.FLAGS:
+    return
+  flags.DEFINE_string(
+      'api_endpoint',
+      u'https://www.googleapis.com/dns/v1/',
+      'URL of the API endpoint to use.',
+      short_name='dns_url')
+  flags.DEFINE_string(
+      'history_file',
+      u'~/.dns.v1.history',
+      'File with interactive shell history.')
+  flags.DEFINE_multistring(
+      'add_header', [],
+      'Additional http headers (as key=value strings). '
+      'Can be specified multiple times.')
+  flags.DEFINE_string(
+      'service_account_json_keyfile', '',
+      'Filename for a JSON service account key downloaded'
+      ' from the Developer Console.')
+  flags.DEFINE_enum(
+      'alt',
+      u'json',
+      [u'json'],
+      u'Data format for the response.')
+  flags.DEFINE_string(
+      'fields',
+      None,
+      u'Selector specifying which fields to include in a partial response.')
+  flags.DEFINE_string(
+      'key',
+      None,
+      u'API key. Your API key identifies your project and provides you with '
+      u'API access, quota, and reports. Required unless you provide an OAuth '
+      u'2.0 token.')
+  flags.DEFINE_string(
+      'oauth_token',
+      None,
+      u'OAuth 2.0 token for the current user.')
+  flags.DEFINE_boolean(
+      'prettyPrint',
+      'True',
+      u'Returns response with indentations and line breaks.')
+  flags.DEFINE_string(
+      'quotaUser',
+      None,
+      u'Available to use for quota purposes for server-side applications. Can'
+      u' be any arbitrary string assigned to a user, but should not exceed 40'
+      u' characters. Overrides userIp if both are provided.')
+  flags.DEFINE_string(
+      'trace',
+      None,
+      'A tracing token of the form "token:<tokenid>" to include in api '
+      'requests.')
+  flags.DEFINE_string(
+      'userIp',
+      None,
+      u'IP address of the site where the request originates. Use this if you '
+      u'want to enforce per-user limits.')
+
+
+FLAGS = flags.FLAGS
+apitools_base_cli.DeclareBaseFlags()
+_DeclareDnsFlags()
+
+
+def GetGlobalParamsFromFlags():
+  """Return a StandardQueryParameters based on flags."""
+  result = messages.StandardQueryParameters()
+  if FLAGS['alt'].present:
+    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
+  if FLAGS['fields'].present:
+    result.fields = FLAGS.fields.decode('utf8')
+  if FLAGS['key'].present:
+    result.key = FLAGS.key.decode('utf8')
+  if FLAGS['oauth_token'].present:
+    result.oauth_token = FLAGS.oauth_token.decode('utf8')
+  if FLAGS['prettyPrint'].present:
+    result.prettyPrint = FLAGS.prettyPrint
+  if FLAGS['quotaUser'].present:
+    result.quotaUser = FLAGS.quotaUser.decode('utf8')
+  if FLAGS['trace'].present:
+    result.trace = FLAGS.trace.decode('utf8')
+  if FLAGS['userIp'].present:
+    result.userIp = FLAGS.userIp.decode('utf8')
+  return result
+
+
+def GetClientFromFlags():
+  """Return a client object, configured from flags."""
+  log_request = FLAGS.log_request or FLAGS.log_request_response
+  log_response = FLAGS.log_response or FLAGS.log_request_response
+  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
+  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
+  credentials_args = {
+      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
+  }
+  try:
+    client = client_lib.DnsV1(
+        api_endpoint, log_request=log_request,
+        log_response=log_response,
+        credentials_args=credentials_args,
+        additional_http_headers=additional_http_headers)
+  except apitools_base.CredentialsError as e:
+    print 'Error creating credentials: %s' % e
+    sys.exit(1)
+  return client
+
+
+class PyShell(appcommands.Cmd):
+
+  def Run(self, _):
+    """Run an interactive python shell with the client."""
+    client = GetClientFromFlags()
+    params = GetGlobalParamsFromFlags()
+    for field in params.all_fields():
+      value = params.get_assigned_value(field.name)
+      if value != field.default:
+        client.AddGlobalParam(field.name, value)
+    banner = """
+           == dns interactive console ==
+                 client: a dns client
+          apitools_base: base apitools module
+         messages: the generated messages module
+    """
+    local_vars = {
+        'apitools_base': apitools_base,
+        'client': client,
+        'client_lib': client_lib,
+        'messages': messages,
+    }
+    if platform.system() == 'Linux':
+      console = apitools_base_cli.ConsoleWithReadline(
+          local_vars, histfile=FLAGS.history_file)
+    else:
+      console = code.InteractiveConsole(local_vars)
+    try:
+      console.interact(banner)
+    except SystemExit as e:
+      return e.code
+
+
+class ChangesCreate(apitools_base_cli.NewCmd):
+  """Command wrapping changes.Create."""
+
+  usage = """changes_create <project> <managedZone>"""
+
+  def __init__(self, name, fv):
+    super(ChangesCreate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'change',
+        None,
+        u'A Change resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, project, managedZone):
+    """Atomically update the ResourceRecordSet collection.
+
+    Args:
+      project: Identifies the project addressed by this request.
+      managedZone: Identifies the managed zone addressed by this request. Can
+        be the managed zone name or id.
+
+    Flags:
+      change: A Change resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsChangesCreateRequest(
+        project=project.decode('utf8'),
+        managedZone=managedZone.decode('utf8'),
+        )
+    if FLAGS['change'].present:
+      request.change = apitools_base.JsonToMessage(messages.Change, FLAGS.change)
+    result = client.changes.Create(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ChangesGet(apitools_base_cli.NewCmd):
+  """Command wrapping changes.Get."""
+
+  usage = """changes_get <project> <managedZone> <changeId>"""
+
+  def __init__(self, name, fv):
+    super(ChangesGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, project, managedZone, changeId):
+    """Fetch the representation of an existing Change.
+
+    Args:
+      project: Identifies the project addressed by this request.
+      managedZone: Identifies the managed zone addressed by this request. Can
+        be the managed zone name or id.
+      changeId: The identifier of the requested change, from a previous
+        ResourceRecordSetsChangeResponse.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsChangesGetRequest(
+        project=project.decode('utf8'),
+        managedZone=managedZone.decode('utf8'),
+        changeId=changeId.decode('utf8'),
+        )
+    result = client.changes.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ChangesList(apitools_base_cli.NewCmd):
+  """Command wrapping changes.List."""
+
+  usage = """changes_list <project> <managedZone>"""
+
+  def __init__(self, name, fv):
+    super(ChangesList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Optional. Maximum number of results to be returned. If unspecified,'
+        u' the server will decide how many results to return.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Optional. A tag returned by a previous list request that was '
+        u'truncated. Use this parameter to continue a previous list request.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'sortBy',
+        u'changeSequence',
+        [u'changeSequence'],
+        u'Sorting criterion. The only supported value is change sequence.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'sortOrder',
+        None,
+        u"Sorting order direction: 'ascending' or 'descending'.",
+        flag_values=fv)
+
+  def RunWithArgs(self, project, managedZone):
+    """Enumerate Changes to a ResourceRecordSet collection.
+
+    Args:
+      project: Identifies the project addressed by this request.
+      managedZone: Identifies the managed zone addressed by this request. Can
+        be the managed zone name or id.
+
+    Flags:
+      maxResults: Optional. Maximum number of results to be returned. If
+        unspecified, the server will decide how many results to return.
+      pageToken: Optional. A tag returned by a previous list request that was
+        truncated. Use this parameter to continue a previous list request.
+      sortBy: Sorting criterion. The only supported value is change sequence.
+      sortOrder: Sorting order direction: 'ascending' or 'descending'.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsChangesListRequest(
+        project=project.decode('utf8'),
+        managedZone=managedZone.decode('utf8'),
+        )
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['sortBy'].present:
+      request.sortBy = messages.DnsChangesListRequest.SortByValueValuesEnum(FLAGS.sortBy)
+    if FLAGS['sortOrder'].present:
+      request.sortOrder = FLAGS.sortOrder.decode('utf8')
+    result = client.changes.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ManagedZonesCreate(apitools_base_cli.NewCmd):
+  """Command wrapping managedZones.Create."""
+
+  usage = """managedZones_create <project>"""
+
+  def __init__(self, name, fv):
+    super(ManagedZonesCreate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'managedZone',
+        None,
+        u'A ManagedZone resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, project):
+    """Create a new ManagedZone.
+
+    Args:
+      project: Identifies the project addressed by this request.
+
+    Flags:
+      managedZone: A ManagedZone resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsManagedZonesCreateRequest(
+        project=project.decode('utf8'),
+        )
+    if FLAGS['managedZone'].present:
+      request.managedZone = apitools_base.JsonToMessage(messages.ManagedZone, FLAGS.managedZone)
+    result = client.managedZones.Create(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ManagedZonesDelete(apitools_base_cli.NewCmd):
+  """Command wrapping managedZones.Delete."""
+
+  usage = """managedZones_delete <project> <managedZone>"""
+
+  def __init__(self, name, fv):
+    super(ManagedZonesDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, project, managedZone):
+    """Delete a previously created ManagedZone.
+
+    Args:
+      project: Identifies the project addressed by this request.
+      managedZone: Identifies the managed zone addressed by this request. Can
+        be the managed zone name or id.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsManagedZonesDeleteRequest(
+        project=project.decode('utf8'),
+        managedZone=managedZone.decode('utf8'),
+        )
+    result = client.managedZones.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ManagedZonesGet(apitools_base_cli.NewCmd):
+  """Command wrapping managedZones.Get."""
+
+  usage = """managedZones_get <project> <managedZone>"""
+
+  def __init__(self, name, fv):
+    super(ManagedZonesGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, project, managedZone):
+    """Fetch the representation of an existing ManagedZone.
+
+    Args:
+      project: Identifies the project addressed by this request.
+      managedZone: Identifies the managed zone addressed by this request. Can
+        be the managed zone name or id.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsManagedZonesGetRequest(
+        project=project.decode('utf8'),
+        managedZone=managedZone.decode('utf8'),
+        )
+    result = client.managedZones.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ManagedZonesList(apitools_base_cli.NewCmd):
+  """Command wrapping managedZones.List."""
+
+  usage = """managedZones_list <project>"""
+
+  def __init__(self, name, fv):
+    super(ManagedZonesList, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'dnsName',
+        None,
+        u'Restricts the list to return only zones with this domain name.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Optional. Maximum number of results to be returned. If unspecified,'
+        u' the server will decide how many results to return.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Optional. A tag returned by a previous list request that was '
+        u'truncated. Use this parameter to continue a previous list request.',
+        flag_values=fv)
+
+  def RunWithArgs(self, project):
+    """Enumerate ManagedZones that have been created but not yet deleted.
+
+    Args:
+      project: Identifies the project addressed by this request.
+
+    Flags:
+      dnsName: Restricts the list to return only zones with this domain name.
+      maxResults: Optional. Maximum number of results to be returned. If
+        unspecified, the server will decide how many results to return.
+      pageToken: Optional. A tag returned by a previous list request that was
+        truncated. Use this parameter to continue a previous list request.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsManagedZonesListRequest(
+        project=project.decode('utf8'),
+        )
+    if FLAGS['dnsName'].present:
+      request.dnsName = FLAGS.dnsName.decode('utf8')
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.managedZones.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsGet(apitools_base_cli.NewCmd):
+  """Command wrapping projects.Get."""
+
+  usage = """projects_get <project>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, project):
+    """Fetch the representation of an existing Project.
+
+    Args:
+      project: Identifies the project addressed by this request.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsProjectsGetRequest(
+        project=project.decode('utf8'),
+        )
+    result = client.projects.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ResourceRecordSetsList(apitools_base_cli.NewCmd):
+  """Command wrapping resourceRecordSets.List."""
+
+  usage = """resourceRecordSets_list <project> <managedZone>"""
+
+  def __init__(self, name, fv):
+    super(ResourceRecordSetsList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Optional. Maximum number of results to be returned. If unspecified,'
+        u' the server will decide how many results to return.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Restricts the list to return only records with this fully qualified'
+        u' domain name.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Optional. A tag returned by a previous list request that was '
+        u'truncated. Use this parameter to continue a previous list request.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'type',
+        None,
+        u'Restricts the list to return only records of this type. If present,'
+        u' the "name" parameter must also be present.',
+        flag_values=fv)
+
+  def RunWithArgs(self, project, managedZone):
+    """Enumerate ResourceRecordSets that have been created but not yet
+    deleted.
+
+    Args:
+      project: Identifies the project addressed by this request.
+      managedZone: Identifies the managed zone addressed by this request. Can
+        be the managed zone name or id.
+
+    Flags:
+      maxResults: Optional. Maximum number of results to be returned. If
+        unspecified, the server will decide how many results to return.
+      name: Restricts the list to return only records with this fully
+        qualified domain name.
+      pageToken: Optional. A tag returned by a previous list request that was
+        truncated. Use this parameter to continue a previous list request.
+      type: Restricts the list to return only records of this type. If
+        present, the "name" parameter must also be present.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.DnsResourceRecordSetsListRequest(
+        project=project.decode('utf8'),
+        managedZone=managedZone.decode('utf8'),
+        )
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['type'].present:
+      request.type = FLAGS.type.decode('utf8')
+    result = client.resourceRecordSets.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+def main(_):
+  appcommands.AddCmd('pyshell', PyShell)
+  appcommands.AddCmd('changes_create', ChangesCreate)
+  appcommands.AddCmd('changes_get', ChangesGet)
+  appcommands.AddCmd('changes_list', ChangesList)
+  appcommands.AddCmd('managedZones_create', ManagedZonesCreate)
+  appcommands.AddCmd('managedZones_delete', ManagedZonesDelete)
+  appcommands.AddCmd('managedZones_get', ManagedZonesGet)
+  appcommands.AddCmd('managedZones_list', ManagedZonesList)
+  appcommands.AddCmd('projects_get', ProjectsGet)
+  appcommands.AddCmd('resourceRecordSets_list', ResourceRecordSetsList)
+
+  apitools_base_cli.SetupLogger()
+  if hasattr(appcommands, 'SetDefaultCommand'):
+    appcommands.SetDefaultCommand('pyshell')
+
+
+run_main = apitools_base_cli.run_main
+
+if __name__ == '__main__':
+  appcommands.Run()
diff --git a/samples/dns_sample/dns_v1/dns_v1_client.py b/samples/dns_sample/dns_v1/dns_v1_client.py
new file mode 100644
index 0000000..b15403c
--- /dev/null
+++ b/samples/dns_sample/dns_v1/dns_v1_client.py
@@ -0,0 +1,314 @@
+"""Generated client library for dns version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+from apitools.base.py import base_api
+from samples.dns_sample.dns_v1 import dns_v1_messages as messages
+
+
+class DnsV1(base_api.BaseApiClient):
+  """Generated client library for service dns version v1."""
+
+  MESSAGES_MODULE = messages
+  BASE_URL = u'https://www.googleapis.com/dns/v1/'
+
+  _PACKAGE = u'dns'
+  _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/cloud-platform.read-only', u'https://www.googleapis.com/auth/ndev.clouddns.readonly', u'https://www.googleapis.com/auth/ndev.clouddns.readwrite']
+  _VERSION = u'v1'
+  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
+  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'  # NOTE(review): identical to _CLIENT_SECRET above — looks like a generator bug; confirm the intended user-agent string upstream.
+  _CLIENT_CLASS_NAME = u'DnsV1'
+  _URL_VERSION = u'v1'
+  _API_KEY = None
+
+  def __init__(self, url='', credentials=None,
+               get_credentials=True, http=None, model=None,
+               log_request=False, log_response=False,
+               credentials_args=None, default_global_params=None,
+               additional_http_headers=None):
+    """Create a new dns handle."""
+    url = url or self.BASE_URL
+    super(DnsV1, self).__init__(
+        url, credentials=credentials,
+        get_credentials=get_credentials, http=http, model=model,
+        log_request=log_request, log_response=log_response,
+        credentials_args=credentials_args,
+        default_global_params=default_global_params,
+        additional_http_headers=additional_http_headers)
+    self.changes = self.ChangesService(self)
+    self.managedZones = self.ManagedZonesService(self)
+    self.projects = self.ProjectsService(self)
+    self.resourceRecordSets = self.ResourceRecordSetsService(self)
+
+  class ChangesService(base_api.BaseApiService):
+    """Service class for the changes resource."""
+
+    _NAME = u'changes'
+
+    def __init__(self, client):
+      super(DnsV1.ChangesService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Create(self, request, global_params=None):
+      """Atomically update the ResourceRecordSet collection.
+
+      Args:
+        request: (DnsChangesCreateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Change) The response message.
+      """
+      config = self.GetMethodConfig('Create')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dns.changes.create',
+        ordered_params=[u'project', u'managedZone'],
+        path_params=[u'managedZone', u'project'],
+        query_params=[],
+        relative_path=u'projects/{project}/managedZones/{managedZone}/changes',
+        request_field=u'change',
+        request_type_name=u'DnsChangesCreateRequest',
+        response_type_name=u'Change',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Fetch the representation of an existing Change.
+
+      Args:
+        request: (DnsChangesGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Change) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dns.changes.get',
+        ordered_params=[u'project', u'managedZone', u'changeId'],
+        path_params=[u'changeId', u'managedZone', u'project'],
+        query_params=[],
+        relative_path=u'projects/{project}/managedZones/{managedZone}/changes/{changeId}',
+        request_field='',
+        request_type_name=u'DnsChangesGetRequest',
+        response_type_name=u'Change',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Enumerate Changes to a ResourceRecordSet collection.
+
+      Args:
+        request: (DnsChangesListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ChangesListResponse) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dns.changes.list',
+        ordered_params=[u'project', u'managedZone'],
+        path_params=[u'managedZone', u'project'],
+        query_params=[u'maxResults', u'pageToken', u'sortBy', u'sortOrder'],
+        relative_path=u'projects/{project}/managedZones/{managedZone}/changes',
+        request_field='',
+        request_type_name=u'DnsChangesListRequest',
+        response_type_name=u'ChangesListResponse',
+        supports_download=False,
+    )
+
+  class ManagedZonesService(base_api.BaseApiService):
+    """Service class for the managedZones resource."""
+
+    _NAME = u'managedZones'
+
+    def __init__(self, client):
+      super(DnsV1.ManagedZonesService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Create(self, request, global_params=None):
+      """Create a new ManagedZone.
+
+      Args:
+        request: (DnsManagedZonesCreateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ManagedZone) The response message.
+      """
+      config = self.GetMethodConfig('Create')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dns.managedZones.create',
+        ordered_params=[u'project'],
+        path_params=[u'project'],
+        query_params=[],
+        relative_path=u'projects/{project}/managedZones',
+        request_field=u'managedZone',
+        request_type_name=u'DnsManagedZonesCreateRequest',
+        response_type_name=u'ManagedZone',
+        supports_download=False,
+    )
+
+    def Delete(self, request, global_params=None):
+      """Delete a previously created ManagedZone.
+
+      Args:
+        request: (DnsManagedZonesDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (DnsManagedZonesDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'dns.managedZones.delete',
+        ordered_params=[u'project', u'managedZone'],
+        path_params=[u'managedZone', u'project'],
+        query_params=[],
+        relative_path=u'projects/{project}/managedZones/{managedZone}',
+        request_field='',
+        request_type_name=u'DnsManagedZonesDeleteRequest',
+        response_type_name=u'DnsManagedZonesDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Fetch the representation of an existing ManagedZone.
+
+      Args:
+        request: (DnsManagedZonesGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ManagedZone) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dns.managedZones.get',
+        ordered_params=[u'project', u'managedZone'],
+        path_params=[u'managedZone', u'project'],
+        query_params=[],
+        relative_path=u'projects/{project}/managedZones/{managedZone}',
+        request_field='',
+        request_type_name=u'DnsManagedZonesGetRequest',
+        response_type_name=u'ManagedZone',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Enumerate ManagedZones that have been created but not yet deleted.
+
+      Args:
+        request: (DnsManagedZonesListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ManagedZonesListResponse) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dns.managedZones.list',
+        ordered_params=[u'project'],
+        path_params=[u'project'],
+        query_params=[u'dnsName', u'maxResults', u'pageToken'],
+        relative_path=u'projects/{project}/managedZones',
+        request_field='',
+        request_type_name=u'DnsManagedZonesListRequest',
+        response_type_name=u'ManagedZonesListResponse',
+        supports_download=False,
+    )
+
+  class ProjectsService(base_api.BaseApiService):
+    """Service class for the projects resource."""
+
+    _NAME = u'projects'
+
+    def __init__(self, client):
+      super(DnsV1.ProjectsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Get(self, request, global_params=None):
+      """Fetch the representation of an existing Project.
+
+      Args:
+        request: (DnsProjectsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Project) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dns.projects.get',
+        ordered_params=[u'project'],
+        path_params=[u'project'],
+        query_params=[],
+        relative_path=u'projects/{project}',
+        request_field='',
+        request_type_name=u'DnsProjectsGetRequest',
+        response_type_name=u'Project',
+        supports_download=False,
+    )
+
+  class ResourceRecordSetsService(base_api.BaseApiService):
+    """Service class for the resourceRecordSets resource."""
+
+    _NAME = u'resourceRecordSets'
+
+    def __init__(self, client):
+      super(DnsV1.ResourceRecordSetsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def List(self, request, global_params=None):
+      """Enumerate ResourceRecordSets that have been created but not yet deleted.
+
+      Args:
+        request: (DnsResourceRecordSetsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ResourceRecordSetsListResponse) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dns.resourceRecordSets.list',
+        ordered_params=[u'project', u'managedZone'],
+        path_params=[u'managedZone', u'project'],
+        query_params=[u'maxResults', u'name', u'pageToken', u'type'],
+        relative_path=u'projects/{project}/managedZones/{managedZone}/rrsets',
+        request_field='',
+        request_type_name=u'DnsResourceRecordSetsListRequest',
+        response_type_name=u'ResourceRecordSetsListResponse',
+        supports_download=False,
+    )
diff --git a/samples/dns_sample/dns_v1/dns_v1_messages.py b/samples/dns_sample/dns_v1/dns_v1_messages.py
new file mode 100644
index 0000000..ef474c4
--- /dev/null
+++ b/samples/dns_sample/dns_v1/dns_v1_messages.py
@@ -0,0 +1,425 @@
+"""Generated message classes for dns version v1.
+
+The Google Cloud DNS API provides services for configuring and serving
+authoritative DNS records.
+"""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+from apitools.base.protorpclite import messages as _messages
+
+
+package = 'dns'
+
+
+class Change(_messages.Message):
+  """An atomic update to a collection of ResourceRecordSets.
+
+  Enums:
+    StatusValueValuesEnum: Status of the operation (output only).
+
+  Fields:
+    additions: Which ResourceRecordSets to add?
+    deletions: Which ResourceRecordSets to remove? Must match existing data
+      exactly.
+    id: Unique identifier for the resource; defined by the server (output
+      only).
+    kind: Identifies what kind of resource this is. Value: the fixed string
+      "dns#change".
+    startTime: The time that this operation was started by the server. This is
+      in RFC3339 text format.
+    status: Status of the operation (output only).
+  """
+
+  class StatusValueValuesEnum(_messages.Enum):
+    """Status of the operation (output only).
+
+    Values:
+      done: <no description>
+      pending: <no description>
+    """
+    done = 0
+    pending = 1
+
+  additions = _messages.MessageField('ResourceRecordSet', 1, repeated=True)
+  deletions = _messages.MessageField('ResourceRecordSet', 2, repeated=True)
+  id = _messages.StringField(3)
+  kind = _messages.StringField(4, default=u'dns#change')
+  startTime = _messages.StringField(5)
+  status = _messages.EnumField('StatusValueValuesEnum', 6)
+
+
+class ChangesListResponse(_messages.Message):
+  """The response to a request to enumerate Changes to a ResourceRecordSets
+  collection.
+
+  Fields:
+    changes: The requested changes.
+    kind: Type of resource.
+    nextPageToken: The presence of this field indicates that there exist more
+      results following your last page of results in pagination order. To
+      fetch them, make another list request using this value as your
+      pagination token.  In this way you can retrieve the complete contents of
+      even very large collections one page at a time. However, if the contents
+      of the collection change between the first and last paginated list
+      request, the set of all elements returned will be an inconsistent view
+      of the collection. There is no way to retrieve a "snapshot" of
+      collections larger than the maximum page size.
+  """
+
+  changes = _messages.MessageField('Change', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'dns#changesListResponse')
+  nextPageToken = _messages.StringField(3)
+
+
+class DnsChangesCreateRequest(_messages.Message):
+  """A DnsChangesCreateRequest object.
+
+  Fields:
+    change: A Change resource to be passed as the request body.
+    managedZone: Identifies the managed zone addressed by this request. Can be
+      the managed zone name or id.
+    project: Identifies the project addressed by this request.
+  """
+
+  change = _messages.MessageField('Change', 1)
+  managedZone = _messages.StringField(2, required=True)
+  project = _messages.StringField(3, required=True)
+
+
+class DnsChangesGetRequest(_messages.Message):
+  """A DnsChangesGetRequest object.
+
+  Fields:
+    changeId: The identifier of the requested change, from a previous
+      ResourceRecordSetsChangeResponse.
+    managedZone: Identifies the managed zone addressed by this request. Can be
+      the managed zone name or id.
+    project: Identifies the project addressed by this request.
+  """
+
+  changeId = _messages.StringField(1, required=True)
+  managedZone = _messages.StringField(2, required=True)
+  project = _messages.StringField(3, required=True)
+
+
+class DnsChangesListRequest(_messages.Message):
+  """A DnsChangesListRequest object.
+
+  Enums:
+    SortByValueValuesEnum: Sorting criterion. The only supported value is
+      change sequence.
+
+  Fields:
+    managedZone: Identifies the managed zone addressed by this request. Can be
+      the managed zone name or id.
+    maxResults: Optional. Maximum number of results to be returned. If
+      unspecified, the server will decide how many results to return.
+    pageToken: Optional. A tag returned by a previous list request that was
+      truncated. Use this parameter to continue a previous list request.
+    project: Identifies the project addressed by this request.
+    sortBy: Sorting criterion. The only supported value is change sequence.
+    sortOrder: Sorting order direction: 'ascending' or 'descending'.
+  """
+
+  class SortByValueValuesEnum(_messages.Enum):
+    """Sorting criterion. The only supported value is change sequence.
+
+    Values:
+      changeSequence: <no description>
+    """
+    changeSequence = 0
+
+  managedZone = _messages.StringField(1, required=True)
+  maxResults = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+  pageToken = _messages.StringField(3)
+  project = _messages.StringField(4, required=True)
+  sortBy = _messages.EnumField('SortByValueValuesEnum', 5, default=u'changeSequence')
+  sortOrder = _messages.StringField(6)
+
+
+class DnsManagedZonesCreateRequest(_messages.Message):
+  """A DnsManagedZonesCreateRequest object.
+
+  Fields:
+    managedZone: A ManagedZone resource to be passed as the request body.
+    project: Identifies the project addressed by this request.
+  """
+
+  managedZone = _messages.MessageField('ManagedZone', 1)
+  project = _messages.StringField(2, required=True)
+
+
+class DnsManagedZonesDeleteRequest(_messages.Message):
+  """A DnsManagedZonesDeleteRequest object.
+
+  Fields:
+    managedZone: Identifies the managed zone addressed by this request. Can be
+      the managed zone name or id.
+    project: Identifies the project addressed by this request.
+  """
+
+  managedZone = _messages.StringField(1, required=True)
+  project = _messages.StringField(2, required=True)
+
+
+class DnsManagedZonesDeleteResponse(_messages.Message):
+  """An empty DnsManagedZonesDelete response."""
+
+
+class DnsManagedZonesGetRequest(_messages.Message):
+  """A DnsManagedZonesGetRequest object.
+
+  Fields:
+    managedZone: Identifies the managed zone addressed by this request. Can be
+      the managed zone name or id.
+    project: Identifies the project addressed by this request.
+  """
+
+  managedZone = _messages.StringField(1, required=True)
+  project = _messages.StringField(2, required=True)
+
+
+class DnsManagedZonesListRequest(_messages.Message):
+  """A DnsManagedZonesListRequest object.
+
+  Fields:
+    dnsName: Restricts the list to return only zones with this domain name.
+    maxResults: Optional. Maximum number of results to be returned. If
+      unspecified, the server will decide how many results to return.
+    pageToken: Optional. A tag returned by a previous list request that was
+      truncated. Use this parameter to continue a previous list request.
+    project: Identifies the project addressed by this request.
+  """
+
+  dnsName = _messages.StringField(1)
+  maxResults = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+  pageToken = _messages.StringField(3)
+  project = _messages.StringField(4, required=True)
+
+
+class DnsProjectsGetRequest(_messages.Message):
+  """A DnsProjectsGetRequest object.
+
+  Fields:
+    project: Identifies the project addressed by this request.
+  """
+
+  project = _messages.StringField(1, required=True)
+
+
+class DnsResourceRecordSetsListRequest(_messages.Message):
+  """A DnsResourceRecordSetsListRequest object.
+
+  Fields:
+    managedZone: Identifies the managed zone addressed by this request. Can be
+      the managed zone name or id.
+    maxResults: Optional. Maximum number of results to be returned. If
+      unspecified, the server will decide how many results to return.
+    name: Restricts the list to return only records with this fully qualified
+      domain name.
+    pageToken: Optional. A tag returned by a previous list request that was
+      truncated. Use this parameter to continue a previous list request.
+    project: Identifies the project addressed by this request.
+    type: Restricts the list to return only records of this type. If present,
+      the "name" parameter must also be present.
+  """
+
+  managedZone = _messages.StringField(1, required=True)
+  maxResults = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+  name = _messages.StringField(3)
+  pageToken = _messages.StringField(4)
+  project = _messages.StringField(5, required=True)
+  type = _messages.StringField(6)
+
+
+class ManagedZone(_messages.Message):
+  """A zone is a subtree of the DNS namespace under one administrative
+  responsibility. A ManagedZone is a resource that represents a DNS zone
+  hosted by the Cloud DNS service.
+
+  Fields:
+    creationTime: The time that this resource was created on the server. This
+      is in RFC3339 text format. Output only.
+    description: A mutable string of at most 1024 characters associated with
+      this resource for the user's convenience. Has no effect on the managed
+      zone's function.
+    dnsName: The DNS name of this managed zone, for instance "example.com.".
+    id: Unique identifier for the resource; defined by the server (output
+      only)
+    kind: Identifies what kind of resource this is. Value: the fixed string
+      "dns#managedZone".
+    name: User assigned name for this resource. Must be unique within the
+      project. The name must be 1-32 characters long, must begin with a
+      letter, end with a letter or digit, and only contain lowercase letters,
+      digits or dashes.
+    nameServerSet: Optionally specifies the NameServerSet for this
+      ManagedZone. A NameServerSet is a set of DNS name servers that all host
+      the same ManagedZones. Most users will leave this field unset.
+    nameServers: Delegate your managed_zone to these virtual name servers;
+      defined by the server (output only)
+  """
+
+  creationTime = _messages.StringField(1)
+  description = _messages.StringField(2)
+  dnsName = _messages.StringField(3)
+  id = _messages.IntegerField(4, variant=_messages.Variant.UINT64)
+  kind = _messages.StringField(5, default=u'dns#managedZone')
+  name = _messages.StringField(6)
+  nameServerSet = _messages.StringField(7)
+  nameServers = _messages.StringField(8, repeated=True)
+
+
+class ManagedZonesListResponse(_messages.Message):
+  """A ManagedZonesListResponse object.
+
+  Fields:
+    kind: Type of resource.
+    managedZones: The managed zone resources.
+    nextPageToken: The presence of this field indicates that there exist more
+      results following your last page of results in pagination order. To
+      fetch them, make another list request using this value as your page
+      token.  In this way you can retrieve the complete contents of even very
+      large collections one page at a time. However, if the contents of the
+      collection change between the first and last paginated list request, the
+      set of all elements returned will be an inconsistent view of the
+      collection. There is no way to retrieve a consistent snapshot of a
+      collection larger than the maximum page size.
+  """
+
+  kind = _messages.StringField(1, default=u'dns#managedZonesListResponse')
+  managedZones = _messages.MessageField('ManagedZone', 2, repeated=True)
+  nextPageToken = _messages.StringField(3)
+
+
+class Project(_messages.Message):
+  """A project resource. The project is a top level container for resources
+  including Cloud DNS ManagedZones. Projects can be created only in the APIs
+  console.
+
+  Fields:
+    id: User assigned unique identifier for the resource (output only).
+    kind: Identifies what kind of resource this is. Value: the fixed string
+      "dns#project".
+    number: Unique numeric identifier for the resource; defined by the server
+      (output only).
+    quota: Quotas assigned to this project (output only).
+  """
+
+  id = _messages.StringField(1)
+  kind = _messages.StringField(2, default=u'dns#project')
+  number = _messages.IntegerField(3, variant=_messages.Variant.UINT64)
+  quota = _messages.MessageField('Quota', 4)
+
+
+class Quota(_messages.Message):
+  """Limits associated with a Project.
+
+  Fields:
+    kind: Identifies what kind of resource this is. Value: the fixed string
+      "dns#quota".
+    managedZones: Maximum allowed number of managed zones in the project.
+    resourceRecordsPerRrset: Maximum allowed number of ResourceRecords per
+      ResourceRecordSet.
+    rrsetAdditionsPerChange: Maximum allowed number of ResourceRecordSets to
+      add per ChangesCreateRequest.
+    rrsetDeletionsPerChange: Maximum allowed number of ResourceRecordSets to
+      delete per ChangesCreateRequest.
+    rrsetsPerManagedZone: Maximum allowed number of ResourceRecordSets per
+      zone in the project.
+    totalRrdataSizePerChange: Maximum allowed size for total rrdata in one
+      ChangesCreateRequest in bytes.
+  """
+
+  kind = _messages.StringField(1, default=u'dns#quota')
+  managedZones = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+  resourceRecordsPerRrset = _messages.IntegerField(3, variant=_messages.Variant.INT32)
+  rrsetAdditionsPerChange = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+  rrsetDeletionsPerChange = _messages.IntegerField(5, variant=_messages.Variant.INT32)
+  rrsetsPerManagedZone = _messages.IntegerField(6, variant=_messages.Variant.INT32)
+  totalRrdataSizePerChange = _messages.IntegerField(7, variant=_messages.Variant.INT32)
+
+
+class ResourceRecordSet(_messages.Message):
+  """A unit of data that will be returned by the DNS servers.
+
+  Fields:
+    kind: Identifies what kind of resource this is. Value: the fixed string
+      "dns#resourceRecordSet".
+    name: For example, www.example.com.
+    rrdatas: As defined in RFC 1035 (section 5) and RFC 1034 (section 3.6.1).
+    ttl: Number of seconds that this ResourceRecordSet can be cached by
+      resolvers.
+    type: The identifier of a supported record type, for example, A, AAAA, MX,
+      TXT, and so on.
+  """
+
+  kind = _messages.StringField(1, default=u'dns#resourceRecordSet')
+  name = _messages.StringField(2)
+  rrdatas = _messages.StringField(3, repeated=True)
+  ttl = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+  type = _messages.StringField(5)
+
+
+class ResourceRecordSetsListResponse(_messages.Message):
+  """A ResourceRecordSetsListResponse object.
+
+  Fields:
+    kind: Type of resource.
+    nextPageToken: The presence of this field indicates that there exist more
+      results following your last page of results in pagination order. To
+      fetch them, make another list request using this value as your
+      pagination token.  In this way you can retrieve the complete contents of
+      even very large collections one page at a time. However, if the contents
+      of the collection change between the first and last paginated list
+      request, the set of all elements returned will be an inconsistent view
+      of the collection. There is no way to retrieve a consistent snapshot of
+      a collection larger than the maximum page size.
+    rrsets: The resource record set resources.
+  """
+
+  kind = _messages.StringField(1, default=u'dns#resourceRecordSetsListResponse')
+  nextPageToken = _messages.StringField(2)
+  rrsets = _messages.MessageField('ResourceRecordSet', 3, repeated=True)
+
+
+class StandardQueryParameters(_messages.Message):
+  """Query parameters accepted by all methods.
+
+  Enums:
+    AltValueValuesEnum: Data format for the response.
+
+  Fields:
+    alt: Data format for the response.
+    fields: Selector specifying which fields to include in a partial response.
+    key: API key. Your API key identifies your project and provides you with
+      API access, quota, and reports. Required unless you provide an OAuth 2.0
+      token.
+    oauth_token: OAuth 2.0 token for the current user.
+    prettyPrint: Returns response with indentations and line breaks.
+    quotaUser: Available to use for quota purposes for server-side
+      applications. Can be any arbitrary string assigned to a user, but should
+      not exceed 40 characters. Overrides userIp if both are provided.
+    trace: A tracing token of the form "token:<tokenid>" to include in api
+      requests.
+    userIp: IP address of the site where the request originates. Use this if
+      you want to enforce per-user limits.
+  """
+
+  class AltValueValuesEnum(_messages.Enum):
+    """Data format for the response.
+
+    Values:
+      json: Responses with Content-Type of application/json
+    """
+    json = 0
+
+  alt = _messages.EnumField('AltValueValuesEnum', 1, default=u'json')
+  fields = _messages.StringField(2)
+  key = _messages.StringField(3)
+  oauth_token = _messages.StringField(4)
+  prettyPrint = _messages.BooleanField(5, default=True)
+  quotaUser = _messages.StringField(6)
+  trace = _messages.StringField(7)
+  userIp = _messages.StringField(8)
+
+
diff --git a/samples/dns_sample/gen_dns_client_test.py b/samples/dns_sample/gen_dns_client_test.py
new file mode 100644
index 0000000..dff6812
--- /dev/null
+++ b/samples/dns_sample/gen_dns_client_test.py
@@ -0,0 +1,86 @@
+#
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test for generated sample module."""
+
+import unittest2
+import six
+
+from apitools.base.py import list_pager
+from apitools.base.py.testing import mock
+
+from samples.dns_sample.dns_v1 import dns_v1_client
+from samples.dns_sample.dns_v1 import dns_v1_messages
+
+
+class DnsGenClientSanityTest(unittest2.TestCase):
+
+    def testBaseUrl(self):
+        self.assertEquals(u'https://www.googleapis.com/dns/v1/',
+                          dns_v1_client.DnsV1.BASE_URL)
+
+    def testMessagesModule(self):
+        self.assertEquals(dns_v1_messages, dns_v1_client.DnsV1.MESSAGES_MODULE)
+
+    def testAttributes(self):
+        inner_classes = set([])
+        for key, value in dns_v1_client.DnsV1.__dict__.items():
+            if isinstance(value, six.class_types):
+                inner_classes.add(key)
+        self.assertEquals(set([
+            'ChangesService',
+            'ProjectsService',
+            'ManagedZonesService',
+            'ResourceRecordSetsService']), inner_classes)
+
+
+class DnsGenClientTest(unittest2.TestCase):
+
+    def setUp(self):
+        self.mocked_dns_v1 = mock.Client(dns_v1_client.DnsV1)
+        self.mocked_dns_v1.Mock()
+        self.addCleanup(self.mocked_dns_v1.Unmock)
+
+    def testFlatPath(self):
+        get_method_config = self.mocked_dns_v1.projects.GetMethodConfig('Get')
+        self.assertIsNone(get_method_config.flat_path)
+        self.assertEquals('projects/{project}',
+                          get_method_config.relative_path)
+
+    def testRecordSetList(self):
+        response_record_set = dns_v1_messages.ResourceRecordSet(
+            kind=u"dns#resourceRecordSet",
+            name=u"zone.com.",
+            rrdatas=[u"1.2.3.4"],
+            ttl=21600,
+            type=u"A")
+        self.mocked_dns_v1.resourceRecordSets.List.Expect(
+            dns_v1_messages.DnsResourceRecordSetsListRequest(
+                project=u'my-project',
+                managedZone=u'test_zone_name',
+                type=u'green',
+                maxResults=100),
+            dns_v1_messages.ResourceRecordSetsListResponse(
+                rrsets=[response_record_set]))
+
+        results = list(list_pager.YieldFromList(
+            self.mocked_dns_v1.resourceRecordSets,
+            dns_v1_messages.DnsResourceRecordSetsListRequest(
+                project='my-project',
+                managedZone='test_zone_name',
+                type='green'),
+            limit=100, field='rrsets'))
+
+        self.assertEquals([response_record_set], results)
diff --git a/samples/fusiontables_sample/__init__.py b/samples/fusiontables_sample/__init__.py
new file mode 100644
index 0000000..58e0d91
--- /dev/null
+++ b/samples/fusiontables_sample/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/samples/fusiontables_sample/fusiontables_v1.json b/samples/fusiontables_sample/fusiontables_v1.json
new file mode 100644
index 0000000..297c671
--- /dev/null
+++ b/samples/fusiontables_sample/fusiontables_v1.json
@@ -0,0 +1,1826 @@
+{
+ "kind": "discovery#restDescription",
+ "etag": "\"C5oy1hgQsABtYOYIOXWcR3BgYqU/-xDlQ3Z80n_rfxYaz7dDf-mP00c\"",
+ "discoveryVersion": "v1",
+ "id": "fusiontables:v1",
+ "name": "fusiontables",
+ "version": "v1",
+ "revision": "20160526",
+ "title": "Fusion Tables API",
+ "description": "API for working with Fusion Tables data.",
+ "ownerDomain": "google.com",
+ "ownerName": "Google",
+ "icons": {
+  "x16": "http://www.google.com/images/icons/product/search-16.gif",
+  "x32": "http://www.google.com/images/icons/product/search-32.gif"
+ },
+ "documentationLink": "https://developers.google.com/fusiontables",
+ "protocol": "rest",
+ "baseUrl": "https://www.googleapis.com/fusiontables/v1/",
+ "basePath": "/fusiontables/v1/",
+ "rootUrl": "https://www.googleapis.com/",
+ "servicePath": "fusiontables/v1/",
+ "batchPath": "batch",
+ "parameters": {
+  "alt": {
+   "type": "string",
+   "description": "Data format for the response.",
+   "default": "json",
+   "enum": [
+    "csv",
+    "json"
+   ],
+   "enumDescriptions": [
+    "Responses with Content-Type of text/csv",
+    "Responses with Content-Type of application/json"
+   ],
+   "location": "query"
+  },
+  "fields": {
+   "type": "string",
+   "description": "Selector specifying which fields to include in a partial response.",
+   "location": "query"
+  },
+  "key": {
+   "type": "string",
+   "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
+   "location": "query"
+  },
+  "oauth_token": {
+   "type": "string",
+   "description": "OAuth 2.0 token for the current user.",
+   "location": "query"
+  },
+  "prettyPrint": {
+   "type": "boolean",
+   "description": "Returns response with indentations and line breaks.",
+   "default": "true",
+   "location": "query"
+  },
+  "quotaUser": {
+   "type": "string",
+   "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.",
+   "location": "query"
+  },
+  "userIp": {
+   "type": "string",
+   "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.",
+   "location": "query"
+  }
+ },
+ "auth": {
+  "oauth2": {
+   "scopes": {
+    "https://www.googleapis.com/auth/fusiontables": {
+     "description": "Manage your Fusion Tables"
+    },
+    "https://www.googleapis.com/auth/fusiontables.readonly": {
+     "description": "View your Fusion Tables"
+    }
+   }
+  }
+ },
+ "schemas": {
+  "Bucket": {
+   "id": "Bucket",
+   "type": "object",
+   "description": "Specifies the minimum and maximum values, the color, opacity, icon and weight of a bucket within a StyleSetting.",
+   "properties": {
+    "color": {
+     "type": "string",
+     "description": "Color of line or the interior of a polygon in #RRGGBB format."
+    },
+    "icon": {
+     "type": "string",
+     "description": "Icon name used for a point."
+    },
+    "max": {
+     "type": "number",
+     "description": "Maximum value in the selected column for a row to be styled according to the bucket color, opacity, icon, or weight.",
+     "format": "double"
+    },
+    "min": {
+     "type": "number",
+     "description": "Minimum value in the selected column for a row to be styled according to the bucket color, opacity, icon, or weight.",
+     "format": "double"
+    },
+    "opacity": {
+     "type": "number",
+     "description": "Opacity of the color: 0.0 (transparent) to 1.0 (opaque).",
+     "format": "double"
+    },
+    "weight": {
+     "type": "integer",
+     "description": "Width of a line (in pixels).",
+     "format": "int32"
+    }
+   }
+  },
+  "Column": {
+   "id": "Column",
+   "type": "object",
+   "description": "Specifies the id, name and type of a column in a table.",
+   "properties": {
+    "baseColumn": {
+     "type": "object",
+     "description": "Optional identifier of the base column. If present, this column is derived from the specified base column.",
+     "properties": {
+      "columnId": {
+       "type": "integer",
+       "description": "The id of the column in the base table from which this column is derived.",
+       "format": "int32"
+      },
+      "tableIndex": {
+       "type": "integer",
+       "description": "Offset to the entry in the list of base tables in the table definition.",
+       "format": "int32"
+      }
+     }
+    },
+    "columnId": {
+     "type": "integer",
+     "description": "Identifier for the column.",
+     "format": "int32"
+    },
+    "description": {
+     "type": "string",
+     "description": "Optional column description."
+    },
+    "graph_predicate": {
+     "type": "string",
+     "description": "Optional column predicate. Used to map table to graph data model (subject,predicate,object) See http://www.w3.org/TR/2014/REC-rdf11-concepts-20140225/#data-model"
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type name: a template for an individual column.",
+     "default": "fusiontables#column"
+    },
+    "name": {
+     "type": "string",
+     "description": "Required name of the column.",
+     "annotations": {
+      "required": [
+       "fusiontables.column.insert"
+      ]
+     }
+    },
+    "type": {
+     "type": "string",
+     "description": "Required type of the column.",
+     "annotations": {
+      "required": [
+       "fusiontables.column.insert"
+      ]
+     }
+    }
+   }
+  },
+  "ColumnList": {
+   "id": "ColumnList",
+   "type": "object",
+   "description": "Represents a list of columns in a table.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "List of all requested columns.",
+     "items": {
+      "$ref": "Column"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type name: a list of all columns.",
+     "default": "fusiontables#columnList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "Token used to access the next page of this result. No token is displayed if there are no more pages left."
+    },
+    "totalItems": {
+     "type": "integer",
+     "description": "Total number of columns for the table.",
+     "format": "int32"
+    }
+   }
+  },
+  "Geometry": {
+   "id": "Geometry",
+   "type": "object",
+   "description": "Represents a Geometry object.",
+   "properties": {
+    "geometries": {
+     "type": "array",
+     "description": "The list of geometries in this geometry collection.",
+     "items": {
+      "type": "any"
+     }
+    },
+    "geometry": {
+     "type": "any"
+    },
+    "type": {
+     "type": "string",
+     "description": "Type: A collection of geometries.",
+     "default": "GeometryCollection"
+    }
+   }
+  },
+  "Import": {
+   "id": "Import",
+   "type": "object",
+   "description": "Represents an import request.",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Type name: a template for an import request.",
+     "default": "fusiontables#import"
+    },
+    "numRowsReceived": {
+     "type": "string",
+     "description": "The number of rows received from the import request.",
+     "format": "int64"
+    }
+   }
+  },
+  "Line": {
+   "id": "Line",
+   "type": "object",
+   "description": "Represents a line geometry.",
+   "properties": {
+    "coordinates": {
+     "type": "array",
+     "description": "The coordinates that define the line.",
+     "items": {
+      "type": "array",
+      "items": {
+       "type": "number",
+       "format": "double"
+      }
+     }
+    },
+    "type": {
+     "type": "string",
+     "description": "Type: A line geometry.",
+     "default": "LineString"
+    }
+   }
+  },
+  "LineStyle": {
+   "id": "LineStyle",
+   "type": "object",
+   "description": "Represents a LineStyle within a StyleSetting",
+   "properties": {
+    "strokeColor": {
+     "type": "string",
+     "description": "Color of the line in #RRGGBB format."
+    },
+    "strokeColorStyler": {
+     "$ref": "StyleFunction",
+     "description": "Column-value, gradient or buckets styler that is used to determine the line color and opacity."
+    },
+    "strokeOpacity": {
+     "type": "number",
+     "description": "Opacity of the line : 0.0 (transparent) to 1.0 (opaque).",
+     "format": "double"
+    },
+    "strokeWeight": {
+     "type": "integer",
+     "description": "Width of the line in pixels.",
+     "format": "int32"
+    },
+    "strokeWeightStyler": {
+     "$ref": "StyleFunction",
+     "description": "Column-value or bucket styler that is used to determine the width of the line."
+    }
+   }
+  },
+  "Point": {
+   "id": "Point",
+   "type": "object",
+   "description": "Represents a point object.",
+   "properties": {
+    "coordinates": {
+     "type": "array",
+     "description": "The coordinates that define the point.",
+     "items": {
+      "type": "number",
+      "format": "double"
+     }
+    },
+    "type": {
+     "type": "string",
+     "description": "Point: A point geometry.",
+     "default": "Point"
+    }
+   }
+  },
+  "PointStyle": {
+   "id": "PointStyle",
+   "type": "object",
+   "description": "Represents a PointStyle within a StyleSetting",
+   "properties": {
+    "iconName": {
+     "type": "string",
+     "description": "Name of the icon. Use values defined in http://www.google.com/fusiontables/DataSource?dsrcid=308519"
+    },
+    "iconStyler": {
+     "$ref": "StyleFunction",
+     "description": "Column or a bucket value from which the icon name is to be determined."
+    }
+   }
+  },
+  "Polygon": {
+   "id": "Polygon",
+   "type": "object",
+   "description": "Represents a polygon object.",
+   "properties": {
+    "coordinates": {
+     "type": "array",
+     "description": "The coordinates that define the polygon.",
+     "items": {
+      "type": "array",
+      "items": {
+       "type": "array",
+       "items": {
+        "type": "number",
+        "format": "double"
+       }
+      }
+     }
+    },
+    "type": {
+     "type": "string",
+     "description": "Type: A polygon geometry.",
+     "default": "Polygon"
+    }
+   }
+  },
+  "PolygonStyle": {
+   "id": "PolygonStyle",
+   "type": "object",
+   "description": "Represents a PolygonStyle within a StyleSetting",
+   "properties": {
+    "fillColor": {
+     "type": "string",
+     "description": "Color of the interior of the polygon in #RRGGBB format."
+    },
+    "fillColorStyler": {
+     "$ref": "StyleFunction",
+     "description": "Column-value, gradient, or bucket styler that is used to determine the interior color and opacity of the polygon."
+    },
+    "fillOpacity": {
+     "type": "number",
+     "description": "Opacity of the interior of the polygon: 0.0 (transparent) to 1.0 (opaque).",
+     "format": "double"
+    },
+    "strokeColor": {
+     "type": "string",
+     "description": "Color of the polygon border in #RRGGBB format."
+    },
+    "strokeColorStyler": {
+     "$ref": "StyleFunction",
+     "description": "Column-value, gradient or buckets styler that is used to determine the border color and opacity."
+    },
+    "strokeOpacity": {
+     "type": "number",
+     "description": "Opacity of the polygon border: 0.0 (transparent) to 1.0 (opaque).",
+     "format": "double"
+    },
+    "strokeWeight": {
+     "type": "integer",
+     "description": "Width of the polyon border in pixels.",
+     "format": "int32"
+    },
+    "strokeWeightStyler": {
+     "$ref": "StyleFunction",
+     "description": "Column-value or bucket styler that is used to determine the width of the polygon border."
+    }
+   }
+  },
+  "Sqlresponse": {
+   "id": "Sqlresponse",
+   "type": "object",
+   "description": "Represents a response to an sql statement.",
+   "properties": {
+    "columns": {
+     "type": "array",
+     "description": "Columns in the table.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type name: a template for an individual table.",
+     "default": "fusiontables#sqlresponse"
+    },
+    "rows": {
+     "type": "array",
+     "description": "The rows in the table. For each cell we print out whatever cell value (e.g., numeric, string) exists. Thus it is important that each cell contains only one value.",
+     "items": {
+      "type": "array",
+      "items": {
+       "type": "any"
+      }
+     }
+    }
+   }
+  },
+  "StyleFunction": {
+   "id": "StyleFunction",
+   "type": "object",
+   "description": "Represents a StyleFunction within a StyleSetting",
+   "properties": {
+    "buckets": {
+     "type": "array",
+     "description": "Bucket function that assigns a style based on the range a column value falls into.",
+     "items": {
+      "$ref": "Bucket"
+     }
+    },
+    "columnName": {
+     "type": "string",
+     "description": "Name of the column whose value is used in the style.",
+     "annotations": {
+      "required": [
+       "fusiontables.style.insert"
+      ]
+     }
+    },
+    "gradient": {
+     "type": "object",
+     "description": "Gradient function that interpolates a range of colors based on column value.",
+     "properties": {
+      "colors": {
+       "type": "array",
+       "description": "Array with two or more colors.",
+       "items": {
+        "type": "object",
+        "properties": {
+         "color": {
+          "type": "string",
+          "description": "Color in #RRGGBB format."
+         },
+         "opacity": {
+          "type": "number",
+          "description": "Opacity of the color: 0.0 (transparent) to 1.0 (opaque).",
+          "format": "double"
+         }
+        }
+       }
+      },
+      "max": {
+       "type": "number",
+       "description": "Higher-end of the interpolation range: rows with this value will be assigned to colors[n-1].",
+       "format": "double"
+      },
+      "min": {
+       "type": "number",
+       "description": "Lower-end of the interpolation range: rows with this value will be assigned to colors[0].",
+       "format": "double"
+      }
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Stylers can be one of three kinds: \"fusiontables#fromColumn\" if the column value is to be used as is, i.e., the column values can have colors in #RRGGBBAA format or integer line widths or icon names; \"fusiontables#gradient\" if the styling of the row is to be based on applying the gradient function on the column value; or \"fusiontables#buckets\" if the styling is to based on the bucket into which the the column value falls."
+    }
+   }
+  },
+  "StyleSetting": {
+   "id": "StyleSetting",
+   "type": "object",
+   "description": "Represents a complete StyleSettings object. The primary key is a combination of the tableId and a styleId.",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Type name: an individual style setting. A StyleSetting contains the style defintions for points, lines, and polygons in a table. Since a table can have any one or all of them, a style definition can have point, line and polygon style definitions.",
+     "default": "fusiontables#styleSetting"
+    },
+    "markerOptions": {
+     "$ref": "PointStyle",
+     "description": "Style definition for points in the table."
+    },
+    "name": {
+     "type": "string",
+     "description": "Optional name for the style setting."
+    },
+    "polygonOptions": {
+     "$ref": "PolygonStyle",
+     "description": "Style definition for polygons in the table."
+    },
+    "polylineOptions": {
+     "$ref": "LineStyle",
+     "description": "Style definition for lines in the table."
+    },
+    "styleId": {
+     "type": "integer",
+     "description": "Identifier for the style setting (unique only within tables).",
+     "format": "int32"
+    },
+    "tableId": {
+     "type": "string",
+     "description": "Identifier for the table."
+    }
+   }
+  },
+  "StyleSettingList": {
+   "id": "StyleSettingList",
+   "type": "object",
+   "description": "Represents a list of styles for a given table.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "All requested style settings.",
+     "items": {
+      "$ref": "StyleSetting"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type name: in this case, a list of style settings.",
+     "default": "fusiontables#styleSettingList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "Token used to access the next page of this result. No token is displayed if there are no more pages left."
+    },
+    "totalItems": {
+     "type": "integer",
+     "description": "Total number of styles for the table.",
+     "format": "int32"
+    }
+   }
+  },
+  "Table": {
+   "id": "Table",
+   "type": "object",
+   "description": "Represents a table. Specifies the name, whether it is exportable, description, attribution, and attribution link.",
+   "properties": {
+    "attribution": {
+     "type": "string",
+     "description": "Optional attribution assigned to the table."
+    },
+    "attributionLink": {
+     "type": "string",
+     "description": "Optional link for attribution."
+    },
+    "baseTableIds": {
+     "type": "array",
+     "description": "Optional base table identifier if this table is a view or merged table.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "columns": {
+     "type": "array",
+     "description": "Columns in the table.",
+     "items": {
+      "$ref": "Column"
+     },
+     "annotations": {
+      "required": [
+       "fusiontables.table.insert",
+       "fusiontables.table.update"
+      ]
+     }
+    },
+    "description": {
+     "type": "string",
+     "description": "Optional description assigned to the table."
+    },
+    "isExportable": {
+     "type": "boolean",
+     "description": "Variable for whether table is exportable.",
+     "annotations": {
+      "required": [
+       "fusiontables.table.insert",
+       "fusiontables.table.update"
+      ]
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type name: a template for an individual table.",
+     "default": "fusiontables#table"
+    },
+    "name": {
+     "type": "string",
+     "description": "Name assigned to a table.",
+     "annotations": {
+      "required": [
+       "fusiontables.table.insert",
+       "fusiontables.table.update"
+      ]
+     }
+    },
+    "sql": {
+     "type": "string",
+     "description": "Optional sql that encodes the table definition for derived tables."
+    },
+    "tableId": {
+     "type": "string",
+     "description": "Encrypted unique alphanumeric identifier for the table."
+    }
+   }
+  },
+  "TableList": {
+   "id": "TableList",
+   "type": "object",
+   "description": "Represents a list of tables.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "List of all requested tables.",
+     "items": {
+      "$ref": "Table"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type name: a list of all tables.",
+     "default": "fusiontables#tableList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "Token used to access the next page of this result. No token is displayed if there are no more pages left."
+    }
+   }
+  },
+  "Task": {
+   "id": "Task",
+   "type": "object",
+   "description": "Specifies the identifier, name, and type of a task in a table.",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "Type of the resource. This is always \"fusiontables#task\".",
+     "default": "fusiontables#task"
+    },
+    "progress": {
+     "type": "string",
+     "description": "An indication of task progress."
+    },
+    "started": {
+     "type": "boolean",
+     "description": "false while the table is busy with some other task. true if this background task is currently running."
+    },
+    "taskId": {
+     "type": "string",
+     "description": "Identifier for the task.",
+     "format": "int64"
+    },
+    "type": {
+     "type": "string",
+     "description": "Type of background task. One of  DELETE_ROWS Deletes one or more rows from the table. ADD_ROWS \"Adds one or more rows to a table. Includes importing data into a new table and importing more rows into an existing table. ADD_COLUMN Adds a new column to the table. CHANGE_TYPE Changes the type of a column."
+    }
+   }
+  },
+  "TaskList": {
+   "id": "TaskList",
+   "type": "object",
+   "description": "Represents a list of tasks for a table.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "List of all requested tasks.",
+     "items": {
+      "$ref": "Task"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type of the resource. This is always \"fusiontables#taskList\".",
+     "default": "fusiontables#taskList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "Token used to access the next page of this result. No token is displayed if there are no more pages left."
+    },
+    "totalItems": {
+     "type": "integer",
+     "description": "Total number of tasks for the table.",
+     "format": "int32"
+    }
+   }
+  },
+  "Template": {
+   "id": "Template",
+   "type": "object",
+   "description": "Represents the contents of InfoWindow templates.",
+   "properties": {
+    "automaticColumnNames": {
+     "type": "array",
+     "description": "List of columns from which the template is to be automatically constructed. Only one of body or automaticColumns can be specified.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "body": {
+     "type": "string",
+     "description": "Body of the template. It contains HTML with {column_name} to insert values from a particular column. The body is sanitized to remove certain tags, e.g., script. Only one of body or automaticColumns can be specified."
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type name: a template for the info window contents. The template can either include an HTML body or a list of columns from which the template is computed automatically.",
+     "default": "fusiontables#template"
+    },
+    "name": {
+     "type": "string",
+     "description": "Optional name assigned to a template."
+    },
+    "tableId": {
+     "type": "string",
+     "description": "Identifier for the table for which the template is defined."
+    },
+    "templateId": {
+     "type": "integer",
+     "description": "Identifier for the template, unique within the context of a particular table.",
+     "format": "int32"
+    }
+   }
+  },
+  "TemplateList": {
+   "id": "TemplateList",
+   "type": "object",
+   "description": "Represents a list of templates for a given table.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "List of all requested templates.",
+     "items": {
+      "$ref": "Template"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "Type name: a list of all templates.",
+     "default": "fusiontables#templateList"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "Token used to access the next page of this result. No token is displayed if there are no more pages left."
+    },
+    "totalItems": {
+     "type": "integer",
+     "description": "Total number of templates for the table.",
+     "format": "int32"
+    }
+   }
+  }
+ },
+ "resources": {
+  "column": {
+   "methods": {
+    "delete": {
+     "id": "fusiontables.column.delete",
+     "path": "tables/{tableId}/columns/{columnId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes the column.",
+     "parameters": {
+      "columnId": {
+       "type": "string",
+       "description": "Name or identifier for the column being deleted.",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table from which the column is being deleted.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "columnId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "get": {
+     "id": "fusiontables.column.get",
+     "path": "tables/{tableId}/columns/{columnId}",
+     "httpMethod": "GET",
+     "description": "Retrieves a specific column by its id.",
+     "parameters": {
+      "columnId": {
+       "type": "string",
+       "description": "Name or identifier for the column that is being requested.",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table to which the column belongs.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "columnId"
+     ],
+     "response": {
+      "$ref": "Column"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "insert": {
+     "id": "fusiontables.column.insert",
+     "path": "tables/{tableId}/columns",
+     "httpMethod": "POST",
+     "description": "Adds a new column to the table.",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table for which a new column is being added.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "request": {
+      "$ref": "Column"
+     },
+     "response": {
+      "$ref": "Column"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "list": {
+     "id": "fusiontables.column.list",
+     "path": "tables/{tableId}/columns",
+     "httpMethod": "GET",
+     "description": "Retrieves a list of columns.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of columns to return. Optional. Default is 5.",
+       "format": "uint32",
+       "minimum": "0",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Continuation token specifying which result page to return. Optional.",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table whose columns are being listed.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "response": {
+      "$ref": "ColumnList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "patch": {
+     "id": "fusiontables.column.patch",
+     "path": "tables/{tableId}/columns/{columnId}",
+     "httpMethod": "PATCH",
+     "description": "Updates the name or type of an existing column. This method supports patch semantics.",
+     "parameters": {
+      "columnId": {
+       "type": "string",
+       "description": "Name or identifier for the column that is being updated.",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table for which the column is being updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "columnId"
+     ],
+     "request": {
+      "$ref": "Column"
+     },
+     "response": {
+      "$ref": "Column"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "update": {
+     "id": "fusiontables.column.update",
+     "path": "tables/{tableId}/columns/{columnId}",
+     "httpMethod": "PUT",
+     "description": "Updates the name or type of an existing column.",
+     "parameters": {
+      "columnId": {
+       "type": "string",
+       "description": "Name or identifier for the column that is being updated.",
+       "required": true,
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table for which the column is being updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "columnId"
+     ],
+     "request": {
+      "$ref": "Column"
+     },
+     "response": {
+      "$ref": "Column"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    }
+   }
+  },
+  "query": {
+   "methods": {
+    "sql": {
+     "id": "fusiontables.query.sql",
+     "path": "query",
+     "httpMethod": "POST",
+     "description": "Executes an SQL SELECT/INSERT/UPDATE/DELETE/SHOW/DESCRIBE/CREATE statement.",
+     "parameters": {
+      "hdrs": {
+       "type": "boolean",
+       "description": "Should column names be included (in the first row)?. Default is true.",
+       "location": "query"
+      },
+      "sql": {
+       "type": "string",
+       "description": "An SQL SELECT/SHOW/DESCRIBE/INSERT/UPDATE/DELETE/CREATE statement.",
+       "required": true,
+       "location": "query"
+      },
+      "typed": {
+       "type": "boolean",
+       "description": "Should typed values be returned in the (JSON) response -- numbers for numeric values and parsed geometries for KML values? Default is true.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "sql"
+     ],
+     "response": {
+      "$ref": "Sqlresponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ],
+     "supportsMediaDownload": true,
+     "useMediaDownloadService": true
+    },
+    "sqlGet": {
+     "id": "fusiontables.query.sqlGet",
+     "path": "query",
+     "httpMethod": "GET",
+     "description": "Executes an SQL SELECT/SHOW/DESCRIBE statement.",
+     "parameters": {
+      "hdrs": {
+       "type": "boolean",
+       "description": "Should column names be included (in the first row)?. Default is true.",
+       "location": "query"
+      },
+      "sql": {
+       "type": "string",
+       "description": "An SQL SELECT/SHOW/DESCRIBE statement.",
+       "required": true,
+       "location": "query"
+      },
+      "typed": {
+       "type": "boolean",
+       "description": "Should typed values be returned in the (JSON) response -- numbers for numeric values and parsed geometries for KML values? Default is true.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "sql"
+     ],
+     "response": {
+      "$ref": "Sqlresponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ],
+     "supportsMediaDownload": true,
+     "useMediaDownloadService": true
+    }
+   }
+  },
+  "style": {
+   "methods": {
+    "delete": {
+     "id": "fusiontables.style.delete",
+     "path": "tables/{tableId}/styles/{styleId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes a style.",
+     "parameters": {
+      "styleId": {
+       "type": "integer",
+       "description": "Identifier (within a table) for the style being deleted",
+       "required": true,
+       "format": "int32",
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table from which the style is being deleted",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "styleId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "get": {
+     "id": "fusiontables.style.get",
+     "path": "tables/{tableId}/styles/{styleId}",
+     "httpMethod": "GET",
+     "description": "Gets a specific style.",
+     "parameters": {
+      "styleId": {
+       "type": "integer",
+       "description": "Identifier (integer) for a specific style in a table",
+       "required": true,
+       "format": "int32",
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table to which the requested style belongs",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "styleId"
+     ],
+     "response": {
+      "$ref": "StyleSetting"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "insert": {
+     "id": "fusiontables.style.insert",
+     "path": "tables/{tableId}/styles",
+     "httpMethod": "POST",
+     "description": "Adds a new style for the table.",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table for which a new style is being added",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "request": {
+      "$ref": "StyleSetting"
+     },
+     "response": {
+      "$ref": "StyleSetting"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "list": {
+     "id": "fusiontables.style.list",
+     "path": "tables/{tableId}/styles",
+     "httpMethod": "GET",
+     "description": "Retrieves a list of styles.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of styles to return. Optional. Default is 5.",
+       "format": "uint32",
+       "minimum": "0",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Continuation token specifying which result page to return. Optional.",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table whose styles are being listed",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "response": {
+      "$ref": "StyleSettingList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "patch": {
+     "id": "fusiontables.style.patch",
+     "path": "tables/{tableId}/styles/{styleId}",
+     "httpMethod": "PATCH",
+     "description": "Updates an existing style. This method supports patch semantics.",
+     "parameters": {
+      "styleId": {
+       "type": "integer",
+       "description": "Identifier (within a table) for the style being updated.",
+       "required": true,
+       "format": "int32",
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table whose style is being updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "styleId"
+     ],
+     "request": {
+      "$ref": "StyleSetting"
+     },
+     "response": {
+      "$ref": "StyleSetting"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "update": {
+     "id": "fusiontables.style.update",
+     "path": "tables/{tableId}/styles/{styleId}",
+     "httpMethod": "PUT",
+     "description": "Updates an existing style.",
+     "parameters": {
+      "styleId": {
+       "type": "integer",
+       "description": "Identifier (within a table) for the style being updated.",
+       "required": true,
+       "format": "int32",
+       "location": "path"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table whose style is being updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "styleId"
+     ],
+     "request": {
+      "$ref": "StyleSetting"
+     },
+     "response": {
+      "$ref": "StyleSetting"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    }
+   }
+  },
+  "table": {
+   "methods": {
+    "copy": {
+     "id": "fusiontables.table.copy",
+     "path": "tables/{tableId}/copy",
+     "httpMethod": "POST",
+     "description": "Copies a table.",
+     "parameters": {
+      "copyPresentation": {
+       "type": "boolean",
+       "description": "Whether to also copy tabs, styles, and templates. Default is false.",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "ID of the table that is being copied.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "delete": {
+     "id": "fusiontables.table.delete",
+     "path": "tables/{tableId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes a table.",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "ID of the table that is being deleted.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "get": {
+     "id": "fusiontables.table.get",
+     "path": "tables/{tableId}",
+     "httpMethod": "GET",
+     "description": "Retrieves a specific table by its id.",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Identifier(ID) for the table being requested.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "importRows": {
+     "id": "fusiontables.table.importRows",
+     "path": "tables/{tableId}/import",
+     "httpMethod": "POST",
+     "description": "Import more rows into a table.",
+     "parameters": {
+      "delimiter": {
+       "type": "string",
+       "description": "The delimiter used to separate cell values. This can only consist of a single character. Default is ','.",
+       "location": "query"
+      },
+      "encoding": {
+       "type": "string",
+       "description": "The encoding of the content. Default is UTF-8. Use 'auto-detect' if you are unsure of the encoding.",
+       "location": "query"
+      },
+      "endLine": {
+       "type": "integer",
+       "description": "The index of the last line from which to start importing, exclusive. Thus, the number of imported lines is endLine - startLine. If this parameter is not provided, the file will be imported until the last line of the file. If endLine is negative, then the imported content will exclude the last endLine lines. That is, if endline is negative, no line will be imported whose index is greater than N + endLine where N is the number of lines in the file, and the number of imported lines will be N + endLine - startLine.",
+       "format": "int32",
+       "location": "query"
+      },
+      "isStrict": {
+       "type": "boolean",
+       "description": "Whether the CSV must have the same number of values for each row. If false, rows with fewer values will be padded with empty values. Default is true.",
+       "location": "query"
+      },
+      "startLine": {
+       "type": "integer",
+       "description": "The index of the first line from which to start importing, inclusive. Default is 0.",
+       "format": "int32",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "The table into which new rows are being imported.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "response": {
+      "$ref": "Import"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ],
+     "supportsMediaUpload": true,
+     "mediaUpload": {
+      "accept": [
+       "application/octet-stream"
+      ],
+      "maxSize": "250MB",
+      "protocols": {
+       "simple": {
+        "multipart": true,
+        "path": "/upload/fusiontables/v1/tables/{tableId}/import"
+       },
+       "resumable": {
+        "multipart": true,
+        "path": "/resumable/upload/fusiontables/v1/tables/{tableId}/import"
+       }
+      }
+     }
+    },
+    "importTable": {
+     "id": "fusiontables.table.importTable",
+     "path": "tables/import",
+     "httpMethod": "POST",
+     "description": "Import a new table.",
+     "parameters": {
+      "delimiter": {
+       "type": "string",
+       "description": "The delimiter used to separate cell values. This can only consist of a single character. Default is ','.",
+       "location": "query"
+      },
+      "encoding": {
+       "type": "string",
+       "description": "The encoding of the content. Default is UTF-8. Use 'auto-detect' if you are unsure of the encoding.",
+       "location": "query"
+      },
+      "name": {
+       "type": "string",
+       "description": "The name to be assigned to the new table.",
+       "required": true,
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "name"
+     ],
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ],
+     "supportsMediaUpload": true,
+     "mediaUpload": {
+      "accept": [
+       "application/octet-stream"
+      ],
+      "maxSize": "250MB",
+      "protocols": {
+       "simple": {
+        "multipart": true,
+        "path": "/upload/fusiontables/v1/tables/import"
+       },
+       "resumable": {
+        "multipart": true,
+        "path": "/resumable/upload/fusiontables/v1/tables/import"
+       }
+      }
+     }
+    },
+    "insert": {
+     "id": "fusiontables.table.insert",
+     "path": "tables",
+     "httpMethod": "POST",
+     "description": "Creates a new table.",
+     "request": {
+      "$ref": "Table"
+     },
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "list": {
+     "id": "fusiontables.table.list",
+     "path": "tables",
+     "httpMethod": "GET",
+     "description": "Retrieves a list of tables a user owns.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of styles to return. Optional. Default is 5.",
+       "format": "uint32",
+       "minimum": "0",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Continuation token specifying which result page to return. Optional.",
+       "location": "query"
+      }
+     },
+     "response": {
+      "$ref": "TableList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "patch": {
+     "id": "fusiontables.table.patch",
+     "path": "tables/{tableId}",
+     "httpMethod": "PATCH",
+     "description": "Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated. This method supports patch semantics.",
+     "parameters": {
+      "replaceViewDefinition": {
+       "type": "boolean",
+       "description": "Should the view definition also be updated? The specified view definition replaces the existing one. Only a view can be updated with a new definition.",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "ID of the table that is being updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "request": {
+      "$ref": "Table"
+     },
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "update": {
+     "id": "fusiontables.table.update",
+     "path": "tables/{tableId}",
+     "httpMethod": "PUT",
+     "description": "Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated.",
+     "parameters": {
+      "replaceViewDefinition": {
+       "type": "boolean",
+       "description": "Should the view definition also be updated? The specified view definition replaces the existing one. Only a view can be updated with a new definition.",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "ID of the table that is being updated.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "request": {
+      "$ref": "Table"
+     },
+     "response": {
+      "$ref": "Table"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    }
+   }
+  },
+  "task": {
+   "methods": {
+    "delete": {
+     "id": "fusiontables.task.delete",
+     "path": "tables/{tableId}/tasks/{taskId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes the task, unless already started.",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table from which the task is being deleted.",
+       "required": true,
+       "location": "path"
+      },
+      "taskId": {
+       "type": "string",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "taskId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "get": {
+     "id": "fusiontables.task.get",
+     "path": "tables/{tableId}/tasks/{taskId}",
+     "httpMethod": "GET",
+     "description": "Retrieves a specific task by its id.",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table to which the task belongs.",
+       "required": true,
+       "location": "path"
+      },
+      "taskId": {
+       "type": "string",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "taskId"
+     ],
+     "response": {
+      "$ref": "Task"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "list": {
+     "id": "fusiontables.task.list",
+     "path": "tables/{tableId}/tasks",
+     "httpMethod": "GET",
+     "description": "Retrieves a list of tasks.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of columns to return. Optional. Default is 5.",
+       "format": "uint32",
+       "minimum": "0",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "location": "query"
+      },
+      "startIndex": {
+       "type": "integer",
+       "format": "uint32",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Table whose tasks are being listed.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "response": {
+      "$ref": "TaskList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    }
+   }
+  },
+  "template": {
+   "methods": {
+    "delete": {
+     "id": "fusiontables.template.delete",
+     "path": "tables/{tableId}/templates/{templateId}",
+     "httpMethod": "DELETE",
+     "description": "Deletes a template",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table from which the template is being deleted",
+       "required": true,
+       "location": "path"
+      },
+      "templateId": {
+       "type": "integer",
+       "description": "Identifier for the template which is being deleted",
+       "required": true,
+       "format": "int32",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "templateId"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "get": {
+     "id": "fusiontables.template.get",
+     "path": "tables/{tableId}/templates/{templateId}",
+     "httpMethod": "GET",
+     "description": "Retrieves a specific template by its id",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table to which the template belongs",
+       "required": true,
+       "location": "path"
+      },
+      "templateId": {
+       "type": "integer",
+       "description": "Identifier for the template that is being requested",
+       "required": true,
+       "format": "int32",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "templateId"
+     ],
+     "response": {
+      "$ref": "Template"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "insert": {
+     "id": "fusiontables.template.insert",
+     "path": "tables/{tableId}/templates",
+     "httpMethod": "POST",
+     "description": "Creates a new template for the table.",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table for which a new template is being created",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "request": {
+      "$ref": "Template"
+     },
+     "response": {
+      "$ref": "Template"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "list": {
+     "id": "fusiontables.template.list",
+     "path": "tables/{tableId}/templates",
+     "httpMethod": "GET",
+     "description": "Retrieves a list of templates.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of templates to return. Optional. Default is 5.",
+       "format": "uint32",
+       "minimum": "0",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "Continuation token specifying which results page to return. Optional.",
+       "location": "query"
+      },
+      "tableId": {
+       "type": "string",
+       "description": "Identifier for the table whose templates are being requested",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId"
+     ],
+     "response": {
+      "$ref": "TemplateList"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables",
+      "https://www.googleapis.com/auth/fusiontables.readonly"
+     ]
+    },
+    "patch": {
+     "id": "fusiontables.template.patch",
+     "path": "tables/{tableId}/templates/{templateId}",
+     "httpMethod": "PATCH",
+     "description": "Updates an existing template. This method supports patch semantics.",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table to which the updated template belongs",
+       "required": true,
+       "location": "path"
+      },
+      "templateId": {
+       "type": "integer",
+       "description": "Identifier for the template that is being updated",
+       "required": true,
+       "format": "int32",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "templateId"
+     ],
+     "request": {
+      "$ref": "Template"
+     },
+     "response": {
+      "$ref": "Template"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    },
+    "update": {
+     "id": "fusiontables.template.update",
+     "path": "tables/{tableId}/templates/{templateId}",
+     "httpMethod": "PUT",
+     "description": "Updates an existing template",
+     "parameters": {
+      "tableId": {
+       "type": "string",
+       "description": "Table to which the updated template belongs",
+       "required": true,
+       "location": "path"
+      },
+      "templateId": {
+       "type": "integer",
+       "description": "Identifier for the template that is being updated",
+       "required": true,
+       "format": "int32",
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "tableId",
+      "templateId"
+     ],
+     "request": {
+      "$ref": "Template"
+     },
+     "response": {
+      "$ref": "Template"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/fusiontables"
+     ]
+    }
+   }
+  }
+ }
+}
diff --git a/samples/fusiontables_sample/fusiontables_v1/__init__.py b/samples/fusiontables_sample/fusiontables_v1/__init__.py
new file mode 100644
index 0000000..2816da8
--- /dev/null
+++ b/samples/fusiontables_sample/fusiontables_v1/__init__.py
@@ -0,0 +1,5 @@
+"""Package marker file."""
+
+import pkgutil
+
+__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1.py b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1.py
new file mode 100644
index 0000000..8bb2c8a
--- /dev/null
+++ b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1.py
@@ -0,0 +1,1797 @@
+#!/usr/bin/env python
+"""CLI for fusiontables, version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+import code
+import os
+import platform
+import sys
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+
+from google.apputils import appcommands
+import gflags as flags
+
+import apitools.base.py as apitools_base
+from apitools.base.py import cli as apitools_base_cli
+import fusiontables_v1_client as client_lib
+import fusiontables_v1_messages as messages
+
+
+def _DeclareFusiontablesFlags():
+  """Declare global flags in an idempotent way."""
+  if 'api_endpoint' in flags.FLAGS:
+    return
+  flags.DEFINE_string(
+      'api_endpoint',
+      u'https://www.googleapis.com/fusiontables/v1/',
+      'URL of the API endpoint to use.',
+      short_name='fusiontables_url')
+  flags.DEFINE_string(
+      'history_file',
+      u'~/.fusiontables.v1.history',
+      'File with interactive shell history.')
+  flags.DEFINE_multistring(
+      'add_header', [],
+      'Additional http headers (as key=value strings). '
+      'Can be specified multiple times.')
+  flags.DEFINE_string(
+      'service_account_json_keyfile', '',
+      'Filename for a JSON service account key downloaded'
+      ' from the Developer Console.')
+  flags.DEFINE_enum(
+      'alt',
+      u'json',
+      [u'csv', u'json'],
+      u'Data format for the response.')
+  flags.DEFINE_string(
+      'fields',
+      None,
+      u'Selector specifying which fields to include in a partial response.')
+  flags.DEFINE_string(
+      'key',
+      None,
+      u'API key. Your API key identifies your project and provides you with '
+      u'API access, quota, and reports. Required unless you provide an OAuth '
+      u'2.0 token.')
+  flags.DEFINE_string(
+      'oauth_token',
+      None,
+      u'OAuth 2.0 token for the current user.')
+  flags.DEFINE_boolean(
+      'prettyPrint',
+      'True',
+      u'Returns response with indentations and line breaks.')
+  flags.DEFINE_string(
+      'quotaUser',
+      None,
+      u'Available to use for quota purposes for server-side applications. Can'
+      u' be any arbitrary string assigned to a user, but should not exceed 40'
+      u' characters. Overrides userIp if both are provided.')
+  flags.DEFINE_string(
+      'trace',
+      None,
+      'A tracing token of the form "token:<tokenid>" to include in api '
+      'requests.')
+  flags.DEFINE_string(
+      'userIp',
+      None,
+      u'IP address of the site where the request originates. Use this if you '
+      u'want to enforce per-user limits.')
+
+
+FLAGS = flags.FLAGS
+apitools_base_cli.DeclareBaseFlags()
+_DeclareFusiontablesFlags()
+
+
+def GetGlobalParamsFromFlags():
+  """Return a StandardQueryParameters based on flags."""
+  result = messages.StandardQueryParameters()
+  if FLAGS['alt'].present:
+    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
+  if FLAGS['fields'].present:
+    result.fields = FLAGS.fields.decode('utf8')
+  if FLAGS['key'].present:
+    result.key = FLAGS.key.decode('utf8')
+  if FLAGS['oauth_token'].present:
+    result.oauth_token = FLAGS.oauth_token.decode('utf8')
+  if FLAGS['prettyPrint'].present:
+    result.prettyPrint = FLAGS.prettyPrint
+  if FLAGS['quotaUser'].present:
+    result.quotaUser = FLAGS.quotaUser.decode('utf8')
+  if FLAGS['trace'].present:
+    result.trace = FLAGS.trace.decode('utf8')
+  if FLAGS['userIp'].present:
+    result.userIp = FLAGS.userIp.decode('utf8')
+  return result
+
+
+def GetClientFromFlags():
+  """Return a client object, configured from flags."""
+  log_request = FLAGS.log_request or FLAGS.log_request_response
+  log_response = FLAGS.log_response or FLAGS.log_request_response
+  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
+  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
+  credentials_args = {
+      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
+  }
+  try:
+    client = client_lib.FusiontablesV1(
+        api_endpoint, log_request=log_request,
+        log_response=log_response,
+        credentials_args=credentials_args,
+        additional_http_headers=additional_http_headers)
+  except apitools_base.CredentialsError as e:
+    print 'Error creating credentials: %s' % e
+    sys.exit(1)
+  return client
+
+
+class PyShell(appcommands.Cmd):
+
+  def Run(self, _):
+    """Run an interactive python shell with the client."""
+    client = GetClientFromFlags()
+    params = GetGlobalParamsFromFlags()
+    for field in params.all_fields():
+      value = params.get_assigned_value(field.name)
+      if value != field.default:
+        client.AddGlobalParam(field.name, value)
+    banner = """
+           == fusiontables interactive console ==
+                 client: a fusiontables client
+          apitools_base: base apitools module
+         messages: the generated messages module
+    """
+    local_vars = {
+        'apitools_base': apitools_base,
+        'client': client,
+        'client_lib': client_lib,
+        'messages': messages,
+    }
+    if platform.system() == 'Linux':
+      console = apitools_base_cli.ConsoleWithReadline(
+          local_vars, histfile=FLAGS.history_file)
+    else:
+      console = code.InteractiveConsole(local_vars)
+    try:
+      console.interact(banner)
+    except SystemExit as e:
+      return e.code
+
+
+class ColumnDelete(apitools_base_cli.NewCmd):
+  """Command wrapping column.Delete."""
+
+  usage = """column_delete <tableId> <columnId>"""
+
+  def __init__(self, name, fv):
+    super(ColumnDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId, columnId):
+    """Deletes the column.
+
+    Args:
+      tableId: Table from which the column is being deleted.
+      columnId: Name or identifier for the column being deleted.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesColumnDeleteRequest(
+        tableId=tableId.decode('utf8'),
+        columnId=columnId.decode('utf8'),
+        )
+    result = client.column.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ColumnGet(apitools_base_cli.NewCmd):
+  """Command wrapping column.Get."""
+
+  usage = """column_get <tableId> <columnId>"""
+
+  def __init__(self, name, fv):
+    super(ColumnGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId, columnId):
+    """Retrieves a specific column by its id.
+
+    Args:
+      tableId: Table to which the column belongs.
+      columnId: Name or identifier for the column that is being requested.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesColumnGetRequest(
+        tableId=tableId.decode('utf8'),
+        columnId=columnId.decode('utf8'),
+        )
+    result = client.column.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ColumnInsert(apitools_base_cli.NewCmd):
+  """Command wrapping column.Insert."""
+
+  usage = """column_insert <tableId>"""
+
+  def __init__(self, name, fv):
+    super(ColumnInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'column',
+        None,
+        u'A Column resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Adds a new column to the table.
+
+    Args:
+      tableId: Table for which a new column is being added.
+
+    Flags:
+      column: A Column resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesColumnInsertRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    if FLAGS['column'].present:
+      request.column = apitools_base.JsonToMessage(messages.Column, FLAGS.column)
+    result = client.column.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ColumnList(apitools_base_cli.NewCmd):
+  """Command wrapping column.List."""
+
+  usage = """column_list <tableId>"""
+
+  def __init__(self, name, fv):
+    super(ColumnList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of columns to return. Optional. Default is 5.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Continuation token specifying which result page to return. '
+        u'Optional.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Retrieves a list of columns.
+
+    Args:
+      tableId: Table whose columns are being listed.
+
+    Flags:
+      maxResults: Maximum number of columns to return. Optional. Default is 5.
+      pageToken: Continuation token specifying which result page to return.
+        Optional.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesColumnListRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.column.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ColumnPatch(apitools_base_cli.NewCmd):
+  """Command wrapping column.Patch."""
+
+  usage = """column_patch <tableId> <columnId>"""
+
+  def __init__(self, name, fv):
+    super(ColumnPatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'column',
+        None,
+        u'A Column resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId, columnId):
+    """Updates the name or type of an existing column. This method supports
+    patch semantics.
+
+    Args:
+      tableId: Table for which the column is being updated.
+      columnId: Name or identifier for the column that is being updated.
+
+    Flags:
+      column: A Column resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesColumnPatchRequest(
+        tableId=tableId.decode('utf8'),
+        columnId=columnId.decode('utf8'),
+        )
+    if FLAGS['column'].present:
+      request.column = apitools_base.JsonToMessage(messages.Column, FLAGS.column)
+    result = client.column.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ColumnUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping column.Update."""
+
+  usage = """column_update <tableId> <columnId>"""
+
+  def __init__(self, name, fv):
+    super(ColumnUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'column',
+        None,
+        u'A Column resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId, columnId):
+    """Updates the name or type of an existing column.
+
+    Args:
+      tableId: Table for which the column is being updated.
+      columnId: Name or identifier for the column that is being updated.
+
+    Flags:
+      column: A Column resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesColumnUpdateRequest(
+        tableId=tableId.decode('utf8'),
+        columnId=columnId.decode('utf8'),
+        )
+    if FLAGS['column'].present:
+      request.column = apitools_base.JsonToMessage(messages.Column, FLAGS.column)
+    result = client.column.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class QuerySql(apitools_base_cli.NewCmd):
+  """Command wrapping query.Sql."""
+
+  usage = """query_sql <sql>"""
+
+  def __init__(self, name, fv):
+    super(QuerySql, self).__init__(name, fv)
+    flags.DEFINE_boolean(
+        'hdrs',
+        None,
+        u'Should column names be included (in the first row)?. Default is '
+        u'true.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'typed',
+        None,
+        u'Should typed values be returned in the (JSON) response -- numbers '
+        u'for numeric values and parsed geometries for KML values? Default is'
+        u' true.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'download_filename',
+        '',
+        'Filename to use for download.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'overwrite',
+        'False',
+        'If True, overwrite the existing file when downloading.',
+        flag_values=fv)
+
+  def RunWithArgs(self, sql):
+    """Executes an SQL SELECT/INSERT/UPDATE/DELETE/SHOW/DESCRIBE/CREATE
+    statement.
+
+    Args:
+      sql: An SQL SELECT/SHOW/DESCRIBE/INSERT/UPDATE/DELETE/CREATE statement.
+
+    Flags:
+      hdrs: Should column names be included (in the first row)?. Default is
+        true.
+      typed: Should typed values be returned in the (JSON) response -- numbers
+        for numeric values and parsed geometries for KML values? Default is
+        true.
+      download_filename: Filename to use for download.
+      overwrite: If True, overwrite the existing file when downloading.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesQuerySqlRequest(
+        sql=sql.decode('utf8'),
+        )
+    if FLAGS['hdrs'].present:
+      request.hdrs = FLAGS.hdrs
+    if FLAGS['typed'].present:
+      request.typed = FLAGS.typed
+    download = None
+    if FLAGS.download_filename:
+      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
+          progress_callback=apitools_base.DownloadProgressPrinter,
+          finish_callback=apitools_base.DownloadCompletePrinter)
+    result = client.query.Sql(
+        request, global_params=global_params, download=download)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class QuerySqlGet(apitools_base_cli.NewCmd):
+  """Command wrapping query.SqlGet."""
+
+  usage = """query_sqlGet <sql>"""
+
+  def __init__(self, name, fv):
+    super(QuerySqlGet, self).__init__(name, fv)
+    flags.DEFINE_boolean(
+        'hdrs',
+        None,
+        u'Should column names be included (in the first row)?. Default is '
+        u'true.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'typed',
+        None,
+        u'Should typed values be returned in the (JSON) response -- numbers '
+        u'for numeric values and parsed geometries for KML values? Default is'
+        u' true.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'download_filename',
+        '',
+        'Filename to use for download.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'overwrite',
+        'False',
+        'If True, overwrite the existing file when downloading.',
+        flag_values=fv)
+
+  def RunWithArgs(self, sql):
+    """Executes an SQL SELECT/SHOW/DESCRIBE statement.
+
+    Args:
+      sql: An SQL SELECT/SHOW/DESCRIBE statement.
+
+    Flags:
+      hdrs: Should column names be included (in the first row)?. Default is
+        true.
+      typed: Should typed values be returned in the (JSON) response -- numbers
+        for numeric values and parsed geometries for KML values? Default is
+        true.
+      download_filename: Filename to use for download.
+      overwrite: If True, overwrite the existing file when downloading.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesQuerySqlGetRequest(
+        sql=sql.decode('utf8'),
+        )
+    if FLAGS['hdrs'].present:
+      request.hdrs = FLAGS.hdrs
+    if FLAGS['typed'].present:
+      request.typed = FLAGS.typed
+    download = None
+    if FLAGS.download_filename:
+      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
+          progress_callback=apitools_base.DownloadProgressPrinter,
+          finish_callback=apitools_base.DownloadCompletePrinter)
+    result = client.query.SqlGet(
+        request, global_params=global_params, download=download)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class StyleDelete(apitools_base_cli.NewCmd):
+  """Command wrapping style.Delete."""
+
+  usage = """style_delete <tableId> <styleId>"""
+
+  def __init__(self, name, fv):
+    super(StyleDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId, styleId):
+    """Deletes a style.
+
+    Args:
+      tableId: Table from which the style is being deleted
+      styleId: Identifier (within a table) for the style being deleted
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesStyleDeleteRequest(
+        tableId=tableId.decode('utf8'),
+        styleId=styleId,
+        )
+    result = client.style.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class StyleGet(apitools_base_cli.NewCmd):
+  """Command wrapping style.Get."""
+
+  usage = """style_get <tableId> <styleId>"""
+
+  def __init__(self, name, fv):
+    super(StyleGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId, styleId):
+    """Gets a specific style.
+
+    Args:
+      tableId: Table to which the requested style belongs
+      styleId: Identifier (integer) for a specific style in a table
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesStyleGetRequest(
+        tableId=tableId.decode('utf8'),
+        styleId=styleId,
+        )
+    result = client.style.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class StyleInsert(apitools_base_cli.NewCmd):
+  """Command wrapping style.Insert."""
+
+  usage = """style_insert <tableId>"""
+
+  def __init__(self, name, fv):
+    super(StyleInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'kind',
+        u'fusiontables#styleSetting',
+        u'Type name: an individual style setting. A StyleSetting contains the'
+        u' style defintions for points, lines, and polygons in a table. Since'
+        u' a table can have any one or all of them, a style definition can '
+        u'have point, line and polygon style definitions.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'markerOptions',
+        None,
+        u'Style definition for points in the table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Optional name for the style setting.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'polygonOptions',
+        None,
+        u'Style definition for polygons in the table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'polylineOptions',
+        None,
+        u'Style definition for lines in the table.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'styleId',
+        None,
+        u'Identifier for the style setting (unique only within tables).',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Adds a new style for the table.
+
+    Args:
+      tableId: Identifier for the table.
+
+    Flags:
+      kind: Type name: an individual style setting. A StyleSetting contains
+        the style defintions for points, lines, and polygons in a table. Since
+        a table can have any one or all of them, a style definition can have
+        point, line and polygon style definitions.
+      markerOptions: Style definition for points in the table.
+      name: Optional name for the style setting.
+      polygonOptions: Style definition for polygons in the table.
+      polylineOptions: Style definition for lines in the table.
+      styleId: Identifier for the style setting (unique only within tables).
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StyleSetting(
+        tableId=tableId.decode('utf8'),
+        )
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['markerOptions'].present:
+      request.markerOptions = apitools_base.JsonToMessage(messages.PointStyle, FLAGS.markerOptions)
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    if FLAGS['polygonOptions'].present:
+      request.polygonOptions = apitools_base.JsonToMessage(messages.PolygonStyle, FLAGS.polygonOptions)
+    if FLAGS['polylineOptions'].present:
+      request.polylineOptions = apitools_base.JsonToMessage(messages.LineStyle, FLAGS.polylineOptions)
+    if FLAGS['styleId'].present:
+      request.styleId = FLAGS.styleId
+    result = client.style.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class StyleList(apitools_base_cli.NewCmd):
+  """Command wrapping style.List."""
+
+  usage = """style_list <tableId>"""
+
+  def __init__(self, name, fv):
+    super(StyleList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of styles to return. Optional. Default is 5.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Continuation token specifying which result page to return. '
+        u'Optional.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Retrieves a list of styles.
+
+    Args:
+      tableId: Table whose styles are being listed
+
+    Flags:
+      maxResults: Maximum number of styles to return. Optional. Default is 5.
+      pageToken: Continuation token specifying which result page to return.
+        Optional.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesStyleListRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.style.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class StylePatch(apitools_base_cli.NewCmd):
+  """Command wrapping style.Patch."""
+
+  usage = """style_patch <tableId> <styleId>"""
+
+  def __init__(self, name, fv):
+    super(StylePatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'kind',
+        u'fusiontables#styleSetting',
+        u'Type name: an individual style setting. A StyleSetting contains the'
+        u' style defintions for points, lines, and polygons in a table. Since'
+        u' a table can have any one or all of them, a style definition can '
+        u'have point, line and polygon style definitions.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'markerOptions',
+        None,
+        u'Style definition for points in the table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Optional name for the style setting.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'polygonOptions',
+        None,
+        u'Style definition for polygons in the table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'polylineOptions',
+        None,
+        u'Style definition for lines in the table.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId, styleId):
+    """Updates an existing style. This method supports patch semantics.
+
+    Args:
+      tableId: Identifier for the table.
+      styleId: Identifier for the style setting (unique only within tables).
+
+    Flags:
+      kind: Type name: an individual style setting. A StyleSetting contains
+        the style defintions for points, lines, and polygons in a table. Since
+        a table can have any one or all of them, a style definition can have
+        point, line and polygon style definitions.
+      markerOptions: Style definition for points in the table.
+      name: Optional name for the style setting.
+      polygonOptions: Style definition for polygons in the table.
+      polylineOptions: Style definition for lines in the table.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StyleSetting(
+        tableId=tableId.decode('utf8'),
+        styleId=styleId,
+        )
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['markerOptions'].present:
+      request.markerOptions = apitools_base.JsonToMessage(messages.PointStyle, FLAGS.markerOptions)
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    if FLAGS['polygonOptions'].present:
+      request.polygonOptions = apitools_base.JsonToMessage(messages.PolygonStyle, FLAGS.polygonOptions)
+    if FLAGS['polylineOptions'].present:
+      request.polylineOptions = apitools_base.JsonToMessage(messages.LineStyle, FLAGS.polylineOptions)
+    result = client.style.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class StyleUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping style.Update."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only
+  # (print statement, str.decode on positional args).
+  usage = """style_update <tableId> <styleId>"""
+
+  def __init__(self, name, fv):
+    super(StyleUpdate, self).__init__(name, fv)
+    # Register per-command flags on the command's own FlagValues (fv).
+    flags.DEFINE_string(
+        'kind',
+        u'fusiontables#styleSetting',
+        u'Type name: an individual style setting. A StyleSetting contains the'
+        u' style defintions for points, lines, and polygons in a table. Since'
+        u' a table can have any one or all of them, a style definition can '
+        u'have point, line and polygon style definitions.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'markerOptions',
+        None,
+        u'Style definition for points in the table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Optional name for the style setting.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'polygonOptions',
+        None,
+        u'Style definition for polygons in the table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'polylineOptions',
+        None,
+        u'Style definition for lines in the table.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId, styleId):
+    """Updates an existing style.
+
+    Args:
+      tableId: Identifier for the table.
+      styleId: Identifier for the style setting (unique only within tables).
+
+    Flags:
+      kind: Type name: an individual style setting. A StyleSetting contains
+        the style defintions for points, lines, and polygons in a table. Since
+        a table can have any one or all of them, a style definition can have
+        point, line and polygon style definitions.
+      markerOptions: Style definition for points in the table.
+      name: Optional name for the style setting.
+      polygonOptions: Style definition for polygons in the table.
+      polylineOptions: Style definition for lines in the table.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # tableId arrives as a byte string from the command line; decode it.
+    # styleId is passed through undecoded — presumably an integer path
+    # parameter; TODO(review): confirm against the message definition.
+    request = messages.StyleSetting(
+        tableId=tableId.decode('utf8'),
+        styleId=styleId,
+        )
+    # Copy only the flags the user explicitly set; JSON-valued flags are
+    # parsed into their corresponding protorpc message types.
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['markerOptions'].present:
+      request.markerOptions = apitools_base.JsonToMessage(messages.PointStyle, FLAGS.markerOptions)
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    if FLAGS['polygonOptions'].present:
+      request.polygonOptions = apitools_base.JsonToMessage(messages.PolygonStyle, FLAGS.polygonOptions)
+    if FLAGS['polylineOptions'].present:
+      request.polylineOptions = apitools_base.JsonToMessage(messages.LineStyle, FLAGS.polylineOptions)
+    result = client.style.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TableCopy(apitools_base_cli.NewCmd):
+  """Command wrapping table.Copy."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """table_copy <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TableCopy, self).__init__(name, fv)
+    flags.DEFINE_boolean(
+        'copyPresentation',
+        None,
+        u'Whether to also copy tabs, styles, and templates. Default is false.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Copies a table.
+
+    Args:
+      tableId: ID of the table that is being copied.
+
+    Flags:
+      copyPresentation: Whether to also copy tabs, styles, and templates.
+        Default is false.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTableCopyRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    # Only forward the flag when the user explicitly set it.
+    if FLAGS['copyPresentation'].present:
+      request.copyPresentation = FLAGS.copyPresentation
+    result = client.table.Copy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TableDelete(apitools_base_cli.NewCmd):
+  """Command wrapping table.Delete."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """table_delete <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TableDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId):
+    """Deletes a table.
+
+    Args:
+      tableId: ID of the table that is being deleted.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTableDeleteRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    result = client.table.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TableGet(apitools_base_cli.NewCmd):
+  """Command wrapping table.Get."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """table_get <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TableGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId):
+    """Retrieves a specific table by its id.
+
+    Args:
+      tableId: Identifier(ID) for the table being requested.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTableGetRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    result = client.table.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TableImportRows(apitools_base_cli.NewCmd):
+  """Command wrapping table.ImportRows."""
+
+  # NOTE(review): auto-generated apitools command with media upload support;
+  # Python 2 only.
+  usage = """table_importRows <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TableImportRows, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'delimiter',
+        None,
+        u'The delimiter used to separate cell values. This can only consist '
+        u"of a single character. Default is ','.",
+        flag_values=fv)
+    flags.DEFINE_string(
+        'encoding',
+        None,
+        u"The encoding of the content. Default is UTF-8. Use 'auto-detect' if"
+        u' you are unsure of the encoding.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'endLine',
+        None,
+        u'The index of the last line from which to start importing, '
+        u'exclusive. Thus, the number of imported lines is endLine - '
+        u'startLine. If this parameter is not provided, the file will be '
+        u'imported until the last line of the file. If endLine is negative, '
+        u'then the imported content will exclude the last endLine lines. That'
+        u' is, if endline is negative, no line will be imported whose index '
+        u'is greater than N + endLine where N is the number of lines in the '
+        u'file, and the number of imported lines will be N + endLine - '
+        u'startLine.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'isStrict',
+        None,
+        u'Whether the CSV must have the same number of values for each row. '
+        u'If false, rows with fewer values will be padded with empty values. '
+        u'Default is true.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'startLine',
+        None,
+        u'The index of the first line from which to start importing, '
+        u'inclusive. Default is 0.',
+        flag_values=fv)
+    # upload_* flags control the media body; they are CLI-local and are not
+    # copied into the request message.
+    flags.DEFINE_string(
+        'upload_filename',
+        '',
+        'Filename to use for upload.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'upload_mime_type',
+        '',
+        'MIME type to use for the upload. Only needed if the extension on '
+        '--upload_filename does not determine the correct (or any) MIME '
+        'type.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Import more rows into a table.
+
+    Args:
+      tableId: The table into which new rows are being imported.
+
+    Flags:
+      delimiter: The delimiter used to separate cell values. This can only
+        consist of a single character. Default is ','.
+      encoding: The encoding of the content. Default is UTF-8. Use 'auto-
+        detect' if you are unsure of the encoding.
+      endLine: The index of the last line from which to start importing,
+        exclusive. Thus, the number of imported lines is endLine - startLine.
+        If this parameter is not provided, the file will be imported until the
+        last line of the file. If endLine is negative, then the imported
+        content will exclude the last endLine lines. That is, if endline is
+        negative, no line will be imported whose index is greater than N +
+        endLine where N is the number of lines in the file, and the number of
+        imported lines will be N + endLine - startLine.
+      isStrict: Whether the CSV must have the same number of values for each
+        row. If false, rows with fewer values will be padded with empty
+        values. Default is true.
+      startLine: The index of the first line from which to start importing,
+        inclusive. Default is 0.
+      upload_filename: Filename to use for upload.
+      upload_mime_type: MIME type to use for the upload. Only needed if the
+        extension on --upload_filename does not determine the correct (or any)
+        MIME type.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTableImportRowsRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    # Copy only the flags the user explicitly set on the command line.
+    if FLAGS['delimiter'].present:
+      request.delimiter = FLAGS.delimiter.decode('utf8')
+    if FLAGS['encoding'].present:
+      request.encoding = FLAGS.encoding.decode('utf8')
+    if FLAGS['endLine'].present:
+      request.endLine = FLAGS.endLine
+    if FLAGS['isStrict'].present:
+      request.isStrict = FLAGS.isStrict
+    if FLAGS['startLine'].present:
+      request.startLine = FLAGS.startLine
+    # Optional media upload: stream the file, printing progress and a
+    # completion message via the apitools callbacks.
+    upload = None
+    if FLAGS.upload_filename:
+      upload = apitools_base.Upload.FromFile(
+          FLAGS.upload_filename, FLAGS.upload_mime_type,
+          progress_callback=apitools_base.UploadProgressPrinter,
+          finish_callback=apitools_base.UploadCompletePrinter)
+    result = client.table.ImportRows(
+        request, global_params=global_params, upload=upload)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TableImportTable(apitools_base_cli.NewCmd):
+  """Command wrapping table.ImportTable."""
+
+  # NOTE(review): auto-generated apitools command with media upload support;
+  # Python 2 only.
+  usage = """table_importTable <name>"""
+
+  def __init__(self, name, fv):
+    super(TableImportTable, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'delimiter',
+        None,
+        u'The delimiter used to separate cell values. This can only consist '
+        u"of a single character. Default is ','.",
+        flag_values=fv)
+    flags.DEFINE_string(
+        'encoding',
+        None,
+        u"The encoding of the content. Default is UTF-8. Use 'auto-detect' if"
+        u' you are unsure of the encoding.',
+        flag_values=fv)
+    # upload_* flags control the media body; they are CLI-local and are not
+    # copied into the request message.
+    flags.DEFINE_string(
+        'upload_filename',
+        '',
+        'Filename to use for upload.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'upload_mime_type',
+        '',
+        'MIME type to use for the upload. Only needed if the extension on '
+        '--upload_filename does not determine the correct (or any) MIME '
+        'type.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Import a new table.
+
+    Args:
+      name: The name to be assigned to the new table.
+
+    Flags:
+      delimiter: The delimiter used to separate cell values. This can only
+        consist of a single character. Default is ','.
+      encoding: The encoding of the content. Default is UTF-8. Use 'auto-
+        detect' if you are unsure of the encoding.
+      upload_filename: Filename to use for upload.
+      upload_mime_type: MIME type to use for the upload. Only needed if the
+        extension on --upload_filename does not determine the correct (or any)
+        MIME type.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTableImportTableRequest(
+        name=name.decode('utf8'),
+        )
+    # Copy only the flags the user explicitly set on the command line.
+    if FLAGS['delimiter'].present:
+      request.delimiter = FLAGS.delimiter.decode('utf8')
+    if FLAGS['encoding'].present:
+      request.encoding = FLAGS.encoding.decode('utf8')
+    # Optional media upload with progress/completion printers.
+    upload = None
+    if FLAGS.upload_filename:
+      upload = apitools_base.Upload.FromFile(
+          FLAGS.upload_filename, FLAGS.upload_mime_type,
+          progress_callback=apitools_base.UploadProgressPrinter,
+          finish_callback=apitools_base.UploadCompletePrinter)
+    result = client.table.ImportTable(
+        request, global_params=global_params, upload=upload)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TableInsert(apitools_base_cli.NewCmd):
+  """Command wrapping table.Insert."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """table_insert"""
+
+  def __init__(self, name, fv):
+    super(TableInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'attribution',
+        None,
+        u'Optional attribution assigned to the table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'attributionLink',
+        None,
+        u'Optional link for attribution.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'baseTableIds',
+        None,
+        u'Optional base table identifier if this table is a view or merged '
+        u'table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'columns',
+        None,
+        u'Columns in the table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'description',
+        None,
+        u'Optional description assigned to the table.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'isExportable',
+        None,
+        u'Variable for whether table is exportable.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'fusiontables#table',
+        u'Type name: a template for an individual table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Name assigned to a table.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'sql',
+        None,
+        u'Optional sql that encodes the table definition for derived tables.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'tableId',
+        None,
+        u'Encrypted unique alphanumeric identifier for the table.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Creates a new table.
+
+    Flags:
+      attribution: Optional attribution assigned to the table.
+      attributionLink: Optional link for attribution.
+      baseTableIds: Optional base table identifier if this table is a view or
+        merged table.
+      columns: Columns in the table.
+      description: Optional description assigned to the table.
+      isExportable: Variable for whether table is exportable.
+      kind: Type name: a template for an individual table.
+      name: Name assigned to a table.
+      sql: Optional sql that encodes the table definition for derived tables.
+      tableId: Encrypted unique alphanumeric identifier for the table.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.Table(
+        )
+    # Copy only the flags the user explicitly set. Repeated fields iterate
+    # over the flag value; 'columns' entries are parsed from JSON.
+    if FLAGS['attribution'].present:
+      request.attribution = FLAGS.attribution.decode('utf8')
+    if FLAGS['attributionLink'].present:
+      request.attributionLink = FLAGS.attributionLink.decode('utf8')
+    if FLAGS['baseTableIds'].present:
+      request.baseTableIds = [x.decode('utf8') for x in FLAGS.baseTableIds]
+    if FLAGS['columns'].present:
+      request.columns = [apitools_base.JsonToMessage(messages.Column, x) for x in FLAGS.columns]
+    if FLAGS['description'].present:
+      request.description = FLAGS.description.decode('utf8')
+    if FLAGS['isExportable'].present:
+      request.isExportable = FLAGS.isExportable
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    if FLAGS['sql'].present:
+      request.sql = FLAGS.sql.decode('utf8')
+    if FLAGS['tableId'].present:
+      request.tableId = FLAGS.tableId.decode('utf8')
+    result = client.table.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TableList(apitools_base_cli.NewCmd):
+  """Command wrapping table.List."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """table_list"""
+
+  def __init__(self, name, fv):
+    super(TableList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of styles to return. Optional. Default is 5.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Continuation token specifying which result page to return. '
+        u'Optional.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Retrieves a list of tables a user owns.
+
+    Flags:
+      maxResults: Maximum number of styles to return. Optional. Default is 5.
+      pageToken: Continuation token specifying which result page to return.
+        Optional.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.FusiontablesTableListRequest(
+        )
+    # Copy only the flags the user explicitly set on the command line.
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.table.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TablePatch(apitools_base_cli.NewCmd):
+  """Command wrapping table.Patch."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """table_patch <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TablePatch, self).__init__(name, fv)
+    flags.DEFINE_boolean(
+        'replaceViewDefinition',
+        None,
+        u'Should the view definition also be updated? The specified view '
+        u'definition replaces the existing one. Only a view can be updated '
+        u'with a new definition.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'table',
+        None,
+        u'A Table resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Updates an existing table. Unless explicitly requested, only the name,
+    description, and attribution will be updated. This method supports patch
+    semantics.
+
+    Args:
+      tableId: ID of the table that is being updated.
+
+    Flags:
+      replaceViewDefinition: Should the view definition also be updated? The
+        specified view definition replaces the existing one. Only a view can
+        be updated with a new definition.
+      table: A Table resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTablePatchRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    # Copy only the flags the user explicitly set; --table is JSON that is
+    # parsed into a Table message for the request body.
+    if FLAGS['replaceViewDefinition'].present:
+      request.replaceViewDefinition = FLAGS.replaceViewDefinition
+    if FLAGS['table'].present:
+      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
+    result = client.table.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TableUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping table.Update."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """table_update <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TableUpdate, self).__init__(name, fv)
+    flags.DEFINE_boolean(
+        'replaceViewDefinition',
+        None,
+        u'Should the view definition also be updated? The specified view '
+        u'definition replaces the existing one. Only a view can be updated '
+        u'with a new definition.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'table',
+        None,
+        u'A Table resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Updates an existing table. Unless explicitly requested, only the name,
+    description, and attribution will be updated.
+
+    Args:
+      tableId: ID of the table that is being updated.
+
+    Flags:
+      replaceViewDefinition: Should the view definition also be updated? The
+        specified view definition replaces the existing one. Only a view can
+        be updated with a new definition.
+      table: A Table resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTableUpdateRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    # Copy only the flags the user explicitly set; --table is JSON that is
+    # parsed into a Table message for the request body.
+    if FLAGS['replaceViewDefinition'].present:
+      request.replaceViewDefinition = FLAGS.replaceViewDefinition
+    if FLAGS['table'].present:
+      request.table = apitools_base.JsonToMessage(messages.Table, FLAGS.table)
+    result = client.table.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TaskDelete(apitools_base_cli.NewCmd):
+  """Command wrapping task.Delete."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """task_delete <tableId> <taskId>"""
+
+  def __init__(self, name, fv):
+    super(TaskDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId, taskId):
+    """Deletes the task, unless already started.
+
+    Args:
+      tableId: Table from which the task is being deleted.
+      taskId: A string attribute.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args are byte strings under Python 2; decode to unicode.
+    request = messages.FusiontablesTaskDeleteRequest(
+        tableId=tableId.decode('utf8'),
+        taskId=taskId.decode('utf8'),
+        )
+    result = client.task.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TaskGet(apitools_base_cli.NewCmd):
+  """Command wrapping task.Get."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """task_get <tableId> <taskId>"""
+
+  def __init__(self, name, fv):
+    super(TaskGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId, taskId):
+    """Retrieves a specific task by its id.
+
+    Args:
+      tableId: Table to which the task belongs.
+      taskId: A string attribute.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args are byte strings under Python 2; decode to unicode.
+    request = messages.FusiontablesTaskGetRequest(
+        tableId=tableId.decode('utf8'),
+        taskId=taskId.decode('utf8'),
+        )
+    result = client.task.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TaskList(apitools_base_cli.NewCmd):
+  """Command wrapping task.List."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """task_list <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TaskList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of columns to return. Optional. Default is 5.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        'A string attribute.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'startIndex',
+        None,
+        'A integer attribute.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Retrieves a list of tasks.
+
+    Args:
+      tableId: Table whose tasks are being listed.
+
+    Flags:
+      maxResults: Maximum number of columns to return. Optional. Default is 5.
+      pageToken: A string attribute.
+      startIndex: A integer attribute.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTaskListRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    # Copy only the flags the user explicitly set on the command line.
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['startIndex'].present:
+      request.startIndex = FLAGS.startIndex
+    result = client.task.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TemplateDelete(apitools_base_cli.NewCmd):
+  """Command wrapping template.Delete."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """template_delete <tableId> <templateId>"""
+
+  def __init__(self, name, fv):
+    super(TemplateDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId, templateId):
+    """Deletes a template
+
+    Args:
+      tableId: Table from which the template is being deleted
+      templateId: Identifier for the template which is being deleted
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # tableId is decoded from bytes; templateId is passed through undecoded —
+    # presumably an integer path parameter (see TemplateInsert's
+    # DEFINE_integer for templateId); TODO(review): confirm.
+    request = messages.FusiontablesTemplateDeleteRequest(
+        tableId=tableId.decode('utf8'),
+        templateId=templateId,
+        )
+    result = client.template.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TemplateGet(apitools_base_cli.NewCmd):
+  """Command wrapping template.Get."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """template_get <tableId> <templateId>"""
+
+  def __init__(self, name, fv):
+    super(TemplateGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, tableId, templateId):
+    """Retrieves a specific template by its id
+
+    Args:
+      tableId: Table to which the template belongs
+      templateId: Identifier for the template that is being requested
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # tableId is decoded from bytes; templateId is passed through undecoded —
+    # presumably an integer path parameter; TODO(review): confirm.
+    request = messages.FusiontablesTemplateGetRequest(
+        tableId=tableId.decode('utf8'),
+        templateId=templateId,
+        )
+    result = client.template.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TemplateInsert(apitools_base_cli.NewCmd):
+  """Command wrapping template.Insert."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """template_insert <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TemplateInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'automaticColumnNames',
+        None,
+        u'List of columns from which the template is to be automatically '
+        u'constructed. Only one of body or automaticColumns can be specified.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'body',
+        None,
+        u'Body of the template. It contains HTML with {column_name} to insert'
+        u' values from a particular column. The body is sanitized to remove '
+        u'certain tags, e.g., script. Only one of body or automaticColumns '
+        u'can be specified.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'fusiontables#template',
+        u'Type name: a template for the info window contents. The template '
+        u'can either include an HTML body or a list of columns from which the'
+        u' template is computed automatically.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Optional name assigned to a template.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'templateId',
+        None,
+        u'Identifier for the template, unique within the context of a '
+        u'particular table.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Creates a new template for the table.
+
+    Args:
+      tableId: Identifier for the table for which the template is defined.
+
+    Flags:
+      automaticColumnNames: List of columns from which the template is to be
+        automatically constructed. Only one of body or automaticColumns can be
+        specified.
+      body: Body of the template. It contains HTML with {column_name} to
+        insert values from a particular column. The body is sanitized to
+        remove certain tags, e.g., script. Only one of body or
+        automaticColumns can be specified.
+      kind: Type name: a template for the info window contents. The template
+        can either include an HTML body or a list of columns from which the
+        template is computed automatically.
+      name: Optional name assigned to a template.
+      templateId: Identifier for the template, unique within the context of a
+        particular table.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.Template(
+        tableId=tableId.decode('utf8'),
+        )
+    # Copy only the flags the user explicitly set on the command line.
+    if FLAGS['automaticColumnNames'].present:
+      request.automaticColumnNames = [x.decode('utf8') for x in FLAGS.automaticColumnNames]
+    if FLAGS['body'].present:
+      request.body = FLAGS.body.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    if FLAGS['templateId'].present:
+      request.templateId = FLAGS.templateId
+    result = client.template.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TemplateList(apitools_base_cli.NewCmd):
+  """Command wrapping template.List."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """template_list <tableId>"""
+
+  def __init__(self, name, fv):
+    super(TemplateList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of templates to return. Optional. Default is 5.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Continuation token specifying which results page to return. '
+        u'Optional.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId):
+    """Retrieves a list of templates.
+
+    Args:
+      tableId: Identifier for the table whose templates are being requested
+
+    Flags:
+      maxResults: Maximum number of templates to return. Optional. Default is
+        5.
+      pageToken: Continuation token specifying which results page to return.
+        Optional.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional arg is a byte string under Python 2; decode to unicode.
+    request = messages.FusiontablesTemplateListRequest(
+        tableId=tableId.decode('utf8'),
+        )
+    # Copy only the flags the user explicitly set on the command line.
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.template.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TemplatePatch(apitools_base_cli.NewCmd):
+  """Command wrapping template.Patch."""
+
+  # NOTE(review): auto-generated apitools command; Python 2 only.
+  usage = """template_patch <tableId> <templateId>"""
+
+  def __init__(self, name, fv):
+    super(TemplatePatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'automaticColumnNames',
+        None,
+        u'List of columns from which the template is to be automatically '
+        u'constructed. Only one of body or automaticColumns can be specified.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'body',
+        None,
+        u'Body of the template. It contains HTML with {column_name} to insert'
+        u' values from a particular column. The body is sanitized to remove '
+        u'certain tags, e.g., script. Only one of body or automaticColumns '
+        u'can be specified.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'fusiontables#template',
+        u'Type name: a template for the info window contents. The template '
+        u'can either include an HTML body or a list of columns from which the'
+        u' template is computed automatically.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Optional name assigned to a template.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId, templateId):
+    """Updates an existing template. This method supports patch semantics.
+
+    Args:
+      tableId: Identifier for the table for which the template is defined.
+      templateId: Identifier for the template, unique within the context of a
+        particular table.
+
+    Flags:
+      automaticColumnNames: List of columns from which the template is to be
+        automatically constructed. Only one of body or automaticColumns can be
+        specified.
+      body: Body of the template. It contains HTML with {column_name} to
+        insert values from a particular column. The body is sanitized to
+        remove certain tags, e.g., script. Only one of body or
+        automaticColumns can be specified.
+      kind: Type name: a template for the info window contents. The template
+        can either include an HTML body or a list of columns from which the
+        template is computed automatically.
+      name: Optional name assigned to a template.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # tableId is decoded from bytes; templateId is passed through undecoded —
+    # presumably an integer path parameter; TODO(review): confirm.
+    request = messages.Template(
+        tableId=tableId.decode('utf8'),
+        templateId=templateId,
+        )
+    # Copy only the flags the user explicitly set on the command line.
+    if FLAGS['automaticColumnNames'].present:
+      request.automaticColumnNames = [x.decode('utf8') for x in FLAGS.automaticColumnNames]
+    if FLAGS['body'].present:
+      request.body = FLAGS.body.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    result = client.template.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class TemplateUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping template.Update."""
+
+  usage = """template_update <tableId> <templateId>"""
+
+  def __init__(self, name, fv):
+    super(TemplateUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'automaticColumnNames',
+        None,
+        u'List of columns from which the template is to be automatically '
+        u'constructed. Only one of body or automaticColumns can be specified.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'body',
+        None,
+        u'Body of the template. It contains HTML with {column_name} to insert'
+        u' values from a particular column. The body is sanitized to remove '
+        u'certain tags, e.g., script. Only one of body or automaticColumns '
+        u'can be specified.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'fusiontables#template',
+        u'Type name: a template for the info window contents. The template '
+        u'can either include an HTML body or a list of columns from which the'
+        u' template is computed automatically.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Optional name assigned to a template.',
+        flag_values=fv)
+
+  def RunWithArgs(self, tableId, templateId):
+    """Updates an existing template
+
+    Args:
+      tableId: Identifier for the table for which the template is defined.
+      templateId: Identifier for the template, unique within the context of a
+        particular table.
+
+    Flags:
+      automaticColumnNames: List of columns from which the template is to be
+        automatically constructed. Only one of body or automaticColumns can be
+        specified.
+      body: Body of the template. It contains HTML with {column_name} to
+        insert values from a particular column. The body is sanitized to
+        remove certain tags, e.g., script. Only one of body or
+        automaticColumns can be specified.
+      kind: Type name: a template for the info window contents. The template
+        can either include an HTML body or a list of columns from which the
+        template is computed automatically.
+      name: Optional name assigned to a template.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.Template(
+        tableId=tableId.decode('utf8'),
+        templateId=templateId,
+        )
+    if FLAGS['automaticColumnNames'].present:
+      request.automaticColumnNames = [x.decode('utf8') for x in FLAGS.automaticColumnNames]
+    if FLAGS['body'].present:
+      request.body = FLAGS.body.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    result = client.template.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+def main(_):
+  appcommands.AddCmd('pyshell', PyShell)
+  appcommands.AddCmd('column_delete', ColumnDelete)
+  appcommands.AddCmd('column_get', ColumnGet)
+  appcommands.AddCmd('column_insert', ColumnInsert)
+  appcommands.AddCmd('column_list', ColumnList)
+  appcommands.AddCmd('column_patch', ColumnPatch)
+  appcommands.AddCmd('column_update', ColumnUpdate)
+  appcommands.AddCmd('query_sql', QuerySql)
+  appcommands.AddCmd('query_sqlGet', QuerySqlGet)
+  appcommands.AddCmd('style_delete', StyleDelete)
+  appcommands.AddCmd('style_get', StyleGet)
+  appcommands.AddCmd('style_insert', StyleInsert)
+  appcommands.AddCmd('style_list', StyleList)
+  appcommands.AddCmd('style_patch', StylePatch)
+  appcommands.AddCmd('style_update', StyleUpdate)
+  appcommands.AddCmd('table_copy', TableCopy)
+  appcommands.AddCmd('table_delete', TableDelete)
+  appcommands.AddCmd('table_get', TableGet)
+  appcommands.AddCmd('table_importRows', TableImportRows)
+  appcommands.AddCmd('table_importTable', TableImportTable)
+  appcommands.AddCmd('table_insert', TableInsert)
+  appcommands.AddCmd('table_list', TableList)
+  appcommands.AddCmd('table_patch', TablePatch)
+  appcommands.AddCmd('table_update', TableUpdate)
+  appcommands.AddCmd('task_delete', TaskDelete)
+  appcommands.AddCmd('task_get', TaskGet)
+  appcommands.AddCmd('task_list', TaskList)
+  appcommands.AddCmd('template_delete', TemplateDelete)
+  appcommands.AddCmd('template_get', TemplateGet)
+  appcommands.AddCmd('template_insert', TemplateInsert)
+  appcommands.AddCmd('template_list', TemplateList)
+  appcommands.AddCmd('template_patch', TemplatePatch)
+  appcommands.AddCmd('template_update', TemplateUpdate)
+
+  apitools_base_cli.SetupLogger()
+  if hasattr(appcommands, 'SetDefaultCommand'):
+    appcommands.SetDefaultCommand('pyshell')
+
+
+run_main = apitools_base_cli.run_main
+
+if __name__ == '__main__':
+  appcommands.Run()
diff --git a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py
new file mode 100644
index 0000000..3376aa3
--- /dev/null
+++ b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_client.py
@@ -0,0 +1,964 @@
+"""Generated client library for fusiontables version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+from apitools.base.py import base_api
+from samples.fusiontables_sample.fusiontables_v1 import fusiontables_v1_messages as messages
+
+
+class FusiontablesV1(base_api.BaseApiClient):
+  """Generated client library for service fusiontables version v1."""
+
+  MESSAGES_MODULE = messages
+  BASE_URL = u'https://www.googleapis.com/fusiontables/v1/'
+
+  _PACKAGE = u'fusiontables'
+  _SCOPES = [u'https://www.googleapis.com/auth/fusiontables', u'https://www.googleapis.com/auth/fusiontables.readonly']
+  _VERSION = u'v1'
+  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
+  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _CLIENT_CLASS_NAME = u'FusiontablesV1'
+  _URL_VERSION = u'v1'
+  _API_KEY = None
+
+  def __init__(self, url='', credentials=None,
+               get_credentials=True, http=None, model=None,
+               log_request=False, log_response=False,
+               credentials_args=None, default_global_params=None,
+               additional_http_headers=None):
+    """Create a new fusiontables handle."""
+    url = url or self.BASE_URL
+    super(FusiontablesV1, self).__init__(
+        url, credentials=credentials,
+        get_credentials=get_credentials, http=http, model=model,
+        log_request=log_request, log_response=log_response,
+        credentials_args=credentials_args,
+        default_global_params=default_global_params,
+        additional_http_headers=additional_http_headers)
+    self.column = self.ColumnService(self)
+    self.query = self.QueryService(self)
+    self.style = self.StyleService(self)
+    self.table = self.TableService(self)
+    self.task = self.TaskService(self)
+    self.template = self.TemplateService(self)
+
+  class ColumnService(base_api.BaseApiService):
+    """Service class for the column resource."""
+
+    _NAME = u'column'
+
+    def __init__(self, client):
+      super(FusiontablesV1.ColumnService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Deletes the column.
+
+      Args:
+        request: (FusiontablesColumnDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (FusiontablesColumnDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'fusiontables.column.delete',
+        ordered_params=[u'tableId', u'columnId'],
+        path_params=[u'columnId', u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/columns/{columnId}',
+        request_field='',
+        request_type_name=u'FusiontablesColumnDeleteRequest',
+        response_type_name=u'FusiontablesColumnDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Retrieves a specific column by its id.
+
+      Args:
+        request: (FusiontablesColumnGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Column) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.column.get',
+        ordered_params=[u'tableId', u'columnId'],
+        path_params=[u'columnId', u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/columns/{columnId}',
+        request_field='',
+        request_type_name=u'FusiontablesColumnGetRequest',
+        response_type_name=u'Column',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Adds a new column to the table.
+
+      Args:
+        request: (FusiontablesColumnInsertRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Column) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'fusiontables.column.insert',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/columns',
+        request_field=u'column',
+        request_type_name=u'FusiontablesColumnInsertRequest',
+        response_type_name=u'Column',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves a list of columns.
+
+      Args:
+        request: (FusiontablesColumnListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ColumnList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.column.list',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[u'maxResults', u'pageToken'],
+        relative_path=u'tables/{tableId}/columns',
+        request_field='',
+        request_type_name=u'FusiontablesColumnListRequest',
+        response_type_name=u'ColumnList',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates the name or type of an existing column. This method supports patch semantics.
+
+      Args:
+        request: (FusiontablesColumnPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Column) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'fusiontables.column.patch',
+        ordered_params=[u'tableId', u'columnId'],
+        path_params=[u'columnId', u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/columns/{columnId}',
+        request_field=u'column',
+        request_type_name=u'FusiontablesColumnPatchRequest',
+        response_type_name=u'Column',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates the name or type of an existing column.
+
+      Args:
+        request: (FusiontablesColumnUpdateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Column) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'fusiontables.column.update',
+        ordered_params=[u'tableId', u'columnId'],
+        path_params=[u'columnId', u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/columns/{columnId}',
+        request_field=u'column',
+        request_type_name=u'FusiontablesColumnUpdateRequest',
+        response_type_name=u'Column',
+        supports_download=False,
+    )
+
+  class QueryService(base_api.BaseApiService):
+    """Service class for the query resource."""
+
+    _NAME = u'query'
+
+    def __init__(self, client):
+      super(FusiontablesV1.QueryService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Sql(self, request, global_params=None, download=None):
+      """Executes an SQL SELECT/INSERT/UPDATE/DELETE/SHOW/DESCRIBE/CREATE statement.
+
+      Args:
+        request: (FusiontablesQuerySqlRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        download: (Download, default: None) If present, download
+            data from the request via this stream.
+      Returns:
+        (Sqlresponse) The response message.
+      """
+      config = self.GetMethodConfig('Sql')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          download=download)
+
+    Sql.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'fusiontables.query.sql',
+        ordered_params=[u'sql'],
+        path_params=[],
+        query_params=[u'hdrs', u'sql', u'typed'],
+        relative_path=u'query',
+        request_field='',
+        request_type_name=u'FusiontablesQuerySqlRequest',
+        response_type_name=u'Sqlresponse',
+        supports_download=True,
+    )
+
+    def SqlGet(self, request, global_params=None, download=None):
+      """Executes an SQL SELECT/SHOW/DESCRIBE statement.
+
+      Args:
+        request: (FusiontablesQuerySqlGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        download: (Download, default: None) If present, download
+            data from the request via this stream.
+      Returns:
+        (Sqlresponse) The response message.
+      """
+      config = self.GetMethodConfig('SqlGet')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          download=download)
+
+    SqlGet.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.query.sqlGet',
+        ordered_params=[u'sql'],
+        path_params=[],
+        query_params=[u'hdrs', u'sql', u'typed'],
+        relative_path=u'query',
+        request_field='',
+        request_type_name=u'FusiontablesQuerySqlGetRequest',
+        response_type_name=u'Sqlresponse',
+        supports_download=True,
+    )
+
+  class StyleService(base_api.BaseApiService):
+    """Service class for the style resource."""
+
+    _NAME = u'style'
+
+    def __init__(self, client):
+      super(FusiontablesV1.StyleService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Deletes a style.
+
+      Args:
+        request: (FusiontablesStyleDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (FusiontablesStyleDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'fusiontables.style.delete',
+        ordered_params=[u'tableId', u'styleId'],
+        path_params=[u'styleId', u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/styles/{styleId}',
+        request_field='',
+        request_type_name=u'FusiontablesStyleDeleteRequest',
+        response_type_name=u'FusiontablesStyleDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Gets a specific style.
+
+      Args:
+        request: (FusiontablesStyleGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StyleSetting) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.style.get',
+        ordered_params=[u'tableId', u'styleId'],
+        path_params=[u'styleId', u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/styles/{styleId}',
+        request_field='',
+        request_type_name=u'FusiontablesStyleGetRequest',
+        response_type_name=u'StyleSetting',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Adds a new style for the table.
+
+      Args:
+        request: (StyleSetting) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StyleSetting) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'fusiontables.style.insert',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/styles',
+        request_field='<request>',
+        request_type_name=u'StyleSetting',
+        response_type_name=u'StyleSetting',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves a list of styles.
+
+      Args:
+        request: (FusiontablesStyleListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StyleSettingList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.style.list',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[u'maxResults', u'pageToken'],
+        relative_path=u'tables/{tableId}/styles',
+        request_field='',
+        request_type_name=u'FusiontablesStyleListRequest',
+        response_type_name=u'StyleSettingList',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates an existing style. This method supports patch semantics.
+
+      Args:
+        request: (StyleSetting) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StyleSetting) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'fusiontables.style.patch',
+        ordered_params=[u'tableId', u'styleId'],
+        path_params=[u'styleId', u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/styles/{styleId}',
+        request_field='<request>',
+        request_type_name=u'StyleSetting',
+        response_type_name=u'StyleSetting',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates an existing style.
+
+      Args:
+        request: (StyleSetting) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StyleSetting) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'fusiontables.style.update',
+        ordered_params=[u'tableId', u'styleId'],
+        path_params=[u'styleId', u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/styles/{styleId}',
+        request_field='<request>',
+        request_type_name=u'StyleSetting',
+        response_type_name=u'StyleSetting',
+        supports_download=False,
+    )
+
+  class TableService(base_api.BaseApiService):
+    """Service class for the table resource."""
+
+    _NAME = u'table'
+
+    def __init__(self, client):
+      super(FusiontablesV1.TableService, self).__init__(client)
+      self._upload_configs = {
+          'ImportRows': base_api.ApiUploadInfo(
+              accept=['application/octet-stream'],
+              max_size=262144000,
+              resumable_multipart=True,
+              resumable_path=u'/resumable/upload/fusiontables/v1/tables/{tableId}/import',
+              simple_multipart=True,
+              simple_path=u'/upload/fusiontables/v1/tables/{tableId}/import',
+          ),
+          'ImportTable': base_api.ApiUploadInfo(
+              accept=['application/octet-stream'],
+              max_size=262144000,
+              resumable_multipart=True,
+              resumable_path=u'/resumable/upload/fusiontables/v1/tables/import',
+              simple_multipart=True,
+              simple_path=u'/upload/fusiontables/v1/tables/import',
+          ),
+          }
+
+    def Copy(self, request, global_params=None):
+      """Copies a table.
+
+      Args:
+        request: (FusiontablesTableCopyRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Copy')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Copy.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'fusiontables.table.copy',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[u'copyPresentation'],
+        relative_path=u'tables/{tableId}/copy',
+        request_field='',
+        request_type_name=u'FusiontablesTableCopyRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+    def Delete(self, request, global_params=None):
+      """Deletes a table.
+
+      Args:
+        request: (FusiontablesTableDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (FusiontablesTableDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'fusiontables.table.delete',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}',
+        request_field='',
+        request_type_name=u'FusiontablesTableDeleteRequest',
+        response_type_name=u'FusiontablesTableDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Retrieves a specific table by its id.
+
+      Args:
+        request: (FusiontablesTableGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.table.get',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}',
+        request_field='',
+        request_type_name=u'FusiontablesTableGetRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+    def ImportRows(self, request, global_params=None, upload=None):
+      """Import more rows into a table.
+
+      Args:
+        request: (FusiontablesTableImportRowsRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        upload: (Upload, default: None) If present, upload
+            this stream with the request.
+      Returns:
+        (Import) The response message.
+      """
+      config = self.GetMethodConfig('ImportRows')
+      upload_config = self.GetUploadConfig('ImportRows')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          upload=upload, upload_config=upload_config)
+
+    ImportRows.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'fusiontables.table.importRows',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[u'delimiter', u'encoding', u'endLine', u'isStrict', u'startLine'],
+        relative_path=u'tables/{tableId}/import',
+        request_field='',
+        request_type_name=u'FusiontablesTableImportRowsRequest',
+        response_type_name=u'Import',
+        supports_download=False,
+    )
+
+    def ImportTable(self, request, global_params=None, upload=None):
+      """Import a new table.
+
+      Args:
+        request: (FusiontablesTableImportTableRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        upload: (Upload, default: None) If present, upload
+            this stream with the request.
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('ImportTable')
+      upload_config = self.GetUploadConfig('ImportTable')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          upload=upload, upload_config=upload_config)
+
+    ImportTable.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'fusiontables.table.importTable',
+        ordered_params=[u'name'],
+        path_params=[],
+        query_params=[u'delimiter', u'encoding', u'name'],
+        relative_path=u'tables/import',
+        request_field='',
+        request_type_name=u'FusiontablesTableImportTableRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Creates a new table.
+
+      Args:
+        request: (Table) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'fusiontables.table.insert',
+        ordered_params=[],
+        path_params=[],
+        query_params=[],
+        relative_path=u'tables',
+        request_field='<request>',
+        request_type_name=u'Table',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves a list of tables a user owns.
+
+      Args:
+        request: (FusiontablesTableListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TableList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.table.list',
+        ordered_params=[],
+        path_params=[],
+        query_params=[u'maxResults', u'pageToken'],
+        relative_path=u'tables',
+        request_field='',
+        request_type_name=u'FusiontablesTableListRequest',
+        response_type_name=u'TableList',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated. This method supports patch semantics.
+
+      Args:
+        request: (FusiontablesTablePatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'fusiontables.table.patch',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[u'replaceViewDefinition'],
+        relative_path=u'tables/{tableId}',
+        request_field=u'table',
+        request_type_name=u'FusiontablesTablePatchRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates an existing table. Unless explicitly requested, only the name, description, and attribution will be updated.
+
+      Args:
+        request: (FusiontablesTableUpdateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Table) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'fusiontables.table.update',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[u'replaceViewDefinition'],
+        relative_path=u'tables/{tableId}',
+        request_field=u'table',
+        request_type_name=u'FusiontablesTableUpdateRequest',
+        response_type_name=u'Table',
+        supports_download=False,
+    )
+
+  class TaskService(base_api.BaseApiService):
+    """Service class for the task resource."""
+
+    _NAME = u'task'
+
+    def __init__(self, client):
+      super(FusiontablesV1.TaskService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Deletes the task, unless already started.
+
+      Args:
+        request: (FusiontablesTaskDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (FusiontablesTaskDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'fusiontables.task.delete',
+        ordered_params=[u'tableId', u'taskId'],
+        path_params=[u'tableId', u'taskId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/tasks/{taskId}',
+        request_field='',
+        request_type_name=u'FusiontablesTaskDeleteRequest',
+        response_type_name=u'FusiontablesTaskDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Retrieves a specific task by its id.
+
+      Args:
+        request: (FusiontablesTaskGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Task) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.task.get',
+        ordered_params=[u'tableId', u'taskId'],
+        path_params=[u'tableId', u'taskId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/tasks/{taskId}',
+        request_field='',
+        request_type_name=u'FusiontablesTaskGetRequest',
+        response_type_name=u'Task',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves a list of tasks.
+
+      Args:
+        request: (FusiontablesTaskListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TaskList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.task.list',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[u'maxResults', u'pageToken', u'startIndex'],
+        relative_path=u'tables/{tableId}/tasks',
+        request_field='',
+        request_type_name=u'FusiontablesTaskListRequest',
+        response_type_name=u'TaskList',
+        supports_download=False,
+    )
+
+  class TemplateService(base_api.BaseApiService):
+    """Service class for the template resource."""
+
+    _NAME = u'template'
+
+    def __init__(self, client):
+      super(FusiontablesV1.TemplateService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Deletes a template.
+
+      Args:
+        request: (FusiontablesTemplateDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (FusiontablesTemplateDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'fusiontables.template.delete',
+        ordered_params=[u'tableId', u'templateId'],
+        path_params=[u'tableId', u'templateId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/templates/{templateId}',
+        request_field='',
+        request_type_name=u'FusiontablesTemplateDeleteRequest',
+        response_type_name=u'FusiontablesTemplateDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Retrieves a specific template by its id.
+
+      Args:
+        request: (FusiontablesTemplateGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Template) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.template.get',
+        ordered_params=[u'tableId', u'templateId'],
+        path_params=[u'tableId', u'templateId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/templates/{templateId}',
+        request_field='',
+        request_type_name=u'FusiontablesTemplateGetRequest',
+        response_type_name=u'Template',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Creates a new template for the table.
+
+      Args:
+        request: (Template) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Template) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'fusiontables.template.insert',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/templates',
+        request_field='<request>',
+        request_type_name=u'Template',
+        response_type_name=u'Template',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves a list of templates.
+
+      Args:
+        request: (FusiontablesTemplateListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TemplateList) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'fusiontables.template.list',
+        ordered_params=[u'tableId'],
+        path_params=[u'tableId'],
+        query_params=[u'maxResults', u'pageToken'],
+        relative_path=u'tables/{tableId}/templates',
+        request_field='',
+        request_type_name=u'FusiontablesTemplateListRequest',
+        response_type_name=u'TemplateList',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates an existing template. This method supports patch semantics.
+
+      Args:
+        request: (Template) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Template) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'fusiontables.template.patch',
+        ordered_params=[u'tableId', u'templateId'],
+        path_params=[u'tableId', u'templateId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/templates/{templateId}',
+        request_field='<request>',
+        request_type_name=u'Template',
+        response_type_name=u'Template',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates an existing template.
+
+      Args:
+        request: (Template) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Template) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'fusiontables.template.update',
+        ordered_params=[u'tableId', u'templateId'],
+        path_params=[u'tableId', u'templateId'],
+        query_params=[],
+        relative_path=u'tables/{tableId}/templates/{templateId}',
+        request_field='<request>',
+        request_type_name=u'Template',
+        response_type_name=u'Template',
+        supports_download=False,
+    )
diff --git a/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_messages.py b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_messages.py
new file mode 100644
index 0000000..15f878e
--- /dev/null
+++ b/samples/fusiontables_sample/fusiontables_v1/fusiontables_v1_messages.py
@@ -0,0 +1,936 @@
+"""Generated message classes for fusiontables version v1.
+
+API for working with Fusion Tables data.
+"""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+from apitools.base.protorpclite import messages as _messages
+from apitools.base.py import extra_types
+
+
+package = 'fusiontables'
+
+
+class Bucket(_messages.Message):
+  """Specifies the minimum and maximum values, the color, opacity, icon and
+  weight of a bucket within a StyleSetting.
+
+  Fields:
+    color: Color of line or the interior of a polygon in #RRGGBB format.
+    icon: Icon name used for a point.
+    max: Maximum value in the selected column for a row to be styled according
+      to the bucket color, opacity, icon, or weight.
+    min: Minimum value in the selected column for a row to be styled according
+      to the bucket color, opacity, icon, or weight.
+    opacity: Opacity of the color: 0.0 (transparent) to 1.0 (opaque).
+    weight: Width of a line (in pixels).
+  """
+
+  color = _messages.StringField(1)
+  icon = _messages.StringField(2)
+  max = _messages.FloatField(3)
+  min = _messages.FloatField(4)
+  opacity = _messages.FloatField(5)
+  weight = _messages.IntegerField(6, variant=_messages.Variant.INT32)
+
+
+class Column(_messages.Message):
+  """Specifies the id, name and type of a column in a table.
+
+  Messages:
+    BaseColumnValue: Optional identifier of the base column. If present, this
+      column is derived from the specified base column.
+
+  Fields:
+    baseColumn: Optional identifier of the base column. If present, this
+      column is derived from the specified base column.
+    columnId: Identifier for the column.
+    description: Optional column description.
+    graph_predicate: Optional column predicate. Used to map table to graph
+      data model (subject,predicate,object) See http://www.w3.org/TR/2014/REC-
+      rdf11-concepts-20140225/#data-model
+    kind: Type name: a template for an individual column.
+    name: Required name of the column.
+    type: Required type of the column.
+  """
+
+  class BaseColumnValue(_messages.Message):
+    """Optional identifier of the base column. If present, this column is
+    derived from the specified base column.
+
+    Fields:
+      columnId: The id of the column in the base table from which this column
+        is derived.
+      tableIndex: Offset to the entry in the list of base tables in the table
+        definition.
+    """
+
+    columnId = _messages.IntegerField(1, variant=_messages.Variant.INT32)
+    tableIndex = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+
+  baseColumn = _messages.MessageField('BaseColumnValue', 1)
+  columnId = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+  description = _messages.StringField(3)
+  graph_predicate = _messages.StringField(4)
+  kind = _messages.StringField(5, default=u'fusiontables#column')
+  name = _messages.StringField(6)
+  type = _messages.StringField(7)
+
+
+class ColumnList(_messages.Message):
+  """Represents a list of columns in a table.
+
+  Fields:
+    items: List of all requested columns.
+    kind: Type name: a list of all columns.
+    nextPageToken: Token used to access the next page of this result. No token
+      is displayed if there are no more pages left.
+    totalItems: Total number of columns for the table.
+  """
+
+  items = _messages.MessageField('Column', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'fusiontables#columnList')
+  nextPageToken = _messages.StringField(3)
+  totalItems = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+
+
+class FusiontablesColumnDeleteRequest(_messages.Message):
+  """A FusiontablesColumnDeleteRequest object.
+
+  Fields:
+    columnId: Name or identifier for the column being deleted.
+    tableId: Table from which the column is being deleted.
+  """
+
+  columnId = _messages.StringField(1, required=True)
+  tableId = _messages.StringField(2, required=True)
+
+
+class FusiontablesColumnDeleteResponse(_messages.Message):
+  """An empty FusiontablesColumnDelete response."""
+
+
+class FusiontablesColumnGetRequest(_messages.Message):
+  """A FusiontablesColumnGetRequest object.
+
+  Fields:
+    columnId: Name or identifier for the column that is being requested.
+    tableId: Table to which the column belongs.
+  """
+
+  columnId = _messages.StringField(1, required=True)
+  tableId = _messages.StringField(2, required=True)
+
+
+class FusiontablesColumnInsertRequest(_messages.Message):
+  """A FusiontablesColumnInsertRequest object.
+
+  Fields:
+    column: A Column resource to be passed as the request body.
+    tableId: Table for which a new column is being added.
+  """
+
+  column = _messages.MessageField('Column', 1)
+  tableId = _messages.StringField(2, required=True)
+
+
+class FusiontablesColumnListRequest(_messages.Message):
+  """A FusiontablesColumnListRequest object.
+
+  Fields:
+    maxResults: Maximum number of columns to return. Optional. Default is 5.
+    pageToken: Continuation token specifying which result page to return.
+      Optional.
+    tableId: Table whose columns are being listed.
+  """
+
+  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(2)
+  tableId = _messages.StringField(3, required=True)
+
+
+class FusiontablesColumnPatchRequest(_messages.Message):
+  """A FusiontablesColumnPatchRequest object.
+
+  Fields:
+    column: A Column resource to be passed as the request body.
+    columnId: Name or identifier for the column that is being updated.
+    tableId: Table for which the column is being updated.
+  """
+
+  column = _messages.MessageField('Column', 1)
+  columnId = _messages.StringField(2, required=True)
+  tableId = _messages.StringField(3, required=True)
+
+
+class FusiontablesColumnUpdateRequest(_messages.Message):
+  """A FusiontablesColumnUpdateRequest object.
+
+  Fields:
+    column: A Column resource to be passed as the request body.
+    columnId: Name or identifier for the column that is being updated.
+    tableId: Table for which the column is being updated.
+  """
+
+  column = _messages.MessageField('Column', 1)
+  columnId = _messages.StringField(2, required=True)
+  tableId = _messages.StringField(3, required=True)
+
+
+class FusiontablesQuerySqlGetRequest(_messages.Message):
+  """A FusiontablesQuerySqlGetRequest object.
+
+  Fields:
+    hdrs: Should column names be included (in the first row)?. Default is
+      true.
+    sql: An SQL SELECT/SHOW/DESCRIBE statement.
+    typed: Should typed values be returned in the (JSON) response -- numbers
+      for numeric values and parsed geometries for KML values? Default is
+      true.
+  """
+
+  hdrs = _messages.BooleanField(1)
+  sql = _messages.StringField(2, required=True)
+  typed = _messages.BooleanField(3)
+
+
+class FusiontablesQuerySqlRequest(_messages.Message):
+  """A FusiontablesQuerySqlRequest object.
+
+  Fields:
+    hdrs: Should column names be included (in the first row)?. Default is
+      true.
+    sql: An SQL SELECT/SHOW/DESCRIBE/INSERT/UPDATE/DELETE/CREATE statement.
+    typed: Should typed values be returned in the (JSON) response -- numbers
+      for numeric values and parsed geometries for KML values? Default is
+      true.
+  """
+
+  hdrs = _messages.BooleanField(1)
+  sql = _messages.StringField(2, required=True)
+  typed = _messages.BooleanField(3)
+
+
+class FusiontablesStyleDeleteRequest(_messages.Message):
+  """A FusiontablesStyleDeleteRequest object.
+
+  Fields:
+    styleId: Identifier (within a table) for the style being deleted
+    tableId: Table from which the style is being deleted
+  """
+
+  styleId = _messages.IntegerField(1, required=True, variant=_messages.Variant.INT32)
+  tableId = _messages.StringField(2, required=True)
+
+
+class FusiontablesStyleDeleteResponse(_messages.Message):
+  """An empty FusiontablesStyleDelete response."""
+
+
+class FusiontablesStyleGetRequest(_messages.Message):
+  """A FusiontablesStyleGetRequest object.
+
+  Fields:
+    styleId: Identifier (integer) for a specific style in a table
+    tableId: Table to which the requested style belongs
+  """
+
+  styleId = _messages.IntegerField(1, required=True, variant=_messages.Variant.INT32)
+  tableId = _messages.StringField(2, required=True)
+
+
+class FusiontablesStyleListRequest(_messages.Message):
+  """A FusiontablesStyleListRequest object.
+
+  Fields:
+    maxResults: Maximum number of styles to return. Optional. Default is 5.
+    pageToken: Continuation token specifying which result page to return.
+      Optional.
+    tableId: Table whose styles are being listed
+  """
+
+  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(2)
+  tableId = _messages.StringField(3, required=True)
+
+
+class FusiontablesTableCopyRequest(_messages.Message):
+  """A FusiontablesTableCopyRequest object.
+
+  Fields:
+    copyPresentation: Whether to also copy tabs, styles, and templates.
+      Default is false.
+    tableId: ID of the table that is being copied.
+  """
+
+  copyPresentation = _messages.BooleanField(1)
+  tableId = _messages.StringField(2, required=True)
+
+
+class FusiontablesTableDeleteRequest(_messages.Message):
+  """A FusiontablesTableDeleteRequest object.
+
+  Fields:
+    tableId: ID of the table that is being deleted.
+  """
+
+  tableId = _messages.StringField(1, required=True)
+
+
+class FusiontablesTableDeleteResponse(_messages.Message):
+  """An empty FusiontablesTableDelete response."""
+
+
+class FusiontablesTableGetRequest(_messages.Message):
+  """A FusiontablesTableGetRequest object.
+
+  Fields:
+    tableId: Identifier(ID) for the table being requested.
+  """
+
+  tableId = _messages.StringField(1, required=True)
+
+
+class FusiontablesTableImportRowsRequest(_messages.Message):
+  """A FusiontablesTableImportRowsRequest object.
+
+  Fields:
+    delimiter: The delimiter used to separate cell values. This can only
+      consist of a single character. Default is ','.
+    encoding: The encoding of the content. Default is UTF-8. Use 'auto-detect'
+      if you are unsure of the encoding.
+    endLine: The index of the last line from which to start importing,
+      exclusive. Thus, the number of imported lines is endLine - startLine. If
+      this parameter is not provided, the file will be imported until the last
+      line of the file. If endLine is negative, then the imported content will
+      exclude the last endLine lines. That is, if endline is negative, no line
+      will be imported whose index is greater than N + endLine where N is the
+      number of lines in the file, and the number of imported lines will be N
+      + endLine - startLine.
+    isStrict: Whether the CSV must have the same number of values for each
+      row. If false, rows with fewer values will be padded with empty values.
+      Default is true.
+    startLine: The index of the first line from which to start importing,
+      inclusive. Default is 0.
+    tableId: The table into which new rows are being imported.
+  """
+
+  delimiter = _messages.StringField(1)
+  encoding = _messages.StringField(2)
+  endLine = _messages.IntegerField(3, variant=_messages.Variant.INT32)
+  isStrict = _messages.BooleanField(4)
+  startLine = _messages.IntegerField(5, variant=_messages.Variant.INT32)
+  tableId = _messages.StringField(6, required=True)
+
+
+class FusiontablesTableImportTableRequest(_messages.Message):
+  """A FusiontablesTableImportTableRequest object.
+
+  Fields:
+    delimiter: The delimiter used to separate cell values. This can only
+      consist of a single character. Default is ','.
+    encoding: The encoding of the content. Default is UTF-8. Use 'auto-detect'
+      if you are unsure of the encoding.
+    name: The name to be assigned to the new table.
+  """
+
+  delimiter = _messages.StringField(1)
+  encoding = _messages.StringField(2)
+  name = _messages.StringField(3, required=True)
+
+
+class FusiontablesTableListRequest(_messages.Message):
+  """A FusiontablesTableListRequest object.
+
+  Fields:
+    maxResults: Maximum number of styles to return. Optional. Default is 5.
+    pageToken: Continuation token specifying which result page to return.
+      Optional.
+  """
+
+  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(2)
+
+
+class FusiontablesTablePatchRequest(_messages.Message):
+  """A FusiontablesTablePatchRequest object.
+
+  Fields:
+    replaceViewDefinition: Should the view definition also be updated? The
+      specified view definition replaces the existing one. Only a view can be
+      updated with a new definition.
+    table: A Table resource to be passed as the request body.
+    tableId: ID of the table that is being updated.
+  """
+
+  replaceViewDefinition = _messages.BooleanField(1)
+  table = _messages.MessageField('Table', 2)
+  tableId = _messages.StringField(3, required=True)
+
+
+class FusiontablesTableUpdateRequest(_messages.Message):
+  """A FusiontablesTableUpdateRequest object.
+
+  Fields:
+    replaceViewDefinition: Should the view definition also be updated? The
+      specified view definition replaces the existing one. Only a view can be
+      updated with a new definition.
+    table: A Table resource to be passed as the request body.
+    tableId: ID of the table that is being updated.
+  """
+
+  replaceViewDefinition = _messages.BooleanField(1)
+  table = _messages.MessageField('Table', 2)
+  tableId = _messages.StringField(3, required=True)
+
+
+class FusiontablesTaskDeleteRequest(_messages.Message):
+  """A FusiontablesTaskDeleteRequest object.
+
+  Fields:
+    tableId: Table from which the task is being deleted.
+    taskId: A string attribute.
+  """
+
+  tableId = _messages.StringField(1, required=True)
+  taskId = _messages.StringField(2, required=True)
+
+
+class FusiontablesTaskDeleteResponse(_messages.Message):
+  """An empty FusiontablesTaskDelete response."""
+
+
+class FusiontablesTaskGetRequest(_messages.Message):
+  """A FusiontablesTaskGetRequest object.
+
+  Fields:
+    tableId: Table to which the task belongs.
+    taskId: A string attribute.
+  """
+
+  tableId = _messages.StringField(1, required=True)
+  taskId = _messages.StringField(2, required=True)
+
+
+class FusiontablesTaskListRequest(_messages.Message):
+  """A FusiontablesTaskListRequest object.
+
+  Fields:
+    maxResults: Maximum number of columns to return. Optional. Default is 5.
+    pageToken: A string attribute.
+    startIndex: A integer attribute.
+    tableId: Table whose tasks are being listed.
+  """
+
+  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(2)
+  startIndex = _messages.IntegerField(3, variant=_messages.Variant.UINT32)
+  tableId = _messages.StringField(4, required=True)
+
+
+class FusiontablesTemplateDeleteRequest(_messages.Message):
+  """A FusiontablesTemplateDeleteRequest object.
+
+  Fields:
+    tableId: Table from which the template is being deleted
+    templateId: Identifier for the template which is being deleted
+  """
+
+  tableId = _messages.StringField(1, required=True)
+  templateId = _messages.IntegerField(2, required=True, variant=_messages.Variant.INT32)
+
+
+class FusiontablesTemplateDeleteResponse(_messages.Message):
+  """An empty FusiontablesTemplateDelete response."""
+
+
+class FusiontablesTemplateGetRequest(_messages.Message):
+  """A FusiontablesTemplateGetRequest object.
+
+  Fields:
+    tableId: Table to which the template belongs
+    templateId: Identifier for the template that is being requested
+  """
+
+  tableId = _messages.StringField(1, required=True)
+  templateId = _messages.IntegerField(2, required=True, variant=_messages.Variant.INT32)
+
+
+class FusiontablesTemplateListRequest(_messages.Message):
+  """A FusiontablesTemplateListRequest object.
+
+  Fields:
+    maxResults: Maximum number of templates to return. Optional. Default is 5.
+    pageToken: Continuation token specifying which results page to return.
+      Optional.
+    tableId: Identifier for the table whose templates are being requested
+  """
+
+  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(2)
+  tableId = _messages.StringField(3, required=True)
+
+
+class Geometry(_messages.Message):
+  """Represents a Geometry object.
+
+  Fields:
+    geometries: The list of geometries in this geometry collection.
+    geometry: A extra_types.JsonValue attribute.
+    type: Type: A collection of geometries.
+  """
+
+  geometries = _messages.MessageField('extra_types.JsonValue', 1, repeated=True)
+  geometry = _messages.MessageField('extra_types.JsonValue', 2)
+  type = _messages.StringField(3, default=u'GeometryCollection')
+
+
+class Import(_messages.Message):
+  """Represents an import request.
+
+  Fields:
+    kind: Type name: a template for an import request.
+    numRowsReceived: The number of rows received from the import request.
+  """
+
+  kind = _messages.StringField(1, default=u'fusiontables#import')
+  numRowsReceived = _messages.IntegerField(2)
+
+
+class Line(_messages.Message):
+  """Represents a line geometry.
+
+  Messages:
+    CoordinatesValueListEntry: Single entry in a CoordinatesValue.
+
+  Fields:
+    coordinates: The coordinates that define the line.
+    type: Type: A line geometry.
+  """
+
+  class CoordinatesValueListEntry(_messages.Message):
+    """Single entry in a CoordinatesValue.
+
+    Fields:
+      entry: A number attribute.
+    """
+
+    entry = _messages.FloatField(1, repeated=True)
+
+  coordinates = _messages.MessageField('CoordinatesValueListEntry', 1, repeated=True)
+  type = _messages.StringField(2, default=u'LineString')
+
+
+class LineStyle(_messages.Message):
+  """Represents a LineStyle within a StyleSetting
+
+  Fields:
+    strokeColor: Color of the line in #RRGGBB format.
+    strokeColorStyler: Column-value, gradient or buckets styler that is used
+      to determine the line color and opacity.
+    strokeOpacity: Opacity of the line : 0.0 (transparent) to 1.0 (opaque).
+    strokeWeight: Width of the line in pixels.
+    strokeWeightStyler: Column-value or bucket styler that is used to
+      determine the width of the line.
+  """
+
+  strokeColor = _messages.StringField(1)
+  strokeColorStyler = _messages.MessageField('StyleFunction', 2)
+  strokeOpacity = _messages.FloatField(3)
+  strokeWeight = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+  strokeWeightStyler = _messages.MessageField('StyleFunction', 5)
+
+
+class Point(_messages.Message):
+  """Represents a point object.
+
+  Fields:
+    coordinates: The coordinates that define the point.
+    type: Point: A point geometry.
+  """
+
+  coordinates = _messages.FloatField(1, repeated=True)
+  type = _messages.StringField(2, default=u'Point')
+
+
+class PointStyle(_messages.Message):
+  """Represents a PointStyle within a StyleSetting
+
+  Fields:
+    iconName: Name of the icon. Use values defined in
+      http://www.google.com/fusiontables/DataSource?dsrcid=308519
+    iconStyler: Column or a bucket value from which the icon name is to be
+      determined.
+  """
+
+  iconName = _messages.StringField(1)
+  iconStyler = _messages.MessageField('StyleFunction', 2)
+
+
+class Polygon(_messages.Message):
+  """Represents a polygon object.
+
+  Messages:
+    CoordinatesValueListEntry: Single entry in a CoordinatesValue.
+
+  Fields:
+    coordinates: The coordinates that define the polygon.
+    type: Type: A polygon geometry.
+  """
+
+  class CoordinatesValueListEntry(_messages.Message):
+    """Single entry in a CoordinatesValue.
+
+    Messages:
+      EntryValueListEntry: Single entry in a EntryValue.
+
+    Fields:
+      entry: A EntryValueListEntry attribute.
+    """
+
+    class EntryValueListEntry(_messages.Message):
+      """Single entry in a EntryValue.
+
+      Fields:
+        entry: A number attribute.
+      """
+
+      entry = _messages.FloatField(1, repeated=True)
+
+    entry = _messages.MessageField('EntryValueListEntry', 1, repeated=True)
+
+  coordinates = _messages.MessageField('CoordinatesValueListEntry', 1, repeated=True)
+  type = _messages.StringField(2, default=u'Polygon')
+
+
+class PolygonStyle(_messages.Message):
+  """Represents a PolygonStyle within a StyleSetting
+
+  Fields:
+    fillColor: Color of the interior of the polygon in #RRGGBB format.
+    fillColorStyler: Column-value, gradient, or bucket styler that is used to
+      determine the interior color and opacity of the polygon.
+    fillOpacity: Opacity of the interior of the polygon: 0.0 (transparent) to
+      1.0 (opaque).
+    strokeColor: Color of the polygon border in #RRGGBB format.
+    strokeColorStyler: Column-value, gradient or buckets styler that is used
+      to determine the border color and opacity.
+    strokeOpacity: Opacity of the polygon border: 0.0 (transparent) to 1.0
+      (opaque).
+    strokeWeight: Width of the polyon border in pixels.
+    strokeWeightStyler: Column-value or bucket styler that is used to
+      determine the width of the polygon border.
+  """
+
+  fillColor = _messages.StringField(1)
+  fillColorStyler = _messages.MessageField('StyleFunction', 2)
+  fillOpacity = _messages.FloatField(3)
+  strokeColor = _messages.StringField(4)
+  strokeColorStyler = _messages.MessageField('StyleFunction', 5)
+  strokeOpacity = _messages.FloatField(6)
+  strokeWeight = _messages.IntegerField(7, variant=_messages.Variant.INT32)
+  strokeWeightStyler = _messages.MessageField('StyleFunction', 8)
+
+
+class Sqlresponse(_messages.Message):
+  """Represents a response to an sql statement.
+
+  Messages:
+    RowsValueListEntry: Single entry in a RowsValue.
+
+  Fields:
+    columns: Columns in the table.
+    kind: Type name: a template for an individual table.
+    rows: The rows in the table. For each cell we print out whatever cell
+      value (e.g., numeric, string) exists. Thus it is important that each
+      cell contains only one value.
+  """
+
+  class RowsValueListEntry(_messages.Message):
+    """Single entry in a RowsValue.
+
+    Fields:
+      entry: A extra_types.JsonValue attribute.
+    """
+
+    entry = _messages.MessageField('extra_types.JsonValue', 1, repeated=True)
+
+  columns = _messages.StringField(1, repeated=True)
+  kind = _messages.StringField(2, default=u'fusiontables#sqlresponse')
+  rows = _messages.MessageField('RowsValueListEntry', 3, repeated=True)
+
+
+class StandardQueryParameters(_messages.Message):
+  """Query parameters accepted by all methods.
+
+  Enums:
+    AltValueValuesEnum: Data format for the response.
+
+  Fields:
+    alt: Data format for the response.
+    fields: Selector specifying which fields to include in a partial response.
+    key: API key. Your API key identifies your project and provides you with
+      API access, quota, and reports. Required unless you provide an OAuth 2.0
+      token.
+    oauth_token: OAuth 2.0 token for the current user.
+    prettyPrint: Returns response with indentations and line breaks.
+    quotaUser: Available to use for quota purposes for server-side
+      applications. Can be any arbitrary string assigned to a user, but should
+      not exceed 40 characters. Overrides userIp if both are provided.
+    trace: A tracing token of the form "token:<tokenid>" to include in api
+      requests.
+    userIp: IP address of the site where the request originates. Use this if
+      you want to enforce per-user limits.
+  """
+
+  class AltValueValuesEnum(_messages.Enum):
+    """Data format for the response.
+
+    Values:
+      csv: Responses with Content-Type of text/csv
+      json: Responses with Content-Type of application/json
+    """
+    csv = 0
+    json = 1
+
+  alt = _messages.EnumField('AltValueValuesEnum', 1, default=u'json')
+  fields = _messages.StringField(2)
+  key = _messages.StringField(3)
+  oauth_token = _messages.StringField(4)
+  prettyPrint = _messages.BooleanField(5, default=True)
+  quotaUser = _messages.StringField(6)
+  trace = _messages.StringField(7)
+  userIp = _messages.StringField(8)
+
+
+class StyleFunction(_messages.Message):
+  """Represents a StyleFunction within a StyleSetting
+
+  Messages:
+    GradientValue: Gradient function that interpolates a range of colors based
+      on column value.
+
+  Fields:
+    buckets: Bucket function that assigns a style based on the range a column
+      value falls into.
+    columnName: Name of the column whose value is used in the style.
+    gradient: Gradient function that interpolates a range of colors based on
+      column value.
+    kind: Stylers can be one of three kinds: "fusiontables#fromColumn" if the
+      column value is to be used as is, i.e., the column values can have
+      colors in #RRGGBBAA format or integer line widths or icon names;
+      "fusiontables#gradient" if the styling of the row is to be based on
+      applying the gradient function on the column value; or
+      "fusiontables#buckets" if the styling is to based on the bucket into
+      which the the column value falls.
+  """
+
+  class GradientValue(_messages.Message):
+    """Gradient function that interpolates a range of colors based on column
+    value.
+
+    Messages:
+      ColorsValueListEntry: A ColorsValueListEntry object.
+
+    Fields:
+      colors: Array with two or more colors.
+      max: Higher-end of the interpolation range: rows with this value will be
+        assigned to colors[n-1].
+      min: Lower-end of the interpolation range: rows with this value will be
+        assigned to colors[0].
+    """
+
+    class ColorsValueListEntry(_messages.Message):
+      """A ColorsValueListEntry object.
+
+      Fields:
+        color: Color in #RRGGBB format.
+        opacity: Opacity of the color: 0.0 (transparent) to 1.0 (opaque).
+      """
+
+      color = _messages.StringField(1)
+      opacity = _messages.FloatField(2)
+
+    colors = _messages.MessageField('ColorsValueListEntry', 1, repeated=True)
+    max = _messages.FloatField(2)
+    min = _messages.FloatField(3)
+
+  buckets = _messages.MessageField('Bucket', 1, repeated=True)
+  columnName = _messages.StringField(2)
+  gradient = _messages.MessageField('GradientValue', 3)
+  kind = _messages.StringField(4)
+
+
+class StyleSetting(_messages.Message):
+  """Represents a complete StyleSettings object. The primary key is a
+  combination of the tableId and a styleId.
+
+  Fields:
+    kind: Type name: an individual style setting. A StyleSetting contains the
+      style defintions for points, lines, and polygons in a table. Since a
+      table can have any one or all of them, a style definition can have
+      point, line and polygon style definitions.
+    markerOptions: Style definition for points in the table.
+    name: Optional name for the style setting.
+    polygonOptions: Style definition for polygons in the table.
+    polylineOptions: Style definition for lines in the table.
+    styleId: Identifier for the style setting (unique only within tables).
+    tableId: Identifier for the table.
+  """
+
+  kind = _messages.StringField(1, default=u'fusiontables#styleSetting')
+  markerOptions = _messages.MessageField('PointStyle', 2)
+  name = _messages.StringField(3)
+  polygonOptions = _messages.MessageField('PolygonStyle', 4)
+  polylineOptions = _messages.MessageField('LineStyle', 5)
+  styleId = _messages.IntegerField(6, variant=_messages.Variant.INT32)
+  tableId = _messages.StringField(7)
+
+
+class StyleSettingList(_messages.Message):
+  """Represents a list of styles for a given table.
+
+  Fields:
+    items: All requested style settings.
+    kind: Type name: in this case, a list of style settings.
+    nextPageToken: Token used to access the next page of this result. No token
+      is displayed if there are no more pages left.
+    totalItems: Total number of styles for the table.
+  """
+
+  items = _messages.MessageField('StyleSetting', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'fusiontables#styleSettingList')
+  nextPageToken = _messages.StringField(3)
+  totalItems = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+
+
+class Table(_messages.Message):
+  """Represents a table. Specifies the name, whether it is exportable,
+  description, attribution, and attribution link.
+
+  Fields:
+    attribution: Optional attribution assigned to the table.
+    attributionLink: Optional link for attribution.
+    baseTableIds: Optional base table identifier if this table is a view or
+      merged table.
+    columns: Columns in the table.
+    description: Optional description assigned to the table.
+    isExportable: Variable for whether table is exportable.
+    kind: Type name: a template for an individual table.
+    name: Name assigned to a table.
+    sql: Optional sql that encodes the table definition for derived tables.
+    tableId: Encrypted unique alphanumeric identifier for the table.
+  """
+
+  attribution = _messages.StringField(1)
+  attributionLink = _messages.StringField(2)
+  baseTableIds = _messages.StringField(3, repeated=True)
+  columns = _messages.MessageField('Column', 4, repeated=True)
+  description = _messages.StringField(5)
+  isExportable = _messages.BooleanField(6)
+  kind = _messages.StringField(7, default=u'fusiontables#table')
+  name = _messages.StringField(8)
+  sql = _messages.StringField(9)
+  tableId = _messages.StringField(10)
+
+
+class TableList(_messages.Message):
+  """Represents a list of tables.
+
+  Fields:
+    items: List of all requested tables.
+    kind: Type name: a list of all tables.
+    nextPageToken: Token used to access the next page of this result. No token
+      is displayed if there are no more pages left.
+  """
+
+  items = _messages.MessageField('Table', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'fusiontables#tableList')
+  nextPageToken = _messages.StringField(3)
+
+
+class Task(_messages.Message):
+  """Specifies the identifier, name, and type of a task in a table.
+
+  Fields:
+    kind: Type of the resource. This is always "fusiontables#task".
+    progress: An indication of task progress.
+    started: false while the table is busy with some other task. true if this
+      background task is currently running.
+    taskId: Identifier for the task.
+    type: Type of background task. One of  DELETE_ROWS Deletes one or more
+      rows from the table. ADD_ROWS "Adds one or more rows to a table.
+      Includes importing data into a new table and importing more rows into an
+      existing table. ADD_COLUMN Adds a new column to the table. CHANGE_TYPE
+      Changes the type of a column.
+  """
+
+  kind = _messages.StringField(1, default=u'fusiontables#task')
+  progress = _messages.StringField(2)
+  started = _messages.BooleanField(3)
+  taskId = _messages.IntegerField(4)
+  type = _messages.StringField(5)
+
+
+class TaskList(_messages.Message):
+  """Represents a list of tasks for a table.
+
+  Fields:
+    items: List of all requested tasks.
+    kind: Type of the resource. This is always "fusiontables#taskList".
+    nextPageToken: Token used to access the next page of this result. No token
+      is displayed if there are no more pages left.
+    totalItems: Total number of tasks for the table.
+  """
+
+  items = _messages.MessageField('Task', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'fusiontables#taskList')
+  nextPageToken = _messages.StringField(3)
+  totalItems = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+
+
+class Template(_messages.Message):
+  """Represents the contents of InfoWindow templates.
+
+  Fields:
+    automaticColumnNames: List of columns from which the template is to be
+      automatically constructed. Only one of body or automaticColumns can be
+      specified.
+    body: Body of the template. It contains HTML with {column_name} to insert
+      values from a particular column. The body is sanitized to remove certain
+      tags, e.g., script. Only one of body or automaticColumns can be
+      specified.
+    kind: Type name: a template for the info window contents. The template can
+      either include an HTML body or a list of columns from which the template
+      is computed automatically.
+    name: Optional name assigned to a template.
+    tableId: Identifier for the table for which the template is defined.
+    templateId: Identifier for the template, unique within the context of a
+      particular table.
+  """
+
+  automaticColumnNames = _messages.StringField(1, repeated=True)
+  body = _messages.StringField(2)
+  kind = _messages.StringField(3, default=u'fusiontables#template')
+  name = _messages.StringField(4)
+  tableId = _messages.StringField(5)
+  templateId = _messages.IntegerField(6, variant=_messages.Variant.INT32)
+
+
+class TemplateList(_messages.Message):
+  """Represents a list of templates for a given table.
+
+  Fields:
+    items: List of all requested templates.
+    kind: Type name: a list of all templates.
+    nextPageToken: Token used to access the next page of this result. No token
+      is displayed if there are no more pages left.
+    totalItems: Total number of templates for the table.
+  """
+
+  items = _messages.MessageField('Template', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'fusiontables#templateList')
+  nextPageToken = _messages.StringField(3)
+  totalItems = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+
+
diff --git a/samples/iam_sample/__init__.py b/samples/iam_sample/__init__.py
new file mode 100644
index 0000000..58e0d91
--- /dev/null
+++ b/samples/iam_sample/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/samples/iam_sample/iam_client_test.py b/samples/iam_sample/iam_client_test.py
new file mode 100644
index 0000000..39d25a4
--- /dev/null
+++ b/samples/iam_sample/iam_client_test.py
@@ -0,0 +1,77 @@
+#
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test for generated sample module."""
+
+import unittest2
+import six
+
+from apitools.base.py.testing import mock
+
+from samples.iam_sample.iam_v1 import iam_v1_client  # nopep8
+from samples.iam_sample.iam_v1 import iam_v1_messages  # nopep8
+
+
+class DnsGenClientSanityTest(unittest2.TestCase):
+
+    def testBaseUrl(self):
+        self.assertEquals(u'https://iam.googleapis.com/',
+                          iam_v1_client.IamV1.BASE_URL)
+
+    def testMessagesModule(self):
+        self.assertEquals(iam_v1_messages, iam_v1_client.IamV1.MESSAGES_MODULE)
+
+    def testAttributes(self):
+        inner_classes = set([])
+        for key, value in iam_v1_client.IamV1.__dict__.items():
+            if isinstance(value, six.class_types):
+                inner_classes.add(key)
+        self.assertEquals(set([
+            'IamPoliciesService',
+            'ProjectsService',
+            'ProjectsServiceAccountsKeysService',
+            'ProjectsServiceAccountsService',
+            'RolesService']), inner_classes)
+
+
+class IamGenClientTest(unittest2.TestCase):
+
+    def setUp(self):
+        self.mocked_iam_v1 = mock.Client(iam_v1_client.IamV1)
+        self.mocked_iam_v1.Mock()
+        self.addCleanup(self.mocked_iam_v1.Unmock)
+
+    def testFlatPath(self):
+        get_method_config = (self.mocked_iam_v1.projects_serviceAccounts_keys
+                             .GetMethodConfig('Get'))
+        self.assertEquals('v1/projects/{projectsId}/serviceAccounts'
+                          '/{serviceAccountsId}/keys/{keysId}',
+                          get_method_config.flat_path)
+        self.assertEquals('v1/{+name}', get_method_config.relative_path)
+
+    def testServiceAccountsKeysList(self):
+        response_key = iam_v1_messages.ServiceAccountKey(
+            name=u'test-key')
+        self.mocked_iam_v1.projects_serviceAccounts_keys.List.Expect(
+            iam_v1_messages.IamProjectsServiceAccountsKeysListRequest(
+                name=u'test-service-account.'),
+            iam_v1_messages.ListServiceAccountKeysResponse(
+                keys=[response_key]))
+
+        result = self.mocked_iam_v1.projects_serviceAccounts_keys.List(
+            iam_v1_messages.IamProjectsServiceAccountsKeysListRequest(
+                name=u'test-service-account.'))
+
+        self.assertEquals([response_key], result.keys)
diff --git a/samples/iam_sample/iam_v1.json b/samples/iam_sample/iam_v1.json
new file mode 100644
index 0000000..8e9480e
--- /dev/null
+++ b/samples/iam_sample/iam_v1.json
@@ -0,0 +1,1220 @@
+{
+  "kind": "discovery#restDescription",
+  "discoveryVersion": "v1",
+  "id": "iam:v1",
+  "name": "iam",
+  "canonicalName": "iam",
+  "version": "v1",
+  "revision": "0",
+  "title": "Google Identity and Access Management (IAM) API",
+  "description": "Manages identity and access control for Google Cloud Platform resources, including the creation of service accounts, which you can use to authenticate to Google and make API calls.",
+  "ownerDomain": "google.com",
+  "ownerName": "Google",
+  "icons": {
+    "x16": "http://www.google.com/images/icons/product/search-16.gif",
+    "x32": "http://www.google.com/images/icons/product/search-32.gif"
+   },
+  "documentationLink": "https://cloud.google.com/iam/",
+  "protocol": "rest",
+  "rootUrl": "https://iam.googleapis.com/",
+  "servicePath": "",
+  "baseUrl": "https://iam.googleapis.com/",
+  "batchPath": "batch",
+  "version_module": "True",
+  "parameters": {
+    "access_token": {
+      "type": "string",
+      "description": "OAuth access token.",
+      "location": "query"
+    },
+    "alt": {
+      "type": "string",
+      "description": "Data format for response.",
+      "default": "json",
+      "enum": [
+        "json",
+        "media",
+        "proto"
+      ],
+      "enumDescriptions": [
+        "Responses with Content-Type of application/json",
+        "Media download with context-dependent Content-Type",
+        "Responses with Content-Type of application/x-protobuf"
+      ],
+      "location": "query"
+    },
+    "bearer_token": {
+      "type": "string",
+      "description": "OAuth bearer token.",
+      "location": "query"
+    },
+    "callback": {
+      "type": "string",
+      "description": "JSONP",
+      "location": "query"
+    },
+    "fields": {
+      "type": "string",
+      "description": "Selector specifying which fields to include in a partial response.",
+      "location": "query"
+    },
+    "key": {
+      "type": "string",
+      "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
+      "location": "query"
+    },
+    "oauth_token": {
+      "type": "string",
+      "description": "OAuth 2.0 token for the current user.",
+      "location": "query"
+    },
+    "pp": {
+      "type": "boolean",
+      "description": "Pretty-print response.",
+      "default": "true",
+      "location": "query"
+    },
+    "prettyPrint": {
+      "type": "boolean",
+      "description": "Returns response with indentations and line breaks.",
+      "default": "true",
+      "location": "query"
+    },
+    "quotaUser": {
+      "type": "string",
+      "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.",
+      "location": "query"
+    },
+    "upload_protocol": {
+      "type": "string",
+      "description": "Upload protocol for media (e.g. \"raw\", \"multipart\").",
+      "location": "query"
+    },
+    "uploadType": {
+      "type": "string",
+      "description": "Legacy upload protocol for media (e.g. \"media\", \"multipart\").",
+      "location": "query"
+    },
+    "$.xgafv": {
+      "type": "string",
+      "description": "V1 error format.",
+      "enum": [
+        "1",
+        "2"
+      ],
+      "enumDescriptions": [
+        "v1 error format",
+        "v2 error format"
+      ],
+      "location": "query"
+    }
+  },
+  "auth": {
+    "oauth2": {
+      "scopes": {
+        "https://www.googleapis.com/auth/cloud-platform": {
+          "description": "View and manage your data across Google Cloud Platform services"
+        }
+      }
+    }
+  },
+  "schemas": {
+    "ListServiceAccountsResponse": {
+      "id": "ListServiceAccountsResponse",
+      "description": "The service account list response.",
+      "type": "object",
+      "properties": {
+        "accounts": {
+          "description": "The list of matching service accounts.",
+          "type": "array",
+          "items": {
+            "$ref": "ServiceAccount"
+          }
+        },
+        "nextPageToken": {
+          "description": "To retrieve the next page of results, set\nListServiceAccountsRequest.page_token\nto this value.",
+          "type": "string"
+        }
+      }
+    },
+    "ServiceAccount": {
+      "id": "ServiceAccount",
+      "description": "A service account in the Identity and Access Management API.\n\nTo create a service account, specify the `project_id` and the `account_id`\nfor the account.  The `account_id` is unique within the project, and is used\nto generate the service account email address and a stable\n`unique_id`.\n\nAll other methods can identify the service account using the format\n`projects\/{project}\/serviceAccounts\/{account}`.\nUsing `-` as a wildcard for the project will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The resource name of the service account in the following format:\n`projects\/{project}\/serviceAccounts\/{account}`.\n\nRequests using `-` as a wildcard for the project will infer the project\nfrom the `account` and the `account` value can be the `email` address or\nthe `unique_id` of the service account.\n\nIn responses the resource name will always be in the format\n`projects\/{project}\/serviceAccounts\/{email}`.",
+          "type": "string"
+        },
+        "projectId": {
+          "description": "@OutputOnly The id of the project that owns the service account.",
+          "type": "string"
+        },
+        "uniqueId": {
+          "description": "@OutputOnly The unique and stable id of the service account.",
+          "type": "string"
+        },
+        "email": {
+          "description": "@OutputOnly The email address of the service account.",
+          "type": "string"
+        },
+        "displayName": {
+          "description": "Optional. A user-specified description of the service account.  Must be\nfewer than 100 UTF-8 bytes.",
+          "type": "string"
+        },
+        "etag": {
+          "description": "Used to perform a consistent read-modify-write.",
+          "type": "string",
+          "format": "byte"
+        },
+        "description": {
+          "description": "Optional. A user-specified opaque description of the service account.",
+          "type": "string"
+        },
+        "oauth2ClientId": {
+          "description": "@OutputOnly. The OAuth2 client id for the service account.\nThis is used in conjunction with the OAuth2 clientconfig API to make\nthree legged OAuth2 (3LO) flows to access the data of Google users.",
+          "type": "string"
+        }
+      }
+    },
+    "CreateServiceAccountRequest": {
+      "id": "CreateServiceAccountRequest",
+      "description": "The service account create request.",
+      "type": "object",
+      "properties": {
+        "accountId": {
+          "description": "Required. The account id that is used to generate the service account\nemail address and a stable unique id. It is unique within a project,\nmust be 1-63 characters long, and match the regular expression\n`[a-z]([-a-z0-9]*[a-z0-9])` to comply with RFC1035.",
+          "type": "string"
+        },
+        "serviceAccount": {
+          "description": "The ServiceAccount resource to create.\nCurrently, only the following values are user assignable:\n`display_name` .",
+          "$ref": "ServiceAccount"
+        }
+      }
+    },
+    "Empty": {
+      "id": "Empty",
+      "description": "A generic empty message that you can re-use to avoid defining duplicated\nempty messages in your APIs. A typical example is to use it as the request\nor the response type of an API method. For instance:\n\n    service Foo {\n      rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);\n    }\n\nThe JSON representation for `Empty` is empty JSON object `{}`.",
+      "type": "object",
+      "properties": {
+      }
+    },
+    "ListServiceAccountKeysResponse": {
+      "id": "ListServiceAccountKeysResponse",
+      "description": "The service account keys list response.",
+      "type": "object",
+      "properties": {
+        "keys": {
+          "description": "The public keys for the service account.",
+          "type": "array",
+          "items": {
+            "$ref": "ServiceAccountKey"
+          }
+        }
+      }
+    },
+    "ServiceAccountKey": {
+      "id": "ServiceAccountKey",
+      "description": "Represents a service account key.\n\nA service account has two sets of key-pairs: user-managed, and\nsystem-managed.\n\nUser-managed key-pairs can be created and deleted by users.  Users are\nresponsible for rotating these keys periodically to ensure security of\ntheir service accounts.  Users retain the private key of these key-pairs,\nand Google retains ONLY the public key.\n\nSystem-managed key-pairs are managed automatically by Google, and rotated\ndaily without user intervention.  The private key never leaves Google's\nservers to maximize security.\n\nPublic keys for all service accounts are also published at the OAuth2\nService Account API.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The resource name of the service account key in the following format\n`projects\/{project}\/serviceAccounts\/{account}\/keys\/{key}`.",
+          "type": "string"
+        },
+        "privateKeyType": {
+          "description": "The output format for the private key.\nOnly provided in `CreateServiceAccountKey` responses, not\nin `GetServiceAccountKey` or `ListServiceAccountKey` responses.\n\nGoogle never exposes system-managed private keys, and never retains\nuser-managed private keys.",
+          "enumDescriptions": [
+            "Unspecified. Equivalent to `TYPE_GOOGLE_CREDENTIALS_FILE`.",
+            "PKCS12 format.\nThe password for the PKCS12 file is `notasecret`.\nFor more information, see https:\/\/tools.ietf.org\/html\/rfc7292.",
+            "Google Credentials File format."
+          ],
+          "type": "string",
+          "enum": [
+            "TYPE_UNSPECIFIED",
+            "TYPE_PKCS12_FILE",
+            "TYPE_GOOGLE_CREDENTIALS_FILE"
+          ]
+        },
+        "privateKeyData": {
+          "description": "The private key data. Only provided in `CreateServiceAccountKey`\nresponses.",
+          "type": "string",
+          "format": "byte"
+        },
+        "publicKeyData": {
+          "description": "The public key data. Only provided in `GetServiceAccountKey` responses.",
+          "type": "string",
+          "format": "byte"
+        },
+        "validAfterTime": {
+          "description": "The key can be used after this timestamp.",
+          "type": "string",
+          "format": "google-datetime"
+        },
+        "validBeforeTime": {
+          "description": "The key can be used before this timestamp.",
+          "type": "string",
+          "format": "google-datetime"
+        }
+      }
+    },
+    "CreateServiceAccountKeyRequest": {
+      "id": "CreateServiceAccountKeyRequest",
+      "description": "The service account key create request.",
+      "type": "object",
+      "properties": {
+        "privateKeyType": {
+          "description": "The output format of the private key. `GOOGLE_CREDENTIALS_FILE` is the\ndefault output format.",
+          "enumDescriptions": [
+            "Unspecified. Equivalent to `TYPE_GOOGLE_CREDENTIALS_FILE`.",
+            "PKCS12 format.\nThe password for the PKCS12 file is `notasecret`.\nFor more information, see https:\/\/tools.ietf.org\/html\/rfc7292.",
+            "Google Credentials File format."
+          ],
+          "type": "string",
+          "enum": [
+            "TYPE_UNSPECIFIED",
+            "TYPE_PKCS12_FILE",
+            "TYPE_GOOGLE_CREDENTIALS_FILE"
+          ]
+        }
+      }
+    },
+    "SignBlobRequest": {
+      "id": "SignBlobRequest",
+      "description": "The service account sign blob request.",
+      "type": "object",
+      "properties": {
+        "bytesToSign": {
+          "description": "The bytes to sign.",
+          "type": "string",
+          "format": "byte"
+        }
+      }
+    },
+    "SignBlobResponse": {
+      "id": "SignBlobResponse",
+      "description": "The service account sign blob response.",
+      "type": "object",
+      "properties": {
+        "keyId": {
+          "description": "The id of the key used to sign the blob.",
+          "type": "string"
+        },
+        "signature": {
+          "description": "The signed blob.",
+          "type": "string",
+          "format": "byte"
+        }
+      }
+    },
+    "SignJwtRequest": {
+      "id": "SignJwtRequest",
+      "description": "The service account sign JWT request.",
+      "type": "object",
+      "properties": {
+        "payload": {
+          "description": "The JWT payload to sign, a JSON JWT Claim set.",
+          "type": "string"
+        }
+      }
+    },
+    "SignJwtResponse": {
+      "id": "SignJwtResponse",
+      "description": "The service account sign JWT response.",
+      "type": "object",
+      "properties": {
+        "keyId": {
+          "description": "The id of the key used to sign the JWT.",
+          "type": "string"
+        },
+        "signedJwt": {
+          "description": "The signed JWT.",
+          "type": "string"
+        }
+      }
+    },
+    "Policy": {
+      "id": "Policy",
+      "description": "Defines an Identity and Access Management (IAM) policy. It is used to\nspecify access control policies for Cloud Platform resources.\n\n\nA `Policy` consists of a list of `bindings`. A `Binding` binds a list of\n`members` to a `role`, where the members can be user accounts, Google groups,\nGoogle domains, and service accounts. A `role` is a named list of permissions\ndefined by IAM.\n\n**Example**\n\n    {\n      \"bindings\": [\n        {\n          \"role\": \"roles\/owner\",\n          \"members\": [\n            \"user:mike@example.com\",\n            \"group:admins@example.com\",\n            \"domain:google.com\",\n            \"serviceAccount:my-other-app@appspot.gserviceaccount.com\",\n          ]\n        },\n        {\n          \"role\": \"roles\/viewer\",\n          \"members\": [\"user:sean@example.com\"]\n        }\n      ]\n    }\n\nFor a description of IAM and its features, see the\n[IAM developer's guide](https:\/\/cloud.google.com\/iam).",
+      "type": "object",
+      "properties": {
+        "version": {
+          "description": "Version of the `Policy`. The default version is 0.",
+          "type": "integer",
+          "format": "int32"
+        },
+        "bindings": {
+          "description": "Associates a list of `members` to a `role`.\nMultiple `bindings` must not be specified for the same `role`.\n`bindings` with no members will result in an error.",
+          "type": "array",
+          "items": {
+            "$ref": "Binding"
+          }
+        },
+        "auditConfigs": {
+          "description": "Specifies audit logging configs for \"data access\".\n\"data access\": generally refers to data reads\/writes and admin reads.\n\"admin activity\": generally refers to admin writes.\n\nNote: `AuditConfig` doesn't apply to \"admin activity\", which always\nenables audit logging.",
+          "type": "array",
+          "items": {
+            "$ref": "AuditConfig"
+          }
+        },
+        "rules": {
+          "description": "If more than one rule is specified, the rules are applied in the following\nmanner:\n- All matching LOG rules are always applied.\n- If any DENY\/DENY_WITH_LOG rule matches, permission is denied.\n  Logging will be applied if one or more matching rule requires logging.\n- Otherwise, if any ALLOW\/ALLOW_WITH_LOG rule matches, permission is\n  granted.\n  Logging will be applied if one or more matching rule requires logging.\n- Otherwise, if no rule applies, permission is denied.",
+          "type": "array",
+          "items": {
+            "$ref": "Rule"
+          }
+        },
+        "etag": {
+          "description": "`etag` is used for optimistic concurrency control as a way to help\nprevent simultaneous updates of a policy from overwriting each other.\nIt is strongly suggested that systems make use of the `etag` in the\nread-modify-write cycle to perform policy updates in order to avoid race\nconditions: An `etag` is returned in the response to `getIamPolicy`, and\nsystems are expected to put that etag in the request to `setIamPolicy` to\nensure that their change will be applied to the same version of the policy.\n\nIf no `etag` is provided in the call to `setIamPolicy`, then the existing\npolicy is overwritten blindly.",
+          "type": "string",
+          "format": "byte"
+        },
+        "iamOwned": {
+
+          "type": "boolean"
+        }
+      }
+    },
+    "Binding": {
+      "id": "Binding",
+      "description": "Associates `members` with a `role`.",
+      "type": "object",
+      "properties": {
+        "role": {
+          "description": "Role that is assigned to `members`.\nFor example, `roles\/viewer`, `roles\/editor`, or `roles\/owner`.\nRequired",
+          "type": "string"
+        },
+        "members": {
+          "description": "Specifies the identities requesting access for a Cloud Platform resource.\n`members` can have the following values:\n\n* `allUsers`: A special identifier that represents anyone who is\n   on the internet; with or without a Google account.\n\n* `allAuthenticatedUsers`: A special identifier that represents anyone\n   who is authenticated with a Google account or a service account.\n\n* `user:{emailid}`: An email address that represents a specific Google\n   account. For example, `alice@gmail.com` or `joe@example.com`.\n\n* `serviceAccount:{emailid}`: An email address that represents a service\n   account. For example, `my-other-app@appspot.gserviceaccount.com`.\n\n* `group:{emailid}`: An email address that represents a Google group.\n   For example, `admins@example.com`.\n\n* `domain:{domain}`: A Google Apps domain name that represents all the\n   users of that domain. For example, `google.com` or `example.com`.\n\n\n",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "AuditConfig": {
+      "id": "AuditConfig",
+      "description": "Enables \"data access\" audit logging for a service and specifies a list\nof members that are log-exempted.",
+      "type": "object",
+      "properties": {
+        "service": {
+          "description": "Specifies a service that will be enabled for \"data access\" audit\nlogging.\nFor example, `resourcemanager`, `storage`, `compute`.\n`allServices` is a special value that covers all services.",
+          "type": "string"
+        },
+        "exemptedMembers": {
+          "description": "Specifies the identities that are exempted from \"data access\" audit\nlogging for the `service` specified above.\nFollows the same format of Binding.members.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "Rule": {
+      "id": "Rule",
+      "description": "A rule to be applied in a Policy.",
+      "type": "object",
+      "properties": {
+        "description": {
+          "description": "Human-readable description of the rule.",
+          "type": "string"
+        },
+        "permissions": {
+          "description": "A permission is a string of form '<service>.<resource type>.<verb>'\n(e.g., 'storage.buckets.list'). A value of '*' matches all permissions,\nand a verb part of '*' (e.g., 'storage.buckets.*') matches all verbs.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "action": {
+          "description": "Required",
+          "enumDescriptions": [
+            "Default no action.",
+            "Matching 'Entries' grant access.",
+            "Matching 'Entries' grant access and the caller promises to log\nthe request per the returned log_configs.",
+            "Matching 'Entries' deny access.",
+            "Matching 'Entries' deny access and the caller promises to log\nthe request per the returned log_configs.",
+            "Matching 'Entries' tell IAM.Check callers to generate logs."
+          ],
+          "type": "string",
+          "enum": [
+            "NO_ACTION",
+            "ALLOW",
+            "ALLOW_WITH_LOG",
+            "DENY",
+            "DENY_WITH_LOG",
+            "LOG"
+          ]
+        },
+        "in": {
+          "description": "If one or more 'in' clauses are specified, the rule matches if\nthe PRINCIPAL\/AUTHORITY_SELECTOR is in at least one of these entries.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "notIn": {
+          "description": "If one or more 'not_in' clauses are specified, the rule matches\nif the PRINCIPAL\/AUTHORITY_SELECTOR is in none of the entries.\nThe format for in and not_in entries is the same as for members in a\nBinding (see google\/iam\/v1\/policy.proto).",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "conditions": {
+          "description": "Additional restrictions that must be met",
+          "type": "array",
+          "items": {
+            "$ref": "Condition"
+          }
+        },
+        "logConfig": {
+          "description": "The config returned to callers of tech.iam.IAM.CheckPolicy for any entries\nthat match the LOG action.",
+          "type": "array",
+          "items": {
+            "$ref": "LogConfig"
+          }
+        }
+      }
+    },
+    "Condition": {
+      "id": "Condition",
+      "description": "A condition to be met.",
+      "type": "object",
+      "properties": {
+        "iam": {
+          "description": "Trusted attributes supplied by the IAM system.",
+          "enumDescriptions": [
+            "Default non-attribute.",
+            "Either principal or (if present) authority",
+            "selector\nAlways the original principal, but making clear"
+          ],
+          "type": "string",
+          "enum": [
+            "NO_ATTR",
+            "AUTHORITY",
+            "ATTRIBUTION"
+          ]
+        },
+        "sys": {
+          "description": "Trusted attributes supplied by any service that owns resources and uses\nthe IAM system for access control.",
+          "enumDescriptions": [
+            "Default non-attribute type",
+            "Region of the resource",
+            "Service name",
+            "Resource name",
+            "IP address of the caller"
+          ],
+          "type": "string",
+          "enum": [
+            "NO_ATTR",
+            "REGION",
+            "SERVICE",
+            "NAME",
+            "IP"
+          ]
+        },
+        "svc": {
+          "description": "Trusted attributes discharged by the service.",
+          "type": "string"
+        },
+        "op": {
+          "description": "An operator to apply the subject with.",
+          "enumDescriptions": [
+            "Default no-op.",
+            "DEPRECATED. Use IN instead.",
+            "DEPRECATED. Use NOT_IN instead.",
+            "Set-inclusion check.",
+            "Set-exclusion check.",
+            "Subject is discharged"
+          ],
+          "type": "string",
+          "enum": [
+            "NO_OP",
+            "EQUALS",
+            "NOT_EQUALS",
+            "IN",
+            "NOT_IN",
+            "DISCHARGED"
+          ]
+        },
+        "value": {
+          "description": "DEPRECATED. Use 'values' instead.",
+          "type": "string"
+        },
+        "values": {
+          "description": "The objects of the condition. This is mutually exclusive with 'value'.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "LogConfig": {
+      "id": "LogConfig",
+          "description": "Specifies what kind of log the caller must write\nIncrement a streamz counter with the specified metric and field names.\n\nMetric names should start with a '\/', generally be lowercase-only,\nand end in \"_count\". Field names should not contain an initial slash.\nThe actual exported metric names will have \"\/iam\/policy\" prepended.\n\nField names correspond to IAM request parameters and field values are\ntheir respective values.\n\nAt present the only supported field names are\n   - \"iam_principal\", corresponding to IAMContext.principal;\n   - \"\" (empty string), resulting in one aggregated counter with no field.\n\nExamples:\n  counter { metric: \"\/debug_access_count\"  field: \"iam_principal\" }\n  ==> increment counter \/iam\/policy\/backend_debug_access_count\n                        {iam_principal=[value of IAMContext.principal]}\n\nAt this time we do not support:\n* multiple field names (though this may be supported in the future)\n* decrementing the counter\n* incrementing it by anything other than 1",
+      "type": "object",
+      "properties": {
+        "counter": {
+          "description": "Counter options.",
+          "$ref": "CounterOptions"
+        },
+        "dataAccess": {
+          "description": "Data access options.",
+          "$ref": "DataAccessOptions"
+        },
+        "cloudAudit": {
+          "description": "Cloud audit options.",
+          "$ref": "CloudAuditOptions"
+        }
+      }
+    },
+    "CounterOptions": {
+      "id": "CounterOptions",
+      "description": "Options for counters",
+      "type": "object",
+      "properties": {
+        "metric": {
+          "description": "The metric to update.",
+          "type": "string"
+        },
+        "field": {
+          "description": "The field value to attribute.",
+          "type": "string"
+        }
+      }
+    },
+    "DataAccessOptions": {
+      "id": "DataAccessOptions",
+      "description": "Write a Data Access (Gin) log",
+      "type": "object",
+      "properties": {
+      }
+    },
+    "CloudAuditOptions": {
+      "id": "CloudAuditOptions",
+      "description": "Write a Cloud Audit log",
+      "type": "object",
+      "properties": {
+      }
+    },
+    "SetIamPolicyRequest": {
+      "id": "SetIamPolicyRequest",
+      "description": "Request message for `SetIamPolicy` method.",
+      "type": "object",
+      "properties": {
+        "policy": {
+          "description": "REQUIRED: The complete policy to be applied to the `resource`. The size of\nthe policy is limited to a few 10s of KB. An empty policy is a\nvalid policy but certain Cloud Platform services (such as Projects)\nmight reject them.",
+          "$ref": "Policy"
+        }
+      }
+    },
+    "TestIamPermissionsRequest": {
+      "id": "TestIamPermissionsRequest",
+      "description": "Request message for `TestIamPermissions` method.",
+      "type": "object",
+      "properties": {
+        "permissions": {
+          "description": "The set of permissions to check for the `resource`. Permissions with\nwildcards (such as '*' or 'storage.*') are not allowed. For more\ninformation see\nIAM Overview.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "TestIamPermissionsResponse": {
+      "id": "TestIamPermissionsResponse",
+      "description": "Response message for `TestIamPermissions` method.",
+      "type": "object",
+      "properties": {
+        "permissions": {
+          "description": "A subset of `TestPermissionsRequest.permissions` that the caller is\nallowed.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "QueryGrantableRolesRequest": {
+      "id": "QueryGrantableRolesRequest",
+      "description": "The grantable role query request.",
+      "type": "object",
+      "properties": {
+        "fullResourceName": {
+          "description": "Required. The full resource name to query from the list of grantable roles.\n\nThe name follows the Google Cloud Platform resource format.\nFor example, a Cloud Platform project with id `my-project` will be named\n`\/\/cloudresourcemanager.googleapis.com\/projects\/my-project`.",
+          "type": "string"
+        }
+      }
+    },
+    "QueryGrantableRolesResponse": {
+      "id": "QueryGrantableRolesResponse",
+      "description": "The grantable role query response.",
+      "type": "object",
+      "properties": {
+        "roles": {
+          "description": "The list of matching roles.",
+          "type": "array",
+          "items": {
+            "$ref": "Role"
+          }
+        }
+      }
+    },
+    "Role": {
+      "id": "Role",
+      "description": "A role in the Identity and Access Management API.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The name of the role.\n\nExamples of roles names are:\n`roles\/editor`, `roles\/viewer` and `roles\/logging.viewer`.",
+          "type": "string"
+        },
+        "title": {
+          "description": "Optional.  A human-readable title for the role.  Typically this\nis limited to 100 UTF-8 bytes.",
+          "type": "string"
+        },
+        "description": {
+          "description": "Optional.  A human-readable description for the role.",
+          "type": "string"
+        },
+        "apiTokens": {
+
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "GetPolicyDetailsRequest": {
+      "id": "GetPolicyDetailsRequest",
+      "description": "The request to get the current policy and the policies on the inherited\nresources the user has access to.",
+      "type": "object",
+      "properties": {
+        "fullResourcePath": {
+          "description": "REQUIRED: The full resource path of the current policy being\nrequested, e.g., `\/\/dataflow.googleapis.com\/projects\/..\/jobs\/..`.",
+          "type": "string"
+        },
+        "pageToken": {
+          "description": "Optional pagination token returned in an earlier\nGetPolicyDetailsResponse.next_page_token\nresponse.",
+          "type": "string"
+        },
+        "pageSize": {
+          "description": "Limit on the number of policies to include in the response.\nFurther accounts can subsequently be obtained by including the\nGetPolicyDetailsResponse.next_page_token\nin a subsequent request.\nIf zero, the default page size 20 will be used.\nMust be given a value in range [0, 100], otherwise an invalid argument\nerror will be returned.",
+          "type": "integer",
+          "format": "int32"
+        }
+      }
+    },
+    "GetPolicyDetailsResponse": {
+      "id": "GetPolicyDetailsResponse",
+      "description": "The response to the `GetPolicyDetailsRequest` containing the current policy and\nthe policies on the inherited resources the user has access to.",
+      "type": "object",
+      "properties": {
+        "policies": {
+          "description": "The current policy and all the inherited policies the user has\naccess to.",
+          "type": "array",
+          "items": {
+            "$ref": "PolicyDetail"
+          }
+        },
+        "nextPageToken": {
+          "description": "To retrieve the next page of results, set\nGetPolicyDetailsRequest.page_token\nto this value.\nIf this value is empty, then there are not any further policies that the\nuser has access to.\nThe lifetime is 60 minutes. An \"Expired pagination token\" error will be\nreturned if exceeded.",
+          "type": "string"
+        }
+      }
+    },
+    "PolicyDetail": {
+      "id": "PolicyDetail",
+      "description": "A policy and its full resource path.",
+      "type": "object",
+      "properties": {
+        "policy": {
+          "description": "The policy of a `resource\/project\/folder`.",
+          "$ref": "Policy"
+        },
+        "fullResourcePath": {
+          "description": "The full resource path of the policy\ne.g., `\/\/dataflow.googleapis.com\/projects\/..\/jobs\/..`.\nNote that a resource and its inherited resource have different\n`full_resource_path`.",
+          "type": "string"
+        }
+      }
+    }
+  },
+  "resources": {
+    "projects": {
+      "resources": {
+        "serviceAccounts": {
+          "methods": {
+            "list": {
+              "id": "iam.projects.serviceAccounts.list",
+              "path": "v1/{+name}/serviceAccounts",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts",
+              "httpMethod": "GET",
+              "description": "Lists ServiceAccounts for a project.",
+              "parameters": {
+                "name": {
+                  "description": "Required. The resource name of the project associated with the service\naccounts, such as `projects\/my-project-123`.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*$",
+                  "type": "string"
+                },
+                "pageSize": {
+                  "description": "Optional limit on the number of service accounts to include in the\nresponse. Further accounts can subsequently be obtained by including the\nListServiceAccountsResponse.next_page_token\nin a subsequent request.",
+                  "location": "query",
+                  "type": "integer",
+                  "format": "int32"
+                },
+                "pageToken": {
+                  "description": "Optional pagination token returned in an earlier\nListServiceAccountsResponse.next_page_token.",
+                  "location": "query",
+                  "type": "string"
+                },
+                "removeDeletedServiceAccounts": {
+                  "description": "Do not list service accounts deleted from Gaia.\n<b><font color=\"red\">DO NOT INCLUDE IN EXTERNAL DOCUMENTATION<\/font><\/b>.",
+                  "location": "query",
+                  "type": "boolean"
+                }
+              },
+              "parameterOrder": [
+                "name"
+              ],
+              "response": {
+                "$ref": "ListServiceAccountsResponse"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "get": {
+              "id": "iam.projects.serviceAccounts.get",
+              "path": "v1/{+name}",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}",
+              "httpMethod": "GET",
+              "description": "Gets a ServiceAccount.",
+              "parameters": {
+                "name": {
+                  "description": "The resource name of the service account in the following format:\n`projects\/{project}\/serviceAccounts\/{account}`.\nUsing `-` as a wildcard for the project will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "name"
+              ],
+              "response": {
+                "$ref": "ServiceAccount"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "create": {
+              "id": "iam.projects.serviceAccounts.create",
+              "path": "v1/{+name}/serviceAccounts",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts",
+              "httpMethod": "POST",
+              "description": "Creates a ServiceAccount\nand returns it.",
+              "parameters": {
+                "name": {
+                  "description": "Required. The resource name of the project associated with the service\naccounts, such as `projects\/my-project-123`.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "name"
+              ],
+              "request": {
+                "$ref": "CreateServiceAccountRequest"
+              },
+              "response": {
+                "$ref": "ServiceAccount"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "update": {
+              "id": "iam.projects.serviceAccounts.update",
+              "path": "v1/{+name}",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}",
+              "httpMethod": "PUT",
+              "description": "Updates a ServiceAccount.\n\nCurrently, only the following fields are updatable:\n`display_name` .\nThe `etag` is mandatory.",
+              "parameters": {
+                "name": {
+                  "description": "The resource name of the service account in the following format:\n`projects\/{project}\/serviceAccounts\/{account}`.\n\nRequests using `-` as a wildcard for the project will infer the project\nfrom the `account` and the `account` value can be the `email` address or\nthe `unique_id` of the service account.\n\nIn responses the resource name will always be in the format\n`projects\/{project}\/serviceAccounts\/{email}`.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "name"
+              ],
+              "request": {
+                "$ref": "ServiceAccount"
+              },
+              "response": {
+                "$ref": "ServiceAccount"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "delete": {
+              "id": "iam.projects.serviceAccounts.delete",
+              "path": "v1/{+name}",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}",
+              "httpMethod": "DELETE",
+              "description": "Deletes a ServiceAccount.",
+              "parameters": {
+                "name": {
+                  "description": "The resource name of the service account in the following format:\n`projects\/{project}\/serviceAccounts\/{account}`.\nUsing `-` as a wildcard for the project will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "name"
+              ],
+              "response": {
+                "$ref": "Empty"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "signBlob": {
+              "id": "iam.projects.serviceAccounts.signBlob",
+              "path": "v1/{+name}:signBlob",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signBlob",
+              "httpMethod": "POST",
+              "description": "Signs a blob using a service account's system-managed private key.",
+              "parameters": {
+                "name": {
+                  "description": "The resource name of the service account in the following format:\n`projects\/{project}\/serviceAccounts\/{account}`.\nUsing `-` as a wildcard for the project will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "name"
+              ],
+              "request": {
+                "$ref": "SignBlobRequest"
+              },
+              "response": {
+                "$ref": "SignBlobResponse"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "signJwt": {
+              "id": "iam.projects.serviceAccounts.signJwt",
+              "path": "v1/{+name}:signJwt",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signJwt",
+              "httpMethod": "POST",
+              "description": "Signs a JWT using a service account's system-managed private key.\n\nIf no `exp` (expiry) time is contained in the claims, we will\nprovide an expiry of one hour in the future. If an expiry\nof more than one hour in the future is requested, the request\nwill fail.",
+              "parameters": {
+                "name": {
+                  "description": "The resource name of the service account in the following format:\n`projects\/{project}\/serviceAccounts\/{account}`.\nUsing `-` as a wildcard for the project will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "name"
+              ],
+              "request": {
+                "$ref": "SignJwtRequest"
+              },
+              "response": {
+                "$ref": "SignJwtResponse"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "getIamPolicy": {
+              "id": "iam.projects.serviceAccounts.getIamPolicy",
+              "path": "v1/{+resource}:getIamPolicy",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:getIamPolicy",
+              "httpMethod": "POST",
+              "description": "Returns the IAM access control policy for specified IAM resource.",
+              "parameters": {
+                "resource": {
+                  "description": "REQUIRED: The resource for which the policy is being requested.\n`resource` is usually specified as a path, such as\n`projects\/*project*\/zones\/*zone*\/disks\/*disk*`.\n\nThe format for the path specified in this value is resource specific and\nis specified in the `getIamPolicy` documentation.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "resource"
+              ],
+              "response": {
+                "$ref": "Policy"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "setIamPolicy": {
+              "id": "iam.projects.serviceAccounts.setIamPolicy",
+              "path": "v1/{+resource}:setIamPolicy",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:setIamPolicy",
+              "httpMethod": "POST",
+              "description": "Sets the IAM access control policy for the specified IAM resource.",
+              "parameters": {
+                "resource": {
+                  "description": "REQUIRED: The resource for which the policy is being specified.\n`resource` is usually specified as a path, such as\n`projects\/*project*\/zones\/*zone*\/disks\/*disk*`.\n\nThe format for the path specified in this value is resource specific and\nis specified in the `setIamPolicy` documentation.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "resource"
+              ],
+              "request": {
+                "$ref": "SetIamPolicyRequest"
+              },
+              "response": {
+                "$ref": "Policy"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            },
+            "testIamPermissions": {
+              "id": "iam.projects.serviceAccounts.testIamPermissions",
+              "path": "v1/{+resource}:testIamPermissions",
+              "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:testIamPermissions",
+              "httpMethod": "POST",
+              "description": "Tests the specified permissions against the IAM access control policy\nfor the specified IAM resource.",
+              "parameters": {
+                "resource": {
+                  "description": "REQUIRED: The resource for which the policy detail is being requested.\n`resource` is usually specified as a path, such as\n`projects\/*project*\/zones\/*zone*\/disks\/*disk*`.\n\nThe format for the path specified in this value is resource specific and\nis specified in the `testIamPermissions` documentation.",
+                  "location": "path",
+                  "required": true,
+                  "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "resource"
+              ],
+              "request": {
+                "$ref": "TestIamPermissionsRequest"
+              },
+              "response": {
+                "$ref": "TestIamPermissionsResponse"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform"
+              ]
+            }
+          }
+          ,
+          "resources": {
+            "keys": {
+              "methods": {
+                "list": {
+                  "id": "iam.projects.serviceAccounts.keys.list",
+                  "path": "v1/{+name}/keys",
+                  "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys",
+                  "httpMethod": "GET",
+                  "description": "Lists ServiceAccountKeys.",
+                  "parameters": {
+                    "name": {
+                      "description": "The resource name of the service account in the following format:\n`projects\/{project}\/serviceAccounts\/{account}`.\n\nUsing `-` as a wildcard for the project, will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+                      "location": "path",
+                      "required": true,
+                      "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                      "type": "string"
+                    },
+                    "keyTypes": {
+                      "description": "Filters the types of keys the user wants to include in the list\nresponse. Duplicate key types are not allowed. If no key type\nis provided, all keys are returned.",
+                      "location": "query",
+                      "repeated": true,
+                      "type": "string",
+                      "enum": [
+                        "KEY_TYPE_UNSPECIFIED",
+                        "USER_MANAGED",
+                        "SYSTEM_MANAGED"
+                      ]
+                    }
+                  },
+                  "parameterOrder": [
+                    "name"
+                  ],
+                  "response": {
+                    "$ref": "ListServiceAccountKeysResponse"
+                  },
+                  "scopes": [
+                    "https://www.googleapis.com/auth/cloud-platform"
+                  ]
+                },
+                "get": {
+                  "id": "iam.projects.serviceAccounts.keys.get",
+                  "path": "v1/{+name}",
+                  "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys/{keysId}",
+                  "httpMethod": "GET",
+                  "description": "Gets the ServiceAccountKey\nby key id.",
+                  "parameters": {
+                    "name": {
+                      "description": "The resource name of the service account key in the following format:\n`projects\/{project}\/serviceAccounts\/{account}\/keys\/{key}`.\n\nUsing `-` as a wildcard for the project will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+                      "location": "path",
+                      "required": true,
+                      "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*\/keys\/[^\/]*$",
+                      "type": "string"
+                    },
+                    "publicKeyType": {
+                      "description": "The output format of the public key requested.\nX509_PEM is the default output format.",
+                      "location": "query",
+                      "type": "string",
+                      "enum": [
+                        "TYPE_NONE",
+                        "TYPE_X509_PEM_FILE",
+                        "TYPE_RAW_PUBLIC_KEY"
+                      ]
+                    }
+                  },
+                  "parameterOrder": [
+                    "name"
+                  ],
+                  "response": {
+                    "$ref": "ServiceAccountKey"
+                  },
+                  "scopes": [
+                    "https://www.googleapis.com/auth/cloud-platform"
+                  ]
+                },
+                "create": {
+                  "id": "iam.projects.serviceAccounts.keys.create",
+                  "path": "v1/{+name}/keys",
+                  "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys",
+                  "httpMethod": "POST",
+                  "description": "Creates a ServiceAccountKey\nand returns it.",
+                  "parameters": {
+                    "name": {
+                      "description": "The resource name of the service account in the following format:\n`projects\/{project}\/serviceAccounts\/{account}`.\nUsing `-` as a wildcard for the project will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+                      "location": "path",
+                      "required": true,
+                      "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*$",
+                      "type": "string"
+                    }
+                  },
+                  "parameterOrder": [
+                    "name"
+                  ],
+                  "request": {
+                    "$ref": "CreateServiceAccountKeyRequest"
+                  },
+                  "response": {
+                    "$ref": "ServiceAccountKey"
+                  },
+                  "scopes": [
+                    "https://www.googleapis.com/auth/cloud-platform"
+                  ]
+                },
+                "delete": {
+                  "id": "iam.projects.serviceAccounts.keys.delete",
+                  "path": "v1/{+name}",
+                  "flatPath": "v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys/{keysId}",
+                  "httpMethod": "DELETE",
+                  "description": "Deletes a ServiceAccountKey.",
+                  "parameters": {
+                    "name": {
+                      "description": "The resource name of the service account key in the following format:\n`projects\/{project}\/serviceAccounts\/{account}\/keys\/{key}`.\nUsing `-` as a wildcard for the project will infer the project from\nthe account. The `account` value can be the `email` address or the\n`unique_id` of the service account.",
+                      "location": "path",
+                      "required": true,
+                      "pattern": "^projects\/[^\/]*\/serviceAccounts\/[^\/]*\/keys\/[^\/]*$",
+                      "type": "string"
+                    }
+                  },
+                  "parameterOrder": [
+                    "name"
+                  ],
+                  "response": {
+                    "$ref": "Empty"
+                  },
+                  "scopes": [
+                    "https://www.googleapis.com/auth/cloud-platform"
+                  ]
+                }
+              }
+            }
+          }
+        }
+      }
+    },
+    "roles": {
+      "methods": {
+        "queryGrantableRoles": {
+          "id": "iam.roles.queryGrantableRoles",
+          "path": "v1/roles:queryGrantableRoles",
+          "flatPath": "v1/roles:queryGrantableRoles",
+          "httpMethod": "POST",
+          "description": "Queries roles that can be granted on a particular resource.",
+          "parameters": {
+          },
+          "parameterOrder": [
+          ],
+          "request": {
+            "$ref": "QueryGrantableRolesRequest"
+          },
+          "response": {
+            "$ref": "QueryGrantableRolesResponse"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform"
+          ]
+        }
+      }
+    },
+    "iamPolicies": {
+      "methods": {
+        "getPolicyDetails": {
+          "id": "iam.iamPolicies.getPolicyDetails",
+          "path": "v1/iamPolicies:getPolicyDetails",
+          "flatPath": "v1/iamPolicies:getPolicyDetails",
+          "httpMethod": "POST",
+          "description": "Returns the current IAM policy and the policies on the inherited resources\nthat the user has access to.",
+          "parameters": {
+          },
+          "parameterOrder": [
+          ],
+          "request": {
+            "$ref": "GetPolicyDetailsRequest"
+          },
+          "response": {
+            "$ref": "GetPolicyDetailsResponse"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform"
+          ]
+        }
+      }
+    }
+  },
+  "basePath": ""
+}
diff --git a/samples/iam_sample/iam_v1/__init__.py b/samples/iam_sample/iam_v1/__init__.py
new file mode 100644
index 0000000..2816da8
--- /dev/null
+++ b/samples/iam_sample/iam_v1/__init__.py
@@ -0,0 +1,5 @@
+"""Package marker file."""
+
+import pkgutil
+
+__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/samples/iam_sample/iam_v1/iam_v1.py b/samples/iam_sample/iam_v1/iam_v1.py
new file mode 100644
index 0000000..da9750e
--- /dev/null
+++ b/samples/iam_sample/iam_v1/iam_v1.py
@@ -0,0 +1,921 @@
+#!/usr/bin/env python
+"""CLI for iam, version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+import code
+import os
+import platform
+import sys
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+
+from google.apputils import appcommands
+import gflags as flags
+
+import apitools.base.py as apitools_base
+from apitools.base.py import cli as apitools_base_cli
+import iam_v1_client as client_lib
+import iam_v1_messages as messages
+
+
+def _DeclareIamFlags():
+  """Declare global flags in an idempotent way."""
+  if 'api_endpoint' in flags.FLAGS:
+    return
+  flags.DEFINE_string(
+      'api_endpoint',
+      u'https://iam.googleapis.com/',
+      'URL of the API endpoint to use.',
+      short_name='iam_url')
+  flags.DEFINE_string(
+      'history_file',
+      u'~/.iam.v1.history',
+      'File with interactive shell history.')
+  flags.DEFINE_multistring(
+      'add_header', [],
+      'Additional http headers (as key=value strings). '
+      'Can be specified multiple times.')
+  flags.DEFINE_string(
+      'service_account_json_keyfile', '',
+      'Filename for a JSON service account key downloaded'
+      ' from the Developer Console.')
+  flags.DEFINE_enum(
+      'f__xgafv',
+      u'_1',
+      [u'_1', u'_2'],
+      u'V1 error format.')
+  flags.DEFINE_string(
+      'access_token',
+      None,
+      u'OAuth access token.')
+  flags.DEFINE_enum(
+      'alt',
+      u'json',
+      [u'json', u'media', u'proto'],
+      u'Data format for response.')
+  flags.DEFINE_string(
+      'bearer_token',
+      None,
+      u'OAuth bearer token.')
+  flags.DEFINE_string(
+      'callback',
+      None,
+      u'JSONP')
+  flags.DEFINE_string(
+      'fields',
+      None,
+      u'Selector specifying which fields to include in a partial response.')
+  flags.DEFINE_string(
+      'key',
+      None,
+      u'API key. Your API key identifies your project and provides you with '
+      u'API access, quota, and reports. Required unless you provide an OAuth '
+      u'2.0 token.')
+  flags.DEFINE_string(
+      'oauth_token',
+      None,
+      u'OAuth 2.0 token for the current user.')
+  flags.DEFINE_boolean(
+      'pp',
+      'True',
+      u'Pretty-print response.')
+  flags.DEFINE_boolean(
+      'prettyPrint',
+      'True',
+      u'Returns response with indentations and line breaks.')
+  flags.DEFINE_string(
+      'quotaUser',
+      None,
+      u'Available to use for quota purposes for server-side applications. Can'
+      u' be any arbitrary string assigned to a user, but should not exceed 40'
+      u' characters.')
+  flags.DEFINE_string(
+      'trace',
+      None,
+      'A tracing token of the form "token:<tokenid>" to include in api '
+      'requests.')
+  flags.DEFINE_string(
+      'uploadType',
+      None,
+      u'Legacy upload protocol for media (e.g. "media", "multipart").')
+  flags.DEFINE_string(
+      'upload_protocol',
+      None,
+      u'Upload protocol for media (e.g. "raw", "multipart").')
+
+
+FLAGS = flags.FLAGS
+apitools_base_cli.DeclareBaseFlags()
+_DeclareIamFlags()
+
+
+def GetGlobalParamsFromFlags():
+  """Return a StandardQueryParameters based on flags."""
+  result = messages.StandardQueryParameters()
+  if FLAGS['f__xgafv'].present:
+    result.f__xgafv = messages.StandardQueryParameters.FXgafvValueValuesEnum(FLAGS.f__xgafv)
+  if FLAGS['access_token'].present:
+    result.access_token = FLAGS.access_token.decode('utf8')
+  if FLAGS['alt'].present:
+    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
+  if FLAGS['bearer_token'].present:
+    result.bearer_token = FLAGS.bearer_token.decode('utf8')
+  if FLAGS['callback'].present:
+    result.callback = FLAGS.callback.decode('utf8')
+  if FLAGS['fields'].present:
+    result.fields = FLAGS.fields.decode('utf8')
+  if FLAGS['key'].present:
+    result.key = FLAGS.key.decode('utf8')
+  if FLAGS['oauth_token'].present:
+    result.oauth_token = FLAGS.oauth_token.decode('utf8')
+  if FLAGS['pp'].present:
+    result.pp = FLAGS.pp
+  if FLAGS['prettyPrint'].present:
+    result.prettyPrint = FLAGS.prettyPrint
+  if FLAGS['quotaUser'].present:
+    result.quotaUser = FLAGS.quotaUser.decode('utf8')
+  if FLAGS['trace'].present:
+    result.trace = FLAGS.trace.decode('utf8')
+  if FLAGS['uploadType'].present:
+    result.uploadType = FLAGS.uploadType.decode('utf8')
+  if FLAGS['upload_protocol'].present:
+    result.upload_protocol = FLAGS.upload_protocol.decode('utf8')
+  return result
+
+
+def GetClientFromFlags():
+  """Return a client object, configured from flags."""
+  log_request = FLAGS.log_request or FLAGS.log_request_response
+  log_response = FLAGS.log_response or FLAGS.log_request_response
+  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
+  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
+  credentials_args = {
+      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
+  }
+  try:
+    client = client_lib.IamV1(
+        api_endpoint, log_request=log_request,
+        log_response=log_response,
+        credentials_args=credentials_args,
+        additional_http_headers=additional_http_headers)
+  except apitools_base.CredentialsError as e:
+    print 'Error creating credentials: %s' % e
+    sys.exit(1)
+  return client
+
+
+class PyShell(appcommands.Cmd):
+
+  def Run(self, _):
+    """Run an interactive python shell with the client."""
+    client = GetClientFromFlags()
+    params = GetGlobalParamsFromFlags()
+    for field in params.all_fields():
+      value = params.get_assigned_value(field.name)
+      if value != field.default:
+        client.AddGlobalParam(field.name, value)
+    banner = """
+           == iam interactive console ==
+                 client: a iam client
+          apitools_base: base apitools module
+         messages: the generated messages module
+    """
+    local_vars = {
+        'apitools_base': apitools_base,
+        'client': client,
+        'client_lib': client_lib,
+        'messages': messages,
+    }
+    if platform.system() == 'Linux':
+      console = apitools_base_cli.ConsoleWithReadline(
+          local_vars, histfile=FLAGS.history_file)
+    else:
+      console = code.InteractiveConsole(local_vars)
+    try:
+      console.interact(banner)
+    except SystemExit as e:
+      return e.code
+
+
+class IamPoliciesGetPolicyDetails(apitools_base_cli.NewCmd):
+  """Command wrapping iamPolicies.GetPolicyDetails."""
+
+  usage = """iamPolicies_getPolicyDetails"""
+
+  def __init__(self, name, fv):
+    super(IamPoliciesGetPolicyDetails, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'fullResourcePath',
+        None,
+        u'REQUIRED: The full resource path of the current policy being '
+        u'requested, e.g., `//dataflow.googleapis.com/projects/../jobs/..`.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'pageSize',
+        None,
+        u'Limit on the number of policies to include in the response. Further'
+        u' accounts can subsequently be obtained by including the '
+        u'GetPolicyDetailsResponse.next_page_token in a subsequent request. '
+        u'If zero, the default page size 20 will be used. Must be given a '
+        u'value in range [0, 100], otherwise an invalid argument error will '
+        u'be returned.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Optional pagination token returned in an earlier '
+        u'GetPolicyDetailsResponse.next_page_token response.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Returns the current IAM policy and the policies on the inherited
+    resources that the user has access to.
+
+    Flags:
+      fullResourcePath: REQUIRED: The full resource path of the current policy
+        being requested, e.g.,
+        `//dataflow.googleapis.com/projects/../jobs/..`.
+      pageSize: Limit on the number of policies to include in the response.
+        Further accounts can subsequently be obtained by including the
+        GetPolicyDetailsResponse.next_page_token in a subsequent request. If
+        zero, the default page size 20 will be used. Must be given a value in
+        range [0, 100], otherwise an invalid argument error will be returned.
+      pageToken: Optional pagination token returned in an earlier
+        GetPolicyDetailsResponse.next_page_token response.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.GetPolicyDetailsRequest(
+        )
+    if FLAGS['fullResourcePath'].present:
+      request.fullResourcePath = FLAGS.fullResourcePath.decode('utf8')
+    if FLAGS['pageSize'].present:
+      request.pageSize = FLAGS.pageSize
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.iamPolicies.GetPolicyDetails(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsCreate(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.Create."""
+
+  usage = """projects_serviceAccounts_create <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsCreate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'createServiceAccountRequest',
+        None,
+        u'A CreateServiceAccountRequest resource to be passed as the request '
+        u'body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Creates a ServiceAccount and returns it.
+
+    Args:
+      name: Required. The resource name of the project associated with the
+        service accounts, such as `projects/my-project-123`.
+
+    Flags:
+      createServiceAccountRequest: A CreateServiceAccountRequest resource to
+        be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsCreateRequest(
+        name=name.decode('utf8'),
+        )
+    if FLAGS['createServiceAccountRequest'].present:
+      request.createServiceAccountRequest = apitools_base.JsonToMessage(messages.CreateServiceAccountRequest, FLAGS.createServiceAccountRequest)
+    result = client.projects_serviceAccounts.Create(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsDelete(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.Delete."""
+
+  usage = """projects_serviceAccounts_delete <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, name):
+    """Deletes a ServiceAccount.
+
+    Args:
+      name: The resource name of the service account in the following format:
+        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
+        wildcard for the project will infer the project from the account. The
+        `account` value can be the `email` address or the `unique_id` of the
+        service account.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsDeleteRequest(
+        name=name.decode('utf8'),
+        )
+    result = client.projects_serviceAccounts.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsGet(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.Get."""
+
+  usage = """projects_serviceAccounts_get <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, name):
+    """Gets a ServiceAccount.
+
+    Args:
+      name: The resource name of the service account in the following format:
+        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
+        wildcard for the project will infer the project from the account. The
+        `account` value can be the `email` address or the `unique_id` of the
+        service account.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsGetRequest(
+        name=name.decode('utf8'),
+        )
+    result = client.projects_serviceAccounts.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsGetIamPolicy(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.GetIamPolicy."""
+
+  usage = """projects_serviceAccounts_getIamPolicy <resource>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsGetIamPolicy, self).__init__(name, fv)
+
+  def RunWithArgs(self, resource):
+    """Returns the IAM access control policy for specified IAM resource.
+
+    Args:
+      resource: REQUIRED: The resource for which the policy is being
+        requested. `resource` is usually specified as a path, such as
+        `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the
+        path specified in this value is resource specific and is specified in
+        the `getIamPolicy` documentation.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsGetIamPolicyRequest(
+        resource=resource.decode('utf8'),
+        )
+    result = client.projects_serviceAccounts.GetIamPolicy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsList(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.List."""
+
+  usage = """projects_serviceAccounts_list <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'pageSize',
+        None,
+        u'Optional limit on the number of service accounts to include in the '
+        u'response. Further accounts can subsequently be obtained by '
+        u'including the ListServiceAccountsResponse.next_page_token in a '
+        u'subsequent request.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Optional pagination token returned in an earlier '
+        u'ListServiceAccountsResponse.next_page_token.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'removeDeletedServiceAccounts',
+        None,
+        u'Do not list service accounts deleted from Gaia. <b><font '
+        u'color="red">DO NOT INCLUDE IN EXTERNAL DOCUMENTATION</font></b>.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Lists ServiceAccounts for a project.
+
+    Args:
+      name: Required. The resource name of the project associated with the
+        service accounts, such as `projects/my-project-123`.
+
+    Flags:
+      pageSize: Optional limit on the number of service accounts to include in
+        the response. Further accounts can subsequently be obtained by
+        including the ListServiceAccountsResponse.next_page_token in a
+        subsequent request.
+      pageToken: Optional pagination token returned in an earlier
+        ListServiceAccountsResponse.next_page_token.
+      removeDeletedServiceAccounts: Do not list service accounts deleted from
+        Gaia. <b><font color="red">DO NOT INCLUDE IN EXTERNAL
+        DOCUMENTATION</font></b>.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsListRequest(
+        name=name.decode('utf8'),
+        )
+    if FLAGS['pageSize'].present:
+      request.pageSize = FLAGS.pageSize
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['removeDeletedServiceAccounts'].present:
+      request.removeDeletedServiceAccounts = FLAGS.removeDeletedServiceAccounts
+    result = client.projects_serviceAccounts.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsSetIamPolicy(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.SetIamPolicy."""
+
+  usage = """projects_serviceAccounts_setIamPolicy <resource>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsSetIamPolicy, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'setIamPolicyRequest',
+        None,
+        u'A SetIamPolicyRequest resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, resource):
+    """Sets the IAM access control policy for the specified IAM resource.
+
+    Args:
+      resource: REQUIRED: The resource for which the policy is being
+        specified. `resource` is usually specified as a path, such as
+        `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the
+        path specified in this value is resource specific and is specified in
+        the `setIamPolicy` documentation.
+
+    Flags:
+      setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
+        request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsSetIamPolicyRequest(
+        resource=resource.decode('utf8'),
+        )
+    if FLAGS['setIamPolicyRequest'].present:
+      request.setIamPolicyRequest = apitools_base.JsonToMessage(messages.SetIamPolicyRequest, FLAGS.setIamPolicyRequest)
+    result = client.projects_serviceAccounts.SetIamPolicy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsSignBlob(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.SignBlob."""
+
+  usage = """projects_serviceAccounts_signBlob <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsSignBlob, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'signBlobRequest',
+        None,
+        u'A SignBlobRequest resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Signs a blob using a service account's system-managed private key.
+
+    Args:
+      name: The resource name of the service account in the following format:
+        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
+        wildcard for the project will infer the project from the account. The
+        `account` value can be the `email` address or the `unique_id` of the
+        service account.
+
+    Flags:
+      signBlobRequest: A SignBlobRequest resource to be passed as the request
+        body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsSignBlobRequest(
+        name=name.decode('utf8'),
+        )
+    if FLAGS['signBlobRequest'].present:
+      request.signBlobRequest = apitools_base.JsonToMessage(messages.SignBlobRequest, FLAGS.signBlobRequest)
+    result = client.projects_serviceAccounts.SignBlob(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsSignJwt(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.SignJwt."""
+
+  usage = """projects_serviceAccounts_signJwt <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsSignJwt, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'signJwtRequest',
+        None,
+        u'A SignJwtRequest resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Signs a JWT using a service account's system-managed private key.  If
+    no `exp` (expiry) time is contained in the claims, we will provide an
+    expiry of one hour in the future. If an expiry of more than one hour in
+    the future is requested, the request will fail.
+
+    Args:
+      name: The resource name of the service account in the following format:
+        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
+        wildcard for the project will infer the project from the account. The
+        `account` value can be the `email` address or the `unique_id` of the
+        service account.
+
+    Flags:
+      signJwtRequest: A SignJwtRequest resource to be passed as the request
+        body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsSignJwtRequest(
+        name=name.decode('utf8'),
+        )
+    if FLAGS['signJwtRequest'].present:
+      request.signJwtRequest = apitools_base.JsonToMessage(messages.SignJwtRequest, FLAGS.signJwtRequest)
+    result = client.projects_serviceAccounts.SignJwt(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsTestIamPermissions(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.TestIamPermissions."""
+
+  usage = """projects_serviceAccounts_testIamPermissions <resource>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsTestIamPermissions, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'testIamPermissionsRequest',
+        None,
+        u'A TestIamPermissionsRequest resource to be passed as the request '
+        u'body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, resource):
+    """Tests the specified permissions against the IAM access control policy
+    for the specified IAM resource.
+
+    Args:
+      resource: REQUIRED: The resource for which the policy detail is being
+        requested. `resource` is usually specified as a path, such as
+        `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the
+        path specified in this value is resource specific and is specified in
+        the `testIamPermissions` documentation.
+
+    Flags:
+      testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
+        passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsTestIamPermissionsRequest(
+        resource=resource.decode('utf8'),
+        )
+    if FLAGS['testIamPermissionsRequest'].present:
+      request.testIamPermissionsRequest = apitools_base.JsonToMessage(messages.TestIamPermissionsRequest, FLAGS.testIamPermissionsRequest)
+    result = client.projects_serviceAccounts.TestIamPermissions(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts.Update."""
+
+  usage = """projects_serviceAccounts_update <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'description',
+        None,
+        u'Optional. A user-specified opaque description of the service '
+        u'account.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'displayName',
+        None,
+        u'Optional. A user-specified description of the service account.  '
+        u'Must be fewer than 100 UTF-8 bytes.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'email',
+        None,
+        u'@OutputOnly The email address of the service account.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'etag',
+        None,
+        u'Used to perform a consistent read-modify-write.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'oauth2ClientId',
+        None,
+        u'@OutputOnly. The OAuth2 client id for the service account. This is '
+        u'used in conjunction with the OAuth2 clientconfig API to make three '
+        u'legged OAuth2 (3LO) flows to access the data of Google users.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'projectId',
+        None,
+        u'@OutputOnly The id of the project that owns the service account.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'uniqueId',
+        None,
+        u'@OutputOnly The unique and stable id of the service account.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Updates a ServiceAccount.  Currently, only the following fields are
+    updatable: `display_name` . The `etag` is mandatory.
+
+    Args:
+      name: The resource name of the service account in the following format:
+        `projects/{project}/serviceAccounts/{account}`.  Requests using `-` as
+        a wildcard for the project will infer the project from the `account`
+        and the `account` value can be the `email` address or the `unique_id`
+        of the service account.  In responses the resource name will always be
+        in the format `projects/{project}/serviceAccounts/{email}`.
+
+    Flags:
+      description: Optional. A user-specified opaque description of the
+        service account.
+      displayName: Optional. A user-specified description of the service
+        account.  Must be fewer than 100 UTF-8 bytes.
+      email: @OutputOnly The email address of the service account.
+      etag: Used to perform a consistent read-modify-write.
+      oauth2ClientId: @OutputOnly. The OAuth2 client id for the service
+        account. This is used in conjunction with the OAuth2 clientconfig API
+        to make three legged OAuth2 (3LO) flows to access the data of Google
+        users.
+      projectId: @OutputOnly The id of the project that owns the service
+        account.
+      uniqueId: @OutputOnly The unique and stable id of the service account.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServiceAccount(
+        name=name.decode('utf8'),
+        )
+    if FLAGS['description'].present:
+      request.description = FLAGS.description.decode('utf8')
+    if FLAGS['displayName'].present:
+      request.displayName = FLAGS.displayName.decode('utf8')
+    if FLAGS['email'].present:
+      request.email = FLAGS.email.decode('utf8')
+    if FLAGS['etag'].present:
+      request.etag = FLAGS.etag
+    if FLAGS['oauth2ClientId'].present:
+      request.oauth2ClientId = FLAGS.oauth2ClientId.decode('utf8')
+    if FLAGS['projectId'].present:
+      request.projectId = FLAGS.projectId.decode('utf8')
+    if FLAGS['uniqueId'].present:
+      request.uniqueId = FLAGS.uniqueId.decode('utf8')
+    result = client.projects_serviceAccounts.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsKeysCreate(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts_keys.Create."""
+
+  usage = """projects_serviceAccounts_keys_create <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsKeysCreate, self).__init__(name, fv)
+    # Flag carrying the JSON-serialized request body; it is parsed into a
+    # CreateServiceAccountKeyRequest message in RunWithArgs.
+    flags.DEFINE_string(
+        'createServiceAccountKeyRequest',
+        None,
+        u'A CreateServiceAccountKeyRequest resource to be passed as the '
+        u'request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Creates a ServiceAccountKey and returns it.
+
+    Args:
+      name: The resource name of the service account in the following format:
+        `projects/{project}/serviceAccounts/{account}`. Using `-` as a
+        wildcard for the project will infer the project from the account. The
+        `account` value can be the `email` address or the `unique_id` of the
+        service account.
+
+    Flags:
+      createServiceAccountKeyRequest: A CreateServiceAccountKeyRequest
+        resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsKeysCreateRequest(
+        name=name.decode('utf8'),
+        )
+    # Only attach the request body when the flag was explicitly set on the
+    # command line; the JSON string is deserialized into the message type.
+    if FLAGS['createServiceAccountKeyRequest'].present:
+      request.createServiceAccountKeyRequest = apitools_base.JsonToMessage(messages.CreateServiceAccountKeyRequest, FLAGS.createServiceAccountKeyRequest)
+    result = client.projects_serviceAccounts_keys.Create(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsKeysDelete(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts_keys.Delete."""
+
+  usage = """projects_serviceAccounts_keys_delete <name>"""
+
+  def __init__(self, name, fv):
+    # No command-specific flags; the key name is the only input.
+    super(ProjectsServiceAccountsKeysDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, name):
+    """Deletes a ServiceAccountKey.
+
+    Args:
+      name: The resource name of the service account key in the following
+        format: `projects/{project}/serviceAccounts/{account}/keys/{key}`.
+        Using `-` as a wildcard for the project will infer the project from
+        the account. The `account` value can be the `email` address or the
+        `unique_id` of the service account.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsKeysDeleteRequest(
+        name=name.decode('utf8'),
+        )
+    result = client.projects_serviceAccounts_keys.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsKeysGet(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts_keys.Get."""
+
+  usage = """projects_serviceAccounts_keys_get <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsKeysGet, self).__init__(name, fv)
+    # Enum flag: the value is validated against the choice list by gflags
+    # and defaults to TYPE_NONE when the flag is absent.
+    flags.DEFINE_enum(
+        'publicKeyType',
+        u'TYPE_NONE',
+        [u'TYPE_NONE', u'TYPE_X509_PEM_FILE', u'TYPE_RAW_PUBLIC_KEY'],
+        u'The output format of the public key requested. X509_PEM is the '
+        u'default output format.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Gets the ServiceAccountKey by key id.
+
+    Args:
+      name: The resource name of the service account key in the following
+        format: `projects/{project}/serviceAccounts/{account}/keys/{key}`.
+        Using `-` as a wildcard for the project will infer the project from
+        the account. The `account` value can be the `email` address or the
+        `unique_id` of the service account.
+
+    Flags:
+      publicKeyType: The output format of the public key requested. X509_PEM
+        is the default output format.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsKeysGetRequest(
+        name=name.decode('utf8'),
+        )
+    # The flag string is converted to the request's enum type; gflags has
+    # already restricted it to valid choices.
+    if FLAGS['publicKeyType'].present:
+      request.publicKeyType = messages.IamProjectsServiceAccountsKeysGetRequest.PublicKeyTypeValueValuesEnum(FLAGS.publicKeyType)
+    result = client.projects_serviceAccounts_keys.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ProjectsServiceAccountsKeysList(apitools_base_cli.NewCmd):
+  """Command wrapping projects_serviceAccounts_keys.List."""
+
+  usage = """projects_serviceAccounts_keys_list <name>"""
+
+  def __init__(self, name, fv):
+    super(ProjectsServiceAccountsKeysList, self).__init__(name, fv)
+    flags.DEFINE_enum(
+        'keyTypes',
+        u'KEY_TYPE_UNSPECIFIED',
+        [u'KEY_TYPE_UNSPECIFIED', u'USER_MANAGED', u'SYSTEM_MANAGED'],
+        u'Filters the types of keys the user wants to include in the list '
+        u'response. Duplicate key types are not allowed. If no key type is '
+        u'provided, all keys are returned.',
+        flag_values=fv)
+
+  def RunWithArgs(self, name):
+    """Lists ServiceAccountKeys.
+
+    Args:
+      name: The resource name of the service account in the following format:
+        `projects/{project}/serviceAccounts/{account}`.  Using `-` as a
+        wildcard for the project, will infer the project from the account. The
+        `account` value can be the `email` address or the `unique_id` of the
+        service account.
+
+    Flags:
+      keyTypes: Filters the types of keys the user wants to include in the
+        list response. Duplicate key types are not allowed. If no key type is
+        provided, all keys are returned.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.IamProjectsServiceAccountsKeysListRequest(
+        name=name.decode('utf8'),
+        )
+    if FLAGS['keyTypes'].present:
+      request.keyTypes = [messages.IamProjectsServiceAccountsKeysListRequest.KeyTypesValueValuesEnum(x) for x in FLAGS.keyTypes]
+    result = client.projects_serviceAccounts_keys.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class RolesQueryGrantableRoles(apitools_base_cli.NewCmd):
+  """Command wrapping roles.QueryGrantableRoles."""
+
+  usage = """roles_queryGrantableRoles"""
+
+  def __init__(self, name, fv):
+    super(RolesQueryGrantableRoles, self).__init__(name, fv)
+    # The target resource is passed as a flag rather than a positional
+    # argument because the underlying RPC takes no path parameters.
+    flags.DEFINE_string(
+        'fullResourceName',
+        None,
+        u'Required. The full resource name to query from the list of '
+        u'grantable roles.  The name follows the Google Cloud Platform '
+        u'resource format. For example, a Cloud Platform project with id `my-'
+        u'project` will be named '
+        u'`//cloudresourcemanager.googleapis.com/projects/my-project`.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Queries roles that can be granted on a particular resource.
+
+    Flags:
+      fullResourceName: Required. The full resource name to query from the
+        list of grantable roles.  The name follows the Google Cloud Platform
+        resource format. For example, a Cloud Platform project with id `my-
+        project` will be named `//cloudresourcemanager.googleapis.com/projects
+        /my-project`.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.QueryGrantableRolesRequest(
+        )
+    if FLAGS['fullResourceName'].present:
+      request.fullResourceName = FLAGS.fullResourceName.decode('utf8')
+    result = client.roles.QueryGrantableRoles(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+def main(_):
+  # Register every generated command with appcommands; the command name on
+  # the CLI maps one-to-one onto the wrapped API method.
+  appcommands.AddCmd('pyshell', PyShell)
+  appcommands.AddCmd('iamPolicies_getPolicyDetails', IamPoliciesGetPolicyDetails)
+  appcommands.AddCmd('projects_serviceAccounts_create', ProjectsServiceAccountsCreate)
+  appcommands.AddCmd('projects_serviceAccounts_delete', ProjectsServiceAccountsDelete)
+  appcommands.AddCmd('projects_serviceAccounts_get', ProjectsServiceAccountsGet)
+  appcommands.AddCmd('projects_serviceAccounts_getIamPolicy', ProjectsServiceAccountsGetIamPolicy)
+  appcommands.AddCmd('projects_serviceAccounts_list', ProjectsServiceAccountsList)
+  appcommands.AddCmd('projects_serviceAccounts_setIamPolicy', ProjectsServiceAccountsSetIamPolicy)
+  appcommands.AddCmd('projects_serviceAccounts_signBlob', ProjectsServiceAccountsSignBlob)
+  appcommands.AddCmd('projects_serviceAccounts_signJwt', ProjectsServiceAccountsSignJwt)
+  appcommands.AddCmd('projects_serviceAccounts_testIamPermissions', ProjectsServiceAccountsTestIamPermissions)
+  appcommands.AddCmd('projects_serviceAccounts_update', ProjectsServiceAccountsUpdate)
+  appcommands.AddCmd('projects_serviceAccounts_keys_create', ProjectsServiceAccountsKeysCreate)
+  appcommands.AddCmd('projects_serviceAccounts_keys_delete', ProjectsServiceAccountsKeysDelete)
+  appcommands.AddCmd('projects_serviceAccounts_keys_get', ProjectsServiceAccountsKeysGet)
+  appcommands.AddCmd('projects_serviceAccounts_keys_list', ProjectsServiceAccountsKeysList)
+  appcommands.AddCmd('roles_queryGrantableRoles', RolesQueryGrantableRoles)
+
+  apitools_base_cli.SetupLogger()
+  # Older appcommands versions lack SetDefaultCommand; guard with hasattr so
+  # the CLI still starts (just without a default command) on those versions.
+  if hasattr(appcommands, 'SetDefaultCommand'):
+    appcommands.SetDefaultCommand('pyshell')
+
+
+# Re-export run_main at module level — presumably so a setuptools
+# console_script entry point can reference it; verify against setup.py.
+run_main = apitools_base_cli.run_main
+
+if __name__ == '__main__':
+  appcommands.Run()
diff --git a/samples/iam_sample/iam_v1/iam_v1_client.py b/samples/iam_sample/iam_v1/iam_v1_client.py
new file mode 100644
index 0000000..883c4d4
--- /dev/null
+++ b/samples/iam_sample/iam_v1/iam_v1_client.py
@@ -0,0 +1,535 @@
+"""Generated client library for iam version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+from apitools.base.py import base_api
+from samples.iam_sample.iam_v1 import iam_v1_messages as messages
+
+
+class IamV1(base_api.BaseApiClient):
+  """Generated client library for service iam version v1."""
+
+  MESSAGES_MODULE = messages
+  BASE_URL = u'https://iam.googleapis.com/'
+
+  _PACKAGE = u'iam'
+  _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform']
+  _VERSION = u'v1'
+  # NOTE(review): baked-in OAuth2 installed-app client credentials — these
+  # are the generator's defaults, not per-user secrets; confirm they are
+  # intended to ship in this sample.
+  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
+  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _CLIENT_CLASS_NAME = u'IamV1'
+  _URL_VERSION = u'v1'
+  _API_KEY = None
+
+  def __init__(self, url='', credentials=None,
+               get_credentials=True, http=None, model=None,
+               log_request=False, log_response=False,
+               credentials_args=None, default_global_params=None,
+               additional_http_headers=None):
+    """Create a new iam handle."""
+    url = url or self.BASE_URL
+    super(IamV1, self).__init__(
+        url, credentials=credentials,
+        get_credentials=get_credentials, http=http, model=model,
+        log_request=log_request, log_response=log_response,
+        credentials_args=credentials_args,
+        default_global_params=default_global_params,
+        additional_http_headers=additional_http_headers)
+    # One service stub per API resource collection; each wraps a group of
+    # RPC methods defined in the nested service classes below.
+    self.iamPolicies = self.IamPoliciesService(self)
+    self.projects_serviceAccounts_keys = self.ProjectsServiceAccountsKeysService(self)
+    self.projects_serviceAccounts = self.ProjectsServiceAccountsService(self)
+    self.projects = self.ProjectsService(self)
+    self.roles = self.RolesService(self)
+
+  class IamPoliciesService(base_api.BaseApiService):
+    """Service class for the iamPolicies resource."""
+
+    _NAME = u'iamPolicies'
+
+    def __init__(self, client):
+      super(IamV1.IamPoliciesService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def GetPolicyDetails(self, request, global_params=None):
+      """Returns the current IAM policy and the policies on the inherited resources.
+that the user has access to.
+
+      Args:
+        request: (GetPolicyDetailsRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (GetPolicyDetailsResponse) The response message.
+      """
+      config = self.GetMethodConfig('GetPolicyDetails')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    # Lazily-built method descriptor; '<request>' means the whole request
+    # message is serialized as the HTTP request body (apitools convention).
+    GetPolicyDetails.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'iam.iamPolicies.getPolicyDetails',
+        ordered_params=[],
+        path_params=[],
+        query_params=[],
+        relative_path=u'v1/iamPolicies:getPolicyDetails',
+        request_field='<request>',
+        request_type_name=u'GetPolicyDetailsRequest',
+        response_type_name=u'GetPolicyDetailsResponse',
+        supports_download=False,
+    )
+
+  class ProjectsServiceAccountsKeysService(base_api.BaseApiService):
+    """Service class for the projects_serviceAccounts_keys resource.
+
+    Each public method issues one RPC; its ApiMethodInfo (HTTP verb, URL
+    template, body field) is attached as a `method_config` lambda.
+    """
+
+    _NAME = u'projects_serviceAccounts_keys'
+
+    def __init__(self, client):
+      super(IamV1.ProjectsServiceAccountsKeysService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Create(self, request, global_params=None):
+      """Creates a ServiceAccountKey.
+and returns it.
+
+      Args:
+        request: (IamProjectsServiceAccountsKeysCreateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ServiceAccountKey) The response message.
+      """
+      config = self.GetMethodConfig('Create')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys',
+        http_method=u'POST',
+        method_id=u'iam.projects.serviceAccounts.keys.create',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[],
+        relative_path=u'v1/{+name}/keys',
+        request_field=u'createServiceAccountKeyRequest',
+        request_type_name=u'IamProjectsServiceAccountsKeysCreateRequest',
+        response_type_name=u'ServiceAccountKey',
+        supports_download=False,
+    )
+
+    def Delete(self, request, global_params=None):
+      """Deletes a ServiceAccountKey.
+
+      Args:
+        request: (IamProjectsServiceAccountsKeysDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Empty) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    # request_field='' — the request carries no HTTP body; all inputs travel
+    # in the URL path.
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys/{keysId}',
+        http_method=u'DELETE',
+        method_id=u'iam.projects.serviceAccounts.keys.delete',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[],
+        relative_path=u'v1/{+name}',
+        request_field='',
+        request_type_name=u'IamProjectsServiceAccountsKeysDeleteRequest',
+        response_type_name=u'Empty',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Gets the ServiceAccountKey.
+by key id.
+
+      Args:
+        request: (IamProjectsServiceAccountsKeysGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ServiceAccountKey) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys/{keysId}',
+        http_method=u'GET',
+        method_id=u'iam.projects.serviceAccounts.keys.get',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[u'publicKeyType'],
+        relative_path=u'v1/{+name}',
+        request_field='',
+        request_type_name=u'IamProjectsServiceAccountsKeysGetRequest',
+        response_type_name=u'ServiceAccountKey',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Lists ServiceAccountKeys.
+
+      Args:
+        request: (IamProjectsServiceAccountsKeysListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ListServiceAccountKeysResponse) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}/keys',
+        http_method=u'GET',
+        method_id=u'iam.projects.serviceAccounts.keys.list',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[u'keyTypes'],
+        relative_path=u'v1/{+name}/keys',
+        request_field='',
+        request_type_name=u'IamProjectsServiceAccountsKeysListRequest',
+        response_type_name=u'ListServiceAccountKeysResponse',
+        supports_download=False,
+    )
+
+  class ProjectsServiceAccountsService(base_api.BaseApiService):
+    """Service class for the projects_serviceAccounts resource.
+
+    Each public method issues one RPC; its ApiMethodInfo (HTTP verb, URL
+    template, body field) is attached as a `method_config` lambda.
+    """
+
+    _NAME = u'projects_serviceAccounts'
+
+    def __init__(self, client):
+      super(IamV1.ProjectsServiceAccountsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Create(self, request, global_params=None):
+      """Creates a ServiceAccount.
+and returns it.
+
+      Args:
+        request: (IamProjectsServiceAccountsCreateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ServiceAccount) The response message.
+      """
+      config = self.GetMethodConfig('Create')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts',
+        http_method=u'POST',
+        method_id=u'iam.projects.serviceAccounts.create',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[],
+        relative_path=u'v1/{+name}/serviceAccounts',
+        request_field=u'createServiceAccountRequest',
+        request_type_name=u'IamProjectsServiceAccountsCreateRequest',
+        response_type_name=u'ServiceAccount',
+        supports_download=False,
+    )
+
+    def Delete(self, request, global_params=None):
+      """Deletes a ServiceAccount.
+
+      Args:
+        request: (IamProjectsServiceAccountsDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Empty) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}',
+        http_method=u'DELETE',
+        method_id=u'iam.projects.serviceAccounts.delete',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[],
+        relative_path=u'v1/{+name}',
+        request_field='',
+        request_type_name=u'IamProjectsServiceAccountsDeleteRequest',
+        response_type_name=u'Empty',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Gets a ServiceAccount.
+
+      Args:
+        request: (IamProjectsServiceAccountsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ServiceAccount) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}',
+        http_method=u'GET',
+        method_id=u'iam.projects.serviceAccounts.get',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[],
+        relative_path=u'v1/{+name}',
+        request_field='',
+        request_type_name=u'IamProjectsServiceAccountsGetRequest',
+        response_type_name=u'ServiceAccount',
+        supports_download=False,
+    )
+
+    def GetIamPolicy(self, request, global_params=None):
+      """Returns the IAM access control policy for specified IAM resource.
+
+      Args:
+        request: (IamProjectsServiceAccountsGetIamPolicyRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Policy) The response message.
+      """
+      config = self.GetMethodConfig('GetIamPolicy')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    # IAM-policy methods address the target via a 'resource' path parameter
+    # rather than 'name', matching the custom-verb URL (':getIamPolicy').
+    GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:getIamPolicy',
+        http_method=u'POST',
+        method_id=u'iam.projects.serviceAccounts.getIamPolicy',
+        ordered_params=[u'resource'],
+        path_params=[u'resource'],
+        query_params=[],
+        relative_path=u'v1/{+resource}:getIamPolicy',
+        request_field='',
+        request_type_name=u'IamProjectsServiceAccountsGetIamPolicyRequest',
+        response_type_name=u'Policy',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Lists ServiceAccounts for a project.
+
+      Args:
+        request: (IamProjectsServiceAccountsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ListServiceAccountsResponse) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts',
+        http_method=u'GET',
+        method_id=u'iam.projects.serviceAccounts.list',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[u'pageSize', u'pageToken', u'removeDeletedServiceAccounts'],
+        relative_path=u'v1/{+name}/serviceAccounts',
+        request_field='',
+        request_type_name=u'IamProjectsServiceAccountsListRequest',
+        response_type_name=u'ListServiceAccountsResponse',
+        supports_download=False,
+    )
+
+    def SetIamPolicy(self, request, global_params=None):
+      """Sets the IAM access control policy for the specified IAM resource.
+
+      Args:
+        request: (IamProjectsServiceAccountsSetIamPolicyRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Policy) The response message.
+      """
+      config = self.GetMethodConfig('SetIamPolicy')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:setIamPolicy',
+        http_method=u'POST',
+        method_id=u'iam.projects.serviceAccounts.setIamPolicy',
+        ordered_params=[u'resource'],
+        path_params=[u'resource'],
+        query_params=[],
+        relative_path=u'v1/{+resource}:setIamPolicy',
+        request_field=u'setIamPolicyRequest',
+        request_type_name=u'IamProjectsServiceAccountsSetIamPolicyRequest',
+        response_type_name=u'Policy',
+        supports_download=False,
+    )
+
+    def SignBlob(self, request, global_params=None):
+      """Signs a blob using a service account's system-managed private key.
+
+      Args:
+        request: (IamProjectsServiceAccountsSignBlobRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (SignBlobResponse) The response message.
+      """
+      config = self.GetMethodConfig('SignBlob')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    SignBlob.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signBlob',
+        http_method=u'POST',
+        method_id=u'iam.projects.serviceAccounts.signBlob',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[],
+        relative_path=u'v1/{+name}:signBlob',
+        request_field=u'signBlobRequest',
+        request_type_name=u'IamProjectsServiceAccountsSignBlobRequest',
+        response_type_name=u'SignBlobResponse',
+        supports_download=False,
+    )
+
+    def SignJwt(self, request, global_params=None):
+      """Signs a JWT using a service account's system-managed private key.
+
+If no `exp` (expiry) time is contained in the claims, we will
+provide an expiry of one hour in the future. If an expiry
+of more than one hour in the future is requested, the request
+will fail.
+
+      Args:
+        request: (IamProjectsServiceAccountsSignJwtRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (SignJwtResponse) The response message.
+      """
+      config = self.GetMethodConfig('SignJwt')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    SignJwt.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:signJwt',
+        http_method=u'POST',
+        method_id=u'iam.projects.serviceAccounts.signJwt',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[],
+        relative_path=u'v1/{+name}:signJwt',
+        request_field=u'signJwtRequest',
+        request_type_name=u'IamProjectsServiceAccountsSignJwtRequest',
+        response_type_name=u'SignJwtResponse',
+        supports_download=False,
+    )
+
+    def TestIamPermissions(self, request, global_params=None):
+      """Tests the specified permissions against the IAM access control policy.
+for the specified IAM resource.
+
+      Args:
+        request: (IamProjectsServiceAccountsTestIamPermissionsRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TestIamPermissionsResponse) The response message.
+      """
+      config = self.GetMethodConfig('TestIamPermissions')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}:testIamPermissions',
+        http_method=u'POST',
+        method_id=u'iam.projects.serviceAccounts.testIamPermissions',
+        ordered_params=[u'resource'],
+        path_params=[u'resource'],
+        query_params=[],
+        relative_path=u'v1/{+resource}:testIamPermissions',
+        request_field=u'testIamPermissionsRequest',
+        request_type_name=u'IamProjectsServiceAccountsTestIamPermissionsRequest',
+        response_type_name=u'TestIamPermissionsResponse',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates a ServiceAccount.
+
+Currently, only the following fields are updatable:
+`display_name` .
+The `etag` is mandatory.
+
+      Args:
+        request: (ServiceAccount) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ServiceAccount) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    # '<request>' — the ServiceAccount message itself is the PUT body
+    # (apitools convention).
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        flat_path=u'v1/projects/{projectsId}/serviceAccounts/{serviceAccountsId}',
+        http_method=u'PUT',
+        method_id=u'iam.projects.serviceAccounts.update',
+        ordered_params=[u'name'],
+        path_params=[u'name'],
+        query_params=[],
+        relative_path=u'v1/{+name}',
+        request_field='<request>',
+        request_type_name=u'ServiceAccount',
+        response_type_name=u'ServiceAccount',
+        supports_download=False,
+    )
+
+  class ProjectsService(base_api.BaseApiService):
+    """Service class for the projects resource.
+
+    The resource exists only as a parent for its sub-collections; it exposes
+    no RPC methods of its own.
+    """
+
+    _NAME = u'projects'
+
+    def __init__(self, client):
+      super(IamV1.ProjectsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+  class RolesService(base_api.BaseApiService):
+    """Service class for the roles resource."""
+
+    _NAME = u'roles'
+
+    def __init__(self, client):
+      super(IamV1.RolesService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def QueryGrantableRoles(self, request, global_params=None):
+      """Queries roles that can be granted on a particular resource.
+
+      Args:
+        request: (QueryGrantableRolesRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (QueryGrantableRolesResponse) The response message.
+      """
+      config = self.GetMethodConfig('QueryGrantableRoles')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    # No path or query parameters: the target resource travels in the POST
+    # body ('<request>' = whole request message, per apitools convention).
+    QueryGrantableRoles.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'iam.roles.queryGrantableRoles',
+        ordered_params=[],
+        path_params=[],
+        query_params=[],
+        relative_path=u'v1/roles:queryGrantableRoles',
+        request_field='<request>',
+        request_type_name=u'QueryGrantableRolesRequest',
+        response_type_name=u'QueryGrantableRolesResponse',
+        supports_download=False,
+    )
diff --git a/samples/iam_sample/iam_v1/iam_v1_messages.py b/samples/iam_sample/iam_v1/iam_v1_messages.py
new file mode 100644
index 0000000..1db85b0
--- /dev/null
+++ b/samples/iam_sample/iam_v1/iam_v1_messages.py
@@ -0,0 +1,964 @@
+"""Generated message classes for iam version v1.
+
+Manages identity and access control for Google Cloud Platform resources,
+including the creation of service accounts, which you can use to authenticate
+to Google and make API calls.
+"""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+from apitools.base.protorpclite import messages as _messages
+from apitools.base.py import encoding
+
+
+package = 'iam'
+
+
+class AuditConfig(_messages.Message):
+  """Enables "data access" audit logging for a service and specifies a list of
+  members that are log-exempted.
+
+  Fields:
+    exemptedMembers: Specifies the identities that are exempted from "data
+      access" audit logging for the `service` specified above. Follows the
+      same format of Binding.members.
+    service: Specifies a service that will be enabled for "data access" audit
+      logging. For example, `resourcemanager`, `storage`, `compute`.
+      `allServices` is a special value that covers all services.
+  """
+
+  exemptedMembers = _messages.StringField(1, repeated=True)
+  service = _messages.StringField(2)
+
+
+class Binding(_messages.Message):
+  """Associates `members` with a `role`.
+
+  Fields:
+    members: Specifies the identities requesting access for a Cloud Platform
+      resource. `members` can have the following values:  * `allUsers`: A
+      special identifier that represents anyone who is    on the internet;
+      with or without a Google account.  * `allAuthenticatedUsers`: A special
+      identifier that represents anyone    who is authenticated with a Google
+      account or a service account.  * `user:{emailid}`: An email address that
+      represents a specific Google    account. For example, `alice@gmail.com`
+      or `joe@example.com`.  * `serviceAccount:{emailid}`: An email address
+      that represents a service    account. For example, `my-other-
+      app@appspot.gserviceaccount.com`.  * `group:{emailid}`: An email address
+      that represents a Google group.    For example, `admins@example.com`.  *
+      `domain:{domain}`: A Google Apps domain name that represents all the
+      users of that domain. For example, `google.com` or `example.com`.
+    role: Role that is assigned to `members`. For example, `roles/viewer`,
+      `roles/editor`, or `roles/owner`. Required
+  """
+
+  members = _messages.StringField(1, repeated=True)
+  role = _messages.StringField(2)
+
+
+class CloudAuditOptions(_messages.Message):
+  """Write a Cloud Audit log"""
+
+
+class Condition(_messages.Message):
+  """A condition to be met.
+
+  Enums:
+    IamValueValuesEnum: Trusted attributes supplied by the IAM system.
+    OpValueValuesEnum: An operator to apply the subject with.
+    SysValueValuesEnum: Trusted attributes supplied by any service that owns
+      resources and uses the IAM system for access control.
+
+  Fields:
+    iam: Trusted attributes supplied by the IAM system.
+    op: An operator to apply the subject with.
+    svc: Trusted attributes discharged by the service.
+    sys: Trusted attributes supplied by any service that owns resources and
+      uses the IAM system for access control.
+    value: DEPRECATED. Use 'values' instead.
+    values: The objects of the condition. This is mutually exclusive with
+      'value'.
+  """
+
+  class IamValueValuesEnum(_messages.Enum):
+    """Trusted attributes supplied by the IAM system.
+
+    Values:
+      NO_ATTR: Default non-attribute.
+      AUTHORITY: Either principal or (if present) authority
+      ATTRIBUTION: selector Always the original principal, but making clear
+    """
+    NO_ATTR = 0
+    AUTHORITY = 1
+    ATTRIBUTION = 2
+
+  class OpValueValuesEnum(_messages.Enum):
+    """An operator to apply the subject with.
+
+    Values:
+      NO_OP: Default no-op.
+      EQUALS: DEPRECATED. Use IN instead.
+      NOT_EQUALS: DEPRECATED. Use NOT_IN instead.
+      IN: Set-inclusion check.
+      NOT_IN: Set-exclusion check.
+      DISCHARGED: Subject is discharged
+    """
+    NO_OP = 0
+    EQUALS = 1
+    NOT_EQUALS = 2
+    IN = 3
+    NOT_IN = 4
+    DISCHARGED = 5
+
+  class SysValueValuesEnum(_messages.Enum):
+    """Trusted attributes supplied by any service that owns resources and uses
+    the IAM system for access control.
+
+    Values:
+      NO_ATTR: Default non-attribute type
+      REGION: Region of the resource
+      SERVICE: Service name
+      NAME: Resource name
+      IP: IP address of the caller
+    """
+    NO_ATTR = 0
+    REGION = 1
+    SERVICE = 2
+    NAME = 3
+    IP = 4
+
+  iam = _messages.EnumField('IamValueValuesEnum', 1)
+  op = _messages.EnumField('OpValueValuesEnum', 2)
+  svc = _messages.StringField(3)
+  sys = _messages.EnumField('SysValueValuesEnum', 4)
+  value = _messages.StringField(5)
+  values = _messages.StringField(6, repeated=True)
+
+
+class CounterOptions(_messages.Message):
+  """Options for counters
+
+  Fields:
+    field: The field value to attribute.
+    metric: The metric to update.
+  """
+
+  field = _messages.StringField(1)
+  metric = _messages.StringField(2)
+
+
+class CreateServiceAccountKeyRequest(_messages.Message):
+  """The service account key create request.
+
+  Enums:
+    PrivateKeyTypeValueValuesEnum: The output format of the private key.
+      `GOOGLE_CREDENTIALS_FILE` is the default output format.
+
+  Fields:
+    privateKeyType: The output format of the private key.
+      `GOOGLE_CREDENTIALS_FILE` is the default output format.
+  """
+
+  class PrivateKeyTypeValueValuesEnum(_messages.Enum):
+    """The output format of the private key. `GOOGLE_CREDENTIALS_FILE` is the
+    default output format.
+
+    Values:
+      TYPE_UNSPECIFIED: Unspecified. Equivalent to
+        `TYPE_GOOGLE_CREDENTIALS_FILE`.
+      TYPE_PKCS12_FILE: PKCS12 format. The password for the PKCS12 file is
+        `notasecret`. For more information, see
+        https://tools.ietf.org/html/rfc7292.
+      TYPE_GOOGLE_CREDENTIALS_FILE: Google Credentials File format.
+    """
+    TYPE_UNSPECIFIED = 0
+    TYPE_PKCS12_FILE = 1
+    TYPE_GOOGLE_CREDENTIALS_FILE = 2
+
+  privateKeyType = _messages.EnumField('PrivateKeyTypeValueValuesEnum', 1)
+
+
+class CreateServiceAccountRequest(_messages.Message):
+  """The service account create request.
+
+  Fields:
+    accountId: Required. The account id that is used to generate the service
+      account email address and a stable unique id. It is unique within a
+      project, must be 1-63 characters long, and match the regular expression
+      `[a-z]([-a-z0-9]*[a-z0-9])` to comply with RFC1035.
+    serviceAccount: The ServiceAccount resource to create. Currently, only the
+      following values are user assignable: `display_name` .
+  """
+
+  accountId = _messages.StringField(1)
+  serviceAccount = _messages.MessageField('ServiceAccount', 2)
+
+
+class DataAccessOptions(_messages.Message):
+  """Write a Data Access (Gin) log"""
+
+
+class Empty(_messages.Message):
+  """A generic empty message that you can re-use to avoid defining duplicated
+  empty messages in your APIs. A typical example is to use it as the request
+  or the response type of an API method. For instance:      service Foo {
+  rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);     }  The
+  JSON representation for `Empty` is empty JSON object `{}`.
+  """
+
+
+
+class GetPolicyDetailsRequest(_messages.Message):
+  """The request to get the current policy and the policies on the inherited
+  resources the user has access to.
+
+  Fields:
+    fullResourcePath: REQUIRED: The full resource path of the current policy
+      being requested, e.g., `//dataflow.googleapis.com/projects/../jobs/..`.
+    pageSize: Limit on the number of policies to include in the response.
+      Further accounts can subsequently be obtained by including the
+      GetPolicyDetailsResponse.next_page_token in a subsequent request. If
+      zero, the default page size 20 will be used. Must be given a value in
+      range [0, 100], otherwise an invalid argument error will be returned.
+    pageToken: Optional pagination token returned in an earlier
+      GetPolicyDetailsResponse.next_page_token response.
+  """
+
+  fullResourcePath = _messages.StringField(1)
+  pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+  pageToken = _messages.StringField(3)
+
+
+class GetPolicyDetailsResponse(_messages.Message):
+  """The response to the `GetPolicyDetailsRequest` containing the current
+  policy and the policies on the inherited resources the user has access to.
+
+  Fields:
+    nextPageToken: To retrieve the next page of results, set
+      GetPolicyDetailsRequest.page_token to this value. If this value is
+      empty, then there are not any further policies that the user has access
+      to. The lifetime is 60 minutes. An "Expired pagination token" error will
+      be returned if exceeded.
+    policies: The current policy and all the inherited policies the user has
+      access to.
+  """
+
+  nextPageToken = _messages.StringField(1)
+  policies = _messages.MessageField('PolicyDetail', 2, repeated=True)
+
+
+class IamProjectsServiceAccountsCreateRequest(_messages.Message):
+  """A IamProjectsServiceAccountsCreateRequest object.
+
+  Fields:
+    createServiceAccountRequest: A CreateServiceAccountRequest resource to be
+      passed as the request body.
+    name: Required. The resource name of the project associated with the
+      service accounts, such as `projects/my-project-123`.
+  """
+
+  createServiceAccountRequest = _messages.MessageField('CreateServiceAccountRequest', 1)
+  name = _messages.StringField(2, required=True)
+
+
+class IamProjectsServiceAccountsDeleteRequest(_messages.Message):
+  """A IamProjectsServiceAccountsDeleteRequest object.
+
+  Fields:
+    name: The resource name of the service account in the following format:
+      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
+      for the project will infer the project from the account. The `account`
+      value can be the `email` address or the `unique_id` of the service
+      account.
+  """
+
+  name = _messages.StringField(1, required=True)
+
+
+class IamProjectsServiceAccountsGetIamPolicyRequest(_messages.Message):
+  """A IamProjectsServiceAccountsGetIamPolicyRequest object.
+
+  Fields:
+    resource: REQUIRED: The resource for which the policy is being requested.
+      `resource` is usually specified as a path, such as
+      `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the path
+      specified in this value is resource specific and is specified in the
+      `getIamPolicy` documentation.
+  """
+
+  resource = _messages.StringField(1, required=True)
+
+
+class IamProjectsServiceAccountsGetRequest(_messages.Message):
+  """A IamProjectsServiceAccountsGetRequest object.
+
+  Fields:
+    name: The resource name of the service account in the following format:
+      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
+      for the project will infer the project from the account. The `account`
+      value can be the `email` address or the `unique_id` of the service
+      account.
+  """
+
+  name = _messages.StringField(1, required=True)
+
+
+class IamProjectsServiceAccountsKeysCreateRequest(_messages.Message):
+  """A IamProjectsServiceAccountsKeysCreateRequest object.
+
+  Fields:
+    createServiceAccountKeyRequest: A CreateServiceAccountKeyRequest resource
+      to be passed as the request body.
+    name: The resource name of the service account in the following format:
+      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
+      for the project will infer the project from the account. The `account`
+      value can be the `email` address or the `unique_id` of the service
+      account.
+  """
+
+  createServiceAccountKeyRequest = _messages.MessageField('CreateServiceAccountKeyRequest', 1)
+  name = _messages.StringField(2, required=True)
+
+
+class IamProjectsServiceAccountsKeysDeleteRequest(_messages.Message):
+  """A IamProjectsServiceAccountsKeysDeleteRequest object.
+
+  Fields:
+    name: The resource name of the service account key in the following
+      format: `projects/{project}/serviceAccounts/{account}/keys/{key}`. Using
+      `-` as a wildcard for the project will infer the project from the
+      account. The `account` value can be the `email` address or the
+      `unique_id` of the service account.
+  """
+
+  name = _messages.StringField(1, required=True)
+
+
+class IamProjectsServiceAccountsKeysGetRequest(_messages.Message):
+  """A IamProjectsServiceAccountsKeysGetRequest object.
+
+  Enums:
+    PublicKeyTypeValueValuesEnum: The output format of the public key
+      requested. X509_PEM is the default output format.
+
+  Fields:
+    name: The resource name of the service account key in the following
+      format: `projects/{project}/serviceAccounts/{account}/keys/{key}`.
+      Using `-` as a wildcard for the project will infer the project from the
+      account. The `account` value can be the `email` address or the
+      `unique_id` of the service account.
+    publicKeyType: The output format of the public key requested. X509_PEM is
+      the default output format.
+  """
+
+  class PublicKeyTypeValueValuesEnum(_messages.Enum):
+    """The output format of the public key requested. X509_PEM is the default
+    output format.
+
+    Values:
+      TYPE_NONE: <no description>
+      TYPE_X509_PEM_FILE: <no description>
+      TYPE_RAW_PUBLIC_KEY: <no description>
+    """
+    TYPE_NONE = 0
+    TYPE_X509_PEM_FILE = 1
+    TYPE_RAW_PUBLIC_KEY = 2
+
+  name = _messages.StringField(1, required=True)
+  publicKeyType = _messages.EnumField('PublicKeyTypeValueValuesEnum', 2)
+
+
+class IamProjectsServiceAccountsKeysListRequest(_messages.Message):
+  """A IamProjectsServiceAccountsKeysListRequest object.
+
+  Enums:
+    KeyTypesValueValuesEnum: Filters the types of keys the user wants to
+      include in the list response. Duplicate key types are not allowed. If no
+      key type is provided, all keys are returned.
+
+  Fields:
+    keyTypes: Filters the types of keys the user wants to include in the list
+      response. Duplicate key types are not allowed. If no key type is
+      provided, all keys are returned.
+    name: The resource name of the service account in the following format:
+      `projects/{project}/serviceAccounts/{account}`.  Using `-` as a wildcard
+      for the project, will infer the project from the account. The `account`
+      value can be the `email` address or the `unique_id` of the service
+      account.
+  """
+
+  class KeyTypesValueValuesEnum(_messages.Enum):
+    """Filters the types of keys the user wants to include in the list
+    response. Duplicate key types are not allowed. If no key type is provided,
+    all keys are returned.
+
+    Values:
+      KEY_TYPE_UNSPECIFIED: <no description>
+      USER_MANAGED: <no description>
+      SYSTEM_MANAGED: <no description>
+    """
+    KEY_TYPE_UNSPECIFIED = 0
+    USER_MANAGED = 1
+    SYSTEM_MANAGED = 2
+
+  keyTypes = _messages.EnumField('KeyTypesValueValuesEnum', 1, repeated=True)
+  name = _messages.StringField(2, required=True)
+
+
+class IamProjectsServiceAccountsListRequest(_messages.Message):
+  """A IamProjectsServiceAccountsListRequest object.
+
+  Fields:
+    name: Required. The resource name of the project associated with the
+      service accounts, such as `projects/my-project-123`.
+    pageSize: Optional limit on the number of service accounts to include in
+      the response. Further accounts can subsequently be obtained by including
+      the ListServiceAccountsResponse.next_page_token in a subsequent request.
+    pageToken: Optional pagination token returned in an earlier
+      ListServiceAccountsResponse.next_page_token.
+    removeDeletedServiceAccounts: Do not list service accounts deleted from
+      Gaia. <b><font color="red">DO NOT INCLUDE IN EXTERNAL
+      DOCUMENTATION</font></b>.
+  """
+
+  name = _messages.StringField(1, required=True)
+  pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+  pageToken = _messages.StringField(3)
+  removeDeletedServiceAccounts = _messages.BooleanField(4)
+
+
+class IamProjectsServiceAccountsSetIamPolicyRequest(_messages.Message):
+  """A IamProjectsServiceAccountsSetIamPolicyRequest object.
+
+  Fields:
+    resource: REQUIRED: The resource for which the policy is being specified.
+      `resource` is usually specified as a path, such as
+      `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the path
+      specified in this value is resource specific and is specified in the
+      `setIamPolicy` documentation.
+    setIamPolicyRequest: A SetIamPolicyRequest resource to be passed as the
+      request body.
+  """
+
+  resource = _messages.StringField(1, required=True)
+  setIamPolicyRequest = _messages.MessageField('SetIamPolicyRequest', 2)
+
+
+class IamProjectsServiceAccountsSignBlobRequest(_messages.Message):
+  """A IamProjectsServiceAccountsSignBlobRequest object.
+
+  Fields:
+    name: The resource name of the service account in the following format:
+      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
+      for the project will infer the project from the account. The `account`
+      value can be the `email` address or the `unique_id` of the service
+      account.
+    signBlobRequest: A SignBlobRequest resource to be passed as the request
+      body.
+  """
+
+  name = _messages.StringField(1, required=True)
+  signBlobRequest = _messages.MessageField('SignBlobRequest', 2)
+
+
+class IamProjectsServiceAccountsSignJwtRequest(_messages.Message):
+  """A IamProjectsServiceAccountsSignJwtRequest object.
+
+  Fields:
+    name: The resource name of the service account in the following format:
+      `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard
+      for the project will infer the project from the account. The `account`
+      value can be the `email` address or the `unique_id` of the service
+      account.
+    signJwtRequest: A SignJwtRequest resource to be passed as the request
+      body.
+  """
+
+  name = _messages.StringField(1, required=True)
+  signJwtRequest = _messages.MessageField('SignJwtRequest', 2)
+
+
+class IamProjectsServiceAccountsTestIamPermissionsRequest(_messages.Message):
+  """A IamProjectsServiceAccountsTestIamPermissionsRequest object.
+
+  Fields:
+    resource: REQUIRED: The resource for which the policy detail is being
+      requested. `resource` is usually specified as a path, such as
+      `projects/*project*/zones/*zone*/disks/*disk*`.  The format for the path
+      specified in this value is resource specific and is specified in the
+      `testIamPermissions` documentation.
+    testIamPermissionsRequest: A TestIamPermissionsRequest resource to be
+      passed as the request body.
+  """
+
+  resource = _messages.StringField(1, required=True)
+  testIamPermissionsRequest = _messages.MessageField('TestIamPermissionsRequest', 2)
+
+
+class ListServiceAccountKeysResponse(_messages.Message):
+  """The service account keys list response.
+
+  Fields:
+    keys: The public keys for the service account.
+  """
+
+  keys = _messages.MessageField('ServiceAccountKey', 1, repeated=True)
+
+
+class ListServiceAccountsResponse(_messages.Message):
+  """The service account list response.
+
+  Fields:
+    accounts: The list of matching service accounts.
+    nextPageToken: To retrieve the next page of results, set
+      ListServiceAccountsRequest.page_token to this value.
+  """
+
+  accounts = _messages.MessageField('ServiceAccount', 1, repeated=True)
+  nextPageToken = _messages.StringField(2)
+
+
+class LogConfig(_messages.Message):
+  """Specifies what kind of log the caller must write Increment a streamz
+  counter with the specified metric and field names.  Metric names should
+  start with a '/', generally be lowercase-only, and end in "_count". Field
+  names should not contain an initial slash. The actual exported metric names
+  will have "/iam/policy" prepended.  Field names correspond to IAM request
+  parameters and field values are their respective values.  At present the
+  only supported field names are    - "iam_principal", corresponding to
+  IAMContext.principal;    - "" (empty string), resulting in one aggretated
+  counter with no field.  Examples:   counter { metric: "/debug_access_count"
+  field: "iam_principal" }   ==> increment counter
+  /iam/policy/backend_debug_access_count
+  {iam_principal=[value of IAMContext.principal]}  At this time we do not
+  support: * multiple field names (though this may be supported in the future)
+  * decrementing the counter * incrementing it by anything other than 1
+
+  Fields:
+    cloudAudit: Cloud audit options.
+    counter: Counter options.
+    dataAccess: Data access options.
+  """
+
+  cloudAudit = _messages.MessageField('CloudAuditOptions', 1)
+  counter = _messages.MessageField('CounterOptions', 2)
+  dataAccess = _messages.MessageField('DataAccessOptions', 3)
+
+
+class Policy(_messages.Message):
+  """Defines an Identity and Access Management (IAM) policy. It is used to
+  specify access control policies for Cloud Platform resources.   A `Policy`
+  consists of a list of `bindings`. A `Binding` binds a list of `members` to a
+  `role`, where the members can be user accounts, Google groups, Google
+  domains, and service accounts. A `role` is a named list of permissions
+  defined by IAM.  **Example**      {       "bindings": [         {
+  "role": "roles/owner",           "members": [
+  "user:mike@example.com",             "group:admins@example.com",
+  "domain:google.com",             "serviceAccount:my-other-
+  app@appspot.gserviceaccount.com",           ]         },         {
+  "role": "roles/viewer",           "members": ["user:sean@example.com"]
+  }       ]     }  For a description of IAM and its features, see the [IAM
+  developer's guide](https://cloud.google.com/iam).
+
+  Fields:
+    auditConfigs: Specifies audit logging configs for "data access". "data
+      access": generally refers to data reads/writes and admin reads. "admin
+      activity": generally refers to admin writes.  Note: `AuditConfig`
+      doesn't apply to "admin activity", which always enables audit logging.
+    bindings: Associates a list of `members` to a `role`. Multiple `bindings`
+      must not be specified for the same `role`. `bindings` with no members
+      will result in an error.
+    etag: `etag` is used for optimistic concurrency control as a way to help
+      prevent simultaneous updates of a policy from overwriting each other. It
+      is strongly suggested that systems make use of the `etag` in the read-
+      modify-write cycle to perform policy updates in order to avoid race
+      conditions: An `etag` is returned in the response to `getIamPolicy`, and
+      systems are expected to put that etag in the request to `setIamPolicy`
+      to ensure that their change will be applied to the same version of the
+      policy.  If no `etag` is provided in the call to `setIamPolicy`, then
+      the existing policy is overwritten blindly.
+    iamOwned: A boolean attribute.
+    rules: If more than one rule is specified, the rules are applied in the
+      following manner: - All matching LOG rules are always applied. - If any
+      DENY/DENY_WITH_LOG rule matches, permission is denied.   Logging will be
+      applied if one or more matching rule requires logging. - Otherwise, if
+      any ALLOW/ALLOW_WITH_LOG rule matches, permission is   granted.
+      Logging will be applied if one or more matching rule requires logging. -
+      Otherwise, if no rule applies, permission is denied.
+    version: Version of the `Policy`. The default version is 0.
+  """
+
+  auditConfigs = _messages.MessageField('AuditConfig', 1, repeated=True)
+  bindings = _messages.MessageField('Binding', 2, repeated=True)
+  etag = _messages.BytesField(3)
+  iamOwned = _messages.BooleanField(4)
+  rules = _messages.MessageField('Rule', 5, repeated=True)
+  version = _messages.IntegerField(6, variant=_messages.Variant.INT32)
+
+
+class PolicyDetail(_messages.Message):
+  """A policy and its full resource path.
+
+  Fields:
+    fullResourcePath: The full resource path of the policy e.g.,
+      `//dataflow.googleapis.com/projects/../jobs/..`. Note that a resource
+      and its inherited resource have different `full_resource_path`.
+    policy: The policy of a `resource/project/folder`.
+  """
+
+  fullResourcePath = _messages.StringField(1)
+  policy = _messages.MessageField('Policy', 2)
+
+
+class QueryGrantableRolesRequest(_messages.Message):
+  """The grantable role query request.
+
+  Fields:
+    fullResourceName: Required. The full resource name to query from the list
+      of grantable roles.  The name follows the Google Cloud Platform resource
+      format. For example, a Cloud Platform project with id `my-project` will
+      be named `//cloudresourcemanager.googleapis.com/projects/my-project`.
+  """
+
+  fullResourceName = _messages.StringField(1)
+
+
+class QueryGrantableRolesResponse(_messages.Message):
+  """The grantable role query response.
+
+  Fields:
+    roles: The list of matching roles.
+  """
+
+  roles = _messages.MessageField('Role', 1, repeated=True)
+
+
+class Role(_messages.Message):
+  """A role in the Identity and Access Management API.
+
+  Fields:
+    apiTokens: A string attribute.
+    description: Optional.  A human-readable description for the role.
+    name: The name of the role.  Examples of roles names are: `roles/editor`,
+      `roles/viewer` and `roles/logging.viewer`.
+    title: Optional.  A human-readable title for the role.  Typically this is
+      limited to 100 UTF-8 bytes.
+  """
+
+  apiTokens = _messages.StringField(1, repeated=True)
+  description = _messages.StringField(2)
+  name = _messages.StringField(3)
+  title = _messages.StringField(4)
+
+
+class Rule(_messages.Message):
+  """A rule to be applied in a Policy.
+
+  Enums:
+    ActionValueValuesEnum: Required
+
+  Fields:
+    action: Required
+    conditions: Additional restrictions that must be met
+    description: Human-readable description of the rule.
+    in_: If one or more 'in' clauses are specified, the rule matches if the
+      PRINCIPAL/AUTHORITY_SELECTOR is in at least one of these entries.
+    logConfig: The config returned to callers of tech.iam.IAM.CheckPolicy for
+      any entries that match the LOG action.
+    notIn: If one or more 'not_in' clauses are specified, the rule matches if
+      the PRINCIPAL/AUTHORITY_SELECTOR is in none of the entries. The format
+      for in and not_in entries is the same as for members in a Binding (see
+      google/iam/v1/policy.proto).
+    permissions: A permission is a string of form '<service>.<resource
+      type>.<verb>' (e.g., 'storage.buckets.list'). A value of '*' matches all
+      permissions, and a verb part of '*' (e.g., 'storage.buckets.*') matches
+      all verbs.
+  """
+
+  class ActionValueValuesEnum(_messages.Enum):
+    """Required
+
+    Values:
+      NO_ACTION: Default no action.
+      ALLOW: Matching 'Entries' grant access.
+      ALLOW_WITH_LOG: Matching 'Entries' grant access and the caller promises
+        to log the request per the returned log_configs.
+      DENY: Matching 'Entries' deny access.
+      DENY_WITH_LOG: Matching 'Entries' deny access and the caller promises to
+        log the request per the returned log_configs.
+      LOG: Matching 'Entries' tell IAM.Check callers to generate logs.
+    """
+    NO_ACTION = 0
+    ALLOW = 1
+    ALLOW_WITH_LOG = 2
+    DENY = 3
+    DENY_WITH_LOG = 4
+    LOG = 5
+
+  action = _messages.EnumField('ActionValueValuesEnum', 1)
+  conditions = _messages.MessageField('Condition', 2, repeated=True)
+  description = _messages.StringField(3)
+  in_ = _messages.StringField(4, repeated=True)
+  logConfig = _messages.MessageField('LogConfig', 5, repeated=True)
+  notIn = _messages.StringField(6, repeated=True)
+  permissions = _messages.StringField(7, repeated=True)
+
+
+class ServiceAccount(_messages.Message):
+  """A service account in the Identity and Access Management API.  To create a
+  service account, specify the `project_id` and the `account_id` for the
+  account.  The `account_id` is unique within the project, and is used to
+  generate the service account email address and a stable `unique_id`.  All
+  other methods can identify the service account using the format
+  `projects/{project}/serviceAccounts/{account}`. Using `-` as a wildcard for
+  the project will infer the project from the account. The `account` value can
+  be the `email` address or the `unique_id` of the service account.
+
+  Fields:
+    description: Optional. A user-specified opaque description of the service
+      account.
+    displayName: Optional. A user-specified description of the service
+      account.  Must be fewer than 100 UTF-8 bytes.
+    email: @OutputOnly The email address of the service account.
+    etag: Used to perform a consistent read-modify-write.
+    name: The resource name of the service account in the following format:
+      `projects/{project}/serviceAccounts/{account}`.  Requests using `-` as a
+      wildcard for the project will infer the project from the `account` and
+      the `account` value can be the `email` address or the `unique_id` of the
+      service account.  In responses the resource name will always be in the
+      format `projects/{project}/serviceAccounts/{email}`.
+    oauth2ClientId: @OutputOnly. The OAuth2 client id for the service account.
+      This is used in conjunction with the OAuth2 clientconfig API to make
+      three legged OAuth2 (3LO) flows to access the data of Google users.
+    projectId: @OutputOnly The id of the project that owns the service
+      account.
+    uniqueId: @OutputOnly The unique and stable id of the service account.
+  """
+
+  description = _messages.StringField(1)
+  displayName = _messages.StringField(2)
+  email = _messages.StringField(3)
+  etag = _messages.BytesField(4)
+  name = _messages.StringField(5)
+  oauth2ClientId = _messages.StringField(6)
+  projectId = _messages.StringField(7)
+  uniqueId = _messages.StringField(8)
+
+
+class ServiceAccountKey(_messages.Message):
+  """Represents a service account key.  A service account has two sets of key-
+  pairs: user-managed, and system-managed.  User-managed key-pairs can be
+  created and deleted by users.  Users are responsible for rotating these keys
+  periodically to ensure security of their service accounts.  Users retain the
+  private key of these key-pairs, and Google retains ONLY the public key.
+  System-managed key-pairs are managed automatically by Google, and rotated
+  daily without user intervention.  The private key never leaves Google's
+  servers to maximize security.  Public keys for all service accounts are also
+  published at the OAuth2 Service Account API.
+
+  Enums:
+    PrivateKeyTypeValueValuesEnum: The output format for the private key. Only
+      provided in `CreateServiceAccountKey` responses, not in
+      `GetServiceAccountKey` or `ListServiceAccountKey` responses.  Google
+      never exposes system-managed private keys, and never retains user-
+      managed private keys.
+
+  Fields:
+    name: The resource name of the service account key in the following format
+      `projects/{project}/serviceAccounts/{account}/keys/{key}`.
+    privateKeyData: The private key data. Only provided in
+      `CreateServiceAccountKey` responses.
+    privateKeyType: The output format for the private key. Only provided in
+      `CreateServiceAccountKey` responses, not in `GetServiceAccountKey` or
+      `ListServiceAccountKey` responses.  Google never exposes system-managed
+      private keys, and never retains user-managed private keys.
+    publicKeyData: The public key data. Only provided in
+      `GetServiceAccountKey` responses.
+    validAfterTime: The key can be used after this timestamp.
+    validBeforeTime: The key can be used before this timestamp.
+  """
+
+  class PrivateKeyTypeValueValuesEnum(_messages.Enum):
+    """The output format for the private key. Only provided in
+    `CreateServiceAccountKey` responses, not in `GetServiceAccountKey` or
+    `ListServiceAccountKey` responses.  Google never exposes system-managed
+    private keys, and never retains user-managed private keys.
+
+    Values:
+      TYPE_UNSPECIFIED: Unspecified. Equivalent to
+        `TYPE_GOOGLE_CREDENTIALS_FILE`.
+      TYPE_PKCS12_FILE: PKCS12 format. The password for the PKCS12 file is
+        `notasecret`. For more information, see
+        https://tools.ietf.org/html/rfc7292.
+      TYPE_GOOGLE_CREDENTIALS_FILE: Google Credentials File format.
+    """
+    TYPE_UNSPECIFIED = 0
+    TYPE_PKCS12_FILE = 1
+    TYPE_GOOGLE_CREDENTIALS_FILE = 2
+
+  name = _messages.StringField(1)
+  privateKeyData = _messages.BytesField(2)
+  privateKeyType = _messages.EnumField('PrivateKeyTypeValueValuesEnum', 3)
+  publicKeyData = _messages.BytesField(4)
+  validAfterTime = _messages.StringField(5)
+  validBeforeTime = _messages.StringField(6)
+
+
+class SetIamPolicyRequest(_messages.Message):
+  """Request message for `SetIamPolicy` method.
+
+  Fields:
+    policy: REQUIRED: The complete policy to be applied to the `resource`. The
+      size of the policy is limited to a few 10s of KB. An empty policy is a
+      valid policy but certain Cloud Platform services (such as Projects)
+      might reject them.
+  """
+
+  policy = _messages.MessageField('Policy', 1)
+
+
+class SignBlobRequest(_messages.Message):
+  """The service account sign blob request.
+
+  Fields:
+    bytesToSign: The bytes to sign.
+  """
+
+  bytesToSign = _messages.BytesField(1)
+
+
+class SignBlobResponse(_messages.Message):
+  """The service account sign blob response.
+
+  Fields:
+    keyId: The id of the key used to sign the blob.
+    signature: The signed blob.
+  """
+
+  keyId = _messages.StringField(1)
+  signature = _messages.BytesField(2)
+
+
+class SignJwtRequest(_messages.Message):
+  """The service account sign JWT request.
+
+  Fields:
+    payload: The JWT payload to sign, a JSON JWT Claim set.
+  """
+
+  payload = _messages.StringField(1)
+
+
+class SignJwtResponse(_messages.Message):
+  """The service account sign JWT response.
+
+  Fields:
+    keyId: The id of the key used to sign the JWT.
+    signedJwt: The signed JWT.
+  """
+
+  keyId = _messages.StringField(1)
+  signedJwt = _messages.StringField(2)
+
+
+class StandardQueryParameters(_messages.Message):
+  """Query parameters accepted by all methods.
+
+  Enums:
+    FXgafvValueValuesEnum: V1 error format.
+    AltValueValuesEnum: Data format for response.
+
+  Fields:
+    f__xgafv: V1 error format.
+    access_token: OAuth access token.
+    alt: Data format for response.
+    bearer_token: OAuth bearer token.
+    callback: JSONP
+    fields: Selector specifying which fields to include in a partial response.
+    key: API key. Your API key identifies your project and provides you with
+      API access, quota, and reports. Required unless you provide an OAuth 2.0
+      token.
+    oauth_token: OAuth 2.0 token for the current user.
+    pp: Pretty-print response.
+    prettyPrint: Returns response with indentations and line breaks.
+    quotaUser: Available to use for quota purposes for server-side
+      applications. Can be any arbitrary string assigned to a user, but should
+      not exceed 40 characters.
+    trace: A tracing token of the form "token:<tokenid>" to include in api
+      requests.
+    uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
+    upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
+  """
+
+  class AltValueValuesEnum(_messages.Enum):
+    """Data format for response.
+
+    Values:
+      json: Responses with Content-Type of application/json
+      media: Media download with context-dependent Content-Type
+      proto: Responses with Content-Type of application/x-protobuf
+    """
+    json = 0
+    media = 1
+    proto = 2
+
+  class FXgafvValueValuesEnum(_messages.Enum):
+    """V1 error format.
+
+    Values:
+      _1: v1 error format
+      _2: v2 error format
+    """
+    _1 = 0
+    _2 = 1
+
+  f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
+  access_token = _messages.StringField(2)
+  alt = _messages.EnumField('AltValueValuesEnum', 3, default=u'json')
+  bearer_token = _messages.StringField(4)
+  callback = _messages.StringField(5)
+  fields = _messages.StringField(6)
+  key = _messages.StringField(7)
+  oauth_token = _messages.StringField(8)
+  pp = _messages.BooleanField(9, default=True)
+  prettyPrint = _messages.BooleanField(10, default=True)
+  quotaUser = _messages.StringField(11)
+  trace = _messages.StringField(12)
+  uploadType = _messages.StringField(13)
+  upload_protocol = _messages.StringField(14)
+
+
+class TestIamPermissionsRequest(_messages.Message):
+  """Request message for `TestIamPermissions` method.
+
+  Fields:
+    permissions: The set of permissions to check for the `resource`.
+      Permissions with wildcards (such as '*' or 'storage.*') are not allowed.
+      For more information see IAM Overview.
+  """
+
+  permissions = _messages.StringField(1, repeated=True)
+
+
+class TestIamPermissionsResponse(_messages.Message):
+  """Response message for `TestIamPermissions` method.
+
+  Fields:
+    permissions: A subset of `TestPermissionsRequest.permissions` that the
+      caller is allowed.
+  """
+
+  permissions = _messages.StringField(1, repeated=True)
+
+
+encoding.AddCustomJsonFieldMapping(
+    Rule, 'in_', 'in',
+    package=u'iam')
+encoding.AddCustomJsonFieldMapping(
+    StandardQueryParameters, 'f__xgafv', '$.xgafv',
+    package=u'iam')
+encoding.AddCustomJsonEnumMapping(
+    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1',
+    package=u'iam')
+encoding.AddCustomJsonEnumMapping(
+    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2',
+    package=u'iam')
diff --git a/samples/regenerate_samples.py b/samples/regenerate_samples.py
new file mode 100644
index 0000000..9d41795
--- /dev/null
+++ b/samples/regenerate_samples.py
@@ -0,0 +1,54 @@
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Script to regenerate samples with latest client generator."""
+
+import os
+import subprocess
+
+_GEN_CLIENT_BINARY = 'gen_client'
+
+_SAMPLES = [
+    'bigquery_sample/bigquery_v2.json',
+    'dns_sample/dns_v1.json',
+    'iam_sample/iam_v1.json',
+    'fusiontables_sample/fusiontables_v1.json',
+    'servicemanagement_sample/servicemanagement_v1.json',
+    'storage_sample/storage_v1.json',
+]
+
+
+def _Generate(samples):
+    for sample in samples:
+        sample_dir, sample_doc = os.path.split(sample)
+        name, ext = os.path.splitext(sample_doc)
+        if ext != '.json':
+            raise RuntimeError('Expected .json discovery doc [{0}]'
+                               .format(sample))
+        api_name, api_version = name.split('_')
+        args = [
+            _GEN_CLIENT_BINARY,
+            '--infile', sample,
+            '--init-file', 'empty',
+            '--outdir={0}'.format(os.path.join(sample_dir, name)),
+            '--overwrite',
+            '--root_package',
+            'samples.{0}_sample.{0}_{1}'.format(api_name, api_version),
+            'client',
+        ]
+        subprocess.check_call(args)
+
+
+if __name__ == '__main__':
+    _Generate(_SAMPLES)
diff --git a/samples/servicemanagement_sample/__init__.py b/samples/servicemanagement_sample/__init__.py
new file mode 100644
index 0000000..58e0d91
--- /dev/null
+++ b/samples/servicemanagement_sample/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/samples/servicemanagement_sample/messages_test.py b/samples/servicemanagement_sample/messages_test.py
new file mode 100644
index 0000000..a62dbd7
--- /dev/null
+++ b/samples/servicemanagement_sample/messages_test.py
@@ -0,0 +1,56 @@
+#
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Test for generated servicemanagement messages module."""
+
+import unittest2
+
+from apitools.base.py import extra_types
+
+from samples.servicemanagement_sample.servicemanagement_v1 \
+    import servicemanagement_v1_messages as messages  # nopep8
+
+
+class MessagesTest(unittest2.TestCase):
+
+    def testInstantiateMessageWithAdditionalProperties(self):
+        PROJECT_NAME = 'test-project'
+        SERVICE_NAME = 'test-service'
+        SERVICE_VERSION = '1.0'
+
+        prop = messages.Operation.ResponseValue.AdditionalProperty
+        messages.Operation(
+            name='operation-12345-67890',
+            done=False,
+            response=messages.Operation.ResponseValue(
+                additionalProperties=[
+                    prop(key='producerProjectId',
+                         value=extra_types.JsonValue(
+                             string_value=PROJECT_NAME)),
+                    prop(key='serviceName',
+                         value=extra_types.JsonValue(
+                             string_value=SERVICE_NAME)),
+                    prop(key='serviceConfig',
+                         value=extra_types.JsonValue(
+                             object_value=extra_types.JsonObject(
+                                 properties=[
+                                     extra_types.JsonObject.Property(
+                                         key='id',
+                                         value=extra_types.JsonValue(
+                                             string_value=SERVICE_VERSION)
+                                     )
+                                 ])
+                         ))
+                ]))
diff --git a/samples/servicemanagement_sample/servicemanagement_v1.json b/samples/servicemanagement_sample/servicemanagement_v1.json
new file mode 100644
index 0000000..55e2518
--- /dev/null
+++ b/samples/servicemanagement_sample/servicemanagement_v1.json
@@ -0,0 +1,3382 @@
+{
+  "kind": "discovery#restDescription",
+  "discoveryVersion": "v1",
+  "id": "servicemanagement:v1",
+  "name": "servicemanagement",
+  "version": "v1",
+  "revision": "0",
+  "title": "Google Service Management API",
+  "description": "The service management API for Google Cloud Platform",
+  "ownerDomain": "google.com",
+  "ownerName": "Google",
+  "icons": {
+    "x16": "http://www.google.com/images/icons/product/search-16.gif",
+    "x32": "http://www.google.com/images/icons/product/search-32.gif"
+   },
+  "documentationLink": "https://cloud.google.com/service-management/",
+  "protocol": "rest",
+  "rootUrl": "https://servicemanagement.googleapis.com/",
+  "servicePath": "",
+  "baseUrl": "https://servicemanagement.googleapis.com/",
+  "batchPath": "batch",
+  "parameters": {
+    "access_token": {
+      "type": "string",
+      "description": "OAuth access token.",
+      "location": "query"
+    },
+    "alt": {
+      "type": "string",
+      "description": "Data format for response.",
+      "default": "json",
+      "enum": [
+        "json",
+        "media",
+        "proto"
+      ],
+      "enumDescriptions": [
+        "Responses with Content-Type of application/json",
+        "Media download with context-dependent Content-Type",
+        "Responses with Content-Type of application/x-protobuf"
+      ],
+      "location": "query"
+    },
+    "bearer_token": {
+      "type": "string",
+      "description": "OAuth bearer token.",
+      "location": "query"
+    },
+    "callback": {
+      "type": "string",
+      "description": "JSONP",
+      "location": "query"
+    },
+    "fields": {
+      "type": "string",
+      "description": "Selector specifying which fields to include in a partial response.",
+      "location": "query"
+    },
+    "key": {
+      "type": "string",
+      "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
+      "location": "query"
+    },
+    "oauth_token": {
+      "type": "string",
+      "description": "OAuth 2.0 token for the current user.",
+      "location": "query"
+    },
+    "pp": {
+      "type": "boolean",
+      "description": "Pretty-print response.",
+      "default": "true",
+      "location": "query"
+    },
+    "prettyPrint": {
+      "type": "boolean",
+      "description": "Returns response with indentations and line breaks.",
+      "default": "true",
+      "location": "query"
+    },
+    "quotaUser": {
+      "type": "string",
+      "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.",
+      "location": "query"
+    },
+    "upload_protocol": {
+      "type": "string",
+      "description": "Upload protocol for media (e.g. \"raw\", \"multipart\").",
+      "location": "query"
+    },
+    "uploadType": {
+      "type": "string",
+      "description": "Legacy upload protocol for media (e.g. \"media\", \"multipart\").",
+      "location": "query"
+    },
+    "$.xgafv": {
+      "type": "string",
+      "description": "V1 error format.",
+      "enum": [
+        "1",
+        "2"
+      ],
+      "enumDescriptions": [
+        "v1 error format",
+        "v2 error format"
+      ],
+      "location": "query"
+    }
+  },
+  "auth": {
+    "oauth2": {
+      "scopes": {
+        "https://www.googleapis.com/auth/cloud-platform": {
+          "description": "View and manage your data across Google Cloud Platform services"
+        },
+        "https://www.googleapis.com/auth/service.management": {
+          "description": "Manage your Google API service configuration"
+        }
+      }
+    }
+  },
+  "schemas": {
+    "ListServicesResponse": {
+      "id": "ListServicesResponse",
+      "description": "Response message for `ListServices` method.",
+      "type": "object",
+      "properties": {
+        "services": {
+          "description": "The results of the query.",
+          "type": "array",
+          "items": {
+            "$ref": "ManagedService"
+          }
+        },
+        "nextPageToken": {
+          "description": "Token that can be passed to `ListServices` to resume a paginated query.",
+          "type": "string"
+        }
+      }
+    },
+    "ManagedService": {
+      "id": "ManagedService",
+      "description": "The full representation of an API Service that is managed by the\n`ServiceManager` API.  Includes both the service configuration, as well as\nother control plane deployment related information.",
+      "type": "object",
+      "properties": {
+        "serviceName": {
+          "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  This name must match `google.api.Service.name` in the\n`service_config` field.",
+          "type": "string"
+        },
+        "producerProjectId": {
+          "description": "ID of the project that produces and owns this service.",
+          "type": "string"
+        },
+        "generation": {
+          "description": "A server-assigned monotonically increasing number that changes whenever a\nmutation is made to the `ManagedService` or any of its components via the\n`ServiceManager` API.",
+          "type": "string",
+          "format": "int64"
+        },
+        "serviceConfig": {
+          "description": "The service's generated configuration.",
+          "$ref": "Service"
+        },
+        "configSource": {
+          "description": "User-supplied source configuration for the service. This is distinct from\nthe generated configuration provided in `google.api.Service`.\nThis is NOT populated on GetService calls at the moment.\nNOTE: Any upsert operation that contains both a service_config\nand a config_source is considered invalid and will result in\nan error being returned.",
+          "$ref": "ConfigSource"
+        },
+        "operations": {
+          "description": "Read-only view of pending operations affecting this resource, if requested.",
+          "type": "array",
+          "items": {
+            "$ref": "Operation"
+          }
+        },
+        "projectSettings": {
+          "description": "Read-only view of settings for a particular consumer project, if requested.",
+          "$ref": "ProjectSettings"
+        }
+      }
+    },
+    "Service": {
+      "id": "Service",
+      "description": "`Service` is the root object of the configuration schema. It\ndescribes basic information like the name of the service and the\nexposed API interfaces, and delegates other aspects to configuration\nsub-sections.\n\nExample:\n\n    type: google.api.Service\n    config_version: 1\n    name: calendar.googleapis.com\n    title: Google Calendar API\n    apis:\n    - name: google.calendar.Calendar\n    backend:\n      rules:\n      - selector: \"*\"\n        address: calendar.example.com",
+      "type": "object",
+      "properties": {
+        "configVersion": {
+          "description": "The version of the service configuration. The config version may\ninfluence interpretation of the configuration, for example, to\ndetermine defaults. This is documented together with applicable\noptions. The current default for the config version itself is `3`.",
+          "type": "integer",
+          "format": "uint32"
+        },
+        "name": {
+          "description": "The DNS address at which this service is available,\ne.g. `calendar.googleapis.com`.",
+          "type": "string"
+        },
+        "id": {
+          "description": "A unique ID for a specific instance of this message, typically assigned\nby the client for tracking purpose. If empty, the server may choose to\ngenerate one instead.",
+          "type": "string"
+        },
+        "title": {
+          "description": "The product title associated with this service.",
+          "type": "string"
+        },
+        "producerProjectId": {
+          "description": "The id of the Google developer project that owns the service.\nMembers of this project can manage the service configuration,\nmanage consumption of the service, etc.",
+          "type": "string"
+        },
+        "apis": {
+          "description": "A list of API interfaces exported by this service. Only the `name` field\nof the google.protobuf.Api needs to be provided by the configuration\nauthor, as the remaining fields will be derived from the IDL during the\nnormalization process. It is an error to specify an API interface here\nwhich cannot be resolved against the associated IDL files.",
+          "type": "array",
+          "items": {
+            "$ref": "Api"
+          }
+        },
+        "types": {
+          "description": "A list of all proto message types included in this API service.\nTypes referenced directly or indirectly by the `apis` are\nautomatically included.  Messages which are not referenced but\nshall be included, such as types used by the `google.protobuf.Any` type,\nshould be listed here by name. Example:\n\n    types:\n    - name: google.protobuf.Int32",
+          "type": "array",
+          "items": {
+            "$ref": "Type"
+          }
+        },
+        "enums": {
+          "description": "A list of all enum types included in this API service.  Enums\nreferenced directly or indirectly by the `apis` are automatically\nincluded.  Enums which are not referenced but shall be included\nshould be listed here by name. Example:\n\n    enums:\n    - name: google.someapi.v1.SomeEnum",
+          "type": "array",
+          "items": {
+            "$ref": "Enum"
+          }
+        },
+        "documentation": {
+          "description": "Additional API documentation.",
+          "$ref": "Documentation"
+        },
+        "visibility": {
+          "description": "API visibility configuration.",
+          "$ref": "Visibility"
+        },
+        "backend": {
+          "description": "API backend configuration.",
+          "$ref": "Backend"
+        },
+        "http": {
+          "description": "HTTP configuration.",
+          "$ref": "Http"
+        },
+        "quota": {
+          "description": "Quota configuration.",
+          "$ref": "Quota"
+        },
+        "authentication": {
+          "description": "Auth configuration.",
+          "$ref": "Authentication"
+        },
+        "context": {
+          "description": "Context configuration.",
+          "$ref": "Context"
+        },
+        "usage": {
+          "description": "Configuration controlling usage of this service.",
+          "$ref": "Usage"
+        },
+        "customError": {
+          "description": "Custom error configuration.",
+          "$ref": "CustomError"
+        },
+        "projectProperties": {
+          "description": "Configuration of per-consumer project properties.",
+          "$ref": "ProjectProperties"
+        },
+        "control": {
+          "description": "Configuration for the service control plane.",
+          "$ref": "Control"
+        },
+        "logs": {
+          "description": "Defines the logs used by this service.",
+          "type": "array",
+          "items": {
+            "$ref": "LogDescriptor"
+          }
+        },
+        "metrics": {
+          "description": "Defines the metrics used by this service.",
+          "type": "array",
+          "items": {
+            "$ref": "MetricDescriptor"
+          }
+        },
+        "monitoredResources": {
+          "description": "Defines the monitored resources used by this service. This is required\nby the Service.monitoring and Service.logging configurations.\n",
+          "type": "array",
+          "items": {
+            "$ref": "MonitoredResourceDescriptor"
+          }
+        },
+        "billing": {
+          "description": "Billing configuration of the service.",
+          "$ref": "Billing"
+        },
+        "logging": {
+          "description": "Logging configuration of the service.",
+          "$ref": "Logging"
+        },
+        "monitoring": {
+          "description": "Monitoring configuration of the service.",
+          "$ref": "Monitoring"
+        },
+        "systemParameters": {
+          "description": "Configuration for system parameters.",
+          "$ref": "SystemParameters"
+        },
+        "systemTypes": {
+          "description": "A list of all proto message types included in this API service.\nIt serves similar purpose as [google.api.Service.types], except that\nthese types are not needed by user-defined APIs. Therefore, they will not\nshow up in the generated discovery doc. This field should only be used\nto define system APIs in ESF.",
+          "type": "array",
+          "items": {
+            "$ref": "Type"
+          }
+        }
+      }
+    },
+    "Api": {
+      "id": "Api",
+      "description": "Api is a light-weight descriptor for a protocol buffer service.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The fully qualified name of this api, including package name\nfollowed by the api's simple name.",
+          "type": "string"
+        },
+        "methods": {
+          "description": "The methods of this api, in unspecified order.",
+          "type": "array",
+          "items": {
+            "$ref": "Method"
+          }
+        },
+        "options": {
+          "description": "Any metadata attached to the API.",
+          "type": "array",
+          "items": {
+            "$ref": "Option"
+          }
+        },
+        "version": {
+          "description": "A version string for this api. If specified, must have the form\n`major-version.minor-version`, as in `1.10`. If the minor version\nis omitted, it defaults to zero. If the entire version field is\nempty, the major version is derived from the package name, as\noutlined below. If the field is not empty, the version in the\npackage name will be verified to be consistent with what is\nprovided here.\n\nThe versioning schema uses [semantic\nversioning](http:\/\/semver.org) where the major version number\nindicates a breaking change and the minor version an additive,\nnon-breaking change. Both version numbers are signals to users\nwhat to expect from different versions, and should be carefully\nchosen based on the product plan.\n\nThe major version is also reflected in the package name of the\nAPI, which must end in `v<major-version>`, as in\n`google.feature.v1`. For major versions 0 and 1, the suffix can\nbe omitted. Zero major versions must only be used for\nexperimental, none-GA apis.\n\n",
+          "type": "string"
+        },
+        "sourceContext": {
+          "description": "Source context for the protocol buffer service represented by this\nmessage.",
+          "$ref": "SourceContext"
+        },
+        "mixins": {
+          "description": "Included APIs. See Mixin.",
+          "type": "array",
+          "items": {
+            "$ref": "Mixin"
+          }
+        },
+        "syntax": {
+          "description": "The source syntax of the service.",
+          "enumDescriptions": [
+            "Syntax `proto2`.",
+            "Syntax `proto3`."
+          ],
+          "type": "string",
+          "enum": [
+            "SYNTAX_PROTO2",
+            "SYNTAX_PROTO3"
+          ]
+        }
+      }
+    },
+    "Method": {
+      "id": "Method",
+      "description": "Method represents a method of an api.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The simple name of this method.",
+          "type": "string"
+        },
+        "requestTypeUrl": {
+          "description": "A URL of the input message type.",
+          "type": "string"
+        },
+        "requestStreaming": {
+          "description": "If true, the request is streamed.",
+          "type": "boolean"
+        },
+        "responseTypeUrl": {
+          "description": "The URL of the output message type.",
+          "type": "string"
+        },
+        "responseStreaming": {
+          "description": "If true, the response is streamed.",
+          "type": "boolean"
+        },
+        "options": {
+          "description": "Any metadata attached to the method.",
+          "type": "array",
+          "items": {
+            "$ref": "Option"
+          }
+        },
+        "syntax": {
+          "description": "The source syntax of this method.",
+          "enumDescriptions": [
+            "Syntax `proto2`.",
+            "Syntax `proto3`."
+          ],
+          "type": "string",
+          "enum": [
+            "SYNTAX_PROTO2",
+            "SYNTAX_PROTO3"
+          ]
+        }
+      }
+    },
+    "Option": {
+      "id": "Option",
+      "description": "A protocol buffer option, which can be attached to a message, field,\nenumeration, etc.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The option's name. For example, `\"java_package\"`.",
+          "type": "string"
+        },
+        "value": {
+          "description": "The option's value. For example, `\"com.google.protobuf\"`.",
+          "type": "object",
+          "additionalProperties": {
+            "type": "any",
+            "description": "Properties of the object. Contains field @type with type URL."
+          }
+        }
+      }
+    },
+    "SourceContext": {
+      "id": "SourceContext",
+      "description": "`SourceContext` represents information about the source of a\nprotobuf element, like the file in which it is defined.",
+      "type": "object",
+      "properties": {
+        "fileName": {
+          "description": "The path-qualified name of the .proto file that contained the associated\nprotobuf element.  For example: `\"google\/protobuf\/source_context.proto\"`.",
+          "type": "string"
+        }
+      }
+    },
+    "Mixin": {
+      "id": "Mixin",
+      "description": "Declares an API to be included in this API. The including API must\nredeclare all the methods from the included API, but documentation\nand options are inherited as follows:\n\n- If after comment and whitespace stripping, the documentation\n  string of the redeclared method is empty, it will be inherited\n  from the original method.\n\n- Each annotation belonging to the service config (http,\n  visibility) which is not set in the redeclared method will be\n  inherited.\n\n- If an http annotation is inherited, the path pattern will be\n  modified as follows. Any version prefix will be replaced by the\n  version of the including API plus the root path if specified.\n\nExample of a simple mixin:\n\n    package google.acl.v1;\n    service AccessControl {\n      \/\/ Get the underlying ACL object.\n      rpc GetAcl(GetAclRequest) returns (Acl) {\n        option (google.api.http).get = \"\/v1\/{resource=**}:getAcl\";\n      }\n    }\n\n    package google.storage.v2;\n    service Storage {\n      \/\/       rpc GetAcl(GetAclRequest) returns (Acl);\n\n      \/\/ Get a data record.\n      rpc GetData(GetDataRequest) returns (Data) {\n        option (google.api.http).get = \"\/v2\/{resource=**}\";\n      }\n    }\n\nExample of a mixin configuration:\n\n    apis:\n    - name: google.storage.v2.Storage\n      mixins:\n      - name: google.acl.v1.AccessControl\n\nThe mixin construct implies that all methods in `AccessControl` are\nalso declared with same name and request\/response types in\n`Storage`. 
A documentation generator or annotation processor will\nsee the effective `Storage.GetAcl` method after inherting\ndocumentation and annotations as follows:\n\n    service Storage {\n      \/\/ Get the underlying ACL object.\n      rpc GetAcl(GetAclRequest) returns (Acl) {\n        option (google.api.http).get = \"\/v2\/{resource=**}:getAcl\";\n      }\n      ...\n    }\n\nNote how the version in the path pattern changed from `v1` to `v2`.\n\nIf the `root` field in the mixin is specified, it should be a\nrelative path under which inherited HTTP paths are placed. Example:\n\n    apis:\n    - name: google.storage.v2.Storage\n      mixins:\n      - name: google.acl.v1.AccessControl\n        root: acls\n\nThis implies the following inherited HTTP annotation:\n\n    service Storage {\n      \/\/ Get the underlying ACL object.\n      rpc GetAcl(GetAclRequest) returns (Acl) {\n        option (google.api.http).get = \"\/v2\/acls\/{resource=**}:getAcl\";\n      }\n      ...\n    }",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The fully qualified name of the API which is included.",
+          "type": "string"
+        },
+        "root": {
+          "description": "If non-empty specifies a path under which inherited HTTP paths\nare rooted.",
+          "type": "string"
+        }
+      }
+    },
+    "Type": {
+      "id": "Type",
+      "description": "A protocol buffer message type.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The fully qualified message name.",
+          "type": "string"
+        },
+        "fields": {
+          "description": "The list of fields.",
+          "type": "array",
+          "items": {
+            "$ref": "Field"
+          }
+        },
+        "oneofs": {
+          "description": "The list of types appearing in `oneof` definitions in this type.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "options": {
+          "description": "The protocol buffer options.",
+          "type": "array",
+          "items": {
+            "$ref": "Option"
+          }
+        },
+        "sourceContext": {
+          "description": "The source context.",
+          "$ref": "SourceContext"
+        },
+        "syntax": {
+          "description": "The source syntax.",
+          "enumDescriptions": [
+            "Syntax `proto2`.",
+            "Syntax `proto3`."
+          ],
+          "type": "string",
+          "enum": [
+            "SYNTAX_PROTO2",
+            "SYNTAX_PROTO3"
+          ]
+        }
+      }
+    },
+    "Field": {
+      "id": "Field",
+      "description": "A single field of a message type.",
+      "type": "object",
+      "properties": {
+        "kind": {
+          "description": "The field type.",
+          "enumDescriptions": [
+            "Field type unknown.",
+            "Field type double.",
+            "Field type float.",
+            "Field type int64.",
+            "Field type uint64.",
+            "Field type int32.",
+            "Field type fixed64.",
+            "Field type fixed32.",
+            "Field type bool.",
+            "Field type string.",
+            "Field type group. Proto2 syntax only, and deprecated.",
+            "Field type message.",
+            "Field type bytes.",
+            "Field type uint32.",
+            "Field type enum.",
+            "Field type sfixed32.",
+            "Field type sfixed64.",
+            "Field type sint32.",
+            "Field type sint64."
+          ],
+          "type": "string",
+          "enum": [
+            "TYPE_UNKNOWN",
+            "TYPE_DOUBLE",
+            "TYPE_FLOAT",
+            "TYPE_INT64",
+            "TYPE_UINT64",
+            "TYPE_INT32",
+            "TYPE_FIXED64",
+            "TYPE_FIXED32",
+            "TYPE_BOOL",
+            "TYPE_STRING",
+            "TYPE_GROUP",
+            "TYPE_MESSAGE",
+            "TYPE_BYTES",
+            "TYPE_UINT32",
+            "TYPE_ENUM",
+            "TYPE_SFIXED32",
+            "TYPE_SFIXED64",
+            "TYPE_SINT32",
+            "TYPE_SINT64"
+          ]
+        },
+        "cardinality": {
+          "description": "The field cardinality.",
+          "enumDescriptions": [
+            "For fields with unknown cardinality.",
+            "For optional fields.",
+            "For required fields. Proto2 syntax only.",
+            "For repeated fields."
+          ],
+          "type": "string",
+          "enum": [
+            "CARDINALITY_UNKNOWN",
+            "CARDINALITY_OPTIONAL",
+            "CARDINALITY_REQUIRED",
+            "CARDINALITY_REPEATED"
+          ]
+        },
+        "number": {
+          "description": "The field number.",
+          "type": "integer",
+          "format": "int32"
+        },
+        "name": {
+          "description": "The field name.",
+          "type": "string"
+        },
+        "typeUrl": {
+          "description": "The field type URL, without the scheme, for message or enumeration\ntypes. Example: `\"type.googleapis.com\/google.protobuf.Timestamp\"`.",
+          "type": "string"
+        },
+        "oneofIndex": {
+          "description": "The index of the field type in `Type.oneofs`, for message or enumeration\ntypes. The first type has index 1; zero means the type is not in the list.",
+          "type": "integer",
+          "format": "int32"
+        },
+        "packed": {
+          "description": "Whether to use alternative packed wire representation.",
+          "type": "boolean"
+        },
+        "options": {
+          "description": "The protocol buffer options.",
+          "type": "array",
+          "items": {
+            "$ref": "Option"
+          }
+        },
+        "jsonName": {
+          "description": "The field JSON name.",
+          "type": "string"
+        },
+        "defaultValue": {
+          "description": "The string value of the default value of this field. Proto2 syntax only.",
+          "type": "string"
+        }
+      }
+    },
+    "Enum": {
+      "id": "Enum",
+      "description": "Enum type definition.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "Enum type name.",
+          "type": "string"
+        },
+        "enumvalue": {
+          "description": "Enum value definitions.",
+          "type": "array",
+          "items": {
+            "$ref": "EnumValue"
+          }
+        },
+        "options": {
+          "description": "Protocol buffer options.",
+          "type": "array",
+          "items": {
+            "$ref": "Option"
+          }
+        },
+        "sourceContext": {
+          "description": "The source context.",
+          "$ref": "SourceContext"
+        },
+        "syntax": {
+          "description": "The source syntax.",
+          "enumDescriptions": [
+            "Syntax `proto2`.",
+            "Syntax `proto3`."
+          ],
+          "type": "string",
+          "enum": [
+            "SYNTAX_PROTO2",
+            "SYNTAX_PROTO3"
+          ]
+        }
+      }
+    },
+    "EnumValue": {
+      "id": "EnumValue",
+      "description": "Enum value definition.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "Enum value name.",
+          "type": "string"
+        },
+        "number": {
+          "description": "Enum value number.",
+          "type": "integer",
+          "format": "int32"
+        },
+        "options": {
+          "description": "Protocol buffer options.",
+          "type": "array",
+          "items": {
+            "$ref": "Option"
+          }
+        }
+      }
+    },
+    "Documentation": {
+      "id": "Documentation",
+      "description": "`Documentation` provides the information for describing a service.\n\nExample:\n<pre><code>documentation:\n  summary: >\n    The Google Calendar API gives access\n    to most calendar features.\n  pages:\n  - name: Overview\n    content: &#40;== include google\/foo\/overview.md ==&#41;\n  - name: Tutorial\n    content: &#40;== include google\/foo\/tutorial.md ==&#41;\n    subpages;\n    - name: Java\n      content: &#40;== include google\/foo\/tutorial_java.md ==&#41;\n  rules:\n  - selector: google.calendar.Calendar.Get\n    description: >\n      ...\n  - selector: google.calendar.Calendar.Put\n    description: >\n      ...\n<\/code><\/pre>\nDocumentation is provided in markdown syntax. In addition to\nstandard markdown features, definition lists, tables and fenced\ncode blocks are supported. Section headers can be provided and are\ninterpreted relative to the section nesting of the context where\na documentation fragment is embedded.\n\nDocumentation from the IDL is merged with documentation defined\nvia the config at normalization time, where documentation provided\nby config rules overrides IDL provided.\n\nA number of constructs specific to the API platform are supported\nin documentation text.\n\nIn order to reference a proto element, the following\nnotation can be used:\n<pre><code>&#91;fully.qualified.proto.name]&#91;]<\/code><\/pre>\nTo override the display text used for the link, this can be used:\n<pre><code>&#91;display text]&#91;fully.qualified.proto.name]<\/code><\/pre>\nText can be excluded from doc using the following notation:\n<pre><code>&#40;-- internal comment --&#41;<\/code><\/pre>\nComments can be made conditional using a visibility label. The below\ntext will be only rendered if the `BETA` label is available:\n<pre><code>&#40;--BETA: comment for BETA users --&#41;<\/code><\/pre>\nA few directives are available in documentation. Note that\ndirectives must appear on a single line to be properly\nidentified. 
The `include` directive includes a markdown file from\nan external source:\n<pre><code>&#40;== include path\/to\/file ==&#41;<\/code><\/pre>\nThe `resource_for` directive marks a message to be the resource of\na collection in REST view. If it is not specified, tools attempt\nto infer the resource from the operations in a collection:\n<pre><code>&#40;== resource_for v1.shelves.books ==&#41;<\/code><\/pre>\nThe directive `suppress_warning` does not directly affect documentation\nand is documented together with service config validation.",
+      "type": "object",
+      "properties": {
+        "summary": {
+          "description": "A short summary of what the service does. Can only be provided by\nplain text.",
+          "type": "string"
+        },
+        "pages": {
+          "description": "The top level pages for the documentation set.",
+          "type": "array",
+          "items": {
+            "$ref": "Page"
+          }
+        },
+        "rules": {
+          "description": "Documentation rules for individual elements of the service.",
+          "type": "array",
+          "items": {
+            "$ref": "DocumentationRule"
+          }
+        },
+        "documentationRootUrl": {
+          "description": "The URL to the root of documentation.",
+          "type": "string"
+        },
+        "overview": {
+          "description": "Declares a single overview page. For example:\n<pre><code>documentation:\n  summary: ...\n  overview: &#40;== include overview.md ==&#41;\n<\/code><\/pre>\nThis is a shortcut for the following declaration (using pages style):\n<pre><code>documentation:\n  summary: ...\n  pages:\n  - name: Overview\n    content: &#40;== include overview.md ==&#41;\n<\/code><\/pre>\nNote: you cannot specify both `overview` field and `pages` field.",
+          "type": "string"
+        }
+      }
+    },
+    "Page": {
+      "id": "Page",
+      "description": "Represents a documentation page. A page can contain subpages to represent\nnested documentation set structure.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The name of the page. It will be used as an identity of the page to\ngenerate URI of the page, text of the link to this page in navigation,\netc. The full page name (start from the root page name to this page\nconcatenated with `.`) can be used as reference to the page in your\ndocumentation. For example:\n<pre><code>pages:\n- name: Tutorial\n  content: &#40;== include tutorial.md ==&#41;\n  subpages:\n  - name: Java\n    content: &#40;== include tutorial_java.md ==&#41;\n<\/code><\/pre>\nYou can reference `Java` page using Markdown reference link syntax:\n`Java`.",
+          "type": "string"
+        },
+        "content": {
+          "description": "The Markdown content of the page. You can use <code>&#40;== include {path} ==&#41;<\/code>\nto include content from a Markdown file.",
+          "type": "string"
+        },
+        "subpages": {
+          "description": "Subpages of this page. The order of subpages specified here will be\nhonored in the generated docset.",
+          "type": "array",
+          "items": {
+            "$ref": "Page"
+          }
+        }
+      }
+    },
+    "DocumentationRule": {
+      "id": "DocumentationRule",
+      "description": "A documentation rule provides information about individual API elements.",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "The selector is a comma-separated list of patterns. Each pattern is a\nqualified name of the element which may end in \"*\", indicating a wildcard.\nWildcards are only allowed at the end and for a whole component of the\nqualified name, i.e. \"foo.*\" is ok, but not \"foo.b*\" or \"foo.*.bar\". To\nspecify a default for all applicable elements, the whole pattern \"*\"\nis used.",
+          "type": "string"
+        },
+        "description": {
+          "description": "Description of the selected API(s).",
+          "type": "string"
+        },
+        "deprecationDescription": {
+          "description": "Deprecation description of the selected element(s). It can be provided if an\nelement is marked as `deprecated`.",
+          "type": "string"
+        }
+      }
+    },
+    "Visibility": {
+      "id": "Visibility",
+      "description": "`Visibility` defines restrictions for the visibility of service\nelements.  Restrictions are specified using visibility labels\n(e.g., TRUSTED_TESTER) that are elsewhere linked to users and projects.\n\nUsers and projects can have access to more than one visibility label. The\neffective visibility for multiple labels is the union of each label's\nelements, plus any unrestricted elements.\n\nIf an element and its parents have no restrictions, visibility is\nunconditionally granted.\n\nExample:\n\n    visibility:\n      rules:\n      - selector: google.calendar.Calendar.EnhancedSearch\n        restriction: TRUSTED_TESTER\n      - selector: google.calendar.Calendar.Delegate\n        restriction: GOOGLE_INTERNAL\n\nHere, all methods are publicly visible except for the restricted methods\nEnhancedSearch and Delegate.",
+      "type": "object",
+      "properties": {
+        "rules": {
+          "description": "A list of visibility rules providing visibility configuration for\nindividual API elements.",
+          "type": "array",
+          "items": {
+            "$ref": "VisibilityRule"
+          }
+        },
+        "enforceRuntimeVisibility": {
+          "description": "Controls whether visibility rules are enforced at runtime for requests to\nall APIs and methods.\n\nIf true, requests without method visibility will receive a\nNOT_FOUND error, and any non-visible fields will be scrubbed from\nthe response messages. In service config version 0, the default is false.\nIn later config versions, it's true.\n\nNote, the `enforce_runtime_visibility` specified in a visibility rule\noverrides this setting for the APIs or methods associated with the rule.",
+          "type": "boolean"
+        }
+      }
+    },
+    "VisibilityRule": {
+      "id": "VisibilityRule",
+      "description": "A visibility rule provides visibility configuration for an individual API\nelement.",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects methods, messages, fields, enums, etc. to which this rule applies.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "restriction": {
+          "description": "Lists the visibility labels for this rule. Any of the listed labels grants\nvisibility to the element.\n\nIf a rule has multiple labels, removing one of the labels but not all of\nthem can break clients.\n\nExample:\n\n    visibility:\n      rules:\n      - selector: google.calendar.Calendar.EnhancedSearch\n        restriction: GOOGLE_INTERNAL, TRUSTED_TESTER\n\nRemoving GOOGLE_INTERNAL from this restriction will break clients that\nrely on this method and only had access to it through GOOGLE_INTERNAL.",
+          "type": "string"
+        },
+        "enforceRuntimeVisibility": {
+          "description": "Controls whether visibility is enforced at runtime for requests to an API\nmethod. This setting has meaning only when the selector applies to a method\nor an API.\n\nIf true, requests without method visibility will receive a\nNOT_FOUND error, and any non-visible fields will be scrubbed from\nthe response messages. The default is determined by the value of\ngoogle.api.Visibility.enforce_runtime_visibility.",
+          "type": "boolean"
+        }
+      }
+    },
+    "Backend": {
+      "id": "Backend",
+      "description": "`Backend` defines the backend configuration for a service.",
+      "type": "object",
+      "properties": {
+        "rules": {
+          "description": "A list of backend rules providing configuration for individual API\nelements.",
+          "type": "array",
+          "items": {
+            "$ref": "BackendRule"
+          }
+        }
+      }
+    },
+    "BackendRule": {
+      "id": "BackendRule",
+      "description": "A backend rule provides configuration for an individual API element.",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects the methods to which this rule applies.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "address": {
+          "description": "The address of the API backend.\n",
+          "type": "string"
+        },
+        "deadline": {
+          "description": "The number of seconds to wait for a response from a request.  The\ndefault depends on the deployment context.",
+          "type": "number",
+          "format": "double"
+        }
+      }
+    },
+    "Http": {
+      "id": "Http",
+      "description": "Defines the HTTP configuration for a service. It contains a list of\nHttpRule, each specifying the mapping of an RPC method\nto one or more HTTP REST API methods.",
+      "type": "object",
+      "properties": {
+        "rules": {
+          "description": "A list of HTTP rules for configuring the HTTP REST API methods.",
+          "type": "array",
+          "items": {
+            "$ref": "HttpRule"
+          }
+        }
+      }
+    },
+    "HttpRule": {
+      "id": "HttpRule",
+      "description": "`HttpRule` defines the mapping of an RPC method to one or more HTTP\nREST APIs.  The mapping determines what portions of the request\nmessage are populated from the path, query parameters, or body of\nthe HTTP request.  The mapping is typically specified as an\n`google.api.http` annotation, see \"google\/api\/annotations.proto\"\nfor details.\n\nThe mapping consists of a field specifying the path template and\nmethod kind.  The path template can refer to fields in the request\nmessage, as in the example below which describes a REST GET\noperation on a resource collection of messages:\n\n```proto\nservice Messaging {\n  rpc GetMessage(GetMessageRequest) returns (Message) {\n    option (google.api.http).get = \"\/v1\/messages\/{message_id}\/{sub.subfield}\";\n  }\n}\nmessage GetMessageRequest {\n  message SubMessage {\n    string subfield = 1;\n  }\n  string message_id = 1; \/\/ mapped to the URL\n  SubMessage sub = 2;    \/\/ `sub.subfield` is url-mapped\n}\nmessage Message {\n  string text = 1; \/\/ content of the resource\n}\n```\n\nThis definition enables an automatic, bidrectional mapping of HTTP\nJSON to RPC. Example:\n\nHTTP | RPC\n-----|-----\n`GET \/v1\/messages\/123456\/foo`  | `GetMessage(message_id: \"123456\" sub: SubMessage(subfield: \"foo\"))`\n\nIn general, not only fields but also field paths can be referenced\nfrom a path pattern. Fields mapped to the path pattern cannot be\nrepeated and must have a primitive (non-message) type.\n\nAny fields in the request message which are not bound by the path\npattern automatically become (optional) HTTP query\nparameters. 
Assume the following definition of the request message:\n\n```proto\nmessage GetMessageRequest {\n  message SubMessage {\n    string subfield = 1;\n  }\n  string message_id = 1; \/\/ mapped to the URL\n  int64 revision = 2;    \/\/ becomes a parameter\n  SubMessage sub = 3;    \/\/ `sub.subfield` becomes a parameter\n}\n```\n\nThis enables a HTTP JSON to RPC mapping as below:\n\nHTTP | RPC\n-----|-----\n`GET \/v1\/messages\/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id: \"123456\" revision: 2 sub: SubMessage(subfield: \"foo\"))`\n\nNote that fields which are mapped to HTTP parameters must have a\nprimitive type or a repeated primitive type. Message types are not\nallowed. In the case of a repeated type, the parameter can be\nrepeated in the URL, as in `...?param=A&param=B`.\n\nFor HTTP method kinds which allow a request body, the `body` field\nspecifies the mapping. Consider a REST update method on the\nmessage resource collection:\n\n```proto\nservice Messaging {\n  rpc UpdateMessage(UpdateMessageRequest) returns (Message) {\n    option (google.api.http) = {\n      put: \"\/v1\/messages\/{message_id}\"\n      body: \"message\"\n    };\n  }\n}\nmessage UpdateMessageRequest {\n  string message_id = 1; \/\/ mapped to the URL\n  Message message = 2;   \/\/ mapped to the body\n}\n```\n\nThe following HTTP JSON to RPC mapping is enabled, where the\nrepresentation of the JSON in the request body is determined by\nprotos JSON encoding:\n\nHTTP | RPC\n-----|-----\n`PUT \/v1\/messages\/123456 { \"text\": \"Hi!\" }` | `UpdateMessage(message_id: \"123456\" message { text: \"Hi!\" })`\n\nThe special name `*` can be used in the body mapping to define that\nevery field not bound by the path template should be mapped to the\nrequest body.  
This enables the following alternative definition of\nthe update method:\n\n```proto\nservice Messaging {\n  rpc UpdateMessage(Message) returns (Message) {\n    option (google.api.http) = {\n      put: \"\/v1\/messages\/{message_id}\"\n      body: \"*\"\n    };\n  }\n}\nmessage Message {\n  string message_id = 1;\n  string text = 2;\n}\n```\n\nThe following HTTP JSON to RPC mapping is enabled:\n\nHTTP | RPC\n-----|-----\n`PUT \/v1\/messages\/123456 { \"text\": \"Hi!\" }` | `UpdateMessage(message_id: \"123456\" text: \"Hi!\")`\n\nNote that when using `*` in the body mapping, it is not possible to\nhave HTTP parameters, as all fields not bound by the path end in\nthe body. This makes this option more rarely used in practice of\ndefining REST APIs. The common usage of `*` is in custom methods\nwhich don't use the URL at all for transferring data.\n\nIt is possible to define multiple HTTP methods for one RPC by using\nthe `additional_bindings` option. Example:\n\n```proto\nservice Messaging {\n  rpc GetMessage(GetMessageRequest) returns (Message) {\n    option (google.api.http) = {\n      get: \"\/v1\/messages\/{message_id}\"\n      additional_bindings {\n        get: \"\/v1\/users\/{user_id}\/messages\/{message_id}\"\n      }\n    };\n  }\n}\nmessage GetMessageRequest {\n  string message_id = 1;\n  string user_id = 2;\n}\n```\n\nThis enables the following two alternative HTTP JSON to RPC\nmappings:\n\nHTTP | RPC\n-----|-----\n`GET \/v1\/messages\/123456` | `GetMessage(message_id: \"123456\")`\n`GET \/v1\/users\/me\/messages\/123456` | `GetMessage(user_id: \"me\" message_id: \"123456\")`\n\n# Rules for HTTP mapping\n\nThe rules for mapping HTTP path, query parameters, and body fields\nto the request message are as follows:\n\n1. The `body` field specifies either `*` or a field path, or is\n   omitted. If omitted, it assumes there is no HTTP body.\n2. 
Leaf fields (recursive expansion of nested messages in the\n   request) can be classified into three types:\n    (a) Matched in the URL template.\n    (b) Covered by body (if body is `*`, everything except (a) fields;\n        else everything under the body field)\n    (c) All other fields.\n3. URL query parameters found in the HTTP request are mapped to (c) fields.\n4. Any body sent with an HTTP request can contain only (b) fields.\n\nThe syntax of the path template is as follows:\n\n    Template = \"\/\" Segments [ Verb ] ;\n    Segments = Segment { \"\/\" Segment } ;\n    Segment  = \"*\" | \"**\" | LITERAL | Variable ;\n    Variable = \"{\" FieldPath [ \"=\" Segments ] \"}\" ;\n    FieldPath = IDENT { \".\" IDENT } ;\n    Verb     = \":\" LITERAL ;\n\nThe syntax `*` matches a single path segment. It follows the semantics of\n[RFC 6570](https:\/\/tools.ietf.org\/html\/rfc6570) Section 3.2.2 Simple String\nExpansion.\n\nThe syntax `**` matches zero or more path segments. It follows the semantics\nof [RFC 6570](https:\/\/tools.ietf.org\/html\/rfc6570) Section 3.2.3 Reserved\nExpansion.\n\nThe syntax `LITERAL` matches literal text in the URL path.\n\nThe syntax `Variable` matches the entire path as specified by its template;\nthis nested template must not contain further variables. If a variable\nmatches a single path segment, its template may be omitted, e.g. `{var}`\nis equivalent to `{var=*}`.\n\nNOTE: the field paths in variables and in the `body` must not refer to\nrepeated fields or map fields.\n\nUse CustomHttpPattern to specify any HTTP method that is not included in the\n`pattern` field, such as HEAD, or \"*\" to leave the HTTP method unspecified for\na given URL path rule. The wild-card rule is useful for services that provide\ncontent to Web (HTML) clients.",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects methods to which this rule applies.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "get": {
+          "description": "Used for listing and getting information about resources.",
+          "type": "string"
+        },
+        "put": {
+          "description": "Used for updating a resource.",
+          "type": "string"
+        },
+        "post": {
+          "description": "Used for creating a resource.",
+          "type": "string"
+        },
+        "delete": {
+          "description": "Used for deleting a resource.",
+          "type": "string"
+        },
+        "patch": {
+          "description": "Used for updating a resource.",
+          "type": "string"
+        },
+        "custom": {
+          "description": "Custom pattern is used for defining custom verbs.",
+          "$ref": "CustomHttpPattern"
+        },
+        "body": {
+          "description": "The name of the request field whose value is mapped to the HTTP body, or\n`*` for mapping all fields not captured by the path pattern to the HTTP\nbody. NOTE: the referred field must not be a repeated field.",
+          "type": "string"
+        },
+        "mediaUpload": {
+          "description": "Do not use this. For media support, add instead\n[][google.bytestream.RestByteStream] as an API to your\nconfiguration.",
+          "$ref": "MediaUpload"
+        },
+        "mediaDownload": {
+          "description": "Do not use this. For media support, add instead\n[][google.bytestream.RestByteStream] as an API to your\nconfiguration.",
+          "$ref": "MediaDownload"
+        },
+        "additionalBindings": {
+          "description": "Additional HTTP bindings for the selector. Nested bindings must\nnot contain an `additional_bindings` field themselves (that is,\nthe nesting may only be one level deep).",
+          "type": "array",
+          "items": {
+            "$ref": "HttpRule"
+          }
+        }
+      }
+    },
+    "CustomHttpPattern": {
+      "id": "CustomHttpPattern",
+      "description": "A custom pattern is used for defining custom HTTP verb.",
+      "type": "object",
+      "properties": {
+        "kind": {
+          "description": "The name of this custom HTTP verb.",
+          "type": "string"
+        },
+        "path": {
+          "description": "The path matched by this custom verb.",
+          "type": "string"
+        }
+      }
+    },
+    "MediaUpload": {
+      "id": "MediaUpload",
+      "description": "Do not use this. For media support, add instead\n[][google.bytestream.RestByteStream] as an API to your\nconfiguration.",
+      "type": "object",
+      "properties": {
+        "enabled": {
+          "description": "Whether upload is enabled.",
+          "type": "boolean"
+        }
+      }
+    },
+    "MediaDownload": {
+      "id": "MediaDownload",
+      "description": "Do not use this. For media support, add instead\n[][google.bytestream.RestByteStream] as an API to your\nconfiguration.",
+      "type": "object",
+      "properties": {
+        "enabled": {
+          "description": "Whether download is enabled.",
+          "type": "boolean"
+        }
+      }
+    },
+    "Quota": {
+      "id": "Quota",
+      "description": "Quota configuration helps to achieve fairness and budgeting in service\nusage.\n\n- Fairness is achieved through the use of short-term quota limits\n  that are usually defined over a time window of several seconds or\n  minutes. When such a limit is applied, for example at the user\n  level, it ensures that no single user will monopolize the service\n  or a given customer's allocated portion of it.\n- Budgeting is achieved through the use of long-term quota limits\n  that are usually defined over a time window of one or more\n  days. These limits help client application developers predict the\n  usage and help budgeting.\n\nQuota enforcement uses a simple token-based algorithm for resource sharing.\n\nThe quota configuration structure is as follows:\n\n- `QuotaLimit` defines a single enforceable limit with a specified\n  token amount that can be consumed over a specific duration and\n  applies to a particular entity, like a project or an end user. If\n  the limit applies to a user, each user making the request will\n  get the specified number of tokens to consume. When the tokens\n  run out, the requests from that user will be blocked until the\n  duration elapses and the next duration window starts.\n\n- `QuotaGroup` groups a set of quota limits.\n\n- `QuotaRule` maps a method to a set of quota groups. This allows\n  sharing of quota groups across methods as well as one method\n  consuming tokens from more than one quota group. 
When a group\n  contains multiple limits, requests to a method consuming tokens\n  from that group must satisfy all the limits in that group.\n\nExample:\n\n    quota:\n      groups:\n      - name: ReadGroup\n        limits:\n        - description: Daily Limit\n          name: ProjectQpd\n          default_limit: 10000\n          duration: 1d\n          limit_by: CLIENT_PROJECT\n\n        - description: Per-second Limit\n          name: UserQps\n          default_limit: 20000\n          duration: 100s\n          limit_by: USER\n\n      - name: WriteGroup\n        limits:\n        - description: Daily Limit\n          name: ProjectQpd\n          default_limit: 1000\n          max_limit: 1000\n          duration: 1d\n          limit_by: CLIENT_PROJECT\n\n        - description: Per-second Limit\n          name: UserQps\n          default_limit: 2000\n          max_limit: 4000\n          duration: 100s\n          limit_by: USER\n\n      rules:\n      - selector: \"*\"\n        groups:\n        - group: ReadGroup\n      - selector: google.calendar.Calendar.Update\n        groups:\n        - group: WriteGroup\n          cost: 2\n      - selector: google.calendar.Calendar.Delete\n        groups:\n        - group: WriteGroup\n\nHere, the configuration defines two quota groups: ReadGroup and WriteGroup,\neach defining its own daily and per-second limits. Note that One Platform\nenforces per-second limits averaged over a duration of 100 seconds. The rules\nmap ReadGroup for all methods, except for the Update and Delete methods.\nThese two methods consume from WriteGroup, with Update method consuming at\ntwice the rate as Delete method.\n\nMultiple quota groups can be specified for a method. The quota limits in all\nof those groups will be enforced. 
Example:\n\n    quota:\n      groups:\n      - name: WriteGroup\n        limits:\n        - description: Daily Limit\n          name: ProjectQpd\n          default_limit: 1000\n          max_limit: 1000\n          duration: 1d\n          limit_by: CLIENT_PROJECT\n\n        - description: Per-second Limit\n          name: UserQps\n          default_limit: 2000\n          max_limit: 4000\n          duration: 100s\n          limit_by: USER\n\n      - name: StorageGroup\n        limits:\n        - description: Storage Quota\n          name: StorageQuota\n          default_limit: 1000\n          duration: 0\n          limit_by: USER\n\n      rules:\n      - selector: google.calendar.Calendar.Create\n        groups:\n        - group: StorageGroup\n        - group: WriteGroup\n      - selector: google.calendar.Calendar.Delete\n        groups:\n        - group: StorageGroup\n\nIn the above example, the Create and Delete methods manage the user's\nstorage space. In addition, Create method uses WriteGroup to manage the\nrequests. In this case, requests to Create method need to satisfy all quota\nlimits defined in both quota groups.\n\nOne can disable quota for selected method(s) identified by the selector by\nsetting disable_quota to true. For example,\n\n      rules:\n      - selector: \"*\"\n        group:\n        - group ReadGroup\n      - selector: google.calendar.Calendar.Select\n        disable_quota: true\n",
+      "type": "object",
+      "properties": {
+        "groups": {
+          "description": "List of `QuotaGroup` definitions for the service.",
+          "type": "array",
+          "items": {
+            "$ref": "QuotaGroup"
+          }
+        },
+        "rules": {
+          "description": "List of `QuotaRule` definitions, each one mapping a selected method to one\nor more quota groups.",
+          "type": "array",
+          "items": {
+            "$ref": "QuotaRule"
+          }
+        }
+      }
+    },
+    "QuotaGroup": {
+      "id": "QuotaGroup",
+      "description": "`QuotaGroup` defines a set of quota limits to enforce.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "Name of this quota group. Must be unique within the service.\n\nQuota group name is used as part of the id for quota limits. Once the quota\ngroup has been put into use, the name of the quota group should be\nimmutable.",
+          "type": "string"
+        },
+        "description": {
+          "description": "User-visible description of this quota group.",
+          "type": "string"
+        },
+        "limits": {
+          "description": "Quota limits to be enforced when this quota group is used. A request must\nsatisfy all the limits in a group for it to be permitted.",
+          "type": "array",
+          "items": {
+            "$ref": "QuotaLimit"
+          }
+        },
+        "billable": {
+          "description": "Indicates if the quota limits defined in this quota group apply to\nconsumers who have active billing. Quota limits defined in billable\ngroups will be applied only to consumers who have active billing. The\namount of tokens consumed from billable quota group will also be reported\nfor billing. Quota limits defined in non-billable groups will be applied\nonly to consumers who have no active billing.",
+          "type": "boolean"
+        }
+      }
+    },
+    "QuotaLimit": {
+      "id": "QuotaLimit",
+      "description": "`QuotaLimit` defines a specific limit that applies over a specified duration\nfor a limit type. There can be at most one limit for a duration and limit\ntype combination defined within a `QuotaGroup`.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "Name of the quota limit.  Must be unique within the quota group.\nThis name is used to refer to the limit when overriding the limit on\na per-project basis.  If a name is not provided, it will be generated\nfrom the limit_by and duration fields.\n\nThe maximum length of the limit name is 64 characters.\n\nThe name of a limit is used as a unique identifier for this limit.\nTherefore, once a limit has been put into use, its name should be\nimmutable. You can use the display_name field to provide a user-friendly\nname for the limit. The display name can be evolved over time without\naffecting the identity of the limit.\n",
+          "type": "string"
+        },
+        "limitBy": {
+          "description": "Limit type to use for enforcing this quota limit. Each unique value gets\nthe defined number of tokens to consume from. For a quota limit that uses\nuser type, each user making requests through the same client application\nproject will get his\/her own pool of tokens to consume, whereas for a limit\nthat uses client project type, all users making requests through the same\nclient application project share a single pool of tokens.",
+          "enumDescriptions": [
+            "ID of the project owned by the client application developer making the\nrequest.",
+            "ID of the end user making the request using the client application."
+          ],
+          "type": "string",
+          "enum": [
+            "CLIENT_PROJECT",
+            "USER"
+          ]
+        },
+        "description": {
+          "description": "Optional. User-visible, extended description for this quota limit.\nShould be used only when more context is needed to understand this limit\nthan provided by the limit's display name (see: `display_name`).",
+          "type": "string"
+        },
+        "defaultLimit": {
+          "description": "Default number of tokens that can be consumed during the specified\nduration. This is the number of tokens assigned when a client\napplication developer activates the service for his\/her project.\n\nSpecifying a value of 0 will block all requests. This can be used if you\nare provisioning quota to selected consumers and blocking others.\nSimilarly, a value of -1 will indicate an unlimited quota. No other\nnegative values are allowed.",
+          "type": "string",
+          "format": "int64"
+        },
+        "maxLimit": {
+          "description": "Maximum number of tokens that can be consumed during the specified\nduration. Client application developers can override the default limit up\nto this maximum. If specified, this value cannot be set to a value less\nthan the default limit. If not specified, it is set to the default limit.\n\nTo allow clients to apply overrides with no upper bound, set this to -1,\nindicating unlimited maximum quota.",
+          "type": "string",
+          "format": "int64"
+        },
+        "freeTier": {
+          "description": "Free tier value displayed in the Developers Console for this limit.\nThe free tier is the number of tokens that will be subtracted from the\nbilled amount when billing is enabled.\nThis field can only be set on a limit with duration \"1d\", in a billable\ngroup; it is invalid on any other limit. If this field is not set, it\ndefaults to 0, indicating that there is no free tier for this service.",
+          "type": "string",
+          "format": "int64"
+        },
+        "duration": {
+          "description": "Duration of this limit in textual notation. Example: \"100s\", \"24h\", \"1d\".\nFor duration longer than a day, only multiple of days is supported. We\nsupport only \"100s\" and \"1d\" for now. Additional support will be added in\nthe future. \"0\" indicates indefinite duration.",
+          "type": "string"
+        },
+        "displayName": {
+          "description": "User-visible display name for this limit.\nOptional. If not set, the UI will provide a default display name based on\nthe quota configuration. This field can be used to override the default\ndisplay name generated from the configuration.",
+          "type": "string"
+        }
+      }
+    },
+    "QuotaRule": {
+      "id": "QuotaRule",
+      "description": "`QuotaRule` maps a method to a set of `QuotaGroup`s.",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects methods to which this rule applies.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "groups": {
+          "description": "Quota groups to be used for this method. This supports associating a cost\nwith each quota group.",
+          "type": "array",
+          "items": {
+            "$ref": "QuotaGroupMapping"
+          }
+        },
+        "disableQuota": {
+          "description": "Indicates if quota checking should be enforced. Quota will be disabled for\nmethods without quota rules or with quota rules having this field set to\ntrue. When this field is set to true, no quota group mapping is allowed.",
+          "type": "boolean"
+        }
+      }
+    },
+    "QuotaGroupMapping": {
+      "id": "QuotaGroupMapping",
+      "description": "A quota group mapping.",
+      "type": "object",
+      "properties": {
+        "group": {
+          "description": "The `QuotaGroup.name` of the group. Requests for the mapped methods will\nconsume tokens from each of the limits defined in this group.",
+          "type": "string"
+        },
+        "cost": {
+          "description": "Number of tokens to consume for each request. This allows different cost\nto be associated with different methods that consume from the same quota\ngroup. By default, each request will cost one token.",
+          "type": "integer",
+          "format": "int32"
+        }
+      }
+    },
+    "Authentication": {
+      "id": "Authentication",
+      "description": "`Authentication` defines the authentication configuration for an API.\n\nExample for an API targeted for external use:\n\n    name: calendar.googleapis.com\n    authentication:\n      rules:\n      - selector: \"*\"\n        oauth:\n          canonical_scopes: https:\/\/www.googleapis.com\/auth\/calendar\n\n      - selector: google.calendar.Delegate\n        oauth:\n          canonical_scopes: https:\/\/www.googleapis.com\/auth\/calendar.read",
+      "type": "object",
+      "properties": {
+        "rules": {
+          "description": "Individual rules for authentication.",
+          "type": "array",
+          "items": {
+            "$ref": "AuthenticationRule"
+          }
+        },
+        "providers": {
+          "description": "Defines a set of authentication providers that a service supports.",
+          "type": "array",
+          "items": {
+            "$ref": "AuthProvider"
+          }
+        }
+      }
+    },
+    "AuthenticationRule": {
+      "id": "AuthenticationRule",
+      "description": "Authentication rules for the service.\n\nBy default, if a method has any authentication requirements, every request\nmust include a valid credential matching one of the requirements.\nIt's an error to include more than one kind of credential in a single\nrequest.\n\nIf a method doesn't have any auth requirements, request credentials will be\nignored.\n",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects the methods to which this rule applies.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "oauth": {
+          "description": "The requirements for OAuth credentials.",
+          "$ref": "OAuthRequirements"
+        },
+        "allowWithoutCredential": {
+          "description": "Whether to allow requests without a credential.  If quota is enabled, an\nAPI key is required for such request to pass the quota check.\n",
+          "type": "boolean"
+        },
+        "requirements": {
+          "description": "Requirements for additional authentication providers.",
+          "type": "array",
+          "items": {
+            "$ref": "AuthRequirement"
+          }
+        }
+      }
+    },
+    "OAuthRequirements": {
+      "id": "OAuthRequirements",
+      "description": "OAuth scopes are a way to define data and permissions on data. For example,\nthere are scopes defined for \"Read-only access to Google Calendar\" and\n\"Access to Cloud Platform\". Users can consent to a scope for an application,\ngiving it permission to access that data on their behalf.\n\nOAuth scope specifications should be fairly coarse grained; a user will need\nto see and understand the text description of what your scope means.\n\nIn most cases: use one or at most two OAuth scopes for an entire family of\nproducts. If your product has multiple APIs, you should probably be sharing\nthe OAuth scope across all of those APIs.\n\nWhen you need finer grained OAuth consent screens: talk with your product\nmanagement about how developers will use them in practice.\n\nPlease note that even though each of the canonical scopes is enough for a\nrequest to be accepted and passed to the backend, a request can still fail\ndue to the backend requiring additional scopes or permissions.\n",
+      "type": "object",
+      "properties": {
+        "canonicalScopes": {
+          "description": "The list of publicly documented OAuth scopes that are allowed access. An\nOAuth token containing any of these scopes will be accepted.\n\nExample:\n\n     canonical_scopes: https:\/\/www.googleapis.com\/auth\/calendar,\n                       https:\/\/www.googleapis.com\/auth\/calendar.read",
+          "type": "string"
+        }
+      }
+    },
+    "AuthRequirement": {
+      "id": "AuthRequirement",
+      "description": "User-defined authentication requirements, including support for\n[JSON Web Token (JWT)](https:\/\/tools.ietf.org\/html\/draft-ietf-oauth-json-web-token-32).",
+      "type": "object",
+      "properties": {
+        "providerId": {
+          "description": "id from authentication provider.\n\nExample:\n\n    provider_id: bookstore_auth",
+          "type": "string"
+        },
+        "audiences": {
+          "description": "The list of JWT\n[audiences](https:\/\/tools.ietf.org\/html\/draft-ietf-oauth-json-web-token-32#section-4.1.3)\nthat are allowed to access. A JWT containing any of these audiences will\nbe accepted. When this setting is absent, only JWTs with audience\n\"https:\/\/Service_name\/API_name\"\nwill be accepted. For example, if no audiences are in the setting,\nLibraryService API will only accept JWTs with the following audience\n\"https:\/\/library-example.googleapis.com\/google.example.library.v1.LibraryService\".\n\nExample:\n\n    audiences: bookstore_android.apps.googleusercontent.com,\n               bookstore_web.apps.googleusercontent.com",
+          "type": "string"
+        }
+      }
+    },
+    "AuthProvider": {
+      "id": "AuthProvider",
+      "description": "Configuration for an authentication provider, including support for\n[JSON Web Token (JWT)](https:\/\/tools.ietf.org\/html\/draft-ietf-oauth-json-web-token-32).",
+      "type": "object",
+      "properties": {
+        "id": {
+          "description": "The unique identifier of the auth provider. It will be referred to by\n`AuthRequirement.provider_id`.\n\nExample: \"bookstore_auth\".",
+          "type": "string"
+        },
+        "issuer": {
+          "description": "Identifies the principal that issued the JWT. See\nhttps:\/\/tools.ietf.org\/html\/draft-ietf-oauth-json-web-token-32#section-4.1.1\nUsually a URL or an email address.\n\nExample: https:\/\/securetoken.google.com\nExample: 1234567-compute@developer.gserviceaccount.com",
+          "type": "string"
+        },
+        "jwksUri": {
+          "description": "URL of the provider's public key set to validate signature of the JWT. See\n[OpenID Discovery](https:\/\/openid.net\/specs\/openid-connect-discovery-1_0.html#ProviderMetadata).\nOptional if the key set document:\n - can be retrieved from\n   [OpenID Discovery](https:\/\/openid.net\/specs\/openid-connect-discovery-1_0.html)\n   of the issuer.\n - can be inferred from the email domain of the issuer (e.g. a Google service account).\n\nExample: https:\/\/www.googleapis.com\/oauth2\/v1\/certs",
+          "type": "string"
+        }
+      }
+    },
+    "Context": {
+      "id": "Context",
+      "description": "`Context` defines which contexts an API requests.\n\nExample:\n\n    context:\n      rules:\n      - selector: \"*\"\n        requested:\n        - google.rpc.context.ProjectContext\n        - google.rpc.context.OriginContext\n\nThe above specifies that all methods in the API request\n`google.rpc.context.ProjectContext` and\n`google.rpc.context.OriginContext`.\n\nAvailable context types are defined in package\n`google.rpc.context`.",
+      "type": "object",
+      "properties": {
+        "rules": {
+          "description": "List of rules for context, applicable to methods.",
+          "type": "array",
+          "items": {
+            "$ref": "ContextRule"
+          }
+        }
+      }
+    },
+    "ContextRule": {
+      "id": "ContextRule",
+      "description": "A context rule provides information about the context for an individual API\nelement.",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects the methods to which this rule applies.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "requested": {
+          "description": "A list of full type names of requested contexts.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "provided": {
+          "description": "A list of full type names of provided contexts.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "Usage": {
+      "id": "Usage",
+      "description": "Configuration controlling usage of a service.",
+      "type": "object",
+      "properties": {
+        "serviceAccess": {
+          "description": "Controls which users can see or activate the service.",
+          "enumDescriptions": [
+            "The service can only be seen\/used by users identified in the service's\naccess control policy.\n\nIf the service has not been whitelisted by your domain administrator\nfor out-of-org publishing, then this mode will be treated like\nORG_RESTRICTED.",
+            "The service can be seen\/used by anyone.\n\nIf the service has not been whitelisted by your domain administrator\nfor out-of-org publishing, then this mode will be treated like\nORG_PUBLIC.\n\nThe discovery document for the service will also be public and allow\nunregistered access.",
+            "The service can be seen\/used by users identified in the service's\naccess control policy and they are within the organization that owns the\nservice.\n\nAccess is further constrained to the group\ncontrolled by the administrator of the project\/org that owns the\nservice.",
+            "The service can be seen\/used by the group of users controlled by the\nadministrator of the project\/org that owns the service."
+          ],
+          "type": "string",
+          "enum": [
+            "RESTRICTED",
+            "PUBLIC",
+            "ORG_RESTRICTED",
+            "ORG_PUBLIC"
+          ]
+        },
+        "requirements": {
+          "description": "Requirements that must be satisfied before a consumer project can use the\nservice. Each requirement is of the form <service.name>\/<requirement-id>;\nfor example 'serviceusage.googleapis.com\/billing-enabled'.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "dependsOnServices": {
+          "description": "Services that must be activated in order for this service to be used.\nThe set of services activated as a result of these relations are all\nactivated in parallel with no guaranteed order of activation.\nEach string is a service name, e.g. `calendar.googleapis.com`.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "activationHooks": {
+          "description": "Services that must be contacted before a consumer can begin using the\nservice. Each service will be contacted in sequence, and, if any activation\ncall fails, the entire activation will fail. Each hook is of the form\n<service.name>\/<hook-id>, where <hook-id> is optional; for example:\n'robotservice.googleapis.com\/default'.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "deactivationHooks": {
+          "description": "Services that must be contacted before a consumer can deactivate a\nservice. Each service will be contacted in sequence, and, if any\ndeactivation call fails, the entire deactivation will fail. Each hook is\nof the form <service.name>\/<hook-id>, where <hook-id> is optional; for\nexample:\n'compute.googleapis.com\/'.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "rules": {
+          "description": "Individual rules for configuring usage on selected methods.",
+          "type": "array",
+          "items": {
+            "$ref": "UsageRule"
+          }
+        }
+      }
+    },
+    "UsageRule": {
+      "id": "UsageRule",
+      "description": "Usage configuration rules for the service.\n\nNOTE: Under development.\n\n\nUse this rule to configure unregistered calls for the service. Unregistered\ncalls are calls that do not contain consumer project identity.\n(Example: calls that do not contain an API key).\nBy default, API methods do not allow unregistered calls, and each method call\nmust be identified by a consumer project identity. Use this rule to\nallow\/disallow unregistered calls.\n\nExample of an API that wants to allow unregistered calls for entire service.\n\n    usage:\n      rules:\n      - selector: \"*\"\n        allow_unregistered_calls: true\n\nExample of a method that wants to allow unregistered calls.\n\n    usage:\n      rules:\n      - selector: \"google.example.library.v1.LibraryService.CreateBook\"\n        allow_unregistered_calls: true",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects the methods to which this rule applies. Use '*' to indicate all\nmethods in all APIs.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "allowUnregisteredCalls": {
+          "description": "True, if the method allows unregistered calls; false otherwise.",
+          "type": "boolean"
+        }
+      }
+    },
+    "CustomError": {
+      "id": "CustomError",
+      "description": "Customize service error responses.  For example, list any service\nspecific protobuf types that can appear in error detail lists of\nerror responses.\n\nExample:\n\n    custom_error:\n      types:\n      - google.foo.v1.CustomError\n      - google.foo.v1.AnotherError\n",
+      "type": "object",
+      "properties": {
+        "rules": {
+          "description": "The list of custom error rules to select to which messages this should\napply.",
+          "type": "array",
+          "items": {
+            "$ref": "CustomErrorRule"
+          }
+        },
+        "types": {
+          "description": "The list of custom error detail types, e.g. 'google.foo.v1.CustomError'.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "CustomErrorRule": {
+      "id": "CustomErrorRule",
+      "description": "A custom error rule.",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects messages to which this rule applies.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "isErrorType": {
+          "description": "Mark this message as possible payload in error response.  Otherwise,\nobjects of this type will be filtered when they appear in error payload.",
+          "type": "boolean"
+        }
+      }
+    },
+    "ProjectProperties": {
+      "id": "ProjectProperties",
+      "description": "A descriptor for defining project properties for a service. One service may\nhave many consumer projects, and the service may want to behave differently\ndepending on some properties on the project. For example, a project may be\nassociated with a school, or a business, or a government agency, a business\ntype property on the project may affect how a service responds to the client.\nThis descriptor defines which properties are allowed to be set on a project.\n\nExample:\n\n   project_properties:\n     properties:\n     - name: NO_WATERMARK\n       type: BOOL\n       description: Allows usage of the API without watermarks.\n     - name: EXTENDED_TILE_CACHE_PERIOD\n       type: INT64",
+      "type": "object",
+      "properties": {
+        "properties": {
+          "description": "List of per consumer project-specific properties.",
+          "type": "array",
+          "items": {
+            "$ref": "Property"
+          }
+        }
+      }
+    },
+    "Property": {
+      "id": "Property",
+      "description": "Defines project properties.\n\nAPI services can define properties that can be assigned to consumer projects\nso that backends can perform response customization without having to make\nadditional calls or maintain additional storage. For example, Maps API\ndefines properties that controls map tile cache period, or whether to embed a\nwatermark in a result.\n\nThese values can be set via API producer console. Only API providers can\ndefine and set these properties.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The name of the property (a.k.a key).",
+          "type": "string"
+        },
+        "type": {
+          "description": "The type of this property.",
+          "enumDescriptions": [
+            "The type is unspecified, and will result in an error.",
+            "The type is `int64`.",
+            "The type is `bool`.",
+            "The type is `string`.",
+            "The type is 'double'."
+          ],
+          "type": "string",
+          "enum": [
+            "UNSPECIFIED",
+            "INT64",
+            "BOOL",
+            "STRING",
+            "DOUBLE"
+          ]
+        },
+        "description": {
+          "description": "The description of the property",
+          "type": "string"
+        }
+      }
+    },
+    "Control": {
+      "id": "Control",
+      "description": "Selects and configures the service controller used by the service.  The\nservice controller handles features like abuse, quota, billing, logging,\nmonitoring, etc.\n",
+      "type": "object",
+      "properties": {
+        "environment": {
+          "description": "The service control environment to use. If empty, no control plane\nfeature (like quota and billing) will be enabled.",
+          "type": "string"
+        }
+      }
+    },
+    "LogDescriptor": {
+      "id": "LogDescriptor",
+      "description": "A description of a log type. Example in YAML format:\n\n    - name: library.googleapis.com\/activity_history\n      description: The history of borrowing and returning library items.\n      display_name: Activity\n      labels:\n      - key: \/customer_id\n        description: Identifier of a library customer",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The name of the log. It must be less than 512 characters long and can\ninclude the following characters: upper- and lower-case alphanumeric\ncharacters [A-Za-z0-9], and punctuation characters including\nslash, underscore, hyphen, period [\/_-.].",
+          "type": "string"
+        },
+        "labels": {
+          "description": "The set of labels that are available to describe a specific log entry.\nRuntime requests that contain labels not specified here are\nconsidered invalid.",
+          "type": "array",
+          "items": {
+            "$ref": "LabelDescriptor"
+          }
+        },
+        "description": {
+          "description": "A human-readable description of this log. This information appears in\nthe documentation and can contain details.",
+          "type": "string"
+        },
+        "displayName": {
+          "description": "The human-readable name for this log. This information appears on\nthe user interface and should be concise.",
+          "type": "string"
+        }
+      }
+    },
+    "LabelDescriptor": {
+      "id": "LabelDescriptor",
+      "description": "A description of a label.",
+      "type": "object",
+      "properties": {
+        "key": {
+          "description": "The label key.",
+          "type": "string"
+        },
+        "valueType": {
+          "description": "The type of data that can be assigned to the label.",
+          "enumDescriptions": [
+            "A variable-length string. This is the default.",
+            "Boolean; true or false.",
+            "A 64-bit signed integer."
+          ],
+          "type": "string",
+          "enum": [
+            "STRING",
+            "BOOL",
+            "INT64"
+          ]
+        },
+        "description": {
+          "description": "A human-readable description for the label.",
+          "type": "string"
+        }
+      }
+    },
+    "MetricDescriptor": {
+      "id": "MetricDescriptor",
+      "description": "Defines a metric type and its schema.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "Resource name. The format of the name may vary between different\nimplementations. For examples:\n\n    projects\/{project_id}\/metricDescriptors\/{type=**}\n    metricDescriptors\/{type=**}",
+          "type": "string"
+        },
+        "type": {
+          "description": "The metric type including a DNS name prefix, for example\n`\"compute.googleapis.com\/instance\/cpu\/utilization\"`. Metric types\nshould use a natural hierarchical grouping such as the following:\n\n    compute.googleapis.com\/instance\/cpu\/utilization\n    compute.googleapis.com\/instance\/disk\/read_ops_count\n    compute.googleapis.com\/instance\/network\/received_bytes_count\n\nNote that if the metric type changes, the monitoring data will be\ndiscontinued, and anything depends on it will break, such as monitoring\ndashboards, alerting rules and quota limits. Therefore, once a metric has\nbeen published, its type should be immutable.",
+          "type": "string"
+        },
+        "labels": {
+          "description": "The set of labels that can be used to describe a specific instance of this\nmetric type. For example, the\n`compute.googleapis.com\/instance\/network\/received_bytes_count` metric type\nhas a label, `loadbalanced`, that specifies whether the traffic was\nreceived through a load balanced IP address.",
+          "type": "array",
+          "items": {
+            "$ref": "LabelDescriptor"
+          }
+        },
+        "metricKind": {
+          "description": "Whether the metric records instantaneous values, changes to a value, etc.",
+          "enumDescriptions": [
+            "Do not use this default value.",
+            "Instantaneous measurements of a varying quantity.",
+            "Changes over non-overlapping time intervals.",
+            "Cumulative value over time intervals that can overlap.\nThe overlapping intervals must have the same start time."
+          ],
+          "type": "string",
+          "enum": [
+            "METRIC_KIND_UNSPECIFIED",
+            "GAUGE",
+            "DELTA",
+            "CUMULATIVE"
+          ]
+        },
+        "valueType": {
+          "description": "Whether the measurement is an integer, a floating-point number, etc.",
+          "enumDescriptions": [
+            "Do not use this default value.",
+            "The value is a boolean.\nThis value type can be used only if the metric kind is `GAUGE`.",
+            "The value is a signed 64-bit integer.",
+            "The value is a double precision floating point number.",
+            "The value is a text string.\nThis value type can be used only if the metric kind is `GAUGE`.",
+            "The value is a `Distribution`.",
+            "The value is money."
+          ],
+          "type": "string",
+          "enum": [
+            "VALUE_TYPE_UNSPECIFIED",
+            "BOOL",
+            "INT64",
+            "DOUBLE",
+            "STRING",
+            "DISTRIBUTION",
+            "MONEY"
+          ]
+        },
+        "unit": {
+          "description": "The unit in which the metric value is reported. It is only applicable\nif the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`. The\nsupported units are a subset of [The Unified Code for Units of\nMeasure](http:\/\/unitsofmeasure.org\/ucum.html) standard:\n\n**Basic units (UNIT)**\n\n* `bit`   bit\n* `By`    byte\n* `s`     second\n* `min`   minute\n* `h`     hour\n* `d`     day\n\n**Prefixes (PREFIX)**\n\n* `k`     kilo    (10**3)\n* `M`     mega    (10**6)\n* `G`     giga    (10**9)\n* `T`     tera    (10**12)\n* `P`     peta    (10**15)\n* `E`     exa     (10**18)\n* `Z`     zetta   (10**21)\n* `Y`     yotta   (10**24)\n* `m`     milli   (10**-3)\n* `u`     micro   (10**-6)\n* `n`     nano    (10**-9)\n* `p`     pico    (10**-12)\n* `f`     femto   (10**-15)\n* `a`     atto    (10**-18)\n* `z`     zepto   (10**-21)\n* `y`     yocto   (10**-24)\n* `Ki`    kibi    (2**10)\n* `Mi`    mebi    (2**20)\n* `Gi`    gibi    (2**30)\n* `Ti`    tebi    (2**40)\n\n**Grammar**\n\nThe grammar includes the dimensionless unit `1`, such as `1\/s`.\n\nThe grammar also includes these connectors:\n\n* `\/`    division (as an infix operator, e.g. `1\/s`).\n* `.`    multiplication (as an infix operator, e.g. `GBy.d`)\n\nThe grammar for a unit is as follows:\n\n    Expression = Component { \".\" Component } { \"\/\" Component } ;\n\n    Component = [ PREFIX ] UNIT [ Annotation ]\n              | Annotation\n              | \"1\"\n              ;\n\n    Annotation = \"{\" NAME \"}\" ;\n\nNotes:\n\n* `Annotation` is just a comment if it follows a `UNIT` and is\n   equivalent to `1` if it is used alone. For examples,\n   `{requests}\/s == 1\/s`, `By{transmitted}\/s == By\/s`.\n* `NAME` is a sequence of non-blank printable ASCII characters not\n   containing '{' or '}'.",
+          "type": "string"
+        },
+        "description": {
+          "description": "A detailed description of the metric, which can be used in documentation.",
+          "type": "string"
+        },
+        "displayName": {
+          "description": "A concise name for the metric, which can be displayed in user interfaces.\nUse sentence case without an ending period, for example \"Request count\".",
+          "type": "string"
+        }
+      }
+    },
+    "MonitoredResourceDescriptor": {
+      "id": "MonitoredResourceDescriptor",
+      "description": "An object that describes the schema of a MonitoredResource object using a\ntype name and a set of labels.  For example, the monitored resource\ndescriptor for Google Compute Engine VM instances has a type of\n`\"gce_instance\"` and specifies the use of the labels `\"instance_id\"` and\n`\"zone\"` to identify particular VM instances.\n\nDifferent APIs can support different monitored resource types. APIs generally\nprovide a `list` method that returns the monitored resource descriptors used\nby the API.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "Optional. The resource name of the monitored resource descriptor:\n`\"projects\/{project_id}\/monitoredResourceDescriptors\/{type}\"` where\n{type} is the value of the `type` field in this object and\n{project_id} is a project ID that provides API-specific context for\naccessing the type.  APIs that do not use project information can use the\nresource name format `\"monitoredResourceDescriptors\/{type}\"`.",
+          "type": "string"
+        },
+        "type": {
+          "description": "Required. The monitored resource type. For example, the type\n`\"cloudsql_database\"` represents databases in Google Cloud SQL.\nThe maximum length of this value is 256 characters.",
+          "type": "string"
+        },
+        "displayName": {
+          "description": "Optional. A concise name for the monitored resource type that might be\ndisplayed in user interfaces. For example, `\"Google Cloud SQL Database\"`.",
+          "type": "string"
+        },
+        "description": {
+          "description": "Optional. A detailed description of the monitored resource type that might\nbe used in documentation.",
+          "type": "string"
+        },
+        "labels": {
+          "description": "Required. A set of labels used to describe instances of this monitored\nresource type. For example, an individual Google Cloud SQL database is\nidentified by values for the labels `\"database_id\"` and `\"zone\"`.",
+          "type": "array",
+          "items": {
+            "$ref": "LabelDescriptor"
+          }
+        }
+      }
+    },
+    "Billing": {
+      "id": "Billing",
+      "description": "Billing related configuration of the service.\n\nThe following example shows how to configure metrics for billing:\n\n    metrics:\n    - name: library.googleapis.com\/read_calls\n      metric_kind: DELTA\n      value_type: INT64\n    - name: library.googleapis.com\/write_calls\n      metric_kind: DELTA\n      value_type: INT64\n    billing:\n      metrics:\n      - library.googleapis.com\/read_calls\n      - library.googleapis.com\/write_calls\n\nThe next example shows how to enable billing status check and customize the\ncheck behavior. It makes sure billing status check is included in the `Check`\nmethod of [Service Control API](https:\/\/cloud.google.com\/service-control\/).\nIn the example, \"google.storage.Get\" method can be served when the billing\nstatus is either `current` or `delinquent`, while \"google.storage.Write\"\nmethod can only be served when the billing status is `current`:\n\n    billing:\n      rules:\n      - selector: google.storage.Get\n        allowed_statuses:\n        - current\n        - delinquent\n      - selector: google.storage.Write\n        allowed_statuses: current\n\nMostly services should only allow `current` status when serving requests.\nIn addition, services can choose to allow both `current` and `delinquent`\nstatuses when serving read-only requests to resources. If there's no\nmatching selector for operation, no billing status check will be performed.\n",
+      "type": "object",
+      "properties": {
+        "metrics": {
+          "description": "Names of the metrics to report to billing. Each name must\nbe defined in Service.metrics section.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "rules": {
+          "description": "A list of billing status rules for configuring billing status check.",
+          "type": "array",
+          "items": {
+            "$ref": "BillingStatusRule"
+          }
+        },
+        "areaUnderCurveParams": {
+          "description": "Per resource grouping for delta billing based resource configs.",
+          "type": "array",
+          "items": {
+            "$ref": "AreaUnderCurveParams"
+          }
+        }
+      }
+    },
+    "BillingStatusRule": {
+      "id": "BillingStatusRule",
+      "description": "Defines the billing status requirements for operations.\n\nWhen used with\n[Service Control API](https:\/\/cloud.google.com\/service-control\/), the\nfollowing statuses are supported:\n\n- **current**: the associated billing account is up to date and capable of\n               paying for resource usages.\n- **delinquent**: the associated billing account has a correctable problem,\n                  such as late payment.\n\nMostly services should only allow `current` status when serving requests.\nIn addition, services can choose to allow both `current` and `delinquent`\nstatuses when serving read-only requests to resources. If the list of\nallowed_statuses is empty, it means no billing requirement.\n",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects the operation names to which this rule applies.\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "allowedStatuses": {
+          "description": "Allowed billing statuses. The billing status check passes if the actual\nbilling status matches any of the provided values here.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "AreaUnderCurveParams": {
+      "id": "AreaUnderCurveParams",
+      "description": "AreaUnderCurveParams groups the metrics relevant to generating\nduration based metric from base (snapshot) metric and delta\n(change) metric.  The generated metric has two dimensions:\n   resource usage metric and the duration the metric applies.\n\nEssentially the generated metric is the Area Under Curve(AUC) of\nthe \"duration - resource\" usage curve. This AUC metric is readily\napplicable to billing since \"billable resource usage\" depends on\nresource usage and duration of the resource used.\n\nA service config may contain multiple resources and corresponding\nmetrics. AreaUnderCurveParams groups the relevant ones: which\nsnapshot_metric and change_metric are used to produce which\ngenerated_metric.\n",
+      "type": "object",
+      "properties": {
+        "snapshotMetric": {
+          "description": "Total usage of a resource at a particular timestamp. This should be\na GAUGE metric.",
+          "type": "string"
+        },
+        "changeMetric": {
+          "description": "Change of resource usage at a particular timestamp. This should be a\nDELTA metric.",
+          "type": "string"
+        },
+        "generatedMetric": {
+          "description": "Metric generated from snapshot_metric and change_metric. This\nis also a DELTA metric.",
+          "type": "string"
+        }
+      }
+    },
+    "Logging": {
+      "id": "Logging",
+      "description": "Logging configuration of the service.\n\nThe following example shows how to configure logs to be sent to the\nproducer and consumer projects. In the example,\nthe `library.googleapis.com\/activity_history` log is\nsent to both the producer and consumer projects, whereas\nthe `library.googleapis.com\/purchase_history` log is only sent to the\nproducer project:\n\n    monitored_resources:\n    - type: library.googleapis.com\/branch\n      labels:\n      - key: \/city\n        description: The city where the library branch is located in.\n      - key: \/name\n        description: The name of the branch.\n    logs:\n    - name: library.googleapis.com\/activity_history\n      labels:\n      - key: \/customer_id\n    - name: library.googleapis.com\/purchase_history\n    logging:\n      producer_destinations:\n      - monitored_resource: library.googleapis.com\/branch\n        logs:\n        - library.googleapis.com\/activity_history\n        - library.googleapis.com\/purchase_history\n      consumer_destinations:\n      - monitored_resource: library.googleapis.com\/branch\n        logs:\n        - library.googleapis.com\/activity_history\n",
+      "type": "object",
+      "properties": {
+        "producerDestinations": {
+          "description": "Logging configurations for sending logs to the producer project.\nThere can be multiple producer destinations, each one must have a\ndifferent monitored resource type. A log can be used in at most\none producer destination.",
+          "type": "array",
+          "items": {
+            "$ref": "LoggingDestination"
+          }
+        },
+        "consumerDestinations": {
+          "description": "Logging configurations for sending logs to the consumer project.\nThere can be multiple consumer destinations, each one must have a\ndifferent monitored resource type. A log can be used in at most\none consumer destination.",
+          "type": "array",
+          "items": {
+            "$ref": "LoggingDestination"
+          }
+        }
+      }
+    },
+    "LoggingDestination": {
+      "id": "LoggingDestination",
+      "description": "Configuration of a specific logging destination (the producer project\nor the consumer project).",
+      "type": "object",
+      "properties": {
+        "monitoredResource": {
+          "description": "The monitored resource type. The type must be defined in\nService.monitored_resources section.",
+          "type": "string"
+        },
+        "logs": {
+          "description": "Names of the logs to be sent to this destination. Each name must\nbe defined in the Service.logs section.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "Monitoring": {
+      "id": "Monitoring",
+      "description": "Monitoring configuration of the service.\n\nThe example below shows how to configure monitored resources and metrics\nfor monitoring. In the example, a monitored resource and two metrics are\ndefined. The `library.googleapis.com\/book\/returned_count` metric is sent\nto both producer and consumer projects, whereas the\n`library.googleapis.com\/book\/overdue_count` metric is only sent to the\nconsumer project.\n\n    monitored_resources:\n    - type: library.googleapis.com\/branch\n      labels:\n      - key: \/city\n        description: The city where the library branch is located in.\n      - key: \/name\n        description: The name of the branch.\n    metrics:\n    - name: library.googleapis.com\/book\/returned_count\n      metric_kind: DELTA\n      value_type: INT64\n      labels:\n      - key: \/customer_id\n    - name: library.googleapis.com\/book\/overdue_count\n      metric_kind: GAUGE\n      value_type: INT64\n      labels:\n      - key: \/customer_id\n    monitoring:\n      producer_destinations:\n      - monitored_resource: library.googleapis.com\/branch\n        metrics:\n        - library.googleapis.com\/book\/returned_count\n      consumer_destinations:\n      - monitored_resource: library.googleapis.com\/branch\n        metrics:\n        - library.googleapis.com\/book\/returned_count\n        - library.googleapis.com\/book\/overdue_count\n",
+      "type": "object",
+      "properties": {
+        "producerDestinations": {
+          "description": "Monitoring configurations for sending metrics to the producer project.\nThere can be multiple producer destinations, each one must have a\ndifferent monitored resource type. A metric can be used in at most\none producer destination.",
+          "type": "array",
+          "items": {
+            "$ref": "MonitoringDestination"
+          }
+        },
+        "consumerDestinations": {
+          "description": "Monitoring configurations for sending metrics to the consumer project.\nThere can be multiple consumer destinations, each one must have a\ndifferent monitored resource type. A metric can be used in at most\none consumer destination.",
+          "type": "array",
+          "items": {
+            "$ref": "MonitoringDestination"
+          }
+        }
+      }
+    },
+    "MonitoringDestination": {
+      "id": "MonitoringDestination",
+      "description": "Configuration of a specific monitoring destination (the producer project\nor the consumer project).",
+      "type": "object",
+      "properties": {
+        "monitoredResource": {
+          "description": "The monitored resource type. The type must be defined in\nService.monitored_resources section.",
+          "type": "string"
+        },
+        "metrics": {
+          "description": "Names of the metrics to report to this monitoring destination.\nEach name must be defined in Service.metrics section.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "SystemParameters": {
+      "id": "SystemParameters",
+      "description": "### System parameter configuration\n\nA system parameter is a special kind of parameter defined by the API\nsystem, not by an individual API. It is typically mapped to an HTTP header\nand\/or a URL query parameter. This configuration specifies which methods\nchange the names of the system parameters.",
+      "type": "object",
+      "properties": {
+        "rules": {
+          "description": "Define system parameters.\n\nThe parameters defined here will override the default parameters\nimplemented by the system. If this field is missing from the service\nconfig, default system parameters will be used. Default system parameters\nand names are implementation-dependent.\n\nExample: define api key and alt name for all methods\n\nsystem_parameters\n  rules:\n    - selector: \"*\"\n      parameters:\n        - name: api_key\n          url_query_parameter: api_key\n        - name: alt\n          http_header: Response-Content-Type\n\nExample: define 2 api key names for a specific method.\n\nsystem_parameters\n  rules:\n    - selector: \"\/ListShelves\"\n      parameters:\n        - name: api_key\n          http_header: Api-Key1\n        - name: api_key\n          http_header: Api-Key2",
+          "type": "array",
+          "items": {
+            "$ref": "SystemParameterRule"
+          }
+        }
+      }
+    },
+    "SystemParameterRule": {
+      "id": "SystemParameterRule",
+      "description": "Define a system parameter rule mapping system parameter definitions to\nmethods.",
+      "type": "object",
+      "properties": {
+        "selector": {
+          "description": "Selects the methods to which this rule applies. Use '*' to indicate all\nmethods in all APIs.\n\nRefer to selector for syntax details.",
+          "type": "string"
+        },
+        "parameters": {
+          "description": "Define parameters. Multiple names may be defined for a parameter.\nFor a given method call, only one of them should be used. If multiple\nnames are used the behavior is implementation-dependent.\nIf none of the specified names are present the behavior is\nparameter-dependent.",
+          "type": "array",
+          "items": {
+            "$ref": "SystemParameter"
+          }
+        }
+      }
+    },
+    "SystemParameter": {
+      "id": "SystemParameter",
+      "description": "Define a parameter's name and location. The parameter may be passed as either\nan HTTP header or a URL query parameter, and if both are passed the behavior\nis implementation-dependent.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "Define the name of the parameter, such as \"api_key\", \"alt\", \"callback\",\netc. It is case sensitive.",
+          "type": "string"
+        },
+        "httpHeader": {
+          "description": "Define the HTTP header name to use for the parameter. It is case\ninsensitive.",
+          "type": "string"
+        },
+        "urlQueryParameter": {
+          "description": "Define the URL query parameter name to use for the parameter. It is case\nsensitive.",
+          "type": "string"
+        }
+      }
+    },
+    "ConfigSource": {
+      "id": "ConfigSource",
+      "description": "Represents a user-specified configuration for a service (as opposed to\nthe generated service config form provided by `google.api.Service`). This is\nmeant to encode service config as manipulated directly by customers,\nrather than the config form resulting from toolchain generation and\nnormalization.",
+      "type": "object",
+      "properties": {
+        "id": {
+          "description": "A unique ID for a specific instance of this message, typically assigned\nby the client for tracking purpose. If empty, the server may choose to\ngenerate one instead.",
+          "type": "string"
+        },
+        "options": {
+          "description": "Options to cover use of source config within ServiceManager and tools",
+          "$ref": "ConfigOptions"
+        },
+        "files": {
+          "description": "Set of source configuration files that are used to generate a service\nconfig (`google.api.Service`).",
+          "type": "array",
+          "items": {
+            "$ref": "ConfigFile"
+          }
+        },
+        "openApiSpec": {
+          "description": "OpenAPI specification",
+          "$ref": "OpenApiSpec"
+        },
+        "protoSpec": {
+          "description": "Protocol buffer API specification",
+          "$ref": "ProtoSpec"
+        }
+      }
+    },
+    "ConfigOptions": {
+      "id": "ConfigOptions",
+      "description": "A set of options to cover use of source config within `ServiceManager`\nand related tools.",
+      "type": "object",
+      "properties": {
+      }
+    },
+    "ConfigFile": {
+      "id": "ConfigFile",
+      "description": "Generic specification of a source configuration file",
+      "type": "object",
+      "properties": {
+        "filePath": {
+          "description": "The file name of the configuration file (full or relative path).",
+          "type": "string"
+        },
+        "contents": {
+          "description": "DEPRECATED. The contents of the configuration file. Use file_contents\nmoving forward.",
+          "type": "string"
+        },
+        "fileContents": {
+          "description": "The bytes that constitute the file.",
+          "type": "string",
+          "format": "byte"
+        },
+        "fileType": {
+          "description": "The kind of configuration file represented. This is used to determine\nthe method for generating `google.api.Service` using this file.",
+          "enumDescriptions": [
+            "Unknown file type.",
+            "YAML-specification of service.",
+            "OpenAPI specification, serialized in JSON.",
+            "OpenAPI specification, serialized in YAML.",
+            "FileDescriptorSet, generated by protoc.\n\nTo generate, use protoc with imports and source info included.\nFor an example test.proto file, the following command would put the value\nin a new file named out.pb.\n\n$protoc --include_imports --include_source_info test.proto -o out.pb"
+          ],
+          "type": "string",
+          "enum": [
+            "FILE_TYPE_UNSPECIFIED",
+            "SERVICE_CONFIG_YAML",
+            "OPEN_API_JSON",
+            "OPEN_API_YAML",
+            "FILE_DESCRIPTOR_SET_PROTO"
+          ]
+        }
+      }
+    },
+    "OpenApiSpec": {
+      "id": "OpenApiSpec",
+      "description": "A collection of OpenAPI specification files.",
+      "type": "object",
+      "properties": {
+        "openApiFiles": {
+          "description": "Individual files.",
+          "type": "array",
+          "items": {
+            "$ref": "ConfigFile"
+          }
+        }
+      }
+    },
+    "ProtoSpec": {
+      "id": "ProtoSpec",
+      "description": "A collection of protocol buffer service specification files.",
+      "type": "object",
+      "properties": {
+        "protoDescriptor": {
+          "description": "A complete descriptor of a protocol buffer specification",
+          "$ref": "ProtoDescriptor"
+        }
+      }
+    },
+    "ProtoDescriptor": {
+      "id": "ProtoDescriptor",
+      "description": "Contains a serialized protoc-generated protocol buffer message descriptor set\nalong with a URL that describes the type of the descriptor message.",
+      "type": "object",
+      "properties": {
+        "typeUrl": {
+          "description": "A URL\/resource name whose content describes the type of the\nserialized protocol buffer message.\n\nOnly 'type.googleapis.com\/google.protobuf.FileDescriptorSet' is supported.\nIf the type_url is not specified,\n'type.googleapis.com\/google.protobuf.FileDescriptorSet' will be assumed.\n",
+          "type": "string"
+        },
+        "value": {
+          "description": "Must be a valid serialized protocol buffer descriptor set.\n\nTo generate, use protoc with imports and source info included.\nFor an example test.proto file, the following command would put the value\nin a new file named descriptor.pb.\n\n$protoc --include_imports --include_source_info test.proto -o descriptor.pb",
+          "type": "string",
+          "format": "byte"
+        }
+      }
+    },
+    "Operation": {
+      "id": "Operation",
+      "description": "This resource represents a long-running operation that is the result of a\nnetwork API call.",
+      "type": "object",
+      "properties": {
+        "name": {
+          "description": "The server-assigned name, which is only unique within the same service that\noriginally returns it. If you use the default HTTP mapping, the\n`name` should have the format of `operations\/some\/unique\/name`.",
+          "type": "string"
+        },
+        "metadata": {
+          "description": "Service-specific metadata associated with the operation.  It typically\ncontains progress information and common metadata such as create time.\nSome services might not provide such metadata.  Any method that returns a\nlong-running operation should document the metadata type, if any.",
+          "type": "object",
+          "additionalProperties": {
+            "type": "any",
+            "description": "Properties of the object. Contains field @type with type URL."
+          }
+        },
+        "done": {
+          "description": "If the value is `false`, it means the operation is still in progress.\nIf true, the operation is completed, and either `error` or `response` is\navailable.",
+          "type": "boolean"
+        },
+        "error": {
+          "description": "The error result of the operation in case of failure.",
+          "$ref": "Status"
+        },
+        "response": {
+          "description": "The normal response of the operation in case of success.  If the original\nmethod returns no data on success, such as `Delete`, the response is\n`google.protobuf.Empty`.  If the original method is standard\n`Get`\/`Create`\/`Update`, the response should be the resource.  For other\nmethods, the response should have the type `XxxResponse`, where `Xxx`\nis the original method name.  For example, if the original method name\nis `TakeSnapshot()`, the inferred response type is\n`TakeSnapshotResponse`.",
+          "type": "object",
+          "additionalProperties": {
+            "type": "any",
+            "description": "Properties of the object. Contains field @type with type URL."
+          }
+        }
+      }
+    },
+    "Status": {
+      "id": "Status",
+      "description": "The `Status` type defines a logical error model that is suitable for different\nprogramming environments, including REST APIs and RPC APIs. It is used by\n[gRPC](https:\/\/github.com\/grpc). The error model is designed to be:\n\n- Simple to use and understand for most users\n- Flexible enough to meet unexpected needs\n\n# Overview\n\nThe `Status` message contains three pieces of data: error code, error message,\nand error details. The error code should be an enum value of\ngoogle.rpc.Code, but it may accept additional error codes if needed.  The\nerror message should be a developer-facing English message that helps\ndevelopers *understand* and *resolve* the error. If a localized user-facing\nerror message is needed, put the localized message in the error details or\nlocalize it in the client. The optional error details may contain arbitrary\ninformation about the error. There is a predefined set of error detail types\nin the package `google.rpc` which can be used for common error conditions.\n\n# Language mapping\n\nThe `Status` message is the logical representation of the error model, but it\nis not necessarily the actual wire format. When the `Status` message is\nexposed in different client libraries and different wire protocols, it can be\nmapped differently. For example, it will likely be mapped to some exceptions\nin Java, but more likely mapped to some error codes in C.\n\n# Other uses\n\nThe error model and the `Status` message can be used in a variety of\nenvironments, either with or without APIs, to provide a\nconsistent developer experience across different environments.\n\nExample uses of this error model include:\n\n- Partial errors. If a service needs to return partial errors to the client,\n    it may embed the `Status` in the normal response to indicate the partial\n    errors.\n\n- Workflow errors. A typical workflow has multiple steps. Each step may\n    have a `Status` message for error reporting purpose.\n\n- Batch operations. If a client uses batch request and batch response, the\n    `Status` message should be used directly inside batch response, one for\n    each error sub-response.\n\n- Asynchronous operations. If an API call embeds asynchronous operation\n    results in its response, the status of those operations should be\n    represented directly using the `Status` message.\n\n- Logging. If some API errors are stored in logs, the message `Status` could\n    be used directly after any stripping needed for security\/privacy reasons.",
+      "type": "object",
+      "properties": {
+        "code": {
+          "description": "The status code, which should be an enum value of google.rpc.Code.",
+          "type": "integer",
+          "format": "int32"
+        },
+        "message": {
+          "description": "A developer-facing error message, which should be in English. Any\nuser-facing error message should be localized and sent in the\ngoogle.rpc.Status.details field, or localized by the client.",
+          "type": "string"
+        },
+        "details": {
+          "description": "A list of messages that carry the error details.  There will be a\ncommon set of message types for APIs to use.",
+          "type": "array",
+          "items": {
+            "type": "object",
+            "additionalProperties": {
+              "type": "any",
+              "description": "Properties of the object. Contains field @type with type URL."
+            }
+          }
+        }
+      }
+    },
+    "ProjectSettings": {
+      "id": "ProjectSettings",
+      "description": "Settings that control how a consumer project uses a service.",
+      "type": "object",
+      "properties": {
+        "serviceName": {
+          "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.",
+          "type": "string"
+        },
+        "consumerProjectId": {
+          "description": "ID for the project consuming this service.",
+          "type": "string"
+        },
+        "usageSettings": {
+          "description": "Settings that control whether this service is usable by the consumer\nproject.",
+          "$ref": "UsageSettings"
+        },
+        "quotaSettings": {
+          "description": "Settings that control how much or how fast the service can be used by the\nconsumer project.",
+          "$ref": "QuotaSettings"
+        },
+        "visibilitySettings": {
+          "description": "Settings that control which features of the service are visible to the\nconsumer project.",
+          "$ref": "VisibilitySettings"
+        },
+        "properties": {
+          "description": "Service-defined per-consumer properties.\n\nA key-value mapping a string key to a google.protobuf.ListValue proto.\nValues in the list are typed as defined in the Service configuration's\nconsumer.properties field.",
+          "type": "object",
+          "additionalProperties": {
+            "type": "array",
+            "items": {
+              "type": "any"
+            }
+          }
+        },
+        "operations": {
+          "description": "Read-only view of pending operations affecting this resource, if requested.",
+          "type": "array",
+          "items": {
+            "$ref": "Operation"
+          }
+        }
+      }
+    },
+    "UsageSettings": {
+      "id": "UsageSettings",
+      "description": "Usage settings for a consumer of a service.",
+      "type": "object",
+      "properties": {
+        "consumerEnableStatus": {
+          "description": "Consumer controlled setting to enable\/disable use of this service by the\nconsumer project. The default value of this is controlled by the service\nconfiguration.",
+          "enumDescriptions": [
+            "The service is disabled.",
+            "The service is enabled."
+          ],
+          "type": "string",
+          "enum": [
+            "DISABLED",
+            "ENABLED"
+          ]
+        }
+      }
+    },
+    "QuotaSettings": {
+      "id": "QuotaSettings",
+      "description": "Per-consumer overrides for quota settings. See google\/api\/quota.proto\nfor the corresponding service configuration which provides the default\nvalues.",
+      "type": "object",
+      "properties": {
+        "consumerOverrides": {
+          "description": "Quota overrides set by the consumer. Consumer overrides will only have\nan effect up to the max_limit specified in the service config, or\nthe producer override, if one exists.\n\nThe key for this map is one of the following:\n\n- '<GROUP_NAME>\/<LIMIT_NAME>' for quotas defined within quota groups,\nwhere GROUP_NAME is the google.api.QuotaGroup.name field and\nLIMIT_NAME is the google.api.QuotaLimit.name field from the service\nconfig.  For example: 'ReadGroup\/ProjectDaily'.\n\n- '<LIMIT_NAME>' for quotas defined without quota groups, where LIMIT_NAME\nis the google.api.QuotaLimit.name field from the service config. For\nexample: 'borrowedCountPerOrganization'.",
+          "type": "object",
+          "additionalProperties": {
+            "$ref": "QuotaLimitOverride"
+          }
+        },
+        "producerOverrides": {
+          "description": "Quota overrides set by the producer. Note that if a consumer override is\nalso specified, then the minimum of the two will be used. This allows\nconsumers to cap their usage voluntarily.\n\nThe key for this map is one of the following:\n\n- '<GROUP_NAME>\/<LIMIT_NAME>' for quotas defined within quota groups,\nwhere GROUP_NAME is the google.api.QuotaGroup.name field and\nLIMIT_NAME is the google.api.QuotaLimit.name field from the service\nconfig.  For example: 'ReadGroup\/ProjectDaily'.\n\n- '<LIMIT_NAME>' for quotas defined without quota groups, where LIMIT_NAME\nis the google.api.QuotaLimit.name field from the service config. For\nexample: 'borrowedCountPerOrganization'.",
+          "type": "object",
+          "additionalProperties": {
+            "$ref": "QuotaLimitOverride"
+          }
+        },
+        "effectiveQuotas": {
+          "description": "The effective quota limits for each group, derived from the service\ndefaults together with any producer or consumer overrides.\nFor each limit, the effective value is the minimum of the producer\nand consumer overrides if either is present, or else the service default\nif neither is present.\nDEPRECATED. Use effective_quota_groups instead.",
+          "type": "object",
+          "additionalProperties": {
+            "$ref": "QuotaLimitOverride"
+          }
+        },
+        "variableTermQuotas": {
+          "description": "Quotas that are active over a specified time period. Only writeable\nby the producer.",
+          "type": "array",
+          "items": {
+            "$ref": "VariableTermQuota"
+          }
+        },
+        "effectiveQuotaGroups": {
+          "description": "Use this field for quota limits defined under quota groups.\nCombines service quota configuration and project-specific settings, as\na map from quota group name to the effective quota information for that\ngroup.\nOutput-only.",
+          "type": "array",
+          "items": {
+            "$ref": "EffectiveQuotaGroup"
+          }
+        }
+      }
+    },
+    "QuotaLimitOverride": {
+      "id": "QuotaLimitOverride",
+      "description": "Specifies a custom quota limit that is applied for this consumer project.\nThis overrides the default value in google.api.QuotaLimit.",
+      "type": "object",
+      "properties": {
+        "limit": {
+          "description": "The new limit for this project.\nMay be -1 (unlimited), 0 (block), or any positive integer.",
+          "type": "string",
+          "format": "int64"
+        },
+        "unlimited": {
+          "description": "Indicates the override is to provide unlimited quota.  If true,\nany value set for limit will be ignored.\nDEPRECATED. Use a limit value of -1 instead.",
+          "type": "boolean"
+        }
+      }
+    },
+    "VariableTermQuota": {
+      "id": "VariableTermQuota",
+      "description": "A variable term quota is a bucket of tokens that is consumed over a\nspecified (usually long) time period. When present, it overrides any\n\"1d\" duration per-project quota specified on the group.\n\nVariable terms run from midnight to midnight, start_date to end_date\n(inclusive) in the America\/Los_Angeles time zone.",
+      "type": "object",
+      "properties": {
+        "groupName": {
+          "description": "The quota group that has the variable term quota applied to it.\nThis must be a google.api.QuotaGroup.name specified in the\nservice configuration.",
+          "type": "string"
+        },
+        "startDate": {
+          "description": "The beginning of the active period for the variable term quota.\nYYYYMMdd date format, e.g. 20140730.",
+          "type": "string"
+        },
+        "endDate": {
+          "description": "The effective end of the active period for the variable term quota\n(inclusive). This must be no more than 5 years after start_date.\nYYYYMMdd date format, e.g. 20140730.",
+          "type": "string"
+        },
+        "displayEndDate": {
+          "description": "The displayed end of the active period for the variable term quota.\nThis may be before the effective end to give the user a grace period.\nYYYYMMdd date format, e.g. 20140730.",
+          "type": "string"
+        },
+        "createTime": {
+          "description": "Time when this variable term quota was created. If multiple quotas\nare simultaneously active, then the quota with the latest create_time\nis the effective one.",
+          "type": "string",
+          "format": "google-datetime"
+        },
+        "limit": {
+          "description": "The number of tokens available during the configured term.",
+          "type": "string",
+          "format": "int64"
+        },
+        "quotaUsage": {
+          "description": "The usage data of this quota.",
+          "$ref": "QuotaUsage"
+        }
+      }
+    },
+    "QuotaUsage": {
+      "id": "QuotaUsage",
+      "description": "Specifies the used quota amount for a quota limit at a particular time.",
+      "type": "object",
+      "properties": {
+        "usage": {
+          "description": "The used quota value at the \"query_time\".",
+          "type": "string",
+          "format": "int64"
+        },
+        "startTime": {
+          "description": "The time the quota duration started.",
+          "type": "string",
+          "format": "google-datetime"
+        },
+        "endTime": {
+          "description": "The time the quota duration ended.",
+          "type": "string",
+          "format": "google-datetime"
+        },
+        "queryTime": {
+          "description": "The time the quota usage data was queried.",
+          "type": "string",
+          "format": "google-datetime"
+        }
+      }
+    },
+    "EffectiveQuotaGroup": {
+      "id": "EffectiveQuotaGroup",
+      "description": "An effective quota group contains both the metadata for a quota group\nas derived from the service config, and the effective limits in that\ngroup as calculated from producer and consumer overrides together with\nservice defaults.",
+      "type": "object",
+      "properties": {
+        "baseGroup": {
+          "description": "The service configuration for this quota group, minus the quota limits,\nwhich are replaced by the effective limits below.",
+          "$ref": "QuotaGroup"
+        },
+        "billingInteraction": {
+
+          "enumDescriptions": [
+            "The interaction between this quota group and the project billing status\nis unspecified.",
+            "This quota group is enforced only when the consumer project\nis not billable.",
+            "This quota group is enforced only when the consumer project\nis billable.",
+            "This quota group is enforced regardless of the consumer project's\nbilling status."
+          ],
+          "type": "string",
+          "enum": [
+            "BILLING_INTERACTION_UNSPECIFIED",
+            "NONBILLABLE_ONLY",
+            "BILLABLE_ONLY",
+            "ANY_BILLING_STATUS"
+          ]
+        },
+        "quotas": {
+          "description": "The usage and limit information for each limit within this quota group.",
+          "type": "array",
+          "items": {
+            "$ref": "QuotaInfo"
+          }
+        }
+      }
+    },
+    "QuotaInfo": {
+      "id": "QuotaInfo",
+      "description": "Metadata about an individual quota, containing usage and limit information.",
+      "type": "object",
+      "properties": {
+        "limit": {
+          "description": "The effective limit for this quota.",
+          "$ref": "EffectiveQuotaLimit"
+        },
+        "currentUsage": {
+          "description": "The usage data for this quota as it applies to the current limit.",
+          "$ref": "QuotaUsage"
+        },
+        "historicalUsage": {
+          "description": "The historical usage data of this quota limit. Currently it is only\navailable for daily quota limit, that is, base_limit.duration = \"1d\".",
+          "type": "array",
+          "items": {
+            "$ref": "QuotaUsage"
+          }
+        }
+      }
+    },
+    "EffectiveQuotaLimit": {
+      "id": "EffectiveQuotaLimit",
+      "description": "An effective quota limit contains the metadata for a quota limit\nas derived from the service config, together with fields that describe\nthe effective limit value and what overrides can be applied to it.",
+      "type": "object",
+      "properties": {
+        "baseLimit": {
+          "description": "The service's configuration for this quota limit.",
+          "$ref": "QuotaLimit"
+        },
+        "key": {
+          "description": "The key used to identify this limit when applying overrides.\nThe consumer_overrides and producer_overrides maps are keyed\nby strings of the form \"QuotaGroupName\/QuotaLimitName\".",
+          "type": "string"
+        },
+        "maxConsumerOverrideAllowed": {
+          "description": "The maximum override value that a consumer may specify.",
+          "type": "string",
+          "format": "int64"
+        },
+        "effectiveLimit": {
+          "description": "The effective limit value, based on the stored producer and consumer\noverrides and the service defaults.",
+          "type": "string",
+          "format": "int64"
+        }
+      }
+    },
+    "VisibilitySettings": {
+      "id": "VisibilitySettings",
+      "description": "Settings that control which features of the service are visible to the\nconsumer project.",
+      "type": "object",
+      "properties": {
+        "visibilityLabels": {
+          "description": "The set of visibility labels that are used to determine what API surface is\nvisible to calls made by this project. The visible surface is a union of\nthe surface features associated with each label listed here, plus the\npublicly visible (unrestricted) surface.\n\nThe service producer may add or remove labels at any time. The service\nconsumer may add a label if the calling user has been granted permission\nto do so by the producer.  The service consumer may also remove any label\nat any time.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "ListServiceConfigsResponse": {
+      "id": "ListServiceConfigsResponse",
+      "description": "Response message for ListServiceConfigs method.",
+      "type": "object",
+      "properties": {
+        "serviceConfigs": {
+          "description": "The list of service config resources.",
+          "type": "array",
+          "items": {
+            "$ref": "Service"
+          }
+        },
+        "nextPageToken": {
+          "description": "The token of the next page of results.",
+          "type": "string"
+        }
+      }
+    },
+    "SubmitConfigSourceRequest": {
+      "id": "SubmitConfigSourceRequest",
+      "description": "Request message for SubmitConfigSource method.",
+      "type": "object",
+      "properties": {
+        "configSource": {
+          "description": "The source configuration for the service.",
+          "$ref": "ConfigSource"
+        },
+        "validateOnly": {
+          "description": "Optional. If set, this will result in the generation of a\n`google.api.Service` configuration based on the `ConfigSource` provided,\nbut the generated config and the sources will NOT be persisted.",
+          "type": "boolean"
+        }
+      }
+    },
+    "ConvertConfigRequest": {
+      "id": "ConvertConfigRequest",
+      "description": "Request message for `ConvertConfig` method.",
+      "type": "object",
+      "properties": {
+        "swaggerSpec": {
+          "description": "The swagger specification for an API.",
+          "$ref": "SwaggerSpec"
+        },
+        "openApiSpec": {
+          "description": "The OpenAPI specification for an API.",
+          "$ref": "OpenApiSpec"
+        },
+        "serviceName": {
+          "description": "The service name to use for constructing the normalized service\nconfiguration equivalent of the provided configuration specification.",
+          "type": "string"
+        },
+        "configSpec": {
+          "description": "Input configuration\nFor this version of API, the supported type is OpenApiSpec",
+          "type": "object",
+          "additionalProperties": {
+            "type": "any",
+            "description": "Properties of the object. Contains field @type with type URL."
+          }
+        }
+      }
+    },
+    "SwaggerSpec": {
+      "id": "SwaggerSpec",
+      "description": "A collection of swagger specification files.",
+      "type": "object",
+      "properties": {
+        "swaggerFiles": {
+          "description": "The individual files.",
+          "type": "array",
+          "items": {
+            "$ref": "File"
+          }
+        }
+      }
+    },
+    "File": {
+      "id": "File",
+      "description": "A single swagger specification file.",
+      "type": "object",
+      "properties": {
+        "path": {
+          "description": "The relative path of the swagger spec file.",
+          "type": "string"
+        },
+        "contents": {
+          "description": "The contents of the swagger spec file.",
+          "type": "string"
+        }
+      }
+    },
+    "ConvertConfigResponse": {
+      "id": "ConvertConfigResponse",
+      "description": "Response message for `ConvertConfig` method.",
+      "type": "object",
+      "properties": {
+        "serviceConfig": {
+          "description": "The service configuration. Not set if errors occurred during conversion.",
+          "$ref": "Service"
+        },
+        "diagnostics": {
+          "description": "Any errors or warnings that occurred during config conversion.",
+          "type": "array",
+          "items": {
+            "$ref": "Diagnostic"
+          }
+        }
+      }
+    },
+    "Diagnostic": {
+      "id": "Diagnostic",
+      "description": "A collection that represents a diagnostic message (error or warning)",
+      "type": "object",
+      "properties": {
+        "location": {
+          "description": "Location of the cause or context of the diagnostic information.",
+          "type": "string"
+        },
+        "kind": {
+          "description": "The kind of diagnostic information provided.",
+          "enumDescriptions": [
+            "Warnings and errors",
+            "Only errors"
+          ],
+          "type": "string",
+          "enum": [
+            "WARNING",
+            "ERROR"
+          ]
+        },
+        "message": {
+          "description": "The string message of the diagnostic information.",
+          "type": "string"
+        }
+      }
+    },
+    "EnableServiceRequest": {
+      "id": "EnableServiceRequest",
+      "description": "Request message for EnableService method.",
+      "type": "object",
+      "properties": {
+        "consumerId": {
+          "description": "The identity of consumer resource which service enablement will be\napplied to.\n\nThe Google Service Management implementation accepts the following\nforms: \"project:<project_id>\", \"project_number:<project_number>\".\n\nNote: this is made compatible with\ngoogle.api.servicecontrol.v1.Operation.consumer_id.",
+          "type": "string"
+        }
+      }
+    },
+    "DisableServiceRequest": {
+      "id": "DisableServiceRequest",
+      "description": "Request message for DisableService method.",
+      "type": "object",
+      "properties": {
+        "consumerId": {
+          "description": "The identity of consumer resource which service disablement will be\napplied to.\n\nThe Google Service Management implementation accepts the following\nforms: \"project:<project_id>\", \"project_number:<project_number>\".\n\nNote: this is made compatible with\ngoogle.api.servicecontrol.v1.Operation.consumer_id.",
+          "type": "string"
+        }
+      }
+    },
+    "ServiceAccessPolicy": {
+      "id": "ServiceAccessPolicy",
+      "description": "Policy describing who can access a service and any visibility labels on that\nservice.",
+      "type": "object",
+      "properties": {
+        "serviceName": {
+          "description": "The service protected by this policy.",
+          "type": "string"
+        },
+        "accessList": {
+          "description": "ACL for access to the unrestricted surface of the service.",
+          "$ref": "ServiceAccessList"
+        },
+        "visibilityLabelAccessLists": {
+          "description": "ACLs for access to restricted parts of the service.  The map key is the\nvisibility label that is being controlled.  Note that access to any label\nalso implies access to the unrestricted surface.",
+          "type": "object",
+          "additionalProperties": {
+            "$ref": "ServiceAccessList"
+          }
+        }
+      }
+    },
+    "ServiceAccessList": {
+      "id": "ServiceAccessList",
+      "description": "List of users and groups that are granted access to a service or visibility\nlabel.",
+      "type": "object",
+      "properties": {
+        "members": {
+          "description": "Members that are granted access.\n\n- \"user:{$user_email}\" - Grant access to an individual user\n- \"group:{$group_email}\" - Grant access to direct members of the group\n- \"domain:{$domain}\" - Grant access to all members of the domain. For now,\n     domain membership check will be similar to Devconsole\/TT check:\n     compare domain part of the user email to configured domain name.\n     When IAM integration is complete, this will be replaced with IAM\n     check.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "QueryUserAccessResponse": {
+      "id": "QueryUserAccessResponse",
+      "description": "Response message for QueryUserAccess method.",
+      "type": "object",
+      "properties": {
+        "canAccessService": {
+          "description": "True if the user can access the service and any unrestricted API surface.",
+          "type": "boolean"
+        },
+        "accessibleVisibilityLabels": {
+          "description": "Any visibility labels on the service that are accessible by the user.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        }
+      }
+    },
+    "CustomerSettings": {
+      "id": "CustomerSettings",
+      "description": "Settings that control how a customer (identified by a billing account) uses\na service",
+      "type": "object",
+      "properties": {
+        "serviceName": {
+          "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.",
+          "type": "string"
+        },
+        "customerId": {
+          "description": "ID for the customer that consumes the service (see above).\nThe supported types of customers are:\n\n1. domain:{domain}\nA Google Apps domain name. For example, google.com.\n\n2. billingAccount:{billing_account_id}\nA Google Cloud Platform billing account. For example, 123456-7890ab-cdef12.\n",
+          "type": "string"
+        },
+        "quotaSettings": {
+          "description": "Settings that control how much or how fast the service can be used by the\nconsumer projects owned by the customer collectively.",
+          "$ref": "QuotaSettings"
+        }
+      }
+    },
+    "CompositeOperationMetadata": {
+      "id": "CompositeOperationMetadata",
+      "description": "Metadata for composite operations.",
+      "type": "object",
+      "properties": {
+        "childOperations": {
+          "description": "The child operations. The details of the asynchronous\nchild operations are stored in a separate row and not in this\nmetadata. Only the operation name is stored here.",
+          "type": "array",
+          "items": {
+            "$ref": "Operation"
+          }
+        },
+        "originalRequest": {
+          "description": "Original request that triggered this operation.",
+          "type": "object",
+          "additionalProperties": {
+            "type": "any",
+            "description": "Properties of the object. Contains field @type with type URL."
+          }
+        },
+        "responseFieldMasks": {
+          "description": "Defines which part of the response a child operation will contribute.\nEach key of the map is the name of a child operation. Each value is a\nfield mask that identifies what that child operation contributes to the\nresponse, for example, \"quota_settings\", \"visibility_settings\", etc.",
+          "type": "object",
+          "additionalProperties": {
+            "type": "string",
+            "format": "google-fieldmask"
+          }
+        },
+        "persisted": {
+          "description": "Indicates whether the requested state change has been persisted. Once this\nfield is set, it is guaranteed to propagate to all backends eventually, but\nit may not be visible immediately. Clients that are not concerned with\nwaiting on propagation can stop polling the operation once the persisted\nfield is set",
+          "type": "boolean"
+        }
+      }
+    },
+    "OperationMetadata": {
+      "id": "OperationMetadata",
+      "description": "The metadata associated with a long running operation resource.",
+      "type": "object",
+      "properties": {
+        "resourceNames": {
+          "description": "The full name of the resources that this operation is directly\nassociated with.",
+          "type": "array",
+          "items": {
+            "type": "string"
+          }
+        },
+        "steps": {
+          "description": "Detailed status information for each step. The order is undetermined.",
+          "type": "array",
+          "items": {
+            "$ref": "Step"
+          }
+        },
+        "progressPercentage": {
+          "description": "Percentage of completion of this operation, ranging from 0 to 100.",
+          "type": "integer",
+          "format": "int32"
+        },
+        "startTime": {
+          "description": "The start time of the operation.",
+          "type": "string",
+          "format": "google-datetime"
+        }
+      }
+    },
+    "Step": {
+      "id": "Step",
+      "description": "Represents the status of one operation step.",
+      "type": "object",
+      "properties": {
+        "description": {
+          "description": "The short description of the step.",
+          "type": "string"
+        },
+        "status": {
+          "description": "The status code.",
+          "enumDescriptions": [
+            "Unspecified code.",
+            "The step has completed without errors.",
+            "The step has not started yet.",
+            "The step is in progress.",
+            "The step has completed with errors."
+          ],
+          "type": "string",
+          "enum": [
+            "STATUS_UNSPECIFIED",
+            "DONE",
+            "NOT_STARTED",
+            "IN_PROGRESS",
+            "FAILED"
+          ]
+        }
+      }
+    }
+  },
+  "resources": {
+    "services": {
+      "methods": {
+        "list": {
+          "id": "servicemanagement.services.list",
+          "path": "v1/services",
+          "flatPath": "v1/services",
+          "httpMethod": "GET",
+          "description": "Lists all managed services. If the `consumer_project_id` is specified,\nthe project's settings for the specified service are also returned.",
+          "parameters": {
+            "producerProjectId": {
+              "description": "Include services produced by the specified project.",
+              "location": "query",
+              "type": "string"
+            },
+            "category": {
+              "description": "Include services only in the specified category. Supported categories are\nservicemanagement.googleapis.com\/categories\/google-services or\nservicemanagement.googleapis.com\/categories\/play-games.",
+              "location": "query",
+              "type": "string"
+            },
+            "consumerProjectId": {
+              "description": "Include services consumed by the specified project.\n\nIf project_settings is expanded, then this field controls which project\nproject_settings is populated for.",
+              "location": "query",
+              "type": "string"
+            },
+            "expand": {
+              "description": "Fields to expand in any results.  By default, the following fields\nare not fully included in list results:\n- `operations`\n- `project_settings`\n- `project_settings.operations`\n- `quota_usage` (It requires `project_settings`)",
+              "location": "query",
+              "type": "string",
+              "format": "google-fieldmask"
+            },
+            "pageSize": {
+              "description": "Requested size of the next page of data.",
+              "location": "query",
+              "type": "integer",
+              "format": "int32"
+            },
+            "pageToken": {
+              "description": "Token identifying which result to start with; returned by a previous list\ncall.",
+              "location": "query",
+              "type": "string"
+            }
+          },
+          "parameterOrder": [
+          ],
+          "response": {
+            "$ref": "ListServicesResponse"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "get": {
+          "id": "servicemanagement.services.get",
+          "path": "v1/services/{serviceName}",
+          "flatPath": "v1/services/{serviceName}",
+          "httpMethod": "GET",
+          "description": "Gets a managed service. If the `consumer_project_id` is specified,\nthe project's settings for the specified service are also returned.",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            },
+            "expand": {
+              "description": "Fields to expand in any results.  By default, the following fields\nare not present in the result:\n- `operations`\n- `project_settings`\n- `project_settings.operations`\n- `quota_usage` (It requires `project_settings`)\n- `historical_quota_usage` (It requires `project_settings`)",
+              "location": "query",
+              "type": "string",
+              "format": "google-fieldmask"
+            },
+            "consumerProjectId": {
+              "description": "If project_settings is expanded, return settings for the specified\nconsumer project.",
+              "location": "query",
+              "type": "string"
+            },
+            "view": {
+              "description": "If project_settings is expanded, request only fields for the specified\nview.",
+              "location": "query",
+              "type": "string",
+              "enum": [
+                "PROJECT_SETTINGS_VIEW_UNSPECIFIED",
+                "CONSUMER_VIEW",
+                "PRODUCER_VIEW",
+                "ALL"
+              ]
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "response": {
+            "$ref": "ManagedService"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "create": {
+          "id": "servicemanagement.services.create",
+          "path": "v1/services",
+          "flatPath": "v1/services",
+          "httpMethod": "POST",
+          "description": "Creates a new managed service.\n\nOperation<response: ManagedService>",
+          "parameters": {
+          },
+          "parameterOrder": [
+          ],
+          "request": {
+            "$ref": "ManagedService"
+          },
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "delete": {
+          "id": "servicemanagement.services.delete",
+          "path": "v1/services/{serviceName}",
+          "flatPath": "v1/services/{serviceName}",
+          "httpMethod": "DELETE",
+          "description": "Deletes a managed service.\n\nOperation<response: google.protobuf.Empty>",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "getConfig": {
+          "id": "servicemanagement.services.getConfig",
+          "path": "v1/services/{serviceName}/config",
+          "flatPath": "v1/services/{serviceName}/config",
+          "httpMethod": "GET",
+          "description": "Gets a service config (version) for a managed service. If `config_id` is\nnot specified, the latest service config will be returned.",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            },
+            "configId": {
+              "description": "The id of the service config resource.\nOptional. If it is not specified, the latest version of config will be\nreturned.",
+              "location": "query",
+              "type": "string"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "response": {
+            "$ref": "Service"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "convertConfig": {
+          "id": "servicemanagement.services.convertConfig",
+          "path": "v1/services:convertConfig",
+          "flatPath": "v1/services:convertConfig",
+          "httpMethod": "POST",
+          "description": "DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide\nconfig conversion moving forward.\n\nConverts an API specification (e.g. Swagger spec) to an\nequivalent `google.api.Service`.",
+          "parameters": {
+          },
+          "parameterOrder": [
+          ],
+          "request": {
+            "$ref": "ConvertConfigRequest"
+          },
+          "response": {
+            "$ref": "ConvertConfigResponse"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "enable": {
+          "id": "servicemanagement.services.enable",
+          "path": "v1/services/{serviceName}:enable",
+          "flatPath": "v1/services/{serviceName}:enable",
+          "httpMethod": "POST",
+          "description": "Enable a managed service for a project with default setting.\nIf the managed service has dependencies, they will be enabled as well.\n\nOperation<response: EnableServiceResponse>\n",
+          "parameters": {
+            "serviceName": {
+              "description": "Name of the service to enable. Specifying an unknown service name will\ncause the request to fail.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "request": {
+            "$ref": "EnableServiceRequest"
+          },
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "disable": {
+          "id": "servicemanagement.services.disable",
+          "path": "v1/services/{serviceName}:disable",
+          "flatPath": "v1/services/{serviceName}:disable",
+          "httpMethod": "POST",
+          "description": "Disable a managed service for a project.\nGoogle Service Management will only disable the managed service even if\nthere are other services that depend on the managed service.\n\nOperation<response: DisableServiceResponse>\n",
+          "parameters": {
+            "serviceName": {
+              "description": "Name of the service to disable. Specifying an unknown service name\nwill cause the request to fail.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "request": {
+            "$ref": "DisableServiceRequest"
+          },
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "update": {
+          "id": "servicemanagement.services.update",
+          "path": "v1/services/{serviceName}",
+          "flatPath": "v1/services/{serviceName}",
+          "httpMethod": "PUT",
+          "description": "Updates the configuration of a service.  If the specified service does not\nalready exist, then it is created.\n\nOperation<response: ManagedService>",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            },
+            "updateMask": {
+              "description": "A mask specifying which fields to update.\nUpdate mask has been deprecated on UpdateService service method. Please\nuse PatchService method instead to do partial updates.",
+              "location": "query",
+              "type": "string",
+              "format": "google-fieldmask"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "request": {
+            "$ref": "ManagedService"
+          },
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "patch": {
+          "id": "servicemanagement.services.patch",
+          "path": "v1/services/{serviceName}",
+          "flatPath": "v1/services/{serviceName}",
+          "httpMethod": "PATCH",
+          "description": "Updates the specified subset of the configuration. If the specified service\ndoes not exist, the patch operation fails.\n\nOperation<response: ManagedService>",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            },
+            "updateMask": {
+              "description": "A mask specifying which fields to update.",
+              "location": "query",
+              "type": "string",
+              "format": "google-fieldmask"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "request": {
+            "$ref": "ManagedService"
+          },
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "updateConfig": {
+          "id": "servicemanagement.services.updateConfig",
+          "path": "v1/services/{serviceName}/config",
+          "flatPath": "v1/services/{serviceName}/config",
+          "httpMethod": "PUT",
+          "description": "Updates the specified subset of the service resource. Equivalent to\ncalling `UpdateService` with only the `service_config` field updated.\n\nOperation<response: google.api.Service>",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            },
+            "updateMask": {
+              "description": "A mask specifying which fields to update.\nUpdate mask has been deprecated on UpdateServiceConfig service method.\nPlease use PatchServiceConfig method instead to do partial updates.",
+              "location": "query",
+              "type": "string",
+              "format": "google-fieldmask"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "request": {
+            "$ref": "Service"
+          },
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "patchConfig": {
+          "id": "servicemanagement.services.patchConfig",
+          "path": "v1/services/{serviceName}/config",
+          "flatPath": "v1/services/{serviceName}/config",
+          "httpMethod": "PATCH",
+          "description": "Updates the specified subset of the service resource. Equivalent to\ncalling `PatchService` with only the `service_config` field updated.\n\nOperation<response: google.api.Service>",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            },
+            "updateMask": {
+              "description": "A mask specifying which fields to update.",
+              "location": "query",
+              "type": "string",
+              "format": "google-fieldmask"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "request": {
+            "$ref": "Service"
+          },
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "getAccessPolicy": {
+          "id": "servicemanagement.services.getAccessPolicy",
+          "path": "v1/services/{serviceName}/accessPolicy",
+          "flatPath": "v1/services/{serviceName}/accessPolicy",
+          "httpMethod": "GET",
+          "description": "Producer method to retrieve current policy.",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  For example: `example.googleapis.com`.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "response": {
+            "$ref": "ServiceAccessPolicy"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        },
+        "updateAccessPolicy": {
+          "id": "servicemanagement.services.updateAccessPolicy",
+          "path": "v1/services/{serviceName}/accessPolicy",
+          "flatPath": "v1/services/{serviceName}/accessPolicy",
+          "httpMethod": "PUT",
+          "description": "Producer method to update the current policy.  This method will return an\nerror if the policy is too large (more than 50 entries across all lists).",
+          "parameters": {
+            "serviceName": {
+              "description": "The name of the service.  For example: `example.googleapis.com`.\nIf set, policy's service_name should be same as this one.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            }
+          },
+          "parameterOrder": [
+            "serviceName"
+          ],
+          "request": {
+            "$ref": "ServiceAccessPolicy"
+          },
+          "response": {
+            "$ref": "ServiceAccessPolicy"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        }
+      }
+      ,
+      "resources": {
+        "configs": {
+          "methods": {
+            "list": {
+              "id": "servicemanagement.services.configs.list",
+              "path": "v1/services/{serviceName}/configs",
+              "flatPath": "v1/services/{serviceName}/configs",
+              "httpMethod": "GET",
+              "description": "Lists the history of the service config for a managed service,\nfrom the newest to the oldest.\n",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "pageToken": {
+                  "description": "The token of the page to retrieve.",
+                  "location": "query",
+                  "type": "string"
+                },
+                "pageSize": {
+                  "description": "The max number of items to include in the response list.",
+                  "location": "query",
+                  "type": "integer",
+                  "format": "int32"
+                }
+              },
+              "parameterOrder": [
+                "serviceName"
+              ],
+              "response": {
+                "$ref": "ListServiceConfigsResponse"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            },
+            "get": {
+              "id": "servicemanagement.services.configs.get",
+              "path": "v1/services/{serviceName}/configs/{configId}",
+              "flatPath": "v1/services/{serviceName}/configs/{configId}",
+              "httpMethod": "GET",
+              "description": "Gets a service config (version) for a managed service. If `config_id` is\nnot specified, the latest service config will be returned.",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "configId": {
+                  "description": "The id of the service config resource.\nOptional. If it is not specified, the latest version of config will be\nreturned.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "serviceName",
+                "configId"
+              ],
+              "response": {
+                "$ref": "Service"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            },
+            "create": {
+              "id": "servicemanagement.services.configs.create",
+              "path": "v1/services/{serviceName}/configs",
+              "flatPath": "v1/services/{serviceName}/configs",
+              "httpMethod": "POST",
+              "description": "Creates a new service config (version) for a managed service. This method\nonly stores the service config, but does not apply the service config to\nany backend services.\n",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "serviceName"
+              ],
+              "request": {
+                "$ref": "Service"
+              },
+              "response": {
+                "$ref": "Service"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            },
+            "submit": {
+              "id": "servicemanagement.services.configs.submit",
+              "path": "v1/services/{serviceName}/configs:submit",
+              "flatPath": "v1/services/{serviceName}/configs:submit",
+              "httpMethod": "POST",
+              "description": "Creates a new service config (version) for a managed service based on\nuser-supplied configuration sources files (for example: OpenAPI\nSpecification). This method stores the source configurations as well as the\ngenerated service config. It does NOT apply the service config to any\nbackend services.\n\nOperation<response: SubmitConfigSourceResponse>\n",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "serviceName"
+              ],
+              "request": {
+                "$ref": "SubmitConfigSourceRequest"
+              },
+              "response": {
+                "$ref": "Operation"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            }
+          }
+        },
+        "accessPolicy": {
+          "methods": {
+            "query": {
+              "id": "servicemanagement.services.accessPolicy.query",
+              "path": "v1/services/{serviceName}/accessPolicy:query",
+              "flatPath": "v1/services/{serviceName}/accessPolicy:query",
+              "httpMethod": "POST",
+              "description": "Method to query the accessibility of a service and any associated\nvisibility labels for a specified user.\n\nMembers of the producer project may call this method and specify any user.\n\nAny user may call this method, but must specify their own email address.\nIn this case the method will return NOT_FOUND if the user has no access to\nthe service.",
+              "parameters": {
+                "serviceName": {
+                  "description": "The service to query access for.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "userEmail": {
+                  "description": "The user to query access for.",
+                  "location": "query",
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "serviceName"
+              ],
+              "response": {
+                "$ref": "QueryUserAccessResponse"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            }
+          }
+        },
+        "customerSettings": {
+          "methods": {
+            "get": {
+              "id": "servicemanagement.services.customerSettings.get",
+              "path": "v1/services/{serviceName}/customerSettings/{customerId}",
+              "flatPath": "v1/services/{serviceName}/customerSettings/{customerId}",
+              "httpMethod": "GET",
+              "description": "Retrieves the settings that control the specified customer's usage of the\nservice.",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`. This field is\nrequired.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "customerId": {
+                  "description": "ID for the customer. See the comment for `CustomerSettings.customer_id`\nfield of message for its format. This field is required.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "expand": {
+                  "description": "Fields to expand in any results.",
+                  "location": "query",
+                  "type": "string",
+                  "format": "google-fieldmask"
+                },
+                "view": {
+                  "description": "Request only fields for the specified view.",
+                  "location": "query",
+                  "type": "string",
+                  "enum": [
+                    "PROJECT_SETTINGS_VIEW_UNSPECIFIED",
+                    "CONSUMER_VIEW",
+                    "PRODUCER_VIEW",
+                    "ALL"
+                  ]
+                }
+              },
+              "parameterOrder": [
+                "serviceName",
+                "customerId"
+              ],
+              "response": {
+                "$ref": "CustomerSettings"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            },
+            "patch": {
+              "id": "servicemanagement.services.customerSettings.patch",
+              "path": "v1/services/{serviceName}/customerSettings/{customerId}",
+              "flatPath": "v1/services/{serviceName}/customerSettings/{customerId}",
+              "httpMethod": "PATCH",
+              "description": "Updates specified subset of the settings that control the specified\ncustomer's usage of the service.  Attempts to update a field not\ncontrolled by the caller will result in an access denied error.\n\nOperation<response: CustomerSettings>",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`. This field is\nrequired.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "customerId": {
+                  "description": "ID for the customer. See the comment for `CustomerSettings.customer_id`\nfield of message for its format. This field is required.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "updateMask": {
+                  "description": "The field mask specifying which fields are to be updated.",
+                  "location": "query",
+                  "type": "string",
+                  "format": "google-fieldmask"
+                }
+              },
+              "parameterOrder": [
+                "serviceName",
+                "customerId"
+              ],
+              "request": {
+                "$ref": "CustomerSettings"
+              },
+              "response": {
+                "$ref": "Operation"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            }
+          }
+        },
+        "projectSettings": {
+          "methods": {
+            "get": {
+              "id": "servicemanagement.services.projectSettings.get",
+              "path": "v1/services/{serviceName}/projectSettings/{consumerProjectId}",
+              "flatPath": "v1/services/{serviceName}/projectSettings/{consumerProjectId}",
+              "httpMethod": "GET",
+              "description": "Retrieves the settings that control the specified consumer project's usage\nof the service.",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "consumerProjectId": {
+                  "description": "The project ID of the consumer.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "expand": {
+                  "description": "Fields to expand in any results.  By default, the following fields\nare not present in the result:\n- `operations`\n- `quota_usage`",
+                  "location": "query",
+                  "type": "string",
+                  "format": "google-fieldmask"
+                },
+                "view": {
+                  "description": "Request only the fields for the specified view.",
+                  "location": "query",
+                  "type": "string",
+                  "enum": [
+                    "PROJECT_SETTINGS_VIEW_UNSPECIFIED",
+                    "CONSUMER_VIEW",
+                    "PRODUCER_VIEW",
+                    "ALL"
+                  ]
+                }
+              },
+              "parameterOrder": [
+                "serviceName",
+                "consumerProjectId"
+              ],
+              "response": {
+                "$ref": "ProjectSettings"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            },
+            "update": {
+              "id": "servicemanagement.services.projectSettings.update",
+              "path": "v1/services/{serviceName}/projectSettings/{consumerProjectId}",
+              "flatPath": "v1/services/{serviceName}/projectSettings/{consumerProjectId}",
+              "httpMethod": "PUT",
+              "description": "NOTE: Currently unsupported.  Use PatchProjectSettings instead.\n\nUpdates the settings that control the specified consumer project's usage\nof the service.  Attempts to update a field not controlled by the caller\nwill result in an access denied error.\n\nOperation<response: ProjectSettings>",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "consumerProjectId": {
+                  "description": "The project ID of the consumer.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                }
+              },
+              "parameterOrder": [
+                "serviceName",
+                "consumerProjectId"
+              ],
+              "request": {
+                "$ref": "ProjectSettings"
+              },
+              "response": {
+                "$ref": "Operation"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            },
+            "patch": {
+              "id": "servicemanagement.services.projectSettings.patch",
+              "path": "v1/services/{serviceName}/projectSettings/{consumerProjectId}",
+              "flatPath": "v1/services/{serviceName}/projectSettings/{consumerProjectId}",
+              "httpMethod": "PATCH",
+              "description": "Updates specified subset of the settings that control the specified\nconsumer project's usage of the service.  Attempts to update a field not\ncontrolled by the caller will result in an access denied error.\n\nOperation<response: ProjectSettings>",
+              "parameters": {
+                "serviceName": {
+                  "description": "The name of the service.  See the `ServiceManager` overview for naming\nrequirements.  For example: `example.googleapis.com`.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "consumerProjectId": {
+                  "description": "The project ID of the consumer.",
+                  "location": "path",
+                  "required": true,
+                  "type": "string"
+                },
+                "updateMask": {
+                  "description": "The field mask specifying which fields are to be updated.",
+                  "location": "query",
+                  "type": "string",
+                  "format": "google-fieldmask"
+                }
+              },
+              "parameterOrder": [
+                "serviceName",
+                "consumerProjectId"
+              ],
+              "request": {
+                "$ref": "ProjectSettings"
+              },
+              "response": {
+                "$ref": "Operation"
+              },
+              "scopes": [
+                "https://www.googleapis.com/auth/cloud-platform",
+                "https://www.googleapis.com/auth/service.management"
+              ]
+            }
+          }
+        }
+      }
+    },
+    "v1": {
+      "methods": {
+        "convertConfig": {
+          "id": "servicemanagement.convertConfig",
+          "path": "v1:convertConfig",
+          "flatPath": "v1:convertConfig",
+          "httpMethod": "POST",
+          "description": "DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide\nconfig conversion moving forward.\n\nConverts an API specification (e.g. Swagger spec) to an\nequivalent `google.api.Service`.",
+          "parameters": {
+          },
+          "parameterOrder": [
+          ],
+          "request": {
+            "$ref": "ConvertConfigRequest"
+          },
+          "response": {
+            "$ref": "ConvertConfigResponse"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        }
+      }
+    },
+    "operations": {
+      "methods": {
+        "get": {
+          "id": "servicemanagement.operations.get",
+          "path": "v1/operations/{operationsId}",
+          "flatPath": "v1/operations/{operationsId}",
+          "httpMethod": "GET",
+          "description": "Gets the latest state of a long-running operation.  Clients can use this\nmethod to poll the operation result at intervals as recommended by the API\nservice.",
+          "parameters": {
+            "operationsId": {
+              "description": "Part of `name`. The name of the operation resource.",
+              "location": "path",
+              "required": true,
+              "type": "string"
+            }
+          },
+          "parameterOrder": [
+            "operationsId"
+          ],
+          "response": {
+            "$ref": "Operation"
+          },
+          "scopes": [
+            "https://www.googleapis.com/auth/cloud-platform",
+            "https://www.googleapis.com/auth/service.management"
+          ]
+        }
+      }
+    }
+  },
+  "basePath": ""
+}
diff --git a/samples/servicemanagement_sample/servicemanagement_v1/__init__.py b/samples/servicemanagement_sample/servicemanagement_v1/__init__.py
new file mode 100644
index 0000000..2816da8
--- /dev/null
+++ b/samples/servicemanagement_sample/servicemanagement_v1/__init__.py
@@ -0,0 +1,5 @@
+"""Package marker file."""
+
+import pkgutil
+
+__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1.py b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1.py
new file mode 100644
index 0000000..d1a4ab8
--- /dev/null
+++ b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1.py
@@ -0,0 +1,1520 @@
+#!/usr/bin/env python
+"""CLI for servicemanagement, version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+import code
+import os
+import platform
+import sys
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+
+from google.apputils import appcommands
+import gflags as flags
+
+import apitools.base.py as apitools_base
+from apitools.base.py import cli as apitools_base_cli
+import servicemanagement_v1_client as client_lib
+import servicemanagement_v1_messages as messages
+
+
def _DeclareServicemanagementFlags():
  """Declare global flags in an idempotent way."""
  # If one of our flags is already registered, they all are; bail out so a
  # second call does not raise a duplicate-flag error.
  if 'api_endpoint' in flags.FLAGS:
    return
  flags.DEFINE_string(
      'api_endpoint',
      u'https://servicemanagement.googleapis.com/',
      'URL of the API endpoint to use.',
      short_name='servicemanagement_url')
  flags.DEFINE_string(
      'history_file',
      u'~/.servicemanagement.v1.history',
      'File with interactive shell history.')
  flags.DEFINE_multistring(
      'add_header', [],
      'Additional http headers (as key=value strings). '
      'Can be specified multiple times.')
  flags.DEFINE_string(
      'service_account_json_keyfile', '',
      'Filename for a JSON service account key downloaded'
      ' from the Developer Console.')
  # Enum-valued global query parameters.
  flags.DEFINE_enum(
      'f__xgafv', u'_1', [u'_1', u'_2'], u'V1 error format.')
  flags.DEFINE_enum(
      'alt', u'json', [u'json', u'media', u'proto'],
      u'Data format for response.')
  # Boolean-valued global query parameters.
  flags.DEFINE_boolean('pp', 'True', u'Pretty-print response.')
  flags.DEFINE_boolean(
      'prettyPrint', 'True',
      u'Returns response with indentations and line breaks.')
  # String-valued global query parameters, registered table-style.
  for flag_name, help_text in [
      ('access_token', u'OAuth access token.'),
      ('bearer_token', u'OAuth bearer token.'),
      ('callback', u'JSONP'),
      ('fields',
       u'Selector specifying which fields to include in a partial response.'),
      ('key',
       u'API key. Your API key identifies your project and provides you with '
       u'API access, quota, and reports. Required unless you provide an OAuth '
       u'2.0 token.'),
      ('oauth_token', u'OAuth 2.0 token for the current user.'),
      ('quotaUser',
       u'Available to use for quota purposes for server-side applications. Can'
       u' be any arbitrary string assigned to a user, but should not exceed 40'
       u' characters.'),
      ('trace',
       'A tracing token of the form "token:<tokenid>" to include in api '
       'requests.'),
      ('uploadType',
       u'Legacy upload protocol for media (e.g. "media", "multipart").'),
      ('upload_protocol',
       u'Upload protocol for media (e.g. "raw", "multipart").'),
  ]:
    flags.DEFINE_string(flag_name, None, help_text)
+
+
+FLAGS = flags.FLAGS
+apitools_base_cli.DeclareBaseFlags()
+_DeclareServicemanagementFlags()
+
+
def GetGlobalParamsFromFlags():
  """Return a StandardQueryParameters based on flags."""
  result = messages.StandardQueryParameters()
  # Enum-typed parameters must be converted to their enum classes.
  if FLAGS['f__xgafv'].present:
    result.f__xgafv = messages.StandardQueryParameters.FXgafvValueValuesEnum(
        FLAGS.f__xgafv)
  if FLAGS['alt'].present:
    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
  # Boolean parameters pass through unchanged.
  for bool_name in ('pp', 'prettyPrint'):
    if FLAGS[bool_name].present:
      setattr(result, bool_name, getattr(FLAGS, bool_name))
  # String parameters are decoded from the command line's utf8 bytes.
  for str_name in ('access_token', 'bearer_token', 'callback', 'fields',
                   'key', 'oauth_token', 'quotaUser', 'trace', 'uploadType',
                   'upload_protocol'):
    if FLAGS[str_name].present:
      setattr(result, str_name, getattr(FLAGS, str_name).decode('utf8'))
  return result
+
+
+def GetClientFromFlags():
+  """Return a client object, configured from flags."""
+  log_request = FLAGS.log_request or FLAGS.log_request_response
+  log_response = FLAGS.log_response or FLAGS.log_request_response
+  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
+  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
+  credentials_args = {
+      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
+  }
+  try:
+    client = client_lib.ServicemanagementV1(
+        api_endpoint, log_request=log_request,
+        log_response=log_response,
+        credentials_args=credentials_args,
+        additional_http_headers=additional_http_headers)
+  except apitools_base.CredentialsError as e:
+    print 'Error creating credentials: %s' % e
+    sys.exit(1)
+  return client
+
+
class PyShell(appcommands.Cmd):

  def Run(self, _):
    """Run an interactive python shell with the client."""
    client = GetClientFromFlags()
    query_params = GetGlobalParamsFromFlags()
    # Forward any explicitly-set global params to the client.
    for field in query_params.all_fields():
      assigned = query_params.get_assigned_value(field.name)
      if assigned != field.default:
        client.AddGlobalParam(field.name, assigned)
    shell_locals = {
        'apitools_base': apitools_base,
        'client': client,
        'client_lib': client_lib,
        'messages': messages,
    }
    # A readline-backed console with persistent history is only available on
    # Linux; fall back to the plain stdlib console elsewhere.
    if platform.system() == 'Linux':
      console = apitools_base_cli.ConsoleWithReadline(
          shell_locals, histfile=FLAGS.history_file)
    else:
      console = code.InteractiveConsole(shell_locals)
    banner = """
           == servicemanagement interactive console ==
                 client: a servicemanagement client
          apitools_base: base apitools module
         messages: the generated messages module
    """
    try:
      console.interact(banner)
    except SystemExit as e:
      return e.code
+
+
+class OperationsGet(apitools_base_cli.NewCmd):
+  """Command wrapping operations.Get."""
+
+  usage = """operations_get <operationsId>"""
+
+  def __init__(self, name, fv):
+    super(OperationsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, operationsId):
+    """Gets the latest state of a long-running operation.  Clients can use
+    this method to poll the operation result at intervals as recommended by
+    the API service.
+
+    Args:
+      operationsId: Part of `name`. The name of the operation resource.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementOperationsGetRequest(
+        operationsId=operationsId.decode('utf8'),
+        )
+    result = client.operations.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesConvertConfig(apitools_base_cli.NewCmd):
+  """Command wrapping services.ConvertConfig."""
+
+  usage = """services_convertConfig"""
+
+  def __init__(self, name, fv):
+    super(ServicesConvertConfig, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'configSpec',
+        None,
+        u'Input configuration For this version of API, the supported type is '
+        u'OpenApiSpec',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'openApiSpec',
+        None,
+        u'The OpenAPI specification for an API.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'serviceName',
+        None,
+        u'The service name to use for constructing the normalized service '
+        u'configuration equivalent of the provided configuration '
+        u'specification.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'swaggerSpec',
+        None,
+        u'The swagger specification for an API.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide
+    config conversion moving forward.  Converts an API specification (e.g.
+    Swagger spec) to an equivalent `google.api.Service`.
+
+    Flags:
+      configSpec: Input configuration For this version of API, the supported
+        type is OpenApiSpec
+      openApiSpec: The OpenAPI specification for an API.
+      serviceName: The service name to use for constructing the normalized
+        service configuration equivalent of the provided configuration
+        specification.
+      swaggerSpec: The swagger specification for an API.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ConvertConfigRequest(
+        )
+    if FLAGS['configSpec'].present:
+      request.configSpec = apitools_base.JsonToMessage(messages.ConvertConfigRequest.ConfigSpecValue, FLAGS.configSpec)
+    if FLAGS['openApiSpec'].present:
+      request.openApiSpec = apitools_base.JsonToMessage(messages.OpenApiSpec, FLAGS.openApiSpec)
+    if FLAGS['serviceName'].present:
+      request.serviceName = FLAGS.serviceName.decode('utf8')
+    if FLAGS['swaggerSpec'].present:
+      request.swaggerSpec = apitools_base.JsonToMessage(messages.SwaggerSpec, FLAGS.swaggerSpec)
+    result = client.services.ConvertConfig(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesCreate(apitools_base_cli.NewCmd):
+  """Command wrapping services.Create."""
+
+  usage = """services_create"""
+
+  def __init__(self, name, fv):
+    super(ServicesCreate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'configSource',
+        None,
+        u'User-supplied source configuration for the service. This is '
+        u'distinct from the generated configuration provided in '
+        u'`google.api.Service`. This is NOT populated on GetService calls at '
+        u'the moment. NOTE: Any upsert operation that contains both a '
+        u'service_config and a config_source is considered invalid and will '
+        u'result in an error being returned.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'A server-assigned monotonically increasing number that changes '
+        u'whenever a mutation is made to the `ManagedService` or any of its '
+        u'components via the `ServiceManager` API.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'operations',
+        None,
+        u'Read-only view of pending operations affecting this resource, if '
+        u'requested.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'producerProjectId',
+        None,
+        u'ID of the project that produces and owns this service.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'projectSettings',
+        None,
+        u'Read-only view of settings for a particular consumer project, if '
+        u'requested.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'serviceConfig',
+        None,
+        u"The service's generated configuration.",
+        flag_values=fv)
+    flags.DEFINE_string(
+        'serviceName',
+        None,
+        u'The name of the service.  See the `ServiceManager` overview for '
+        u'naming requirements.  This name must match '
+        u'`google.api.Service.name` in the `service_config` field.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Creates a new managed service.  Operation<response: ManagedService>
+
+    Flags:
+      configSource: User-supplied source configuration for the service. This
+        is distinct from the generated configuration provided in
+        `google.api.Service`. This is NOT populated on GetService calls at the
+        moment. NOTE: Any upsert operation that contains both a service_config
+        and a config_source is considered invalid and will result in an error
+        being returned.
+      generation: A server-assigned monotonically increasing number that
+        changes whenever a mutation is made to the `ManagedService` or any of
+        its components via the `ServiceManager` API.
+      operations: Read-only view of pending operations affecting this
+        resource, if requested.
+      producerProjectId: ID of the project that produces and owns this
+        service.
+      projectSettings: Read-only view of settings for a particular consumer
+        project, if requested.
+      serviceConfig: The service's generated configuration.
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  This name must match
+        `google.api.Service.name` in the `service_config` field.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ManagedService(
+        )
+    if FLAGS['configSource'].present:
+      request.configSource = apitools_base.JsonToMessage(messages.ConfigSource, FLAGS.configSource)
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['operations'].present:
+      request.operations = [apitools_base.JsonToMessage(messages.Operation, x) for x in FLAGS.operations]
+    if FLAGS['producerProjectId'].present:
+      request.producerProjectId = FLAGS.producerProjectId.decode('utf8')
+    if FLAGS['projectSettings'].present:
+      request.projectSettings = apitools_base.JsonToMessage(messages.ProjectSettings, FLAGS.projectSettings)
+    if FLAGS['serviceConfig'].present:
+      request.serviceConfig = apitools_base.JsonToMessage(messages.Service, FLAGS.serviceConfig)
+    if FLAGS['serviceName'].present:
+      request.serviceName = FLAGS.serviceName.decode('utf8')
+    result = client.services.Create(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesDelete(apitools_base_cli.NewCmd):
+  """Command wrapping services.Delete."""
+
+  usage = """services_delete <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, serviceName):
+    """Deletes a managed service.  Operation<response: google.protobuf.Empty>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesDeleteRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    result = client.services.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesDisable(apitools_base_cli.NewCmd):
+  """Command wrapping services.Disable."""
+
+  usage = """services_disable <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesDisable, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'disableServiceRequest',
+        None,
+        u'A DisableServiceRequest resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Disable a managed service for a project. Google Service Management will
+    only disable the managed service even if there are other services depend
+    on the managed service.  Operation<response: DisableServiceResponse>
+
+    Args:
+      serviceName: Name of the service to disable. Specifying an unknown
+        service name will cause the request to fail.
+
+    Flags:
+      disableServiceRequest: A DisableServiceRequest resource to be passed as
+        the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesDisableRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    if FLAGS['disableServiceRequest'].present:
+      request.disableServiceRequest = apitools_base.JsonToMessage(messages.DisableServiceRequest, FLAGS.disableServiceRequest)
+    result = client.services.Disable(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesEnable(apitools_base_cli.NewCmd):
+  """Command wrapping services.Enable."""
+
+  usage = """services_enable <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesEnable, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'enableServiceRequest',
+        None,
+        u'A EnableServiceRequest resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Enable a managed service for a project with default setting. If the
+    managed service has dependencies, they will be enabled as well.
+    Operation<response: EnableServiceResponse>
+
+    Args:
+      serviceName: Name of the service to enable. Specifying an unknown
+        service name will cause the request to fail.
+
+    Flags:
+      enableServiceRequest: A EnableServiceRequest resource to be passed as
+        the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesEnableRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    if FLAGS['enableServiceRequest'].present:
+      request.enableServiceRequest = apitools_base.JsonToMessage(messages.EnableServiceRequest, FLAGS.enableServiceRequest)
+    result = client.services.Enable(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesGet(apitools_base_cli.NewCmd):
+  """Command wrapping services.Get."""
+
+  usage = """services_get <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesGet, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'consumerProjectId',
+        None,
+        u'If project_settings is expanded, return settings for the specified '
+        u'consumer project.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'expand',
+        None,
+        u'Fields to expand in any results.  By default, the following fields '
+        u'are not present in the result: - `operations` - `project_settings` '
+        u'- `project_settings.operations` - `quota_usage` (It requires '
+        u'`project_settings`) - `historical_quota_usage` (It requires '
+        u'`project_settings`)',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'view',
+        u'PROJECT_SETTINGS_VIEW_UNSPECIFIED',
+        [u'PROJECT_SETTINGS_VIEW_UNSPECIFIED', u'CONSUMER_VIEW', u'PRODUCER_VIEW', u'ALL'],
+        u'If project_settings is expanded, request only fields for the '
+        u'specified view.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Gets a managed service. If the `consumer_project_id` is specified, the
+    project's settings for the specified service are also returned.
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      consumerProjectId: If project_settings is expanded, return settings for
+        the specified consumer project.
+      expand: Fields to expand in any results.  By default, the following
+        fields are not present in the result: - `operations` -
+        `project_settings` - `project_settings.operations` - `quota_usage` (It
+        requires `project_settings`) - `historical_quota_usage` (It requires
+        `project_settings`)
+      view: If project_settings is expanded, request only fields for the
+        specified view.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesGetRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    if FLAGS['consumerProjectId'].present:
+      request.consumerProjectId = FLAGS.consumerProjectId.decode('utf8')
+    if FLAGS['expand'].present:
+      request.expand = FLAGS.expand.decode('utf8')
+    if FLAGS['view'].present:
+      request.view = messages.ServicemanagementServicesGetRequest.ViewValueValuesEnum(FLAGS.view)
+    result = client.services.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesGetAccessPolicy(apitools_base_cli.NewCmd):
+  """Command wrapping services.GetAccessPolicy."""
+
+  usage = """services_getAccessPolicy <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesGetAccessPolicy, self).__init__(name, fv)
+
+  def RunWithArgs(self, serviceName):
+    """Producer method to retrieve current policy.
+
+    Args:
+      serviceName: The name of the service.  For example:
+        `example.googleapis.com`.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesGetAccessPolicyRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    result = client.services.GetAccessPolicy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesGetConfig(apitools_base_cli.NewCmd):
+  """Command wrapping services.GetConfig."""
+
+  usage = """services_getConfig <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesGetConfig, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'configId',
+        None,
+        u'The id of the service config resource. Optional. If it is not '
+        u'specified, the latest version of config will be returned.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Gets a service config (version) for a managed service. If `config_id`
+    is not specified, the latest service config will be returned.
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      configId: The id of the service config resource. Optional. If it is not
+        specified, the latest version of config will be returned.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesGetConfigRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    if FLAGS['configId'].present:
+      request.configId = FLAGS.configId.decode('utf8')
+    result = client.services.GetConfig(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesList(apitools_base_cli.NewCmd):
+  """Command wrapping services.List."""
+
+  usage = """services_list"""
+
+  def __init__(self, name, fv):
+    super(ServicesList, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'category',
+        None,
+        u'Include services only in the specified category. Supported '
+        u'categories are servicemanagement.googleapis.com/categories/google-'
+        u'services or servicemanagement.googleapis.com/categories/play-games.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'consumerProjectId',
+        None,
+        u'Include services consumed by the specified project.  If '
+        u'project_settings is expanded, then this field controls which '
+        u'project project_settings is populated for.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'expand',
+        None,
+        u'Fields to expand in any results.  By default, the following fields '
+        u'are not fully included in list results: - `operations` - '
+        u'`project_settings` - `project_settings.operations` - `quota_usage` '
+        u'(It requires `project_settings`)',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'pageSize',
+        None,
+        u'Requested size of the next page of data.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'Token identifying which result to start with; returned by a '
+        u'previous list call.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'producerProjectId',
+        None,
+        u'Include services produced by the specified project.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Lists all managed services. If the `consumer_project_id` is specified,
+    the project's settings for the specified service are also returned.
+
+    Flags:
+      category: Include services only in the specified category. Supported
+        categories are servicemanagement.googleapis.com/categories/google-
+        services or servicemanagement.googleapis.com/categories/play-games.
+      consumerProjectId: Include services consumed by the specified project.
+        If project_settings is expanded, then this field controls which
+        project project_settings is populated for.
+      expand: Fields to expand in any results.  By default, the following
+        fields are not fully included in list results: - `operations` -
+        `project_settings` - `project_settings.operations` - `quota_usage` (It
+        requires `project_settings`)
+      pageSize: Requested size of the next page of data.
+      pageToken: Token identifying which result to start with; returned by a
+        previous list call.
+      producerProjectId: Include services produced by the specified project.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesListRequest(
+        )
+    if FLAGS['category'].present:
+      request.category = FLAGS.category.decode('utf8')
+    if FLAGS['consumerProjectId'].present:
+      request.consumerProjectId = FLAGS.consumerProjectId.decode('utf8')
+    if FLAGS['expand'].present:
+      request.expand = FLAGS.expand.decode('utf8')
+    if FLAGS['pageSize'].present:
+      request.pageSize = FLAGS.pageSize
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['producerProjectId'].present:
+      request.producerProjectId = FLAGS.producerProjectId.decode('utf8')
+    result = client.services.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesPatch(apitools_base_cli.NewCmd):
+  """Command wrapping services.Patch."""
+
+  usage = """services_patch <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesPatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'managedService',
+        None,
+        u'A ManagedService resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'updateMask',
+        None,
+        u'A mask specifying which fields to update.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Updates the specified subset of the configuration. If the specified
+    service does not exists the patch operation fails.  Operation<response:
+    ManagedService>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      managedService: A ManagedService resource to be passed as the request
+        body.
+      updateMask: A mask specifying which fields to update.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesPatchRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    if FLAGS['managedService'].present:
+      request.managedService = apitools_base.JsonToMessage(messages.ManagedService, FLAGS.managedService)
+    if FLAGS['updateMask'].present:
+      request.updateMask = FLAGS.updateMask.decode('utf8')
+    result = client.services.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesPatchConfig(apitools_base_cli.NewCmd):
+  """Command wrapping services.PatchConfig."""
+
+  usage = """services_patchConfig <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesPatchConfig, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'service',
+        None,
+        u'A Service resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'updateMask',
+        None,
+        u'A mask specifying which fields to update.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Updates the specified subset of the service resource. Equivalent to
+    calling `PatchService` with only the `service_config` field updated.
+    Operation<response: google.api.Service>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      service: A Service resource to be passed as the request body.
+      updateMask: A mask specifying which fields to update.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ServicemanagementServicesPatchConfigRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    if FLAGS['service'].present:
+      request.service = apitools_base.JsonToMessage(messages.Service, FLAGS.service)
+    if FLAGS['updateMask'].present:
+      request.updateMask = FLAGS.updateMask.decode('utf8')
+    result = client.services.PatchConfig(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping services.Update."""
+
+  usage = """services_update <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesUpdate, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'managedService',
+        None,
+        u'A ManagedService resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'updateMask',
+        None,
+        u'A mask specifying which fields to update. Update mask has been '
+        u'deprecated on UpdateService service method. Please use PatchService'
+        u' method instead to do partial updates.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Updates the configuration of a service.  If the specified service does
+    not already exist, then it is created.  Operation<response:
+    ManagedService>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      managedService: A ManagedService resource to be passed as the request
+        body.
+      updateMask: A mask specifying which fields to update. Update mask has
+        been deprecated on UpdateService service method. Please use
+        PatchService method instead to do partial updates.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesUpdateRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    # Message-typed flags are supplied as JSON text and parsed here.
+    if FLAGS['managedService'].present:
+      request.managedService = apitools_base.JsonToMessage(messages.ManagedService, FLAGS.managedService)
+    if FLAGS['updateMask'].present:
+      request.updateMask = FLAGS.updateMask.decode('utf8')
+    result = client.services.Update(
+        request, global_params=global_params)
+    # Python 2 print statement; this generated module is Python-2-only.
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesUpdateAccessPolicy(apitools_base_cli.NewCmd):
+  """Command wrapping services.UpdateAccessPolicy."""
+
+  usage = """services_updateAccessPolicy <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesUpdateAccessPolicy, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'accessList',
+        None,
+        u'ACL for access to the unrestricted surface of the service.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'visibilityLabelAccessLists',
+        None,
+        u'ACLs for access to restricted parts of the service.  The map key is'
+        u' the visibility label that is being controlled.  Note that access '
+        u'to any label also implies access to the unrestricted surface.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Producer method to update the current policy.  This method will return
+    an error if the policy is too large (more than 50 entries across all
+    lists).
+
+    Args:
+      serviceName: The service protected by this policy.
+
+    Flags:
+      accessList: ACL for access to the unrestricted surface of the service.
+      visibilityLabelAccessLists: ACLs for access to restricted parts of the
+        service.  The map key is the visibility label that is being
+        controlled.  Note that access to any label also implies access to the
+        unrestricted surface.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Unlike most commands here, the request is the resource message itself
+    # (ServiceAccessPolicy), not a Servicemanagement*Request wrapper.
+    request = messages.ServiceAccessPolicy(
+        serviceName=serviceName.decode('utf8'),
+        )
+    if FLAGS['accessList'].present:
+      request.accessList = apitools_base.JsonToMessage(messages.ServiceAccessList, FLAGS.accessList)
+    if FLAGS['visibilityLabelAccessLists'].present:
+      request.visibilityLabelAccessLists = apitools_base.JsonToMessage(messages.ServiceAccessPolicy.VisibilityLabelAccessListsValue, FLAGS.visibilityLabelAccessLists)
+    result = client.services.UpdateAccessPolicy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesUpdateConfig(apitools_base_cli.NewCmd):
+  """Command wrapping services.UpdateConfig."""
+
+  usage = """services_updateConfig <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesUpdateConfig, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'service',
+        None,
+        u'A Service resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'updateMask',
+        None,
+        u'A mask specifying which fields to update. Update mask has been '
+        u'deprecated on UpdateServiceConfig service method. Please use '
+        u'PatchServiceConfig method instead to do partial updates.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Updates the specified subset of the service resource. Equivalent to
+    calling `UpdateService` with only the `service_config` field updated.
+    Operation<response: google.api.Service>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      service: A Service resource to be passed as the request body.
+      updateMask: A mask specifying which fields to update. Update mask has
+        been deprecated on UpdateServiceConfig service method. Please use
+        PatchServiceConfig method instead to do partial updates.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesUpdateConfigRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    # The --service flag carries a JSON-encoded Service message.
+    if FLAGS['service'].present:
+      request.service = apitools_base.JsonToMessage(messages.Service, FLAGS.service)
+    if FLAGS['updateMask'].present:
+      request.updateMask = FLAGS.updateMask.decode('utf8')
+    result = client.services.UpdateConfig(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesAccessPolicyQuery(apitools_base_cli.NewCmd):
+  """Command wrapping services_accessPolicy.Query."""
+
+  usage = """services_accessPolicy_query <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesAccessPolicyQuery, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'userEmail',
+        None,
+        u'The user to query access for.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Method to query the accessibility of a service and any associated
+    visibility labels for a specified user.  Members of the producer project
+    may call this method and specify any user.  Any user may call this method,
+    but must specify their own email address. In this case the method will
+    return NOT_FOUND if the user has no access to the service.
+
+    Args:
+      serviceName: The service to query access for.
+
+    Flags:
+      userEmail: The user to query access for.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesAccessPolicyQueryRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    if FLAGS['userEmail'].present:
+      request.userEmail = FLAGS.userEmail.decode('utf8')
+    result = client.services_accessPolicy.Query(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesConfigsCreate(apitools_base_cli.NewCmd):
+  """Command wrapping services_configs.Create."""
+
+  usage = """services_configs_create <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesConfigsCreate, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'service',
+        None,
+        u'A Service resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Creates a new service config (version) for a managed service. This
+    method only stores the service config, but does not apply the service
+    config to any backend services.
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      service: A Service resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesConfigsCreateRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    # The --service flag carries a JSON-encoded Service message.
+    if FLAGS['service'].present:
+      request.service = apitools_base.JsonToMessage(messages.Service, FLAGS.service)
+    result = client.services_configs.Create(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesConfigsGet(apitools_base_cli.NewCmd):
+  """Command wrapping services_configs.Get."""
+
+  usage = """services_configs_get <serviceName> <configId>"""
+
+  def __init__(self, name, fv):
+    # No command-specific flags; both inputs are positional arguments.
+    super(ServicesConfigsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, serviceName, configId):
+    """Gets a service config (version) for a managed service. If `config_id`
+    is not specified, the latest service config will be returned.
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+      configId: The id of the service config resource. Optional. If it is not
+        specified, the latest version of config will be returned.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesConfigsGetRequest(
+        serviceName=serviceName.decode('utf8'),
+        configId=configId.decode('utf8'),
+        )
+    result = client.services_configs.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesConfigsList(apitools_base_cli.NewCmd):
+  """Command wrapping services_configs.List."""
+
+  usage = """services_configs_list <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesConfigsList, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_integer(
+        'pageSize',
+        None,
+        u'The max number of items to include in the response list.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'The token of the page to retrieve.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Lists the history of the service config for a managed service, from the
+    newest to the oldest.
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      pageSize: The max number of items to include in the response list.
+      pageToken: The token of the page to retrieve.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesConfigsListRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    # pageSize is an integer flag, so it is copied through without decoding.
+    if FLAGS['pageSize'].present:
+      request.pageSize = FLAGS.pageSize
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    result = client.services_configs.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesConfigsSubmit(apitools_base_cli.NewCmd):
+  """Command wrapping services_configs.Submit."""
+
+  usage = """services_configs_submit <serviceName>"""
+
+  def __init__(self, name, fv):
+    super(ServicesConfigsSubmit, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'submitConfigSourceRequest',
+        None,
+        u'A SubmitConfigSourceRequest resource to be passed as the request '
+        u'body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName):
+    """Creates a new service config (version) for a managed service based on
+    user-supplied configuration sources files (for example: OpenAPI
+    Specification). This method stores the source configurations as well as
+    the generated service config. It does NOT apply the service config to any
+    backend services.  Operation<response: SubmitConfigSourceResponse>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+
+    Flags:
+      submitConfigSourceRequest: A SubmitConfigSourceRequest resource to be
+        passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesConfigsSubmitRequest(
+        serviceName=serviceName.decode('utf8'),
+        )
+    # The flag carries a JSON-encoded SubmitConfigSourceRequest message.
+    if FLAGS['submitConfigSourceRequest'].present:
+      request.submitConfigSourceRequest = apitools_base.JsonToMessage(messages.SubmitConfigSourceRequest, FLAGS.submitConfigSourceRequest)
+    result = client.services_configs.Submit(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesCustomerSettingsGet(apitools_base_cli.NewCmd):
+  """Command wrapping services_customerSettings.Get."""
+
+  usage = """services_customerSettings_get <serviceName> <customerId>"""
+
+  def __init__(self, name, fv):
+    super(ServicesCustomerSettingsGet, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'expand',
+        None,
+        u'Fields to expand in any results.',
+        flag_values=fv)
+    # Enum flag: gflags restricts --view to the listed values.
+    flags.DEFINE_enum(
+        'view',
+        u'PROJECT_SETTINGS_VIEW_UNSPECIFIED',
+        [u'PROJECT_SETTINGS_VIEW_UNSPECIFIED', u'CONSUMER_VIEW', u'PRODUCER_VIEW', u'ALL'],
+        u'Request only fields for the specified view.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName, customerId):
+    """Retrieves the settings that control the specified customer's usage of
+    the service.
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`. This
+        field is required.
+      customerId: ID for the customer. See the comment for
+        `CustomerSettings.customer_id` field of message for its format. This
+        field is required.
+
+    Flags:
+      expand: Fields to expand in any results.
+      view: Request only fields for the specified view.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesCustomerSettingsGetRequest(
+        serviceName=serviceName.decode('utf8'),
+        customerId=customerId.decode('utf8'),
+        )
+    if FLAGS['expand'].present:
+      request.expand = FLAGS.expand.decode('utf8')
+    # Convert the string flag value into the request's enum type.
+    if FLAGS['view'].present:
+      request.view = messages.ServicemanagementServicesCustomerSettingsGetRequest.ViewValueValuesEnum(FLAGS.view)
+    result = client.services_customerSettings.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesCustomerSettingsPatch(apitools_base_cli.NewCmd):
+  """Command wrapping services_customerSettings.Patch."""
+
+  usage = """services_customerSettings_patch <serviceName> <customerId>"""
+
+  def __init__(self, name, fv):
+    super(ServicesCustomerSettingsPatch, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'customerSettings',
+        None,
+        u'A CustomerSettings resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'updateMask',
+        None,
+        u'The field mask specifying which fields are to be updated.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName, customerId):
+    """Updates specified subset of the settings that control the specified
+    customer's usage of the service.  Attempts to update a field not
+    controlled by the caller will result in an access denied error.
+    Operation<response: CustomerSettings>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`. This
+        field is required.
+      customerId: ID for the customer. See the comment for
+        `CustomerSettings.customer_id` field of message for its format. This
+        field is required.
+
+    Flags:
+      customerSettings: A CustomerSettings resource to be passed as the
+        request body.
+      updateMask: The field mask specifying which fields are to be updated.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesCustomerSettingsPatchRequest(
+        serviceName=serviceName.decode('utf8'),
+        customerId=customerId.decode('utf8'),
+        )
+    # The --customerSettings flag carries a JSON-encoded CustomerSettings message.
+    if FLAGS['customerSettings'].present:
+      request.customerSettings = apitools_base.JsonToMessage(messages.CustomerSettings, FLAGS.customerSettings)
+    if FLAGS['updateMask'].present:
+      request.updateMask = FLAGS.updateMask.decode('utf8')
+    result = client.services_customerSettings.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesProjectSettingsGet(apitools_base_cli.NewCmd):
+  """Command wrapping services_projectSettings.Get."""
+
+  usage = """services_projectSettings_get <serviceName> <consumerProjectId>"""
+
+  def __init__(self, name, fv):
+    super(ServicesProjectSettingsGet, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'expand',
+        None,
+        u'Fields to expand in any results.  By default, the following fields '
+        u'are not present in the result: - `operations` - `quota_usage`',
+        flag_values=fv)
+    # Enum flag: gflags restricts --view to the listed values.
+    flags.DEFINE_enum(
+        'view',
+        u'PROJECT_SETTINGS_VIEW_UNSPECIFIED',
+        [u'PROJECT_SETTINGS_VIEW_UNSPECIFIED', u'CONSUMER_VIEW', u'PRODUCER_VIEW', u'ALL'],
+        u'Request only the fields for the specified view.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName, consumerProjectId):
+    """Retrieves the settings that control the specified consumer project's
+    usage of the service.
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+      consumerProjectId: The project ID of the consumer.
+
+    Flags:
+      expand: Fields to expand in any results.  By default, the following
+        fields are not present in the result: - `operations` - `quota_usage`
+      view: Request only the fields for the specified view.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesProjectSettingsGetRequest(
+        serviceName=serviceName.decode('utf8'),
+        consumerProjectId=consumerProjectId.decode('utf8'),
+        )
+    if FLAGS['expand'].present:
+      request.expand = FLAGS.expand.decode('utf8')
+    # Convert the string flag value into the request's enum type.
+    if FLAGS['view'].present:
+      request.view = messages.ServicemanagementServicesProjectSettingsGetRequest.ViewValueValuesEnum(FLAGS.view)
+    result = client.services_projectSettings.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesProjectSettingsPatch(apitools_base_cli.NewCmd):
+  """Command wrapping services_projectSettings.Patch."""
+
+  usage = """services_projectSettings_patch <serviceName> <consumerProjectId>"""
+
+  def __init__(self, name, fv):
+    super(ServicesProjectSettingsPatch, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'projectSettings',
+        None,
+        u'A ProjectSettings resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'updateMask',
+        None,
+        u'The field mask specifying which fields are to be updated.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName, consumerProjectId):
+    """Updates specified subset of the settings that control the specified
+    consumer project's usage of the service.  Attempts to update a field not
+    controlled by the caller will result in an access denied error.
+    Operation<response: ProjectSettings>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.  For example: `example.googleapis.com`.
+      consumerProjectId: The project ID of the consumer.
+
+    Flags:
+      projectSettings: A ProjectSettings resource to be passed as the request
+        body.
+      updateMask: The field mask specifying which fields are to be updated.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ServicemanagementServicesProjectSettingsPatchRequest(
+        serviceName=serviceName.decode('utf8'),
+        consumerProjectId=consumerProjectId.decode('utf8'),
+        )
+    # The --projectSettings flag carries a JSON-encoded ProjectSettings message.
+    if FLAGS['projectSettings'].present:
+      request.projectSettings = apitools_base.JsonToMessage(messages.ProjectSettings, FLAGS.projectSettings)
+    if FLAGS['updateMask'].present:
+      request.updateMask = FLAGS.updateMask.decode('utf8')
+    result = client.services_projectSettings.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ServicesProjectSettingsUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping services_projectSettings.Update."""
+
+  usage = """services_projectSettings_update <serviceName> <consumerProjectId>"""
+
+  def __init__(self, name, fv):
+    super(ServicesProjectSettingsUpdate, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'operations',
+        None,
+        u'Read-only view of pending operations affecting this resource, if '
+        u'requested.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'properties',
+        None,
+        u'Service-defined per-consumer properties.  A key-value mapping a '
+        u'string key to a google.protobuf.ListValue proto. Values in the list'
+        u" are typed as defined in the Service configuration's "
+        u'consumer.properties field.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'quotaSettings',
+        None,
+        u'Settings that control how much or how fast the service can be used '
+        u'by the consumer project.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'usageSettings',
+        None,
+        u'Settings that control whether this service is usable by the '
+        u'consumer project.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'visibilitySettings',
+        None,
+        u'Settings that control which features of the service are visible to '
+        u'the consumer project.',
+        flag_values=fv)
+
+  def RunWithArgs(self, serviceName, consumerProjectId):
+    """NOTE: Currently unsupported.  Use PatchProjectSettings instead.
+    Updates the settings that control the specified consumer project's usage
+    of the service.  Attempts to update a field not controlled by the caller
+    will result in an access denied error.  Operation<response:
+    ProjectSettings>
+
+    Args:
+      serviceName: The name of the service.  See the `ServiceManager` overview
+        for naming requirements.
+      consumerProjectId: ID for the project consuming this service.
+
+    Flags:
+      operations: Read-only view of pending operations affecting this
+        resource, if requested.
+      properties: Service-defined per-consumer properties.  A key-value
+        mapping a string key to a google.protobuf.ListValue proto. Values in
+        the list are typed as defined in the Service configuration's
+        consumer.properties field.
+      quotaSettings: Settings that control how much or how fast the service
+        can be used by the consumer project.
+      usageSettings: Settings that control whether this service is usable by
+        the consumer project.
+      visibilitySettings: Settings that control which features of the service
+        are visible to the consumer project.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional args arrive as byte strings under Python 2; decode to unicode.
+    request = messages.ProjectSettings(
+        serviceName=serviceName.decode('utf8'),
+        consumerProjectId=consumerProjectId.decode('utf8'),
+        )
+    # NOTE(review): --operations is a DEFINE_string flag, so iterating
+    # FLAGS.operations yields single characters, not JSON documents; this
+    # looks like a code-generator artifact (a repeated flag was likely
+    # intended). Verify against the apitools generator before relying on it.
+    if FLAGS['operations'].present:
+      request.operations = [apitools_base.JsonToMessage(messages.Operation, x) for x in FLAGS.operations]
+    if FLAGS['properties'].present:
+      request.properties = apitools_base.JsonToMessage(messages.ProjectSettings.PropertiesValue, FLAGS.properties)
+    if FLAGS['quotaSettings'].present:
+      request.quotaSettings = apitools_base.JsonToMessage(messages.QuotaSettings, FLAGS.quotaSettings)
+    if FLAGS['usageSettings'].present:
+      request.usageSettings = apitools_base.JsonToMessage(messages.UsageSettings, FLAGS.usageSettings)
+    if FLAGS['visibilitySettings'].present:
+      request.visibilitySettings = apitools_base.JsonToMessage(messages.VisibilitySettings, FLAGS.visibilitySettings)
+    result = client.services_projectSettings.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ConvertConfig(apitools_base_cli.NewCmd):
+  """Command wrapping v1.ConvertConfig."""
+
+  usage = """convertConfig"""
+
+  def __init__(self, name, fv):
+    super(ConvertConfig, self).__init__(name, fv)
+    # Register this command's flags on the per-command flag namespace `fv`.
+    flags.DEFINE_string(
+        'configSpec',
+        None,
+        u'Input configuration For this version of API, the supported type is '
+        u'OpenApiSpec',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'openApiSpec',
+        None,
+        u'The OpenAPI specification for an API.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'serviceName',
+        None,
+        u'The service name to use for constructing the normalized service '
+        u'configuration equivalent of the provided configuration '
+        u'specification.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'swaggerSpec',
+        None,
+        u'The swagger specification for an API.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide
+    config conversion moving forward.  Converts an API specification (e.g.
+    Swagger spec) to an equivalent `google.api.Service`.
+
+    Flags:
+      configSpec: Input configuration For this version of API, the supported
+        type is OpenApiSpec
+      openApiSpec: The OpenAPI specification for an API.
+      serviceName: The service name to use for constructing the normalized
+        service configuration equivalent of the provided configuration
+        specification.
+      swaggerSpec: The swagger specification for an API.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # This command takes no positional arguments; the request starts empty.
+    request = messages.ConvertConfigRequest(
+        )
+    # Message-typed flags are supplied as JSON text and parsed here.
+    if FLAGS['configSpec'].present:
+      request.configSpec = apitools_base.JsonToMessage(messages.ConvertConfigRequest.ConfigSpecValue, FLAGS.configSpec)
+    if FLAGS['openApiSpec'].present:
+      request.openApiSpec = apitools_base.JsonToMessage(messages.OpenApiSpec, FLAGS.openApiSpec)
+    if FLAGS['serviceName'].present:
+      request.serviceName = FLAGS.serviceName.decode('utf8')
+    if FLAGS['swaggerSpec'].present:
+      request.swaggerSpec = apitools_base.JsonToMessage(messages.SwaggerSpec, FLAGS.swaggerSpec)
+    result = client.v1.ConvertConfig(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+def main(_):
+  # Register every generated command under its CLI name; appcommands
+  # instantiates the class when the user invokes the command.
+  appcommands.AddCmd('pyshell', PyShell)
+  appcommands.AddCmd('operations_get', OperationsGet)
+  appcommands.AddCmd('services_convertConfig', ServicesConvertConfig)
+  appcommands.AddCmd('services_create', ServicesCreate)
+  appcommands.AddCmd('services_delete', ServicesDelete)
+  appcommands.AddCmd('services_disable', ServicesDisable)
+  appcommands.AddCmd('services_enable', ServicesEnable)
+  appcommands.AddCmd('services_get', ServicesGet)
+  appcommands.AddCmd('services_getAccessPolicy', ServicesGetAccessPolicy)
+  appcommands.AddCmd('services_getConfig', ServicesGetConfig)
+  appcommands.AddCmd('services_list', ServicesList)
+  appcommands.AddCmd('services_patch', ServicesPatch)
+  appcommands.AddCmd('services_patchConfig', ServicesPatchConfig)
+  appcommands.AddCmd('services_update', ServicesUpdate)
+  appcommands.AddCmd('services_updateAccessPolicy', ServicesUpdateAccessPolicy)
+  appcommands.AddCmd('services_updateConfig', ServicesUpdateConfig)
+  appcommands.AddCmd('services_accessPolicy_query', ServicesAccessPolicyQuery)
+  appcommands.AddCmd('services_configs_create', ServicesConfigsCreate)
+  appcommands.AddCmd('services_configs_get', ServicesConfigsGet)
+  appcommands.AddCmd('services_configs_list', ServicesConfigsList)
+  appcommands.AddCmd('services_configs_submit', ServicesConfigsSubmit)
+  appcommands.AddCmd('services_customerSettings_get', ServicesCustomerSettingsGet)
+  appcommands.AddCmd('services_customerSettings_patch', ServicesCustomerSettingsPatch)
+  appcommands.AddCmd('services_projectSettings_get', ServicesProjectSettingsGet)
+  appcommands.AddCmd('services_projectSettings_patch', ServicesProjectSettingsPatch)
+  appcommands.AddCmd('services_projectSettings_update', ServicesProjectSettingsUpdate)
+  appcommands.AddCmd('convertConfig', ConvertConfig)
+
+  apitools_base_cli.SetupLogger()
+  # SetDefaultCommand is not available in every appcommands release, hence
+  # the hasattr guard before defaulting to the interactive shell.
+  if hasattr(appcommands, 'SetDefaultCommand'):
+    appcommands.SetDefaultCommand('pyshell')
+
+
+# Re-export the shared apitools CLI entry point under this module's name so
+# external launchers can run this CLI via `run_main`.
+run_main = apitools_base_cli.run_main
+
+if __name__ == '__main__':
+  appcommands.Run()
diff --git a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py
new file mode 100644
index 0000000..26291bc
--- /dev/null
+++ b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_client.py
@@ -0,0 +1,859 @@
+"""Generated client library for servicemanagement version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+from apitools.base.py import base_api
+from samples.servicemanagement_sample.servicemanagement_v1 import servicemanagement_v1_messages as messages
+
+
+class ServicemanagementV1(base_api.BaseApiClient):
+  """Generated client library for service servicemanagement version v1."""
+
+  MESSAGES_MODULE = messages
+  BASE_URL = u'https://servicemanagement.googleapis.com/'
+
+  _PACKAGE = u'servicemanagement'
+  _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/service.management']
+  _VERSION = u'v1'
+  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
+  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
+  _CLIENT_CLASS_NAME = u'ServicemanagementV1'
+  _URL_VERSION = u'v1'
+  _API_KEY = None
+
+  def __init__(self, url='', credentials=None,
+               get_credentials=True, http=None, model=None,
+               log_request=False, log_response=False,
+               credentials_args=None, default_global_params=None,
+               additional_http_headers=None):
+    """Create a new servicemanagement handle."""
+    url = url or self.BASE_URL
+    super(ServicemanagementV1, self).__init__(
+        url, credentials=credentials,
+        get_credentials=get_credentials, http=http, model=model,
+        log_request=log_request, log_response=log_response,
+        credentials_args=credentials_args,
+        default_global_params=default_global_params,
+        additional_http_headers=additional_http_headers)
+    self.operations = self.OperationsService(self)
+    self.services_accessPolicy = self.ServicesAccessPolicyService(self)
+    self.services_configs = self.ServicesConfigsService(self)
+    self.services_customerSettings = self.ServicesCustomerSettingsService(self)
+    self.services_projectSettings = self.ServicesProjectSettingsService(self)
+    self.services = self.ServicesService(self)
+    self.v1 = self.V1Service(self)
+
+  class OperationsService(base_api.BaseApiService):
+    """Service class for the operations resource."""
+
+    _NAME = u'operations'
+
+    def __init__(self, client):
+      super(ServicemanagementV1.OperationsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Get(self, request, global_params=None):
+      """Gets the latest state of a long-running operation.  Clients can use this.
+method to poll the operation result at intervals as recommended by the API
+service.
+
+      Args:
+        request: (ServicemanagementOperationsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.operations.get',
+        ordered_params=[u'operationsId'],
+        path_params=[u'operationsId'],
+        query_params=[],
+        relative_path=u'v1/operations/{operationsId}',
+        request_field='',
+        request_type_name=u'ServicemanagementOperationsGetRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+  class ServicesAccessPolicyService(base_api.BaseApiService):
+    """Service class for the services_accessPolicy resource."""
+
+    _NAME = u'services_accessPolicy'
+
+    def __init__(self, client):
+      super(ServicemanagementV1.ServicesAccessPolicyService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Query(self, request, global_params=None):
+      """Method to query the accessibility of a service and any associated.
+visibility labels for a specified user.
+
+Members of the producer project may call this method and specify any user.
+
+Any user may call this method, but must specify their own email address.
+In this case the method will return NOT_FOUND if the user has no access to
+the service.
+
+      Args:
+        request: (ServicemanagementServicesAccessPolicyQueryRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (QueryUserAccessResponse) The response message.
+      """
+      config = self.GetMethodConfig('Query')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Query.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'servicemanagement.services.accessPolicy.query',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[u'userEmail'],
+        relative_path=u'v1/services/{serviceName}/accessPolicy:query',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesAccessPolicyQueryRequest',
+        response_type_name=u'QueryUserAccessResponse',
+        supports_download=False,
+    )
+
+  class ServicesConfigsService(base_api.BaseApiService):
+    """Service class for the services_configs resource."""
+
+    _NAME = u'services_configs'
+
+    def __init__(self, client):
+      super(ServicemanagementV1.ServicesConfigsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Create(self, request, global_params=None):
+      """Creates a new service config (version) for a managed service. This method.
+only stores the service config, but does not apply the service config to
+any backend services.
+
+      Args:
+        request: (ServicemanagementServicesConfigsCreateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Service) The response message.
+      """
+      config = self.GetMethodConfig('Create')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'servicemanagement.services.configs.create',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}/configs',
+        request_field=u'service',
+        request_type_name=u'ServicemanagementServicesConfigsCreateRequest',
+        response_type_name=u'Service',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Gets a service config (version) for a managed service. If `config_id` is.
+not specified, the latest service config will be returned.
+
+      Args:
+        request: (ServicemanagementServicesConfigsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Service) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.services.configs.get',
+        ordered_params=[u'serviceName', u'configId'],
+        path_params=[u'configId', u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}/configs/{configId}',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesConfigsGetRequest',
+        response_type_name=u'Service',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Lists the history of the service config for a managed service,.
+from the newest to the oldest.
+
+      Args:
+        request: (ServicemanagementServicesConfigsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ListServiceConfigsResponse) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.services.configs.list',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[u'pageSize', u'pageToken'],
+        relative_path=u'v1/services/{serviceName}/configs',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesConfigsListRequest',
+        response_type_name=u'ListServiceConfigsResponse',
+        supports_download=False,
+    )
+
+    def Submit(self, request, global_params=None):
+      """Creates a new service config (version) for a managed service based on.
+user-supplied configuration sources files (for example: OpenAPI
+Specification). This method stores the source configurations as well as the
+generated service config. It does NOT apply the service config to any
+backend services.
+
+Operation<response: SubmitConfigSourceResponse>
+
+      Args:
+        request: (ServicemanagementServicesConfigsSubmitRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Submit')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Submit.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'servicemanagement.services.configs.submit',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}/configs:submit',
+        request_field=u'submitConfigSourceRequest',
+        request_type_name=u'ServicemanagementServicesConfigsSubmitRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+  class ServicesCustomerSettingsService(base_api.BaseApiService):
+    """Service class for the services_customerSettings resource."""
+
+    _NAME = u'services_customerSettings'
+
+    def __init__(self, client):
+      super(ServicemanagementV1.ServicesCustomerSettingsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Get(self, request, global_params=None):
+      """Retrieves the settings that control the specified customer's usage of the.
+service.
+
+      Args:
+        request: (ServicemanagementServicesCustomerSettingsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (CustomerSettings) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.services.customerSettings.get',
+        ordered_params=[u'serviceName', u'customerId'],
+        path_params=[u'customerId', u'serviceName'],
+        query_params=[u'expand', u'view'],
+        relative_path=u'v1/services/{serviceName}/customerSettings/{customerId}',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesCustomerSettingsGetRequest',
+        response_type_name=u'CustomerSettings',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates specified subset of the settings that control the specified.
+customer's usage of the service.  Attempts to update a field not
+controlled by the caller will result in an access denied error.
+
+Operation<response: CustomerSettings>
+
+      Args:
+        request: (ServicemanagementServicesCustomerSettingsPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'servicemanagement.services.customerSettings.patch',
+        ordered_params=[u'serviceName', u'customerId'],
+        path_params=[u'customerId', u'serviceName'],
+        query_params=[u'updateMask'],
+        relative_path=u'v1/services/{serviceName}/customerSettings/{customerId}',
+        request_field=u'customerSettings',
+        request_type_name=u'ServicemanagementServicesCustomerSettingsPatchRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+  class ServicesProjectSettingsService(base_api.BaseApiService):
+    """Service class for the services_projectSettings resource."""
+
+    _NAME = u'services_projectSettings'
+
+    def __init__(self, client):
+      super(ServicemanagementV1.ServicesProjectSettingsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Get(self, request, global_params=None):
+      """Retrieves the settings that control the specified consumer project's usage.
+of the service.
+
+      Args:
+        request: (ServicemanagementServicesProjectSettingsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ProjectSettings) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.services.projectSettings.get',
+        ordered_params=[u'serviceName', u'consumerProjectId'],
+        path_params=[u'consumerProjectId', u'serviceName'],
+        query_params=[u'expand', u'view'],
+        relative_path=u'v1/services/{serviceName}/projectSettings/{consumerProjectId}',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesProjectSettingsGetRequest',
+        response_type_name=u'ProjectSettings',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates specified subset of the settings that control the specified.
+consumer project's usage of the service.  Attempts to update a field not
+controlled by the caller will result in an access denied error.
+
+Operation<response: ProjectSettings>
+
+      Args:
+        request: (ServicemanagementServicesProjectSettingsPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'servicemanagement.services.projectSettings.patch',
+        ordered_params=[u'serviceName', u'consumerProjectId'],
+        path_params=[u'consumerProjectId', u'serviceName'],
+        query_params=[u'updateMask'],
+        relative_path=u'v1/services/{serviceName}/projectSettings/{consumerProjectId}',
+        request_field=u'projectSettings',
+        request_type_name=u'ServicemanagementServicesProjectSettingsPatchRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """NOTE: Currently unsupported.  Use PatchProjectSettings instead.
+
+Updates the settings that control the specified consumer project's usage
+of the service.  Attempts to update a field not controlled by the caller
+will result in an access denied error.
+
+Operation<response: ProjectSettings>
+
+      Args:
+        request: (ProjectSettings) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'servicemanagement.services.projectSettings.update',
+        ordered_params=[u'serviceName', u'consumerProjectId'],
+        path_params=[u'consumerProjectId', u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}/projectSettings/{consumerProjectId}',
+        request_field='<request>',
+        request_type_name=u'ProjectSettings',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+  class ServicesService(base_api.BaseApiService):
+    """Service class for the services resource."""
+
+    _NAME = u'services'
+
+    def __init__(self, client):
+      super(ServicemanagementV1.ServicesService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def ConvertConfig(self, request, global_params=None):
+      """DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide.
+config conversion moving forward.
+
+Converts an API specification (e.g. Swagger spec) to an
+equivalent `google.api.Service`.
+
+      Args:
+        request: (ConvertConfigRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ConvertConfigResponse) The response message.
+      """
+      config = self.GetMethodConfig('ConvertConfig')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    ConvertConfig.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'servicemanagement.services.convertConfig',
+        ordered_params=[],
+        path_params=[],
+        query_params=[],
+        relative_path=u'v1/services:convertConfig',
+        request_field='<request>',
+        request_type_name=u'ConvertConfigRequest',
+        response_type_name=u'ConvertConfigResponse',
+        supports_download=False,
+    )
+
+    def Create(self, request, global_params=None):
+      """Creates a new managed service.
+
+Operation<response: ManagedService>
+
+      Args:
+        request: (ManagedService) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Create')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'servicemanagement.services.create',
+        ordered_params=[],
+        path_params=[],
+        query_params=[],
+        relative_path=u'v1/services',
+        request_field='<request>',
+        request_type_name=u'ManagedService',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+    def Delete(self, request, global_params=None):
+      """Deletes a managed service.
+
+Operation<response: google.protobuf.Empty>
+
+      Args:
+        request: (ServicemanagementServicesDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'servicemanagement.services.delete',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesDeleteRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+    def Disable(self, request, global_params=None):
+      """Disable a managed service for a project.
+Google Service Management will only disable the managed service even if
+there are other services depend on the managed service.
+
+Operation<response: DisableServiceResponse>
+
+      Args:
+        request: (ServicemanagementServicesDisableRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Disable')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Disable.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'servicemanagement.services.disable',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}:disable',
+        request_field=u'disableServiceRequest',
+        request_type_name=u'ServicemanagementServicesDisableRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+    def Enable(self, request, global_params=None):
+      """Enable a managed service for a project with default setting.
+If the managed service has dependencies, they will be enabled as well.
+
+Operation<response: EnableServiceResponse>
+
+      Args:
+        request: (ServicemanagementServicesEnableRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Enable')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Enable.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'servicemanagement.services.enable',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}:enable',
+        request_field=u'enableServiceRequest',
+        request_type_name=u'ServicemanagementServicesEnableRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Gets a managed service. If the `consumer_project_id` is specified,.
+the project's settings for the specified service are also returned.
+
+      Args:
+        request: (ServicemanagementServicesGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ManagedService) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.services.get',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[u'consumerProjectId', u'expand', u'view'],
+        relative_path=u'v1/services/{serviceName}',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesGetRequest',
+        response_type_name=u'ManagedService',
+        supports_download=False,
+    )
+
+    def GetAccessPolicy(self, request, global_params=None):
+      """Producer method to retrieve current policy.
+
+      Args:
+        request: (ServicemanagementServicesGetAccessPolicyRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ServiceAccessPolicy) The response message.
+      """
+      config = self.GetMethodConfig('GetAccessPolicy')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    GetAccessPolicy.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.services.getAccessPolicy',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}/accessPolicy',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesGetAccessPolicyRequest',
+        response_type_name=u'ServiceAccessPolicy',
+        supports_download=False,
+    )
+
+    def GetConfig(self, request, global_params=None):
+      """Gets a service config (version) for a managed service. If `config_id` is.
+not specified, the latest service config will be returned.
+
+      Args:
+        request: (ServicemanagementServicesGetConfigRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Service) The response message.
+      """
+      config = self.GetMethodConfig('GetConfig')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    GetConfig.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.services.getConfig',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[u'configId'],
+        relative_path=u'v1/services/{serviceName}/config',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesGetConfigRequest',
+        response_type_name=u'Service',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Lists all managed services. If the `consumer_project_id` is specified,.
+the project's settings for the specified service are also returned.
+
+      Args:
+        request: (ServicemanagementServicesListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ListServicesResponse) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'servicemanagement.services.list',
+        ordered_params=[],
+        path_params=[],
+        query_params=[u'category', u'consumerProjectId', u'expand', u'pageSize', u'pageToken', u'producerProjectId'],
+        relative_path=u'v1/services',
+        request_field='',
+        request_type_name=u'ServicemanagementServicesListRequest',
+        response_type_name=u'ListServicesResponse',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates the specified subset of the configuration. If the specified service.
+does not exists the patch operation fails.
+
+Operation<response: ManagedService>
+
+      Args:
+        request: (ServicemanagementServicesPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'servicemanagement.services.patch',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[u'updateMask'],
+        relative_path=u'v1/services/{serviceName}',
+        request_field=u'managedService',
+        request_type_name=u'ServicemanagementServicesPatchRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+    def PatchConfig(self, request, global_params=None):
+      """Updates the specified subset of the service resource. Equivalent to.
+calling `PatchService` with only the `service_config` field updated.
+
+Operation<response: google.api.Service>
+
+      Args:
+        request: (ServicemanagementServicesPatchConfigRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('PatchConfig')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    PatchConfig.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'servicemanagement.services.patchConfig',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[u'updateMask'],
+        relative_path=u'v1/services/{serviceName}/config',
+        request_field=u'service',
+        request_type_name=u'ServicemanagementServicesPatchConfigRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates the configuration of a service.  If the specified service does not.
+already exist, then it is created.
+
+Operation<response: ManagedService>
+
+      Args:
+        request: (ServicemanagementServicesUpdateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'servicemanagement.services.update',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[u'updateMask'],
+        relative_path=u'v1/services/{serviceName}',
+        request_field=u'managedService',
+        request_type_name=u'ServicemanagementServicesUpdateRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+    def UpdateAccessPolicy(self, request, global_params=None):
+      """Producer method to update the current policy.  This method will return an.
+error if the policy is too large (more than 50 entries across all lists).
+
+      Args:
+        request: (ServiceAccessPolicy) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ServiceAccessPolicy) The response message.
+      """
+      config = self.GetMethodConfig('UpdateAccessPolicy')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    UpdateAccessPolicy.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'servicemanagement.services.updateAccessPolicy',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[],
+        relative_path=u'v1/services/{serviceName}/accessPolicy',
+        request_field='<request>',
+        request_type_name=u'ServiceAccessPolicy',
+        response_type_name=u'ServiceAccessPolicy',
+        supports_download=False,
+    )
+
+    def UpdateConfig(self, request, global_params=None):
+      """Updates the specified subset of the service resource. Equivalent to.
+calling `UpdateService` with only the `service_config` field updated.
+
+Operation<response: google.api.Service>
+
+      Args:
+        request: (ServicemanagementServicesUpdateConfigRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Operation) The response message.
+      """
+      config = self.GetMethodConfig('UpdateConfig')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    UpdateConfig.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'servicemanagement.services.updateConfig',
+        ordered_params=[u'serviceName'],
+        path_params=[u'serviceName'],
+        query_params=[u'updateMask'],
+        relative_path=u'v1/services/{serviceName}/config',
+        request_field=u'service',
+        request_type_name=u'ServicemanagementServicesUpdateConfigRequest',
+        response_type_name=u'Operation',
+        supports_download=False,
+    )
+
+  class V1Service(base_api.BaseApiService):
+    """Service class for the v1 resource."""
+
+    _NAME = u'v1'
+
+    def __init__(self, client):
+      super(ServicemanagementV1.V1Service, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def ConvertConfig(self, request, global_params=None):
+      """DEPRECATED. `SubmitConfigSource` with `validate_only=true` will provide.
+config conversion moving forward.
+
+Converts an API specification (e.g. Swagger spec) to an
+equivalent `google.api.Service`.
+
+      Args:
+        request: (ConvertConfigRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ConvertConfigResponse) The response message.
+      """
+      config = self.GetMethodConfig('ConvertConfig')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    ConvertConfig.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'servicemanagement.convertConfig',
+        ordered_params=[],
+        path_params=[],
+        query_params=[],
+        relative_path=u'v1:convertConfig',
+        request_field='<request>',
+        request_type_name=u'ConvertConfigRequest',
+        response_type_name=u'ConvertConfigResponse',
+        supports_download=False,
+    )
diff --git a/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py
new file mode 100644
index 0000000..9291cf3
--- /dev/null
+++ b/samples/servicemanagement_sample/servicemanagement_v1/servicemanagement_v1_messages.py
@@ -0,0 +1,3505 @@
+"""Generated message classes for servicemanagement version v1.
+
+The service management API for Google Cloud Platform
+"""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+from apitools.base.protorpclite import messages as _messages
+from apitools.base.py import encoding
+from apitools.base.py import extra_types
+
+
+package = 'servicemanagement'
+
+
+class Api(_messages.Message):
+  """Api is a light-weight descriptor for a protocol buffer service.
+
+  Enums:
+    SyntaxValueValuesEnum: The source syntax of the service.
+
+  Fields:
+    methods: The methods of this api, in unspecified order.
+    mixins: Included APIs. See Mixin.
+    name: The fully qualified name of this api, including package name
+      followed by the api's simple name.
+    options: Any metadata attached to the API.
+    sourceContext: Source context for the protocol buffer service represented
+      by this message.
+    syntax: The source syntax of the service.
+    version: A version string for this api. If specified, must have the form
+      `major-version.minor-version`, as in `1.10`. If the minor version is
+      omitted, it defaults to zero. If the entire version field is empty, the
+      major version is derived from the package name, as outlined below. If
+      the field is not empty, the version in the package name will be verified
+      to be consistent with what is provided here.  The versioning schema uses
+      [semantic versioning](http://semver.org) where the major version number
+      indicates a breaking change and the minor version an additive, non-
+      breaking change. Both version numbers are signals to users what to
+      expect from different versions, and should be carefully chosen based on
+      the product plan.  The major version is also reflected in the package
+      name of the API, which must end in `v<major-version>`, as in
+      `google.feature.v1`. For major versions 0 and 1, the suffix can be
+      omitted. Zero major versions must only be used for experimental, none-GA
+      apis.
+  """
+
+  class SyntaxValueValuesEnum(_messages.Enum):
+    """The source syntax of the service.
+
+    Values:
+      SYNTAX_PROTO2: Syntax `proto2`.
+      SYNTAX_PROTO3: Syntax `proto3`.
+    """
+    SYNTAX_PROTO2 = 0
+    SYNTAX_PROTO3 = 1
+
+  methods = _messages.MessageField('Method', 1, repeated=True)
+  mixins = _messages.MessageField('Mixin', 2, repeated=True)
+  name = _messages.StringField(3)
+  options = _messages.MessageField('Option', 4, repeated=True)
+  sourceContext = _messages.MessageField('SourceContext', 5)
+  syntax = _messages.EnumField('SyntaxValueValuesEnum', 6)
+  version = _messages.StringField(7)
+
+
+class AreaUnderCurveParams(_messages.Message):
+  """AreaUnderCurveParams groups the metrics relevant to generating duration
+  based metric from base (snapshot) metric and delta (change) metric.  The
+  generated metric has two dimensions:    resource usage metric and the
+  duration the metric applies.  Essentially the generated metric is the Area
+  Under Curve(AUC) of the "duration - resource" usage curve. This AUC metric
+  is readily appliable to billing since "billable resource usage" depends on
+  resource usage and duration of the resource used.  A service config may
+  contain multiple resources and corresponding metrics. AreaUnderCurveParams
+  groups the relevant ones: which snapshot_metric and change_metric are used
+  to produce which generated_metric.
+
+  Fields:
+    changeMetric: Change of resource usage at a particular timestamp. This
+      should a DELTA metric.
+    generatedMetric: Metric generated from snapshot_metric and change_metric.
+      This is also a DELTA metric.
+    snapshotMetric: Total usage of a resource at a particular timestamp. This
+      should be a GAUGE metric.
+  """
+
+  changeMetric = _messages.StringField(1)
+  generatedMetric = _messages.StringField(2)
+  snapshotMetric = _messages.StringField(3)
+
+
+class AuthProvider(_messages.Message):
+  """Configuration for an anthentication provider, including support for [JSON
+  Web Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-
+  token-32).
+
+  Fields:
+    id: The unique identifier of the auth provider. It will be referred to by
+      `AuthRequirement.provider_id`.  Example: "bookstore_auth".
+    issuer: Identifies the principal that issued the JWT. See
+      https://tools.ietf.org/html/draft-ietf-oauth-json-web-
+      token-32#section-4.1.1 Usually a URL or an email address.  Example:
+      https://securetoken.google.com Example:
+      1234567-compute@developer.gserviceaccount.com
+    jwksUri: URL of the provider's public key set to validate signature of the
+      JWT. See [OpenID Discovery](https://openid.net/specs/openid-connect-
+      discovery-1_0.html#ProviderMetadata). Optional if the key set document:
+      - can be retrieved from    [OpenID Discovery](https://openid.net/specs
+      /openid-connect-discovery-1_0.html    of the issuer.  - can be inferred
+      from the email domain of the issuer (e.g. a Google service account).
+      Example: https://www.googleapis.com/oauth2/v1/certs
+  """
+
+  id = _messages.StringField(1)
+  issuer = _messages.StringField(2)
+  jwksUri = _messages.StringField(3)
+
+
+class AuthRequirement(_messages.Message):
+  """User-defined authentication requirements, including support for [JSON Web
+  Token (JWT)](https://tools.ietf.org/html/draft-ietf-oauth-json-web-
+  token-32).
+
+  Fields:
+    audiences: The list of JWT [audiences](https://tools.ietf.org/html/draft-
+      ietf-oauth-json-web-token-32#section-4.1.3). that are allowed to access.
+      A JWT containing any of these audiences will be accepted. When this
+      setting is absent, only JWTs with audience
+      "https://Service_name/API_name" will be accepted. For example, if no
+      audiences are in the setting, LibraryService API will only accept JWTs
+      with the following audience "https://library-
+      example.googleapis.com/google.example.library.v1.LibraryService".
+      Example:      audiences: bookstore_android.apps.googleusercontent.com,
+      bookstore_web.apps.googleusercontent.com
+    providerId: id from authentication provider.  Example:      provider_id:
+      bookstore_auth
+  """
+
+  audiences = _messages.StringField(1)
+  providerId = _messages.StringField(2)
+
+
+class Authentication(_messages.Message):
+  """`Authentication` defines the authentication configuration for an API.
+  Example for an API targeted for external use:      name:
+  calendar.googleapis.com     authentication:       rules:       - selector:
+  "*"         oauth:           canonical_scopes:
+  https://www.googleapis.com/auth/calendar        - selector:
+  google.calendar.Delegate         oauth:           canonical_scopes:
+  https://www.googleapis.com/auth/calendar.read
+
+  Fields:
+    providers: Defines a set of authentication providers that a service
+      supports.
+    rules: Individual rules for authentication.
+  """
+
+  providers = _messages.MessageField('AuthProvider', 1, repeated=True)
+  rules = _messages.MessageField('AuthenticationRule', 2, repeated=True)
+
+
+class AuthenticationRule(_messages.Message):
+  """Authentication rules for the service.  By default, if a method has any
+  authentication requirements, every request must include a valid credential
+  matching one of the requirements. It's an error to include more than one
+  kind of credential in a single request.  If a method doesn't have any auth
+  requirements, request credentials will be ignored.
+
+  Fields:
+    allowWithoutCredential: Whether to allow requests without a credential.
+      If quota is enabled, an API key is required for such request to pass the
+      quota check.
+    oauth: The requirements for OAuth credentials.
+    requirements: Requirements for additional authentication providers.
+    selector: Selects the methods to which this rule applies.  Refer to
+      selector for syntax details.
+  """
+
+  allowWithoutCredential = _messages.BooleanField(1)
+  oauth = _messages.MessageField('OAuthRequirements', 2)
+  requirements = _messages.MessageField('AuthRequirement', 3, repeated=True)
+  selector = _messages.StringField(4)
+
+
+class Backend(_messages.Message):
+  """`Backend` defines the backend configuration for a service.
+
+  Fields:
+    rules: A list of backend rules providing configuration for individual API
+      elements.
+  """
+
+  rules = _messages.MessageField('BackendRule', 1, repeated=True)
+
+
+class BackendRule(_messages.Message):
+  """A backend rule provides configuration for an individual API element.
+
+  Fields:
+    address: The address of the API backend.
+    deadline: The number of seconds to wait for a response from a request.
+      The default depends on the deployment context.
+    selector: Selects the methods to which this rule applies.  Refer to
+      selector for syntax details.
+  """
+
+  address = _messages.StringField(1)
+  deadline = _messages.FloatField(2)
+  selector = _messages.StringField(3)
+
+
+class Billing(_messages.Message):
+  """Billing related configuration of the service.  The following example
+  shows how to configure metrics for billing:      metrics:     - name:
+  library.googleapis.com/read_calls       metric_kind: DELTA       value_type:
+  INT64     - name: library.googleapis.com/write_calls       metric_kind:
+  DELTA       value_type: INT64     billing:       metrics:       -
+  library.googleapis.com/read_calls       - library.googleapis.com/write_calls
+  The next example shows how to enable billing status check and customize the
+  check behavior. It makes sure billing status check is included in the
+  `Check` method of [Service Control API](https://cloud.google.com/service-
+  control/). In the example, "google.storage.Get" method can be served when
+  the billing status is either `current` or `delinquent`, while
+  "google.storage.Write" method can only be served when the billing status is
+  `current`:      billing:       rules:       - selector: google.storage.Get
+  allowed_statuses:         - current         - delinquent       - selector:
+  google.storage.Write         allowed_statuses: current  Mostly services
+  should only allow `current` status when serving requests. In addition,
+  services can choose to allow both `current` and `delinquent` statuses when
+  serving read-only requests to resources. If there's no matching selector for
+  operation, no billing status check will be performed.
+
+  Fields:
+    areaUnderCurveParams: Per resource grouping for delta billing based
+      resource configs.
+    metrics: Names of the metrics to report to billing. Each name must be
+      defined in Service.metrics section.
+    rules: A list of billing status rules for configuring billing status
+      check.
+  """
+
+  areaUnderCurveParams = _messages.MessageField('AreaUnderCurveParams', 1, repeated=True)
+  metrics = _messages.StringField(2, repeated=True)
+  rules = _messages.MessageField('BillingStatusRule', 3, repeated=True)
+
+
+class BillingStatusRule(_messages.Message):
+  """Defines the billing status requirements for operations.  When used with
+  [Service Control API](https://cloud.google.com/service-control/), the
+  following statuses are supported:  - **current**: the associated billing
+  account is up to date and capable of                paying for resource
+  usages. - **delinquent**: the associated billing account has a correctable
+  problem,                   such as late payment.  Mostly services should
+  only allow `current` status when serving requests. In addition, services can
+  choose to allow both `current` and `delinquent` statuses when serving read-
+  only requests to resources. If the list of allowed_statuses is empty, it
+  means no billing requirement.
+
+  Fields:
+    allowedStatuses: Allowed billing statuses. The billing status check passes
+      if the actual billing status matches any of the provided values here.
+    selector: Selects the operation names to which this rule applies. Refer to
+      selector for syntax details.
+  """
+
+  allowedStatuses = _messages.StringField(1, repeated=True)
+  selector = _messages.StringField(2)
+
+
+class CompositeOperationMetadata(_messages.Message):
+  """Metadata for composite operations.
+
+  Messages:
+    OriginalRequestValue: Original request that triggered this operation.
+    ResponseFieldMasksValue: Defines which part of the response a child
+      operation will contribute. Each key of the map is the name of a child
+      operation. Each value is a field mask that identifies what that child
+      operation contributes to the response, for example, "quota_settings",
+      "visiblity_settings", etc.
+
+  Fields:
+    childOperations: The child operations. The details of the asynchronous
+      child operations are stored in a separate row and not in this metadata.
+      Only the operation name is stored here.
+    originalRequest: Original request that triggered this operation.
+    persisted: Indicates whether the requested state change has been
+      persisted. Once this field is set, it is guaranteed to propagate to all
+      backends eventually, but it may not be visible immediately. Clients that
+      are not concerned with waiting on propagation can stop polling the
+      operation once the persisted field is set
+    responseFieldMasks: Defines which part of the response a child operation
+      will contribute. Each key of the map is the name of a child operation.
+      Each value is a field mask that identifies what that child operation
+      contributes to the response, for example, "quota_settings",
+      "visiblity_settings", etc.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class OriginalRequestValue(_messages.Message):
+    """Original request that triggered this operation.
+
+    Messages:
+      AdditionalProperty: An additional property for a OriginalRequestValue
+        object.
+
+    Fields:
+      additionalProperties: Properties of the object. Contains field @type
+        with type URL.
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a OriginalRequestValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A extra_types.JsonValue attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('extra_types.JsonValue', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class ResponseFieldMasksValue(_messages.Message):
+    """Defines which part of the response a child operation will contribute.
+    Each key of the map is the name of a child operation. Each value is a
+    field mask that identifies what that child operation contributes to the
+    response, for example, "quota_settings", "visiblity_settings", etc.
+
+    Messages:
+      AdditionalProperty: An additional property for a ResponseFieldMasksValue
+        object.
+
+    Fields:
+      additionalProperties: Additional properties of type
+        ResponseFieldMasksValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a ResponseFieldMasksValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A string attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.StringField(2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  childOperations = _messages.MessageField('Operation', 1, repeated=True)
+  originalRequest = _messages.MessageField('OriginalRequestValue', 2)
+  persisted = _messages.BooleanField(3)
+  responseFieldMasks = _messages.MessageField('ResponseFieldMasksValue', 4)
+
+
+class ConfigFile(_messages.Message):
+  """Generic specification of a source configuration file
+
+  Enums:
+    FileTypeValueValuesEnum: The kind of configuration file represented. This
+      is used to determine the method for generating `google.api.Service`
+      using this file.
+
+  Fields:
+    contents: DEPRECATED. The contents of the configuration file. Use
+      file_contents moving forward.
+    fileContents: The bytes that constitute the file.
+    filePath: The file name of the configuration file (full or relative path).
+    fileType: The kind of configuration file represented. This is used to
+      determine the method for generating `google.api.Service` using this
+      file.
+  """
+
+  class FileTypeValueValuesEnum(_messages.Enum):
+    """The kind of configuration file represented. This is used to determine
+    the method for generating `google.api.Service` using this file.
+
+    Values:
+      FILE_TYPE_UNSPECIFIED: Unknown file type.
+      SERVICE_CONFIG_YAML: YAML-specification of service.
+      OPEN_API_JSON: OpenAPI specification, serialized in JSON.
+      OPEN_API_YAML: OpenAPI specification, serialized in YAML.
+      FILE_DESCRIPTOR_SET_PROTO: FileDescriptorSet, generated by protoc.  To
+        generate, use protoc with imports and source info included. For an
+        example test.proto file, the following command would put the value in
+        a new file named out.pb.  $protoc --include_imports
+        --include_source_info test.proto -o out.pb
+    """
+    FILE_TYPE_UNSPECIFIED = 0
+    SERVICE_CONFIG_YAML = 1
+    OPEN_API_JSON = 2
+    OPEN_API_YAML = 3
+    FILE_DESCRIPTOR_SET_PROTO = 4
+
+  contents = _messages.StringField(1)
+  fileContents = _messages.BytesField(2)
+  filePath = _messages.StringField(3)
+  fileType = _messages.EnumField('FileTypeValueValuesEnum', 4)
+
+
+class ConfigOptions(_messages.Message):
+  """A set of options to cover use of source config within `ServiceManager`
+  and related tools.
+  """
+
+
+
+class ConfigSource(_messages.Message):
+  """Represents a user-specified configuration for a service (as opposed to
+  the the generated service config form provided by `google.api.Service`).
+  This is meant to encode service config as manipulated directly by customers,
+  rather than the config form resulting from toolchain generation and
+  normalization.
+
+  Fields:
+    files: Set of source configuration files that are used to generate a
+      service config (`google.api.Service`).
+    id: A unique ID for a specific instance of this message, typically
+      assigned by the client for tracking purpose. If empty, the server may
+      choose to generate one instead.
+    openApiSpec: OpenAPI specification
+    options: Options to cover use of source config within ServiceManager and
+      tools
+    protoSpec: Protocol buffer API specification
+  """
+
+  files = _messages.MessageField('ConfigFile', 1, repeated=True)
+  id = _messages.StringField(2)
+  openApiSpec = _messages.MessageField('OpenApiSpec', 3)
+  options = _messages.MessageField('ConfigOptions', 4)
+  protoSpec = _messages.MessageField('ProtoSpec', 5)
+
+
+class Context(_messages.Message):
+  """`Context` defines which contexts an API requests.  Example:      context:
+  rules:       - selector: "*"         requested:         -
+  google.rpc.context.ProjectContext         - google.rpc.context.OriginContext
+  The above specifies that all methods in the API request
+  `google.rpc.context.ProjectContext` and `google.rpc.context.OriginContext`.
+  Available context types are defined in package `google.rpc.context`.
+
+  Fields:
+    rules: List of rules for context, applicable to methods.
+  """
+
+  rules = _messages.MessageField('ContextRule', 1, repeated=True)
+
+
+class ContextRule(_messages.Message):
+  """A context rule provides information about the context for an individual
+  API element.
+
+  Fields:
+    provided: A list of full type names of provided contexts.
+    requested: A list of full type names of requested contexts.
+    selector: Selects the methods to which this rule applies.  Refer to
+      selector for syntax details.
+  """
+
+  provided = _messages.StringField(1, repeated=True)
+  requested = _messages.StringField(2, repeated=True)
+  selector = _messages.StringField(3)
+
+
+class Control(_messages.Message):
+  """Selects and configures the service controller used by the service.  The
+  service controller handles features like abuse, quota, billing, logging,
+  monitoring, etc.
+
+  Fields:
+    environment: The service control environment to use. If empty, no control
+      plane feature (like quota and billing) will be enabled.
+  """
+
+  environment = _messages.StringField(1)
+
+
+class ConvertConfigRequest(_messages.Message):
+  """Request message for `ConvertConfig` method.
+
+  Messages:
+    ConfigSpecValue: Input configuration For this version of API, the
+      supported type is OpenApiSpec
+
+  Fields:
+    configSpec: Input configuration For this version of API, the supported
+      type is OpenApiSpec
+    openApiSpec: The OpenAPI specification for an API.
+    serviceName: The service name to use for constructing the normalized
+      service configuration equivalent of the provided configuration
+      specification.
+    swaggerSpec: The swagger specification for an API.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class ConfigSpecValue(_messages.Message):
+    """Input configuration For this version of API, the supported type is
+    OpenApiSpec
+
+    Messages:
+      AdditionalProperty: An additional property for a ConfigSpecValue object.
+
+    Fields:
+      additionalProperties: Properties of the object. Contains field @type
+        with type URL.
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a ConfigSpecValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A extra_types.JsonValue attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('extra_types.JsonValue', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  configSpec = _messages.MessageField('ConfigSpecValue', 1)
+  openApiSpec = _messages.MessageField('OpenApiSpec', 2)
+  serviceName = _messages.StringField(3)
+  swaggerSpec = _messages.MessageField('SwaggerSpec', 4)
+
+
+class ConvertConfigResponse(_messages.Message):
+  """Response message for `ConvertConfig` method.
+
+  Fields:
+    diagnostics: Any errors or warnings that occured during config conversion.
+    serviceConfig: The service configuration. Not set if errors occured during
+      conversion.
+  """
+
+  diagnostics = _messages.MessageField('Diagnostic', 1, repeated=True)
+  serviceConfig = _messages.MessageField('Service', 2)
+
+
+class CustomError(_messages.Message):
+  """Customize service error responses.  For example, list any service
+  specific protobuf types that can appear in error detail lists of error
+  responses.  Example:      custom_error:       types:       -
+  google.foo.v1.CustomError       - google.foo.v1.AnotherError
+
+  Fields:
+    rules: The list of custom error rules to select to which messages this
+      should apply.
+    types: The list of custom error detail types, e.g.
+      'google.foo.v1.CustomError'.
+  """
+
+  rules = _messages.MessageField('CustomErrorRule', 1, repeated=True)
+  types = _messages.StringField(2, repeated=True)
+
+
+class CustomErrorRule(_messages.Message):
+  """A custom error rule.
+
+  Fields:
+    isErrorType: Mark this message as possible payload in error response.
+      Otherwise, objects of this type will be filtered when they appear in
+      error payload.
+    selector: Selects messages to which this rule applies.  Refer to selector
+      for syntax details.
+  """
+
+  isErrorType = _messages.BooleanField(1)
+  selector = _messages.StringField(2)
+
+
+class CustomHttpPattern(_messages.Message):
+  """A custom pattern is used for defining custom HTTP verb.
+
+  Fields:
+    kind: The name of this custom HTTP verb.
+    path: The path matched by this custom verb.
+  """
+
+  kind = _messages.StringField(1)
+  path = _messages.StringField(2)
+
+
+class CustomerSettings(_messages.Message):
+  """Settings that control how a customer (identified by a billing account)
+  uses a service
+
+  Fields:
+    customerId: ID for the customer that consumes the service (see above). The
+      supported types of customers are:  1. domain:{domain} A Google Apps
+      domain name. For example, google.com.  2.
+      billingAccount:{billing_account_id} A Google Cloud Plafrom billing
+      account. For Example, 123456-7890ab-cdef12.
+    quotaSettings: Settings that control how much or how fast the service can
+      be used by the consumer projects owned by the customer collectively.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.
+  """
+
+  customerId = _messages.StringField(1)
+  quotaSettings = _messages.MessageField('QuotaSettings', 2)
+  serviceName = _messages.StringField(3)
+
+
+class Diagnostic(_messages.Message):
+  """A collection that represents a diagnostic message (error or warning)
+
+  Enums:
+    KindValueValuesEnum: The kind of diagnostic information provided.
+
+  Fields:
+    kind: The kind of diagnostic information provided.
+    location: Location of the cause or context of the diagnostic information.
+    message: The string message of the diagnostic information.
+  """
+
+  class KindValueValuesEnum(_messages.Enum):
+    """The kind of diagnostic information provided.
+
+    Values:
+      WARNING: Warnings and errors
+      ERROR: Only errors
+    """
+    WARNING = 0
+    ERROR = 1
+
+  kind = _messages.EnumField('KindValueValuesEnum', 1)
+  location = _messages.StringField(2)
+  message = _messages.StringField(3)
+
+
+class DisableServiceRequest(_messages.Message):
+  """Request message for DisableService method.
+
+  Fields:
+    consumerId: The identity of consumer resource which service disablement
+      will be applied to.  The Google Service Management implementation
+      accepts the following forms: "project:<project_id>",
+      "project_number:<project_number>".  Note: this is made compatible with
+      google.api.servicecontrol.v1.Operation.consumer_id.
+  """
+
+  consumerId = _messages.StringField(1)
+
+
+class Documentation(_messages.Message):
+  """`Documentation` provides the information for describing a service.
+  Example: <pre><code>documentation:   summary: >     The Google Calendar API
+  gives access     to most calendar features.   pages:   - name: Overview
+  content: &#40;== include google/foo/overview.md ==&#41;   - name: Tutorial
+  content: &#40;== include google/foo/tutorial.md ==&#41;     subpages;     -
+  name: Java       content: &#40;== include google/foo/tutorial_java.md
+  ==&#41;   rules:   - selector: google.calendar.Calendar.Get     description:
+  >       ...   - selector: google.calendar.Calendar.Put     description: >
+  ... </code></pre> Documentation is provided in markdown syntax. In addition
+  to standard markdown features, definition lists, tables and fenced code
+  blocks are supported. Section headers can be provided and are interpreted
+  relative to the section nesting of the context where a documentation
+  fragment is embedded.  Documentation from the IDL is merged with
+  documentation defined via the config at normalization time, where
+  documentation provided by config rules overrides IDL provided.  A number of
+  constructs specific to the API platform are supported in documentation text.
+  In order to reference a proto element, the following notation can be used:
+  <pre><code>&#91;fully.qualified.proto.name]&#91;]</code></pre> To override
+  the display text used for the link, this can be used:
+  <pre><code>&#91;display text]&#91;fully.qualified.proto.name]</code></pre>
+  Text can be excluded from doc using the following notation:
+  <pre><code>&#40;-- internal comment --&#41;</code></pre> Comments can be
+  made conditional using a visibility label. The below text will be only
+  rendered if the `BETA` label is available: <pre><code>&#40;--BETA: comment
+  for BETA users --&#41;</code></pre> A few directives are available in
+  documentation. Note that directives must appear on a single line to be
+  properly identified. The `include` directive includes a markdown file from
+  an external source: <pre><code>&#40;== include path/to/file
+  ==&#41;</code></pre> The `resource_for` directive marks a message to be the
+  resource of a collection in REST view. If it is not specified, tools attempt
+  to infer the resource from the operations in a collection:
+  <pre><code>&#40;== resource_for v1.shelves.books ==&#41;</code></pre> The
+  directive `suppress_warning` does not directly affect documentation and is
+  documented together with service config validation.
+
+  Fields:
+    documentationRootUrl: The URL to the root of documentation.
+    overview: Declares a single overview page. For example:
+      <pre><code>documentation:   summary: ...   overview: &#40;== include
+      overview.md ==&#41; </code></pre> This is a shortcut for the following
+      declaration (using pages style): <pre><code>documentation:   summary:
+      ...   pages:   - name: Overview     content: &#40;== include overview.md
+      ==&#41; </code></pre> Note: you cannot specify both `overview` field and
+      `pages` field.
+    pages: The top level pages for the documentation set.
+    rules: Documentation rules for individual elements of the service.
+    summary: A short summary of what the service does. Can only be provided by
+      plain text.
+  """
+
+  documentationRootUrl = _messages.StringField(1)
+  overview = _messages.StringField(2)
+  pages = _messages.MessageField('Page', 3, repeated=True)
+  rules = _messages.MessageField('DocumentationRule', 4, repeated=True)
+  summary = _messages.StringField(5)
+
+
+class DocumentationRule(_messages.Message):
+  """A documentation rule provides information about individual API elements.
+
+  Fields:
+    deprecationDescription: Deprecation description of the selected
+      element(s). It can be provided if an element is marked as `deprecated`.
+    description: Description of the selected API(s).
+    selector: The selector is a comma-separated list of patterns. Each pattern
+      is a qualified name of the element which may end in "*", indicating a
+      wildcard. Wildcards are only allowed at the end and for a whole
+      component of the qualified name, i.e. "foo.*" is ok, but not "foo.b*" or
+      "foo.*.bar". To specify a default for all applicable elements, the whole
+      pattern "*" is used.
+  """
+
+  deprecationDescription = _messages.StringField(1)
+  description = _messages.StringField(2)
+  selector = _messages.StringField(3)
+
+
+class EffectiveQuotaGroup(_messages.Message):
+  """An effective quota group contains both the metadata for a quota group as
+  derived from the service config, and the effective limits in that group as
+  calculated from producer and consumer overrides together with service
+  defaults.
+
+  Enums:
+    BillingInteractionValueValuesEnum: How this quota group interacts with
+      the consumer project's billing status.
+
+  Fields:
+    baseGroup: The service configuration for this quota group, minus the quota
+      limits, which are replaced by the effective limits below.
+    billingInteraction: A BillingInteractionValueValuesEnum attribute.
+    quotas: The usage and limit information for each limit within this quota
+      group.
+  """
+
+  class BillingInteractionValueValuesEnum(_messages.Enum):
+    """BillingInteractionValueValuesEnum enum type.
+
+    Values:
+      BILLING_INTERACTION_UNSPECIFIED: The interaction between this quota
+        group and the project billing status is unspecified.
+      NONBILLABLE_ONLY: This quota group is enforced only when the consumer
+        project is not billable.
+      BILLABLE_ONLY: This quota group is enforced only when the consumer
+        project is billable.
+      ANY_BILLING_STATUS: This quota group is enforced regardless of the
+        consumer project's billing status.
+    """
+    BILLING_INTERACTION_UNSPECIFIED = 0
+    NONBILLABLE_ONLY = 1
+    BILLABLE_ONLY = 2
+    ANY_BILLING_STATUS = 3
+
+  # NOTE(review): generated declarative fields; numeric args look like proto
+  # field numbers -- keep them stable.
+  baseGroup = _messages.MessageField('QuotaGroup', 1)
+  billingInteraction = _messages.EnumField('BillingInteractionValueValuesEnum', 2)
+  quotas = _messages.MessageField('QuotaInfo', 3, repeated=True)
+
+
+class EffectiveQuotaLimit(_messages.Message):
+  """An effective quota limit contains the metadata for a quota limit as
+  derived from the service config, together with fields that describe the
+  effective limit value and what overrides can be applied to it.
+
+  Fields:
+    baseLimit: The service's configuration for this quota limit.
+    effectiveLimit: The effective limit value, based on the stored producer
+      and consumer overrides and the service defaults.
+    key: The key used to identify this limit when applying overrides. The
+      consumer_overrides and producer_overrides maps are keyed by strings of
+      the form "QuotaGroupName/QuotaLimitName".
+    maxConsumerOverrideAllowed: The maximum override value that a consumer may
+      specify.
+  """
+
+  # Generated declarative fields; numeric args look like proto field numbers.
+  baseLimit = _messages.MessageField('QuotaLimit', 1)
+  effectiveLimit = _messages.IntegerField(2)
+  key = _messages.StringField(3)
+  maxConsumerOverrideAllowed = _messages.IntegerField(4)
+
+
+class EnableServiceRequest(_messages.Message):
+  """Request message for the EnableService method.
+
+  Fields:
+    consumerId: The identity of consumer resource which service enablement
+      will be applied to.  The Google Service Management implementation
+      accepts the following forms: "project:<project_id>",
+      "project_number:<project_number>".  Note: this is made compatible with
+      google.api.servicecontrol.v1.Operation.consumer_id.
+  """
+
+  # Single string field; the numeric arg is presumably the proto field number.
+  consumerId = _messages.StringField(1)
+
+
+class Enum(_messages.Message):
+  """Enum type definition (mirrors google.protobuf.Enum).
+
+  Enums:
+    SyntaxValueValuesEnum: The source syntax.
+
+  Fields:
+    enumvalue: Enum value definitions.
+    name: Enum type name.
+    options: Protocol buffer options.
+    sourceContext: The source context.
+    syntax: The source syntax.
+  """
+
+  class SyntaxValueValuesEnum(_messages.Enum):
+    """The source syntax.
+
+    Values:
+      SYNTAX_PROTO2: Syntax `proto2`.
+      SYNTAX_PROTO3: Syntax `proto3`.
+    """
+    SYNTAX_PROTO2 = 0
+    SYNTAX_PROTO3 = 1
+
+  # Generated declarative fields; numeric args look like proto field numbers.
+  enumvalue = _messages.MessageField('EnumValue', 1, repeated=True)
+  name = _messages.StringField(2)
+  options = _messages.MessageField('Option', 3, repeated=True)
+  sourceContext = _messages.MessageField('SourceContext', 4)
+  syntax = _messages.EnumField('SyntaxValueValuesEnum', 5)
+
+
+class EnumValue(_messages.Message):
+  """Enum value definition (mirrors google.protobuf.EnumValue).
+
+  Fields:
+    name: Enum value name.
+    number: Enum value number.
+    options: Protocol buffer options.
+  """
+
+  # `number` is declared INT32 (32-bit) rather than the default int variant.
+  name = _messages.StringField(1)
+  number = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+  options = _messages.MessageField('Option', 3, repeated=True)
+
+
+class Field(_messages.Message):
+  """A single field of a message type (mirrors google.protobuf.Field).
+
+  Enums:
+    CardinalityValueValuesEnum: The field cardinality.
+    KindValueValuesEnum: The field type.
+
+  Fields:
+    cardinality: The field cardinality.
+    defaultValue: The string value of the default value of this field. Proto2
+      syntax only.
+    jsonName: The field JSON name.
+    kind: The field type.
+    name: The field name.
+    number: The field number.
+    oneofIndex: The index of the field type in `Type.oneofs`, for message or
+      enumeration types. The first type has index 1; zero means the type is
+      not in the list.
+    options: The protocol buffer options.
+    packed: Whether to use alternative packed wire representation.
+    typeUrl: The field type URL, without the scheme, for message or
+      enumeration types. Example:
+      `"type.googleapis.com/google.protobuf.Timestamp"`.
+  """
+
+  class CardinalityValueValuesEnum(_messages.Enum):
+    """The field cardinality.
+
+    Values:
+      CARDINALITY_UNKNOWN: For fields with unknown cardinality.
+      CARDINALITY_OPTIONAL: For optional fields.
+      CARDINALITY_REQUIRED: For required fields. Proto2 syntax only.
+      CARDINALITY_REPEATED: For repeated fields.
+    """
+    CARDINALITY_UNKNOWN = 0
+    CARDINALITY_OPTIONAL = 1
+    CARDINALITY_REQUIRED = 2
+    CARDINALITY_REPEATED = 3
+
+  class KindValueValuesEnum(_messages.Enum):
+    """The field type.
+
+    Values:
+      TYPE_UNKNOWN: Field type unknown.
+      TYPE_DOUBLE: Field type double.
+      TYPE_FLOAT: Field type float.
+      TYPE_INT64: Field type int64.
+      TYPE_UINT64: Field type uint64.
+      TYPE_INT32: Field type int32.
+      TYPE_FIXED64: Field type fixed64.
+      TYPE_FIXED32: Field type fixed32.
+      TYPE_BOOL: Field type bool.
+      TYPE_STRING: Field type string.
+      TYPE_GROUP: Field type group. Proto2 syntax only, and deprecated.
+      TYPE_MESSAGE: Field type message.
+      TYPE_BYTES: Field type bytes.
+      TYPE_UINT32: Field type uint32.
+      TYPE_ENUM: Field type enum.
+      TYPE_SFIXED32: Field type sfixed32.
+      TYPE_SFIXED64: Field type sfixed64.
+      TYPE_SINT32: Field type sint32.
+      TYPE_SINT64: Field type sint64.
+    """
+    TYPE_UNKNOWN = 0
+    TYPE_DOUBLE = 1
+    TYPE_FLOAT = 2
+    TYPE_INT64 = 3
+    TYPE_UINT64 = 4
+    TYPE_INT32 = 5
+    TYPE_FIXED64 = 6
+    TYPE_FIXED32 = 7
+    TYPE_BOOL = 8
+    TYPE_STRING = 9
+    TYPE_GROUP = 10
+    TYPE_MESSAGE = 11
+    TYPE_BYTES = 12
+    TYPE_UINT32 = 13
+    TYPE_ENUM = 14
+    TYPE_SFIXED32 = 15
+    TYPE_SFIXED64 = 16
+    TYPE_SINT32 = 17
+    TYPE_SINT64 = 18
+
+  # Generated declarative fields; numeric args look like proto field numbers.
+  # `number` and `oneofIndex` are explicitly 32-bit (Variant.INT32).
+  cardinality = _messages.EnumField('CardinalityValueValuesEnum', 1)
+  defaultValue = _messages.StringField(2)
+  jsonName = _messages.StringField(3)
+  kind = _messages.EnumField('KindValueValuesEnum', 4)
+  name = _messages.StringField(5)
+  number = _messages.IntegerField(6, variant=_messages.Variant.INT32)
+  oneofIndex = _messages.IntegerField(7, variant=_messages.Variant.INT32)
+  options = _messages.MessageField('Option', 8, repeated=True)
+  packed = _messages.BooleanField(9)
+  typeUrl = _messages.StringField(10)
+
+
+class File(_messages.Message):
+  """A single Swagger specification file.
+
+  Fields:
+    contents: The contents of the Swagger spec file.
+    path: The relative path of the Swagger spec file.
+  """
+
+  contents = _messages.StringField(1)
+  path = _messages.StringField(2)
+
+
+class Http(_messages.Message):
+  """Defines the HTTP configuration for a service. It contains a list of
+  HttpRule, each specifying the mapping of an RPC method to one or more HTTP
+  REST API methods.
+
+  Fields:
+    rules: A list of HTTP rules for configuring the HTTP REST API methods.
+  """
+
+  # Repeated message field referencing HttpRule (declared below by name).
+  rules = _messages.MessageField('HttpRule', 1, repeated=True)
+
+
+class HttpRule(_messages.Message):
+  """`HttpRule` defines the mapping of an RPC method to one or more HTTP REST
+  APIs.  The mapping determines what portions of the request message are
+  populated from the path, query parameters, or body of the HTTP request.  The
+  mapping is typically specified as an `google.api.http` annotation, see
+  "google/api/annotations.proto" for details.  The mapping consists of a field
+  specifying the path template and method kind.  The path template can refer
+  to fields in the request message, as in the example below which describes a
+  REST GET operation on a resource collection of messages:  ```proto service
+  Messaging {   rpc GetMessage(GetMessageRequest) returns (Message) {
+  option (google.api.http).get = "/v1/messages/{message_id}/{sub.subfield}";
+  } } message GetMessageRequest {   message SubMessage {     string subfield =
+  1;   }   string message_id = 1; // mapped to the URL   SubMessage sub = 2;
+  // `sub.subfield` is url-mapped } message Message {   string text = 1; //
+  content of the resource } ```  This definition enables an automatic,
+  bidirectional mapping of HTTP JSON to RPC. Example:  HTTP | RPC -----|-----
+  `GET /v1/messages/123456/foo`  | `GetMessage(message_id: "123456" sub:
+  SubMessage(subfield: "foo"))`  In general, not only fields but also field
+  paths can be referenced from a path pattern. Fields mapped to the path
+  pattern cannot be repeated and must have a primitive (non-message) type.
+  Any fields in the request message which are not bound by the path pattern
+  automatically become (optional) HTTP query parameters. Assume the following
+  definition of the request message:  ```proto message GetMessageRequest {
+  message SubMessage {     string subfield = 1;   }   string message_id = 1;
+  // mapped to the URL   int64 revision = 2;    // becomes a parameter
+  SubMessage sub = 3;    // `sub.subfield` becomes a parameter } ```  This
+  enables a HTTP JSON to RPC mapping as below:  HTTP | RPC -----|----- `GET
+  /v1/messages/123456?revision=2&sub.subfield=foo` | `GetMessage(message_id:
+  "123456" revision: 2 sub: SubMessage(subfield: "foo"))`  Note that fields
+  which are mapped to HTTP parameters must have a primitive type or a repeated
+  primitive type. Message types are not allowed. In the case of a repeated
+  type, the parameter can be repeated in the URL, as in `...?param=A&param=B`.
+  For HTTP method kinds which allow a request body, the `body` field specifies
+  the mapping. Consider a REST update method on the message resource
+  collection:  ```proto service Messaging {   rpc
+  UpdateMessage(UpdateMessageRequest) returns (Message) {     option
+  (google.api.http) = {       put: "/v1/messages/{message_id}"       body:
+  "message"     };   } } message UpdateMessageRequest {   string message_id =
+  1; // mapped to the URL   Message message = 2;   // mapped to the body } ```
+  The following HTTP JSON to RPC mapping is enabled, where the representation
+  of the JSON in the request body is determined by protos JSON encoding:  HTTP
+  | RPC -----|----- `PUT /v1/messages/123456 { "text": "Hi!" }` |
+  `UpdateMessage(message_id: "123456" message { text: "Hi!" })`  The special
+  name `*` can be used in the body mapping to define that every field not
+  bound by the path template should be mapped to the request body.  This
+  enables the following alternative definition of the update method:  ```proto
+  service Messaging {   rpc UpdateMessage(Message) returns (Message) {
+  option (google.api.http) = {       put: "/v1/messages/{message_id}"
+  body: "*"     };   } } message Message {   string message_id = 1;   string
+  text = 2; } ```  The following HTTP JSON to RPC mapping is enabled:  HTTP |
+  RPC -----|----- `PUT /v1/messages/123456 { "text": "Hi!" }` |
+  `UpdateMessage(message_id: "123456" text: "Hi!")`  Note that when using `*`
+  in the body mapping, it is not possible to have HTTP parameters, as all
+  fields not bound by the path end in the body. This makes this option more
+  rarely used in practice of defining REST APIs. The common usage of `*` is in
+  custom methods which don't use the URL at all for transferring data.  It is
+  possible to define multiple HTTP methods for one RPC by using the
+  `additional_bindings` option. Example:  ```proto service Messaging {   rpc
+  GetMessage(GetMessageRequest) returns (Message) {     option
+  (google.api.http) = {       get: "/v1/messages/{message_id}"
+  additional_bindings {         get:
+  "/v1/users/{user_id}/messages/{message_id}"       }     };   } } message
+  GetMessageRequest {   string message_id = 1;   string user_id = 2; } ```
+  This enables the following two alternative HTTP JSON to RPC mappings:  HTTP
+  | RPC -----|----- `GET /v1/messages/123456` | `GetMessage(message_id:
+  "123456")` `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me"
+  message_id: "123456")`  # Rules for HTTP mapping  The rules for mapping HTTP
+  path, query parameters, and body fields to the request message are as
+  follows:  1. The `body` field specifies either `*` or a field path, or is
+  omitted. If omitted, it assumes there is no HTTP body. 2. Leaf fields
+  (recursive expansion of nested messages in the    request) can be classified
+  into three types:     (a) Matched in the URL template.     (b) Covered by
+  body (if body is `*`, everything except (a) fields;         else everything
+  under the body field)     (c) All other fields. 3. URL query parameters
+  found in the HTTP request are mapped to (c) fields. 4. Any body sent with an
+  HTTP request can contain only (b) fields.  The syntax of the path template
+  is as follows:      Template = "/" Segments [ Verb ] ;     Segments =
+  Segment { "/" Segment } ;     Segment  = "*" | "**" | LITERAL | Variable ;
+  Variable = "{" FieldPath [ "=" Segments ] "}" ;     FieldPath = IDENT { "."
+  IDENT } ;     Verb     = ":" LITERAL ;  The syntax `*` matches a single path
+  segment. It follows the semantics of [RFC
+  6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String
+  Expansion.  The syntax `**` matches zero or more path segments. It follows
+  the semantics of [RFC 6570](https://tools.ietf.org/html/rfc6570) Section
+  3.2.3 Reserved Expansion.  The syntax `LITERAL` matches literal text in the
+  URL path.  The syntax `Variable` matches the entire path as specified by its
+  template; this nested template must not contain further variables. If a
+  variable matches a single path segment, its template may be omitted, e.g.
+  `{var}` is equivalent to `{var=*}`.  NOTE: the field paths in variables and
+  in the `body` must not refer to repeated fields or map fields.  Use
+  CustomHttpPattern to specify any HTTP method that is not included in the
+  `pattern` field, such as HEAD, or "*" to leave the HTTP method unspecified
+  for a given URL path rule. The wild-card rule is useful for services that
+  provide content to Web (HTML) clients.
+
+  Fields:
+    additionalBindings: Additional HTTP bindings for the selector. Nested
+      bindings must not contain an `additional_bindings` field themselves
+      (that is, the nesting may only be one level deep).
+    body: The name of the request field whose value is mapped to the HTTP
+      body, or `*` for mapping all fields not captured by the path pattern to
+      the HTTP body. NOTE: the referred field must not be a repeated field.
+    custom: Custom pattern is used for defining custom verbs.
+    delete: Used for deleting a resource.
+    get: Used for listing and getting information about resources.
+    mediaDownload: Do not use this. For media support, add instead
+      [][google.bytestream.RestByteStream] as an API to your configuration.
+    mediaUpload: Do not use this. For media support, add instead
+      [][google.bytestream.RestByteStream] as an API to your configuration.
+    patch: Used for updating a resource.
+    post: Used for creating a resource.
+    put: Used for updating a resource.
+    selector: Selects methods to which this rule applies.  Refer to selector
+      for syntax details.
+  """
+
+  # NOTE(review): `additionalBindings` is self-referential (HttpRule), matching
+  # the one-level-deep nesting described above.
+  additionalBindings = _messages.MessageField('HttpRule', 1, repeated=True)
+  body = _messages.StringField(2)
+  custom = _messages.MessageField('CustomHttpPattern', 3)
+  delete = _messages.StringField(4)
+  get = _messages.StringField(5)
+  mediaDownload = _messages.MessageField('MediaDownload', 6)
+  mediaUpload = _messages.MessageField('MediaUpload', 7)
+  patch = _messages.StringField(8)
+  post = _messages.StringField(9)
+  put = _messages.StringField(10)
+  selector = _messages.StringField(11)
+
+
+class LabelDescriptor(_messages.Message):
+  """A description of a label.
+
+  Enums:
+    ValueTypeValueValuesEnum: The type of data that can be assigned to the
+      label.
+
+  Fields:
+    description: A human-readable description for the label.
+    key: The label key.
+    valueType: The type of data that can be assigned to the label.
+  """
+
+  class ValueTypeValueValuesEnum(_messages.Enum):
+    """The type of data that can be assigned to the label.
+
+    Values:
+      STRING: A variable-length string. This is the default.
+      BOOL: Boolean; true or false.
+      INT64: A 64-bit signed integer.
+    """
+    STRING = 0
+    BOOL = 1
+    INT64 = 2
+
+  # Generated declarative fields; numeric args look like proto field numbers.
+  description = _messages.StringField(1)
+  key = _messages.StringField(2)
+  valueType = _messages.EnumField('ValueTypeValueValuesEnum', 3)
+
+
+class ListServiceConfigsResponse(_messages.Message):
+  """Response message for the ListServiceConfigs method.
+
+  Fields:
+    nextPageToken: The token of the next page of results.
+    serviceConfigs: The list of service config resources.
+  """
+
+  nextPageToken = _messages.StringField(1)
+  serviceConfigs = _messages.MessageField('Service', 2, repeated=True)
+
+
+class ListServicesResponse(_messages.Message):
+  """Response message for the `ListServices` method.
+
+  Fields:
+    nextPageToken: Token that can be passed to `ListServices` to resume a
+      paginated query.
+    services: The results of the query.
+  """
+
+  nextPageToken = _messages.StringField(1)
+  services = _messages.MessageField('ManagedService', 2, repeated=True)
+
+
+class LogDescriptor(_messages.Message):
+  """A description of a log type. Example in YAML format:      - name:
+  library.googleapis.com/activity_history       description: The history of
+  borrowing and returning library items.       display_name: Activity
+  labels:       - key: /customer_id         description: Identifier of a
+  library customer
+
+  Fields:
+    description: A human-readable description of this log. This information
+      appears in the documentation and can contain details.
+    displayName: The human-readable name for this log. This information
+      appears on the user interface and should be concise.
+    labels: The set of labels that are available to describe a specific log
+      entry. Runtime requests that contain labels not specified here are
+      considered invalid.
+    name: The name of the log. It must be less than 512 characters long and
+      can include the following characters: upper- and lower-case alphanumeric
+      characters [A-Za-z0-9], and punctuation characters including slash,
+      underscore, hyphen, period [/_-.].
+  """
+
+  # Generated declarative fields; numeric args look like proto field numbers.
+  description = _messages.StringField(1)
+  displayName = _messages.StringField(2)
+  labels = _messages.MessageField('LabelDescriptor', 3, repeated=True)
+  name = _messages.StringField(4)
+
+
+class Logging(_messages.Message):
+  """Logging configuration of the service.  The following example shows how to
+  configure logs to be sent to the producer and consumer projects. In the
+  example, the `library.googleapis.com/activity_history` log is sent to both
+  the producer and consumer projects, whereas the
+  `library.googleapis.com/purchase_history` log is only sent to the producer
+  project:      monitored_resources:     - type: library.googleapis.com/branch
+  labels:       - key: /city         description: The city where the library
+  branch is located in.       - key: /name         description: The name of
+  the branch.     logs:     - name: library.googleapis.com/activity_history
+  labels:       - key: /customer_id     - name:
+  library.googleapis.com/purchase_history     logging:
+  producer_destinations:       - monitored_resource:
+  library.googleapis.com/branch         logs:         -
+  library.googleapis.com/activity_history         -
+  library.googleapis.com/purchase_history       consumer_destinations:       -
+  monitored_resource: library.googleapis.com/branch         logs:         -
+  library.googleapis.com/activity_history
+
+  Fields:
+    consumerDestinations: Logging configurations for sending logs to the
+      consumer project. There can be multiple consumer destinations, each one
+      must have a different monitored resource type. A log can be used in at
+      most one consumer destination.
+    producerDestinations: Logging configurations for sending logs to the
+      producer project. There can be multiple producer destinations, each one
+      must have a different monitored resource type. A log can be used in at
+      most one producer destination.
+  """
+
+  # Both destinations share the LoggingDestination message type.
+  consumerDestinations = _messages.MessageField('LoggingDestination', 1, repeated=True)
+  producerDestinations = _messages.MessageField('LoggingDestination', 2, repeated=True)
+
+
+class LoggingDestination(_messages.Message):
+  """Configuration of a specific logging destination (the producer project or
+  the consumer project).
+
+  Fields:
+    logs: Names of the logs to be sent to this destination. Each name must be
+      defined in the Service.logs section.
+    monitoredResource: The monitored resource type. The type must be defined
+      in the Service.monitored_resources section.
+  """
+
+  logs = _messages.StringField(1, repeated=True)
+  monitoredResource = _messages.StringField(2)
+
+
+class ManagedService(_messages.Message):
+  """The full representation of an API Service that is managed by the
+  `ServiceManager` API.  Includes both the service configuration, as well as
+  other control plane deployment related information.
+
+  Fields:
+    configSource: User-supplied source configuration for the service. This is
+      distinct from the generated configuration provided in
+      `google.api.Service`. This is NOT populated on GetService calls at the
+      moment. NOTE: Any upsert operation that contains both a service_config
+      and a config_source is considered invalid and will result in an error
+      being returned.
+    generation: A server-assigned monotonically increasing number that changes
+      whenever a mutation is made to the `ManagedService` or any of its
+      components via the `ServiceManager` API.
+    operations: Read-only view of pending operations affecting this resource,
+      if requested.
+    producerProjectId: ID of the project that produces and owns this service.
+    projectSettings: Read-only view of settings for a particular consumer
+      project, if requested.
+    serviceConfig: The service's generated configuration.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  This name must match `google.api.Service.name`
+      in the `service_config` field.
+  """
+
+  # Generated declarative fields; numeric args look like proto field numbers.
+  configSource = _messages.MessageField('ConfigSource', 1)
+  generation = _messages.IntegerField(2)
+  operations = _messages.MessageField('Operation', 3, repeated=True)
+  producerProjectId = _messages.StringField(4)
+  projectSettings = _messages.MessageField('ProjectSettings', 5)
+  serviceConfig = _messages.MessageField('Service', 6)
+  serviceName = _messages.StringField(7)
+
+
+class MediaDownload(_messages.Message):
+  """Deprecated. Do not use this. For media support, add instead
+  [][google.bytestream.RestByteStream] as an API to your configuration.
+
+  Fields:
+    enabled: Whether download is enabled.
+  """
+
+  enabled = _messages.BooleanField(1)
+
+
+class MediaUpload(_messages.Message):
+  """Deprecated. Do not use this. For media support, add instead
+  [][google.bytestream.RestByteStream] as an API to your configuration.
+
+  Fields:
+    enabled: Whether upload is enabled.
+  """
+
+  enabled = _messages.BooleanField(1)
+
+
+class Method(_messages.Message):
+  """Method represents a method of an API (mirrors google.protobuf.Method).
+
+  Enums:
+    SyntaxValueValuesEnum: The source syntax of this method.
+
+  Fields:
+    name: The simple name of this method.
+    options: Any metadata attached to the method.
+    requestStreaming: If true, the request is streamed.
+    requestTypeUrl: A URL of the input message type.
+    responseStreaming: If true, the response is streamed.
+    responseTypeUrl: The URL of the output message type.
+    syntax: The source syntax of this method.
+  """
+
+  class SyntaxValueValuesEnum(_messages.Enum):
+    """The source syntax of this method.
+
+    Values:
+      SYNTAX_PROTO2: Syntax `proto2`.
+      SYNTAX_PROTO3: Syntax `proto3`.
+    """
+    SYNTAX_PROTO2 = 0
+    SYNTAX_PROTO3 = 1
+
+  # Generated declarative fields; numeric args look like proto field numbers.
+  name = _messages.StringField(1)
+  options = _messages.MessageField('Option', 2, repeated=True)
+  requestStreaming = _messages.BooleanField(3)
+  requestTypeUrl = _messages.StringField(4)
+  responseStreaming = _messages.BooleanField(5)
+  responseTypeUrl = _messages.StringField(6)
+  syntax = _messages.EnumField('SyntaxValueValuesEnum', 7)
+
+
+class MetricDescriptor(_messages.Message):
+  """Defines a metric type and its schema.
+
+  Enums:
+    MetricKindValueValuesEnum: Whether the metric records instantaneous
+      values, changes to a value, etc.
+    ValueTypeValueValuesEnum: Whether the measurement is an integer, a
+      floating-point number, etc.
+
+  Fields:
+    description: A detailed description of the metric, which can be used in
+      documentation.
+    displayName: A concise name for the metric, which can be displayed in user
+      interfaces. Use sentence case without an ending period, for example
+      "Request count".
+    labels: The set of labels that can be used to describe a specific instance
+      of this metric type. For example, the
+      `compute.googleapis.com/instance/network/received_bytes_count` metric
+      type has a label, `loadbalanced`, that specifies whether the traffic was
+      received through a load balanced IP address.
+    metricKind: Whether the metric records instantaneous values, changes to a
+      value, etc.
+    name: Resource name. The format of the name may vary between different
+      implementations. For examples:
+      projects/{project_id}/metricDescriptors/{type=**}
+      metricDescriptors/{type=**}
+    type: The metric type including a DNS name prefix, for example
+      `"compute.googleapis.com/instance/cpu/utilization"`. Metric types should
+      use a natural hierarchical grouping such as the following:
+      compute.googleapis.com/instance/cpu/utilization
+      compute.googleapis.com/instance/disk/read_ops_count
+      compute.googleapis.com/instance/network/received_bytes_count  Note that
+      if the metric type changes, the monitoring data will be discontinued,
+      and anything depends on it will break, such as monitoring dashboards,
+      alerting rules and quota limits. Therefore, once a metric has been
+      published, its type should be immutable.
+    unit: The unit in which the metric value is reported. It is only
+      applicable if the `value_type` is `INT64`, `DOUBLE`, or `DISTRIBUTION`.
+      The supported units are a subset of [The Unified Code for Units of
+      Measure](http://unitsofmeasure.org/ucum.html) standard:  **Basic units
+      (UNIT)**  * `bit`   bit * `By`    byte * `s`     second * `min`   minute
+      * `h`     hour * `d`     day  **Prefixes (PREFIX)**  * `k`     kilo
+      (10**3) * `M`     mega    (10**6) * `G`     giga    (10**9) * `T`
+      tera    (10**12) * `P`     peta    (10**15) * `E`     exa     (10**18) *
+      `Z`     zetta   (10**21) * `Y`     yotta   (10**24) * `m`     milli
+      (10**-3) * `u`     micro   (10**-6) * `n`     nano    (10**-9) * `p`
+      pico    (10**-12) * `f`     femto   (10**-15) * `a`     atto
+      (10**-18) * `z`     zepto   (10**-21) * `y`     yocto   (10**-24) * `Ki`
+      kibi    (2**10) * `Mi`    mebi    (2**20) * `Gi`    gibi    (2**30) *
+      `Ti`    tebi    (2**40)  **Grammar**  The grammar includes the
+      dimensionless unit `1`, such as `1/s`.  The grammar also includes these
+      connectors:  * `/`    division (as an infix operator, e.g. `1/s`). * `.`
+      multiplication (as an infix operator, e.g. `GBy.d`)  The grammar for a
+      unit is as follows:      Expression = Component { "." Component } { "/"
+      Component } ;      Component = [ PREFIX ] UNIT [ Annotation ]
+      | Annotation               | "1"               ;      Annotation = "{"
+      NAME "}" ;  Notes:  * `Annotation` is just a comment if it follows a
+      `UNIT` and is    equivalent to `1` if it is used alone. For examples,
+      `{requests}/s == 1/s`, `By{transmitted}/s == By/s`. * `NAME` is a
+      sequence of non-blank printable ASCII characters not    containing '{'
+      or '}'.
+    valueType: Whether the measurement is an integer, a floating-point number,
+      etc.
+  """
+
+  class MetricKindValueValuesEnum(_messages.Enum):
+    """Whether the metric records instantaneous values, changes to a value,
+    etc.
+
+    Values:
+      METRIC_KIND_UNSPECIFIED: Do not use this default value.
+      GAUGE: Instantaneous measurements of a varying quantity.
+      DELTA: Changes over non-overlapping time intervals.
+      CUMULATIVE: Cumulative value over time intervals that can overlap. The
+        overlapping intervals must have the same start time.
+    """
+    METRIC_KIND_UNSPECIFIED = 0
+    GAUGE = 1
+    DELTA = 2
+    CUMULATIVE = 3
+
+  class ValueTypeValueValuesEnum(_messages.Enum):
+    """Whether the measurement is an integer, a floating-point number, etc.
+
+    Values:
+      VALUE_TYPE_UNSPECIFIED: Do not use this default value.
+      BOOL: The value is a boolean. This value type can be used only if the
+        metric kind is `GAUGE`.
+      INT64: The value is a signed 64-bit integer.
+      DOUBLE: The value is a double precision floating point number.
+      STRING: The value is a text string. This value type can be used only if
+        the metric kind is `GAUGE`.
+      DISTRIBUTION: The value is a `Distribution`.
+      MONEY: The value is money.
+    """
+    VALUE_TYPE_UNSPECIFIED = 0
+    BOOL = 1
+    INT64 = 2
+    DOUBLE = 3
+    STRING = 4
+    DISTRIBUTION = 5
+    MONEY = 6
+
+  # Generated declarative fields; numeric args look like proto field numbers.
+  description = _messages.StringField(1)
+  displayName = _messages.StringField(2)
+  labels = _messages.MessageField('LabelDescriptor', 3, repeated=True)
+  metricKind = _messages.EnumField('MetricKindValueValuesEnum', 4)
+  name = _messages.StringField(5)
+  type = _messages.StringField(6)
+  unit = _messages.StringField(7)
+  valueType = _messages.EnumField('ValueTypeValueValuesEnum', 8)
+
+
+class Mixin(_messages.Message):
+  """Declares an API to be included in this API. The including API must
+  redeclare all the methods from the included API, but documentation and
+  options are inherited as follows:  - If after comment and whitespace
+  stripping, the documentation   string of the redeclared method is empty, it
+  will be inherited   from the original method.  - Each annotation belonging
+  to the service config (http,   visibility) which is not set in the
+  redeclared method will be   inherited.  - If an http annotation is
+  inherited, the path pattern will be   modified as follows. Any version
+  prefix will be replaced by the   version of the including API plus the root
+  path if specified.  Example of a simple mixin:      package google.acl.v1;
+  service AccessControl {       // Get the underlying ACL object.       rpc
+  GetAcl(GetAclRequest) returns (Acl) {         option (google.api.http).get =
+  "/v1/{resource=**}:getAcl";       }     }      package google.storage.v2;
+  service Storage {       //       rpc GetAcl(GetAclRequest) returns (Acl);
+  // Get a data record.       rpc GetData(GetDataRequest) returns (Data) {
+  option (google.api.http).get = "/v2/{resource=**}";       }     }  Example
+  of a mixin configuration:      apis:     - name: google.storage.v2.Storage
+  mixins:       - name: google.acl.v1.AccessControl  The mixin construct
+  implies that all methods in `AccessControl` are also declared with same name
+  and request/response types in `Storage`. A documentation generator or
+  annotation processor will see the effective `Storage.GetAcl` method after
+  inheriting documentation and annotations as follows:      service Storage {
+  // Get the underlying ACL object.       rpc GetAcl(GetAclRequest) returns
+  (Acl) {         option (google.api.http).get = "/v2/{resource=**}:getAcl";
+  }       ...     }  Note how the version in the path pattern changed from
+  `v1` to `v2`.  If the `root` field in the mixin is specified, it should be a
+  relative path under which inherited HTTP paths are placed. Example:
+  apis:     - name: google.storage.v2.Storage       mixins:       - name:
+  google.acl.v1.AccessControl         root: acls  This implies the following
+  inherited HTTP annotation:      service Storage {       // Get the
+  underlying ACL object.       rpc GetAcl(GetAclRequest) returns (Acl) {
+  option (google.api.http).get = "/v2/acls/{resource=**}:getAcl";       }
+  ...     }
+
+  Fields:
+    name: The fully qualified name of the API which is included.
+    root: If non-empty specifies a path under which inherited HTTP paths are
+      rooted.
+  """
+
+  name = _messages.StringField(1)
+  root = _messages.StringField(2)
+
+
+class MonitoredResourceDescriptor(_messages.Message):
+  """An object that describes the schema of a MonitoredResource object using a
+  type name and a set of labels.  For example, the monitored resource
+  descriptor for Google Compute Engine VM instances has a type of
+  `"gce_instance"` and specifies the use of the labels `"instance_id"` and
+  `"zone"` to identify particular VM instances.  Different APIs can support
+  different monitored resource types. APIs generally provide a `list` method
+  that returns the monitored resource descriptors used by the API.
+
+  Fields:
+    description: Optional. A detailed description of the monitored resource
+      type that might be used in documentation.
+    displayName: Optional. A concise name for the monitored resource type that
+      might be displayed in user interfaces. For example, `"Google Cloud SQL
+      Database"`.
+    labels: Required. A set of labels used to describe instances of this
+      monitored resource type. For example, an individual Google Cloud SQL
+      database is identified by values for the labels `"database_id"` and
+      `"zone"`.
+    name: Optional. The resource name of the monitored resource descriptor:
+      `"projects/{project_id}/monitoredResourceDescriptors/{type}"` where
+      {type} is the value of the `type` field in this object and {project_id}
+      is a project ID that provides API-specific context for accessing the
+      type.  APIs that do not use project information can use the resource
+      name format `"monitoredResourceDescriptors/{type}"`.
+    type: Required. The monitored resource type. For example, the type
+      `"cloudsql_database"` represents databases in Google Cloud SQL. The
+      maximum length of this value is 256 characters.
+  """
+
+  description = _messages.StringField(1)
+  displayName = _messages.StringField(2)
+  labels = _messages.MessageField('LabelDescriptor', 3, repeated=True)
+  name = _messages.StringField(4)
+  type = _messages.StringField(5)
+
+
+class Monitoring(_messages.Message):
+  """Monitoring configuration of the service.  The example below shows how to
+  configure monitored resources and metrics for monitoring. In the example, a
+  monitored resource and two metrics are defined. The
+  `library.googleapis.com/book/returned_count` metric is sent to both producer
+  and consumer projects, whereas the
+  `library.googleapis.com/book/overdue_count` metric is only sent to the
+  consumer project.      monitored_resources:     - type:
+  library.googleapis.com/branch       labels:       - key: /city
+  description: The city where the library branch is located in.       - key:
+  /name         description: The name of the branch.     metrics:     - name:
+  library.googleapis.com/book/returned_count       metric_kind: DELTA
+  value_type: INT64       labels:       - key: /customer_id     - name:
+  library.googleapis.com/book/overdue_count       metric_kind: GAUGE
+  value_type: INT64       labels:       - key: /customer_id     monitoring:
+  producer_destinations:       - monitored_resource:
+  library.googleapis.com/branch         metrics:         -
+  library.googleapis.com/book/returned_count       consumer_destinations:
+  - monitored_resource: library.googleapis.com/branch         metrics:
+  - library.googleapis.com/book/returned_count         -
+  library.googleapis.com/book/overdue_count
+
+  Fields:
+    consumerDestinations: Monitoring configurations for sending metrics to the
+      consumer project. There can be multiple consumer destinations, each one
+      must have a different monitored resource type. A metric can be used in
+      at most one consumer destination.
+    producerDestinations: Monitoring configurations for sending metrics to the
+      producer project. There can be multiple producer destinations, each one
+      must have a different monitored resource type. A metric can be used in
+      at most one producer destination.
+  """
+
+  consumerDestinations = _messages.MessageField('MonitoringDestination', 1, repeated=True)
+  producerDestinations = _messages.MessageField('MonitoringDestination', 2, repeated=True)
+
+
+class MonitoringDestination(_messages.Message):
+  """Configuration of a specific monitoring destination (the producer project
+  or the consumer project).
+
+  Fields:
+    metrics: Names of the metrics to report to this monitoring destination.
+      Each name must be defined in Service.metrics section.
+    monitoredResource: The monitored resource type. The type must be defined
+      in Service.monitored_resources section.
+  """
+
+  metrics = _messages.StringField(1, repeated=True)
+  monitoredResource = _messages.StringField(2)
+
+
+class OAuthRequirements(_messages.Message):
+  """OAuth scopes are a way to define data and permissions on data. For
+  example, there are scopes defined for "Read-only access to Google Calendar"
+  and "Access to Cloud Platform". Users can consent to a scope for an
+  application, giving it permission to access that data on their behalf.
+  OAuth scope specifications should be fairly coarse grained; a user will need
+  to see and understand the text description of what your scope means.  In
+  most cases: use one or at most two OAuth scopes for an entire family of
+  products. If your product has multiple APIs, you should probably be sharing
+  the OAuth scope across all of those APIs.  When you need finer grained OAuth
+  consent screens: talk with your product management about how developers will
+  use them in practice.  Please note that even though each of the canonical
+  scopes is enough for a request to be accepted and passed to the backend, a
+  request can still fail due to the backend requiring additional scopes or
+  permissions.
+
+  Fields:
+    canonicalScopes: The list of publicly documented OAuth scopes that are
+      allowed access. An OAuth token containing any of these scopes will be
+      accepted.  Example:       canonical_scopes:
+      https://www.googleapis.com/auth/calendar,
+      https://www.googleapis.com/auth/calendar.read
+  """
+
+  canonicalScopes = _messages.StringField(1)
+
+
+class OpenApiSpec(_messages.Message):
+  """A collection of OpenAPI specification files.
+
+  Fields:
+    openApiFiles: Individual files.
+  """
+
+  openApiFiles = _messages.MessageField('ConfigFile', 1, repeated=True)
+
+
+class Operation(_messages.Message):
+  """This resource represents a long-running operation that is the result of a
+  network API call.
+
+  Messages:
+    MetadataValue: Service-specific metadata associated with the operation.
+      It typically contains progress information and common metadata such as
+      create time. Some services might not provide such metadata.  Any method
+      that returns a long-running operation should document the metadata type,
+      if any.
+    ResponseValue: The normal response of the operation in case of success.
+      If the original method returns no data on success, such as `Delete`, the
+      response is `google.protobuf.Empty`.  If the original method is standard
+      `Get`/`Create`/`Update`, the response should be the resource.  For other
+      methods, the response should have the type `XxxResponse`, where `Xxx` is
+      the original method name.  For example, if the original method name is
+      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
+
+  Fields:
+    done: If the value is `false`, it means the operation is still in
+      progress. If true, the operation is completed, and either `error` or
+      `response` is available.
+    error: The error result of the operation in case of failure.
+    metadata: Service-specific metadata associated with the operation.  It
+      typically contains progress information and common metadata such as
+      create time. Some services might not provide such metadata.  Any method
+      that returns a long-running operation should document the metadata type,
+      if any.
+    name: The server-assigned name, which is only unique within the same
+      service that originally returns it. If you use the default HTTP mapping,
+      the `name` should have the format of `operations/some/unique/name`.
+    response: The normal response of the operation in case of success.  If the
+      original method returns no data on success, such as `Delete`, the
+      response is `google.protobuf.Empty`.  If the original method is standard
+      `Get`/`Create`/`Update`, the response should be the resource.  For other
+      methods, the response should have the type `XxxResponse`, where `Xxx` is
+      the original method name.  For example, if the original method name is
+      `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class MetadataValue(_messages.Message):
+    """Service-specific metadata associated with the operation.  It typically
+    contains progress information and common metadata such as create time.
+    Some services might not provide such metadata.  Any method that returns a
+    long-running operation should document the metadata type, if any.
+
+    Messages:
+      AdditionalProperty: An additional property for a MetadataValue object.
+
+    Fields:
+      additionalProperties: Properties of the object. Contains field @type
+        with type URL.
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a MetadataValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A extra_types.JsonValue attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('extra_types.JsonValue', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class ResponseValue(_messages.Message):
+    """The normal response of the operation in case of success.  If the
+    original method returns no data on success, such as `Delete`, the response
+    is `google.protobuf.Empty`.  If the original method is standard
+    `Get`/`Create`/`Update`, the response should be the resource.  For other
+    methods, the response should have the type `XxxResponse`, where `Xxx` is
+    the original method name.  For example, if the original method name is
+    `TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
+
+    Messages:
+      AdditionalProperty: An additional property for a ResponseValue object.
+
+    Fields:
+      additionalProperties: Properties of the object. Contains field @type
+        with type URL.
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a ResponseValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A extra_types.JsonValue attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('extra_types.JsonValue', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  done = _messages.BooleanField(1)
+  error = _messages.MessageField('Status', 2)
+  metadata = _messages.MessageField('MetadataValue', 3)
+  name = _messages.StringField(4)
+  response = _messages.MessageField('ResponseValue', 5)
+
+
+class OperationMetadata(_messages.Message):
+  """The metadata associated with a long running operation resource.
+
+  Fields:
+    progressPercentage: Percentage of completion of this operation, ranging
+      from 0 to 100.
+    resourceNames: The full name of the resources that this operation is
+      directly associated with.
+    startTime: The start time of the operation.
+    steps: Detailed status information for each step. The order is
+      undetermined.
+  """
+
+  progressPercentage = _messages.IntegerField(1, variant=_messages.Variant.INT32)
+  resourceNames = _messages.StringField(2, repeated=True)
+  startTime = _messages.StringField(3)
+  steps = _messages.MessageField('Step', 4, repeated=True)
+
+
+class Option(_messages.Message):
+  """A protocol buffer option, which can be attached to a message, field,
+  enumeration, etc.
+
+  Messages:
+    ValueValue: The option's value. For example, `"com.google.protobuf"`.
+
+  Fields:
+    name: The option's name. For example, `"java_package"`.
+    value: The option's value. For example, `"com.google.protobuf"`.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class ValueValue(_messages.Message):
+    """The option's value. For example, `"com.google.protobuf"`.
+
+    Messages:
+      AdditionalProperty: An additional property for a ValueValue object.
+
+    Fields:
+      additionalProperties: Properties of the object. Contains field @type
+        with type URL.
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a ValueValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A extra_types.JsonValue attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('extra_types.JsonValue', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  name = _messages.StringField(1)
+  value = _messages.MessageField('ValueValue', 2)
+
+
+class Page(_messages.Message):
+  """Represents a documentation page. A page can contain subpages to represent
+  nested documentation set structure.
+
+  Fields:
+    content: The Markdown content of the page. You can use <code>&#40;==
+      include {path} ==&#41;</code> to include content from a Markdown file.
+    name: The name of the page. It will be used as an identity of the page to
+      generate URI of the page, text of the link to this page in navigation,
+      etc. The full page name (start from the root page name to this page
+      concatenated with `.`) can be used as reference to the page in your
+      documentation. For example: <pre><code>pages: - name: Tutorial
+      content: &#40;== include tutorial.md ==&#41;   subpages:   - name: Java
+      content: &#40;== include tutorial_java.md ==&#41; </code></pre> You can
+      reference `Java` page using Markdown reference link syntax: `Java`.
+    subpages: Subpages of this page. The order of subpages specified here will
+      be honored in the generated docset.
+  """
+
+  content = _messages.StringField(1)
+  name = _messages.StringField(2)
+  subpages = _messages.MessageField('Page', 3, repeated=True)
+
+
+class ProjectProperties(_messages.Message):
+  """A descriptor for defining project properties for a service. One service
+  may have many consumer projects, and the service may want to behave
+  differently depending on some properties on the project. For example, a
+  project may be associated with a school, or a business, or a government
+  agency, a business type property on the project may affect how a service
+  responds to the client. This descriptor defines which properties are allowed
+  to be set on a project.  Example:     project_properties:      properties:
+  - name: NO_WATERMARK        type: BOOL        description: Allows usage of
+  the API without watermarks.      - name: EXTENDED_TILE_CACHE_PERIOD
+  type: INT64
+
+  Fields:
+    properties: List of per consumer project-specific properties.
+  """
+
+  properties = _messages.MessageField('Property', 1, repeated=True)
+
+
+class ProjectSettings(_messages.Message):
+  """Settings that control how a consumer project uses a service.
+
+  Messages:
+    PropertiesValue: Service-defined per-consumer properties.  A key-value
+      mapping a string key to a google.protobuf.ListValue proto. Values in the
+      list are typed as defined in the Service configuration's
+      consumer.properties field.
+
+  Fields:
+    consumerProjectId: ID for the project consuming this service.
+    operations: Read-only view of pending operations affecting this resource,
+      if requested.
+    properties: Service-defined per-consumer properties.  A key-value mapping
+      a string key to a google.protobuf.ListValue proto. Values in the list
+      are typed as defined in the Service configuration's consumer.properties
+      field.
+    quotaSettings: Settings that control how much or how fast the service can
+      be used by the consumer project.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.
+    usageSettings: Settings that control whether this service is usable by the
+      consumer project.
+    visibilitySettings: Settings that control which features of the service
+      are visible to the consumer project.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class PropertiesValue(_messages.Message):
+    """Service-defined per-consumer properties.  A key-value mapping a string
+    key to a google.protobuf.ListValue proto. Values in the list are typed as
+    defined in the Service configuration's consumer.properties field.
+
+    Messages:
+      AdditionalProperty: An additional property for a PropertiesValue object.
+
+    Fields:
+      additionalProperties: Additional properties of type PropertiesValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a PropertiesValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A extra_types.JsonValue attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('extra_types.JsonValue', 2, repeated=True)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  consumerProjectId = _messages.StringField(1)
+  operations = _messages.MessageField('Operation', 2, repeated=True)
+  properties = _messages.MessageField('PropertiesValue', 3)
+  quotaSettings = _messages.MessageField('QuotaSettings', 4)
+  serviceName = _messages.StringField(5)
+  usageSettings = _messages.MessageField('UsageSettings', 6)
+  visibilitySettings = _messages.MessageField('VisibilitySettings', 7)
+
+
+class Property(_messages.Message):
+  """Defines project properties.  API services can define properties that can
+  be assigned to consumer projects so that backends can perform response
+  customization without having to make additional calls or maintain additional
+  storage. For example, Maps API defines properties that controls map tile
+  cache period, or whether to embed a watermark in a result.  These values can
+  be set via API producer console. Only API providers can define and set these
+  properties.
+
+  Enums:
+    TypeValueValuesEnum: The type of this property.
+
+  Fields:
+    description: The description of the property
+    name: The name of the property (a.k.a key).
+    type: The type of this property.
+  """
+
+  class TypeValueValuesEnum(_messages.Enum):
+    """The type of this property.
+
+    Values:
+      UNSPECIFIED: The type is unspecified, and will result in an error.
+      INT64: The type is `int64`.
+      BOOL: The type is `bool`.
+      STRING: The type is `string`.
+      DOUBLE: The type is `double`.
+    """
+    UNSPECIFIED = 0
+    INT64 = 1
+    BOOL = 2
+    STRING = 3
+    DOUBLE = 4
+
+  description = _messages.StringField(1)
+  name = _messages.StringField(2)
+  type = _messages.EnumField('TypeValueValuesEnum', 3)
+
+
+class ProtoDescriptor(_messages.Message):
+  """Contains a serialized protoc-generated protocol buffer message descriptor
+  set along with a URL that describes the type of the descriptor message.
+
+  Fields:
+    typeUrl: A URL/resource name whose content describes the type of the
+      serialized protocol buffer message.  Only
+      'type.googleapis.com/google.protobuf.FileDescriptorSet' is supported. If
+      the type_url is not specified,
+      'type.googleapis.com/google.protobuf.FileDescriptorSet' will be assumed.
+    value: Must be a valid serialized protocol buffer descriptor set.  To
+      generate, use protoc with imports and source info included. For an
+      example test.proto file, the following command would put the value in a
+      new file named descriptor.pb.  $protoc --include_imports
+      --include_source_info test.proto -o descriptor.pb
+  """
+
+  typeUrl = _messages.StringField(1)
+  value = _messages.BytesField(2)
+
+
+class ProtoSpec(_messages.Message):
+  """A collection of protocol buffer service specification files.
+
+  Fields:
+    protoDescriptor: A complete descriptor of a protocol buffer specification
+  """
+
+  protoDescriptor = _messages.MessageField('ProtoDescriptor', 1)
+
+
+class QueryUserAccessResponse(_messages.Message):
+  """Response message for QueryUserAccess method.
+
+  Fields:
+    accessibleVisibilityLabels: Any visibility labels on the service that are
+      accessible by the user.
+    canAccessService: True if the user can access the service and any
+      unrestricted API surface.
+  """
+
+  accessibleVisibilityLabels = _messages.StringField(1, repeated=True)
+  canAccessService = _messages.BooleanField(2)
+
+
+class Quota(_messages.Message):
+  """Quota configuration helps to achieve fairness and budgeting in service
+  usage.  - Fairness is achieved through the use of short-term quota limits
+  that are usually defined over a time window of several seconds or   minutes.
+  When such a limit is applied, for example at the user   level, it ensures
+  that no single user will monopolize the service   or a given customer's
+  allocated portion of it. - Budgeting is achieved through the use of long-
+  term quota limits   that are usually defined over a time window of one or
+  more   days. These limits help client application developers predict the
+  usage and help budgeting.  Quota enforcement uses a simple token-based
+  algorithm for resource sharing.  The quota configuration structure is as
+  follows:  - `QuotaLimit` defines a single enforceable limit with a specified
+  token amount that can be consumed over a specific duration and   applies to
+  a particular entity, like a project or an end user. If   the limit applies
+  to a user, each user making the request will   get the specified number of
+  tokens to consume. When the tokens   run out, the requests from that user
+  will be blocked until the   duration elapses and the next duration window
+  starts.  - `QuotaGroup` groups a set of quota limits.  - `QuotaRule` maps a
+  method to a set of quota groups. This allows   sharing of quota groups
+  across methods as well as one method   consuming tokens from more than one
+  quota group. When a group   contains multiple limits, requests to a method
+  consuming tokens   from that group must satisfy all the limits in that
+  group.  Example:      quota:       groups:       - name: ReadGroup
+  limits:         - description: Daily Limit           name: ProjectQpd
+  default_limit: 10000           duration: 1d           limit_by:
+  CLIENT_PROJECT          - description: Per-second Limit           name:
+  UserQps           default_limit: 20000           duration: 100s
+  limit_by: USER        - name: WriteGroup         limits:         -
+  description: Daily Limit           name: ProjectQpd           default_limit:
+  1000           max_limit: 1000           duration: 1d           limit_by:
+  CLIENT_PROJECT          - description: Per-second Limit           name:
+  UserQps           default_limit: 2000           max_limit: 4000
+  duration: 100s           limit_by: USER        rules:       - selector: "*"
+  groups:         - group: ReadGroup       - selector:
+  google.calendar.Calendar.Update         groups:         - group: WriteGroup
+  cost: 2       - selector: google.calendar.Calendar.Delete         groups:
+  - group: WriteGroup  Here, the configuration defines two quota groups:
+  ReadGroup and WriteGroup, each defining its own daily and per-second limits.
+  Note that One Platform enforces per-second limits averaged over a duration
+  of 100 seconds. The rules map ReadGroup for all methods, except for the
+  Update and Delete methods. These two methods consume from WriteGroup, with
+  Update method consuming at twice the rate as Delete method.  Multiple quota
+  groups can be specified for a method. The quota limits in all of those
+  groups will be enforced. Example:      quota:       groups:       - name:
+  WriteGroup         limits:         - description: Daily Limit
+  name: ProjectQpd           default_limit: 1000           max_limit: 1000
+  duration: 1d           limit_by: CLIENT_PROJECT          - description: Per-
+  second Limit           name: UserQps           default_limit: 2000
+  max_limit: 4000           duration: 100s           limit_by: USER        -
+  name: StorageGroup         limits:         - description: Storage Quota
+  name: StorageQuota           default_limit: 1000           duration: 0
+  limit_by: USER        rules:       - selector:
+  google.calendar.Calendar.Create         groups:         - group:
+  StorageGroup         - group: WriteGroup       - selector:
+  google.calendar.Calendar.Delete         groups:         - group:
+  StorageGroup  In the above example, the Create and Delete methods manage the
+  user's storage space. In addition, Create method uses WriteGroup to manage
+  the requests. In this case, requests to Create method need to satisfy all
+  quota limits defined in both quota groups.  One can disable quota for
+  selected method(s) identified by the selector by setting disable_quota to
+  true. For example,        rules:       - selector: "*"         groups:
+  - group: ReadGroup       - selector: google.calendar.Calendar.Select
+  disable_quota: true
+
+  Fields:
+    groups: List of `QuotaGroup` definitions for the service.
+    rules: List of `QuotaRule` definitions, each one mapping a selected method
+      to one or more quota groups.
+  """
+
+  groups = _messages.MessageField('QuotaGroup', 1, repeated=True)
+  rules = _messages.MessageField('QuotaRule', 2, repeated=True)
+
+
+class QuotaGroup(_messages.Message):
+  """`QuotaGroup` defines a set of quota limits to enforce.
+
+  Fields:
+    billable: Indicates if the quota limits defined in this quota group apply
+      to consumers who have active billing. Quota limits defined in billable
+      groups will be applied only to consumers who have active billing. The
+      amount of tokens consumed from billable quota group will also be
+      reported for billing. Quota limits defined in non-billable groups will
+      be applied only to consumers who have no active billing.
+    description: User-visible description of this quota group.
+    limits: Quota limits to be enforced when this quota group is used. A
+      request must satisfy all the limits in a group for it to be permitted.
+    name: Name of this quota group. Must be unique within the service.  Quota
+      group name is used as part of the id for quota limits. Once the quota
+      group has been put into use, the name of the quota group should be
+      immutable.
+  """
+
+  billable = _messages.BooleanField(1)
+  description = _messages.StringField(2)
+  limits = _messages.MessageField('QuotaLimit', 3, repeated=True)
+  name = _messages.StringField(4)
+
+
+class QuotaGroupMapping(_messages.Message):
+  """A quota group mapping.
+
+  Fields:
+    cost: Number of tokens to consume for each request. This allows different
+      cost to be associated with different methods that consume from the same
+      quota group. By default, each request will cost one token.
+    group: The `QuotaGroup.name` of the group. Requests for the mapped methods
+      will consume tokens from each of the limits defined in this group.
+  """
+
+  cost = _messages.IntegerField(1, variant=_messages.Variant.INT32)
+  group = _messages.StringField(2)
+
+
+class QuotaInfo(_messages.Message):
+  """Metadata about an individual quota, containing usage and limit
+  information.
+
+  Fields:
+    currentUsage: The usage data for this quota as it applies to the current
+      limit.
+    historicalUsage: The historical usage data of this quota limit. Currently
+      it is only available for daily quota limit, that is, base_limit.duration
+      = "1d".
+    limit: The effective limit for this quota.
+  """
+
+  currentUsage = _messages.MessageField('QuotaUsage', 1)
+  historicalUsage = _messages.MessageField('QuotaUsage', 2, repeated=True)
+  limit = _messages.MessageField('EffectiveQuotaLimit', 3)
+
+
+class QuotaLimit(_messages.Message):
+  """`QuotaLimit` defines a specific limit that applies over a specified
+  duration for a limit type. There can be at most one limit for a duration and
+  limit type combination defined within a `QuotaGroup`.
+
+  Enums:
+    LimitByValueValuesEnum: Limit type to use for enforcing this quota limit.
+      Each unique value gets the defined number of tokens to consume from. For
+      a quota limit that uses user type, each user making requests through the
+      same client application project will get his/her own pool of tokens to
+      consume, whereas for a limit that uses client project type, all users
+      making requests through the same client application project share a
+      single pool of tokens.
+
+  Fields:
+    defaultLimit: Default number of tokens that can be consumed during the
+      specified duration. This is the number of tokens assigned when a client
+      application developer activates the service for his/her project.
+      Specifying a value of 0 will block all requests. This can be used if you
+      are provisioning quota to selected consumers and blocking others.
+      Similarly, a value of -1 will indicate an unlimited quota. No other
+      negative values are allowed.
+    description: Optional. User-visible, extended description for this quota
+      limit. Should be used only when more context is needed to understand
+      this limit than provided by the limit's display name (see:
+      `display_name`).
+    displayName: User-visible display name for this limit. Optional. If not
+      set, the UI will provide a default display name based on the quota
+      configuration. This field can be used to override the default display
+      name generated from the configuration.
+    duration: Duration of this limit in textual notation. Example: "100s",
+      "24h", "1d". For duration longer than a day, only multiple of days is
+      supported. We support only "100s" and "1d" for now. Additional support
+      will be added in the future. "0" indicates indefinite duration.
+    freeTier: Free tier value displayed in the Developers Console for this
+      limit. The free tier is the number of tokens that will be subtracted
+      from the billed amount when billing is enabled. This field can only be
+      set on a limit with duration "1d", in a billable group; it is invalid on
+      any other limit. If this field is not set, it defaults to 0, indicating
+      that there is no free tier for this service.
+    limitBy: Limit type to use for enforcing this quota limit. Each unique
+      value gets the defined number of tokens to consume from. For a quota
+      limit that uses user type, each user making requests through the same
+      client application project will get his/her own pool of tokens to
+      consume, whereas for a limit that uses client project type, all users
+      making requests through the same client application project share a
+      single pool of tokens.
+    maxLimit: Maximum number of tokens that can be consumed during the
+      specified duration. Client application developers can override the
+      default limit up to this maximum. If specified, this value cannot be set
+      to a value less than the default limit. If not specified, it is set to
+      the default limit.  To allow clients to apply overrides with no upper
+      bound, set this to -1, indicating unlimited maximum quota.
+    name: Name of the quota limit.  Must be unique within the quota group.
+      This name is used to refer to the limit when overriding the limit on a
+      per-project basis.  If a name is not provided, it will be generated from
+      the limit_by and duration fields.  The maximum length of the limit name
+      is 64 characters.  The name of a limit is used as a unique identifier
+      for this limit. Therefore, once a limit has been put into use, its name
+      should be immutable. You can use the display_name field to provide a
+      user-friendly name for the limit. The display name can be evolved over
+      time without affecting the identity of the limit.
+  """
+
+  class LimitByValueValuesEnum(_messages.Enum):
+    """Limit type to use for enforcing this quota limit. Each unique value
+    gets the defined number of tokens to consume from. For a quota limit that
+    uses user type, each user making requests through the same client
+    application project will get his/her own pool of tokens to consume,
+    whereas for a limit that uses client project type, all users making
+    requests through the same client application project share a single pool
+    of tokens.
+
+    Values:
+      CLIENT_PROJECT: ID of the project owned by the client application
+        developer making the request.
+      USER: ID of the end user making the request using the client
+        application.
+    """
+    CLIENT_PROJECT = 0
+    USER = 1
+
+  defaultLimit = _messages.IntegerField(1)
+  description = _messages.StringField(2)
+  displayName = _messages.StringField(3)
+  duration = _messages.StringField(4)
+  freeTier = _messages.IntegerField(5)
+  limitBy = _messages.EnumField('LimitByValueValuesEnum', 6)
+  maxLimit = _messages.IntegerField(7)
+  name = _messages.StringField(8)
+
+
+class QuotaLimitOverride(_messages.Message):
+  """Specifies a custom quota limit that is applied for this consumer project.
+  This overrides the default value in google.api.QuotaLimit.
+
+  Fields:
+    limit: The new limit for this project. May be -1 (unlimited), 0 (block),
+      or any positive integer.
+    unlimited: Indicates the override is to provide unlimited quota.  If true,
+      any value set for limit will be ignored. DEPRECATED. Use a limit value
+      of -1 instead.
+  """
+
+  limit = _messages.IntegerField(1)
+  unlimited = _messages.BooleanField(2)
+
+
+class QuotaRule(_messages.Message):
+  """`QuotaRule` maps a method to a set of `QuotaGroup`s.
+
+  Fields:
+    disableQuota: Indicates if quota checking should be enforced. Quota will
+      be disabled for methods without quota rules or with quota rules having
+      this field set to true. When this field is set to true, no quota group
+      mapping is allowed.
+    groups: Quota groups to be used for this method. This supports associating
+      a cost with each quota group.
+    selector: Selects methods to which this rule applies.  Refer to selector
+      for syntax details.
+  """
+
+  disableQuota = _messages.BooleanField(1)
+  groups = _messages.MessageField('QuotaGroupMapping', 2, repeated=True)
+  selector = _messages.StringField(3)
+
+
+class QuotaSettings(_messages.Message):
+  """Per-consumer overrides for quota settings. See google/api/quota.proto for
+  the corresponding service configuration which provides the default values.
+
+  Messages:
+    ConsumerOverridesValue: Quota overrides set by the consumer. Consumer
+      overrides will only have an effect up to the max_limit specified in the
+      service config, or the the producer override, if one exists.  The key
+      for this map is one of the following:  - '<GROUP_NAME>/<LIMIT_NAME>' for
+      quotas defined within quota groups, where GROUP_NAME is the
+      google.api.QuotaGroup.name field and LIMIT_NAME is the
+      google.api.QuotaLimit.name field from the service config.  For example:
+      'ReadGroup/ProjectDaily'.  - '<LIMIT_NAME>' for quotas defined without
+      quota groups, where LIMIT_NAME is the google.api.QuotaLimit.name field
+      from the service config. For example: 'borrowedCountPerOrganization'.
+    EffectiveQuotasValue: The effective quota limits for each group, derived
+      from the service defaults together with any producer or consumer
+      overrides. For each limit, the effective value is the minimum of the
+      producer and consumer overrides if either is present, or else the
+      service default if neither is present. DEPRECATED. Use
+      effective_quota_groups instead.
+    ProducerOverridesValue: Quota overrides set by the producer. Note that if
+      a consumer override is also specified, then the minimum of the two will
+      be used. This allows consumers to cap their usage voluntarily.  The key
+      for this map is one of the following:  - '<GROUP_NAME>/<LIMIT_NAME>' for
+      quotas defined within quota groups, where GROUP_NAME is the
+      google.api.QuotaGroup.name field and LIMIT_NAME is the
+      google.api.QuotaLimit.name field from the service config.  For example:
+      'ReadGroup/ProjectDaily'.  - '<LIMIT_NAME>' for quotas defined without
+      quota groups, where LIMIT_NAME is the google.api.QuotaLimit.name field
+      from the service config. For example: 'borrowedCountPerOrganization'.
+
+  Fields:
+    consumerOverrides: Quota overrides set by the consumer. Consumer overrides
+      will only have an effect up to the max_limit specified in the service
+      config, or the the producer override, if one exists.  The key for this
+      map is one of the following:  - '<GROUP_NAME>/<LIMIT_NAME>' for quotas
+      defined within quota groups, where GROUP_NAME is the
+      google.api.QuotaGroup.name field and LIMIT_NAME is the
+      google.api.QuotaLimit.name field from the service config.  For example:
+      'ReadGroup/ProjectDaily'.  - '<LIMIT_NAME>' for quotas defined without
+      quota groups, where LIMIT_NAME is the google.api.QuotaLimit.name field
+      from the service config. For example: 'borrowedCountPerOrganization'.
+    effectiveQuotaGroups: Use this field for quota limits defined under quota
+      groups. Combines service quota configuration and project-specific
+      settings, as a map from quota group name to the effective quota
+      information for that group. Output-only.
+    effectiveQuotas: The effective quota limits for each group, derived from
+      the service defaults together with any producer or consumer overrides.
+      For each limit, the effective value is the minimum of the producer and
+      consumer overrides if either is present, or else the service default if
+      neither is present. DEPRECATED. Use effective_quota_groups instead.
+    producerOverrides: Quota overrides set by the producer. Note that if a
+      consumer override is also specified, then the minimum of the two will be
+      used. This allows consumers to cap their usage voluntarily.  The key for
+      this map is one of the following:  - '<GROUP_NAME>/<LIMIT_NAME>' for
+      quotas defined within quota groups, where GROUP_NAME is the
+      google.api.QuotaGroup.name field and LIMIT_NAME is the
+      google.api.QuotaLimit.name field from the service config.  For example:
+      'ReadGroup/ProjectDaily'.  - '<LIMIT_NAME>' for quotas defined without
+      quota groups, where LIMIT_NAME is the google.api.QuotaLimit.name field
+      from the service config. For example: 'borrowedCountPerOrganization'.
+    variableTermQuotas: Quotas that are active over a specified time period.
+      Only writeable by the producer.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class ConsumerOverridesValue(_messages.Message):
+    """Quota overrides set by the consumer. Consumer overrides will only have
+    an effect up to the max_limit specified in the service config, or the the
+    producer override, if one exists.  The key for this map is one of the
+    following:  - '<GROUP_NAME>/<LIMIT_NAME>' for quotas defined within quota
+    groups, where GROUP_NAME is the google.api.QuotaGroup.name field and
+    LIMIT_NAME is the google.api.QuotaLimit.name field from the service
+    config.  For example: 'ReadGroup/ProjectDaily'.  - '<LIMIT_NAME>' for
+    quotas defined without quota groups, where LIMIT_NAME is the
+    google.api.QuotaLimit.name field from the service config. For example:
+    'borrowedCountPerOrganization'.
+
+    Messages:
+      AdditionalProperty: An additional property for a ConsumerOverridesValue
+        object.
+
+    Fields:
+      additionalProperties: Additional properties of type
+        ConsumerOverridesValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a ConsumerOverridesValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A QuotaLimitOverride attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('QuotaLimitOverride', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class EffectiveQuotasValue(_messages.Message):
+    """The effective quota limits for each group, derived from the service
+    defaults together with any producer or consumer overrides. For each limit,
+    the effective value is the minimum of the producer and consumer overrides
+    if either is present, or else the service default if neither is present.
+    DEPRECATED. Use effective_quota_groups instead.
+
+    Messages:
+      AdditionalProperty: An additional property for a EffectiveQuotasValue
+        object.
+
+    Fields:
+      additionalProperties: Additional properties of type EffectiveQuotasValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a EffectiveQuotasValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A QuotaLimitOverride attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('QuotaLimitOverride', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class ProducerOverridesValue(_messages.Message):
+    """Quota overrides set by the producer. Note that if a consumer override
+    is also specified, then the minimum of the two will be used. This allows
+    consumers to cap their usage voluntarily.  The key for this map is one of
+    the following:  - '<GROUP_NAME>/<LIMIT_NAME>' for quotas defined within
+    quota groups, where GROUP_NAME is the google.api.QuotaGroup.name field and
+    LIMIT_NAME is the google.api.QuotaLimit.name field from the service
+    config.  For example: 'ReadGroup/ProjectDaily'.  - '<LIMIT_NAME>' for
+    quotas defined without quota groups, where LIMIT_NAME is the
+    google.api.QuotaLimit.name field from the service config. For example:
+    'borrowedCountPerOrganization'.
+
+    Messages:
+      AdditionalProperty: An additional property for a ProducerOverridesValue
+        object.
+
+    Fields:
+      additionalProperties: Additional properties of type
+        ProducerOverridesValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a ProducerOverridesValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A QuotaLimitOverride attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('QuotaLimitOverride', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  consumerOverrides = _messages.MessageField('ConsumerOverridesValue', 1)
+  effectiveQuotaGroups = _messages.MessageField('EffectiveQuotaGroup', 2, repeated=True)
+  effectiveQuotas = _messages.MessageField('EffectiveQuotasValue', 3)
+  producerOverrides = _messages.MessageField('ProducerOverridesValue', 4)
+  variableTermQuotas = _messages.MessageField('VariableTermQuota', 5, repeated=True)
+
+
+class QuotaUsage(_messages.Message):
+  """Specifies the used quota amount for a quota limit at a particular time.
+
+  Fields:
+    endTime: The time the quota duration ended.
+    queryTime: The time the quota usage data was queried.
+    startTime: The time the quota duration started.
+    usage: The used quota value at the "query_time".
+  """
+
+  endTime = _messages.StringField(1)
+  queryTime = _messages.StringField(2)
+  startTime = _messages.StringField(3)
+  usage = _messages.IntegerField(4)
+
+
+class Service(_messages.Message):
+  """`Service` is the root object of the configuration schema. It describes
+  basic information like the name of the service and the exposed API
+  interfaces, and delegates other aspects to configuration sub-sections.
+  Example:      type: google.api.Service     config_version: 1     name:
+  calendar.googleapis.com     title: Google Calendar API     apis:     - name:
+  google.calendar.Calendar     backend:       rules:       - selector: "*"
+  address: calendar.example.com
+
+  Fields:
+    apis: A list of API interfaces exported by this service. Only the `name`
+      field of the google.protobuf.Api needs to be provided by the
+      configuration author, as the remaining fields will be derived from the
+      IDL during the normalization process. It is an error to specify an API
+      interface here which cannot be resolved against the associated IDL
+      files.
+    authentication: Auth configuration.
+    backend: API backend configuration.
+    billing: Billing configuration of the service.
+    configVersion: The version of the service configuration. The config
+      version may influence interpretation of the configuration, for example,
+      to determine defaults. This is documented together with applicable
+      options. The current default for the config version itself is `3`.
+    context: Context configuration.
+    control: Configuration for the service control plane.
+    customError: Custom error configuration.
+    documentation: Additional API documentation.
+    enums: A list of all enum types included in this API service.  Enums
+      referenced directly or indirectly by the `apis` are automatically
+      included.  Enums which are not referenced but shall be included should
+      be listed here by name. Example:      enums:     - name:
+      google.someapi.v1.SomeEnum
+    http: HTTP configuration.
+    id: A unique ID for a specific instance of this message, typically
+      assigned by the client for tracking purpose. If empty, the server may
+      choose to generate one instead.
+    logging: Logging configuration of the service.
+    logs: Defines the logs used by this service.
+    metrics: Defines the metrics used by this service.
+    monitoredResources: Defines the monitored resources used by this service.
+      This is required by the Service.monitoring and Service.logging
+      configurations.
+    monitoring: Monitoring configuration of the service.
+    name: The DNS address at which this service is available, e.g.
+      `calendar.googleapis.com`.
+    producerProjectId: The id of the Google developer project that owns the
+      service. Members of this project can manage the service configuration,
+      manage consumption of the service, etc.
+    projectProperties: Configuration of per-consumer project properties.
+    quota: Quota configuration.
+    systemParameters: Configuration for system parameters.
+    systemTypes: A list of all proto message types included in this API
+      service. It serves similar purpose as [google.api.Service.types], except
+      that these types are not needed by user-defined APIs. Therefore, they
+      will not show up in the generated discovery doc. This field should only
+      be used to define system APIs in ESF.
+    title: The product title associated with this service.
+    types: A list of all proto message types included in this API service.
+      Types referenced directly or indirectly by the `apis` are automatically
+      included.  Messages which are not referenced but shall be included, such
+      as types used by the `google.protobuf.Any` type, should be listed here
+      by name. Example:      types:     - name: google.protobuf.Int32
+    usage: Configuration controlling usage of this service.
+    visibility: API visibility configuration.
+  """
+
+  apis = _messages.MessageField('Api', 1, repeated=True)
+  authentication = _messages.MessageField('Authentication', 2)
+  backend = _messages.MessageField('Backend', 3)
+  billing = _messages.MessageField('Billing', 4)
+  configVersion = _messages.IntegerField(5, variant=_messages.Variant.UINT32)
+  context = _messages.MessageField('Context', 6)
+  control = _messages.MessageField('Control', 7)
+  customError = _messages.MessageField('CustomError', 8)
+  documentation = _messages.MessageField('Documentation', 9)
+  enums = _messages.MessageField('Enum', 10, repeated=True)
+  http = _messages.MessageField('Http', 11)
+  id = _messages.StringField(12)
+  logging = _messages.MessageField('Logging', 13)
+  logs = _messages.MessageField('LogDescriptor', 14, repeated=True)
+  metrics = _messages.MessageField('MetricDescriptor', 15, repeated=True)
+  monitoredResources = _messages.MessageField('MonitoredResourceDescriptor', 16, repeated=True)
+  monitoring = _messages.MessageField('Monitoring', 17)
+  name = _messages.StringField(18)
+  producerProjectId = _messages.StringField(19)
+  projectProperties = _messages.MessageField('ProjectProperties', 20)
+  quota = _messages.MessageField('Quota', 21)
+  systemParameters = _messages.MessageField('SystemParameters', 22)
+  systemTypes = _messages.MessageField('Type', 23, repeated=True)
+  title = _messages.StringField(24)
+  types = _messages.MessageField('Type', 25, repeated=True)
+  usage = _messages.MessageField('Usage', 26)
+  visibility = _messages.MessageField('Visibility', 27)
+
+
+class ServiceAccessList(_messages.Message):
+  """List of users and groups that are granted access to a service or
+  visibility label.
+
+  Fields:
+    members: Members that are granted access.  - "user:{$user_email}" - Grant
+      access to an individual user - "group:{$group_email}" - Grant access to
+      direct members of the group - "domain:{$domain}" - Grant access to all
+      members of the domain. For now,      domain membership check will be
+      similar to Devconsole/TT check:      compare domain part of the user
+      email to configured domain name.      When IAM integration is complete,
+      this will be replaced with IAM      check.
+  """
+
+  members = _messages.StringField(1, repeated=True)
+
+
+class ServiceAccessPolicy(_messages.Message):
+  """Policy describing who can access a service and any visibility labels on
+  that service.
+
+  Messages:
+    VisibilityLabelAccessListsValue: ACLs for access to restricted parts of
+      the service.  The map key is the visibility label that is being
+      controlled.  Note that access to any label also implies access to the
+      unrestricted surface.
+
+  Fields:
+    accessList: ACL for access to the unrestricted surface of the service.
+    serviceName: The service protected by this policy.
+    visibilityLabelAccessLists: ACLs for access to restricted parts of the
+      service.  The map key is the visibility label that is being controlled.
+      Note that access to any label also implies access to the unrestricted
+      surface.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class VisibilityLabelAccessListsValue(_messages.Message):
+    """ACLs for access to restricted parts of the service.  The map key is the
+    visibility label that is being controlled.  Note that access to any label
+    also implies access to the unrestricted surface.
+
+    Messages:
+      AdditionalProperty: An additional property for a
+        VisibilityLabelAccessListsValue object.
+
+    Fields:
+      additionalProperties: Additional properties of type
+        VisibilityLabelAccessListsValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a VisibilityLabelAccessListsValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A ServiceAccessList attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('ServiceAccessList', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  accessList = _messages.MessageField('ServiceAccessList', 1)
+  serviceName = _messages.StringField(2)
+  visibilityLabelAccessLists = _messages.MessageField('VisibilityLabelAccessListsValue', 3)
+
+
+class ServicemanagementOperationsGetRequest(_messages.Message):
+  """A ServicemanagementOperationsGetRequest object.
+
+  Fields:
+    operationsId: Part of `name`. The name of the operation resource.
+  """
+
+  operationsId = _messages.StringField(1, required=True)
+
+
+class ServicemanagementServicesAccessPolicyQueryRequest(_messages.Message):
+  """A ServicemanagementServicesAccessPolicyQueryRequest object.
+
+  Fields:
+    serviceName: The service to query access for.
+    userEmail: The user to query access for.
+  """
+
+  serviceName = _messages.StringField(1, required=True)
+  userEmail = _messages.StringField(2)
+
+
+class ServicemanagementServicesConfigsCreateRequest(_messages.Message):
+  """A ServicemanagementServicesConfigsCreateRequest object.
+
+  Fields:
+    service: A Service resource to be passed as the request body.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+  """
+
+  service = _messages.MessageField('Service', 1)
+  serviceName = _messages.StringField(2, required=True)
+
+
+class ServicemanagementServicesConfigsGetRequest(_messages.Message):
+  """A ServicemanagementServicesConfigsGetRequest object.
+
+  Fields:
+    configId: The id of the service config resource. Optional. If it is not
+      specified, the latest version of config will be returned.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+  """
+
+  configId = _messages.StringField(1, required=True)
+  serviceName = _messages.StringField(2, required=True)
+
+
+class ServicemanagementServicesConfigsListRequest(_messages.Message):
+  """A ServicemanagementServicesConfigsListRequest object.
+
+  Fields:
+    pageSize: The max number of items to include in the response list.
+    pageToken: The token of the page to retrieve.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+  """
+
+  pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
+  pageToken = _messages.StringField(2)
+  serviceName = _messages.StringField(3, required=True)
+
+
+class ServicemanagementServicesConfigsSubmitRequest(_messages.Message):
+  """A ServicemanagementServicesConfigsSubmitRequest object.
+
+  Fields:
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+    submitConfigSourceRequest: A SubmitConfigSourceRequest resource to be
+      passed as the request body.
+  """
+
+  serviceName = _messages.StringField(1, required=True)
+  submitConfigSourceRequest = _messages.MessageField('SubmitConfigSourceRequest', 2)
+
+
+class ServicemanagementServicesCustomerSettingsGetRequest(_messages.Message):
+  """A ServicemanagementServicesCustomerSettingsGetRequest object.
+
+  Enums:
+    ViewValueValuesEnum: Request only fields for the specified view.
+
+  Fields:
+    customerId: ID for the customer. See the comment for
+      `CustomerSettings.customer_id` field of message for its format. This
+      field is required.
+    expand: Fields to expand in any results.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`. This
+      field is required.
+    view: Request only fields for the specified view.
+  """
+
+  class ViewValueValuesEnum(_messages.Enum):
+    """Request only fields for the specified view.
+
+    Values:
+      PROJECT_SETTINGS_VIEW_UNSPECIFIED: <no description>
+      CONSUMER_VIEW: <no description>
+      PRODUCER_VIEW: <no description>
+      ALL: <no description>
+    """
+    PROJECT_SETTINGS_VIEW_UNSPECIFIED = 0
+    CONSUMER_VIEW = 1
+    PRODUCER_VIEW = 2
+    ALL = 3
+
+  customerId = _messages.StringField(1, required=True)
+  expand = _messages.StringField(2)
+  serviceName = _messages.StringField(3, required=True)
+  view = _messages.EnumField('ViewValueValuesEnum', 4)
+
+
+class ServicemanagementServicesCustomerSettingsPatchRequest(_messages.Message):
+  """A ServicemanagementServicesCustomerSettingsPatchRequest object.
+
+  Fields:
+    customerId: ID for the customer. See the comment for
+      `CustomerSettings.customer_id` field of message for its format. This
+      field is required.
+    customerSettings: A CustomerSettings resource to be passed as the request
+      body.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`. This
+      field is required.
+    updateMask: The field mask specifying which fields are to be updated.
+  """
+
+  customerId = _messages.StringField(1, required=True)
+  customerSettings = _messages.MessageField('CustomerSettings', 2)
+  serviceName = _messages.StringField(3, required=True)
+  updateMask = _messages.StringField(4)
+
+
+class ServicemanagementServicesDeleteRequest(_messages.Message):
+  """A ServicemanagementServicesDeleteRequest object.
+
+  Fields:
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+  """
+
+  serviceName = _messages.StringField(1, required=True)
+
+
+class ServicemanagementServicesDisableRequest(_messages.Message):
+  """A ServicemanagementServicesDisableRequest object.
+
+  Fields:
+    disableServiceRequest: A DisableServiceRequest resource to be passed as
+      the request body.
+    serviceName: Name of the service to disable. Specifying an unknown service
+      name will cause the request to fail.
+  """
+
+  disableServiceRequest = _messages.MessageField('DisableServiceRequest', 1)
+  serviceName = _messages.StringField(2, required=True)
+
+
+class ServicemanagementServicesEnableRequest(_messages.Message):
+  """A ServicemanagementServicesEnableRequest object.
+
+  Fields:
+    enableServiceRequest: A EnableServiceRequest resource to be passed as the
+      request body.
+    serviceName: Name of the service to enable. Specifying an unknown service
+      name will cause the request to fail.
+  """
+
+  enableServiceRequest = _messages.MessageField('EnableServiceRequest', 1)
+  serviceName = _messages.StringField(2, required=True)
+
+
+class ServicemanagementServicesGetAccessPolicyRequest(_messages.Message):
+  """A ServicemanagementServicesGetAccessPolicyRequest object.
+
+  Fields:
+    serviceName: The name of the service.  For example:
+      `example.googleapis.com`.
+  """
+
+  serviceName = _messages.StringField(1, required=True)
+
+
+class ServicemanagementServicesGetConfigRequest(_messages.Message):
+  """A ServicemanagementServicesGetConfigRequest object.
+
+  Fields:
+    configId: The id of the service config resource. Optional. If it is not
+      specified, the latest version of config will be returned.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+  """
+
+  configId = _messages.StringField(1)
+  serviceName = _messages.StringField(2, required=True)
+
+
+class ServicemanagementServicesGetRequest(_messages.Message):
+  """A ServicemanagementServicesGetRequest object.
+
+  Enums:
+    ViewValueValuesEnum: If project_settings is expanded, request only fields
+      for the specified view.
+
+  Fields:
+    consumerProjectId: If project_settings is expanded, return settings for
+      the specified consumer project.
+    expand: Fields to expand in any results.  By default, the following fields
+      are not present in the result: - `operations` - `project_settings` -
+      `project_settings.operations` - `quota_usage` (It requires
+      `project_settings`) - `historical_quota_usage` (It requires
+      `project_settings`)
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+    view: If project_settings is expanded, request only fields for the
+      specified view.
+  """
+
+  class ViewValueValuesEnum(_messages.Enum):
+    """If project_settings is expanded, request only fields for the specified
+    view.
+
+    Values:
+      PROJECT_SETTINGS_VIEW_UNSPECIFIED: <no description>
+      CONSUMER_VIEW: <no description>
+      PRODUCER_VIEW: <no description>
+      ALL: <no description>
+    """
+    PROJECT_SETTINGS_VIEW_UNSPECIFIED = 0
+    CONSUMER_VIEW = 1
+    PRODUCER_VIEW = 2
+    ALL = 3
+
+  consumerProjectId = _messages.StringField(1)
+  expand = _messages.StringField(2)
+  serviceName = _messages.StringField(3, required=True)
+  view = _messages.EnumField('ViewValueValuesEnum', 4)
+
+
+class ServicemanagementServicesListRequest(_messages.Message):
+  """A ServicemanagementServicesListRequest object.
+
+  Fields:
+    category: Include services only in the specified category. Supported
+      categories are servicemanagement.googleapis.com/categories/google-
+      services or servicemanagement.googleapis.com/categories/play-games.
+    consumerProjectId: Include services consumed by the specified project.  If
+      project_settings is expanded, then this field controls which project
+      project_settings is populated for.
+    expand: Fields to expand in any results.  By default, the following fields
+      are not fully included in list results: - `operations` -
+      `project_settings` - `project_settings.operations` - `quota_usage` (It
+      requires `project_settings`)
+    pageSize: Requested size of the next page of data.
+    pageToken: Token identifying which result to start with; returned by a
+      previous list call.
+    producerProjectId: Include services produced by the specified project.
+  """
+
+  category = _messages.StringField(1)
+  consumerProjectId = _messages.StringField(2)
+  expand = _messages.StringField(3)
+  pageSize = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+  pageToken = _messages.StringField(5)
+  producerProjectId = _messages.StringField(6)
+
+
+class ServicemanagementServicesPatchConfigRequest(_messages.Message):
+  """A ServicemanagementServicesPatchConfigRequest object.
+
+  Fields:
+    service: A Service resource to be passed as the request body.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+    updateMask: A mask specifying which fields to update.
+  """
+
+  service = _messages.MessageField('Service', 1)
+  serviceName = _messages.StringField(2, required=True)
+  updateMask = _messages.StringField(3)
+
+
+class ServicemanagementServicesPatchRequest(_messages.Message):
+  """A ServicemanagementServicesPatchRequest object.
+
+  Fields:
+    managedService: A ManagedService resource to be passed as the request
+      body.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+    updateMask: A mask specifying which fields to update.
+  """
+
+  managedService = _messages.MessageField('ManagedService', 1)
+  serviceName = _messages.StringField(2, required=True)
+  updateMask = _messages.StringField(3)
+
+
+class ServicemanagementServicesProjectSettingsGetRequest(_messages.Message):
+  """A ServicemanagementServicesProjectSettingsGetRequest object.
+
+  Enums:
+    ViewValueValuesEnum: Request only the fields for the specified view.
+
+  Fields:
+    consumerProjectId: The project ID of the consumer.
+    expand: Fields to expand in any results.  By default, the following fields
+      are not present in the result: - `operations` - `quota_usage`
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+    view: Request only the fields for the specified view.
+  """
+
+  class ViewValueValuesEnum(_messages.Enum):
+    """Request only the fields for the specified view.
+
+    Values:
+      PROJECT_SETTINGS_VIEW_UNSPECIFIED: <no description>
+      CONSUMER_VIEW: <no description>
+      PRODUCER_VIEW: <no description>
+      ALL: <no description>
+    """
+    PROJECT_SETTINGS_VIEW_UNSPECIFIED = 0
+    CONSUMER_VIEW = 1
+    PRODUCER_VIEW = 2
+    ALL = 3
+
+  consumerProjectId = _messages.StringField(1, required=True)
+  expand = _messages.StringField(2)
+  serviceName = _messages.StringField(3, required=True)
+  view = _messages.EnumField('ViewValueValuesEnum', 4)
+
+
+class ServicemanagementServicesProjectSettingsPatchRequest(_messages.Message):
+  """A ServicemanagementServicesProjectSettingsPatchRequest object.
+
+  Fields:
+    consumerProjectId: The project ID of the consumer.
+    projectSettings: A ProjectSettings resource to be passed as the request
+      body.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+    updateMask: The field mask specifying which fields are to be updated.
+  """
+
+  consumerProjectId = _messages.StringField(1, required=True)
+  projectSettings = _messages.MessageField('ProjectSettings', 2)
+  serviceName = _messages.StringField(3, required=True)
+  updateMask = _messages.StringField(4)
+
+
+class ServicemanagementServicesUpdateConfigRequest(_messages.Message):
+  """A ServicemanagementServicesUpdateConfigRequest object.
+
+  Fields:
+    service: A Service resource to be passed as the request body.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+    updateMask: A mask specifying which fields to update. Update mask has been
+      deprecated on UpdateServiceConfig service method. Please use
+      PatchServiceConfig method instead to do partial updates.
+  """
+
+  service = _messages.MessageField('Service', 1)
+  serviceName = _messages.StringField(2, required=True)
+  updateMask = _messages.StringField(3)
+
+
+class ServicemanagementServicesUpdateRequest(_messages.Message):
+  """A ServicemanagementServicesUpdateRequest object.
+
+  Fields:
+    managedService: A ManagedService resource to be passed as the request
+      body.
+    serviceName: The name of the service.  See the `ServiceManager` overview
+      for naming requirements.  For example: `example.googleapis.com`.
+    updateMask: A mask specifying which fields to update. Update mask has been
+      deprecated on UpdateService service method. Please use PatchService
+      method instead to do partial updates.
+  """
+
+  managedService = _messages.MessageField('ManagedService', 1)
+  serviceName = _messages.StringField(2, required=True)
+  updateMask = _messages.StringField(3)
+
+
+class SourceContext(_messages.Message):
+  """`SourceContext` represents information about the source of a protobuf
+  element, like the file in which it is defined.
+
+  Fields:
+    fileName: The path-qualified name of the .proto file that contained the
+      associated protobuf element.  For example:
+      `"google/protobuf/source_context.proto"`.
+  """
+
+  fileName = _messages.StringField(1)
+
+
+class StandardQueryParameters(_messages.Message):
+  """Query parameters accepted by all methods.
+
+  Enums:
+    FXgafvValueValuesEnum: V1 error format.
+    AltValueValuesEnum: Data format for response.
+
+  Fields:
+    f__xgafv: V1 error format.
+    access_token: OAuth access token.
+    alt: Data format for response.
+    bearer_token: OAuth bearer token.
+    callback: JSONP
+    fields: Selector specifying which fields to include in a partial response.
+    key: API key. Your API key identifies your project and provides you with
+      API access, quota, and reports. Required unless you provide an OAuth 2.0
+      token.
+    oauth_token: OAuth 2.0 token for the current user.
+    pp: Pretty-print response.
+    prettyPrint: Returns response with indentations and line breaks.
+    quotaUser: Available to use for quota purposes for server-side
+      applications. Can be any arbitrary string assigned to a user, but should
+      not exceed 40 characters.
+    trace: A tracing token of the form "token:<tokenid>" to include in api
+      requests.
+    uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
+    upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
+  """
+
+  class AltValueValuesEnum(_messages.Enum):
+    """Data format for response.
+
+    Values:
+      json: Responses with Content-Type of application/json
+      media: Media download with context-dependent Content-Type
+      proto: Responses with Content-Type of application/x-protobuf
+    """
+    json = 0
+    media = 1
+    proto = 2
+
+  class FXgafvValueValuesEnum(_messages.Enum):
+    """V1 error format.
+
+    Values:
+      _1: v1 error format
+      _2: v2 error format
+    """
+    _1 = 0
+    _2 = 1
+
+  f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
+  access_token = _messages.StringField(2)
+  alt = _messages.EnumField('AltValueValuesEnum', 3, default=u'json')
+  bearer_token = _messages.StringField(4)
+  callback = _messages.StringField(5)
+  fields = _messages.StringField(6)
+  key = _messages.StringField(7)
+  oauth_token = _messages.StringField(8)
+  pp = _messages.BooleanField(9, default=True)
+  prettyPrint = _messages.BooleanField(10, default=True)
+  quotaUser = _messages.StringField(11)
+  trace = _messages.StringField(12)
+  uploadType = _messages.StringField(13)
+  upload_protocol = _messages.StringField(14)
+
+
+class Status(_messages.Message):
+  """The `Status` type defines a logical error model that is suitable for
+  different programming environments, including REST APIs and RPC APIs. It is
+  used by [gRPC](https://github.com/grpc). The error model is designed to be:
+  - Simple to use and understand for most users - Flexible enough to meet
+  unexpected needs  # Overview  The `Status` message contains three pieces of
+  data: error code, error message, and error details. The error code should be
+  an enum value of google.rpc.Code, but it may accept additional error codes
+  if needed.  The error message should be a developer-facing English message
+  that helps developers *understand* and *resolve* the error. If a localized
+  user-facing error message is needed, put the localized message in the error
+  details or localize it in the client. The optional error details may contain
+  arbitrary information about the error. There is a predefined set of error
+  detail types in the package `google.rpc` which can be used for common error
+  conditions.  # Language mapping  The `Status` message is the logical
+  representation of the error model, but it is not necessarily the actual wire
+  format. When the `Status` message is exposed in different client libraries
+  and different wire protocols, it can be mapped differently. For example, it
+  will likely be mapped to some exceptions in Java, but more likely mapped to
+  some error codes in C.  # Other uses  The error model and the `Status`
+  message can be used in a variety of environments, either with or without
+  APIs, to provide a consistent developer experience across different
+  environments.  Example uses of this error model include:  - Partial errors.
+  If a service needs to return partial errors to the client,     it may embed
+  the `Status` in the normal response to indicate the partial     errors.  -
+  Workflow errors. A typical workflow has multiple steps. Each step may
+  have a `Status` message for error reporting purpose.  - Batch operations. If
+  a client uses batch request and batch response, the     `Status` message
+  should be used directly inside batch response, one for     each error sub-
+  response.  - Asynchronous operations. If an API call embeds asynchronous
+  operation     results in its response, the status of those operations should
+  be     represented directly using the `Status` message.  - Logging. If some
+  API errors are stored in logs, the message `Status` could     be used
+  directly after any stripping needed for security/privacy reasons.
+
+  Messages:
+    DetailsValueListEntry: A DetailsValueListEntry object.
+
+  Fields:
+    code: The status code, which should be an enum value of google.rpc.Code.
+    details: A list of messages that carry the error details.  There will be a
+      common set of message types for APIs to use.
+    message: A developer-facing error message, which should be in English. Any
+      user-facing error message should be localized and sent in the
+      google.rpc.Status.details field, or localized by the client.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class DetailsValueListEntry(_messages.Message):
+    """A DetailsValueListEntry object.
+
+    Messages:
+      AdditionalProperty: An additional property for a DetailsValueListEntry
+        object.
+
+    Fields:
+      additionalProperties: Properties of the object. Contains field @type
+        with type URL.
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a DetailsValueListEntry object.
+
+      Fields:
+        key: Name of the additional property.
+        value: An extra_types.JsonValue attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.MessageField('extra_types.JsonValue', 2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  code = _messages.IntegerField(1, variant=_messages.Variant.INT32)
+  details = _messages.MessageField('DetailsValueListEntry', 2, repeated=True)
+  message = _messages.StringField(3)
+
+
+class Step(_messages.Message):
+  """Represents the status of one operation step.
+
+  Enums:
+    StatusValueValuesEnum: The status code.
+
+  Fields:
+    description: The short description of the step.
+    status: The status code.
+  """
+
+  class StatusValueValuesEnum(_messages.Enum):
+    """The status code.
+
+    Values:
+      STATUS_UNSPECIFIED: Unspecified code.
+      DONE: The step has completed without errors.
+      NOT_STARTED: The step has not started yet.
+      IN_PROGRESS: The step is in progress.
+      FAILED: The step has completed with errors.
+    """
+    STATUS_UNSPECIFIED = 0
+    DONE = 1
+    NOT_STARTED = 2
+    IN_PROGRESS = 3
+    FAILED = 4
+
+  description = _messages.StringField(1)
+  status = _messages.EnumField('StatusValueValuesEnum', 2)
+
+
+class SubmitConfigSourceRequest(_messages.Message):
+  """Request message for SubmitConfigSource method.
+
+  Fields:
+    configSource: The source configuration for the service.
+    validateOnly: Optional. If set, this will result in the generation of a
+      `google.api.Service` configuration based on the `ConfigSource` provided,
+      but the generated config and the sources will NOT be persisted.
+  """
+
+  configSource = _messages.MessageField('ConfigSource', 1)
+  validateOnly = _messages.BooleanField(2)
+
+
+class SwaggerSpec(_messages.Message):
+  """A collection of swagger specification files.
+
+  Fields:
+    swaggerFiles: The individual files.
+  """
+
+  swaggerFiles = _messages.MessageField('File', 1, repeated=True)
+
+
+class SystemParameter(_messages.Message):
+  """Define a parameter's name and location. The parameter may be passed as
+  either an HTTP header or a URL query parameter, and if both are passed the
+  behavior is implementation-dependent.
+
+  Fields:
+    httpHeader: Define the HTTP header name to use for the parameter. It is
+      case insensitive.
+    name: Define the name of the parameter, such as "api_key", "alt",
+      "callback", and etc. It is case sensitive.
+    urlQueryParameter: Define the URL query parameter name to use for the
+      parameter. It is case sensitive.
+  """
+
+  httpHeader = _messages.StringField(1)
+  name = _messages.StringField(2)
+  urlQueryParameter = _messages.StringField(3)
+
+
+class SystemParameterRule(_messages.Message):
+  """Define a system parameter rule mapping system parameter definitions to
+  methods.
+
+  Fields:
+    parameters: Define parameters. Multiple names may be defined for a
+      parameter. For a given method call, only one of them should be used. If
+      multiple names are used the behavior is implementation-dependent. If
+      none of the specified names are present the behavior is parameter-
+      dependent.
+    selector: Selects the methods to which this rule applies. Use '*' to
+      indicate all methods in all APIs.  Refer to selector for syntax details.
+  """
+
+  parameters = _messages.MessageField('SystemParameter', 1, repeated=True)
+  selector = _messages.StringField(2)
+
+
+class SystemParameters(_messages.Message):
+  """### System parameter configuration  A system parameter is a special kind
+  of parameter defined by the API system, not by an individual API. It is
+  typically mapped to an HTTP header and/or a URL query parameter. This
+  configuration specifies which methods change the names of the system
+  parameters.
+
+  Fields:
+    rules: Define system parameters.  The parameters defined here will
+      override the default parameters implemented by the system. If this field
+      is missing from the service config, default system parameters will be
+      used. Default system parameters and names are implementation-dependent.
+      Example: define api key and alt name for all methods  system_parameters
+      rules:     - selector: "*"       parameters:         - name: api_key
+      url_query_parameter: api_key         - name: alt           http_header:
+      Response-Content-Type  Example: define 2 api key names for a specific
+      method.  system_parameters   rules:     - selector: "/ListShelves"
+      parameters:         - name: api_key           http_header: Api-Key1
+      - name: api_key           http_header: Api-Key2
+  """
+
+  rules = _messages.MessageField('SystemParameterRule', 1, repeated=True)
+
+
+class Type(_messages.Message):
+  """A protocol buffer message type.
+
+  Enums:
+    SyntaxValueValuesEnum: The source syntax.
+
+  Fields:
+    fields: The list of fields.
+    name: The fully qualified message name.
+    oneofs: The list of types appearing in `oneof` definitions in this type.
+    options: The protocol buffer options.
+    sourceContext: The source context.
+    syntax: The source syntax.
+  """
+
+  class SyntaxValueValuesEnum(_messages.Enum):
+    """The source syntax.
+
+    Values:
+      SYNTAX_PROTO2: Syntax `proto2`.
+      SYNTAX_PROTO3: Syntax `proto3`.
+    """
+    SYNTAX_PROTO2 = 0
+    SYNTAX_PROTO3 = 1
+
+  fields = _messages.MessageField('Field', 1, repeated=True)
+  name = _messages.StringField(2)
+  oneofs = _messages.StringField(3, repeated=True)
+  options = _messages.MessageField('Option', 4, repeated=True)
+  sourceContext = _messages.MessageField('SourceContext', 5)
+  syntax = _messages.EnumField('SyntaxValueValuesEnum', 6)
+
+
+class Usage(_messages.Message):
+  """Configuration controlling usage of a service.
+
+  Enums:
+    ServiceAccessValueValuesEnum: Controls which users can see or activate the
+      service.
+
+  Fields:
+    activationHooks: Services that must be contacted before a consumer can
+      begin using the service. Each service will be contacted in sequence,
+      and, if any activation call fails, the entire activation will fail. Each
+      hook is of the form <service.name>/<hook-id>, where <hook-id> is
+      optional; for example: 'robotservice.googleapis.com/default'.
+    deactivationHooks: Services that must be contacted before a consumer can
+      deactivate a service. Each service will be contacted in sequence, and,
+      if any deactivation call fails, the entire deactivation will fail. Each
+      hook is of the form <service.name>/<hook-id>, where <hook-id> is
+      optional; for example: 'compute.googleapis.com/'.
+    dependsOnServices: Services that must be activated in order for this
+      service to be used. The set of services activated as a result of these
+      relations are all activated in parallel with no guaranteed order of
+      activation. Each string is a service name, e.g.
+      `calendar.googleapis.com`.
+    requirements: Requirements that must be satisfied before a consumer
+      project can use the service. Each requirement is of the form
+      <service.name>/<requirement-id>; for example
+      'serviceusage.googleapis.com/billing-enabled'.
+    rules: Individual rules for configuring usage on selected methods.
+    serviceAccess: Controls which users can see or activate the service.
+  """
+
+  class ServiceAccessValueValuesEnum(_messages.Enum):
+    """Controls which users can see or activate the service.
+
+    Values:
+      RESTRICTED: The service can only be seen/used by users identified in the
+        service's access control policy.  If the service has not been
+        whitelisted by your domain administrator for out-of-org publishing,
+        then this mode will be treated like ORG_RESTRICTED.
+      PUBLIC: The service can be seen/used by anyone.  If the service has not
+        been whitelisted by your domain administrator for out-of-org
+        publishing, then this mode will be treated like ORG_PUBLIC.  The
+        discovery document for the service will also be public and allow
+        unregistered access.
+      ORG_RESTRICTED: The service can be seen/used by users identified in the
+        service's access control policy and they are within the organization
+        that owns the service.  Access is further constrained to the group
+        controlled by the administrator of the project/org that owns the
+        service.
+      ORG_PUBLIC: The service can be seen/used by the group of users
+        controlled by the administrator of the project/org that owns the
+        service.
+    """
+    RESTRICTED = 0
+    PUBLIC = 1
+    ORG_RESTRICTED = 2
+    ORG_PUBLIC = 3
+
+  activationHooks = _messages.StringField(1, repeated=True)
+  deactivationHooks = _messages.StringField(2, repeated=True)
+  dependsOnServices = _messages.StringField(3, repeated=True)
+  requirements = _messages.StringField(4, repeated=True)
+  rules = _messages.MessageField('UsageRule', 5, repeated=True)
+  serviceAccess = _messages.EnumField('ServiceAccessValueValuesEnum', 6)
+
+
+class UsageRule(_messages.Message):
+  """Usage configuration rules for the service.  NOTE: Under development.
+  Use this rule to configure unregistered calls for the service. Unregistered
+  calls are calls that do not contain consumer project identity. (Example:
+  calls that do not contain an API key). By default, API methods do not allow
+  unregistered calls, and each method call must be identified by a consumer
+  project identity. Use this rule to allow/disallow unregistered calls.
+  Example of an API that wants to allow unregistered calls for entire service.
+  usage:       rules:       - selector: "*"         allow_unregistered_calls:
+  true  Example of a method that wants to allow unregistered calls.
+  usage:       rules:       - selector:
+  "google.example.library.v1.LibraryService.CreateBook"
+  allow_unregistered_calls: true
+
+  Fields:
+    allowUnregisteredCalls: True, if the method allows unregistered calls;
+      false otherwise.
+    selector: Selects the methods to which this rule applies. Use '*' to
+      indicate all methods in all APIs.  Refer to selector for syntax details.
+  """
+
+  allowUnregisteredCalls = _messages.BooleanField(1)
+  selector = _messages.StringField(2)
+
+
+class UsageSettings(_messages.Message):
+  """Usage settings for a consumer of a service.
+
+  Enums:
+    ConsumerEnableStatusValueValuesEnum: Consumer controlled setting to
+      enable/disable use of this service by the consumer project. The default
+      value of this is controlled by the service configuration.
+
+  Fields:
+    consumerEnableStatus: Consumer controlled setting to enable/disable use of
+      this service by the consumer project. The default value of this is
+      controlled by the service configuration.
+  """
+
+  class ConsumerEnableStatusValueValuesEnum(_messages.Enum):
+    """Consumer controlled setting to enable/disable use of this service by
+    the consumer project. The default value of this is controlled by the
+    service configuration.
+
+    Values:
+      DISABLED: The service is disabled.
+      ENABLED: The service is enabled.
+    """
+    DISABLED = 0
+    ENABLED = 1
+
+  consumerEnableStatus = _messages.EnumField('ConsumerEnableStatusValueValuesEnum', 1)
+
+
+class VariableTermQuota(_messages.Message):
+  """A variable term quota is a bucket of tokens that is consumed over a
+  specified (usually long) time period. When present, it overrides any "1d"
+  duration per-project quota specified on the group.  Variable terms run from
+  midnight to midnight, start_date to end_date (inclusive) in the
+  America/Los_Angeles time zone.
+
+  Fields:
+    createTime: Time when this variable term quota was created. If multiple
+      quotas are simultaneously active, then the quota with the latest
+      create_time is the effective one.
+    displayEndDate: The displayed end of the active period for the variable
+      term quota. This may be before the effective end to give the user a
+      grace period. YYYYMMdd date format, e.g. 20140730.
+    endDate: The effective end of the active period for the variable term
+      quota (inclusive). This must be no more than 5 years after start_date.
+      YYYYMMdd date format, e.g. 20140730.
+    groupName: The quota group that has the variable term quota applied to it.
+      This must be a google.api.QuotaGroup.name specified in the service
+      configuration.
+    limit: The number of tokens available during the configured term.
+    quotaUsage: The usage data of this quota.
+    startDate: The beginning of the active period for the variable term quota.
+      YYYYMMdd date format, e.g. 20140730.
+  """
+
+  createTime = _messages.StringField(1)
+  displayEndDate = _messages.StringField(2)
+  endDate = _messages.StringField(3)
+  groupName = _messages.StringField(4)
+  limit = _messages.IntegerField(5)
+  quotaUsage = _messages.MessageField('QuotaUsage', 6)
+  startDate = _messages.StringField(7)
+
+
+class Visibility(_messages.Message):
+  """`Visibility` defines restrictions for the visibility of service elements.
+  Restrictions are specified using visibility labels (e.g., TRUSTED_TESTER)
+  that are elsewhere linked to users and projects.  Users and projects can
+  have access to more than one visibility label. The effective visibility for
+  multiple labels is the union of each label's elements, plus any unrestricted
+  elements.  If an element and its parents have no restrictions, visibility is
+  unconditionally granted.  Example:      visibility:       rules:       -
+  selector: google.calendar.Calendar.EnhancedSearch         restriction:
+  TRUSTED_TESTER       - selector: google.calendar.Calendar.Delegate
+  restriction: GOOGLE_INTERNAL  Here, all methods are publicly visible except
+  for the restricted methods EnhancedSearch and Delegate.
+
+  Fields:
+    enforceRuntimeVisibility: Controls whether visibility rules are enforced
+      at runtime for requests to all APIs and methods.  If true, requests
+      without method visibility will receive a NOT_FOUND error, and any non-
+      visible fields will be scrubbed from the response messages. In service
+      config version 0, the default is false. In later config versions, it's
+      true.  Note, the `enforce_runtime_visibility` specified in a visibility
+      rule overrides this setting for the APIs or methods associated with the
+      rule.
+    rules: A list of visibility rules providing visibility configuration for
+      individual API elements.
+  """
+
+  enforceRuntimeVisibility = _messages.BooleanField(1)
+  rules = _messages.MessageField('VisibilityRule', 2, repeated=True)
+
+
+class VisibilityRule(_messages.Message):
+  """A visibility rule provides visibility configuration for an individual API
+  element.
+
+  Fields:
+    enforceRuntimeVisibility: Controls whether visibility is enforced at
+      runtime for requests to an API method. This setting has meaning only
+      when the selector applies to a method or an API.  If true, requests
+      without method visibility will receive a NOT_FOUND error, and any non-
+      visible fields will be scrubbed from the response messages. The default
+      is determined by the value of
+      google.api.Visibility.enforce_runtime_visibility.
+    restriction: Lists the visibility labels for this rule. Any of the listed
+      labels grants visibility to the element.  If a rule has multiple labels,
+      removing one of the labels but not all of them can break clients.
+      Example:      visibility:       rules:       - selector:
+      google.calendar.Calendar.EnhancedSearch         restriction:
+      GOOGLE_INTERNAL, TRUSTED_TESTER  Removing GOOGLE_INTERNAL from this
+      restriction will break clients that rely on this method and only had
+      access to it through GOOGLE_INTERNAL.
+    selector: Selects methods, messages, fields, enums, etc. to which this
+      rule applies.  Refer to selector for syntax details.
+  """
+
+  enforceRuntimeVisibility = _messages.BooleanField(1)
+  restriction = _messages.StringField(2)
+  selector = _messages.StringField(3)
+
+
+class VisibilitySettings(_messages.Message):
+  """Settings that control which features of the service are visible to the
+  consumer project.
+
+  Fields:
+    visibilityLabels: The set of visibility labels that are used to determine
+      what API surface is visible to calls made by this project. The visible
+      surface is a union of the surface features associated with each label
+      listed here, plus the publicly visible (unrestricted) surface.  The
+      service producer may add or remove labels at any time. The service
+      consumer may add a label if the calling user has been granted permission
+      to do so by the producer.  The service consumer may also remove any
+      label at any time.
+  """
+
+  visibilityLabels = _messages.StringField(1, repeated=True)
+
+
+encoding.AddCustomJsonFieldMapping(
+    StandardQueryParameters, 'f__xgafv', '$.xgafv',
+    package=u'servicemanagement')
+encoding.AddCustomJsonEnumMapping(
+    StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1',
+    package=u'servicemanagement')
+encoding.AddCustomJsonEnumMapping(
+    StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2',
+    package=u'servicemanagement')
diff --git a/samples/storage_sample/downloads_test.py b/samples/storage_sample/downloads_test.py
new file mode 100644
index 0000000..a51cd95
--- /dev/null
+++ b/samples/storage_sample/downloads_test.py
@@ -0,0 +1,197 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Integration tests for uploading and downloading to GCS.
+
+These tests exercise most of the corner cases for upload/download of
+files in apitools, via GCS. There are no performance tests here yet.
+"""
+
+import json
+import os
+import unittest
+
+import six
+
+import apitools.base.py as apitools_base
+import storage
+
+_CLIENT = None
+
+
def _GetClient():
    """Return the shared StorageV1 client, constructing it on first call."""
    global _CLIENT  # pylint: disable=global-statement
    client = _CLIENT
    if client is None:
        client = storage.StorageV1()
        _CLIENT = client
    return client
+
+
class DownloadsTest(unittest.TestCase):
    """Integration tests for downloading objects from GCS via apitools.

    These tests call the live Cloud Storage API through the shared client
    from _GetClient(), so they require network access and credentials with
    read access to the fixture buckets referenced below.
    """

    # Bucket and object-name prefix under which the fixture objects live.
    _DEFAULT_BUCKET = 'apitools'
    _TESTDATA_PREFIX = 'testdata'

    def setUp(self):
        self.__client = _GetClient()
        self.__ResetDownload()

    def __ResetDownload(self, auto_transfer=False):
        """Replace the current buffer/Download pair with a fresh one."""
        # NOTE(review): StringIO holds text; binary object payloads may need
        # io.BytesIO under Python 3 -- confirm against apitools' stream use.
        self.__buffer = six.StringIO()
        self.__download = storage.Download.FromStream(
            self.__buffer, auto_transfer=auto_transfer)

    def __GetTestdataFileContents(self, filename):
        """Read and return the contents of a local testdata fixture file."""
        file_path = os.path.join(
            os.path.dirname(__file__), self._TESTDATA_PREFIX, filename)
        # Use a context manager so the handle is closed deterministically;
        # the previous bare open() leaked it until garbage collection.
        with open(file_path) as stream:
            file_contents = stream.read()
        self.assertIsNotNone(
            file_contents, msg=('Could not read file %s' % filename))
        return file_contents

    @classmethod
    def __GetRequest(cls, filename):
        """Build a Get request for a fixture object in the default bucket."""
        object_name = os.path.join(cls._TESTDATA_PREFIX, filename)
        return storage.StorageObjectsGetRequest(
            bucket=cls._DEFAULT_BUCKET, object=object_name)

    def __GetFile(self, request):
        """Issue the Get request with self.__download attached.

        With a download attached, the service call returns None; the actual
        bytes arrive via the Download object (immediately if auto_transfer
        was set, otherwise on a later StreamInChunks/GetRange call).
        """
        response = self.__client.objects.Get(request, download=self.__download)
        self.assertIsNone(response, msg=(
            'Unexpected nonempty response for file download: %s' % response))

    def __GetAndStream(self, request):
        """Fetch the object metadata, then stream its full contents."""
        self.__GetFile(request)
        self.__download.StreamInChunks()

    def testZeroBytes(self):
        # An empty object should produce an empty buffer.
        request = self.__GetRequest('zero_byte_file')
        self.__GetAndStream(request)
        self.assertEqual(0, self.__buffer.tell())

    def testObjectDoesNotExist(self):
        # Fetching a missing object must surface an HttpError.
        self.__ResetDownload(auto_transfer=True)
        with self.assertRaises(apitools_base.HttpError):
            self.__GetFile(self.__GetRequest('nonexistent_file'))

    def testAutoTransfer(self):
        # With auto_transfer, the Get call itself fills the buffer.
        self.__ResetDownload(auto_transfer=True)
        self.__GetFile(self.__GetRequest('fifteen_byte_file'))
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        self.assertEqual(15, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents, self.__buffer.read())

    def testFilenameWithSpaces(self):
        self.__ResetDownload(auto_transfer=True)
        self.__GetFile(self.__GetRequest('filename with spaces'))
        # NOTE(craigcitro): We add _ here to make this play nice with blaze.
        file_contents = self.__GetTestdataFileContents('filename_with_spaces')
        self.assertEqual(15, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents, self.__buffer.read())

    def testGetRange(self):
        # TODO(craigcitro): Test about a thousand more corner cases.
        # GetRange bounds are inclusive: (5, 10) yields 6 bytes.
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        self.__GetFile(self.__GetRequest('fifteen_byte_file'))
        self.__download.GetRange(5, 10)
        self.assertEqual(6, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents[5:11], self.__buffer.read())

    def testGetRangeWithNegativeStart(self):
        # A negative start means "last N bytes" (HTTP suffix range).
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        self.__GetFile(self.__GetRequest('fifteen_byte_file'))
        self.__download.GetRange(-3)
        self.assertEqual(3, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents[-3:], self.__buffer.read())

    def testGetRangeWithPositiveStart(self):
        # A start with no end streams from that offset to EOF.
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        self.__GetFile(self.__GetRequest('fifteen_byte_file'))
        self.__download.GetRange(2)
        self.assertEqual(13, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents[2:15], self.__buffer.read())

    def testSmallChunksizes(self):
        # The full contents must arrive regardless of chunk size,
        # including chunks smaller and larger than the object.
        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        request = self.__GetRequest('fifteen_byte_file')
        for chunksize in (2, 3, 15, 100):
            self.__ResetDownload()
            self.__download.chunksize = chunksize
            self.__GetAndStream(request)
            self.assertEqual(15, self.__buffer.tell())
            self.__buffer.seek(0)
            self.assertEqual(file_contents, self.__buffer.read(15))

    def testLargeFileChunksizes(self):
        # Smoke test only: verify streaming a large object completes for
        # both a 1MiB chunk and a chunk larger than the object.
        request = self.__GetRequest('thirty_meg_file')
        for chunksize in (1048576, 40 * 1048576):
            self.__ResetDownload()
            self.__download.chunksize = chunksize
            self.__GetAndStream(request)
            self.__buffer.seek(0)

    def testAutoGzipObject(self):
        # TODO(craigcitro): Move this to a new object once we have a more
        # permanent one, see: http://b/12250275
        request = storage.StorageObjectsGetRequest(
            bucket='ottenl-gzip', object='50K.txt')
        # First, try without auto-transfer.
        self.__GetFile(request)
        self.assertEqual(0, self.__buffer.tell())
        self.__download.StreamInChunks()
        self.assertEqual(50000, self.__buffer.tell())
        # Next, try with auto-transfer.
        self.__ResetDownload(auto_transfer=True)
        self.__GetFile(request)
        self.assertEqual(50000, self.__buffer.tell())

    def testSmallGzipObject(self):
        # Advertising gzip and receiving a gzipped empty object should
        # still leave the buffer empty after decoding.
        request = self.__GetRequest('zero-gzipd.html')
        self.__GetFile(request)
        self.assertEqual(0, self.__buffer.tell())
        additional_headers = {'accept-encoding': 'gzip, deflate'}
        self.__download.StreamInChunks(additional_headers=additional_headers)
        self.assertEqual(0, self.__buffer.tell())

    def testSerializedDownload(self):
        """Rebuild a Download from serialized JSON state and stream it."""

        def _ProgressCallback(unused_response, download_object):
            # print() call form keeps this file valid on Python 3 as well
            # as Python 2 (a bare print statement is a py3 SyntaxError).
            print('Progress %s' % download_object.progress)

        file_contents = self.__GetTestdataFileContents('fifteen_byte_file')
        object_name = os.path.join(self._TESTDATA_PREFIX, 'fifteen_byte_file')
        request = storage.StorageObjectsGetRequest(
            bucket=self._DEFAULT_BUCKET, object=object_name)
        response = self.__client.objects.Get(request)
        # pylint: disable=attribute-defined-outside-init
        self.__buffer = six.StringIO()
        download_data = json.dumps({
            'auto_transfer': False,
            'progress': 0,
            'total_size': response.size,
            'url': response.mediaLink,
        })
        self.__download = storage.Download.FromData(
            self.__buffer, download_data, http=self.__client.http)
        self.__download.StreamInChunks(callback=_ProgressCallback)
        self.assertEqual(15, self.__buffer.tell())
        self.__buffer.seek(0)
        self.assertEqual(file_contents, self.__buffer.read(15))
+
# Allow running this integration-test module directly.
if __name__ == '__main__':
    unittest.main()
diff --git a/samples/storage_sample/generate_clients.sh b/samples/storage_sample/generate_clients.sh
new file mode 100755
index 0000000..f1f9b69
--- /dev/null
+++ b/samples/storage_sample/generate_clients.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+gen_client --discovery_url=storage.v1 --overwrite --outdir=storage --root_package=. client
diff --git a/samples/storage_sample/storage_v1.json b/samples/storage_sample/storage_v1.json
new file mode 100644
index 0000000..ff0b909
--- /dev/null
+++ b/samples/storage_sample/storage_v1.json
@@ -0,0 +1,3338 @@
+{
+ "kind": "discovery#restDescription",
+ "discoveryVersion": "v1",
+ "id": "storage:v1",
+ "name": "storage",
+ "version": "v1",
+ "revision": "20160525",
+ "title": "Cloud Storage JSON API",
+ "description": "Stores and retrieves potentially large, immutable data objects.",
+ "ownerDomain": "google.com",
+ "ownerName": "Google",
+ "icons": {
+  "x16": "https://www.google.com/images/icons/product/cloud_storage-16.png",
+  "x32": "https://www.google.com/images/icons/product/cloud_storage-32.png"
+ },
+ "documentationLink": "https://developers.google.com/storage/docs/json_api/",
+ "labels": [
+  "graduated"
+ ],
+ "protocol": "rest",
+ "baseUrl": "https://www.googleapis.com/storage/v1/",
+ "basePath": "/storage/v1/",
+ "rootUrl": "https://www.googleapis.com/",
+ "servicePath": "storage/v1/",
+ "batchPath": "batch",
+ "parameters": {
+  "alt": {
+   "type": "string",
+   "description": "Data format for the response.",
+   "default": "json",
+   "enum": [
+    "json"
+   ],
+   "enumDescriptions": [
+    "Responses with Content-Type of application/json"
+   ],
+   "location": "query"
+  },
+  "fields": {
+   "type": "string",
+   "description": "Selector specifying which fields to include in a partial response.",
+   "location": "query"
+  },
+  "key": {
+   "type": "string",
+   "description": "API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.",
+   "location": "query"
+  },
+  "oauth_token": {
+   "type": "string",
+   "description": "OAuth 2.0 token for the current user.",
+   "location": "query"
+  },
+  "prettyPrint": {
+   "type": "boolean",
+   "description": "Returns response with indentations and line breaks.",
+   "default": "true",
+   "location": "query"
+  },
+  "quotaUser": {
+   "type": "string",
+   "description": "Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.",
+   "location": "query"
+  },
+  "userIp": {
+   "type": "string",
+   "description": "IP address of the site where the request originates. Use this if you want to enforce per-user limits.",
+   "location": "query"
+  }
+ },
+ "auth": {
+  "oauth2": {
+   "scopes": {
+    "https://www.googleapis.com/auth/cloud-platform": {
+     "description": "View and manage your data across Google Cloud Platform services"
+    },
+    "https://www.googleapis.com/auth/cloud-platform.read-only": {
+     "description": "View your data across Google Cloud Platform services"
+    },
+    "https://www.googleapis.com/auth/devstorage.full_control": {
+     "description": "Manage your data and permissions in Google Cloud Storage"
+    },
+    "https://www.googleapis.com/auth/devstorage.read_only": {
+     "description": "View your data in Google Cloud Storage"
+    },
+    "https://www.googleapis.com/auth/devstorage.read_write": {
+     "description": "Manage your data in Google Cloud Storage"
+    }
+   }
+  }
+ },
+ "schemas": {
+  "Bucket": {
+   "id": "Bucket",
+   "type": "object",
+   "description": "A bucket.",
+   "properties": {
+    "acl": {
+     "type": "array",
+     "description": "Access controls on the bucket.",
+     "items": {
+      "$ref": "BucketAccessControl"
+     },
+     "annotations": {
+      "required": [
+       "storage.buckets.update"
+      ]
+     }
+    },
+    "cors": {
+     "type": "array",
+     "description": "The bucket's Cross-Origin Resource Sharing (CORS) configuration.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "maxAgeSeconds": {
+        "type": "integer",
+        "description": "The value, in seconds, to return in the  Access-Control-Max-Age header used in preflight responses.",
+        "format": "int32"
+       },
+       "method": {
+        "type": "array",
+        "description": "The list of HTTP methods on which to include CORS response headers, (GET, OPTIONS, POST, etc) Note: \"*\" is permitted in the list of methods, and means \"any method\".",
+        "items": {
+         "type": "string"
+        }
+       },
+       "origin": {
+        "type": "array",
+        "description": "The list of Origins eligible to receive CORS response headers. Note: \"*\" is permitted in the list of origins, and means \"any Origin\".",
+        "items": {
+         "type": "string"
+        }
+       },
+       "responseHeader": {
+        "type": "array",
+        "description": "The list of HTTP headers other than the simple response headers to give permission for the user-agent to share across domains.",
+        "items": {
+         "type": "string"
+        }
+       }
+      }
+     }
+    },
+    "defaultObjectAcl": {
+     "type": "array",
+     "description": "Default access controls to apply to new objects when no ACL is provided.",
+     "items": {
+      "$ref": "ObjectAccessControl"
+     }
+    },
+    "etag": {
+     "type": "string",
+     "description": "HTTP 1.1 Entity tag for the bucket."
+    },
+    "id": {
+     "type": "string",
+     "description": "The ID of the bucket."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For buckets, this is always storage#bucket.",
+     "default": "storage#bucket"
+    },
+    "lifecycle": {
+     "type": "object",
+     "description": "The bucket's lifecycle configuration. See lifecycle management for more information.",
+     "properties": {
+      "rule": {
+       "type": "array",
+       "description": "A lifecycle management rule, which is made of an action to take and the condition(s) under which the action will be taken.",
+       "items": {
+        "type": "object",
+        "properties": {
+         "action": {
+          "type": "object",
+          "description": "The action to take.",
+          "properties": {
+           "type": {
+            "type": "string",
+            "description": "Type of the action. Currently, only Delete is supported."
+           }
+          }
+         },
+         "condition": {
+          "type": "object",
+          "description": "The condition(s) under which the action will be taken.",
+          "properties": {
+           "age": {
+            "type": "integer",
+            "description": "Age of an object (in days). This condition is satisfied when an object reaches the specified age.",
+            "format": "int32"
+           },
+           "createdBefore": {
+            "type": "string",
+            "description": "A date in RFC 3339 format with only the date part (for instance, \"2013-01-15\"). This condition is satisfied when an object is created before midnight of the specified date in UTC.",
+            "format": "date"
+           },
+           "isLive": {
+            "type": "boolean",
+            "description": "Relevant only for versioned objects. If the value is true, this condition matches live objects; if the value is false, it matches archived objects."
+           },
+           "numNewerVersions": {
+            "type": "integer",
+            "description": "Relevant only for versioned objects. If the value is N, this condition is satisfied when there are at least N versions (including the live version) newer than this version of the object.",
+            "format": "int32"
+           }
+          }
+         }
+        }
+       }
+      }
+     }
+    },
+    "location": {
+     "type": "string",
+     "description": "The location of the bucket. Object data for objects in the bucket resides in physical storage within this region. Defaults to US. See the developer's guide for the authoritative list."
+    },
+    "logging": {
+     "type": "object",
+     "description": "The bucket's logging configuration, which defines the destination bucket and optional name prefix for the current bucket's logs.",
+     "properties": {
+      "logBucket": {
+       "type": "string",
+       "description": "The destination bucket where the current bucket's logs should be placed."
+      },
+      "logObjectPrefix": {
+       "type": "string",
+       "description": "A prefix for log object names."
+      }
+     }
+    },
+    "metageneration": {
+     "type": "string",
+     "description": "The metadata generation of this bucket.",
+     "format": "int64"
+    },
+    "name": {
+     "type": "string",
+     "description": "The name of the bucket.",
+     "annotations": {
+      "required": [
+       "storage.buckets.insert"
+      ]
+     }
+    },
+    "owner": {
+     "type": "object",
+     "description": "The owner of the bucket. This is always the project team's owner group.",
+     "properties": {
+      "entity": {
+       "type": "string",
+       "description": "The entity, in the form project-owner-projectId."
+      },
+      "entityId": {
+       "type": "string",
+       "description": "The ID for the entity."
+      }
+     }
+    },
+    "projectNumber": {
+     "type": "string",
+     "description": "The project number of the project the bucket belongs to.",
+     "format": "uint64"
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "The URI of this bucket."
+    },
+    "storageClass": {
+     "type": "string",
+     "description": "The bucket's storage class. This defines how objects in the bucket are stored and determines the SLA and the cost of storage. Values include STANDARD, NEARLINE and DURABLE_REDUCED_AVAILABILITY. Defaults to STANDARD. For more information, see storage classes."
+    },
+    "timeCreated": {
+     "type": "string",
+     "description": "The creation time of the bucket in RFC 3339 format.",
+     "format": "date-time"
+    },
+    "updated": {
+     "type": "string",
+     "description": "The modification time of the bucket in RFC 3339 format.",
+     "format": "date-time"
+    },
+    "versioning": {
+     "type": "object",
+     "description": "The bucket's versioning configuration.",
+     "properties": {
+      "enabled": {
+       "type": "boolean",
+       "description": "While set to true, versioning is fully enabled for this bucket."
+      }
+     }
+    },
+    "website": {
+     "type": "object",
+     "description": "The bucket's website configuration, controlling how the service behaves when accessing bucket contents as a web site. See the Static Website Examples for more information.",
+     "properties": {
+      "mainPageSuffix": {
+       "type": "string",
+       "description": "If the requested object path is missing, the service will ensure the path has a trailing '/', append this suffix, and attempt to retrieve the resulting object. This allows the creation of index.html objects to represent directory pages."
+      },
+      "notFoundPage": {
+       "type": "string",
+       "description": "If the requested object path is missing, and any mainPageSuffix object is missing, if applicable, the service will return the named object from this bucket as the content for a 404 Not Found result."
+      }
+     }
+    }
+   }
+  },
+  "BucketAccessControl": {
+   "id": "BucketAccessControl",
+   "type": "object",
+   "description": "An access-control entry.",
+   "properties": {
+    "bucket": {
+     "type": "string",
+     "description": "The name of the bucket."
+    },
+    "domain": {
+     "type": "string",
+     "description": "The domain associated with the entity, if any."
+    },
+    "email": {
+     "type": "string",
+     "description": "The email address associated with the entity, if any."
+    },
+    "entity": {
+     "type": "string",
+     "description": "The entity holding the permission, in one of the following forms: \n- user-userId \n- user-email \n- group-groupId \n- group-email \n- domain-domain \n- project-team-projectId \n- allUsers \n- allAuthenticatedUsers Examples: \n- The user liz@example.com would be user-liz@example.com. \n- The group example@googlegroups.com would be group-example@googlegroups.com. \n- To refer to all members of the Google Apps for Business domain example.com, the entity would be domain-example.com.",
+     "annotations": {
+      "required": [
+       "storage.bucketAccessControls.insert"
+      ]
+     }
+    },
+    "entityId": {
+     "type": "string",
+     "description": "The ID for the entity, if any."
+    },
+    "etag": {
+     "type": "string",
+     "description": "HTTP 1.1 Entity tag for the access-control entry."
+    },
+    "id": {
+     "type": "string",
+     "description": "The ID of the access-control entry."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For bucket access control entries, this is always storage#bucketAccessControl.",
+     "default": "storage#bucketAccessControl"
+    },
+    "projectTeam": {
+     "type": "object",
+     "description": "The project team associated with the entity, if any.",
+     "properties": {
+      "projectNumber": {
+       "type": "string",
+       "description": "The project number."
+      },
+      "team": {
+       "type": "string",
+       "description": "The team. Can be owners, editors, or viewers."
+      }
+     }
+    },
+    "role": {
+     "type": "string",
+     "description": "The access permission for the entity. Can be READER, WRITER, or OWNER.",
+     "annotations": {
+      "required": [
+       "storage.bucketAccessControls.insert"
+      ]
+     }
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "The link to this access-control entry."
+    }
+   }
+  },
+  "BucketAccessControls": {
+   "id": "BucketAccessControls",
+   "type": "object",
+   "description": "An access-control list.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "The list of items.",
+     "items": {
+      "$ref": "BucketAccessControl"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For lists of bucket access control entries, this is always storage#bucketAccessControls.",
+     "default": "storage#bucketAccessControls"
+    }
+   }
+  },
+  "Buckets": {
+   "id": "Buckets",
+   "type": "object",
+   "description": "A list of buckets.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "The list of items.",
+     "items": {
+      "$ref": "Bucket"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For lists of buckets, this is always storage#buckets.",
+     "default": "storage#buckets"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The continuation token, used to page through large result sets. Provide this value in a subsequent request to return the next page of results."
+    }
+   }
+  },
+  "Channel": {
+   "id": "Channel",
+   "type": "object",
+   "description": "An notification channel used to watch for resource changes.",
+   "properties": {
+    "address": {
+     "type": "string",
+     "description": "The address where notifications are delivered for this channel."
+    },
+    "expiration": {
+     "type": "string",
+     "description": "Date and time of notification channel expiration, expressed as a Unix timestamp, in milliseconds. Optional.",
+     "format": "int64"
+    },
+    "id": {
+     "type": "string",
+     "description": "A UUID or similar unique string that identifies this channel."
+    },
+    "kind": {
+     "type": "string",
+     "description": "Identifies this as a notification channel used to watch for changes to a resource. Value: the fixed string \"api#channel\".",
+     "default": "api#channel"
+    },
+    "params": {
+     "type": "object",
+     "description": "Additional parameters controlling delivery channel behavior. Optional.",
+     "additionalProperties": {
+      "type": "string",
+      "description": "Declares a new parameter by name."
+     }
+    },
+    "payload": {
+     "type": "boolean",
+     "description": "A Boolean value to indicate whether payload is wanted. Optional."
+    },
+    "resourceId": {
+     "type": "string",
+     "description": "An opaque ID that identifies the resource being watched on this channel. Stable across different API versions."
+    },
+    "resourceUri": {
+     "type": "string",
+     "description": "A version-specific identifier for the watched resource."
+    },
+    "token": {
+     "type": "string",
+     "description": "An arbitrary string delivered to the target address with each notification delivered over this channel. Optional."
+    },
+    "type": {
+     "type": "string",
+     "description": "The type of delivery mechanism used for this channel."
+    }
+   }
+  },
+  "ComposeRequest": {
+   "id": "ComposeRequest",
+   "type": "object",
+   "description": "A Compose request.",
+   "properties": {
+    "destination": {
+     "$ref": "Object",
+     "description": "Properties of the resulting object."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is.",
+     "default": "storage#composeRequest"
+    },
+    "sourceObjects": {
+     "type": "array",
+     "description": "The list of source objects that will be concatenated into a single object.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "generation": {
+        "type": "string",
+        "description": "The generation of this object to use as the source.",
+        "format": "int64"
+       },
+       "name": {
+        "type": "string",
+        "description": "The source object's name. The source object's bucket is implicitly the destination bucket.",
+        "annotations": {
+         "required": [
+          "storage.objects.compose"
+         ]
+        }
+       },
+       "objectPreconditions": {
+        "type": "object",
+        "description": "Conditions that must be met for this operation to execute.",
+        "properties": {
+         "ifGenerationMatch": {
+          "type": "string",
+          "description": "Only perform the composition if the generation of the source object that would be used matches this value. If this value and a generation are both specified, they must be the same value or the call will fail.",
+          "format": "int64"
+         }
+        }
+       }
+      }
+     },
+     "annotations": {
+      "required": [
+       "storage.objects.compose"
+      ]
+     }
+    }
+   }
+  },
+  "Notification": {
+   "id": "Notification",
+   "type": "object",
+   "description": "A subscription to receive Google PubSub notifications.",
+   "properties": {
+    "bucket": {
+     "type": "string",
+     "description": "The name of the bucket this subscription is particular to.",
+     "annotations": {
+      "required": [
+       "storage.notifications.insert"
+      ]
+     }
+    },
+    "custom_attributes": {
+     "type": "object",
+     "description": "An optional list of additional attributes to attach to each Cloud PubSub message published for this notification subscription.",
+     "additionalProperties": {
+      "type": "string"
+     }
+    },
+    "etag": {
+     "type": "string",
+     "description": "HTTP 1.1 Entity tag for this subscription notification."
+    },
+    "event_types": {
+     "type": "array",
+     "description": "If present, only send notifications about listed event types. If empty, sent notifications for all event types.",
+     "items": {
+      "type": "string"
+     }
+    },
+    "id": {
+     "type": "string",
+     "description": "The ID of the notification."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For notifications, this is always storage#notification.",
+     "default": "storage#notification"
+    },
+    "object_metadata_format": {
+     "type": "string",
+     "description": "If payload_content is OBJECT_METADATA, controls the format of that metadata. Otherwise, must not be set.",
+     "default": "JSON_API_V1"
+    },
+    "object_name_prefix": {
+     "type": "string",
+     "description": "If present, only apply this notification configuration to object names that begin with this prefix."
+    },
+    "payload_content": {
+     "type": "string",
+     "description": "The desired content of the Payload. Defaults to OBJECT_METADATA.",
+     "default": "OBJECT_METADATA"
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "The canonical URL of this notification."
+    },
+    "topic": {
+     "type": "string",
+     "description": "The Cloud PubSub topic to which this subscription publishes. Formatted as: '//pubsub.googleapis.com/projects/{project-identifier}/topics/{my-topic}'",
+     "annotations": {
+      "required": [
+       "storage.notifications.insert"
+      ]
+     }
+    }
+   }
+  },
+  "Notifications": {
+   "id": "Notifications",
+   "type": "object",
+   "description": "A list of notification subscriptions.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "The list of items.",
+     "items": {
+      "$ref": "Notification"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For lists of notifications, this is always storage#notifications.",
+     "default": "storage#notifications"
+    }
+   }
+  },
+  "Object": {
+   "id": "Object",
+   "type": "object",
+   "description": "An object.",
+   "properties": {
+    "acl": {
+     "type": "array",
+     "description": "Access controls on the object.",
+     "items": {
+      "$ref": "ObjectAccessControl"
+     },
+     "annotations": {
+      "required": [
+       "storage.objects.update"
+      ]
+     }
+    },
+    "bucket": {
+     "type": "string",
+     "description": "The name of the bucket containing this object."
+    },
+    "cacheControl": {
+     "type": "string",
+     "description": "Cache-Control directive for the object data."
+    },
+    "componentCount": {
+     "type": "integer",
+     "description": "Number of underlying components that make up this object. Components are accumulated by compose operations.",
+     "format": "int32"
+    },
+    "contentDisposition": {
+     "type": "string",
+     "description": "Content-Disposition of the object data."
+    },
+    "contentEncoding": {
+     "type": "string",
+     "description": "Content-Encoding of the object data."
+    },
+    "contentLanguage": {
+     "type": "string",
+     "description": "Content-Language of the object data."
+    },
+    "contentType": {
+     "type": "string",
+     "description": "Content-Type of the object data. If contentType is not specified, object downloads will be served as application/octet-stream."
+    },
+    "crc32c": {
+     "type": "string",
+     "description": "CRC32c checksum, as described in RFC 4960, Appendix B; encoded using base64 in big-endian byte order. For more information about using the CRC32c checksum, see Hashes and ETags: Best Practices."
+    },
+    "customerEncryption": {
+     "type": "object",
+     "description": "Metadata of customer-supplied encryption key, if the object is encrypted by such a key.",
+     "properties": {
+      "encryptionAlgorithm": {
+       "type": "string",
+       "description": "The encryption algorithm."
+      },
+      "keySha256": {
+       "type": "string",
+       "description": "SHA256 hash value of the encryption key."
+      }
+     }
+    },
+    "etag": {
+     "type": "string",
+     "description": "HTTP 1.1 Entity tag for the object."
+    },
+    "generation": {
+     "type": "string",
+     "description": "The content generation of this object. Used for object versioning.",
+     "format": "int64"
+    },
+    "id": {
+     "type": "string",
+     "description": "The ID of the object."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For objects, this is always storage#object.",
+     "default": "storage#object"
+    },
+    "md5Hash": {
+     "type": "string",
+     "description": "MD5 hash of the data; encoded using base64. For more information about using the MD5 hash, see Hashes and ETags: Best Practices."
+    },
+    "mediaLink": {
+     "type": "string",
+     "description": "Media download link."
+    },
+    "metadata": {
+     "type": "object",
+     "description": "User-provided metadata, in key/value pairs.",
+     "additionalProperties": {
+      "type": "string",
+      "description": "An individual metadata entry."
+     }
+    },
+    "metageneration": {
+     "type": "string",
+     "description": "The version of the metadata for this object at this generation. Used for preconditions and for detecting changes in metadata. A metageneration number is only meaningful in the context of a particular generation of a particular object.",
+     "format": "int64"
+    },
+    "name": {
+     "type": "string",
+     "description": "The name of this object. Required if not specified by URL parameter."
+    },
+    "owner": {
+     "type": "object",
+     "description": "The owner of the object. This will always be the uploader of the object.",
+     "properties": {
+      "entity": {
+       "type": "string",
+       "description": "The entity, in the form user-userId."
+      },
+      "entityId": {
+       "type": "string",
+       "description": "The ID for the entity."
+      }
+     }
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "The link to this object."
+    },
+    "size": {
+     "type": "string",
+     "description": "Content-Length of the data in bytes.",
+     "format": "uint64"
+    },
+    "storageClass": {
+     "type": "string",
+     "description": "Storage class of the object."
+    },
+    "timeCreated": {
+     "type": "string",
+     "description": "The creation time of the object in RFC 3339 format.",
+     "format": "date-time"
+    },
+    "timeDeleted": {
+     "type": "string",
+     "description": "The deletion time of the object in RFC 3339 format. Will be returned if and only if this version of the object has been deleted.",
+     "format": "date-time"
+    },
+    "updated": {
+     "type": "string",
+     "description": "The modification time of the object metadata in RFC 3339 format.",
+     "format": "date-time"
+    }
+   }
+  },
+  "ObjectAccessControl": {
+   "id": "ObjectAccessControl",
+   "type": "object",
+   "description": "An access-control entry.",
+   "properties": {
+    "bucket": {
+     "type": "string",
+     "description": "The name of the bucket."
+    },
+    "domain": {
+     "type": "string",
+     "description": "The domain associated with the entity, if any."
+    },
+    "email": {
+     "type": "string",
+     "description": "The email address associated with the entity, if any."
+    },
+    "entity": {
+     "type": "string",
+     "description": "The entity holding the permission, in one of the following forms: \n- user-userId \n- user-email \n- group-groupId \n- group-email \n- domain-domain \n- project-team-projectId \n- allUsers \n- allAuthenticatedUsers Examples: \n- The user liz@example.com would be user-liz@example.com. \n- The group example@googlegroups.com would be group-example@googlegroups.com. \n- To refer to all members of the Google Apps for Business domain example.com, the entity would be domain-example.com."
+    },
+    "entityId": {
+     "type": "string",
+     "description": "The ID for the entity, if any."
+    },
+    "etag": {
+     "type": "string",
+     "description": "HTTP 1.1 Entity tag for the access-control entry."
+    },
+    "generation": {
+     "type": "string",
+     "description": "The content generation of the object.",
+     "format": "int64"
+    },
+    "id": {
+     "type": "string",
+     "description": "The ID of the access-control entry."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For object access control entries, this is always storage#objectAccessControl.",
+     "default": "storage#objectAccessControl"
+    },
+    "object": {
+     "type": "string",
+     "description": "The name of the object."
+    },
+    "projectTeam": {
+     "type": "object",
+     "description": "The project team associated with the entity, if any.",
+     "properties": {
+      "projectNumber": {
+       "type": "string",
+       "description": "The project number."
+      },
+      "team": {
+       "type": "string",
+       "description": "The team. Can be owners, editors, or viewers."
+      }
+     }
+    },
+    "role": {
+     "type": "string",
+     "description": "The access permission for the entity. Can be READER or OWNER."
+    },
+    "selfLink": {
+     "type": "string",
+     "description": "The link to this access-control entry."
+    }
+   }
+  },
+  "ObjectAccessControls": {
+   "id": "ObjectAccessControls",
+   "type": "object",
+   "description": "An access-control list.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "The list of items.",
+     "items": {
+      "type": "any"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For lists of object access control entries, this is always storage#objectAccessControls.",
+     "default": "storage#objectAccessControls"
+    }
+   }
+  },
+  "Objects": {
+   "id": "Objects",
+   "type": "object",
+   "description": "A list of objects.",
+   "properties": {
+    "items": {
+     "type": "array",
+     "description": "The list of items.",
+     "items": {
+      "$ref": "Object"
+     }
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For lists of objects, this is always storage#objects.",
+     "default": "storage#objects"
+    },
+    "nextPageToken": {
+     "type": "string",
+     "description": "The continuation token, used to page through large result sets. Provide this value in a subsequent request to return the next page of results."
+    },
+    "prefixes": {
+     "type": "array",
+     "description": "The list of prefixes of objects matching-but-not-listed up to and including the requested delimiter.",
+     "items": {
+      "type": "string"
+     }
+    }
+   }
+  },
+  "Policy": {
+   "id": "Policy",
+   "type": "object",
+   "description": "A bucket/object IAM policy.",
+   "properties": {
+    "bindings": {
+     "type": "array",
+     "description": "An association between a role, which comes with a set of permissions, and members who may assume that role.",
+     "items": {
+      "type": "object",
+      "properties": {
+       "members": {
+        "type": "array",
+        "description": "A collection of identifiers for members who may assume the provided role. Recognized identifiers are as follows:  \n- allUsers — A special identifier that represents anyone on the internet; with or without a Google account.  \n- allAuthenticatedUsers — A special identifier that represents anyone who is authenticated with a Google account or a service account.  \n- user:emailid — An email address that represents a specific account. For example, user:alice@gmail.com or user:joe@example.com.  \n- serviceAccount:emailid — An email address that represents a service account. For example,  serviceAccount:my-other-app@appspot.gserviceaccount.com .  \n- group:emailid — An email address that represents a Google group. For example, group:admins@example.com.  \n- domain:domain — A Google Apps domain name that represents all the users of that domain. For example, domain:google.com or domain:example.com.  \n- projectOwner:projectid — Owners of the given project. For example, projectOwner:my-example-project  \n- projectEditor:projectid — Editors of the given project. For example, projectEditor:my-example-project  \n- projectViewer:projectid — Viewers of the given project. For example, projectViewer:my-example-project",
+        "items": {
+         "type": "string"
+        },
+        "annotations": {
+         "required": [
+          "storage.buckets.setIamPolicy",
+          "storage.objects.setIamPolicy"
+         ]
+        }
+       },
+       "role": {
+        "type": "string",
+        "description": "The role to which members belong. Two types of roles are supported: new IAM roles, which grant permissions that do not map directly to those provided by ACLs, and legacy IAM roles, which do map directly to ACL permissions. All roles are of the format roles/storage.specificRole.\nThe new IAM roles are:  \n- roles/storage.admin — Full control of Google Cloud Storage resources.  \n- roles/storage.objectViewer — Read-Only access to Google Cloud Storage objects.  \n- roles/storage.objectCreator — Access to create objects in Google Cloud Storage.  \n- roles/storage.objectAdmin — Full control of Google Cloud Storage objects.   The legacy IAM roles are:  \n- roles/storage.legacyObjectReader — Read-only access to objects without listing. Equivalent to an ACL entry on an object with the READER role.  \n- roles/storage.legacyObjectOwner — Read/write access to existing objects without listing. Equivalent to an ACL entry on an object with the OWNER role.  \n- roles/storage.legacyBucketReader — Read access to buckets with object listing. Equivalent to an ACL entry on a bucket with the READER role.  \n- roles/storage.legacyBucketWriter — Read access to buckets with object listing/creation/deletion. Equivalent to an ACL entry on a bucket with the WRITER role.  \n- roles/storage.legacyBucketOwner — Read and write access to existing buckets with object listing/creation/deletion. Equivalent to an ACL entry on a bucket with the OWNER role.",
+        "annotations": {
+         "required": [
+          "storage.buckets.setIamPolicy",
+          "storage.objects.setIamPolicy"
+         ]
+        }
+       }
+      }
+     },
+     "annotations": {
+      "required": [
+       "storage.buckets.setIamPolicy",
+       "storage.objects.setIamPolicy"
+      ]
+     }
+    },
+    "etag": {
+     "type": "string",
+     "description": "HTTP 1.1  Entity tag for the policy.",
+     "format": "byte"
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is. For policies, this is always storage#policy. This field is ignored on input.",
+     "default": "storage#policy"
+    },
+    "resourceId": {
+     "type": "string",
+     "description": "The ID of the resource to which this policy belongs. Will be of the form buckets/bucket for buckets, and buckets/bucket/objects/object for objects. A specific generation may be specified by appending #generationNumber to the end of the object name, e.g. buckets/my-bucket/objects/data.txt#17. The current generation can be denoted with #0. This field is ignored on input."
+    }
+   }
+  },
+  "RewriteResponse": {
+   "id": "RewriteResponse",
+   "type": "object",
+   "description": "A rewrite response.",
+   "properties": {
+    "done": {
+     "type": "boolean",
+     "description": "true if the copy is finished; otherwise, false if the copy is in progress. This property is always present in the response."
+    },
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is.",
+     "default": "storage#rewriteResponse"
+    },
+    "objectSize": {
+     "type": "string",
+     "description": "The total size of the object being copied in bytes. This property is always present in the response.",
+     "format": "uint64"
+    },
+    "resource": {
+     "$ref": "Object",
+     "description": "A resource containing the metadata for the copied-to object. This property is present in the response only when copying completes."
+    },
+    "rewriteToken": {
+     "type": "string",
+     "description": "A token to use in subsequent requests to continue copying data. This token is present in the response only when there is more data to copy."
+    },
+    "totalBytesRewritten": {
+     "type": "string",
+     "description": "The total bytes written so far, which can be used to provide a waiting user with a progress indicator. This property is always present in the response.",
+     "format": "uint64"
+    }
+   }
+  },
+  "TestIamPermissionsResponse": {
+   "id": "TestIamPermissionsResponse",
+   "type": "object",
+   "description": "A storage.(buckets|objects).testIamPermissions response.",
+   "properties": {
+    "kind": {
+     "type": "string",
+     "description": "The kind of item this is.",
+     "default": "storage#testIamPermissionsResponse"
+    },
+    "permissions": {
+     "type": "array",
+     "description": "The permissions held by the caller. Permissions are always of the format storage.resource.capability, where resource is one of buckets or objects. The supported permissions are as follows:  \n- storage.buckets.delete — Delete bucket.  \n- storage.buckets.get — Read bucket metadata.  \n- storage.buckets.getIamPolicy — Read bucket IAM policy.  \n- storage.buckets.create — Create bucket.  \n- storage.buckets.list — List buckets.  \n- storage.buckets.setIamPolicy — Update bucket IAM policy.  \n- storage.buckets.update — Update bucket metadata.  \n- storage.objects.delete — Delete object.  \n- storage.objects.get — Read object data and metadata.  \n- storage.objects.getIamPolicy — Read object IAM policy.  \n- storage.objects.create — Create object.  \n- storage.objects.list — List objects.  \n- storage.objects.setIamPolicy — Update object IAM policy.  \n- storage.objects.update — Update object metadata.",
+     "items": {
+      "type": "string"
+     }
+    }
+   }
+  }
+ },
+ "resources": {
+  "bucketAccessControls": {
+   "methods": {
+    "delete": {
+     "id": "storage.bucketAccessControls.delete",
+     "path": "b/{bucket}/acl/{entity}",
+     "httpMethod": "DELETE",
+     "description": "Permanently deletes the ACL entry for the specified entity on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "entity"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "get": {
+     "id": "storage.bucketAccessControls.get",
+     "path": "b/{bucket}/acl/{entity}",
+     "httpMethod": "GET",
+     "description": "Returns the ACL entry for the specified entity on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "entity"
+     ],
+     "response": {
+      "$ref": "BucketAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "insert": {
+     "id": "storage.bucketAccessControls.insert",
+     "path": "b/{bucket}/acl",
+     "httpMethod": "POST",
+     "description": "Creates a new ACL entry on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "request": {
+      "$ref": "BucketAccessControl"
+     },
+     "response": {
+      "$ref": "BucketAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "list": {
+     "id": "storage.bucketAccessControls.list",
+     "path": "b/{bucket}/acl",
+     "httpMethod": "GET",
+     "description": "Retrieves ACL entries on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "response": {
+      "$ref": "BucketAccessControls"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "patch": {
+     "id": "storage.bucketAccessControls.patch",
+     "path": "b/{bucket}/acl/{entity}",
+     "httpMethod": "PATCH",
+     "description": "Updates an ACL entry on the specified bucket. This method supports patch semantics.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "entity"
+     ],
+     "request": {
+      "$ref": "BucketAccessControl"
+     },
+     "response": {
+      "$ref": "BucketAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "update": {
+     "id": "storage.bucketAccessControls.update",
+     "path": "b/{bucket}/acl/{entity}",
+     "httpMethod": "PUT",
+     "description": "Updates an ACL entry on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "entity"
+     ],
+     "request": {
+      "$ref": "BucketAccessControl"
+     },
+     "response": {
+      "$ref": "BucketAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    }
+   }
+  },
+  "buckets": {
+   "methods": {
+    "delete": {
+     "id": "storage.buckets.delete",
+     "path": "b/{bucket}",
+     "httpMethod": "DELETE",
+     "description": "Permanently deletes an empty bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "If set, only deletes the bucket if its metageneration matches this value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "If set, only deletes the bucket if its metageneration does not match this value.",
+       "format": "int64",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "get": {
+     "id": "storage.buckets.get",
+     "path": "b/{bucket}",
+     "httpMethod": "GET",
+     "description": "Returns metadata for the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the return of the bucket metadata conditional on whether the bucket's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the return of the bucket metadata conditional on whether the bucket's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit owner, acl and defaultObjectAcl properties."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "response": {
+      "$ref": "Bucket"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "getIamPolicy": {
+     "id": "storage.buckets.getIamPolicy",
+     "path": "b/{bucket}/iam",
+     "httpMethod": "GET",
+     "description": "Returns an IAM policy for the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "response": {
+      "$ref": "Policy"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "insert": {
+     "id": "storage.buckets.insert",
+     "path": "b",
+     "httpMethod": "POST",
+     "description": "Creates a new bucket.",
+     "parameters": {
+      "predefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to this bucket.",
+       "enum": [
+        "authenticatedRead",
+        "private",
+        "projectPrivate",
+        "publicRead",
+        "publicReadWrite"
+       ],
+       "enumDescriptions": [
+        "Project team owners get OWNER access, and allAuthenticatedUsers get READER access.",
+        "Project team owners get OWNER access.",
+        "Project team members get access according to their roles.",
+        "Project team owners get OWNER access, and allUsers get READER access.",
+        "Project team owners get OWNER access, and allUsers get WRITER access."
+       ],
+       "location": "query"
+      },
+      "predefinedDefaultObjectAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of default object access controls to this bucket.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "project": {
+       "type": "string",
+       "description": "A valid API project identifier.",
+       "required": true,
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl, unless the bucket resource specifies acl or defaultObjectAcl properties, when it defaults to full.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit owner, acl and defaultObjectAcl properties."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "project"
+     ],
+     "request": {
+      "$ref": "Bucket"
+     },
+     "response": {
+      "$ref": "Bucket"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "list": {
+     "id": "storage.buckets.list",
+     "path": "b",
+     "httpMethod": "GET",
+     "description": "Retrieves a list of buckets for a given project.",
+     "parameters": {
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of buckets to return.",
+       "format": "uint32",
+       "minimum": "0",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "A previously-returned page token representing part of the larger set of results to view.",
+       "location": "query"
+      },
+      "prefix": {
+       "type": "string",
+       "description": "Filter results to buckets whose names begin with this prefix.",
+       "location": "query"
+      },
+      "project": {
+       "type": "string",
+       "description": "A valid API project identifier.",
+       "required": true,
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit owner, acl and defaultObjectAcl properties."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "project"
+     ],
+     "response": {
+      "$ref": "Buckets"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "patch": {
+     "id": "storage.buckets.patch",
+     "path": "b/{bucket}",
+     "httpMethod": "PATCH",
+     "description": "Updates a bucket. This method supports patch semantics.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the return of the bucket metadata conditional on whether the bucket's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the return of the bucket metadata conditional on whether the bucket's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "predefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to this bucket.",
+       "enum": [
+        "authenticatedRead",
+        "private",
+        "projectPrivate",
+        "publicRead",
+        "publicReadWrite"
+       ],
+       "enumDescriptions": [
+        "Project team owners get OWNER access, and allAuthenticatedUsers get READER access.",
+        "Project team owners get OWNER access.",
+        "Project team members get access according to their roles.",
+        "Project team owners get OWNER access, and allUsers get READER access.",
+        "Project team owners get OWNER access, and allUsers get WRITER access."
+       ],
+       "location": "query"
+      },
+      "predefinedDefaultObjectAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of default object access controls to this bucket.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to full.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit owner, acl and defaultObjectAcl properties."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "request": {
+      "$ref": "Bucket"
+     },
+     "response": {
+      "$ref": "Bucket"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "setIamPolicy": {
+     "id": "storage.buckets.setIamPolicy",
+     "path": "b/{bucket}/iam",
+     "httpMethod": "PUT",
+     "description": "Updates an IAM policy for the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "request": {
+      "$ref": "Policy"
+     },
+     "response": {
+      "$ref": "Policy"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "testIamPermissions": {
+     "id": "storage.buckets.testIamPermissions",
+     "path": "b/{bucket}/iam/testPermissions",
+     "httpMethod": "GET",
+     "description": "Tests a set of permissions on the given bucket to see which, if any, are held by the caller.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "permissions": {
+       "type": "string",
+       "description": "Permissions to test.",
+       "required": true,
+       "repeated": true,
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "permissions"
+     ],
+     "response": {
+      "$ref": "TestIamPermissionsResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "update": {
+     "id": "storage.buckets.update",
+     "path": "b/{bucket}",
+     "httpMethod": "PUT",
+     "description": "Updates a bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the return of the bucket metadata conditional on whether the bucket's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the return of the bucket metadata conditional on whether the bucket's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "predefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to this bucket.",
+       "enum": [
+        "authenticatedRead",
+        "private",
+        "projectPrivate",
+        "publicRead",
+        "publicReadWrite"
+       ],
+       "enumDescriptions": [
+        "Project team owners get OWNER access, and allAuthenticatedUsers get READER access.",
+        "Project team owners get OWNER access.",
+        "Project team members get access according to their roles.",
+        "Project team owners get OWNER access, and allUsers get READER access.",
+        "Project team owners get OWNER access, and allUsers get WRITER access."
+       ],
+       "location": "query"
+      },
+      "predefinedDefaultObjectAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of default object access controls to this bucket.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to full.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit owner, acl and defaultObjectAcl properties."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "request": {
+      "$ref": "Bucket"
+     },
+     "response": {
+      "$ref": "Bucket"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    }
+   }
+  },
+  "channels": {
+   "methods": {
+    "stop": {
+     "id": "storage.channels.stop",
+     "path": "channels/stop",
+     "httpMethod": "POST",
+     "description": "Stop watching resources through this channel",
+     "request": {
+      "$ref": "Channel",
+      "parameterName": "resource"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    }
+   }
+  },
+  "defaultObjectAccessControls": {
+   "methods": {
+    "delete": {
+     "id": "storage.defaultObjectAccessControls.delete",
+     "path": "b/{bucket}/defaultObjectAcl/{entity}",
+     "httpMethod": "DELETE",
+     "description": "Permanently deletes the default object ACL entry for the specified entity on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "entity"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "get": {
+     "id": "storage.defaultObjectAccessControls.get",
+     "path": "b/{bucket}/defaultObjectAcl/{entity}",
+     "httpMethod": "GET",
+     "description": "Returns the default object ACL entry for the specified entity on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "entity"
+     ],
+     "response": {
+      "$ref": "ObjectAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "insert": {
+     "id": "storage.defaultObjectAccessControls.insert",
+     "path": "b/{bucket}/defaultObjectAcl",
+     "httpMethod": "POST",
+     "description": "Creates a new default object ACL entry on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "request": {
+      "$ref": "ObjectAccessControl"
+     },
+     "response": {
+      "$ref": "ObjectAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "list": {
+     "id": "storage.defaultObjectAccessControls.list",
+     "path": "b/{bucket}/defaultObjectAcl",
+     "httpMethod": "GET",
+     "description": "Retrieves default object ACL entries on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "If present, only return default ACL listing if the bucket's current metageneration matches this value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "If present, only return default ACL listing if the bucket's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "response": {
+      "$ref": "ObjectAccessControls"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "patch": {
+     "id": "storage.defaultObjectAccessControls.patch",
+     "path": "b/{bucket}/defaultObjectAcl/{entity}",
+     "httpMethod": "PATCH",
+     "description": "Updates a default object ACL entry on the specified bucket. This method supports patch semantics.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "entity"
+     ],
+     "request": {
+      "$ref": "ObjectAccessControl"
+     },
+     "response": {
+      "$ref": "ObjectAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "update": {
+     "id": "storage.defaultObjectAccessControls.update",
+     "path": "b/{bucket}/defaultObjectAcl/{entity}",
+     "httpMethod": "PUT",
+     "description": "Updates a default object ACL entry on the specified bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "entity"
+     ],
+     "request": {
+      "$ref": "ObjectAccessControl"
+     },
+     "response": {
+      "$ref": "ObjectAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    }
+   }
+  },
+  "notifications": {
+   "methods": {
+    "delete": {
+     "id": "storage.notifications.delete",
+     "path": "notifications/{notification}",
+     "httpMethod": "DELETE",
+     "description": "Permanently deletes a notification subscription.",
+     "parameters": {
+      "notification": {
+       "type": "string",
+       "description": "ID of the notification to delete.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "notification"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "get": {
+     "id": "storage.notifications.get",
+     "path": "notifications/{notification}",
+     "httpMethod": "GET",
+     "description": "View a notification configuration.",
+     "parameters": {
+      "notification": {
+       "type": "string",
+       "description": "Notification ID",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "notification"
+     ],
+     "response": {
+      "$ref": "Notification"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "insert": {
+     "id": "storage.notifications.insert",
+     "path": "notifications",
+     "httpMethod": "POST",
+     "description": "Creates a notification subscription for a given bucket.",
+     "request": {
+      "$ref": "Notification"
+     },
+     "response": {
+      "$ref": "Notification"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "list": {
+     "id": "storage.notifications.list",
+     "path": "notifications",
+     "httpMethod": "GET",
+     "description": "Retrieves a list of notification subscriptions for a given bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a GCS bucket.",
+       "required": true,
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "response": {
+      "$ref": "Notifications"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    }
+   }
+  },
+  "objectAccessControls": {
+   "methods": {
+    "delete": {
+     "id": "storage.objectAccessControls.delete",
+     "path": "b/{bucket}/o/{object}/acl/{entity}",
+     "httpMethod": "DELETE",
+     "description": "Permanently deletes the ACL entry for the specified entity on the specified object.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object",
+      "entity"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "get": {
+     "id": "storage.objectAccessControls.get",
+     "path": "b/{bucket}/o/{object}/acl/{entity}",
+     "httpMethod": "GET",
+     "description": "Returns the ACL entry for the specified entity on the specified object.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object",
+      "entity"
+     ],
+     "response": {
+      "$ref": "ObjectAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "insert": {
+     "id": "storage.objectAccessControls.insert",
+     "path": "b/{bucket}/o/{object}/acl",
+     "httpMethod": "POST",
+     "description": "Creates a new ACL entry on the specified object.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object"
+     ],
+     "request": {
+      "$ref": "ObjectAccessControl"
+     },
+     "response": {
+      "$ref": "ObjectAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "list": {
+     "id": "storage.objectAccessControls.list",
+     "path": "b/{bucket}/o/{object}/acl",
+     "httpMethod": "GET",
+     "description": "Retrieves ACL entries on the specified object.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object"
+     ],
+     "response": {
+      "$ref": "ObjectAccessControls"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "patch": {
+     "id": "storage.objectAccessControls.patch",
+     "path": "b/{bucket}/o/{object}/acl/{entity}",
+     "httpMethod": "PATCH",
+     "description": "Updates an ACL entry on the specified object. This method supports patch semantics.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object",
+      "entity"
+     ],
+     "request": {
+      "$ref": "ObjectAccessControl"
+     },
+     "response": {
+      "$ref": "ObjectAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "update": {
+     "id": "storage.objectAccessControls.update",
+     "path": "b/{bucket}/o/{object}/acl/{entity}",
+     "httpMethod": "PUT",
+     "description": "Updates an ACL entry on the specified object.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of a bucket.",
+       "required": true,
+       "location": "path"
+      },
+      "entity": {
+       "type": "string",
+       "description": "The entity holding the permission. Can be user-userId, user-emailAddress, group-groupId, group-emailAddress, allUsers, or allAuthenticatedUsers.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object",
+      "entity"
+     ],
+     "request": {
+      "$ref": "ObjectAccessControl"
+     },
+     "response": {
+      "$ref": "ObjectAccessControl"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    }
+   }
+  },
+  "objects": {
+   "methods": {
+    "compose": {
+     "id": "storage.objects.compose",
+     "path": "b/{destinationBucket}/o/{destinationObject}/compose",
+     "httpMethod": "POST",
+     "description": "Concatenates a list of existing objects into a new object in the same bucket.",
+     "parameters": {
+      "destinationBucket": {
+       "type": "string",
+       "description": "Name of the bucket in which to store the new object.",
+       "required": true,
+       "location": "path"
+      },
+      "destinationObject": {
+       "type": "string",
+       "description": "Name of the new object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      },
+      "destinationPredefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to the destination object.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "ifGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "destinationBucket",
+      "destinationObject"
+     ],
+     "request": {
+      "$ref": "ComposeRequest"
+     },
+     "response": {
+      "$ref": "Object"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ],
+     "supportsMediaDownload": true
+    },
+    "copy": {
+     "id": "storage.objects.copy",
+     "path": "b/{sourceBucket}/o/{sourceObject}/copyTo/b/{destinationBucket}/o/{destinationObject}",
+     "httpMethod": "POST",
+     "description": "Copies a source object to a destination object. Optionally overrides metadata.",
+     "parameters": {
+      "destinationBucket": {
+       "type": "string",
+       "description": "Name of the bucket in which to store the new object. Overrides the provided object metadata's bucket value, if any.For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      },
+      "destinationObject": {
+       "type": "string",
+       "description": "Name of the new object. Required when the object metadata is not otherwise provided. Overrides the object metadata's name value, if any.",
+       "required": true,
+       "location": "path"
+      },
+      "destinationPredefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to the destination object.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "ifGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the destination object's current generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the destination object's current generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the destination object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the destination object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifSourceGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the source object's generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifSourceGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the source object's generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifSourceMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the source object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifSourceMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the source object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl, unless the object resource specifies the acl property, when it defaults to full.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit the owner, acl property."
+       ],
+       "location": "query"
+      },
+      "sourceBucket": {
+       "type": "string",
+       "description": "Name of the bucket in which to find the source object.",
+       "required": true,
+       "location": "path"
+      },
+      "sourceGeneration": {
+       "type": "string",
+       "description": "If present, selects a specific revision of the source object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "sourceObject": {
+       "type": "string",
+       "description": "Name of the source object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "sourceBucket",
+      "sourceObject",
+      "destinationBucket",
+      "destinationObject"
+     ],
+     "request": {
+      "$ref": "Object"
+     },
+     "response": {
+      "$ref": "Object"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ],
+     "supportsMediaDownload": true
+    },
+    "delete": {
+     "id": "storage.objects.delete",
+     "path": "b/{bucket}/o/{object}",
+     "httpMethod": "DELETE",
+     "description": "Deletes an object and its metadata. Deletions are permanent if versioning is not enabled for the bucket, or if the generation parameter is used.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which the object resides.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, permanently deletes a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object"
+     ],
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "get": {
+     "id": "storage.objects.get",
+     "path": "b/{bucket}/o/{object}",
+     "httpMethod": "GET",
+     "description": "Retrieves an object or its metadata.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which the object resides.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit the owner, acl property."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object"
+     ],
+     "response": {
+      "$ref": "Object"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ],
+     "supportsMediaDownload": true
+    },
+    "getIamPolicy": {
+     "id": "storage.objects.getIamPolicy",
+     "path": "b/{bucket}/o/{object}/iam",
+     "httpMethod": "GET",
+     "description": "Returns an IAM policy for the specified object.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which the object resides.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object"
+     ],
+     "response": {
+      "$ref": "Policy"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "insert": {
+     "id": "storage.objects.insert",
+     "path": "b/{bucket}/o",
+     "httpMethod": "POST",
+     "description": "Stores a new object and metadata.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which to store the new object. Overrides the provided object metadata's bucket value, if any.",
+       "required": true,
+       "location": "path"
+      },
+      "contentEncoding": {
+       "type": "string",
+       "description": "If set, sets the contentEncoding property of the final object to this value. Setting this parameter is equivalent to setting the contentEncoding metadata property. This can be useful when uploading an object with uploadType=media to indicate the encoding of the content being uploaded.",
+       "location": "query"
+      },
+      "ifGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "name": {
+       "type": "string",
+       "description": "Name of the object. Required when the object metadata is not otherwise provided. Overrides the object metadata's name value, if any. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "location": "query"
+      },
+      "predefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to this object.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl, unless the object resource specifies the acl property, when it defaults to full.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit the owner, acl property."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "request": {
+      "$ref": "Object"
+     },
+     "response": {
+      "$ref": "Object"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ],
+     "supportsMediaDownload": true,
+     "supportsMediaUpload": true,
+     "mediaUpload": {
+      "accept": [
+       "*/*"
+      ],
+      "protocols": {
+       "simple": {
+        "multipart": true,
+        "path": "/upload/storage/v1/b/{bucket}/o"
+       },
+       "resumable": {
+        "multipart": true,
+        "path": "/resumable/upload/storage/v1/b/{bucket}/o"
+       }
+      }
+     }
+    },
+    "list": {
+     "id": "storage.objects.list",
+     "path": "b/{bucket}/o",
+     "httpMethod": "GET",
+     "description": "Retrieves a list of objects matching the criteria.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which to look for objects.",
+       "required": true,
+       "location": "path"
+      },
+      "delimiter": {
+       "type": "string",
+       "description": "Returns results in a directory-like mode. items will contain only objects whose names, aside from the prefix, do not contain delimiter. Objects whose names, aside from the prefix, contain delimiter will have their name, truncated after the delimiter, returned in prefixes. Duplicate prefixes are omitted.",
+       "location": "query"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of items plus prefixes to return. As duplicate prefixes are omitted, fewer total results may be returned than requested. The default value of this parameter is 1,000 items.",
+       "format": "uint32",
+       "minimum": "0",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "A previously-returned page token representing part of the larger set of results to view.",
+       "location": "query"
+      },
+      "prefix": {
+       "type": "string",
+       "description": "Filter results to objects whose names begin with this prefix.",
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit the owner, acl property."
+       ],
+       "location": "query"
+      },
+      "versions": {
+       "type": "boolean",
+       "description": "If true, lists all versions of an object as distinct results. The default is false. For more information, see Object Versioning.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "response": {
+      "$ref": "Objects"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ],
+     "supportsSubscription": true
+    },
+    "patch": {
+     "id": "storage.objects.patch",
+     "path": "b/{bucket}/o/{object}",
+     "httpMethod": "PATCH",
+     "description": "Updates an object's metadata. This method supports patch semantics.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which the object resides.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      },
+      "predefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to this object.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to full.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit the owner, acl property."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object"
+     ],
+     "request": {
+      "$ref": "Object"
+     },
+     "response": {
+      "$ref": "Object"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ]
+    },
+    "rewrite": {
+     "id": "storage.objects.rewrite",
+     "path": "b/{sourceBucket}/o/{sourceObject}/rewriteTo/b/{destinationBucket}/o/{destinationObject}",
+     "httpMethod": "POST",
+     "description": "Rewrites a source object to a destination object. Optionally overrides metadata.",
+     "parameters": {
+      "destinationBucket": {
+       "type": "string",
+       "description": "Name of the bucket in which to store the new object. Overrides the provided object metadata's bucket value, if any.",
+       "required": true,
+       "location": "path"
+      },
+      "destinationObject": {
+       "type": "string",
+       "description": "Name of the new object. Required when the object metadata is not otherwise provided. Overrides the object metadata's name value, if any. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      },
+      "destinationPredefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to the destination object.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "ifGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the destination object's current generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the destination object's current generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the destination object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the destination object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifSourceGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the source object's generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifSourceGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the source object's generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifSourceMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the source object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifSourceMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the source object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "maxBytesRewrittenPerCall": {
+       "type": "string",
+       "description": "The maximum number of bytes that will be rewritten per rewrite request. Most callers shouldn't need to specify this parameter - it is primarily in place to support testing. If specified the value must be an integral multiple of 1 MiB (1048576). Also, this only applies to requests where the source and destination span locations and/or storage classes. Finally, this value must not change across rewrite calls else you'll get an error that the rewriteToken is invalid.",
+       "format": "int64",
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl, unless the object resource specifies the acl property, when it defaults to full.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit the owner, acl property."
+       ],
+       "location": "query"
+      },
+      "rewriteToken": {
+       "type": "string",
+       "description": "Include this field (from the previous rewrite response) on each rewrite request after the first one, until the rewrite response 'done' flag is true. Calls that provide a rewriteToken can omit all other request fields, but if included those fields must match the values provided in the first rewrite request.",
+       "location": "query"
+      },
+      "sourceBucket": {
+       "type": "string",
+       "description": "Name of the bucket in which to find the source object.",
+       "required": true,
+       "location": "path"
+      },
+      "sourceGeneration": {
+       "type": "string",
+       "description": "If present, selects a specific revision of the source object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "sourceObject": {
+       "type": "string",
+       "description": "Name of the source object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "sourceBucket",
+      "sourceObject",
+      "destinationBucket",
+      "destinationObject"
+     ],
+     "request": {
+      "$ref": "Object"
+     },
+     "response": {
+      "$ref": "RewriteResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "setIamPolicy": {
+     "id": "storage.objects.setIamPolicy",
+     "path": "b/{bucket}/o/{object}/iam",
+     "httpMethod": "PUT",
+     "description": "Updates an IAM policy for the specified object.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which the object resides.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object"
+     ],
+     "request": {
+      "$ref": "Policy"
+     },
+     "response": {
+      "$ref": "Policy"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "testIamPermissions": {
+     "id": "storage.objects.testIamPermissions",
+     "path": "b/{bucket}/o/{object}/iam/testPermissions",
+     "httpMethod": "GET",
+     "description": "Tests a set of permissions on the given object to see which, if any, are held by the caller.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which the object resides.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      },
+      "permissions": {
+       "type": "string",
+       "description": "Permissions to test.",
+       "required": true,
+       "repeated": true,
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object",
+      "permissions"
+     ],
+     "response": {
+      "$ref": "TestIamPermissionsResponse"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ]
+    },
+    "update": {
+     "id": "storage.objects.update",
+     "path": "b/{bucket}/o/{object}",
+     "httpMethod": "PUT",
+     "description": "Updates an object's metadata.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which the object resides.",
+       "required": true,
+       "location": "path"
+      },
+      "generation": {
+       "type": "string",
+       "description": "If present, selects a specific revision of this object (as opposed to the latest version, the default).",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifGenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current generation does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration matches the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "ifMetagenerationNotMatch": {
+       "type": "string",
+       "description": "Makes the operation conditional on whether the object's current metageneration does not match the given value.",
+       "format": "int64",
+       "location": "query"
+      },
+      "object": {
+       "type": "string",
+       "description": "Name of the object. For information about how to URL encode object names to be path safe, see Encoding URI Path Parts.",
+       "required": true,
+       "location": "path"
+      },
+      "predefinedAcl": {
+       "type": "string",
+       "description": "Apply a predefined set of access controls to this object.",
+       "enum": [
+        "authenticatedRead",
+        "bucketOwnerFullControl",
+        "bucketOwnerRead",
+        "private",
+        "projectPrivate",
+        "publicRead"
+       ],
+       "enumDescriptions": [
+        "Object owner gets OWNER access, and allAuthenticatedUsers get READER access.",
+        "Object owner gets OWNER access, and project team owners get OWNER access.",
+        "Object owner gets OWNER access, and project team owners get READER access.",
+        "Object owner gets OWNER access.",
+        "Object owner gets OWNER access, and project team members get access according to their roles.",
+        "Object owner gets OWNER access, and allUsers get READER access."
+       ],
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to full.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit the owner, acl property."
+       ],
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket",
+      "object"
+     ],
+     "request": {
+      "$ref": "Object"
+     },
+     "response": {
+      "$ref": "Object"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/devstorage.full_control"
+     ],
+     "supportsMediaDownload": true
+    },
+    "watchAll": {
+     "id": "storage.objects.watchAll",
+     "path": "b/{bucket}/o/watch",
+     "httpMethod": "POST",
+     "description": "Watch for changes on all objects in a bucket.",
+     "parameters": {
+      "bucket": {
+       "type": "string",
+       "description": "Name of the bucket in which to look for objects.",
+       "required": true,
+       "location": "path"
+      },
+      "delimiter": {
+       "type": "string",
+       "description": "Returns results in a directory-like mode. items will contain only objects whose names, aside from the prefix, do not contain delimiter. Objects whose names, aside from the prefix, contain delimiter will have their name, truncated after the delimiter, returned in prefixes. Duplicate prefixes are omitted.",
+       "location": "query"
+      },
+      "maxResults": {
+       "type": "integer",
+       "description": "Maximum number of items plus prefixes to return. As duplicate prefixes are omitted, fewer total results may be returned than requested. The default value of this parameter is 1,000 items.",
+       "format": "uint32",
+       "minimum": "0",
+       "location": "query"
+      },
+      "pageToken": {
+       "type": "string",
+       "description": "A previously-returned page token representing part of the larger set of results to view.",
+       "location": "query"
+      },
+      "prefix": {
+       "type": "string",
+       "description": "Filter results to objects whose names begin with this prefix.",
+       "location": "query"
+      },
+      "projection": {
+       "type": "string",
+       "description": "Set of properties to return. Defaults to noAcl.",
+       "enum": [
+        "full",
+        "noAcl"
+       ],
+       "enumDescriptions": [
+        "Include all properties.",
+        "Omit the owner, acl property."
+       ],
+       "location": "query"
+      },
+      "versions": {
+       "type": "boolean",
+       "description": "If true, lists all versions of an object as distinct results. The default is false. For more information, see Object Versioning.",
+       "location": "query"
+      }
+     },
+     "parameterOrder": [
+      "bucket"
+     ],
+     "request": {
+      "$ref": "Channel",
+      "parameterName": "resource"
+     },
+     "response": {
+      "$ref": "Channel"
+     },
+     "scopes": [
+      "https://www.googleapis.com/auth/cloud-platform",
+      "https://www.googleapis.com/auth/cloud-platform.read-only",
+      "https://www.googleapis.com/auth/devstorage.full_control",
+      "https://www.googleapis.com/auth/devstorage.read_only",
+      "https://www.googleapis.com/auth/devstorage.read_write"
+     ],
+     "supportsSubscription": true
+    }
+   }
+  }
+ }
+}
diff --git a/samples/storage_sample/storage_v1/__init__.py b/samples/storage_sample/storage_v1/__init__.py
new file mode 100644
index 0000000..2816da8
--- /dev/null
+++ b/samples/storage_sample/storage_v1/__init__.py
@@ -0,0 +1,5 @@
+"""Package marker file."""
+
+import pkgutil
+
+__path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/samples/storage_sample/storage_v1/storage_v1.py b/samples/storage_sample/storage_v1/storage_v1.py
new file mode 100644
index 0000000..d7cff48
--- /dev/null
+++ b/samples/storage_sample/storage_v1/storage_v1.py
@@ -0,0 +1,3578 @@
+#!/usr/bin/env python
+"""CLI for storage, version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+import code
+import os
+import platform
+import sys
+
+from apitools.base.protorpclite import message_types
+from apitools.base.protorpclite import messages
+
+from google.apputils import appcommands
+import gflags as flags
+
+import apitools.base.py as apitools_base
+from apitools.base.py import cli as apitools_base_cli
+import storage_v1_client as client_lib
+import storage_v1_messages as messages
+
+
+def _DeclareStorageFlags():
+  """Declare global flags in an idempotent way."""
+  if 'api_endpoint' in flags.FLAGS:
+    return
+  flags.DEFINE_string(
+      'api_endpoint',
+      u'https://www.googleapis.com/storage/v1/',
+      'URL of the API endpoint to use.',
+      short_name='storage_url')
+  flags.DEFINE_string(
+      'history_file',
+      u'~/.storage.v1.history',
+      'File with interactive shell history.')
+  flags.DEFINE_multistring(
+      'add_header', [],
+      'Additional http headers (as key=value strings). '
+      'Can be specified multiple times.')
+  flags.DEFINE_string(
+      'service_account_json_keyfile', '',
+      'Filename for a JSON service account key downloaded'
+      ' from the Developer Console.')
+  flags.DEFINE_enum(
+      'alt',
+      u'json',
+      [u'json'],
+      u'Data format for the response.')
+  flags.DEFINE_string(
+      'fields',
+      None,
+      u'Selector specifying which fields to include in a partial response.')
+  flags.DEFINE_string(
+      'key',
+      None,
+      u'API key. Your API key identifies your project and provides you with '
+      u'API access, quota, and reports. Required unless you provide an OAuth '
+      u'2.0 token.')
+  flags.DEFINE_string(
+      'oauth_token',
+      None,
+      u'OAuth 2.0 token for the current user.')
+  flags.DEFINE_boolean(
+      'prettyPrint',
+      'True',
+      u'Returns response with indentations and line breaks.')
+  flags.DEFINE_string(
+      'quotaUser',
+      None,
+      u'Available to use for quota purposes for server-side applications. Can'
+      u' be any arbitrary string assigned to a user, but should not exceed 40'
+      u' characters. Overrides userIp if both are provided.')
+  flags.DEFINE_string(
+      'trace',
+      None,
+      'A tracing token of the form "token:<tokenid>" to include in api '
+      'requests.')
+  flags.DEFINE_string(
+      'userIp',
+      None,
+      u'IP address of the site where the request originates. Use this if you '
+      u'want to enforce per-user limits.')
+
+
+FLAGS = flags.FLAGS
+apitools_base_cli.DeclareBaseFlags()
+_DeclareStorageFlags()
+
+
+def GetGlobalParamsFromFlags():
+  """Return a StandardQueryParameters based on flags."""
+  result = messages.StandardQueryParameters()
+  if FLAGS['alt'].present:
+    result.alt = messages.StandardQueryParameters.AltValueValuesEnum(FLAGS.alt)
+  if FLAGS['fields'].present:
+    result.fields = FLAGS.fields.decode('utf8')
+  if FLAGS['key'].present:
+    result.key = FLAGS.key.decode('utf8')
+  if FLAGS['oauth_token'].present:
+    result.oauth_token = FLAGS.oauth_token.decode('utf8')
+  if FLAGS['prettyPrint'].present:
+    result.prettyPrint = FLAGS.prettyPrint
+  if FLAGS['quotaUser'].present:
+    result.quotaUser = FLAGS.quotaUser.decode('utf8')
+  if FLAGS['trace'].present:
+    result.trace = FLAGS.trace.decode('utf8')
+  if FLAGS['userIp'].present:
+    result.userIp = FLAGS.userIp.decode('utf8')
+  return result
+
+
+def GetClientFromFlags():
+  """Return a client object, configured from flags."""
+  log_request = FLAGS.log_request or FLAGS.log_request_response
+  log_response = FLAGS.log_response or FLAGS.log_request_response
+  api_endpoint = apitools_base.NormalizeApiEndpoint(FLAGS.api_endpoint)
+  additional_http_headers = dict(x.split('=', 1) for x in FLAGS.add_header)
+  credentials_args = {
+      'service_account_json_keyfile': os.path.expanduser(FLAGS.service_account_json_keyfile)
+  }
+  try:
+    client = client_lib.StorageV1(
+        api_endpoint, log_request=log_request,
+        log_response=log_response,
+        credentials_args=credentials_args,
+        additional_http_headers=additional_http_headers)
+  except apitools_base.CredentialsError as e:
+    print 'Error creating credentials: %s' % e
+    sys.exit(1)
+  return client
+
+
+class PyShell(appcommands.Cmd):
+
+  def Run(self, _):
+    """Run an interactive python shell with the client."""
+    client = GetClientFromFlags()
+    params = GetGlobalParamsFromFlags()
+    for field in params.all_fields():
+      value = params.get_assigned_value(field.name)
+      if value != field.default:
+        client.AddGlobalParam(field.name, value)
+    banner = """
+           == storage interactive console ==
+                 client: a storage client
+          apitools_base: base apitools module
+         messages: the generated messages module
+    """
+    local_vars = {
+        'apitools_base': apitools_base,
+        'client': client,
+        'client_lib': client_lib,
+        'messages': messages,
+    }
+    if platform.system() == 'Linux':
+      console = apitools_base_cli.ConsoleWithReadline(
+          local_vars, histfile=FLAGS.history_file)
+    else:
+      console = code.InteractiveConsole(local_vars)
+    try:
+      console.interact(banner)
+    except SystemExit as e:
+      return e.code
+
+
+class BucketAccessControlsDelete(apitools_base_cli.NewCmd):
+  """Command wrapping bucketAccessControls.Delete."""
+
+  usage = """bucketAccessControls_delete <bucket> <entity>"""
+
+  def __init__(self, name, fv):
+    super(BucketAccessControlsDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, bucket, entity):
+    """Permanently deletes the ACL entry for the specified entity on the
+    specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+      entity: The entity holding the permission. Can be user-userId, user-
+        emailAddress, group-groupId, group-emailAddress, allUsers, or
+        allAuthenticatedUsers.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketAccessControlsDeleteRequest(
+        bucket=bucket.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    result = client.bucketAccessControls.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketAccessControlsGet(apitools_base_cli.NewCmd):
+  """Command wrapping bucketAccessControls.Get."""
+
+  usage = """bucketAccessControls_get <bucket> <entity>"""
+
+  def __init__(self, name, fv):
+    super(BucketAccessControlsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, bucket, entity):
+    """Returns the ACL entry for the specified entity on the specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+      entity: The entity holding the permission. Can be user-userId, user-
+        emailAddress, group-groupId, group-emailAddress, allUsers, or
+        allAuthenticatedUsers.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketAccessControlsGetRequest(
+        bucket=bucket.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    result = client.bucketAccessControls.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketAccessControlsInsert(apitools_base_cli.NewCmd):
+  """Command wrapping bucketAccessControls.Insert."""
+
+  usage = """bucketAccessControls_insert <bucket>"""
+
+  def __init__(self, name, fv):
+    super(BucketAccessControlsInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'domain',
+        None,
+        u'The domain associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'email',
+        None,
+        u'The email address associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'entity',
+        None,
+        u'The entity holding the permission, in one of the following forms:  '
+        u'- user-userId  - user-email  - group-groupId  - group-email  - '
+        u'domain-domain  - project-team-projectId  - allUsers  - '
+        u'allAuthenticatedUsers Examples:  - The user liz@example.com would '
+        u'be user-liz@example.com.  - The group example@googlegroups.com '
+        u'would be group-example@googlegroups.com.  - To refer to all members'
+        u' of the Google Apps for Business domain example.com, the entity '
+        u'would be domain-example.com.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'entityId',
+        None,
+        u'The ID for the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'etag',
+        None,
+        u'HTTP 1.1 Entity tag for the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'id',
+        None,
+        u'The ID of the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'storage#bucketAccessControl',
+        u'The kind of item this is. For bucket access control entries, this '
+        u'is always storage#bucketAccessControl.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'projectTeam',
+        None,
+        u'The project team associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'role',
+        None,
+        u'The access permission for the entity. Can be READER, WRITER, or '
+        u'OWNER.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'selfLink',
+        None,
+        u'The link to this access-control entry.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Creates a new ACL entry on the specified bucket.
+
+    Args:
+      bucket: The name of the bucket.
+
+    Flags:
+      domain: The domain associated with the entity, if any.
+      email: The email address associated with the entity, if any.
+      entity: The entity holding the permission, in one of the following
+        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
+        domain-domain  - project-team-projectId  - allUsers  -
+        allAuthenticatedUsers Examples:  - The user liz@example.com would be
+        user-liz@example.com.  - The group example@googlegroups.com would be
+        group-example@googlegroups.com.  - To refer to all members of the
+        Google Apps for Business domain example.com, the entity would be
+        domain-example.com.
+      entityId: The ID for the entity, if any.
+      etag: HTTP 1.1 Entity tag for the access-control entry.
+      id: The ID of the access-control entry.
+      kind: The kind of item this is. For bucket access control entries, this
+        is always storage#bucketAccessControl.
+      projectTeam: The project team associated with the entity, if any.
+      role: The access permission for the entity. Can be READER, WRITER, or
+        OWNER.
+      selfLink: The link to this access-control entry.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BucketAccessControl(
+        bucket=bucket.decode('utf8'),
+        )
+    if FLAGS['domain'].present:
+      request.domain = FLAGS.domain.decode('utf8')
+    if FLAGS['email'].present:
+      request.email = FLAGS.email.decode('utf8')
+    if FLAGS['entity'].present:
+      request.entity = FLAGS.entity.decode('utf8')
+    if FLAGS['entityId'].present:
+      request.entityId = FLAGS.entityId.decode('utf8')
+    if FLAGS['etag'].present:
+      request.etag = FLAGS.etag.decode('utf8')
+    if FLAGS['id'].present:
+      request.id = FLAGS.id.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['projectTeam'].present:
+      request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
+    if FLAGS['role'].present:
+      request.role = FLAGS.role.decode('utf8')
+    if FLAGS['selfLink'].present:
+      request.selfLink = FLAGS.selfLink.decode('utf8')
+    result = client.bucketAccessControls.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketAccessControlsList(apitools_base_cli.NewCmd):
+  """Command wrapping bucketAccessControls.List."""
+
+  usage = """bucketAccessControls_list <bucket>"""
+
+  def __init__(self, name, fv):
+    super(BucketAccessControlsList, self).__init__(name, fv)
+
+  def RunWithArgs(self, bucket):
+    """Retrieves ACL entries on the specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketAccessControlsListRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    result = client.bucketAccessControls.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketAccessControlsPatch(apitools_base_cli.NewCmd):
+  """Command wrapping bucketAccessControls.Patch."""
+
+  usage = """bucketAccessControls_patch <bucket> <entity>"""
+
+  def __init__(self, name, fv):
+    super(BucketAccessControlsPatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'domain',
+        None,
+        u'The domain associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'email',
+        None,
+        u'The email address associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'entityId',
+        None,
+        u'The ID for the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'etag',
+        None,
+        u'HTTP 1.1 Entity tag for the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'id',
+        None,
+        u'The ID of the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'storage#bucketAccessControl',
+        u'The kind of item this is. For bucket access control entries, this '
+        u'is always storage#bucketAccessControl.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'projectTeam',
+        None,
+        u'The project team associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'role',
+        None,
+        u'The access permission for the entity. Can be READER, WRITER, or '
+        u'OWNER.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'selfLink',
+        None,
+        u'The link to this access-control entry.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, entity):
+    """Updates an ACL entry on the specified bucket. This method supports
+    patch semantics.
+
+    Args:
+      bucket: The name of the bucket.
+      entity: The entity holding the permission, in one of the following
+        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
+        domain-domain  - project-team-projectId  - allUsers  -
+        allAuthenticatedUsers Examples:  - The user liz@example.com would be
+        user-liz@example.com.  - The group example@googlegroups.com would be
+        group-example@googlegroups.com.  - To refer to all members of the
+        Google Apps for Business domain example.com, the entity would be
+        domain-example.com.
+
+    Flags:
+      domain: The domain associated with the entity, if any.
+      email: The email address associated with the entity, if any.
+      entityId: The ID for the entity, if any.
+      etag: HTTP 1.1 Entity tag for the access-control entry.
+      id: The ID of the access-control entry.
+      kind: The kind of item this is. For bucket access control entries, this
+        is always storage#bucketAccessControl.
+      projectTeam: The project team associated with the entity, if any.
+      role: The access permission for the entity. Can be READER, WRITER, or
+        OWNER.
+      selfLink: The link to this access-control entry.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BucketAccessControl(
+        bucket=bucket.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    if FLAGS['domain'].present:
+      request.domain = FLAGS.domain.decode('utf8')
+    if FLAGS['email'].present:
+      request.email = FLAGS.email.decode('utf8')
+    if FLAGS['entityId'].present:
+      request.entityId = FLAGS.entityId.decode('utf8')
+    if FLAGS['etag'].present:
+      request.etag = FLAGS.etag.decode('utf8')
+    if FLAGS['id'].present:
+      request.id = FLAGS.id.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['projectTeam'].present:
+      request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
+    if FLAGS['role'].present:
+      request.role = FLAGS.role.decode('utf8')
+    if FLAGS['selfLink'].present:
+      request.selfLink = FLAGS.selfLink.decode('utf8')
+    result = client.bucketAccessControls.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketAccessControlsUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping bucketAccessControls.Update."""
+
+  usage = """bucketAccessControls_update <bucket> <entity>"""
+
+  def __init__(self, name, fv):
+    super(BucketAccessControlsUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'domain',
+        None,
+        u'The domain associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'email',
+        None,
+        u'The email address associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'entityId',
+        None,
+        u'The ID for the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'etag',
+        None,
+        u'HTTP 1.1 Entity tag for the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'id',
+        None,
+        u'The ID of the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'storage#bucketAccessControl',
+        u'The kind of item this is. For bucket access control entries, this '
+        u'is always storage#bucketAccessControl.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'projectTeam',
+        None,
+        u'The project team associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'role',
+        None,
+        u'The access permission for the entity. Can be READER, WRITER, or '
+        u'OWNER.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'selfLink',
+        None,
+        u'The link to this access-control entry.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, entity):
+    """Updates an ACL entry on the specified bucket.
+
+    Args:
+      bucket: The name of the bucket.
+      entity: The entity holding the permission, in one of the following
+        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
+        domain-domain  - project-team-projectId  - allUsers  -
+        allAuthenticatedUsers Examples:  - The user liz@example.com would be
+        user-liz@example.com.  - The group example@googlegroups.com would be
+        group-example@googlegroups.com.  - To refer to all members of the
+        Google Apps for Business domain example.com, the entity would be
+        domain-example.com.
+
+    Flags:
+      domain: The domain associated with the entity, if any.
+      email: The email address associated with the entity, if any.
+      entityId: The ID for the entity, if any.
+      etag: HTTP 1.1 Entity tag for the access-control entry.
+      id: The ID of the access-control entry.
+      kind: The kind of item this is. For bucket access control entries, this
+        is always storage#bucketAccessControl.
+      projectTeam: The project team associated with the entity, if any.
+      role: The access permission for the entity. Can be READER, WRITER, or
+        OWNER.
+      selfLink: The link to this access-control entry.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.BucketAccessControl(
+        bucket=bucket.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    if FLAGS['domain'].present:
+      request.domain = FLAGS.domain.decode('utf8')
+    if FLAGS['email'].present:
+      request.email = FLAGS.email.decode('utf8')
+    if FLAGS['entityId'].present:
+      request.entityId = FLAGS.entityId.decode('utf8')
+    if FLAGS['etag'].present:
+      request.etag = FLAGS.etag.decode('utf8')
+    if FLAGS['id'].present:
+      request.id = FLAGS.id.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['projectTeam'].present:
+      request.projectTeam = apitools_base.JsonToMessage(messages.BucketAccessControl.ProjectTeamValue, FLAGS.projectTeam)
+    if FLAGS['role'].present:
+      request.role = FLAGS.role.decode('utf8')
+    if FLAGS['selfLink'].present:
+      request.selfLink = FLAGS.selfLink.decode('utf8')
+    result = client.bucketAccessControls.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsDelete(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.Delete."""
+
+  usage = """buckets_delete <bucket>"""
+
+  def __init__(self, name, fv):
+    super(BucketsDelete, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u'If set, only deletes the bucket if its metageneration matches this '
+        u'value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u'If set, only deletes the bucket if its metageneration does not '
+        u'match this value.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Permanently deletes an empty bucket.
+
+    Args:
+      bucket: Name of a bucket.
+
+    Flags:
+      ifMetagenerationMatch: If set, only deletes the bucket if its
+        metageneration matches this value.
+      ifMetagenerationNotMatch: If set, only deletes the bucket if its
+        metageneration does not match this value.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketsDeleteRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    result = client.buckets.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsGet(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.Get."""
+
+  usage = """buckets_get <bucket>"""
+
+  def __init__(self, name, fv):
+    super(BucketsGet, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u'Makes the return of the bucket metadata conditional on whether the '
+        u"bucket's current metageneration matches the given value.",
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u'Makes the return of the bucket metadata conditional on whether the '
+        u"bucket's current metageneration does not match the given value.",
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Returns metadata for the specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+
+    Flags:
+      ifMetagenerationMatch: Makes the return of the bucket metadata
+        conditional on whether the bucket's current metageneration matches the
+        given value.
+      ifMetagenerationNotMatch: Makes the return of the bucket metadata
+        conditional on whether the bucket's current metageneration does not
+        match the given value.
+      projection: Set of properties to return. Defaults to noAcl.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketsGetRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageBucketsGetRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    result = client.buckets.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsGetIamPolicy(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.GetIamPolicy."""
+
+  usage = """buckets_getIamPolicy <bucket>"""
+
+  def __init__(self, name, fv):
+    super(BucketsGetIamPolicy, self).__init__(name, fv)
+
+  def RunWithArgs(self, bucket):
+    """Returns an IAM policy for the specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketsGetIamPolicyRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    result = client.buckets.GetIamPolicy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsInsert(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.Insert."""
+
+  usage = """buckets_insert <project>"""
+
+  def __init__(self, name, fv):
+    super(BucketsInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'bucket',
+        None,
+        u'A Bucket resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
+        u'Apply a predefined set of access controls to this bucket.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedDefaultObjectAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of default object access controls to this '
+        u'bucket.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl, unless the bucket '
+        u'resource specifies acl or defaultObjectAcl properties, when it '
+        u'defaults to full.',
+        flag_values=fv)
+
+  def RunWithArgs(self, project):
+    """Creates a new bucket.
+
+    Args:
+      project: A valid API project identifier.
+
+    Flags:
+      bucket: A Bucket resource to be passed as the request body.
+      predefinedAcl: Apply a predefined set of access controls to this bucket.
+      predefinedDefaultObjectAcl: Apply a predefined set of default object
+        access controls to this bucket.
+      projection: Set of properties to return. Defaults to noAcl, unless the
+        bucket resource specifies acl or defaultObjectAcl properties, when it
+        defaults to full.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketsInsertRequest(
+        project=project.decode('utf8'),
+        )
+    if FLAGS['bucket'].present:
+      request.bucket = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucket)
+    if FLAGS['predefinedAcl'].present:
+      request.predefinedAcl = messages.StorageBucketsInsertRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
+    if FLAGS['predefinedDefaultObjectAcl'].present:
+      request.predefinedDefaultObjectAcl = messages.StorageBucketsInsertRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageBucketsInsertRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    result = client.buckets.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsList(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.List."""
+
+  usage = """buckets_list <project>"""
+
+  def __init__(self, name, fv):
+    super(BucketsList, self).__init__(name, fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of buckets to return.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'A previously-returned page token representing part of the larger '
+        u'set of results to view.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'prefix',
+        None,
+        u'Filter results to buckets whose names begin with this prefix.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl.',
+        flag_values=fv)
+
+  def RunWithArgs(self, project):
+    """Retrieves a list of buckets for a given project.
+
+    Args:
+      project: A valid API project identifier.
+
+    Flags:
+      maxResults: Maximum number of buckets to return.
+      pageToken: A previously-returned page token representing part of the
+        larger set of results to view.
+      prefix: Filter results to buckets whose names begin with this prefix.
+      projection: Set of properties to return. Defaults to noAcl.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketsListRequest(
+        project=project.decode('utf8'),
+        )
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['prefix'].present:
+      request.prefix = FLAGS.prefix.decode('utf8')
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageBucketsListRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    result = client.buckets.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsPatch(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.Patch."""
+
+  usage = """buckets_patch <bucket>"""
+
+  # Each flag defined below mirrors an optional field of
+  # StorageBucketsPatchRequest; RunWithArgs copies over only the flags that
+  # were explicitly set on the command line.
+  def __init__(self, name, fv):
+    super(BucketsPatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'bucketResource',
+        None,
+        u'A Bucket resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u'Makes the return of the bucket metadata conditional on whether the '
+        u"bucket's current metageneration matches the given value.",
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u'Makes the return of the bucket metadata conditional on whether the '
+        u"bucket's current metageneration does not match the given value.",
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
+        u'Apply a predefined set of access controls to this bucket.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedDefaultObjectAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of default object access controls to this '
+        u'bucket.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to full.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Updates a bucket. This method supports patch semantics.
+
+    Args:
+      bucket: Name of a bucket.
+
+    Flags:
+      bucketResource: A Bucket resource to be passed as the request body.
+      ifMetagenerationMatch: Makes the return of the bucket metadata
+        conditional on whether the bucket's current metageneration matches the
+        given value.
+      ifMetagenerationNotMatch: Makes the return of the bucket metadata
+        conditional on whether the bucket's current metageneration does not
+        match the given value.
+      predefinedAcl: Apply a predefined set of access controls to this bucket.
+      predefinedDefaultObjectAcl: Apply a predefined set of default object
+        access controls to this bucket.
+      projection: Set of properties to return. Defaults to full.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketsPatchRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    # Only explicitly-set flags are copied; JSON string flags are parsed into
+    # message types, and int64-valued string flags are converted with int().
+    if FLAGS['bucketResource'].present:
+      request.bucketResource = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucketResource)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['predefinedAcl'].present:
+      request.predefinedAcl = messages.StorageBucketsPatchRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
+    if FLAGS['predefinedDefaultObjectAcl'].present:
+      request.predefinedDefaultObjectAcl = messages.StorageBucketsPatchRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageBucketsPatchRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    result = client.buckets.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsSetIamPolicy(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.SetIamPolicy."""
+
+  usage = """buckets_setIamPolicy <bucket>"""
+
+  def __init__(self, name, fv):
+    super(BucketsSetIamPolicy, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'policy',
+        None,
+        u'A Policy resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Updates an IAM policy for the specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+
+    Flags:
+      policy: A Policy resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketsSetIamPolicyRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    # --policy is a JSON string; it is parsed into a messages.Policy here.
+    if FLAGS['policy'].present:
+      request.policy = apitools_base.JsonToMessage(messages.Policy, FLAGS.policy)
+    result = client.buckets.SetIamPolicy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsTestIamPermissions(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.TestIamPermissions."""
+
+  usage = """buckets_testIamPermissions <bucket> <permissions>"""
+
+  def __init__(self, name, fv):
+    super(BucketsTestIamPermissions, self).__init__(name, fv)
+
+  def RunWithArgs(self, bucket, permissions):
+    """Tests a set of permissions on the given bucket to see which, if any,
+    are held by the caller.
+
+    Args:
+      bucket: Name of a bucket.
+      permissions: Permissions to test.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # NOTE(review): permissions is forwarded as a single decoded string even
+    # though the API field is repeated — confirm the generator/message layer
+    # handles the coercion as intended.
+    request = messages.StorageBucketsTestIamPermissionsRequest(
+        bucket=bucket.decode('utf8'),
+        permissions=permissions.decode('utf8'),
+        )
+    result = client.buckets.TestIamPermissions(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class BucketsUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping buckets.Update."""
+
+  usage = """buckets_update <bucket>"""
+
+  # Flag set is identical to BucketsPatch; the two commands differ only in
+  # the request type and the service method invoked (Update vs. Patch).
+  def __init__(self, name, fv):
+    super(BucketsUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'bucketResource',
+        None,
+        u'A Bucket resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u'Makes the return of the bucket metadata conditional on whether the '
+        u"bucket's current metageneration matches the given value.",
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u'Makes the return of the bucket metadata conditional on whether the '
+        u"bucket's current metageneration does not match the given value.",
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'private', u'projectPrivate', u'publicRead', u'publicReadWrite'],
+        u'Apply a predefined set of access controls to this bucket.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedDefaultObjectAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of default object access controls to this '
+        u'bucket.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to full.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Updates a bucket.
+
+    Args:
+      bucket: Name of a bucket.
+
+    Flags:
+      bucketResource: A Bucket resource to be passed as the request body.
+      ifMetagenerationMatch: Makes the return of the bucket metadata
+        conditional on whether the bucket's current metageneration matches the
+        given value.
+      ifMetagenerationNotMatch: Makes the return of the bucket metadata
+        conditional on whether the bucket's current metageneration does not
+        match the given value.
+      predefinedAcl: Apply a predefined set of access controls to this bucket.
+      predefinedDefaultObjectAcl: Apply a predefined set of default object
+        access controls to this bucket.
+      projection: Set of properties to return. Defaults to full.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageBucketsUpdateRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    # Only explicitly-set flags are copied onto the request.
+    if FLAGS['bucketResource'].present:
+      request.bucketResource = apitools_base.JsonToMessage(messages.Bucket, FLAGS.bucketResource)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['predefinedAcl'].present:
+      request.predefinedAcl = messages.StorageBucketsUpdateRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
+    if FLAGS['predefinedDefaultObjectAcl'].present:
+      request.predefinedDefaultObjectAcl = messages.StorageBucketsUpdateRequest.PredefinedDefaultObjectAclValueValuesEnum(FLAGS.predefinedDefaultObjectAcl)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageBucketsUpdateRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    result = client.buckets.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ChannelsStop(apitools_base_cli.NewCmd):
+  """Command wrapping channels.Stop."""
+
+  usage = """channels_stop"""
+
+  # Unlike the bucket commands, this command takes no positional arguments:
+  # the request body is a messages.Channel built entirely from flags.
+  def __init__(self, name, fv):
+    super(ChannelsStop, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'address',
+        None,
+        u'The address where notifications are delivered for this channel.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'expiration',
+        None,
+        u'Date and time of notification channel expiration, expressed as a '
+        u'Unix timestamp, in milliseconds. Optional.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'id',
+        None,
+        u'A UUID or similar unique string that identifies this channel.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'api#channel',
+        u'Identifies this as a notification channel used to watch for changes'
+        u' to a resource. Value: the fixed string "api#channel".',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'params',
+        None,
+        u'Additional parameters controlling delivery channel behavior. '
+        u'Optional.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'payload',
+        None,
+        u'A Boolean value to indicate whether payload is wanted. Optional.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'resourceId',
+        None,
+        u'An opaque ID that identifies the resource being watched on this '
+        u'channel. Stable across different API versions.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'resourceUri',
+        None,
+        u'A version-specific identifier for the watched resource.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'token',
+        None,
+        u'An arbitrary string delivered to the target address with each '
+        u'notification delivered over this channel. Optional.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'type',
+        None,
+        u'The type of delivery mechanism used for this channel.',
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Stop watching resources through this channel
+
+    Flags:
+      address: The address where notifications are delivered for this channel.
+      expiration: Date and time of notification channel expiration, expressed
+        as a Unix timestamp, in milliseconds. Optional.
+      id: A UUID or similar unique string that identifies this channel.
+      kind: Identifies this as a notification channel used to watch for
+        changes to a resource. Value: the fixed string "api#channel".
+      params: Additional parameters controlling delivery channel behavior.
+        Optional.
+      payload: A Boolean value to indicate whether payload is wanted.
+        Optional.
+      resourceId: An opaque ID that identifies the resource being watched on
+        this channel. Stable across different API versions.
+      resourceUri: A version-specific identifier for the watched resource.
+      token: An arbitrary string delivered to the target address with each
+        notification delivered over this channel. Optional.
+      type: The type of delivery mechanism used for this channel.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.Channel(
+        )
+    # Only explicitly-set flags are copied onto the Channel resource.
+    if FLAGS['address'].present:
+      request.address = FLAGS.address.decode('utf8')
+    if FLAGS['expiration'].present:
+      request.expiration = int(FLAGS.expiration)
+    if FLAGS['id'].present:
+      request.id = FLAGS.id.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['params'].present:
+      request.params = apitools_base.JsonToMessage(messages.Channel.ParamsValue, FLAGS.params)
+    if FLAGS['payload'].present:
+      request.payload = FLAGS.payload
+    if FLAGS['resourceId'].present:
+      request.resourceId = FLAGS.resourceId.decode('utf8')
+    if FLAGS['resourceUri'].present:
+      request.resourceUri = FLAGS.resourceUri.decode('utf8')
+    if FLAGS['token'].present:
+      request.token = FLAGS.token.decode('utf8')
+    if FLAGS['type'].present:
+      request.type = FLAGS.type.decode('utf8')
+    result = client.channels.Stop(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DefaultObjectAccessControlsDelete(apitools_base_cli.NewCmd):
+  """Command wrapping defaultObjectAccessControls.Delete."""
+
+  usage = """defaultObjectAccessControls_delete <bucket> <entity>"""
+
+  def __init__(self, name, fv):
+    super(DefaultObjectAccessControlsDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, bucket, entity):
+    """Permanently deletes the default object ACL entry for the specified
+    entity on the specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+      entity: The entity holding the permission. Can be user-userId, user-
+        emailAddress, group-groupId, group-emailAddress, allUsers, or
+        allAuthenticatedUsers.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # No optional flags: the request is built from the positional args only.
+    request = messages.StorageDefaultObjectAccessControlsDeleteRequest(
+        bucket=bucket.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    result = client.defaultObjectAccessControls.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DefaultObjectAccessControlsGet(apitools_base_cli.NewCmd):
+  """Command wrapping defaultObjectAccessControls.Get."""
+
+  usage = """defaultObjectAccessControls_get <bucket> <entity>"""
+
+  def __init__(self, name, fv):
+    super(DefaultObjectAccessControlsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, bucket, entity):
+    """Returns the default object ACL entry for the specified entity on the
+    specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+      entity: The entity holding the permission. Can be user-userId, user-
+        emailAddress, group-groupId, group-emailAddress, allUsers, or
+        allAuthenticatedUsers.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # No optional flags: the request is built from the positional args only.
+    request = messages.StorageDefaultObjectAccessControlsGetRequest(
+        bucket=bucket.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    result = client.defaultObjectAccessControls.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DefaultObjectAccessControlsInsert(apitools_base_cli.NewCmd):
+  """Command wrapping defaultObjectAccessControls.Insert."""
+
+  usage = """defaultObjectAccessControls_insert <bucket>"""
+
+  # The flags mirror the fields of an ObjectAccessControl resource; the
+  # request body IS the resource (bucket comes from the positional arg).
+  def __init__(self, name, fv):
+    super(DefaultObjectAccessControlsInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'domain',
+        None,
+        u'The domain associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'email',
+        None,
+        u'The email address associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'entity',
+        None,
+        u'The entity holding the permission, in one of the following forms:  '
+        u'- user-userId  - user-email  - group-groupId  - group-email  - '
+        u'domain-domain  - project-team-projectId  - allUsers  - '
+        u'allAuthenticatedUsers Examples:  - The user liz@example.com would '
+        u'be user-liz@example.com.  - The group example@googlegroups.com '
+        u'would be group-example@googlegroups.com.  - To refer to all members'
+        u' of the Google Apps for Business domain example.com, the entity '
+        u'would be domain-example.com.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'entityId',
+        None,
+        u'The ID for the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'etag',
+        None,
+        u'HTTP 1.1 Entity tag for the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'The content generation of the object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'id',
+        None,
+        u'The ID of the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'storage#objectAccessControl',
+        u'The kind of item this is. For object access control entries, this '
+        u'is always storage#objectAccessControl.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'object',
+        None,
+        u'The name of the object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'projectTeam',
+        None,
+        u'The project team associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'role',
+        None,
+        u'The access permission for the entity. Can be READER or OWNER.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'selfLink',
+        None,
+        u'The link to this access-control entry.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Creates a new default object ACL entry on the specified bucket.
+
+    Args:
+      bucket: The name of the bucket.
+
+    Flags:
+      domain: The domain associated with the entity, if any.
+      email: The email address associated with the entity, if any.
+      entity: The entity holding the permission, in one of the following
+        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
+        domain-domain  - project-team-projectId  - allUsers  -
+        allAuthenticatedUsers Examples:  - The user liz@example.com would be
+        user-liz@example.com.  - The group example@googlegroups.com would be
+        group-example@googlegroups.com.  - To refer to all members of the
+        Google Apps for Business domain example.com, the entity would be
+        domain-example.com.
+      entityId: The ID for the entity, if any.
+      etag: HTTP 1.1 Entity tag for the access-control entry.
+      generation: The content generation of the object.
+      id: The ID of the access-control entry.
+      kind: The kind of item this is. For object access control entries, this
+        is always storage#objectAccessControl.
+      object: The name of the object.
+      projectTeam: The project team associated with the entity, if any.
+      role: The access permission for the entity. Can be READER or OWNER.
+      selfLink: The link to this access-control entry.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ObjectAccessControl(
+        bucket=bucket.decode('utf8'),
+        )
+    # Only explicitly-set flags are copied onto the resource.
+    if FLAGS['domain'].present:
+      request.domain = FLAGS.domain.decode('utf8')
+    if FLAGS['email'].present:
+      request.email = FLAGS.email.decode('utf8')
+    if FLAGS['entity'].present:
+      request.entity = FLAGS.entity.decode('utf8')
+    if FLAGS['entityId'].present:
+      request.entityId = FLAGS.entityId.decode('utf8')
+    if FLAGS['etag'].present:
+      request.etag = FLAGS.etag.decode('utf8')
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['id'].present:
+      request.id = FLAGS.id.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['object'].present:
+      request.object = FLAGS.object.decode('utf8')
+    if FLAGS['projectTeam'].present:
+      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
+    if FLAGS['role'].present:
+      request.role = FLAGS.role.decode('utf8')
+    if FLAGS['selfLink'].present:
+      request.selfLink = FLAGS.selfLink.decode('utf8')
+    result = client.defaultObjectAccessControls.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DefaultObjectAccessControlsList(apitools_base_cli.NewCmd):
+  """Command wrapping defaultObjectAccessControls.List."""
+
+  usage = """defaultObjectAccessControls_list <bucket>"""
+
+  def __init__(self, name, fv):
+    super(DefaultObjectAccessControlsList, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"If present, only return default ACL listing if the bucket's current"
+        u' metageneration matches this value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u"If present, only return default ACL listing if the bucket's current"
+        u' metageneration does not match the given value.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Retrieves default object ACL entries on the specified bucket.
+
+    Args:
+      bucket: Name of a bucket.
+
+    Flags:
+      ifMetagenerationMatch: If present, only return default ACL listing if
+        the bucket's current metageneration matches this value.
+      ifMetagenerationNotMatch: If present, only return default ACL listing if
+        the bucket's current metageneration does not match the given value.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageDefaultObjectAccessControlsListRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    # Metageneration preconditions are int64 values carried as string flags;
+    # int() converts them when explicitly set.
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    result = client.defaultObjectAccessControls.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DefaultObjectAccessControlsPatch(apitools_base_cli.NewCmd):
+  """Command wrapping defaultObjectAccessControls.Patch."""
+
+  usage = """defaultObjectAccessControls_patch <bucket> <entity>"""
+
+  # Same flag set as DefaultObjectAccessControlsInsert minus 'entity', which
+  # is a positional argument here; the request body is the resource itself.
+  def __init__(self, name, fv):
+    super(DefaultObjectAccessControlsPatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'domain',
+        None,
+        u'The domain associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'email',
+        None,
+        u'The email address associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'entityId',
+        None,
+        u'The ID for the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'etag',
+        None,
+        u'HTTP 1.1 Entity tag for the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'The content generation of the object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'id',
+        None,
+        u'The ID of the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'storage#objectAccessControl',
+        u'The kind of item this is. For object access control entries, this '
+        u'is always storage#objectAccessControl.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'object',
+        None,
+        u'The name of the object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'projectTeam',
+        None,
+        u'The project team associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'role',
+        None,
+        u'The access permission for the entity. Can be READER or OWNER.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'selfLink',
+        None,
+        u'The link to this access-control entry.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, entity):
+    """Updates a default object ACL entry on the specified bucket. This method
+    supports patch semantics.
+
+    Args:
+      bucket: The name of the bucket.
+      entity: The entity holding the permission, in one of the following
+        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
+        domain-domain  - project-team-projectId  - allUsers  -
+        allAuthenticatedUsers Examples:  - The user liz@example.com would be
+        user-liz@example.com.  - The group example@googlegroups.com would be
+        group-example@googlegroups.com.  - To refer to all members of the
+        Google Apps for Business domain example.com, the entity would be
+        domain-example.com.
+
+    Flags:
+      domain: The domain associated with the entity, if any.
+      email: The email address associated with the entity, if any.
+      entityId: The ID for the entity, if any.
+      etag: HTTP 1.1 Entity tag for the access-control entry.
+      generation: The content generation of the object.
+      id: The ID of the access-control entry.
+      kind: The kind of item this is. For object access control entries, this
+        is always storage#objectAccessControl.
+      object: The name of the object.
+      projectTeam: The project team associated with the entity, if any.
+      role: The access permission for the entity. Can be READER or OWNER.
+      selfLink: The link to this access-control entry.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ObjectAccessControl(
+        bucket=bucket.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    # Only explicitly-set flags are copied onto the resource.
+    if FLAGS['domain'].present:
+      request.domain = FLAGS.domain.decode('utf8')
+    if FLAGS['email'].present:
+      request.email = FLAGS.email.decode('utf8')
+    if FLAGS['entityId'].present:
+      request.entityId = FLAGS.entityId.decode('utf8')
+    if FLAGS['etag'].present:
+      request.etag = FLAGS.etag.decode('utf8')
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['id'].present:
+      request.id = FLAGS.id.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['object'].present:
+      request.object = FLAGS.object.decode('utf8')
+    if FLAGS['projectTeam'].present:
+      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
+    if FLAGS['role'].present:
+      request.role = FLAGS.role.decode('utf8')
+    if FLAGS['selfLink'].present:
+      request.selfLink = FLAGS.selfLink.decode('utf8')
+    result = client.defaultObjectAccessControls.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class DefaultObjectAccessControlsUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping defaultObjectAccessControls.Update."""
+
+  usage = """defaultObjectAccessControls_update <bucket> <entity>"""
+
+  # Identical flag set to DefaultObjectAccessControlsPatch; the commands
+  # differ only in the service method invoked (Update vs. Patch).
+  def __init__(self, name, fv):
+    super(DefaultObjectAccessControlsUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'domain',
+        None,
+        u'The domain associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'email',
+        None,
+        u'The email address associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'entityId',
+        None,
+        u'The ID for the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'etag',
+        None,
+        u'HTTP 1.1 Entity tag for the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'The content generation of the object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'id',
+        None,
+        u'The ID of the access-control entry.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'storage#objectAccessControl',
+        u'The kind of item this is. For object access control entries, this '
+        u'is always storage#objectAccessControl.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'object',
+        None,
+        u'The name of the object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'projectTeam',
+        None,
+        u'The project team associated with the entity, if any.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'role',
+        None,
+        u'The access permission for the entity. Can be READER or OWNER.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'selfLink',
+        None,
+        u'The link to this access-control entry.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, entity):
+    """Updates a default object ACL entry on the specified bucket.
+
+    Args:
+      bucket: The name of the bucket.
+      entity: The entity holding the permission, in one of the following
+        forms:  - user-userId  - user-email  - group-groupId  - group-email  -
+        domain-domain  - project-team-projectId  - allUsers  -
+        allAuthenticatedUsers Examples:  - The user liz@example.com would be
+        user-liz@example.com.  - The group example@googlegroups.com would be
+        group-example@googlegroups.com.  - To refer to all members of the
+        Google Apps for Business domain example.com, the entity would be
+        domain-example.com.
+
+    Flags:
+      domain: The domain associated with the entity, if any.
+      email: The email address associated with the entity, if any.
+      entityId: The ID for the entity, if any.
+      etag: HTTP 1.1 Entity tag for the access-control entry.
+      generation: The content generation of the object.
+      id: The ID of the access-control entry.
+      kind: The kind of item this is. For object access control entries, this
+        is always storage#objectAccessControl.
+      object: The name of the object.
+      projectTeam: The project team associated with the entity, if any.
+      role: The access permission for the entity. Can be READER or OWNER.
+      selfLink: The link to this access-control entry.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.ObjectAccessControl(
+        bucket=bucket.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    # Only explicitly-set flags are copied onto the resource.
+    if FLAGS['domain'].present:
+      request.domain = FLAGS.domain.decode('utf8')
+    if FLAGS['email'].present:
+      request.email = FLAGS.email.decode('utf8')
+    if FLAGS['entityId'].present:
+      request.entityId = FLAGS.entityId.decode('utf8')
+    if FLAGS['etag'].present:
+      request.etag = FLAGS.etag.decode('utf8')
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['id'].present:
+      request.id = FLAGS.id.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['object'].present:
+      request.object = FLAGS.object.decode('utf8')
+    if FLAGS['projectTeam'].present:
+      request.projectTeam = apitools_base.JsonToMessage(messages.ObjectAccessControl.ProjectTeamValue, FLAGS.projectTeam)
+    if FLAGS['role'].present:
+      request.role = FLAGS.role.decode('utf8')
+    if FLAGS['selfLink'].present:
+      request.selfLink = FLAGS.selfLink.decode('utf8')
+    result = client.defaultObjectAccessControls.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class NotificationsDelete(apitools_base_cli.NewCmd):
+  """Command wrapping notifications.Delete."""
+
+  usage = """notifications_delete <notification>"""
+
+  def __init__(self, name, fv):
+    super(NotificationsDelete, self).__init__(name, fv)
+
+  def RunWithArgs(self, notification):
+    """Permanently deletes a notification subscription.
+
+    Args:
+      notification: ID of the notification to delete.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageNotificationsDeleteRequest(
+        notification=notification.decode('utf8'),
+        )
+    result = client.notifications.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class NotificationsGet(apitools_base_cli.NewCmd):
+  """Command wrapping notifications.Get."""
+
+  usage = """notifications_get <notification>"""
+
+  def __init__(self, name, fv):
+    super(NotificationsGet, self).__init__(name, fv)
+
+  def RunWithArgs(self, notification):
+    """View a notification configuration.
+
+    Args:
+      notification: Notification ID
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageNotificationsGetRequest(
+        notification=notification.decode('utf8'),
+        )
+    result = client.notifications.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class NotificationsInsert(apitools_base_cli.NewCmd):
+  """Command wrapping notifications.Insert."""
+
+  usage = """notifications_insert"""
+
+  def __init__(self, name, fv):
+    super(NotificationsInsert, self).__init__(name, fv)
+    # Each DEFINE_string below exposes one field of messages.Notification as a
+    # command-line flag; RunWithArgs copies into the request only the flags
+    # the user explicitly set.
+    flags.DEFINE_string(
+        'bucket',
+        None,
+        u'The name of the bucket this subscription is particular to.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'custom_attributes',
+        None,
+        u'An optional list of additional attributes to attach to each Cloud '
+        u'PubSub message published for this notification subscription.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'etag',
+        None,
+        u'HTTP 1.1 Entity tag for this subscription notification.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'event_types',
+        None,
+        u'If present, only send notifications about listed event types. If '
+        u'empty, sent notifications for all event types.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'id',
+        None,
+        u'The ID of the notification.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'kind',
+        u'storage#notification',
+        u'The kind of item this is. For notifications, this is always '
+        u'storage#notification.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'object_metadata_format',
+        u'JSON_API_V1',
+        u'If payload_content is OBJECT_METADATA, controls the format of that '
+        u'metadata. Otherwise, must not be set.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'object_name_prefix',
+        None,
+        u'If present, only apply this notification configuration to object '
+        u'names that begin with this prefix.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'payload_content',
+        u'OBJECT_METADATA',
+        u'The desired content of the Payload. Defaults to OBJECT_METADATA.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'selfLink',
+        None,
+        u'The canonical URL of this notification.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'topic',
+        None,
+        u'The Cloud PubSub topic to which this subscription publishes. '
+        u"Formatted as: '//pubsub.googleapis.com/projects/{project-"
+        u"identifier}/topics/{my-topic}'",
+        flag_values=fv)
+
+  def RunWithArgs(self):
+    """Creates a notification subscription for a given bucket.
+
+    Flags:
+      bucket: The name of the bucket this subscription is particular to.
+      custom_attributes: An optional list of additional attributes to attach
+        to each Cloud PubSub message published for this notification
+        subscription.
+      etag: HTTP 1.1 Entity tag for this subscription notification.
+      event_types: If present, only send notifications about listed event
+        types. If empty, sent notifications for all event types.
+      id: The ID of the notification.
+      kind: The kind of item this is. For notifications, this is always
+        storage#notification.
+      object_metadata_format: If payload_content is OBJECT_METADATA, controls
+        the format of that metadata. Otherwise, must not be set.
+      object_name_prefix: If present, only apply this notification
+        configuration to object names that begin with this prefix.
+      payload_content: The desired content of the Payload. Defaults to
+        OBJECT_METADATA.
+      selfLink: The canonical URL of this notification.
+      topic: The Cloud PubSub topic to which this subscription publishes.
+        Formatted as: '//pubsub.googleapis.com/projects/{project-
+        identifier}/topics/{my-topic}'
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Start from an empty Notification; each field is filled in below only
+    # when the corresponding flag was explicitly provided on the command line.
+    request = messages.Notification(
+        )
+    if FLAGS['bucket'].present:
+      request.bucket = FLAGS.bucket.decode('utf8')
+    if FLAGS['custom_attributes'].present:
+      # The flag value is a JSON string; parse it into the nested message.
+      request.custom_attributes = apitools_base.JsonToMessage(messages.Notification.CustomAttributesValue, FLAGS.custom_attributes)
+    if FLAGS['etag'].present:
+      request.etag = FLAGS.etag.decode('utf8')
+    if FLAGS['event_types'].present:
+      # NOTE(review): event_types is declared above as a single string flag,
+      # so this comprehension iterates its individual characters; a repeated
+      # (list) flag looks intended -- confirm against the code generator.
+      request.event_types = [x.decode('utf8') for x in FLAGS.event_types]
+    if FLAGS['id'].present:
+      request.id = FLAGS.id.decode('utf8')
+    if FLAGS['kind'].present:
+      request.kind = FLAGS.kind.decode('utf8')
+    if FLAGS['object_metadata_format'].present:
+      request.object_metadata_format = FLAGS.object_metadata_format.decode('utf8')
+    if FLAGS['object_name_prefix'].present:
+      request.object_name_prefix = FLAGS.object_name_prefix.decode('utf8')
+    if FLAGS['payload_content'].present:
+      request.payload_content = FLAGS.payload_content.decode('utf8')
+    if FLAGS['selfLink'].present:
+      request.selfLink = FLAGS.selfLink.decode('utf8')
+    if FLAGS['topic'].present:
+      request.topic = FLAGS.topic.decode('utf8')
+    result = client.notifications.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class NotificationsList(apitools_base_cli.NewCmd):
+  """Command wrapping notifications.List."""
+
+  usage = """notifications_list <bucket>"""
+
+  def __init__(self, name, fv):
+    super(NotificationsList, self).__init__(name, fv)
+
+  def RunWithArgs(self, bucket):
+    """Retrieves a list of notification subscriptions for a given bucket.
+
+    Args:
+      bucket: Name of a GCS bucket.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageNotificationsListRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    result = client.notifications.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectAccessControlsDelete(apitools_base_cli.NewCmd):
+  """Command wrapping objectAccessControls.Delete."""
+
+  usage = """objectAccessControls_delete <bucket> <object> <entity>"""
+
+  def __init__(self, name, fv):
+    super(ObjectAccessControlsDelete, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object, entity):
+    """Permanently deletes the ACL entry for the specified entity on the
+    specified object.
+
+    Args:
+      bucket: Name of a bucket.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+      entity: The entity holding the permission. Can be user-userId, user-
+        emailAddress, group-groupId, group-emailAddress, allUsers, or
+        allAuthenticatedUsers.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectAccessControlsDeleteRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    result = client.objectAccessControls.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectAccessControlsGet(apitools_base_cli.NewCmd):
+  """Command wrapping objectAccessControls.Get."""
+
+  usage = """objectAccessControls_get <bucket> <object> <entity>"""
+
+  def __init__(self, name, fv):
+    super(ObjectAccessControlsGet, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object, entity):
+    """Returns the ACL entry for the specified entity on the specified object.
+
+    Args:
+      bucket: Name of a bucket.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+      entity: The entity holding the permission. Can be user-userId, user-
+        emailAddress, group-groupId, group-emailAddress, allUsers, or
+        allAuthenticatedUsers.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectAccessControlsGetRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    result = client.objectAccessControls.Get(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectAccessControlsInsert(apitools_base_cli.NewCmd):
+  """Command wrapping objectAccessControls.Insert."""
+
+  usage = """objectAccessControls_insert <bucket> <object>"""
+
+  def __init__(self, name, fv):
+    super(ObjectAccessControlsInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'objectAccessControl',
+        None,
+        u'A ObjectAccessControl resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object):
+    """Creates a new ACL entry on the specified object.
+
+    Args:
+      bucket: Name of a bucket.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+      objectAccessControl: A ObjectAccessControl resource to be passed as the
+        request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectAccessControlsInsertRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['objectAccessControl'].present:
+      request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
+    result = client.objectAccessControls.Insert(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectAccessControlsList(apitools_base_cli.NewCmd):
+  """Command wrapping objectAccessControls.List."""
+
+  usage = """objectAccessControls_list <bucket> <object>"""
+
+  def __init__(self, name, fv):
+    super(ObjectAccessControlsList, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object):
+    """Retrieves ACL entries on the specified object.
+
+    Args:
+      bucket: Name of a bucket.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectAccessControlsListRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    result = client.objectAccessControls.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectAccessControlsPatch(apitools_base_cli.NewCmd):
+  """Command wrapping objectAccessControls.Patch."""
+
+  usage = """objectAccessControls_patch <bucket> <object> <entity>"""
+
+  def __init__(self, name, fv):
+    super(ObjectAccessControlsPatch, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'objectAccessControl',
+        None,
+        u'A ObjectAccessControl resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object, entity):
+    """Updates an ACL entry on the specified object. This method supports
+    patch semantics.
+
+    Args:
+      bucket: Name of a bucket.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+      entity: The entity holding the permission. Can be user-userId, user-
+        emailAddress, group-groupId, group-emailAddress, allUsers, or
+        allAuthenticatedUsers.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+      objectAccessControl: A ObjectAccessControl resource to be passed as the
+        request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectAccessControlsPatchRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['objectAccessControl'].present:
+      request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
+    result = client.objectAccessControls.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectAccessControlsUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping objectAccessControls.Update."""
+
+  usage = """objectAccessControls_update <bucket> <object> <entity>"""
+
+  def __init__(self, name, fv):
+    super(ObjectAccessControlsUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'objectAccessControl',
+        None,
+        u'A ObjectAccessControl resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object, entity):
+    """Updates an ACL entry on the specified object.
+
+    Args:
+      bucket: Name of a bucket.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+      entity: The entity holding the permission. Can be user-userId, user-
+        emailAddress, group-groupId, group-emailAddress, allUsers, or
+        allAuthenticatedUsers.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+      objectAccessControl: A ObjectAccessControl resource to be passed as the
+        request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectAccessControlsUpdateRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        entity=entity.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['objectAccessControl'].present:
+      request.objectAccessControl = apitools_base.JsonToMessage(messages.ObjectAccessControl, FLAGS.objectAccessControl)
+    result = client.objectAccessControls.Update(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsCompose(apitools_base_cli.NewCmd):
+  """Command wrapping objects.Compose."""
+
+  usage = """objects_compose <destinationBucket> <destinationObject>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsCompose, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'composeRequest',
+        None,
+        u'A ComposeRequest resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'destinationPredefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of access controls to the destination '
+        u'object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration matches the given value.',
+        flag_values=fv)
+    # Local (non-API) flags: control whether the response payload is written
+    # to a local file instead of only printed.
+    flags.DEFINE_string(
+        'download_filename',
+        '',
+        'Filename to use for download.',
+        flag_values=fv)
+    # NOTE(review): the default here is the string 'False', not the bool
+    # False; gflags coerces it, but a boolean literal would be clearer.
+    flags.DEFINE_boolean(
+        'overwrite',
+        'False',
+        'If True, overwrite the existing file when downloading.',
+        flag_values=fv)
+
+  def RunWithArgs(self, destinationBucket, destinationObject):
+    """Concatenates a list of existing objects into a new object in the same
+    bucket.
+
+    Args:
+      destinationBucket: Name of the bucket in which to store the new object.
+      destinationObject: Name of the new object. For information about how to
+        URL encode object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      composeRequest: A ComposeRequest resource to be passed as the request
+        body.
+      destinationPredefinedAcl: Apply a predefined set of access controls to
+        the destination object.
+      ifGenerationMatch: Makes the operation conditional on whether the
+        object's current generation matches the given value.
+      ifMetagenerationMatch: Makes the operation conditional on whether the
+        object's current metageneration matches the given value.
+      download_filename: Filename to use for download.
+      overwrite: If True, overwrite the existing file when downloading.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectsComposeRequest(
+        destinationBucket=destinationBucket.decode('utf8'),
+        destinationObject=destinationObject.decode('utf8'),
+        )
+    if FLAGS['composeRequest'].present:
+      # The flag value is a JSON string; parse it into the message type.
+      request.composeRequest = apitools_base.JsonToMessage(messages.ComposeRequest, FLAGS.composeRequest)
+    if FLAGS['destinationPredefinedAcl'].present:
+      # Coerce the enum flag's string value into the request's enum type.
+      request.destinationPredefinedAcl = messages.StorageObjectsComposeRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
+    if FLAGS['ifGenerationMatch'].present:
+      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    # Optionally stream the response body to a local file, with progress and
+    # completion callbacks printed to the console.
+    download = None
+    if FLAGS.download_filename:
+      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
+          progress_callback=apitools_base.DownloadProgressPrinter,
+          finish_callback=apitools_base.DownloadCompletePrinter)
+    result = client.objects.Compose(
+        request, global_params=global_params, download=download)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsCopy(apitools_base_cli.NewCmd):
+  """Command wrapping objects.Copy."""
+
+  usage = """objects_copy <sourceBucket> <sourceObject> <destinationBucket> <destinationObject>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsCopy, self).__init__(name, fv)
+    # Each DEFINE_* below exposes one optional field of
+    # StorageObjectsCopyRequest as a command-line flag.
+    flags.DEFINE_enum(
+        'destinationPredefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of access controls to the destination '
+        u'object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the destination object's"
+        u' current generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the destination object's"
+        u' current generation does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the destination object's"
+        u' current metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the destination object's"
+        u' current metageneration does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifSourceGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the source object's "
+        u'generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifSourceGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the source object's "
+        u'generation does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifSourceMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the source object's "
+        u'current metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifSourceMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the source object's "
+        u'current metageneration does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'object',
+        None,
+        u'A Object resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl, unless the object '
+        u'resource specifies the acl property, when it defaults to full.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'sourceGeneration',
+        None,
+        u'If present, selects a specific revision of the source object (as '
+        u'opposed to the latest version, the default).',
+        flag_values=fv)
+    # Local (non-API) flags: control whether the response payload is written
+    # to a local file instead of only printed.
+    flags.DEFINE_string(
+        'download_filename',
+        '',
+        'Filename to use for download.',
+        flag_values=fv)
+    # NOTE(review): the default here is the string 'False', not the bool
+    # False; gflags coerces it, but a boolean literal would be clearer.
+    flags.DEFINE_boolean(
+        'overwrite',
+        'False',
+        'If True, overwrite the existing file when downloading.',
+        flag_values=fv)
+
+  def RunWithArgs(self, sourceBucket, sourceObject, destinationBucket, destinationObject):
+    """Copies a source object to a destination object. Optionally overrides
+    metadata.
+
+    Args:
+      sourceBucket: Name of the bucket in which to find the source object.
+      sourceObject: Name of the source object. For information about how to
+        URL encode object names to be path safe, see Encoding URI Path Parts.
+      destinationBucket: Name of the bucket in which to store the new object.
+        Overrides the provided object metadata's bucket value, if any.For
+        information about how to URL encode object names to be path safe, see
+        Encoding URI Path Parts.
+      destinationObject: Name of the new object. Required when the object
+        metadata is not otherwise provided. Overrides the object metadata's
+        name value, if any.
+
+    Flags:
+      destinationPredefinedAcl: Apply a predefined set of access controls to
+        the destination object.
+      ifGenerationMatch: Makes the operation conditional on whether the
+        destination object's current generation matches the given value.
+      ifGenerationNotMatch: Makes the operation conditional on whether the
+        destination object's current generation does not match the given
+        value.
+      ifMetagenerationMatch: Makes the operation conditional on whether the
+        destination object's current metageneration matches the given value.
+      ifMetagenerationNotMatch: Makes the operation conditional on whether the
+        destination object's current metageneration does not match the given
+        value.
+      ifSourceGenerationMatch: Makes the operation conditional on whether the
+        source object's generation matches the given value.
+      ifSourceGenerationNotMatch: Makes the operation conditional on whether
+        the source object's generation does not match the given value.
+      ifSourceMetagenerationMatch: Makes the operation conditional on whether
+        the source object's current metageneration matches the given value.
+      ifSourceMetagenerationNotMatch: Makes the operation conditional on
+        whether the source object's current metageneration does not match the
+        given value.
+      object: A Object resource to be passed as the request body.
+      projection: Set of properties to return. Defaults to noAcl, unless the
+        object resource specifies the acl property, when it defaults to full.
+      sourceGeneration: If present, selects a specific revision of the source
+        object (as opposed to the latest version, the default).
+      download_filename: Filename to use for download.
+      overwrite: If True, overwrite the existing file when downloading.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectsCopyRequest(
+        sourceBucket=sourceBucket.decode('utf8'),
+        sourceObject=sourceObject.decode('utf8'),
+        destinationBucket=destinationBucket.decode('utf8'),
+        destinationObject=destinationObject.decode('utf8'),
+        )
+    # Copy into the request only the optional flags the user explicitly set.
+    if FLAGS['destinationPredefinedAcl'].present:
+      # Coerce the enum flag's string value into the request's enum type.
+      request.destinationPredefinedAcl = messages.StorageObjectsCopyRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
+    if FLAGS['ifGenerationMatch'].present:
+      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
+    if FLAGS['ifGenerationNotMatch'].present:
+      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['ifSourceGenerationMatch'].present:
+      request.ifSourceGenerationMatch = int(FLAGS.ifSourceGenerationMatch)
+    if FLAGS['ifSourceGenerationNotMatch'].present:
+      request.ifSourceGenerationNotMatch = int(FLAGS.ifSourceGenerationNotMatch)
+    if FLAGS['ifSourceMetagenerationMatch'].present:
+      request.ifSourceMetagenerationMatch = int(FLAGS.ifSourceMetagenerationMatch)
+    if FLAGS['ifSourceMetagenerationNotMatch'].present:
+      request.ifSourceMetagenerationNotMatch = int(FLAGS.ifSourceMetagenerationNotMatch)
+    if FLAGS['object'].present:
+      # The flag value is a JSON string; parse it into the message type.
+      request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageObjectsCopyRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    if FLAGS['sourceGeneration'].present:
+      request.sourceGeneration = int(FLAGS.sourceGeneration)
+    # Optionally stream the response body to a local file, with progress and
+    # completion callbacks printed to the console.
+    download = None
+    if FLAGS.download_filename:
+      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
+          progress_callback=apitools_base.DownloadProgressPrinter,
+          finish_callback=apitools_base.DownloadCompletePrinter)
+    result = client.objects.Copy(
+        request, global_params=global_params, download=download)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsDelete(apitools_base_cli.NewCmd):
+  """Command wrapping objects.Delete."""
+
+  usage = """objects_delete <bucket> <object>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsDelete, self).__init__(name, fv)
+    # Precondition flags are declared as strings rather than integers because
+    # the underlying generation/metageneration values are int64; RunWithArgs
+    # converts them with int() before building the request.
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, permanently deletes a specific revision of this object '
+        u'(as opposed to the latest version, the default).',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration does not match the given value.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object):
+    """Deletes an object and its metadata. Deletions are permanent if
+    versioning is not enabled for the bucket, or if the generation parameter
+    is used.
+
+    Args:
+      bucket: Name of the bucket in which the object resides.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      generation: If present, permanently deletes a specific revision of this
+        object (as opposed to the latest version, the default).
+      ifGenerationMatch: Makes the operation conditional on whether the
+        object's current generation matches the given value.
+      ifGenerationNotMatch: Makes the operation conditional on whether the
+        object's current generation does not match the given value.
+      ifMetagenerationMatch: Makes the operation conditional on whether the
+        object's current metageneration matches the given value.
+      ifMetagenerationNotMatch: Makes the operation conditional on whether the
+        object's current metageneration does not match the given value.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # Positional CLI args arrive as Python 2 byte strings; decode so the
+    # request message carries unicode.
+    request = messages.StorageObjectsDeleteRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        )
+    # Only copy a flag into the request when it was explicitly supplied, so
+    # unset fields keep the server-side defaults.
+    # NOTE(review): int() raises ValueError on non-numeric flag input --
+    # presumably acceptable for a generated CLI; confirm desired UX.
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['ifGenerationMatch'].present:
+      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
+    if FLAGS['ifGenerationNotMatch'].present:
+      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    result = client.objects.Delete(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsGet(apitools_base_cli.NewCmd):
+  """Command wrapping objects.Get."""
+
+  usage = """objects_get <bucket> <object>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsGet, self).__init__(name, fv)
+    # Generation/metageneration preconditions are string flags (int64 values);
+    # RunWithArgs converts them with int() before use.
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's generation "
+        u'matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's generation "
+        u'does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'download_filename',
+        '',
+        'Filename to use for download.',
+        flag_values=fv)
+    # NOTE(review): the default is the string 'False'; DEFINE_boolean parses
+    # string defaults, but a bool literal would be clearer -- generated code,
+    # left as-is.
+    flags.DEFINE_boolean(
+        'overwrite',
+        'False',
+        'If True, overwrite the existing file when downloading.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object):
+    """Retrieves an object or its metadata.
+
+    Args:
+      bucket: Name of the bucket in which the object resides.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+      ifGenerationMatch: Makes the operation conditional on whether the
+        object's generation matches the given value.
+      ifGenerationNotMatch: Makes the operation conditional on whether the
+        object's generation does not match the given value.
+      ifMetagenerationMatch: Makes the operation conditional on whether the
+        object's current metageneration matches the given value.
+      ifMetagenerationNotMatch: Makes the operation conditional on whether the
+        object's current metageneration does not match the given value.
+      projection: Set of properties to return. Defaults to noAcl.
+      download_filename: Filename to use for download.
+      overwrite: If True, overwrite the existing file when downloading.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # CLI args are Python 2 byte strings; decode to unicode for the message.
+    request = messages.StorageObjectsGetRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        )
+    # Only explicitly supplied flags are copied into the request so unset
+    # fields keep the server-side defaults.
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['ifGenerationMatch'].present:
+      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
+    if FLAGS['ifGenerationNotMatch'].present:
+      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageObjectsGetRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    # When --download_filename is given, stream the object contents to that
+    # file; otherwise only metadata is retrieved.
+    download = None
+    if FLAGS.download_filename:
+      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
+          progress_callback=apitools_base.DownloadProgressPrinter,
+          finish_callback=apitools_base.DownloadCompletePrinter)
+    result = client.objects.Get(
+        request, global_params=global_params, download=download)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsGetIamPolicy(apitools_base_cli.NewCmd):
+  """Command wrapping objects.GetIamPolicy."""
+
+  usage = """objects_getIamPolicy <bucket> <object>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsGetIamPolicy, self).__init__(name, fv)
+    # Declared as a string flag because generation is int64; converted with
+    # int() in RunWithArgs.
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object):
+    """Returns an IAM policy for the specified object.
+
+    Args:
+      bucket: Name of the bucket in which the object resides.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # CLI args are Python 2 byte strings; decode to unicode for the message.
+    request = messages.StorageObjectsGetIamPolicyRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        )
+    # Only set the field when the flag was explicitly supplied.
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    result = client.objects.GetIamPolicy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsInsert(apitools_base_cli.NewCmd):
+  """Command wrapping objects.Insert."""
+
+  usage = """objects_insert <bucket>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsInsert, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'contentEncoding',
+        None,
+        u'If set, sets the contentEncoding property of the final object to '
+        u'this value. Setting this parameter is equivalent to setting the '
+        u'contentEncoding metadata property. This can be useful when '
+        u'uploading an object with uploadType=media to indicate the encoding '
+        u'of the content being uploaded.',
+        flag_values=fv)
+    # Precondition flags are strings (int64 values); converted with int() in
+    # RunWithArgs.
+    flags.DEFINE_string(
+        'ifGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'name',
+        None,
+        u'Name of the object. Required when the object metadata is not '
+        u"otherwise provided. Overrides the object metadata's name value, if "
+        u'any. For information about how to URL encode object names to be '
+        u'path safe, see Encoding URI Path Parts.',
+        flag_values=fv)
+    # The --object flag takes a JSON-serialized Object resource (parsed with
+    # apitools_base.JsonToMessage in RunWithArgs).
+    flags.DEFINE_string(
+        'object',
+        None,
+        u'A Object resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of access controls to this object.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl, unless the object '
+        u'resource specifies the acl property, when it defaults to full.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'upload_filename',
+        '',
+        'Filename to use for upload.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'upload_mime_type',
+        '',
+        'MIME type to use for the upload. Only needed if the extension on '
+        '--upload_filename does not determine the correct (or any) MIME '
+        'type.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'download_filename',
+        '',
+        'Filename to use for download.',
+        flag_values=fv)
+    # NOTE(review): the default is the string 'False'; DEFINE_boolean parses
+    # string defaults, but a bool literal would be clearer -- generated code,
+    # left as-is.
+    flags.DEFINE_boolean(
+        'overwrite',
+        'False',
+        'If True, overwrite the existing file when downloading.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Stores a new object and metadata.
+
+    Args:
+      bucket: Name of the bucket in which to store the new object. Overrides
+        the provided object metadata's bucket value, if any.
+
+    Flags:
+      contentEncoding: If set, sets the contentEncoding property of the final
+        object to this value. Setting this parameter is equivalent to setting
+        the contentEncoding metadata property. This can be useful when
+        uploading an object with uploadType=media to indicate the encoding of
+        the content being uploaded.
+      ifGenerationMatch: Makes the operation conditional on whether the
+        object's current generation matches the given value.
+      ifGenerationNotMatch: Makes the operation conditional on whether the
+        object's current generation does not match the given value.
+      ifMetagenerationMatch: Makes the operation conditional on whether the
+        object's current metageneration matches the given value.
+      ifMetagenerationNotMatch: Makes the operation conditional on whether the
+        object's current metageneration does not match the given value.
+      name: Name of the object. Required when the object metadata is not
+        otherwise provided. Overrides the object metadata's name value, if
+        any. For information about how to URL encode object names to be path
+        safe, see Encoding URI Path Parts.
+      object: A Object resource to be passed as the request body.
+      predefinedAcl: Apply a predefined set of access controls to this object.
+      projection: Set of properties to return. Defaults to noAcl, unless the
+        object resource specifies the acl property, when it defaults to full.
+      upload_filename: Filename to use for upload.
+      upload_mime_type: MIME type to use for the upload. Only needed if the
+        extension on --upload_filename does not determine the correct (or any)
+        MIME type.
+      download_filename: Filename to use for download.
+      overwrite: If True, overwrite the existing file when downloading.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # CLI args are Python 2 byte strings; decode to unicode for the message.
+    request = messages.StorageObjectsInsertRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    # Only explicitly supplied flags are copied into the request so unset
+    # fields keep the server-side defaults.
+    if FLAGS['contentEncoding'].present:
+      request.contentEncoding = FLAGS.contentEncoding.decode('utf8')
+    if FLAGS['ifGenerationMatch'].present:
+      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
+    if FLAGS['ifGenerationNotMatch'].present:
+      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['name'].present:
+      request.name = FLAGS.name.decode('utf8')
+    if FLAGS['object'].present:
+      request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
+    if FLAGS['predefinedAcl'].present:
+      request.predefinedAcl = messages.StorageObjectsInsertRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageObjectsInsertRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    # Optional media upload: stream object contents from a local file.
+    upload = None
+    if FLAGS.upload_filename:
+      upload = apitools_base.Upload.FromFile(
+          FLAGS.upload_filename, FLAGS.upload_mime_type,
+          progress_callback=apitools_base.UploadProgressPrinter,
+          finish_callback=apitools_base.UploadCompletePrinter)
+    # Optional download of the stored object back to a local file.
+    download = None
+    if FLAGS.download_filename:
+      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
+          progress_callback=apitools_base.DownloadProgressPrinter,
+          finish_callback=apitools_base.DownloadCompletePrinter)
+    result = client.objects.Insert(
+        request, global_params=global_params, upload=upload, download=download)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsList(apitools_base_cli.NewCmd):
+  """Command wrapping objects.List."""
+
+  usage = """objects_list <bucket>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsList, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'delimiter',
+        None,
+        u'Returns results in a directory-like mode. items will contain only '
+        u'objects whose names, aside from the prefix, do not contain '
+        u'delimiter. Objects whose names, aside from the prefix, contain '
+        u'delimiter will have their name, truncated after the delimiter, '
+        u'returned in prefixes. Duplicate prefixes are omitted.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of items plus prefixes to return. As duplicate '
+        u'prefixes are omitted, fewer total results may be returned than '
+        u'requested. The default value of this parameter is 1,000 items.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'A previously-returned page token representing part of the larger '
+        u'set of results to view.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'prefix',
+        None,
+        u'Filter results to objects whose names begin with this prefix.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'versions',
+        None,
+        u'If true, lists all versions of an object as distinct results. The '
+        u'default is false. For more information, see Object Versioning.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Retrieves a list of objects matching the criteria.
+
+    Args:
+      bucket: Name of the bucket in which to look for objects.
+
+    Flags:
+      delimiter: Returns results in a directory-like mode. items will contain
+        only objects whose names, aside from the prefix, do not contain
+        delimiter. Objects whose names, aside from the prefix, contain
+        delimiter will have their name, truncated after the delimiter,
+        returned in prefixes. Duplicate prefixes are omitted.
+      maxResults: Maximum number of items plus prefixes to return. As
+        duplicate prefixes are omitted, fewer total results may be returned
+        than requested. The default value of this parameter is 1,000 items.
+      pageToken: A previously-returned page token representing part of the
+        larger set of results to view.
+      prefix: Filter results to objects whose names begin with this prefix.
+      projection: Set of properties to return. Defaults to noAcl.
+      versions: If true, lists all versions of an object as distinct results.
+        The default is false. For more information, see Object Versioning.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # CLI args are Python 2 byte strings; decode to unicode for the message.
+    request = messages.StorageObjectsListRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    # Only explicitly supplied flags are copied into the request so unset
+    # fields keep the server-side defaults (e.g. the 1,000-item page size).
+    if FLAGS['delimiter'].present:
+      request.delimiter = FLAGS.delimiter.decode('utf8')
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['prefix'].present:
+      request.prefix = FLAGS.prefix.decode('utf8')
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageObjectsListRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    if FLAGS['versions'].present:
+      request.versions = FLAGS.versions
+    result = client.objects.List(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsPatch(apitools_base_cli.NewCmd):
+  """Command wrapping objects.Patch."""
+
+  usage = """objects_patch <bucket> <object>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsPatch, self).__init__(name, fv)
+    # Precondition flags are strings (int64 values); converted with int() in
+    # RunWithArgs.
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration does not match the given value.',
+        flag_values=fv)
+    # Named objectResource (not object) because the object name itself is a
+    # positional argument of this command; takes a JSON-serialized Object.
+    flags.DEFINE_string(
+        'objectResource',
+        None,
+        u'A Object resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of access controls to this object.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to full.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object):
+    """Updates an object's metadata. This method supports patch semantics.
+
+    Args:
+      bucket: Name of the bucket in which the object resides.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+      ifGenerationMatch: Makes the operation conditional on whether the
+        object's current generation matches the given value.
+      ifGenerationNotMatch: Makes the operation conditional on whether the
+        object's current generation does not match the given value.
+      ifMetagenerationMatch: Makes the operation conditional on whether the
+        object's current metageneration matches the given value.
+      ifMetagenerationNotMatch: Makes the operation conditional on whether the
+        object's current metageneration does not match the given value.
+      objectResource: A Object resource to be passed as the request body.
+      predefinedAcl: Apply a predefined set of access controls to this object.
+      projection: Set of properties to return. Defaults to full.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    # CLI args are Python 2 byte strings; decode to unicode for the message.
+    request = messages.StorageObjectsPatchRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        )
+    # Only explicitly supplied flags are copied into the request so unset
+    # fields keep the server-side defaults.
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['ifGenerationMatch'].present:
+      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
+    if FLAGS['ifGenerationNotMatch'].present:
+      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['objectResource'].present:
+      request.objectResource = apitools_base.JsonToMessage(messages.Object, FLAGS.objectResource)
+    if FLAGS['predefinedAcl'].present:
+      request.predefinedAcl = messages.StorageObjectsPatchRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageObjectsPatchRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    result = client.objects.Patch(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsRewrite(apitools_base_cli.NewCmd):
+  """Command wrapping objects.Rewrite."""
+
+  usage = """objects_rewrite <sourceBucket> <sourceObject> <destinationBucket> <destinationObject>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsRewrite, self).__init__(name, fv)
+    flags.DEFINE_enum(
+        'destinationPredefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of access controls to the destination '
+        u'object.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the destination object's"
+        u' current generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the destination object's"
+        u' current generation does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the destination object's"
+        u' current metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the destination object's"
+        u' current metageneration does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifSourceGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the source object's "
+        u'generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifSourceGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the source object's "
+        u'generation does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifSourceMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the source object's "
+        u'current metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifSourceMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the source object's "
+        u'current metageneration does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'maxBytesRewrittenPerCall',
+        None,
+        u'The maximum number of bytes that will be rewritten per rewrite '
+        u"request. Most callers shouldn't need to specify this parameter - it"
+        u' is primarily in place to support testing. If specified the value '
+        u'must be an integral multiple of 1 MiB (1048576). Also, this only '
+        u'applies to requests where the source and destination span locations'
+        u' and/or storage classes. Finally, this value must not change across'
+        u" rewrite calls else you'll get an error that the rewriteToken is "
+        u'invalid.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'object',
+        None,
+        u'A Object resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl, unless the object '
+        u'resource specifies the acl property, when it defaults to full.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'rewriteToken',
+        None,
+        u'Include this field (from the previous rewrite response) on each '
+        u'rewrite request after the first one, until the rewrite response '
+        u"'done' flag is true. Calls that provide a rewriteToken can omit all"
+        u' other request fields, but if included those fields must match the '
+        u'values provided in the first rewrite request.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'sourceGeneration',
+        None,
+        u'If present, selects a specific revision of the source object (as '
+        u'opposed to the latest version, the default).',
+        flag_values=fv)
+
+  def RunWithArgs(self, sourceBucket, sourceObject, destinationBucket, destinationObject):
+    """Rewrites a source object to a destination object. Optionally overrides
+    metadata.
+
+    Args:
+      sourceBucket: Name of the bucket in which to find the source object.
+      sourceObject: Name of the source object. For information about how to
+        URL encode object names to be path safe, see Encoding URI Path Parts.
+      destinationBucket: Name of the bucket in which to store the new object.
+        Overrides the provided object metadata's bucket value, if any.
+      destinationObject: Name of the new object. Required when the object
+        metadata is not otherwise provided. Overrides the object metadata's
+        name value, if any. For information about how to URL encode object
+        names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      destinationPredefinedAcl: Apply a predefined set of access controls to
+        the destination object.
+      ifGenerationMatch: Makes the operation conditional on whether the
+        destination object's current generation matches the given value.
+      ifGenerationNotMatch: Makes the operation conditional on whether the
+        destination object's current generation does not match the given
+        value.
+      ifMetagenerationMatch: Makes the operation conditional on whether the
+        destination object's current metageneration matches the given value.
+      ifMetagenerationNotMatch: Makes the operation conditional on whether the
+        destination object's current metageneration does not match the given
+        value.
+      ifSourceGenerationMatch: Makes the operation conditional on whether the
+        source object's generation matches the given value.
+      ifSourceGenerationNotMatch: Makes the operation conditional on whether
+        the source object's generation does not match the given value.
+      ifSourceMetagenerationMatch: Makes the operation conditional on whether
+        the source object's current metageneration matches the given value.
+      ifSourceMetagenerationNotMatch: Makes the operation conditional on
+        whether the source object's current metageneration does not match the
+        given value.
+      maxBytesRewrittenPerCall: The maximum number of bytes that will be
+        rewritten per rewrite request. Most callers shouldn't need to specify
+        this parameter - it is primarily in place to support testing. If
+        specified the value must be an integral multiple of 1 MiB (1048576).
+        Also, this only applies to requests where the source and destination
+        span locations and/or storage classes. Finally, this value must not
+        change across rewrite calls else you'll get an error that the
+        rewriteToken is invalid.
+      object: A Object resource to be passed as the request body.
+      projection: Set of properties to return. Defaults to noAcl, unless the
+        object resource specifies the acl property, when it defaults to full.
+      rewriteToken: Include this field (from the previous rewrite response) on
+        each rewrite request after the first one, until the rewrite response
+        'done' flag is true. Calls that provide a rewriteToken can omit all
+        other request fields, but if included those fields must match the
+        values provided in the first rewrite request.
+      sourceGeneration: If present, selects a specific revision of the source
+        object (as opposed to the latest version, the default).
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectsRewriteRequest(
+        sourceBucket=sourceBucket.decode('utf8'),
+        sourceObject=sourceObject.decode('utf8'),
+        destinationBucket=destinationBucket.decode('utf8'),
+        destinationObject=destinationObject.decode('utf8'),
+        )
+    if FLAGS['destinationPredefinedAcl'].present:
+      request.destinationPredefinedAcl = messages.StorageObjectsRewriteRequest.DestinationPredefinedAclValueValuesEnum(FLAGS.destinationPredefinedAcl)
+    if FLAGS['ifGenerationMatch'].present:
+      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
+    if FLAGS['ifGenerationNotMatch'].present:
+      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['ifSourceGenerationMatch'].present:
+      request.ifSourceGenerationMatch = int(FLAGS.ifSourceGenerationMatch)
+    if FLAGS['ifSourceGenerationNotMatch'].present:
+      request.ifSourceGenerationNotMatch = int(FLAGS.ifSourceGenerationNotMatch)
+    if FLAGS['ifSourceMetagenerationMatch'].present:
+      request.ifSourceMetagenerationMatch = int(FLAGS.ifSourceMetagenerationMatch)
+    if FLAGS['ifSourceMetagenerationNotMatch'].present:
+      request.ifSourceMetagenerationNotMatch = int(FLAGS.ifSourceMetagenerationNotMatch)
+    if FLAGS['maxBytesRewrittenPerCall'].present:
+      request.maxBytesRewrittenPerCall = int(FLAGS.maxBytesRewrittenPerCall)
+    if FLAGS['object'].present:
+      request.object = apitools_base.JsonToMessage(messages.Object, FLAGS.object)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageObjectsRewriteRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    if FLAGS['rewriteToken'].present:
+      request.rewriteToken = FLAGS.rewriteToken.decode('utf8')
+    if FLAGS['sourceGeneration'].present:
+      request.sourceGeneration = int(FLAGS.sourceGeneration)
+    result = client.objects.Rewrite(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsSetIamPolicy(apitools_base_cli.NewCmd):
+  """Command wrapping objects.SetIamPolicy."""
+
+  usage = """objects_setIamPolicy <bucket> <object>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsSetIamPolicy, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'policy',
+        None,
+        u'A Policy resource to be passed as the request body.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object):
+    """Updates an IAM policy for the specified object.
+
+    Args:
+      bucket: Name of the bucket in which the object resides.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+      policy: A Policy resource to be passed as the request body.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectsSetIamPolicyRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['policy'].present:
+      request.policy = apitools_base.JsonToMessage(messages.Policy, FLAGS.policy)
+    result = client.objects.SetIamPolicy(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsTestIamPermissions(apitools_base_cli.NewCmd):
+  """Command wrapping objects.TestIamPermissions."""
+
+  usage = """objects_testIamPermissions <bucket> <object> <permissions>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsTestIamPermissions, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object, permissions):
+    """Tests a set of permissions on the given object to see which, if any,
+    are held by the caller.
+
+    Args:
+      bucket: Name of the bucket in which the object resides.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+      permissions: Permissions to test.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectsTestIamPermissionsRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        permissions=permissions.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    result = client.objects.TestIamPermissions(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsUpdate(apitools_base_cli.NewCmd):
+  """Command wrapping objects.Update."""
+
+  usage = """objects_update <bucket> <object>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsUpdate, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'generation',
+        None,
+        u'If present, selects a specific revision of this object (as opposed '
+        u'to the latest version, the default).',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifGenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'generation does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration matches the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'ifMetagenerationNotMatch',
+        None,
+        u"Makes the operation conditional on whether the object's current "
+        u'metageneration does not match the given value.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'objectResource',
+        None,
+        u'A Object resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'predefinedAcl',
+        u'authenticatedRead',
+        [u'authenticatedRead', u'bucketOwnerFullControl', u'bucketOwnerRead', u'private', u'projectPrivate', u'publicRead'],
+        u'Apply a predefined set of access controls to this object.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to full.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'download_filename',
+        '',
+        'Filename to use for download.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'overwrite',
+        'False',
+        'If True, overwrite the existing file when downloading.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket, object):
+    """Updates an object's metadata.
+
+    Args:
+      bucket: Name of the bucket in which the object resides.
+      object: Name of the object. For information about how to URL encode
+        object names to be path safe, see Encoding URI Path Parts.
+
+    Flags:
+      generation: If present, selects a specific revision of this object (as
+        opposed to the latest version, the default).
+      ifGenerationMatch: Makes the operation conditional on whether the
+        object's current generation matches the given value.
+      ifGenerationNotMatch: Makes the operation conditional on whether the
+        object's current generation does not match the given value.
+      ifMetagenerationMatch: Makes the operation conditional on whether the
+        object's current metageneration matches the given value.
+      ifMetagenerationNotMatch: Makes the operation conditional on whether the
+        object's current metageneration does not match the given value.
+      objectResource: A Object resource to be passed as the request body.
+      predefinedAcl: Apply a predefined set of access controls to this object.
+      projection: Set of properties to return. Defaults to full.
+      download_filename: Filename to use for download.
+      overwrite: If True, overwrite the existing file when downloading.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectsUpdateRequest(
+        bucket=bucket.decode('utf8'),
+        object=object.decode('utf8'),
+        )
+    if FLAGS['generation'].present:
+      request.generation = int(FLAGS.generation)
+    if FLAGS['ifGenerationMatch'].present:
+      request.ifGenerationMatch = int(FLAGS.ifGenerationMatch)
+    if FLAGS['ifGenerationNotMatch'].present:
+      request.ifGenerationNotMatch = int(FLAGS.ifGenerationNotMatch)
+    if FLAGS['ifMetagenerationMatch'].present:
+      request.ifMetagenerationMatch = int(FLAGS.ifMetagenerationMatch)
+    if FLAGS['ifMetagenerationNotMatch'].present:
+      request.ifMetagenerationNotMatch = int(FLAGS.ifMetagenerationNotMatch)
+    if FLAGS['objectResource'].present:
+      request.objectResource = apitools_base.JsonToMessage(messages.Object, FLAGS.objectResource)
+    if FLAGS['predefinedAcl'].present:
+      request.predefinedAcl = messages.StorageObjectsUpdateRequest.PredefinedAclValueValuesEnum(FLAGS.predefinedAcl)
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageObjectsUpdateRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    download = None
+    if FLAGS.download_filename:
+      download = apitools_base.Download.FromFile(FLAGS.download_filename, overwrite=FLAGS.overwrite,
+          progress_callback=apitools_base.DownloadProgressPrinter,
+          finish_callback=apitools_base.DownloadCompletePrinter)
+    result = client.objects.Update(
+        request, global_params=global_params, download=download)
+    print apitools_base_cli.FormatOutput(result)
+
+
+class ObjectsWatchAll(apitools_base_cli.NewCmd):
+  """Command wrapping objects.WatchAll."""
+
+  usage = """objects_watchAll <bucket>"""
+
+  def __init__(self, name, fv):
+    super(ObjectsWatchAll, self).__init__(name, fv)
+    flags.DEFINE_string(
+        'channel',
+        None,
+        u'A Channel resource to be passed as the request body.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'delimiter',
+        None,
+        u'Returns results in a directory-like mode. items will contain only '
+        u'objects whose names, aside from the prefix, do not contain '
+        u'delimiter. Objects whose names, aside from the prefix, contain '
+        u'delimiter will have their name, truncated after the delimiter, '
+        u'returned in prefixes. Duplicate prefixes are omitted.',
+        flag_values=fv)
+    flags.DEFINE_integer(
+        'maxResults',
+        None,
+        u'Maximum number of items plus prefixes to return. As duplicate '
+        u'prefixes are omitted, fewer total results may be returned than '
+        u'requested. The default value of this parameter is 1,000 items.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'pageToken',
+        None,
+        u'A previously-returned page token representing part of the larger '
+        u'set of results to view.',
+        flag_values=fv)
+    flags.DEFINE_string(
+        'prefix',
+        None,
+        u'Filter results to objects whose names begin with this prefix.',
+        flag_values=fv)
+    flags.DEFINE_enum(
+        'projection',
+        u'full',
+        [u'full', u'noAcl'],
+        u'Set of properties to return. Defaults to noAcl.',
+        flag_values=fv)
+    flags.DEFINE_boolean(
+        'versions',
+        None,
+        u'If true, lists all versions of an object as distinct results. The '
+        u'default is false. For more information, see Object Versioning.',
+        flag_values=fv)
+
+  def RunWithArgs(self, bucket):
+    """Watch for changes on all objects in a bucket.
+
+    Args:
+      bucket: Name of the bucket in which to look for objects.
+
+    Flags:
+      channel: A Channel resource to be passed as the request body.
+      delimiter: Returns results in a directory-like mode. items will contain
+        only objects whose names, aside from the prefix, do not contain
+        delimiter. Objects whose names, aside from the prefix, contain
+        delimiter will have their name, truncated after the delimiter,
+        returned in prefixes. Duplicate prefixes are omitted.
+      maxResults: Maximum number of items plus prefixes to return. As
+        duplicate prefixes are omitted, fewer total results may be returned
+        than requested. The default value of this parameter is 1,000 items.
+      pageToken: A previously-returned page token representing part of the
+        larger set of results to view.
+      prefix: Filter results to objects whose names begin with this prefix.
+      projection: Set of properties to return. Defaults to noAcl.
+      versions: If true, lists all versions of an object as distinct results.
+        The default is false. For more information, see Object Versioning.
+    """
+    client = GetClientFromFlags()
+    global_params = GetGlobalParamsFromFlags()
+    request = messages.StorageObjectsWatchAllRequest(
+        bucket=bucket.decode('utf8'),
+        )
+    if FLAGS['channel'].present:
+      request.channel = apitools_base.JsonToMessage(messages.Channel, FLAGS.channel)
+    if FLAGS['delimiter'].present:
+      request.delimiter = FLAGS.delimiter.decode('utf8')
+    if FLAGS['maxResults'].present:
+      request.maxResults = FLAGS.maxResults
+    if FLAGS['pageToken'].present:
+      request.pageToken = FLAGS.pageToken.decode('utf8')
+    if FLAGS['prefix'].present:
+      request.prefix = FLAGS.prefix.decode('utf8')
+    if FLAGS['projection'].present:
+      request.projection = messages.StorageObjectsWatchAllRequest.ProjectionValueValuesEnum(FLAGS.projection)
+    if FLAGS['versions'].present:
+      request.versions = FLAGS.versions
+    result = client.objects.WatchAll(
+        request, global_params=global_params)
+    print apitools_base_cli.FormatOutput(result)
+
+
def main(_):
  """Registers every generated command with appcommands.

  Registration order matches the original generated listing so the
  --help output is unchanged.
  """
  command_table = [
      ('pyshell', PyShell),
      ('bucketAccessControls_delete', BucketAccessControlsDelete),
      ('bucketAccessControls_get', BucketAccessControlsGet),
      ('bucketAccessControls_insert', BucketAccessControlsInsert),
      ('bucketAccessControls_list', BucketAccessControlsList),
      ('bucketAccessControls_patch', BucketAccessControlsPatch),
      ('bucketAccessControls_update', BucketAccessControlsUpdate),
      ('buckets_delete', BucketsDelete),
      ('buckets_get', BucketsGet),
      ('buckets_getIamPolicy', BucketsGetIamPolicy),
      ('buckets_insert', BucketsInsert),
      ('buckets_list', BucketsList),
      ('buckets_patch', BucketsPatch),
      ('buckets_setIamPolicy', BucketsSetIamPolicy),
      ('buckets_testIamPermissions', BucketsTestIamPermissions),
      ('buckets_update', BucketsUpdate),
      ('channels_stop', ChannelsStop),
      ('defaultObjectAccessControls_delete', DefaultObjectAccessControlsDelete),
      ('defaultObjectAccessControls_get', DefaultObjectAccessControlsGet),
      ('defaultObjectAccessControls_insert', DefaultObjectAccessControlsInsert),
      ('defaultObjectAccessControls_list', DefaultObjectAccessControlsList),
      ('defaultObjectAccessControls_patch', DefaultObjectAccessControlsPatch),
      ('defaultObjectAccessControls_update', DefaultObjectAccessControlsUpdate),
      ('notifications_delete', NotificationsDelete),
      ('notifications_get', NotificationsGet),
      ('notifications_insert', NotificationsInsert),
      ('notifications_list', NotificationsList),
      ('objectAccessControls_delete', ObjectAccessControlsDelete),
      ('objectAccessControls_get', ObjectAccessControlsGet),
      ('objectAccessControls_insert', ObjectAccessControlsInsert),
      ('objectAccessControls_list', ObjectAccessControlsList),
      ('objectAccessControls_patch', ObjectAccessControlsPatch),
      ('objectAccessControls_update', ObjectAccessControlsUpdate),
      ('objects_compose', ObjectsCompose),
      ('objects_copy', ObjectsCopy),
      ('objects_delete', ObjectsDelete),
      ('objects_get', ObjectsGet),
      ('objects_getIamPolicy', ObjectsGetIamPolicy),
      ('objects_insert', ObjectsInsert),
      ('objects_list', ObjectsList),
      ('objects_patch', ObjectsPatch),
      ('objects_rewrite', ObjectsRewrite),
      ('objects_setIamPolicy', ObjectsSetIamPolicy),
      ('objects_testIamPermissions', ObjectsTestIamPermissions),
      ('objects_update', ObjectsUpdate),
      ('objects_watchAll', ObjectsWatchAll),
  ]
  for command_name, command_class in command_table:
    appcommands.AddCmd(command_name, command_class)

  apitools_base_cli.SetupLogger()
  # Older appcommands releases lack SetDefaultCommand; guard accordingly.
  if hasattr(appcommands, 'SetDefaultCommand'):
    appcommands.SetDefaultCommand('pyshell')
+
+
# Re-export of the shared apitools CLI entry point; presumably referenced
# as a console-script entry point -- confirm against setup configuration.
run_main = apitools_base_cli.run_main

if __name__ == '__main__':
  appcommands.Run()
diff --git a/samples/storage_sample/storage_v1/storage_v1_client.py b/samples/storage_sample/storage_v1/storage_v1_client.py
new file mode 100644
index 0000000..74dfdc4
--- /dev/null
+++ b/samples/storage_sample/storage_v1/storage_v1_client.py
@@ -0,0 +1,1310 @@
+"""Generated client library for storage version v1."""
+# NOTE: This file is autogenerated and should not be edited by hand.
+from apitools.base.py import base_api
+from samples.storage_sample.storage_v1 import storage_v1_messages as messages
+
+
class StorageV1(base_api.BaseApiClient):
  """Generated client library for service storage version v1."""

  MESSAGES_MODULE = messages
  BASE_URL = u'https://www.googleapis.com/storage/v1/'

  _PACKAGE = u'storage'
  # OAuth2 scopes this client may request.
  _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/cloud-platform.read-only', u'https://www.googleapis.com/auth/devstorage.full_control', u'https://www.googleapis.com/auth/devstorage.read_only', u'https://www.googleapis.com/auth/devstorage.read_write']
  _VERSION = u'v1'
  # NOTE(review): hard-coded installed-app OAuth2 client credentials.
  # These look like apitools' shared sample credentials rather than a
  # private secret, but confirm before using outside of sample code.
  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _CLIENT_CLASS_NAME = u'StorageV1'
  _URL_VERSION = u'v1'
  _API_KEY = None
+
  def __init__(self, url='', credentials=None,
               get_credentials=True, http=None, model=None,
               log_request=False, log_response=False,
               credentials_args=None, default_global_params=None,
               additional_http_headers=None):
    """Create a new storage handle.

    All keyword arguments are forwarded unchanged to
    base_api.BaseApiClient.__init__; `url` falls back to BASE_URL when
    empty or not given.
    """
    url = url or self.BASE_URL
    super(StorageV1, self).__init__(
        url, credentials=credentials,
        get_credentials=get_credentials, http=http, model=model,
        log_request=log_request, log_response=log_response,
        credentials_args=credentials_args,
        default_global_params=default_global_params,
        additional_http_headers=additional_http_headers)
    # One service stub per API resource collection exposed by this client.
    self.bucketAccessControls = self.BucketAccessControlsService(self)
    self.buckets = self.BucketsService(self)
    self.channels = self.ChannelsService(self)
    self.defaultObjectAccessControls = self.DefaultObjectAccessControlsService(self)
    self.notifications = self.NotificationsService(self)
    self.objectAccessControls = self.ObjectAccessControlsService(self)
    self.objects = self.ObjectsService(self)
+
  class BucketAccessControlsService(base_api.BaseApiService):
    """Service class for the bucketAccessControls resource.

    Each public method delegates to base_api._RunMethod; the adjacent
    method_config lambdas hold the declarative wire configuration
    (HTTP verb, URL template, request/response message names).
    """

    _NAME = u'bucketAccessControls'

    def __init__(self, client):
      super(StorageV1.BucketAccessControlsService, self).__init__(client)
      # ACL methods carry no media, so there are no upload configs.
      self._upload_configs = {
          }

    def Delete(self, request, global_params=None):
      """Permanently deletes the ACL entry for the specified entity on the specified bucket.

      Args:
        request: (StorageBucketAccessControlsDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (StorageBucketAccessControlsDeleteResponse) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    # DELETE b/{bucket}/acl/{entity}; request_field='' means no HTTP body.
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'storage.bucketAccessControls.delete',
        ordered_params=[u'bucket', u'entity'],
        path_params=[u'bucket', u'entity'],
        query_params=[],
        relative_path=u'b/{bucket}/acl/{entity}',
        request_field='',
        request_type_name=u'StorageBucketAccessControlsDeleteRequest',
        response_type_name=u'StorageBucketAccessControlsDeleteResponse',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      """Returns the ACL entry for the specified entity on the specified bucket.

      Args:
        request: (StorageBucketAccessControlsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (BucketAccessControl) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # GET b/{bucket}/acl/{entity}; no request body.
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'storage.bucketAccessControls.get',
        ordered_params=[u'bucket', u'entity'],
        path_params=[u'bucket', u'entity'],
        query_params=[],
        relative_path=u'b/{bucket}/acl/{entity}',
        request_field='',
        request_type_name=u'StorageBucketAccessControlsGetRequest',
        response_type_name=u'BucketAccessControl',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
      """Creates a new ACL entry on the specified bucket.

      Args:
        request: (BucketAccessControl) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (BucketAccessControl) The response message.
      """
      config = self.GetMethodConfig('Insert')
      return self._RunMethod(
          config, request, global_params=global_params)

    # POST b/{bucket}/acl; '<request>' means the whole request message
    # is serialized as the HTTP body.
    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'storage.bucketAccessControls.insert',
        ordered_params=[u'bucket'],
        path_params=[u'bucket'],
        query_params=[],
        relative_path=u'b/{bucket}/acl',
        request_field='<request>',
        request_type_name=u'BucketAccessControl',
        response_type_name=u'BucketAccessControl',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      """Retrieves ACL entries on the specified bucket.

      Args:
        request: (StorageBucketAccessControlsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (BucketAccessControls) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # GET b/{bucket}/acl; no request body.
    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'storage.bucketAccessControls.list',
        ordered_params=[u'bucket'],
        path_params=[u'bucket'],
        query_params=[],
        relative_path=u'b/{bucket}/acl',
        request_field='',
        request_type_name=u'StorageBucketAccessControlsListRequest',
        response_type_name=u'BucketAccessControls',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      """Updates an ACL entry on the specified bucket. This method supports patch semantics.

      Args:
        request: (BucketAccessControl) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (BucketAccessControl) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    # PATCH b/{bucket}/acl/{entity}; request message is the HTTP body.
    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PATCH',
        method_id=u'storage.bucketAccessControls.patch',
        ordered_params=[u'bucket', u'entity'],
        path_params=[u'bucket', u'entity'],
        query_params=[],
        relative_path=u'b/{bucket}/acl/{entity}',
        request_field='<request>',
        request_type_name=u'BucketAccessControl',
        response_type_name=u'BucketAccessControl',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
      """Updates an ACL entry on the specified bucket.

      Args:
        request: (BucketAccessControl) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (BucketAccessControl) The response message.
      """
      config = self.GetMethodConfig('Update')
      return self._RunMethod(
          config, request, global_params=global_params)

    # PUT b/{bucket}/acl/{entity}; request message is the HTTP body.
    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PUT',
        method_id=u'storage.bucketAccessControls.update',
        ordered_params=[u'bucket', u'entity'],
        path_params=[u'bucket', u'entity'],
        query_params=[],
        relative_path=u'b/{bucket}/acl/{entity}',
        request_field='<request>',
        request_type_name=u'BucketAccessControl',
        response_type_name=u'BucketAccessControl',
        supports_download=False,
    )
+
  class BucketsService(base_api.BaseApiService):
    """Service class for the buckets resource.

    Each public method delegates to base_api._RunMethod; the adjacent
    method_config lambdas hold the declarative wire configuration.
    """

    _NAME = u'buckets'

    def __init__(self, client):
      super(StorageV1.BucketsService, self).__init__(client)
      # Bucket methods carry no media, so there are no upload configs.
      self._upload_configs = {
          }
+
    def Delete(self, request, global_params=None):
      """Permanently deletes an empty bucket.

      Args:
        request: (StorageBucketsDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (StorageBucketsDeleteResponse) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    # DELETE b/{bucket} with optional ifMetageneration(Not)Match
    # precondition query parameters; no request body.
    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'storage.buckets.delete',
        ordered_params=[u'bucket'],
        path_params=[u'bucket'],
        query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch'],
        relative_path=u'b/{bucket}',
        request_field='',
        request_type_name=u'StorageBucketsDeleteRequest',
        response_type_name=u'StorageBucketsDeleteResponse',
        supports_download=False,
    )
+
    def Get(self, request, global_params=None):
      """Returns metadata for the specified bucket.

      Args:
        request: (StorageBucketsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Bucket) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    # GET b/{bucket}; preconditions and projection go in the query string.
    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'storage.buckets.get',
        ordered_params=[u'bucket'],
        path_params=[u'bucket'],
        query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'projection'],
        relative_path=u'b/{bucket}',
        request_field='',
        request_type_name=u'StorageBucketsGetRequest',
        response_type_name=u'Bucket',
        supports_download=False,
    )
+
    def GetIamPolicy(self, request, global_params=None):
      """Returns an IAM policy for the specified bucket.

      Args:
        request: (StorageBucketsGetIamPolicyRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Policy) The response message.
      """
      config = self.GetMethodConfig('GetIamPolicy')
      return self._RunMethod(
          config, request, global_params=global_params)

    # GET b/{bucket}/iam; no request body.
    GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'storage.buckets.getIamPolicy',
        ordered_params=[u'bucket'],
        path_params=[u'bucket'],
        query_params=[],
        relative_path=u'b/{bucket}/iam',
        request_field='',
        request_type_name=u'StorageBucketsGetIamPolicyRequest',
        response_type_name=u'Policy',
        supports_download=False,
    )
+
    def Insert(self, request, global_params=None):
      """Creates a new bucket.

      Args:
        request: (StorageBucketsInsertRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Bucket) The response message.
      """
      config = self.GetMethodConfig('Insert')
      return self._RunMethod(
          config, request, global_params=global_params)

    # POST b; the request's `bucket` field is serialized as the HTTP body,
    # while project/acl/projection options travel in the query string.
    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'storage.buckets.insert',
        ordered_params=[u'project'],
        path_params=[],
        query_params=[u'predefinedAcl', u'predefinedDefaultObjectAcl', u'project', u'projection'],
        relative_path=u'b',
        request_field=u'bucket',
        request_type_name=u'StorageBucketsInsertRequest',
        response_type_name=u'Bucket',
        supports_download=False,
    )
+
    def List(self, request, global_params=None):
      """Retrieves a list of buckets for a given project.

      Args:
        request: (StorageBucketsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Buckets) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # GET b; paging (maxResults/pageToken) and filtering options are
    # query parameters; no request body.
    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'storage.buckets.list',
        ordered_params=[u'project'],
        path_params=[],
        query_params=[u'maxResults', u'pageToken', u'prefix', u'project', u'projection'],
        relative_path=u'b',
        request_field='',
        request_type_name=u'StorageBucketsListRequest',
        response_type_name=u'Buckets',
        supports_download=False,
    )
+
+    def Patch(self, request, global_params=None):
+      """Updates a bucket. This method supports patch semantics.
+
+      Args:
+        request: (StorageBucketsPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Bucket) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'storage.buckets.patch',
+        ordered_params=[u'bucket'],
+        path_params=[u'bucket'],
+        query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'predefinedAcl', u'predefinedDefaultObjectAcl', u'projection'],
+        relative_path=u'b/{bucket}',
+        request_field=u'bucketResource',
+        request_type_name=u'StorageBucketsPatchRequest',
+        response_type_name=u'Bucket',
+        supports_download=False,
+    )
+
+    def SetIamPolicy(self, request, global_params=None):
+      """Updates an IAM policy for the specified bucket.
+
+      Args:
+        request: (StorageBucketsSetIamPolicyRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Policy) The response message.
+      """
+      config = self.GetMethodConfig('SetIamPolicy')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'storage.buckets.setIamPolicy',
+        ordered_params=[u'bucket'],
+        path_params=[u'bucket'],
+        query_params=[],
+        relative_path=u'b/{bucket}/iam',
+        request_field=u'policy',
+        request_type_name=u'StorageBucketsSetIamPolicyRequest',
+        response_type_name=u'Policy',
+        supports_download=False,
+    )
+
+    def TestIamPermissions(self, request, global_params=None):
+      """Tests a set of permissions on the given bucket to see which, if any, are held by the caller.
+
+      Args:
+        request: (StorageBucketsTestIamPermissionsRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TestIamPermissionsResponse) The response message.
+      """
+      config = self.GetMethodConfig('TestIamPermissions')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.buckets.testIamPermissions',
+        ordered_params=[u'bucket', u'permissions'],
+        path_params=[u'bucket'],
+        query_params=[u'permissions'],
+        relative_path=u'b/{bucket}/iam/testPermissions',
+        request_field='',
+        request_type_name=u'StorageBucketsTestIamPermissionsRequest',
+        response_type_name=u'TestIamPermissionsResponse',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates a bucket.
+
+      Args:
+        request: (StorageBucketsUpdateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Bucket) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'storage.buckets.update',
+        ordered_params=[u'bucket'],
+        path_params=[u'bucket'],
+        query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'predefinedAcl', u'predefinedDefaultObjectAcl', u'projection'],
+        relative_path=u'b/{bucket}',
+        request_field=u'bucketResource',
+        request_type_name=u'StorageBucketsUpdateRequest',
+        response_type_name=u'Bucket',
+        supports_download=False,
+    )
+
+  class ChannelsService(base_api.BaseApiService):
+    """Service class for the channels resource."""
+
+    _NAME = u'channels'
+
+    def __init__(self, client):
+      super(StorageV1.ChannelsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Stop(self, request, global_params=None):
+      """Stop watching resources through this channel.
+
+      Args:
+        request: (Channel) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StorageChannelsStopResponse) The response message.
+      """
+      config = self.GetMethodConfig('Stop')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Stop.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.channels.stop',
+        ordered_params=[],
+        path_params=[],
+        query_params=[],
+        relative_path=u'channels/stop',
+        request_field='<request>',
+        request_type_name=u'Channel',
+        response_type_name=u'StorageChannelsStopResponse',
+        supports_download=False,
+    )
+
+  class DefaultObjectAccessControlsService(base_api.BaseApiService):
+    """Service class for the defaultObjectAccessControls resource."""
+
+    _NAME = u'defaultObjectAccessControls'
+
+    def __init__(self, client):
+      super(StorageV1.DefaultObjectAccessControlsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Permanently deletes the default object ACL entry for the specified entity on the specified bucket.
+
+      Args:
+        request: (StorageDefaultObjectAccessControlsDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StorageDefaultObjectAccessControlsDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'storage.defaultObjectAccessControls.delete',
+        ordered_params=[u'bucket', u'entity'],
+        path_params=[u'bucket', u'entity'],
+        query_params=[],
+        relative_path=u'b/{bucket}/defaultObjectAcl/{entity}',
+        request_field='',
+        request_type_name=u'StorageDefaultObjectAccessControlsDeleteRequest',
+        response_type_name=u'StorageDefaultObjectAccessControlsDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Returns the default object ACL entry for the specified entity on the specified bucket.
+
+      Args:
+        request: (StorageDefaultObjectAccessControlsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControl) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.defaultObjectAccessControls.get',
+        ordered_params=[u'bucket', u'entity'],
+        path_params=[u'bucket', u'entity'],
+        query_params=[],
+        relative_path=u'b/{bucket}/defaultObjectAcl/{entity}',
+        request_field='',
+        request_type_name=u'StorageDefaultObjectAccessControlsGetRequest',
+        response_type_name=u'ObjectAccessControl',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Creates a new default object ACL entry on the specified bucket.
+
+      Args:
+        request: (ObjectAccessControl) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControl) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.defaultObjectAccessControls.insert',
+        ordered_params=[u'bucket'],
+        path_params=[u'bucket'],
+        query_params=[],
+        relative_path=u'b/{bucket}/defaultObjectAcl',
+        request_field='<request>',
+        request_type_name=u'ObjectAccessControl',
+        response_type_name=u'ObjectAccessControl',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves default object ACL entries on the specified bucket.
+
+      Args:
+        request: (StorageDefaultObjectAccessControlsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControls) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.defaultObjectAccessControls.list',
+        ordered_params=[u'bucket'],
+        path_params=[u'bucket'],
+        query_params=[u'ifMetagenerationMatch', u'ifMetagenerationNotMatch'],
+        relative_path=u'b/{bucket}/defaultObjectAcl',
+        request_field='',
+        request_type_name=u'StorageDefaultObjectAccessControlsListRequest',
+        response_type_name=u'ObjectAccessControls',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates a default object ACL entry on the specified bucket. This method supports patch semantics.
+
+      Args:
+        request: (ObjectAccessControl) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControl) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'storage.defaultObjectAccessControls.patch',
+        ordered_params=[u'bucket', u'entity'],
+        path_params=[u'bucket', u'entity'],
+        query_params=[],
+        relative_path=u'b/{bucket}/defaultObjectAcl/{entity}',
+        request_field='<request>',
+        request_type_name=u'ObjectAccessControl',
+        response_type_name=u'ObjectAccessControl',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates a default object ACL entry on the specified bucket.
+
+      Args:
+        request: (ObjectAccessControl) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControl) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'storage.defaultObjectAccessControls.update',
+        ordered_params=[u'bucket', u'entity'],
+        path_params=[u'bucket', u'entity'],
+        query_params=[],
+        relative_path=u'b/{bucket}/defaultObjectAcl/{entity}',
+        request_field='<request>',
+        request_type_name=u'ObjectAccessControl',
+        response_type_name=u'ObjectAccessControl',
+        supports_download=False,
+    )
+
+  class NotificationsService(base_api.BaseApiService):
+    """Service class for the notifications resource."""
+
+    _NAME = u'notifications'
+
+    def __init__(self, client):
+      super(StorageV1.NotificationsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Permanently deletes a notification subscription.
+
+      Args:
+        request: (StorageNotificationsDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StorageNotificationsDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'storage.notifications.delete',
+        ordered_params=[u'notification'],
+        path_params=[u'notification'],
+        query_params=[],
+        relative_path=u'notifications/{notification}',
+        request_field='',
+        request_type_name=u'StorageNotificationsDeleteRequest',
+        response_type_name=u'StorageNotificationsDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """View a notification configuration.
+
+      Args:
+        request: (StorageNotificationsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Notification) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.notifications.get',
+        ordered_params=[u'notification'],
+        path_params=[u'notification'],
+        query_params=[],
+        relative_path=u'notifications/{notification}',
+        request_field='',
+        request_type_name=u'StorageNotificationsGetRequest',
+        response_type_name=u'Notification',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Creates a notification subscription for a given bucket.
+
+      Args:
+        request: (Notification) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Notification) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.notifications.insert',
+        ordered_params=[],
+        path_params=[],
+        query_params=[],
+        relative_path=u'notifications',
+        request_field='<request>',
+        request_type_name=u'Notification',
+        response_type_name=u'Notification',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves a list of notification subscriptions for a given bucket.
+
+      Args:
+        request: (StorageNotificationsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Notifications) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.notifications.list',
+        ordered_params=[u'bucket'],
+        path_params=[],
+        query_params=[u'bucket'],
+        relative_path=u'notifications',
+        request_field='',
+        request_type_name=u'StorageNotificationsListRequest',
+        response_type_name=u'Notifications',
+        supports_download=False,
+    )
+
+  class ObjectAccessControlsService(base_api.BaseApiService):
+    """Service class for the objectAccessControls resource."""
+
+    _NAME = u'objectAccessControls'
+
+    def __init__(self, client):
+      super(StorageV1.ObjectAccessControlsService, self).__init__(client)
+      self._upload_configs = {
+          }
+
+    def Delete(self, request, global_params=None):
+      """Permanently deletes the ACL entry for the specified entity on the specified object.
+
+      Args:
+        request: (StorageObjectAccessControlsDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StorageObjectAccessControlsDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'storage.objectAccessControls.delete',
+        ordered_params=[u'bucket', u'object', u'entity'],
+        path_params=[u'bucket', u'entity', u'object'],
+        query_params=[u'generation'],
+        relative_path=u'b/{bucket}/o/{object}/acl/{entity}',
+        request_field='',
+        request_type_name=u'StorageObjectAccessControlsDeleteRequest',
+        response_type_name=u'StorageObjectAccessControlsDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None):
+      """Returns the ACL entry for the specified entity on the specified object.
+
+      Args:
+        request: (StorageObjectAccessControlsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControl) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.objectAccessControls.get',
+        ordered_params=[u'bucket', u'object', u'entity'],
+        path_params=[u'bucket', u'entity', u'object'],
+        query_params=[u'generation'],
+        relative_path=u'b/{bucket}/o/{object}/acl/{entity}',
+        request_field='',
+        request_type_name=u'StorageObjectAccessControlsGetRequest',
+        response_type_name=u'ObjectAccessControl',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None):
+      """Creates a new ACL entry on the specified object.
+
+      Args:
+        request: (StorageObjectAccessControlsInsertRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControl) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.objectAccessControls.insert',
+        ordered_params=[u'bucket', u'object'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation'],
+        relative_path=u'b/{bucket}/o/{object}/acl',
+        request_field=u'objectAccessControl',
+        request_type_name=u'StorageObjectAccessControlsInsertRequest',
+        response_type_name=u'ObjectAccessControl',
+        supports_download=False,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves ACL entries on the specified object.
+
+      Args:
+        request: (StorageObjectAccessControlsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControls) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.objectAccessControls.list',
+        ordered_params=[u'bucket', u'object'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation'],
+        relative_path=u'b/{bucket}/o/{object}/acl',
+        request_field='',
+        request_type_name=u'StorageObjectAccessControlsListRequest',
+        response_type_name=u'ObjectAccessControls',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates an ACL entry on the specified object. This method supports patch semantics.
+
+      Args:
+        request: (StorageObjectAccessControlsPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControl) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'storage.objectAccessControls.patch',
+        ordered_params=[u'bucket', u'object', u'entity'],
+        path_params=[u'bucket', u'entity', u'object'],
+        query_params=[u'generation'],
+        relative_path=u'b/{bucket}/o/{object}/acl/{entity}',
+        request_field=u'objectAccessControl',
+        request_type_name=u'StorageObjectAccessControlsPatchRequest',
+        response_type_name=u'ObjectAccessControl',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None):
+      """Updates an ACL entry on the specified object.
+
+      Args:
+        request: (StorageObjectAccessControlsUpdateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (ObjectAccessControl) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'storage.objectAccessControls.update',
+        ordered_params=[u'bucket', u'object', u'entity'],
+        path_params=[u'bucket', u'entity', u'object'],
+        query_params=[u'generation'],
+        relative_path=u'b/{bucket}/o/{object}/acl/{entity}',
+        request_field=u'objectAccessControl',
+        request_type_name=u'StorageObjectAccessControlsUpdateRequest',
+        response_type_name=u'ObjectAccessControl',
+        supports_download=False,
+    )
+
+  class ObjectsService(base_api.BaseApiService):
+    """Service class for the objects resource."""
+
+    _NAME = u'objects'
+
+    def __init__(self, client):
+      super(StorageV1.ObjectsService, self).__init__(client)
+      self._upload_configs = {
+          'Insert': base_api.ApiUploadInfo(
+              accept=['*/*'],
+              max_size=None,
+              resumable_multipart=True,
+              resumable_path=u'/resumable/upload/storage/v1/b/{bucket}/o',
+              simple_multipart=True,
+              simple_path=u'/upload/storage/v1/b/{bucket}/o',
+          ),
+          }
+
+    def Compose(self, request, global_params=None, download=None):
+      """Concatenates a list of existing objects into a new object in the same bucket.
+
+      Args:
+        request: (StorageObjectsComposeRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        download: (Download, default: None) If present, download
+            data from the request via this stream.
+      Returns:
+        (Object) The response message.
+      """
+      config = self.GetMethodConfig('Compose')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          download=download)
+
+    Compose.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.objects.compose',
+        ordered_params=[u'destinationBucket', u'destinationObject'],
+        path_params=[u'destinationBucket', u'destinationObject'],
+        query_params=[u'destinationPredefinedAcl', u'ifGenerationMatch', u'ifMetagenerationMatch'],
+        relative_path=u'b/{destinationBucket}/o/{destinationObject}/compose',
+        request_field=u'composeRequest',
+        request_type_name=u'StorageObjectsComposeRequest',
+        response_type_name=u'Object',
+        supports_download=True,
+    )
+
+    def Copy(self, request, global_params=None, download=None):
+      """Copies a source object to a destination object. Optionally overrides metadata.
+
+      Args:
+        request: (StorageObjectsCopyRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        download: (Download, default: None) If present, download
+            data from the request via this stream.
+      Returns:
+        (Object) The response message.
+      """
+      config = self.GetMethodConfig('Copy')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          download=download)
+
+    Copy.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.objects.copy',
+        ordered_params=[u'sourceBucket', u'sourceObject', u'destinationBucket', u'destinationObject'],
+        path_params=[u'destinationBucket', u'destinationObject', u'sourceBucket', u'sourceObject'],
+        query_params=[u'destinationPredefinedAcl', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'ifSourceGenerationMatch', u'ifSourceGenerationNotMatch', u'ifSourceMetagenerationMatch', u'ifSourceMetagenerationNotMatch', u'projection', u'sourceGeneration'],
+        relative_path=u'b/{sourceBucket}/o/{sourceObject}/copyTo/b/{destinationBucket}/o/{destinationObject}',
+        request_field=u'object',
+        request_type_name=u'StorageObjectsCopyRequest',
+        response_type_name=u'Object',
+        supports_download=True,
+    )
+
+    def Delete(self, request, global_params=None):
+      """Deletes an object and its metadata. Deletions are permanent if versioning is not enabled for the bucket, or if the generation parameter is used.
+
+      Args:
+        request: (StorageObjectsDeleteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (StorageObjectsDeleteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Delete')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Delete.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'DELETE',
+        method_id=u'storage.objects.delete',
+        ordered_params=[u'bucket', u'object'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch'],
+        relative_path=u'b/{bucket}/o/{object}',
+        request_field='',
+        request_type_name=u'StorageObjectsDeleteRequest',
+        response_type_name=u'StorageObjectsDeleteResponse',
+        supports_download=False,
+    )
+
+    def Get(self, request, global_params=None, download=None):
+      """Retrieves an object or its metadata.
+
+      Args:
+        request: (StorageObjectsGetRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        download: (Download, default: None) If present, download
+            data from the request via this stream.
+      Returns:
+        (Object) The response message.
+      """
+      config = self.GetMethodConfig('Get')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          download=download)
+
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.objects.get',
+        ordered_params=[u'bucket', u'object'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'projection'],
+        relative_path=u'b/{bucket}/o/{object}',
+        request_field='',
+        request_type_name=u'StorageObjectsGetRequest',
+        response_type_name=u'Object',
+        supports_download=True,
+    )
+
+    def GetIamPolicy(self, request, global_params=None):
+      """Returns an IAM policy for the specified object.
+
+      Args:
+        request: (StorageObjectsGetIamPolicyRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Policy) The response message.
+      """
+      config = self.GetMethodConfig('GetIamPolicy')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.objects.getIamPolicy',
+        ordered_params=[u'bucket', u'object'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation'],
+        relative_path=u'b/{bucket}/o/{object}/iam',
+        request_field='',
+        request_type_name=u'StorageObjectsGetIamPolicyRequest',
+        response_type_name=u'Policy',
+        supports_download=False,
+    )
+
+    def Insert(self, request, global_params=None, upload=None, download=None):
+      """Stores a new object and metadata.
+
+      Args:
+        request: (StorageObjectsInsertRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        upload: (Upload, default: None) If present, upload
+            this stream with the request.
+        download: (Download, default: None) If present, download
+            data from the request via this stream.
+      Returns:
+        (Object) The response message.
+      """
+      config = self.GetMethodConfig('Insert')
+      upload_config = self.GetUploadConfig('Insert')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          upload=upload, upload_config=upload_config,
+          download=download)
+
+    Insert.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.objects.insert',
+        ordered_params=[u'bucket'],
+        path_params=[u'bucket'],
+        query_params=[u'contentEncoding', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'name', u'predefinedAcl', u'projection'],
+        relative_path=u'b/{bucket}/o',
+        request_field=u'object',
+        request_type_name=u'StorageObjectsInsertRequest',
+        response_type_name=u'Object',
+        supports_download=True,
+    )
+
+    def List(self, request, global_params=None):
+      """Retrieves a list of objects matching the criteria.
+
+      Args:
+        request: (StorageObjectsListRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Objects) The response message.
+      """
+      config = self.GetMethodConfig('List')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.objects.list',
+        ordered_params=[u'bucket'],
+        path_params=[u'bucket'],
+        query_params=[u'delimiter', u'maxResults', u'pageToken', u'prefix', u'projection', u'versions'],
+        relative_path=u'b/{bucket}/o',
+        request_field='',
+        request_type_name=u'StorageObjectsListRequest',
+        response_type_name=u'Objects',
+        supports_download=False,
+    )
+
+    def Patch(self, request, global_params=None):
+      """Updates an object's metadata. This method supports patch semantics.
+
+      Args:
+        request: (StorageObjectsPatchRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Object) The response message.
+      """
+      config = self.GetMethodConfig('Patch')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Patch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PATCH',
+        method_id=u'storage.objects.patch',
+        ordered_params=[u'bucket', u'object'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'predefinedAcl', u'projection'],
+        relative_path=u'b/{bucket}/o/{object}',
+        request_field=u'objectResource',
+        request_type_name=u'StorageObjectsPatchRequest',
+        response_type_name=u'Object',
+        supports_download=False,
+    )
+
+    def Rewrite(self, request, global_params=None):
+      """Rewrites a source object to a destination object. Optionally overrides metadata.
+
+      Args:
+        request: (StorageObjectsRewriteRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (RewriteResponse) The response message.
+      """
+      config = self.GetMethodConfig('Rewrite')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    Rewrite.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.objects.rewrite',
+        ordered_params=[u'sourceBucket', u'sourceObject', u'destinationBucket', u'destinationObject'],
+        path_params=[u'destinationBucket', u'destinationObject', u'sourceBucket', u'sourceObject'],
+        query_params=[u'destinationPredefinedAcl', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'ifSourceGenerationMatch', u'ifSourceGenerationNotMatch', u'ifSourceMetagenerationMatch', u'ifSourceMetagenerationNotMatch', u'maxBytesRewrittenPerCall', u'projection', u'rewriteToken', u'sourceGeneration'],
+        relative_path=u'b/{sourceBucket}/o/{sourceObject}/rewriteTo/b/{destinationBucket}/o/{destinationObject}',
+        request_field=u'object',
+        request_type_name=u'StorageObjectsRewriteRequest',
+        response_type_name=u'RewriteResponse',
+        supports_download=False,
+    )
+
+    def SetIamPolicy(self, request, global_params=None):
+      """Updates an IAM policy for the specified object.
+
+      Args:
+        request: (StorageObjectsSetIamPolicyRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Policy) The response message.
+      """
+      config = self.GetMethodConfig('SetIamPolicy')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'storage.objects.setIamPolicy',
+        ordered_params=[u'bucket', u'object'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation'],
+        relative_path=u'b/{bucket}/o/{object}/iam',
+        request_field=u'policy',
+        request_type_name=u'StorageObjectsSetIamPolicyRequest',
+        response_type_name=u'Policy',
+        supports_download=False,
+    )
+
+    def TestIamPermissions(self, request, global_params=None):
+      """Tests a set of permissions on the given object to see which, if any, are held by the caller.
+
+      Args:
+        request: (StorageObjectsTestIamPermissionsRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (TestIamPermissionsResponse) The response message.
+      """
+      config = self.GetMethodConfig('TestIamPermissions')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'storage.objects.testIamPermissions',
+        ordered_params=[u'bucket', u'object', u'permissions'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation', u'permissions'],
+        relative_path=u'b/{bucket}/o/{object}/iam/testPermissions',
+        request_field='',
+        request_type_name=u'StorageObjectsTestIamPermissionsRequest',
+        response_type_name=u'TestIamPermissionsResponse',
+        supports_download=False,
+    )
+
+    def Update(self, request, global_params=None, download=None):
+      """Updates an object's metadata.
+
+      Args:
+        request: (StorageObjectsUpdateRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+        download: (Download, default: None) If present, download
+            data from the request via this stream.
+      Returns:
+        (Object) The response message.
+      """
+      config = self.GetMethodConfig('Update')
+      return self._RunMethod(
+          config, request, global_params=global_params,
+          download=download)
+
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'storage.objects.update',
+        ordered_params=[u'bucket', u'object'],
+        path_params=[u'bucket', u'object'],
+        query_params=[u'generation', u'ifGenerationMatch', u'ifGenerationNotMatch', u'ifMetagenerationMatch', u'ifMetagenerationNotMatch', u'predefinedAcl', u'projection'],
+        relative_path=u'b/{bucket}/o/{object}',
+        request_field=u'objectResource',
+        request_type_name=u'StorageObjectsUpdateRequest',
+        response_type_name=u'Object',
+        supports_download=True,
+    )
+
+    def WatchAll(self, request, global_params=None):
+      """Watch for changes on all objects in a bucket.
+
+      Args:
+        request: (StorageObjectsWatchAllRequest) input message
+        global_params: (StandardQueryParameters, default: None) global arguments
+      Returns:
+        (Channel) The response message.
+      """
+      config = self.GetMethodConfig('WatchAll')
+      return self._RunMethod(
+          config, request, global_params=global_params)
+
+    WatchAll.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'storage.objects.watchAll',
+        ordered_params=[u'bucket'],
+        path_params=[u'bucket'],
+        query_params=[u'delimiter', u'maxResults', u'pageToken', u'prefix', u'projection', u'versions'],
+        relative_path=u'b/{bucket}/o/watch',
+        request_field=u'channel',
+        request_type_name=u'StorageObjectsWatchAllRequest',
+        response_type_name=u'Channel',
+        supports_download=False,
+    )
diff --git a/samples/storage_sample/storage_v1/storage_v1_messages.py b/samples/storage_sample/storage_v1/storage_v1_messages.py
new file mode 100644
index 0000000..f392334
--- /dev/null
+++ b/samples/storage_sample/storage_v1/storage_v1_messages.py
@@ -0,0 +1,2218 @@
+"""Generated message classes for storage version v1.
+
+Stores and retrieves potentially large, immutable data objects.
+"""
+# NOTE: This file is autogenerated and should not be edited by hand.
+
+from apitools.base.protorpclite import message_types as _message_types
+from apitools.base.protorpclite import messages as _messages
+from apitools.base.py import encoding
+from apitools.base.py import extra_types
+
+
+package = 'storage'
+
+
+class Bucket(_messages.Message):
+  """A bucket.
+
+  Messages:
+    CorsValueListEntry: A CorsValueListEntry object.
+    LifecycleValue: The bucket's lifecycle configuration. See lifecycle
+      management for more information.
+    LoggingValue: The bucket's logging configuration, which defines the
+      destination bucket and optional name prefix for the current bucket's
+      logs.
+    OwnerValue: The owner of the bucket. This is always the project team's
+      owner group.
+    VersioningValue: The bucket's versioning configuration.
+    WebsiteValue: The bucket's website configuration, controlling how the
+      service behaves when accessing bucket contents as a web site. See the
+      Static Website Examples for more information.
+
+  Fields:
+    acl: Access controls on the bucket.
+    cors: The bucket's Cross-Origin Resource Sharing (CORS) configuration.
+    defaultObjectAcl: Default access controls to apply to new objects when no
+      ACL is provided.
+    etag: HTTP 1.1 Entity tag for the bucket.
+    id: The ID of the bucket.
+    kind: The kind of item this is. For buckets, this is always
+      storage#bucket.
+    lifecycle: The bucket's lifecycle configuration. See lifecycle management
+      for more information.
+    location: The location of the bucket. Object data for objects in the
+      bucket resides in physical storage within this region. Defaults to US.
+      See the developer's guide for the authoritative list.
+    logging: The bucket's logging configuration, which defines the destination
+      bucket and optional name prefix for the current bucket's logs.
+    metageneration: The metadata generation of this bucket.
+    name: The name of the bucket.
+    owner: The owner of the bucket. This is always the project team's owner
+      group.
+    projectNumber: The project number of the project the bucket belongs to.
+    selfLink: The URI of this bucket.
+    storageClass: The bucket's storage class. This defines how objects in the
+      bucket are stored and determines the SLA and the cost of storage. Values
+      include STANDARD, NEARLINE and DURABLE_REDUCED_AVAILABILITY. Defaults to
+      STANDARD. For more information, see storage classes.
+    timeCreated: The creation time of the bucket in RFC 3339 format.
+    updated: The modification time of the bucket in RFC 3339 format.
+    versioning: The bucket's versioning configuration.
+    website: The bucket's website configuration, controlling how the service
+      behaves when accessing bucket contents as a web site. See the Static
+      Website Examples for more information.
+  """
+
+  class CorsValueListEntry(_messages.Message):
+    """A CorsValueListEntry object.
+
+    Fields:
+      maxAgeSeconds: The value, in seconds, to return in the  Access-Control-
+        Max-Age header used in preflight responses.
+      method: The list of HTTP methods on which to include CORS response
+        headers, (GET, OPTIONS, POST, etc) Note: "*" is permitted in the list
+        of methods, and means "any method".
+      origin: The list of Origins eligible to receive CORS response headers.
+        Note: "*" is permitted in the list of origins, and means "any Origin".
+      responseHeader: The list of HTTP headers other than the simple response
+        headers to give permission for the user-agent to share across domains.
+    """
+
+    maxAgeSeconds = _messages.IntegerField(1, variant=_messages.Variant.INT32)
+    method = _messages.StringField(2, repeated=True)
+    origin = _messages.StringField(3, repeated=True)
+    responseHeader = _messages.StringField(4, repeated=True)
+
+  class LifecycleValue(_messages.Message):
+    """The bucket's lifecycle configuration. See lifecycle management for more
+    information.
+
+    Messages:
+      RuleValueListEntry: A RuleValueListEntry object.
+
+    Fields:
+      rule: A lifecycle management rule, which is made of an action to take
+        and the condition(s) under which the action will be taken.
+    """
+
+    class RuleValueListEntry(_messages.Message):
+      """A RuleValueListEntry object.
+
+      Messages:
+        ActionValue: The action to take.
+        ConditionValue: The condition(s) under which the action will be taken.
+
+      Fields:
+        action: The action to take.
+        condition: The condition(s) under which the action will be taken.
+      """
+
+      class ActionValue(_messages.Message):
+        """The action to take.
+
+        Fields:
+          type: Type of the action. Currently, only Delete is supported.
+        """
+
+        type = _messages.StringField(1)
+
+      class ConditionValue(_messages.Message):
+        """The condition(s) under which the action will be taken.
+
+        Fields:
+          age: Age of an object (in days). This condition is satisfied when an
+            object reaches the specified age.
+          createdBefore: A date in RFC 3339 format with only the date part
+            (for instance, "2013-01-15"). This condition is satisfied when an
+            object is created before midnight of the specified date in UTC.
+          isLive: Relevant only for versioned objects. If the value is true,
+            this condition matches live objects; if the value is false, it
+            matches archived objects.
+          numNewerVersions: Relevant only for versioned objects. If the value
+            is N, this condition is satisfied when there are at least N
+            versions (including the live version) newer than this version of
+            the object.
+        """
+
+        age = _messages.IntegerField(1, variant=_messages.Variant.INT32)
+        createdBefore = extra_types.DateField(2)
+        isLive = _messages.BooleanField(3)
+        numNewerVersions = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+
+      action = _messages.MessageField('ActionValue', 1)
+      condition = _messages.MessageField('ConditionValue', 2)
+
+    rule = _messages.MessageField('RuleValueListEntry', 1, repeated=True)
+
+  class LoggingValue(_messages.Message):
+    """The bucket's logging configuration, which defines the destination
+    bucket and optional name prefix for the current bucket's logs.
+
+    Fields:
+      logBucket: The destination bucket where the current bucket's logs should
+        be placed.
+      logObjectPrefix: A prefix for log object names.
+    """
+
+    logBucket = _messages.StringField(1)
+    logObjectPrefix = _messages.StringField(2)
+
+  class OwnerValue(_messages.Message):
+    """The owner of the bucket. This is always the project team's owner group.
+
+    Fields:
+      entity: The entity, in the form project-owner-projectId.
+      entityId: The ID for the entity.
+    """
+
+    entity = _messages.StringField(1)
+    entityId = _messages.StringField(2)
+
+  class VersioningValue(_messages.Message):
+    """The bucket's versioning configuration.
+
+    Fields:
+      enabled: While set to true, versioning is fully enabled for this bucket.
+    """
+
+    enabled = _messages.BooleanField(1)
+
+  class WebsiteValue(_messages.Message):
+    """The bucket's website configuration, controlling how the service behaves
+    when accessing bucket contents as a web site. See the Static Website
+    Examples for more information.
+
+    Fields:
+      mainPageSuffix: If the requested object path is missing, the service
+        will ensure the path has a trailing '/', append this suffix, and
+        attempt to retrieve the resulting object. This allows the creation of
+        index.html objects to represent directory pages.
+      notFoundPage: If the requested object path is missing, and any
+        mainPageSuffix object is missing, if applicable, the service will
+        return the named object from this bucket as the content for a 404 Not
+        Found result.
+    """
+
+    mainPageSuffix = _messages.StringField(1)
+    notFoundPage = _messages.StringField(2)
+
+  acl = _messages.MessageField('BucketAccessControl', 1, repeated=True)
+  cors = _messages.MessageField('CorsValueListEntry', 2, repeated=True)
+  defaultObjectAcl = _messages.MessageField('ObjectAccessControl', 3, repeated=True)
+  etag = _messages.StringField(4)
+  id = _messages.StringField(5)
+  kind = _messages.StringField(6, default=u'storage#bucket')
+  lifecycle = _messages.MessageField('LifecycleValue', 7)
+  location = _messages.StringField(8)
+  logging = _messages.MessageField('LoggingValue', 9)
+  metageneration = _messages.IntegerField(10)
+  name = _messages.StringField(11)
+  owner = _messages.MessageField('OwnerValue', 12)
+  projectNumber = _messages.IntegerField(13, variant=_messages.Variant.UINT64)
+  selfLink = _messages.StringField(14)
+  storageClass = _messages.StringField(15)
+  timeCreated = _message_types.DateTimeField(16)
+  updated = _message_types.DateTimeField(17)
+  versioning = _messages.MessageField('VersioningValue', 18)
+  website = _messages.MessageField('WebsiteValue', 19)
+
+
+class BucketAccessControl(_messages.Message):
+  """An access-control entry.
+
+  Messages:
+    ProjectTeamValue: The project team associated with the entity, if any.
+
+  Fields:
+    bucket: The name of the bucket.
+    domain: The domain associated with the entity, if any.
+    email: The email address associated with the entity, if any.
+    entity: The entity holding the permission, in one of the following forms:
+      - user-userId  - user-email  - group-groupId  - group-email  - domain-
+      domain  - project-team-projectId  - allUsers  - allAuthenticatedUsers
+      Examples:  - The user liz@example.com would be user-liz@example.com.  -
+      The group example@googlegroups.com would be group-
+      example@googlegroups.com.  - To refer to all members of the Google Apps
+      for Business domain example.com, the entity would be domain-example.com.
+    entityId: The ID for the entity, if any.
+    etag: HTTP 1.1 Entity tag for the access-control entry.
+    id: The ID of the access-control entry.
+    kind: The kind of item this is. For bucket access control entries, this is
+      always storage#bucketAccessControl.
+    projectTeam: The project team associated with the entity, if any.
+    role: The access permission for the entity. Can be READER, WRITER, or
+      OWNER.
+    selfLink: The link to this access-control entry.
+  """
+
+  class ProjectTeamValue(_messages.Message):
+    """The project team associated with the entity, if any.
+
+    Fields:
+      projectNumber: The project number.
+      team: The team. Can be owners, editors, or viewers.
+    """
+
+    projectNumber = _messages.StringField(1)
+    team = _messages.StringField(2)
+
+  bucket = _messages.StringField(1)
+  domain = _messages.StringField(2)
+  email = _messages.StringField(3)
+  entity = _messages.StringField(4)
+  entityId = _messages.StringField(5)
+  etag = _messages.StringField(6)
+  id = _messages.StringField(7)
+  kind = _messages.StringField(8, default=u'storage#bucketAccessControl')
+  projectTeam = _messages.MessageField('ProjectTeamValue', 9)
+  role = _messages.StringField(10)
+  selfLink = _messages.StringField(11)
+
+
+class BucketAccessControls(_messages.Message):
+  """An access-control list.
+
+  Fields:
+    items: The list of items.
+    kind: The kind of item this is. For lists of bucket access control
+      entries, this is always storage#bucketAccessControls.
+  """
+
+  items = _messages.MessageField('BucketAccessControl', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'storage#bucketAccessControls')
+
+
+class Buckets(_messages.Message):
+  """A list of buckets.
+
+  Fields:
+    items: The list of items.
+    kind: The kind of item this is. For lists of buckets, this is always
+      storage#buckets.
+    nextPageToken: The continuation token, used to page through large result
+      sets. Provide this value in a subsequent request to return the next page
+      of results.
+  """
+
+  items = _messages.MessageField('Bucket', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'storage#buckets')
+  nextPageToken = _messages.StringField(3)
+
+
+class Channel(_messages.Message):
+  """An notification channel used to watch for resource changes.
+
+  Messages:
+    ParamsValue: Additional parameters controlling delivery channel behavior.
+      Optional.
+
+  Fields:
+    address: The address where notifications are delivered for this channel.
+    expiration: Date and time of notification channel expiration, expressed as
+      a Unix timestamp, in milliseconds. Optional.
+    id: A UUID or similar unique string that identifies this channel.
+    kind: Identifies this as a notification channel used to watch for changes
+      to a resource. Value: the fixed string "api#channel".
+    params: Additional parameters controlling delivery channel behavior.
+      Optional.
+    payload: A Boolean value to indicate whether payload is wanted. Optional.
+    resourceId: An opaque ID that identifies the resource being watched on
+      this channel. Stable across different API versions.
+    resourceUri: A version-specific identifier for the watched resource.
+    token: An arbitrary string delivered to the target address with each
+      notification delivered over this channel. Optional.
+    type: The type of delivery mechanism used for this channel.
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class ParamsValue(_messages.Message):
+    """Additional parameters controlling delivery channel behavior. Optional.
+
+    Messages:
+      AdditionalProperty: An additional property for a ParamsValue object.
+
+    Fields:
+      additionalProperties: Declares a new parameter by name.
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a ParamsValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A string attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.StringField(2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  address = _messages.StringField(1)
+  expiration = _messages.IntegerField(2)
+  id = _messages.StringField(3)
+  kind = _messages.StringField(4, default=u'api#channel')
+  params = _messages.MessageField('ParamsValue', 5)
+  payload = _messages.BooleanField(6)
+  resourceId = _messages.StringField(7)
+  resourceUri = _messages.StringField(8)
+  token = _messages.StringField(9)
+  type = _messages.StringField(10)
+
+
+class ComposeRequest(_messages.Message):
+  """A Compose request.
+
+  Messages:
+    SourceObjectsValueListEntry: A SourceObjectsValueListEntry object.
+
+  Fields:
+    destination: Properties of the resulting object.
+    kind: The kind of item this is.
+    sourceObjects: The list of source objects that will be concatenated into a
+      single object.
+  """
+
+  class SourceObjectsValueListEntry(_messages.Message):
+    """A SourceObjectsValueListEntry object.
+
+    Messages:
+      ObjectPreconditionsValue: Conditions that must be met for this operation
+        to execute.
+
+    Fields:
+      generation: The generation of this object to use as the source.
+      name: The source object's name. The source object's bucket is implicitly
+        the destination bucket.
+      objectPreconditions: Conditions that must be met for this operation to
+        execute.
+    """
+
+    class ObjectPreconditionsValue(_messages.Message):
+      """Conditions that must be met for this operation to execute.
+
+      Fields:
+        ifGenerationMatch: Only perform the composition if the generation of
+          the source object that would be used matches this value. If this
+          value and a generation are both specified, they must be the same
+          value or the call will fail.
+      """
+
+      ifGenerationMatch = _messages.IntegerField(1)
+
+    generation = _messages.IntegerField(1)
+    name = _messages.StringField(2)
+    objectPreconditions = _messages.MessageField('ObjectPreconditionsValue', 3)
+
+  destination = _messages.MessageField('Object', 1)
+  kind = _messages.StringField(2, default=u'storage#composeRequest')
+  sourceObjects = _messages.MessageField('SourceObjectsValueListEntry', 3, repeated=True)
+
+
+class Notification(_messages.Message):
+  """A subscription to receive Google PubSub notifications.
+
+  Messages:
+    CustomAttributesValue: An optional list of additional attributes to attach
+      to each Cloud PubSub message published for this notification
+      subscription.
+
+  Fields:
+    bucket: The name of the bucket this subscription is particular to.
+    custom_attributes: An optional list of additional attributes to attach to
+      each Cloud PubSub message published for this notification subscription.
+    etag: HTTP 1.1 Entity tag for this subscription notification.
+    event_types: If present, only send notifications about listed event types.
+      If empty, sent notifications for all event types.
+    id: The ID of the notification.
+    kind: The kind of item this is. For notifications, this is always
+      storage#notification.
+    object_metadata_format: If payload_content is OBJECT_METADATA, controls
+      the format of that metadata. Otherwise, must not be set.
+    object_name_prefix: If present, only apply this notification configuration
+      to object names that begin with this prefix.
+    payload_content: The desired content of the Payload. Defaults to
+      OBJECT_METADATA.
+    selfLink: The canonical URL of this notification.
+    topic: The Cloud PubSub topic to which this subscription publishes.
+      Formatted as: '//pubsub.googleapis.com/projects/{project-
+      identifier}/topics/{my-topic}'
+  """
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class CustomAttributesValue(_messages.Message):
+    """An optional list of additional attributes to attach to each Cloud
+    PubSub message published for this notification subscription.
+
+    Messages:
+      AdditionalProperty: An additional property for a CustomAttributesValue
+        object.
+
+    Fields:
+      additionalProperties: Additional properties of type
+        CustomAttributesValue
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a CustomAttributesValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A string attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.StringField(2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  bucket = _messages.StringField(1)
+  custom_attributes = _messages.MessageField('CustomAttributesValue', 2)
+  etag = _messages.StringField(3)
+  event_types = _messages.StringField(4, repeated=True)
+  id = _messages.StringField(5)
+  kind = _messages.StringField(6, default=u'storage#notification')
+  object_metadata_format = _messages.StringField(7, default=u'JSON_API_V1')
+  object_name_prefix = _messages.StringField(8)
+  payload_content = _messages.StringField(9, default=u'OBJECT_METADATA')
+  selfLink = _messages.StringField(10)
+  topic = _messages.StringField(11)
+
+
+class Notifications(_messages.Message):
+  """A list of notification subscriptions.
+
+  Fields:
+    items: The list of items.
+    kind: The kind of item this is. For lists of notifications, this is always
+      storage#notifications.
+  """
+
+  items = _messages.MessageField('Notification', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'storage#notifications')
+
+
+class Object(_messages.Message):
+  """An object.
+
+  Messages:
+    CustomerEncryptionValue: Metadata of customer-supplied encryption key, if
+      the object is encrypted by such a key.
+    MetadataValue: User-provided metadata, in key/value pairs.
+    OwnerValue: The owner of the object. This will always be the uploader of
+      the object.
+
+  Fields:
+    acl: Access controls on the object.
+    bucket: The name of the bucket containing this object.
+    cacheControl: Cache-Control directive for the object data.
+    componentCount: Number of underlying components that make up this object.
+      Components are accumulated by compose operations.
+    contentDisposition: Content-Disposition of the object data.
+    contentEncoding: Content-Encoding of the object data.
+    contentLanguage: Content-Language of the object data.
+    contentType: Content-Type of the object data. If contentType is not
+      specified, object downloads will be served as application/octet-stream.
+    crc32c: CRC32c checksum, as described in RFC 4960, Appendix B; encoded
+      using base64 in big-endian byte order. For more information about using
+      the CRC32c checksum, see Hashes and ETags: Best Practices.
+    customerEncryption: Metadata of customer-supplied encryption key, if the
+      object is encrypted by such a key.
+    etag: HTTP 1.1 Entity tag for the object.
+    generation: The content generation of this object. Used for object
+      versioning.
+    id: The ID of the object.
+    kind: The kind of item this is. For objects, this is always
+      storage#object.
+    md5Hash: MD5 hash of the data; encoded using base64. For more information
+      about using the MD5 hash, see Hashes and ETags: Best Practices.
+    mediaLink: Media download link.
+    metadata: User-provided metadata, in key/value pairs.
+    metageneration: The version of the metadata for this object at this
+      generation. Used for preconditions and for detecting changes in
+      metadata. A metageneration number is only meaningful in the context of a
+      particular generation of a particular object.
+    name: The name of this object. Required if not specified by URL parameter.
+    owner: The owner of the object. This will always be the uploader of the
+      object.
+    selfLink: The link to this object.
+    size: Content-Length of the data in bytes.
+    storageClass: Storage class of the object.
+    timeCreated: The creation time of the object in RFC 3339 format.
+    timeDeleted: The deletion time of the object in RFC 3339 format. Will be
+      returned if and only if this version of the object has been deleted.
+    updated: The modification time of the object metadata in RFC 3339 format.
+  """
+
+  class CustomerEncryptionValue(_messages.Message):
+    """Metadata of customer-supplied encryption key, if the object is
+    encrypted by such a key.
+
+    Fields:
+      encryptionAlgorithm: The encryption algorithm.
+      keySha256: SHA256 hash value of the encryption key.
+    """
+
+    encryptionAlgorithm = _messages.StringField(1)
+    keySha256 = _messages.StringField(2)
+
+  @encoding.MapUnrecognizedFields('additionalProperties')
+  class MetadataValue(_messages.Message):
+    """User-provided metadata, in key/value pairs.
+
+    Messages:
+      AdditionalProperty: An additional property for a MetadataValue object.
+
+    Fields:
+      additionalProperties: An individual metadata entry.
+    """
+
+    class AdditionalProperty(_messages.Message):
+      """An additional property for a MetadataValue object.
+
+      Fields:
+        key: Name of the additional property.
+        value: A string attribute.
+      """
+
+      key = _messages.StringField(1)
+      value = _messages.StringField(2)
+
+    additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
+
+  class OwnerValue(_messages.Message):
+    """The owner of the object. This will always be the uploader of the
+    object.
+
+    Fields:
+      entity: The entity, in the form user-userId.
+      entityId: The ID for the entity.
+    """
+
+    entity = _messages.StringField(1)
+    entityId = _messages.StringField(2)
+
+  acl = _messages.MessageField('ObjectAccessControl', 1, repeated=True)
+  bucket = _messages.StringField(2)
+  cacheControl = _messages.StringField(3)
+  componentCount = _messages.IntegerField(4, variant=_messages.Variant.INT32)
+  contentDisposition = _messages.StringField(5)
+  contentEncoding = _messages.StringField(6)
+  contentLanguage = _messages.StringField(7)
+  contentType = _messages.StringField(8)
+  crc32c = _messages.StringField(9)
+  customerEncryption = _messages.MessageField('CustomerEncryptionValue', 10)
+  etag = _messages.StringField(11)
+  generation = _messages.IntegerField(12)
+  id = _messages.StringField(13)
+  kind = _messages.StringField(14, default=u'storage#object')
+  md5Hash = _messages.StringField(15)
+  mediaLink = _messages.StringField(16)
+  metadata = _messages.MessageField('MetadataValue', 17)
+  metageneration = _messages.IntegerField(18)
+  name = _messages.StringField(19)
+  owner = _messages.MessageField('OwnerValue', 20)
+  selfLink = _messages.StringField(21)
+  size = _messages.IntegerField(22, variant=_messages.Variant.UINT64)
+  storageClass = _messages.StringField(23)
+  timeCreated = _message_types.DateTimeField(24)
+  timeDeleted = _message_types.DateTimeField(25)
+  updated = _message_types.DateTimeField(26)
+
+
+class ObjectAccessControl(_messages.Message):
+  """An access-control entry.
+
+  Messages:
+    ProjectTeamValue: The project team associated with the entity, if any.
+
+  Fields:
+    bucket: The name of the bucket.
+    domain: The domain associated with the entity, if any.
+    email: The email address associated with the entity, if any.
+    entity: The entity holding the permission, in one of the following forms:
+      - user-userId  - user-email  - group-groupId  - group-email  - domain-
+      domain  - project-team-projectId  - allUsers  - allAuthenticatedUsers
+      Examples:  - The user liz@example.com would be user-liz@example.com.  -
+      The group example@googlegroups.com would be group-
+      example@googlegroups.com.  - To refer to all members of the Google Apps
+      for Business domain example.com, the entity would be domain-example.com.
+    entityId: The ID for the entity, if any.
+    etag: HTTP 1.1 Entity tag for the access-control entry.
+    generation: The content generation of the object.
+    id: The ID of the access-control entry.
+    kind: The kind of item this is. For object access control entries, this is
+      always storage#objectAccessControl.
+    object: The name of the object.
+    projectTeam: The project team associated with the entity, if any.
+    role: The access permission for the entity. Can be READER or OWNER.
+    selfLink: The link to this access-control entry.
+  """
+
+  class ProjectTeamValue(_messages.Message):
+    """The project team associated with the entity, if any.
+
+    Fields:
+      projectNumber: The project number.
+      team: The team. Can be owners, editors, or viewers.
+    """
+
+    projectNumber = _messages.StringField(1)
+    team = _messages.StringField(2)
+
+  bucket = _messages.StringField(1)
+  domain = _messages.StringField(2)
+  email = _messages.StringField(3)
+  entity = _messages.StringField(4)
+  entityId = _messages.StringField(5)
+  etag = _messages.StringField(6)
+  generation = _messages.IntegerField(7)
+  id = _messages.StringField(8)
+  kind = _messages.StringField(9, default=u'storage#objectAccessControl')
+  object = _messages.StringField(10)
+  projectTeam = _messages.MessageField('ProjectTeamValue', 11)
+  role = _messages.StringField(12)
+  selfLink = _messages.StringField(13)
+
+
+class ObjectAccessControls(_messages.Message):
+  """An access-control list.
+
+  Fields:
+    items: The list of items.
+    kind: The kind of item this is. For lists of object access control
+      entries, this is always storage#objectAccessControls.
+  """
+
+  items = _messages.MessageField('extra_types.JsonValue', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'storage#objectAccessControls')
+
+
+class Objects(_messages.Message):
+  """A list of objects.
+
+  Fields:
+    items: The list of items.
+    kind: The kind of item this is. For lists of objects, this is always
+      storage#objects.
+    nextPageToken: The continuation token, used to page through large result
+      sets. Provide this value in a subsequent request to return the next page
+      of results.
+    prefixes: The list of prefixes of objects matching-but-not-listed up to
+      and including the requested delimiter.
+  """
+
+  items = _messages.MessageField('Object', 1, repeated=True)
+  kind = _messages.StringField(2, default=u'storage#objects')
+  nextPageToken = _messages.StringField(3)
+  prefixes = _messages.StringField(4, repeated=True)
+
+
+class Policy(_messages.Message):
+  """A bucket/object IAM policy.
+
+  Messages:
+    BindingsValueListEntry: A BindingsValueListEntry object.
+
+  Fields:
+    bindings: An association between a role, which comes with a set of
+      permissions, and members who may assume that role.
+    etag: HTTP 1.1  Entity tag for the policy.
+    kind: The kind of item this is. For policies, this is always
+      storage#policy. This field is ignored on input.
+    resourceId: The ID of the resource to which this policy belongs. Will be
+      of the form buckets/bucket for buckets, and
+      buckets/bucket/objects/object for objects. A specific generation may be
+      specified by appending #generationNumber to the end of the object name,
+      e.g. buckets/my-bucket/objects/data.txt#17. The current generation can
+      be denoted with #0. This field is ignored on input.
+  """
+
+  class BindingsValueListEntry(_messages.Message):
+    """A BindingsValueListEntry object.
+
+    Fields:
+      members: A collection of identifiers for members who may assume the
+        provided role. Recognized identifiers are as follows:   - allUsers \u2014 A
+        special identifier that represents anyone on the internet; with or
+        without a Google account.   - allAuthenticatedUsers \u2014 A special
+        identifier that represents anyone who is authenticated with a Google
+        account or a service account.   - user:emailid \u2014 An email address that
+        represents a specific account. For example, user:alice@gmail.com or
+        user:joe@example.com.   - serviceAccount:emailid \u2014 An email address
+        that represents a service account. For example,  serviceAccount:my-
+        other-app@appspot.gserviceaccount.com .   - group:emailid \u2014 An email
+        address that represents a Google group. For example,
+        group:admins@example.com.   - domain:domain \u2014 A Google Apps domain
+        name that represents all the users of that domain. For example,
+        domain:google.com or domain:example.com.   - projectOwner:projectid \u2014
+        Owners of the given project. For example, projectOwner:my-example-
+        project   - projectEditor:projectid \u2014 Editors of the given project.
+        For example, projectEditor:my-example-project   -
+        projectViewer:projectid \u2014 Viewers of the given project. For example,
+        projectViewer:my-example-project
+      role: The role to which members belong. Two types of roles are
+        supported: new IAM roles, which grant permissions that do not map
+        directly to those provided by ACLs, and legacy IAM roles, which do map
+        directly to ACL permissions. All roles are of the format
+        roles/storage.specificRole. The new IAM roles are:   -
+        roles/storage.admin \u2014 Full control of Google Cloud Storage resources.
+        - roles/storage.objectViewer \u2014 Read-Only access to Google Cloud
+        Storage objects.   - roles/storage.objectCreator \u2014 Access to create
+        objects in Google Cloud Storage.   - roles/storage.objectAdmin \u2014 Full
+        control of Google Cloud Storage objects.   The legacy IAM roles are:
+        - roles/storage.legacyObjectReader \u2014 Read-only access to objects
+        without listing. Equivalent to an ACL entry on an object with the
+        READER role.   - roles/storage.legacyObjectOwner \u2014 Read/write access
+        to existing objects without listing. Equivalent to an ACL entry on an
+        object with the OWNER role.   - roles/storage.legacyBucketReader \u2014
+        Read access to buckets with object listing. Equivalent to an ACL entry
+        on a bucket with the READER role.   - roles/storage.legacyBucketWriter
+        \u2014 Read access to buckets with object listing/creation/deletion.
+        Equivalent to an ACL entry on a bucket with the WRITER role.   -
+        roles/storage.legacyBucketOwner \u2014 Read and write access to existing
+        buckets with object listing/creation/deletion. Equivalent to an ACL
+        entry on a bucket with the OWNER role.
+    """
+
+    members = _messages.StringField(1, repeated=True)
+    role = _messages.StringField(2)
+
+  bindings = _messages.MessageField('BindingsValueListEntry', 1, repeated=True)
+  etag = _messages.BytesField(2)
+  kind = _messages.StringField(3, default=u'storage#policy')
+  resourceId = _messages.StringField(4)
+
+
+class RewriteResponse(_messages.Message):
+  """A rewrite response.
+
+  Fields:
+    done: true if the copy is finished; otherwise, false if the copy is in
+      progress. This property is always present in the response.
+    kind: The kind of item this is.
+    objectSize: The total size of the object being copied in bytes. This
+      property is always present in the response.
+    resource: A resource containing the metadata for the copied-to object.
+      This property is present in the response only when copying completes.
+    rewriteToken: A token to use in subsequent requests to continue copying
+      data. This token is present in the response only when there is more data
+      to copy.
+    totalBytesRewritten: The total bytes written so far, which can be used to
+      provide a waiting user with a progress indicator. This property is
+      always present in the response.
+  """
+
+  done = _messages.BooleanField(1)
+  kind = _messages.StringField(2, default=u'storage#rewriteResponse')
+  objectSize = _messages.IntegerField(3, variant=_messages.Variant.UINT64)
+  resource = _messages.MessageField('Object', 4)
+  rewriteToken = _messages.StringField(5)
+  totalBytesRewritten = _messages.IntegerField(6, variant=_messages.Variant.UINT64)
+
+
+class StandardQueryParameters(_messages.Message):
+  """Query parameters accepted by all methods.
+
+  Enums:
+    AltValueValuesEnum: Data format for the response.
+
+  Fields:
+    alt: Data format for the response.
+    fields: Selector specifying which fields to include in a partial response.
+    key: API key. Your API key identifies your project and provides you with
+      API access, quota, and reports. Required unless you provide an OAuth 2.0
+      token.
+    oauth_token: OAuth 2.0 token for the current user.
+    prettyPrint: Returns response with indentations and line breaks.
+    quotaUser: Available to use for quota purposes for server-side
+      applications. Can be any arbitrary string assigned to a user, but should
+      not exceed 40 characters. Overrides userIp if both are provided.
+    trace: A tracing token of the form "token:<tokenid>" to include in api
+      requests.
+    userIp: IP address of the site where the request originates. Use this if
+      you want to enforce per-user limits.
+  """
+
+  class AltValueValuesEnum(_messages.Enum):
+    """Data format for the response.
+
+    Values:
+      json: Responses with Content-Type of application/json
+    """
+    json = 0
+
+  alt = _messages.EnumField('AltValueValuesEnum', 1, default=u'json')
+  fields = _messages.StringField(2)
+  key = _messages.StringField(3)
+  oauth_token = _messages.StringField(4)
+  prettyPrint = _messages.BooleanField(5, default=True)
+  quotaUser = _messages.StringField(6)
+  trace = _messages.StringField(7)
+  userIp = _messages.StringField(8)
+
+
+class StorageBucketAccessControlsDeleteRequest(_messages.Message):
+  """A StorageBucketAccessControlsDeleteRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    entity: The entity holding the permission. Can be user-userId, user-
+      emailAddress, group-groupId, group-emailAddress, allUsers, or
+      allAuthenticatedUsers.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  entity = _messages.StringField(2, required=True)
+
+
+class StorageBucketAccessControlsDeleteResponse(_messages.Message):
+  """An empty StorageBucketAccessControlsDelete response."""
+
+
+class StorageBucketAccessControlsGetRequest(_messages.Message):
+  """A StorageBucketAccessControlsGetRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    entity: The entity holding the permission. Can be user-userId, user-
+      emailAddress, group-groupId, group-emailAddress, allUsers, or
+      allAuthenticatedUsers.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  entity = _messages.StringField(2, required=True)
+
+
+class StorageBucketAccessControlsListRequest(_messages.Message):
+  """A StorageBucketAccessControlsListRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+
+
+class StorageBucketsDeleteRequest(_messages.Message):
+  """A StorageBucketsDeleteRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    ifMetagenerationMatch: If set, only deletes the bucket if its
+      metageneration matches this value.
+    ifMetagenerationNotMatch: If set, only deletes the bucket if its
+      metageneration does not match this value.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  ifMetagenerationMatch = _messages.IntegerField(2)
+  ifMetagenerationNotMatch = _messages.IntegerField(3)
+
+
+class StorageBucketsDeleteResponse(_messages.Message):
+  """An empty StorageBucketsDelete response."""
+
+
+class StorageBucketsGetIamPolicyRequest(_messages.Message):
+  """A StorageBucketsGetIamPolicyRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+
+
+class StorageBucketsGetRequest(_messages.Message):
+  """A StorageBucketsGetRequest object.
+
+  Enums:
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
+
+  Fields:
+    bucket: Name of a bucket.
+    ifMetagenerationMatch: Makes the return of the bucket metadata conditional
+      on whether the bucket's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the return of the bucket metadata
+      conditional on whether the bucket's current metageneration does not
+      match the given value.
+    projection: Set of properties to return. Defaults to noAcl.
+  """
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit owner, acl and defaultObjectAcl properties.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  ifMetagenerationMatch = _messages.IntegerField(2)
+  ifMetagenerationNotMatch = _messages.IntegerField(3)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 4)
+
+
+class StorageBucketsInsertRequest(_messages.Message):
+  """A StorageBucketsInsertRequest object.
+
+  Enums:
+    PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
+      this bucket.
+    PredefinedDefaultObjectAclValueValuesEnum: Apply a predefined set of
+      default object access controls to this bucket.
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl,
+      unless the bucket resource specifies acl or defaultObjectAcl properties,
+      when it defaults to full.
+
+  Fields:
+    bucket: A Bucket resource to be passed as the request body.
+    predefinedAcl: Apply a predefined set of access controls to this bucket.
+    predefinedDefaultObjectAcl: Apply a predefined set of default object
+      access controls to this bucket.
+    project: A valid API project identifier.
+    projection: Set of properties to return. Defaults to noAcl, unless the
+      bucket resource specifies acl or defaultObjectAcl properties, when it
+      defaults to full.
+  """
+
+  class PredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to this bucket.
+
+    Values:
+      authenticatedRead: Project team owners get OWNER access, and
+        allAuthenticatedUsers get READER access.
+      private: Project team owners get OWNER access.
+      projectPrivate: Project team members get access according to their
+        roles.
+      publicRead: Project team owners get OWNER access, and allUsers get
+        READER access.
+      publicReadWrite: Project team owners get OWNER access, and allUsers get
+        WRITER access.
+    """
+    authenticatedRead = 0
+    private = 1
+    projectPrivate = 2
+    publicRead = 3
+    publicReadWrite = 4
+
+  class PredefinedDefaultObjectAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of default object access controls to this
+    bucket.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl, unless the bucket
+    resource specifies acl or defaultObjectAcl properties, when it defaults to
+    full.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit owner, acl and defaultObjectAcl properties.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.MessageField('Bucket', 1)
+  predefinedAcl = _messages.EnumField('PredefinedAclValueValuesEnum', 2)
+  predefinedDefaultObjectAcl = _messages.EnumField('PredefinedDefaultObjectAclValueValuesEnum', 3)
+  project = _messages.StringField(4, required=True)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 5)
+
+
+class StorageBucketsListRequest(_messages.Message):
+  """A StorageBucketsListRequest object.
+
+  Enums:
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
+
+  Fields:
+    maxResults: Maximum number of buckets to return.
+    pageToken: A previously-returned page token representing part of the
+      larger set of results to view.
+    prefix: Filter results to buckets whose names begin with this prefix.
+    project: A valid API project identifier.
+    projection: Set of properties to return. Defaults to noAcl.
+  """
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit owner, acl and defaultObjectAcl properties.
+    """
+    full = 0
+    noAcl = 1
+
+  maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(2)
+  prefix = _messages.StringField(3)
+  project = _messages.StringField(4, required=True)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 5)
+
+
+class StorageBucketsPatchRequest(_messages.Message):
+  """A StorageBucketsPatchRequest object.
+
+  Enums:
+    PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
+      this bucket.
+    PredefinedDefaultObjectAclValueValuesEnum: Apply a predefined set of
+      default object access controls to this bucket.
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to full.
+
+  Fields:
+    bucket: Name of a bucket.
+    bucketResource: A Bucket resource to be passed as the request body.
+    ifMetagenerationMatch: Makes the return of the bucket metadata conditional
+      on whether the bucket's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the return of the bucket metadata
+      conditional on whether the bucket's current metageneration does not
+      match the given value.
+    predefinedAcl: Apply a predefined set of access controls to this bucket.
+    predefinedDefaultObjectAcl: Apply a predefined set of default object
+      access controls to this bucket.
+    projection: Set of properties to return. Defaults to full.
+  """
+
+  class PredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to this bucket.
+
+    Values:
+      authenticatedRead: Project team owners get OWNER access, and
+        allAuthenticatedUsers get READER access.
+      private: Project team owners get OWNER access.
+      projectPrivate: Project team members get access according to their
+        roles.
+      publicRead: Project team owners get OWNER access, and allUsers get
+        READER access.
+      publicReadWrite: Project team owners get OWNER access, and allUsers get
+        WRITER access.
+    """
+    authenticatedRead = 0
+    private = 1
+    projectPrivate = 2
+    publicRead = 3
+    publicReadWrite = 4
+
+  class PredefinedDefaultObjectAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of default object access controls to this
+    bucket.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to full.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit owner, acl and defaultObjectAcl properties.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  bucketResource = _messages.MessageField('Bucket', 2)
+  ifMetagenerationMatch = _messages.IntegerField(3)
+  ifMetagenerationNotMatch = _messages.IntegerField(4)
+  predefinedAcl = _messages.EnumField('PredefinedAclValueValuesEnum', 5)
+  predefinedDefaultObjectAcl = _messages.EnumField('PredefinedDefaultObjectAclValueValuesEnum', 6)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 7)
+
+
+class StorageBucketsSetIamPolicyRequest(_messages.Message):
+  """A StorageBucketsSetIamPolicyRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    policy: A Policy resource to be passed as the request body.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  policy = _messages.MessageField('Policy', 2)
+
+
+class StorageBucketsTestIamPermissionsRequest(_messages.Message):
+  """A StorageBucketsTestIamPermissionsRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    permissions: Permissions to test.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  permissions = _messages.StringField(2, required=True)
+
+
+class StorageBucketsUpdateRequest(_messages.Message):
+  """A StorageBucketsUpdateRequest object.
+
+  Enums:
+    PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
+      this bucket.
+    PredefinedDefaultObjectAclValueValuesEnum: Apply a predefined set of
+      default object access controls to this bucket.
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to full.
+
+  Fields:
+    bucket: Name of a bucket.
+    bucketResource: A Bucket resource to be passed as the request body.
+    ifMetagenerationMatch: Makes the return of the bucket metadata conditional
+      on whether the bucket's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the return of the bucket metadata
+      conditional on whether the bucket's current metageneration does not
+      match the given value.
+    predefinedAcl: Apply a predefined set of access controls to this bucket.
+    predefinedDefaultObjectAcl: Apply a predefined set of default object
+      access controls to this bucket.
+    projection: Set of properties to return. Defaults to full.
+  """
+
+  class PredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to this bucket.
+
+    Values:
+      authenticatedRead: Project team owners get OWNER access, and
+        allAuthenticatedUsers get READER access.
+      private: Project team owners get OWNER access.
+      projectPrivate: Project team members get access according to their
+        roles.
+      publicRead: Project team owners get OWNER access, and allUsers get
+        READER access.
+      publicReadWrite: Project team owners get OWNER access, and allUsers get
+        WRITER access.
+    """
+    authenticatedRead = 0
+    private = 1
+    projectPrivate = 2
+    publicRead = 3
+    publicReadWrite = 4
+
+  class PredefinedDefaultObjectAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of default object access controls to this
+    bucket.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to full.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit owner, acl and defaultObjectAcl properties.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  bucketResource = _messages.MessageField('Bucket', 2)
+  ifMetagenerationMatch = _messages.IntegerField(3)
+  ifMetagenerationNotMatch = _messages.IntegerField(4)
+  predefinedAcl = _messages.EnumField('PredefinedAclValueValuesEnum', 5)
+  predefinedDefaultObjectAcl = _messages.EnumField('PredefinedDefaultObjectAclValueValuesEnum', 6)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 7)
+
+
+class StorageChannelsStopResponse(_messages.Message):
+  """An empty StorageChannelsStop response."""
+
+
+class StorageDefaultObjectAccessControlsDeleteRequest(_messages.Message):
+  """A StorageDefaultObjectAccessControlsDeleteRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    entity: The entity holding the permission. Can be user-userId, user-
+      emailAddress, group-groupId, group-emailAddress, allUsers, or
+      allAuthenticatedUsers.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  entity = _messages.StringField(2, required=True)
+
+
+class StorageDefaultObjectAccessControlsDeleteResponse(_messages.Message):
+  """An empty StorageDefaultObjectAccessControlsDelete response."""
+
+
+class StorageDefaultObjectAccessControlsGetRequest(_messages.Message):
+  """A StorageDefaultObjectAccessControlsGetRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    entity: The entity holding the permission. Can be user-userId, user-
+      emailAddress, group-groupId, group-emailAddress, allUsers, or
+      allAuthenticatedUsers.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  entity = _messages.StringField(2, required=True)
+
+
+class StorageDefaultObjectAccessControlsListRequest(_messages.Message):
+  """A StorageDefaultObjectAccessControlsListRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    ifMetagenerationMatch: If present, only return default ACL listing if the
+      bucket's current metageneration matches this value.
+    ifMetagenerationNotMatch: If present, only return default ACL listing if
+      the bucket's current metageneration does not match the given value.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  ifMetagenerationMatch = _messages.IntegerField(2)
+  ifMetagenerationNotMatch = _messages.IntegerField(3)
+
+
+class StorageNotificationsDeleteRequest(_messages.Message):
+  """A StorageNotificationsDeleteRequest object.
+
+  Fields:
+    notification: ID of the notification to delete.
+  """
+
+  notification = _messages.StringField(1, required=True)
+
+
+class StorageNotificationsDeleteResponse(_messages.Message):
+  """An empty StorageNotificationsDelete response."""
+
+
+class StorageNotificationsGetRequest(_messages.Message):
+  """A StorageNotificationsGetRequest object.
+
+  Fields:
+    notification: Notification ID
+  """
+
+  notification = _messages.StringField(1, required=True)
+
+
+class StorageNotificationsListRequest(_messages.Message):
+  """A StorageNotificationsListRequest object.
+
+  Fields:
+    bucket: Name of a GCS bucket.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+
+
+class StorageObjectAccessControlsDeleteRequest(_messages.Message):
+  """A StorageObjectAccessControlsDeleteRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    entity: The entity holding the permission. Can be user-userId, user-
+      emailAddress, group-groupId, group-emailAddress, allUsers, or
+      allAuthenticatedUsers.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  entity = _messages.StringField(2, required=True)
+  generation = _messages.IntegerField(3)
+  object = _messages.StringField(4, required=True)
+
+
+class StorageObjectAccessControlsDeleteResponse(_messages.Message):
+  """An empty StorageObjectAccessControlsDelete response."""
+
+
+class StorageObjectAccessControlsGetRequest(_messages.Message):
+  """A StorageObjectAccessControlsGetRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    entity: The entity holding the permission. Can be user-userId, user-
+      emailAddress, group-groupId, group-emailAddress, allUsers, or
+      allAuthenticatedUsers.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  entity = _messages.StringField(2, required=True)
+  generation = _messages.IntegerField(3)
+  object = _messages.StringField(4, required=True)
+
+
+class StorageObjectAccessControlsInsertRequest(_messages.Message):
+  """A StorageObjectAccessControlsInsertRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+    objectAccessControl: A ObjectAccessControl resource to be passed as the
+      request body.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  object = _messages.StringField(3, required=True)
+  objectAccessControl = _messages.MessageField('ObjectAccessControl', 4)
+
+
+class StorageObjectAccessControlsListRequest(_messages.Message):
+  """A StorageObjectAccessControlsListRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  object = _messages.StringField(3, required=True)
+
+
+class StorageObjectAccessControlsPatchRequest(_messages.Message):
+  """A StorageObjectAccessControlsPatchRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    entity: The entity holding the permission. Can be user-userId, user-
+      emailAddress, group-groupId, group-emailAddress, allUsers, or
+      allAuthenticatedUsers.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+    objectAccessControl: A ObjectAccessControl resource to be passed as the
+      request body.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  entity = _messages.StringField(2, required=True)
+  generation = _messages.IntegerField(3)
+  object = _messages.StringField(4, required=True)
+  objectAccessControl = _messages.MessageField('ObjectAccessControl', 5)
+
+
+class StorageObjectAccessControlsUpdateRequest(_messages.Message):
+  """A StorageObjectAccessControlsUpdateRequest object.
+
+  Fields:
+    bucket: Name of a bucket.
+    entity: The entity holding the permission. Can be user-userId, user-
+      emailAddress, group-groupId, group-emailAddress, allUsers, or
+      allAuthenticatedUsers.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+    objectAccessControl: A ObjectAccessControl resource to be passed as the
+      request body.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  entity = _messages.StringField(2, required=True)
+  generation = _messages.IntegerField(3)
+  object = _messages.StringField(4, required=True)
+  objectAccessControl = _messages.MessageField('ObjectAccessControl', 5)
+
+
+class StorageObjectsComposeRequest(_messages.Message):
+  """A StorageObjectsComposeRequest object.
+
+  Enums:
+    DestinationPredefinedAclValueValuesEnum: Apply a predefined set of access
+      controls to the destination object.
+
+  Fields:
+    composeRequest: A ComposeRequest resource to be passed as the request
+      body.
+    destinationBucket: Name of the bucket in which to store the new object.
+    destinationObject: Name of the new object. For information about how to
+      URL encode object names to be path safe, see Encoding URI Path Parts.
+    destinationPredefinedAcl: Apply a predefined set of access controls to the
+      destination object.
+    ifGenerationMatch: Makes the operation conditional on whether the object's
+      current generation matches the given value.
+    ifMetagenerationMatch: Makes the operation conditional on whether the
+      object's current metageneration matches the given value.
+  """
+
+  class DestinationPredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to the destination object.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  composeRequest = _messages.MessageField('ComposeRequest', 1)
+  destinationBucket = _messages.StringField(2, required=True)
+  destinationObject = _messages.StringField(3, required=True)
+  destinationPredefinedAcl = _messages.EnumField('DestinationPredefinedAclValueValuesEnum', 4)
+  ifGenerationMatch = _messages.IntegerField(5)
+  ifMetagenerationMatch = _messages.IntegerField(6)
+
+
+class StorageObjectsCopyRequest(_messages.Message):
+  """A StorageObjectsCopyRequest object.
+
+  Enums:
+    DestinationPredefinedAclValueValuesEnum: Apply a predefined set of access
+      controls to the destination object.
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl,
+      unless the object resource specifies the acl property, when it defaults
+      to full.
+
+  Fields:
+    destinationBucket: Name of the bucket in which to store the new object.
+      Overrides the provided object metadata's bucket value, if any.For
+      information about how to URL encode object names to be path safe, see
+      Encoding URI Path Parts.
+    destinationObject: Name of the new object. Required when the object
+      metadata is not otherwise provided. Overrides the object metadata's name
+      value, if any.
+    destinationPredefinedAcl: Apply a predefined set of access controls to the
+      destination object.
+    ifGenerationMatch: Makes the operation conditional on whether the
+      destination object's current generation matches the given value.
+    ifGenerationNotMatch: Makes the operation conditional on whether the
+      destination object's current generation does not match the given value.
+    ifMetagenerationMatch: Makes the operation conditional on whether the
+      destination object's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the operation conditional on whether the
+      destination object's current metageneration does not match the given
+      value.
+    ifSourceGenerationMatch: Makes the operation conditional on whether the
+      source object's generation matches the given value.
+    ifSourceGenerationNotMatch: Makes the operation conditional on whether the
+      source object's generation does not match the given value.
+    ifSourceMetagenerationMatch: Makes the operation conditional on whether
+      the source object's current metageneration matches the given value.
+    ifSourceMetagenerationNotMatch: Makes the operation conditional on whether
+      the source object's current metageneration does not match the given
+      value.
+    object: A Object resource to be passed as the request body.
+    projection: Set of properties to return. Defaults to noAcl, unless the
+      object resource specifies the acl property, when it defaults to full.
+    sourceBucket: Name of the bucket in which to find the source object.
+    sourceGeneration: If present, selects a specific revision of the source
+      object (as opposed to the latest version, the default).
+    sourceObject: Name of the source object. For information about how to URL
+      encode object names to be path safe, see Encoding URI Path Parts.
+  """
+
+  class DestinationPredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to the destination object.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl, unless the object
+    resource specifies the acl property, when it defaults to full.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit the owner, acl property.
+    """
+    full = 0
+    noAcl = 1
+
+  destinationBucket = _messages.StringField(1, required=True)
+  destinationObject = _messages.StringField(2, required=True)
+  destinationPredefinedAcl = _messages.EnumField('DestinationPredefinedAclValueValuesEnum', 3)
+  ifGenerationMatch = _messages.IntegerField(4)
+  ifGenerationNotMatch = _messages.IntegerField(5)
+  ifMetagenerationMatch = _messages.IntegerField(6)
+  ifMetagenerationNotMatch = _messages.IntegerField(7)
+  ifSourceGenerationMatch = _messages.IntegerField(8)
+  ifSourceGenerationNotMatch = _messages.IntegerField(9)
+  ifSourceMetagenerationMatch = _messages.IntegerField(10)
+  ifSourceMetagenerationNotMatch = _messages.IntegerField(11)
+  object = _messages.MessageField('Object', 12)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 13)
+  sourceBucket = _messages.StringField(14, required=True)
+  sourceGeneration = _messages.IntegerField(15)
+  sourceObject = _messages.StringField(16, required=True)
+
+
+class StorageObjectsDeleteRequest(_messages.Message):
+  """A StorageObjectsDeleteRequest object.
+
+  Fields:
+    bucket: Name of the bucket in which the object resides.
+    generation: If present, permanently deletes a specific revision of this
+      object (as opposed to the latest version, the default).
+    ifGenerationMatch: Makes the operation conditional on whether the object's
+      current generation matches the given value.
+    ifGenerationNotMatch: Makes the operation conditional on whether the
+      object's current generation does not match the given value.
+    ifMetagenerationMatch: Makes the operation conditional on whether the
+      object's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the operation conditional on whether the
+      object's current metageneration does not match the given value.
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  ifGenerationMatch = _messages.IntegerField(3)
+  ifGenerationNotMatch = _messages.IntegerField(4)
+  ifMetagenerationMatch = _messages.IntegerField(5)
+  ifMetagenerationNotMatch = _messages.IntegerField(6)
+  object = _messages.StringField(7, required=True)
+
+
+class StorageObjectsDeleteResponse(_messages.Message):
+  """An empty StorageObjectsDelete response."""
+
+
+class StorageObjectsGetIamPolicyRequest(_messages.Message):
+  """A StorageObjectsGetIamPolicyRequest object.
+
+  Fields:
+    bucket: Name of the bucket in which the object resides.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  object = _messages.StringField(3, required=True)
+
+
+class StorageObjectsGetRequest(_messages.Message):
+  """A StorageObjectsGetRequest object.
+
+  Enums:
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
+
+  Fields:
+    bucket: Name of the bucket in which the object resides.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    ifGenerationMatch: Makes the operation conditional on whether the object's
+      generation matches the given value.
+    ifGenerationNotMatch: Makes the operation conditional on whether the
+      object's generation does not match the given value.
+    ifMetagenerationMatch: Makes the operation conditional on whether the
+      object's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the operation conditional on whether the
+      object's current metageneration does not match the given value.
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+    projection: Set of properties to return. Defaults to noAcl.
+  """
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit the owner, acl property.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  ifGenerationMatch = _messages.IntegerField(3)
+  ifGenerationNotMatch = _messages.IntegerField(4)
+  ifMetagenerationMatch = _messages.IntegerField(5)
+  ifMetagenerationNotMatch = _messages.IntegerField(6)
+  object = _messages.StringField(7, required=True)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 8)
+
+
+class StorageObjectsInsertRequest(_messages.Message):
+  """A StorageObjectsInsertRequest object.
+
+  Enums:
+    PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
+      this object.
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl,
+      unless the object resource specifies the acl property, when it defaults
+      to full.
+
+  Fields:
+    bucket: Name of the bucket in which to store the new object. Overrides the
+      provided object metadata's bucket value, if any.
+    contentEncoding: If set, sets the contentEncoding property of the final
+      object to this value. Setting this parameter is equivalent to setting
+      the contentEncoding metadata property. This can be useful when uploading
+      an object with uploadType=media to indicate the encoding of the content
+      being uploaded.
+    ifGenerationMatch: Makes the operation conditional on whether the object's
+      current generation matches the given value.
+    ifGenerationNotMatch: Makes the operation conditional on whether the
+      object's current generation does not match the given value.
+    ifMetagenerationMatch: Makes the operation conditional on whether the
+      object's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the operation conditional on whether the
+      object's current metageneration does not match the given value.
+    name: Name of the object. Required when the object metadata is not
+      otherwise provided. Overrides the object metadata's name value, if any.
+      For information about how to URL encode object names to be path safe,
+      see Encoding URI Path Parts.
+    object: A Object resource to be passed as the request body.
+    predefinedAcl: Apply a predefined set of access controls to this object.
+    projection: Set of properties to return. Defaults to noAcl, unless the
+      object resource specifies the acl property, when it defaults to full.
+  """
+
+  class PredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to this object.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl, unless the object
+    resource specifies the acl property, when it defaults to full.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit the owner, acl property.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  contentEncoding = _messages.StringField(2)
+  ifGenerationMatch = _messages.IntegerField(3)
+  ifGenerationNotMatch = _messages.IntegerField(4)
+  ifMetagenerationMatch = _messages.IntegerField(5)
+  ifMetagenerationNotMatch = _messages.IntegerField(6)
+  name = _messages.StringField(7)
+  object = _messages.MessageField('Object', 8)
+  predefinedAcl = _messages.EnumField('PredefinedAclValueValuesEnum', 9)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 10)
+
+
+class StorageObjectsListRequest(_messages.Message):
+  """A StorageObjectsListRequest object.
+
+  Enums:
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
+
+  Fields:
+    bucket: Name of the bucket in which to look for objects.
+    delimiter: Returns results in a directory-like mode. items will contain
+      only objects whose names, aside from the prefix, do not contain
+      delimiter. Objects whose names, aside from the prefix, contain delimiter
+      will have their name, truncated after the delimiter, returned in
+      prefixes. Duplicate prefixes are omitted.
+    maxResults: Maximum number of items plus prefixes to return. As duplicate
+      prefixes are omitted, fewer total results may be returned than
+      requested. The default value of this parameter is 1,000 items.
+    pageToken: A previously-returned page token representing part of the
+      larger set of results to view.
+    prefix: Filter results to objects whose names begin with this prefix.
+    projection: Set of properties to return. Defaults to noAcl.
+    versions: If true, lists all versions of an object as distinct results.
+      The default is false. For more information, see Object Versioning.
+  """
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit the owner, acl property.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  delimiter = _messages.StringField(2)
+  maxResults = _messages.IntegerField(3, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(4)
+  prefix = _messages.StringField(5)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 6)
+  versions = _messages.BooleanField(7)
+
+
+class StorageObjectsPatchRequest(_messages.Message):
+  """A StorageObjectsPatchRequest object.
+
+  Enums:
+    PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
+      this object.
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to full.
+
+  Fields:
+    bucket: Name of the bucket in which the object resides.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    ifGenerationMatch: Makes the operation conditional on whether the object's
+      current generation matches the given value.
+    ifGenerationNotMatch: Makes the operation conditional on whether the
+      object's current generation does not match the given value.
+    ifMetagenerationMatch: Makes the operation conditional on whether the
+      object's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the operation conditional on whether the
+      object's current metageneration does not match the given value.
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+    objectResource: A Object resource to be passed as the request body.
+    predefinedAcl: Apply a predefined set of access controls to this object.
+    projection: Set of properties to return. Defaults to full.
+  """
+
+  class PredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to this object.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to full.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit the owner, acl property.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  ifGenerationMatch = _messages.IntegerField(3)
+  ifGenerationNotMatch = _messages.IntegerField(4)
+  ifMetagenerationMatch = _messages.IntegerField(5)
+  ifMetagenerationNotMatch = _messages.IntegerField(6)
+  object = _messages.StringField(7, required=True)
+  objectResource = _messages.MessageField('Object', 8)
+  predefinedAcl = _messages.EnumField('PredefinedAclValueValuesEnum', 9)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 10)
+
+
+class StorageObjectsRewriteRequest(_messages.Message):
+  """A StorageObjectsRewriteRequest object.
+
+  Enums:
+    DestinationPredefinedAclValueValuesEnum: Apply a predefined set of access
+      controls to the destination object.
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl,
+      unless the object resource specifies the acl property, when it defaults
+      to full.
+
+  Fields:
+    destinationBucket: Name of the bucket in which to store the new object.
+      Overrides the provided object metadata's bucket value, if any.
+    destinationObject: Name of the new object. Required when the object
+      metadata is not otherwise provided. Overrides the object metadata's name
+      value, if any. For information about how to URL encode object names to
+      be path safe, see Encoding URI Path Parts.
+    destinationPredefinedAcl: Apply a predefined set of access controls to the
+      destination object.
+    ifGenerationMatch: Makes the operation conditional on whether the
+      destination object's current generation matches the given value.
+    ifGenerationNotMatch: Makes the operation conditional on whether the
+      destination object's current generation does not match the given value.
+    ifMetagenerationMatch: Makes the operation conditional on whether the
+      destination object's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the operation conditional on whether the
+      destination object's current metageneration does not match the given
+      value.
+    ifSourceGenerationMatch: Makes the operation conditional on whether the
+      source object's generation matches the given value.
+    ifSourceGenerationNotMatch: Makes the operation conditional on whether the
+      source object's generation does not match the given value.
+    ifSourceMetagenerationMatch: Makes the operation conditional on whether
+      the source object's current metageneration matches the given value.
+    ifSourceMetagenerationNotMatch: Makes the operation conditional on whether
+      the source object's current metageneration does not match the given
+      value.
+    maxBytesRewrittenPerCall: The maximum number of bytes that will be
+      rewritten per rewrite request. Most callers shouldn't need to specify
+      this parameter - it is primarily in place to support testing. If
+      specified the value must be an integral multiple of 1 MiB (1048576).
+      Also, this only applies to requests where the source and destination
+      span locations and/or storage classes. Finally, this value must not
+      change across rewrite calls else you'll get an error that the
+      rewriteToken is invalid.
+    object: A Object resource to be passed as the request body.
+    projection: Set of properties to return. Defaults to noAcl, unless the
+      object resource specifies the acl property, when it defaults to full.
+    rewriteToken: Include this field (from the previous rewrite response) on
+      each rewrite request after the first one, until the rewrite response
+      'done' flag is true. Calls that provide a rewriteToken can omit all
+      other request fields, but if included those fields must match the values
+      provided in the first rewrite request.
+    sourceBucket: Name of the bucket in which to find the source object.
+    sourceGeneration: If present, selects a specific revision of the source
+      object (as opposed to the latest version, the default).
+    sourceObject: Name of the source object. For information about how to URL
+      encode object names to be path safe, see Encoding URI Path Parts.
+  """
+
+  class DestinationPredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to the destination object.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl, unless the object
+    resource specifies the acl property, when it defaults to full.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit the owner, acl property.
+    """
+    full = 0
+    noAcl = 1
+
+  destinationBucket = _messages.StringField(1, required=True)
+  destinationObject = _messages.StringField(2, required=True)
+  destinationPredefinedAcl = _messages.EnumField('DestinationPredefinedAclValueValuesEnum', 3)
+  ifGenerationMatch = _messages.IntegerField(4)
+  ifGenerationNotMatch = _messages.IntegerField(5)
+  ifMetagenerationMatch = _messages.IntegerField(6)
+  ifMetagenerationNotMatch = _messages.IntegerField(7)
+  ifSourceGenerationMatch = _messages.IntegerField(8)
+  ifSourceGenerationNotMatch = _messages.IntegerField(9)
+  ifSourceMetagenerationMatch = _messages.IntegerField(10)
+  ifSourceMetagenerationNotMatch = _messages.IntegerField(11)
+  maxBytesRewrittenPerCall = _messages.IntegerField(12)
+  object = _messages.MessageField('Object', 13)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 14)
+  rewriteToken = _messages.StringField(15)
+  sourceBucket = _messages.StringField(16, required=True)
+  sourceGeneration = _messages.IntegerField(17)
+  sourceObject = _messages.StringField(18, required=True)
+
+
+class StorageObjectsSetIamPolicyRequest(_messages.Message):
+  """A StorageObjectsSetIamPolicyRequest object.
+
+  Fields:
+    bucket: Name of the bucket in which the object resides.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+    policy: A Policy resource to be passed as the request body.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  object = _messages.StringField(3, required=True)
+  policy = _messages.MessageField('Policy', 4)
+
+
+class StorageObjectsTestIamPermissionsRequest(_messages.Message):
+  """A StorageObjectsTestIamPermissionsRequest object.
+
+  Fields:
+    bucket: Name of the bucket in which the object resides.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+    permissions: Permissions to test.
+  """
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  object = _messages.StringField(3, required=True)
+  permissions = _messages.StringField(4, required=True)
+
+
+class StorageObjectsUpdateRequest(_messages.Message):
+  """A StorageObjectsUpdateRequest object.
+
+  Enums:
+    PredefinedAclValueValuesEnum: Apply a predefined set of access controls to
+      this object.
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to full.
+
+  Fields:
+    bucket: Name of the bucket in which the object resides.
+    generation: If present, selects a specific revision of this object (as
+      opposed to the latest version, the default).
+    ifGenerationMatch: Makes the operation conditional on whether the object's
+      current generation matches the given value.
+    ifGenerationNotMatch: Makes the operation conditional on whether the
+      object's current generation does not match the given value.
+    ifMetagenerationMatch: Makes the operation conditional on whether the
+      object's current metageneration matches the given value.
+    ifMetagenerationNotMatch: Makes the operation conditional on whether the
+      object's current metageneration does not match the given value.
+    object: Name of the object. For information about how to URL encode object
+      names to be path safe, see Encoding URI Path Parts.
+    objectResource: A Object resource to be passed as the request body.
+    predefinedAcl: Apply a predefined set of access controls to this object.
+    projection: Set of properties to return. Defaults to full.
+  """
+
+  class PredefinedAclValueValuesEnum(_messages.Enum):
+    """Apply a predefined set of access controls to this object.
+
+    Values:
+      authenticatedRead: Object owner gets OWNER access, and
+        allAuthenticatedUsers get READER access.
+      bucketOwnerFullControl: Object owner gets OWNER access, and project team
+        owners get OWNER access.
+      bucketOwnerRead: Object owner gets OWNER access, and project team owners
+        get READER access.
+      private: Object owner gets OWNER access.
+      projectPrivate: Object owner gets OWNER access, and project team members
+        get access according to their roles.
+      publicRead: Object owner gets OWNER access, and allUsers get READER
+        access.
+    """
+    authenticatedRead = 0
+    bucketOwnerFullControl = 1
+    bucketOwnerRead = 2
+    private = 3
+    projectPrivate = 4
+    publicRead = 5
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to full.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit the owner, acl property.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  generation = _messages.IntegerField(2)
+  ifGenerationMatch = _messages.IntegerField(3)
+  ifGenerationNotMatch = _messages.IntegerField(4)
+  ifMetagenerationMatch = _messages.IntegerField(5)
+  ifMetagenerationNotMatch = _messages.IntegerField(6)
+  object = _messages.StringField(7, required=True)
+  objectResource = _messages.MessageField('Object', 8)
+  predefinedAcl = _messages.EnumField('PredefinedAclValueValuesEnum', 9)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 10)
+
+
+class StorageObjectsWatchAllRequest(_messages.Message):
+  """A StorageObjectsWatchAllRequest object.
+
+  Enums:
+    ProjectionValueValuesEnum: Set of properties to return. Defaults to noAcl.
+
+  Fields:
+    bucket: Name of the bucket in which to look for objects.
+    channel: A Channel resource to be passed as the request body.
+    delimiter: Returns results in a directory-like mode. items will contain
+      only objects whose names, aside from the prefix, do not contain
+      delimiter. Objects whose names, aside from the prefix, contain delimiter
+      will have their name, truncated after the delimiter, returned in
+      prefixes. Duplicate prefixes are omitted.
+    maxResults: Maximum number of items plus prefixes to return. As duplicate
+      prefixes are omitted, fewer total results may be returned than
+      requested. The default value of this parameter is 1,000 items.
+    pageToken: A previously-returned page token representing part of the
+      larger set of results to view.
+    prefix: Filter results to objects whose names begin with this prefix.
+    projection: Set of properties to return. Defaults to noAcl.
+    versions: If true, lists all versions of an object as distinct results.
+      The default is false. For more information, see Object Versioning.
+  """
+
+  class ProjectionValueValuesEnum(_messages.Enum):
+    """Set of properties to return. Defaults to noAcl.
+
+    Values:
+      full: Include all properties.
+      noAcl: Omit the owner, acl property.
+    """
+    full = 0
+    noAcl = 1
+
+  bucket = _messages.StringField(1, required=True)
+  channel = _messages.MessageField('Channel', 2)
+  delimiter = _messages.StringField(3)
+  maxResults = _messages.IntegerField(4, variant=_messages.Variant.UINT32)
+  pageToken = _messages.StringField(5)
+  prefix = _messages.StringField(6)
+  projection = _messages.EnumField('ProjectionValueValuesEnum', 7)
+  versions = _messages.BooleanField(8)
+
+
+class TestIamPermissionsResponse(_messages.Message):
+  """A storage.(buckets|objects).testIamPermissions response.
+
+  Fields:
+    kind: The kind of item this is.
+    permissions: The permissions held by the caller. Permissions are always of
+      the format storage.resource.capability, where resource is one of buckets
+      or objects. The supported permissions are as follows:   -
+      storage.buckets.delete \u2014 Delete bucket.   - storage.buckets.get \u2014 Read
+      bucket metadata.   - storage.buckets.getIamPolicy \u2014 Read bucket IAM
+      policy.   - storage.buckets.create \u2014 Create bucket.   -
+      storage.buckets.list \u2014 List buckets.   - storage.buckets.setIamPolicy \u2014
+      Update bucket IAM policy.   - storage.buckets.update \u2014 Update bucket
+      metadata.   - storage.objects.delete \u2014 Delete object.   -
+      storage.objects.get \u2014 Read object data and metadata.   -
+      storage.objects.getIamPolicy \u2014 Read object IAM policy.   -
+      storage.objects.create \u2014 Create object.   - storage.objects.list \u2014 List
+      objects.   - storage.objects.setIamPolicy \u2014 Update object IAM policy.
+      - storage.objects.update \u2014 Update object metadata.
+  """
+
+  kind = _messages.StringField(1, default=u'storage#testIamPermissionsResponse')
+  permissions = _messages.StringField(2, repeated=True)
+
+
diff --git a/samples/storage_sample/testdata/fifteen_byte_file b/samples/storage_sample/testdata/fifteen_byte_file
new file mode 100644
index 0000000..6b665aa
--- /dev/null
+++ b/samples/storage_sample/testdata/fifteen_byte_file
@@ -0,0 +1,4 @@
+a
+ab
+abc
+abcde
diff --git a/samples/storage_sample/testdata/filename_with_spaces b/samples/storage_sample/testdata/filename_with_spaces
new file mode 100644
index 0000000..6b665aa
--- /dev/null
+++ b/samples/storage_sample/testdata/filename_with_spaces
@@ -0,0 +1,4 @@
+a
+ab
+abc
+abcde
diff --git a/samples/storage_sample/uploads_test.py b/samples/storage_sample/uploads_test.py
new file mode 100644
index 0000000..cfe6aaa
--- /dev/null
+++ b/samples/storage_sample/uploads_test.py
@@ -0,0 +1,175 @@
+#
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Integration tests for uploading and downloading to GCS.
+
+These tests exercise most of the corner cases for upload/download of
+files in apitools, via GCS. There are no performance tests here yet.
+"""
+
+import json
+import os
+import random
+import string
+import unittest
+
+import six
+
+import apitools.base.py as apitools_base
+import storage
+
+_CLIENT = None
+
+
+def _GetClient():
+    global _CLIENT  # pylint: disable=global-statement
+    if _CLIENT is None:
+        _CLIENT = storage.StorageV1()
+    return _CLIENT
+
+
+class UploadsTest(unittest.TestCase):
+    _DEFAULT_BUCKET = 'apitools'
+    _TESTDATA_PREFIX = 'uploads'
+
+    def setUp(self):
+        self.__client = _GetClient()
+        self.__files = []
+        self.__content = ''
+        self.__buffer = None
+        self.__upload = None
+
+    def tearDown(self):
+        self.__DeleteFiles()
+
+    def __ResetUpload(self, size, auto_transfer=True):
+        self.__content = ''.join(
+            random.choice(string.ascii_letters) for _ in range(size))
+        self.__buffer = six.StringIO(self.__content)
+        self.__upload = storage.Upload.FromStream(
+            self.__buffer, 'text/plain', auto_transfer=auto_transfer)
+
+    def __DeleteFiles(self):
+        for filename in self.__files:
+            self.__DeleteFile(filename)
+
+    def __DeleteFile(self, filename):
+        object_name = os.path.join(self._TESTDATA_PREFIX, filename)
+        req = storage.StorageObjectsDeleteRequest(
+            bucket=self._DEFAULT_BUCKET, object=object_name)
+        self.__client.objects.Delete(req)
+
+    def __InsertRequest(self, filename):
+        object_name = os.path.join(self._TESTDATA_PREFIX, filename)
+        return storage.StorageObjectsInsertRequest(
+            name=object_name, bucket=self._DEFAULT_BUCKET)
+
+    def __GetRequest(self, filename):
+        object_name = os.path.join(self._TESTDATA_PREFIX, filename)
+        return storage.StorageObjectsGetRequest(
+            object=object_name, bucket=self._DEFAULT_BUCKET)
+
+    def __InsertFile(self, filename, request=None):
+        if request is None:
+            request = self.__InsertRequest(filename)
+        response = self.__client.objects.Insert(request, upload=self.__upload)
+        self.assertIsNotNone(response)
+        self.__files.append(filename)
+        return response
+
+    def testZeroBytes(self):
+        filename = 'zero_byte_file'
+        self.__ResetUpload(0)
+        response = self.__InsertFile(filename)
+        self.assertEqual(0, response.size)
+
+    def testSimpleUpload(self):
+        filename = 'fifteen_byte_file'
+        self.__ResetUpload(15)
+        response = self.__InsertFile(filename)
+        self.assertEqual(15, response.size)
+
+    def testMultipartUpload(self):
+        filename = 'fifteen_byte_file'
+        self.__ResetUpload(15)
+        request = self.__InsertRequest(filename)
+        request.object = storage.Object(contentLanguage='en')
+        response = self.__InsertFile(filename, request=request)
+        self.assertEqual(15, response.size)
+        self.assertEqual('en', response.contentLanguage)
+
+    def testAutoUpload(self):
+        filename = 'ten_meg_file'
+        size = 10 << 20
+        self.__ResetUpload(size)
+        request = self.__InsertRequest(filename)
+        response = self.__InsertFile(filename, request=request)
+        self.assertEqual(size, response.size)
+
+    def testStreamMedia(self):
+        filename = 'ten_meg_file'
+        size = 10 << 20
+        self.__ResetUpload(size, auto_transfer=False)
+        self.__upload.strategy = 'resumable'
+        self.__upload.total_size = size
+        request = self.__InsertRequest(filename)
+        initial_response = self.__client.objects.Insert(
+            request, upload=self.__upload)
+        self.assertIsNotNone(initial_response)
+        self.assertEqual(0, self.__buffer.tell())
+        self.__upload.StreamMedia()
+        self.assertEqual(size, self.__buffer.tell())
+
+    def testBreakAndResumeUpload(self):
+        filename = ('ten_meg_file_' +
+                    ''.join(random.sample(string.ascii_letters, 5)))
+        size = 10 << 20
+        self.__ResetUpload(size, auto_transfer=False)
+        self.__upload.strategy = 'resumable'
+        self.__upload.total_size = size
+        # Start the upload
+        request = self.__InsertRequest(filename)
+        initial_response = self.__client.objects.Insert(
+            request, upload=self.__upload)
+        self.assertIsNotNone(initial_response)
+        self.assertEqual(0, self.__buffer.tell())
+        # Pretend the process died, and resume with a new attempt at the
+        # same upload.
+        upload_data = json.dumps(self.__upload.serialization_data)
+        second_upload_attempt = apitools_base.Upload.FromData(
+            self.__buffer, upload_data, self.__upload.http)
+        second_upload_attempt._Upload__SendChunk(0)
+        self.assertEqual(second_upload_attempt.chunksize, self.__buffer.tell())
+        # Simulate a third try, and stream from there.
+        final_upload_attempt = apitools_base.Upload.FromData(
+            self.__buffer, upload_data, self.__upload.http)
+        final_upload_attempt.StreamInChunks()
+        self.assertEqual(size, self.__buffer.tell())
+        # Verify the upload
+        object_info = self.__client.objects.Get(self.__GetRequest(filename))
+        self.assertEqual(size, object_info.size)
+        # Confirm that a new attempt successfully does nothing.
+        completed_upload_attempt = apitools_base.Upload.FromData(
+            self.__buffer, upload_data, self.__upload.http)
+        self.assertTrue(completed_upload_attempt.complete)
+        completed_upload_attempt.StreamInChunks()
+        # Verify the upload didn't pick up extra bytes.
+        object_info = self.__client.objects.Get(self.__GetRequest(filename))
+        self.assertEqual(size, object_info.size)
+        # TODO(craigcitro): Add tests for callbacks (especially around
+        # finish callback).
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/samples/uptodate_check_test.py b/samples/uptodate_check_test.py
new file mode 100644
index 0000000..6fbea9c
--- /dev/null
+++ b/samples/uptodate_check_test.py
@@ -0,0 +1,86 @@
+# Copyright 2015 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import difflib
+
+import unittest2
+
+from apitools.gen import gen_client
+from apitools.gen import test_utils
+
+
+def GetSampleClientPath(api_name, *path):
+    return os.path.join(os.path.dirname(__file__), api_name + '_sample', *path)
+
+
+def _GetContent(file_path):
+    with open(file_path) as f:
+        return f.read()
+
+
+@test_utils.RunOnlyOnPython27
+class ClientGenCliTest(unittest2.TestCase):
+
+    def AssertDiffEqual(self, expected, actual):
+        """Like unittest.assertEqual with a diff in the exception message."""
+        if expected != actual:
+            unified_diff = difflib.unified_diff(
+                expected.splitlines(), actual.splitlines())
+            raise AssertionError('\n'.join(unified_diff))
+
+    def _CheckGeneratedFiles(self, api_name, api_version):
+        prefix = api_name + '_' + api_version
+        with test_utils.TempDir() as tmp_dir_path:
+            gen_client.main([
+                gen_client.__file__,
+                '--generate_cli',
+                '--init-file', 'empty',
+                '--infile',
+                GetSampleClientPath(api_name, prefix + '.json'),
+                '--outdir', tmp_dir_path,
+                '--overwrite',
+                '--root_package',
+                'samples.{0}_sample.{0}_{1}'.format(api_name, api_version),
+                'client'
+            ])
+            expected_files = (
+                set([prefix + '.py']) |  # CLI files
+                set([prefix + '_client.py',
+                     prefix + '_messages.py',
+                     '__init__.py']))
+            self.assertEquals(expected_files, set(os.listdir(tmp_dir_path)))
+            for expected_file in expected_files:
+                self.AssertDiffEqual(
+                    _GetContent(GetSampleClientPath(
+                        api_name, prefix, expected_file)),
+                    _GetContent(os.path.join(tmp_dir_path, expected_file)))
+
+    def testGenClient_BigqueryDoc(self):
+        self._CheckGeneratedFiles('bigquery', 'v2')
+
+    def testGenClient_DnsDoc(self):
+        self._CheckGeneratedFiles('dns', 'v1')
+
+    def testGenClient_FusiontablesDoc(self):
+        self._CheckGeneratedFiles('fusiontables', 'v1')
+
+    def testGenClient_IamDoc(self):
+        self._CheckGeneratedFiles('iam', 'v1')
+
+    def testGenClient_ServicemanagementDoc(self):
+        self._CheckGeneratedFiles('servicemanagement', 'v1')
+
+    def testGenClient_StorageDoc(self):
+        self._CheckGeneratedFiles('storage', 'v1')
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..9667697
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Setup configuration."""
+
+import platform
+
+try:
+    import setuptools
+except ImportError:
+    from ez_setup import use_setuptools
+    use_setuptools()
+    import setuptools
+
+# Configure the required packages and scripts to install, depending on
+# Python version and OS.
+REQUIRED_PACKAGES = [
+    'httplib2>=0.8',
+    'oauth2client>=1.5.2,<4.0.0dev',
+    'six>=1.9.0',
+    ]
+
+CLI_PACKAGES = [
+    'google-apputils>=0.4.0',
+    'python-gflags==3.0.6',  # Starting version 3.0.7 py26 is not supported.
+]
+
+TESTING_PACKAGES = [
+    'google-apputils>=0.4.0',
+    'unittest2>=0.5.1',
+    'mock>=1.0.1',
+]
+
+CONSOLE_SCRIPTS = [
+    'gen_client = apitools.gen.gen_client:main',
+]
+
+py_version = platform.python_version()
+
+if py_version < '2.7':
+    REQUIRED_PACKAGES.append('argparse>=1.2.1')
+
+_APITOOLS_VERSION = '0.5.11'
+
+with open('README.rst') as fileobj:
+    README = fileobj.read()
+
+setuptools.setup(
+    name='google-apitools',
+    version=_APITOOLS_VERSION,
+    description='client libraries for humans',
+    long_description=README,
+    url='http://github.com/craigcitro/apitools',
+    author='Craig Citro',
+    author_email='craigcitro@google.com',
+    # Contained modules and scripts.
+    packages=setuptools.find_packages(),
+    entry_points={'console_scripts': CONSOLE_SCRIPTS},
+    install_requires=REQUIRED_PACKAGES,
+    tests_require=REQUIRED_PACKAGES + CLI_PACKAGES + TESTING_PACKAGES,
+    extras_require={
+        'cli': CLI_PACKAGES,
+        'testing': TESTING_PACKAGES,
+        },
+    # Add in any packaged data.
+    include_package_data=True,
+    package_data={
+        'apitools.data': ['*'],
+    },
+    # PyPI package information.
+    classifiers=[
+        'License :: OSI Approved :: Apache Software License',
+        'Topic :: Software Development :: Libraries',
+        'Topic :: Software Development :: Libraries :: Python Modules',
+        ],
+    license='Apache 2.0',
+    keywords='apitools',
+    )
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..e2d1f23
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,87 @@
+[tox]
+envlist = py26,py27,pypy,py34,py35,lint,cover,py27oldoauth2client
+
+[testenv]
+deps =
+    nose
+    python-gflags==3.0.6
+commands =
+    pip install google-apitools[testing]
+    nosetests []
+passenv = TRAVIS*
+
+[testenv:py27oldoauth2client]
+commands =
+    pip install oauth2client==1.5.2
+    {[testenv]commands}
+deps = {[testenv]deps}
+
+[testenv:py34]
+basepython = python3.4
+deps =
+    mock
+    nose
+    unittest2
+commands = nosetests []
+
+[testenv:py35]
+basepython = python3.5
+deps =
+    mock
+    nose
+    unittest2
+commands = nosetests []
+
+[pep8]
+exclude = samples/*_sample/*/*,*/testdata/*,*.egg/,*.egg-info/,.*/,ez_setup.py,build
+verbose = 1
+
+[testenv:lint]
+basepython =
+    python2.7
+commands =
+    pip install six google-apitools[testing]
+    pep8
+    python run_pylint.py
+deps =
+    pep8
+    pylint
+    unittest2
+
+[testenv:cover]
+basepython =
+    python2.7
+commands =
+    nosetests --with-xunit --with-xcoverage --cover-package=apitools --nocapture --cover-erase --cover-tests --cover-branches []
+deps =
+    google-apputils
+    python-gflags==3.0.6
+    mock
+    nose
+    unittest2
+    coverage
+    nosexcover
+
+[testenv:coveralls]
+basepython = {[testenv:cover]basepython}
+commands =
+    {[testenv:cover]commands}
+    coveralls
+deps =
+    {[testenv:cover]deps}
+    coveralls
+
+[testenv:transfer_coverage]
+basepython =
+    python2.7
+deps =
+    mock
+    nose
+    unittest2
+    coverage
+commands =
+    coverage run --branch -p samples/storage_sample/downloads_test.py
+    coverage run --branch -p samples/storage_sample/uploads_test.py
+    coverage run --branch -p apitools/base/py/transfer_test.py
+    coverage combine
+    coverage html