"""Generated message classes for bigquery version v2.
A data platform for customers to create, manage, share and query data.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'bigquery'
class BigqueryDatasetsDeleteRequest(_messages.Message):
"""A BigqueryDatasetsDeleteRequest object.
Fields:
datasetId: Dataset ID of the dataset being deleted
deleteContents: If True, delete all the tables in the dataset. If False
and the dataset contains tables, the request will fail. Default is False
projectId: Project ID of the dataset being deleted
"""
datasetId = _messages.StringField(1, required=True)
deleteContents = _messages.BooleanField(2)
projectId = _messages.StringField(3, required=True)
class BigqueryDatasetsDeleteResponse(_messages.Message):
"""An empty BigqueryDatasetsDelete response."""
class BigqueryDatasetsGetRequest(_messages.Message):
"""A BigqueryDatasetsGetRequest object.
Fields:
datasetId: Dataset ID of the requested dataset
projectId: Project ID of the requested dataset
"""
datasetId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
class BigqueryDatasetsInsertRequest(_messages.Message):
"""A BigqueryDatasetsInsertRequest object.
Fields:
dataset: A Dataset resource to be passed as the request body.
projectId: Project ID of the new dataset
"""
dataset = _messages.MessageField('Dataset', 1)
projectId = _messages.StringField(2, required=True)
class BigqueryDatasetsListRequest(_messages.Message):
"""A BigqueryDatasetsListRequest object.
Fields:
all: Whether to list all datasets, including hidden ones
filter: An expression for filtering the results of the request by label.
The syntax is "labels.<name>[:<value>]". Multiple filters can be ANDed
together by
connecting with a space. Example: "labels.department:receiving
labels.active". See https://cloud.google.com/bigquery/docs/labeling-
datasets#filtering_datasets_using_labels for details.
maxResults: The maximum number of results to return
pageToken: Page token, returned by a previous call, to request the next
page of results
projectId: Project ID of the datasets to be listed
"""
all = _messages.BooleanField(1)
filter = _messages.StringField(2)
maxResults = _messages.IntegerField(3, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(4)
projectId = _messages.StringField(5, required=True)
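# Usage sketch (illustrative, not part of the generated API surface): build a
# datasets-list request that uses the label-filter syntax documented above.
# The project ID is a hypothetical value.
def _example_datasets_list_request():
  """Returns an illustrative BigqueryDatasetsListRequest."""
  return BigqueryDatasetsListRequest(
      projectId='my-project',  # hypothetical project ID
      all=True,  # include hidden datasets
      filter='labels.department:receiving labels.active',
      maxResults=100)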
class BigqueryDatasetsPatchRequest(_messages.Message):
"""A BigqueryDatasetsPatchRequest object.
Fields:
dataset: A Dataset resource to be passed as the request body.
datasetId: Dataset ID of the dataset being updated
projectId: Project ID of the dataset being updated
"""
dataset = _messages.MessageField('Dataset', 1)
datasetId = _messages.StringField(2, required=True)
projectId = _messages.StringField(3, required=True)
class BigqueryDatasetsUpdateRequest(_messages.Message):
"""A BigqueryDatasetsUpdateRequest object.
Fields:
dataset: A Dataset resource to be passed as the request body.
datasetId: Dataset ID of the dataset being updated
projectId: Project ID of the dataset being updated
"""
dataset = _messages.MessageField('Dataset', 1)
datasetId = _messages.StringField(2, required=True)
projectId = _messages.StringField(3, required=True)
class BigqueryJobsCancelRequest(_messages.Message):
"""A BigqueryJobsCancelRequest object.
Fields:
jobId: [Required] Job ID of the job to cancel
projectId: [Required] Project ID of the job to cancel
"""
jobId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
class BigqueryJobsGetQueryResultsRequest(_messages.Message):
"""A BigqueryJobsGetQueryResultsRequest object.
Fields:
jobId: [Required] Job ID of the query job
maxResults: Maximum number of results to read
pageToken: Page token, returned by a previous call, to request the next
page of results
projectId: [Required] Project ID of the query job
startIndex: Zero-based index of the starting row
timeoutMs: How long to wait for the query to complete, in milliseconds,
before returning. Default is 10 seconds. If the timeout passes before
the job completes, the 'jobComplete' field in the response will be false
"""
jobId = _messages.StringField(1, required=True)
maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(3)
projectId = _messages.StringField(4, required=True)
startIndex = _messages.IntegerField(5, variant=_messages.Variant.UINT64)
timeoutMs = _messages.IntegerField(6, variant=_messages.Variant.UINT32)
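# Usage sketch: a get-query-results request that asks the server to wait up
# to 30 seconds for the query to finish. If the response comes back with
# jobComplete=False, the same request can simply be reissued. Names are
# hypothetical.
def _example_get_query_results_request(job_id, page_token=None):
  """Returns an illustrative BigqueryJobsGetQueryResultsRequest."""
  return BigqueryJobsGetQueryResultsRequest(
      projectId='my-project',  # hypothetical project ID
      jobId=job_id,
      timeoutMs=30000,  # wait up to 30s before returning jobComplete=False
      maxResults=1000,
      pageToken=page_token)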
class BigqueryJobsGetRequest(_messages.Message):
"""A BigqueryJobsGetRequest object.
Fields:
jobId: [Required] Job ID of the requested job
projectId: [Required] Project ID of the requested job
"""
jobId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
class BigqueryJobsInsertRequest(_messages.Message):
"""A BigqueryJobsInsertRequest object.
Fields:
job: A Job resource to be passed as the request body.
projectId: Project ID of the project that will be billed for the job
"""
job = _messages.MessageField('Job', 1)
projectId = _messages.StringField(2, required=True)
class BigqueryJobsListRequest(_messages.Message):
"""A BigqueryJobsListRequest object.
Enums:
ProjectionValueValuesEnum: Restrict information returned to a set of
selected fields
StateFilterValueValuesEnum: Filter for job state
Fields:
allUsers: Whether to display jobs owned by all users in the project.
Default false
maxResults: Maximum number of results to return
pageToken: Page token, returned by a previous call, to request the next
page of results
projectId: Project ID of the jobs to list
projection: Restrict information returned to a set of selected fields
stateFilter: Filter for job state
"""
class ProjectionValueValuesEnum(_messages.Enum):
"""Restrict information returned to a set of selected fields
Values:
full: Includes all job data
minimal: Does not include the job configuration
"""
full = 0
minimal = 1
class StateFilterValueValuesEnum(_messages.Enum):
"""Filter for job state
Values:
done: Finished jobs
pending: Pending jobs
running: Running jobs
"""
done = 0
pending = 1
running = 2
allUsers = _messages.BooleanField(1)
maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(3)
projectId = _messages.StringField(4, required=True)
projection = _messages.EnumField('ProjectionValueValuesEnum', 5)
stateFilter = _messages.EnumField('StateFilterValueValuesEnum', 6, repeated=True)
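# Usage sketch: the nested enum types are referenced through the request
# class itself, and the repeated stateFilter field takes a list of enum
# values. The project ID is hypothetical.
def _example_jobs_list_request():
  """Returns an illustrative BigqueryJobsListRequest."""
  return BigqueryJobsListRequest(
      projectId='my-project',  # hypothetical project ID
      allUsers=False,
      projection=BigqueryJobsListRequest.ProjectionValueValuesEnum.minimal,
      stateFilter=[
          BigqueryJobsListRequest.StateFilterValueValuesEnum.pending,
          BigqueryJobsListRequest.StateFilterValueValuesEnum.running,
      ])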
class BigqueryJobsQueryRequest(_messages.Message):
"""A BigqueryJobsQueryRequest object.
Fields:
projectId: Project ID of the project billed for the query
queryRequest: A QueryRequest resource to be passed as the request body.
"""
projectId = _messages.StringField(1, required=True)
queryRequest = _messages.MessageField('QueryRequest', 2)
class BigqueryProjectsListRequest(_messages.Message):
"""A BigqueryProjectsListRequest object.
Fields:
maxResults: Maximum number of results to return
pageToken: Page token, returned by a previous call, to request the next
page of results
"""
maxResults = _messages.IntegerField(1, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(2)
class BigqueryTabledataInsertAllRequest(_messages.Message):
"""A BigqueryTabledataInsertAllRequest object.
Fields:
datasetId: Dataset ID of the destination table.
projectId: Project ID of the destination table.
tableDataInsertAllRequest: A TableDataInsertAllRequest resource to be
passed as the request body.
tableId: Table ID of the destination table.
"""
datasetId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
tableDataInsertAllRequest = _messages.MessageField('TableDataInsertAllRequest', 3)
tableId = _messages.StringField(4, required=True)
class BigqueryTabledataListRequest(_messages.Message):
"""A BigqueryTabledataListRequest object.
Fields:
datasetId: Dataset ID of the table to read
maxResults: Maximum number of results to return
pageToken: Page token, returned by a previous call, identifying the result
set
projectId: Project ID of the table to read
startIndex: Zero-based index of the starting row to read
tableId: Table ID of the table to read
"""
datasetId = _messages.StringField(1, required=True)
maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(3)
projectId = _messages.StringField(4, required=True)
startIndex = _messages.IntegerField(5, variant=_messages.Variant.UINT64)
tableId = _messages.StringField(6, required=True)
class BigqueryTablesDeleteRequest(_messages.Message):
"""A BigqueryTablesDeleteRequest object.
Fields:
datasetId: Dataset ID of the table to delete
projectId: Project ID of the table to delete
tableId: Table ID of the table to delete
"""
datasetId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
tableId = _messages.StringField(3, required=True)
class BigqueryTablesDeleteResponse(_messages.Message):
"""An empty BigqueryTablesDelete response."""
class BigqueryTablesGetRequest(_messages.Message):
"""A BigqueryTablesGetRequest object.
Fields:
datasetId: Dataset ID of the requested table
projectId: Project ID of the requested table
tableId: Table ID of the requested table
"""
datasetId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
tableId = _messages.StringField(3, required=True)
class BigqueryTablesInsertRequest(_messages.Message):
"""A BigqueryTablesInsertRequest object.
Fields:
datasetId: Dataset ID of the new table
projectId: Project ID of the new table
table: A Table resource to be passed as the request body.
"""
datasetId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
table = _messages.MessageField('Table', 3)
class BigqueryTablesListRequest(_messages.Message):
"""A BigqueryTablesListRequest object.
Fields:
datasetId: Dataset ID of the tables to list
maxResults: Maximum number of results to return
pageToken: Page token, returned by a previous call, to request the next
page of results
projectId: Project ID of the tables to list
"""
datasetId = _messages.StringField(1, required=True)
maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
pageToken = _messages.StringField(3)
projectId = _messages.StringField(4, required=True)
class BigqueryTablesPatchRequest(_messages.Message):
"""A BigqueryTablesPatchRequest object.
Fields:
datasetId: Dataset ID of the table to update
projectId: Project ID of the table to update
table: A Table resource to be passed as the request body.
tableId: Table ID of the table to update
"""
datasetId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
table = _messages.MessageField('Table', 3)
tableId = _messages.StringField(4, required=True)
class BigqueryTablesUpdateRequest(_messages.Message):
"""A BigqueryTablesUpdateRequest object.
Fields:
datasetId: Dataset ID of the table to update
projectId: Project ID of the table to update
table: A Table resource to be passed as the request body.
tableId: Table ID of the table to update
"""
datasetId = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
table = _messages.MessageField('Table', 3)
tableId = _messages.StringField(4, required=True)
class BigtableColumn(_messages.Message):
"""A BigtableColumn object.
Fields:
encoding: [Optional] The encoding of the values when the type is not
STRING. Acceptable encoding values are: TEXT - indicates values are
alphanumeric text strings. BINARY - indicates values are encoded using
HBase Bytes.toBytes family of functions. 'encoding' can also be set at
the column family level. However, the setting at this level takes
precedence if 'encoding' is set at both levels.
fieldName: [Optional] If the qualifier is not a valid BigQuery field
identifier, i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid identifier
must be provided as the column field name and is used as the field name
in queries.
onlyReadLatest: [Optional] If this is set, only the latest version of
each value in this column is exposed. 'onlyReadLatest' can also be set at
the column family level. However, the setting at this level takes
precedence if 'onlyReadLatest' is set at both levels.
qualifierEncoded: [Required] Qualifier of the column. Columns in the
parent column family that have this exact qualifier are exposed as
fields. If the qualifier is a valid UTF-8 string, it can be specified in
the qualifier_string field. Otherwise, a base-64 encoded value must be
set to qualifier_encoded. The column field name is the same as the
column qualifier. However, if the qualifier is not a valid BigQuery
field identifier, i.e. does not match [a-zA-Z][a-zA-Z0-9_]*, a valid
identifier must be provided as field_name.
qualifierString: A string attribute.
type: [Optional] The type to convert the values in cells of this column
to. The values are expected to be encoded using the HBase Bytes.toBytes
function when using the BINARY encoding value. The following BigQuery
types are allowed (case-sensitive): BYTES, STRING, INTEGER, FLOAT,
BOOLEAN. The default type is BYTES. 'type' can also be set at the column
family level.
However, the setting at this level takes precedence if 'type' is set at
both levels.
"""
encoding = _messages.StringField(1)
fieldName = _messages.StringField(2)
onlyReadLatest = _messages.BooleanField(3)
qualifierEncoded = _messages.BytesField(4)
qualifierString = _messages.StringField(5)
type = _messages.StringField(6)
class BigtableColumnFamily(_messages.Message):
"""A BigtableColumnFamily object.
Fields:
columns: [Optional] Lists of columns that should be exposed as individual
fields as opposed to a list of (column name, value) pairs. All columns
whose qualifier matches a qualifier in this list can be accessed as
individual fields. Other columns can be accessed as a list through the
column family's .Column field.
encoding: [Optional] The encoding of the values when the type is not
STRING. Acceptable encoding values are: TEXT - indicates values are
alphanumeric text strings. BINARY - indicates values are encoded using
HBase Bytes.toBytes family of functions. This can be overridden for a
specific column by listing that column in 'columns' and specifying an
encoding for it.
familyId: Identifier of the column family.
onlyReadLatest: [Optional] If this is set, only the latest version of each
value is exposed for all columns in this column family. This can be
overridden for a specific column by listing that column in 'columns' and
specifying a different setting for that column.
type: [Optional] The type to convert the values in cells of this column
family to. The values are expected to be encoded using the HBase
Bytes.toBytes function when using the BINARY encoding value. The
following BigQuery types are allowed (case-sensitive): BYTES, STRING,
INTEGER, FLOAT, BOOLEAN. The default type is BYTES. This can be
overridden for a specific column by listing that column in 'columns' and
specifying a type for it.
"""
columns = _messages.MessageField('BigtableColumn', 1, repeated=True)
encoding = _messages.StringField(2)
familyId = _messages.StringField(3)
onlyReadLatest = _messages.BooleanField(4)
type = _messages.StringField(5)
class BigtableOptions(_messages.Message):
"""A BigtableOptions object.
Fields:
columnFamilies: [Optional] List of column families to expose in the table
schema along with their types. This list restricts the column families
that can be referenced in queries and specifies their value types. You
can use this list to do type conversions - see the 'type' field for more
details. If you leave this list empty, all column families are present
in the table schema and their values are read as BYTES. During a query
only the column families referenced in that query are read from
Bigtable.
ignoreUnspecifiedColumnFamilies: [Optional] If this field is true, column
families that are not specified in the columnFamilies list are not
exposed in the table schema. Otherwise, they are read with BYTES type
values. The default value is false.
readRowkeyAsString: [Optional] If this field is true, the rowkey column
families will be read and converted to string. Otherwise, they are read
with BYTES type values and users need to manually cast them with CAST if
necessary. The default value is false.
"""
columnFamilies = _messages.MessageField('BigtableColumnFamily', 1, repeated=True)
ignoreUnspecifiedColumnFamilies = _messages.BooleanField(2)
readRowkeyAsString = _messages.BooleanField(3)
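# Usage sketch: BigtableOptions nests column families, which in turn nest
# columns. Here one hypothetical family exposes a single UTF-8-qualified
# column as an INTEGER field and reads row keys as strings.
def _example_bigtable_options():
  """Returns an illustrative BigtableOptions message."""
  column = BigtableColumn(
      qualifierString='visits',  # hypothetical qualifier, valid UTF-8
      type='INTEGER',
      onlyReadLatest=True)
  family = BigtableColumnFamily(
      familyId='stats',  # hypothetical column family name
      columns=[column])
  return BigtableOptions(
      columnFamilies=[family],
      readRowkeyAsString=True)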
class CsvOptions(_messages.Message):
"""A CsvOptions object.
Fields:
allowJaggedRows: [Optional] Indicates if BigQuery should accept rows that
are missing trailing optional columns. If true, BigQuery treats missing
trailing columns as null values. If false, records with missing trailing
columns are treated as bad records, and if there are too many bad
records, an invalid error is returned in the job result. The default
value is false.
allowQuotedNewlines: [Optional] Indicates if BigQuery should allow quoted
data sections that contain newline characters in a CSV file. The default
value is false.
encoding: [Optional] The character encoding of the data. The supported
values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery
decodes the data after the raw, binary data has been split using the
values of the quote and fieldDelimiter properties.
fieldDelimiter: [Optional] The separator for fields in a CSV file.
BigQuery converts the string to ISO-8859-1 encoding, and then uses the
first byte of the encoded string to split the data in its raw, binary
state. BigQuery also supports the escape sequence "\t" to specify a tab
separator. The default value is a comma (',').
quote: [Optional] The value that is used to quote data sections in a CSV
file. BigQuery converts the string to ISO-8859-1 encoding, and then uses
the first byte of the encoded string to split the data in its raw,
binary state. The default value is a double-quote ('"'). If your data
does not contain quoted sections, set the property value to an empty
string. If your data contains quoted newline characters, you must also
set the allowQuotedNewlines property to true.
skipLeadingRows: [Optional] The number of rows at the top of a CSV file
that BigQuery will skip when reading the data. The default value is 0.
This property is useful if you have header rows in the file that should
be skipped.
"""
allowJaggedRows = _messages.BooleanField(1)
allowQuotedNewlines = _messages.BooleanField(2)
encoding = _messages.StringField(3)
fieldDelimiter = _messages.StringField(4)
quote = _messages.StringField(5, default=u'"')
skipLeadingRows = _messages.IntegerField(6)
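# Usage sketch: CsvOptions for a tab-separated file with one header row.
# Note that the quote field already defaults to '"' in this message.
def _example_csv_options():
  """Returns an illustrative CsvOptions message."""
  return CsvOptions(
      fieldDelimiter='\t',  # tab separator, per the "\t" escape noted above
      skipLeadingRows=1,  # skip the header row
      allowQuotedNewlines=False,
      encoding='UTF-8')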
class Dataset(_messages.Message):
"""A Dataset object.
Messages:
AccessValueListEntry: An AccessValueListEntry object.
LabelsValue: [Experimental] The labels associated with this dataset. You
can use these to organize and group your datasets. You can set this
property when inserting or updating a dataset. Label keys and values can
be no longer than 63 characters, can only contain letters, numeric
characters, underscores and dashes. International characters are
allowed. Label values are optional. Label keys must start with a letter
and must be unique within a dataset. Both keys and values are
additionally constrained to be <= 128 bytes in size.
Fields:
access: [Optional] An array of objects that define dataset access for one
or more entities. You can set this property when inserting or updating a
dataset in order to control who is allowed to access the data. If
unspecified at dataset creation time, BigQuery adds default dataset
access for the following entities: access.specialGroup: projectReaders;
access.role: READER; access.specialGroup: projectWriters; access.role:
WRITER; access.specialGroup: projectOwners; access.role: OWNER;
access.userByEmail: [dataset creator email]; access.role: OWNER;
creationTime: [Output-only] The time when this dataset was created, in
milliseconds since the epoch.
datasetReference: [Required] A reference that identifies the dataset.
defaultTableExpirationMs: [Optional] The default lifetime of all tables in
the dataset, in milliseconds. The minimum value is 3600000 milliseconds
(one hour). Once this property is set, all newly-created tables in the
dataset will have an expirationTime property set to the creation time
plus the value in this property, and changing the value will only affect
new tables, not existing ones. When the expirationTime for a given table
is reached, that table will be deleted automatically. If a table's
expirationTime is modified or removed before the table expires, or if
you provide an explicit expirationTime when creating a table, that value
takes precedence over the default expiration time indicated by this
property.
description: [Optional] A user-friendly description of the dataset.
etag: [Output-only] A hash of the resource.
friendlyName: [Optional] A descriptive name for the dataset.
id: [Output-only] The fully-qualified unique name of the dataset in the
format projectId:datasetId. The dataset name without the project name is
given in the datasetId field. When creating a new dataset, leave this
field blank, and instead specify the datasetId field.
kind: [Output-only] The resource type.
labels: [Experimental] The labels associated with this dataset. You can
use these to organize and group your datasets. You can set this property
when inserting or updating a dataset. Label keys and values can be no
longer than 63 characters, can only contain letters, numeric characters,
underscores and dashes. International characters are allowed. Label
values are optional. Label keys must start with a letter and must be
unique within a dataset. Both keys and values are additionally
constrained to be <= 128 bytes in size.
lastModifiedTime: [Output-only] The date when this dataset or any of its
tables was last modified, in milliseconds since the epoch.
location: [Experimental] The geographic location where the dataset should
reside. Possible values include EU and US. The default value is US.
selfLink: [Output-only] A URL that can be used to access the resource
again. You can use this URL in Get or Update requests to the resource.
"""
class AccessValueListEntry(_messages.Message):
"""A AccessValueListEntry object.
Fields:
domain: [Pick one] A domain to grant access to. Any users signed in with
the domain specified will be granted the specified access. Example:
"example.com".
groupByEmail: [Pick one] An email address of a Google Group to grant
access to.
role: [Required] Describes the rights granted to the user specified by
the other member of the access object. The following string values are
supported: READER, WRITER, OWNER.
specialGroup: [Pick one] A special group to grant access to. Possible
values include: projectOwners: Owners of the enclosing project.
projectReaders: Readers of the enclosing project. projectWriters:
Writers of the enclosing project. allAuthenticatedUsers: All
authenticated BigQuery users.
userByEmail: [Pick one] An email address of a user to grant access to.
For example: fred@example.com.
view: [Pick one] A view from a different dataset to grant access to.
Queries executed against that view will have read access to tables in
this dataset. The role field is not required when this field is set.
If that view is updated by any user, access to the view needs to be
granted again via an update operation.
"""
domain = _messages.StringField(1)
groupByEmail = _messages.StringField(2)
role = _messages.StringField(3)
specialGroup = _messages.StringField(4)
userByEmail = _messages.StringField(5)
view = _messages.MessageField('TableReference', 6)
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
"""[Experimental] The labels associated with this dataset. You can use
these to organize and group your datasets. You can set this property when
inserting or updating a dataset. Label keys and values can be no longer
than 63 characters, can only contain letters, numeric characters,
underscores and dashes. International characters are allowed. Label values
are optional. Label keys must start with a letter and must be unique
within a dataset. Both keys and values are additionally constrained to be
<= 128 bytes in size.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
access = _messages.MessageField('AccessValueListEntry', 1, repeated=True)
creationTime = _messages.IntegerField(2)
datasetReference = _messages.MessageField('DatasetReference', 3)
defaultTableExpirationMs = _messages.IntegerField(4)
description = _messages.StringField(5)
etag = _messages.StringField(6)
friendlyName = _messages.StringField(7)
id = _messages.StringField(8)
kind = _messages.StringField(9, default=u'bigquery#dataset')
labels = _messages.MessageField('LabelsValue', 10)
lastModifiedTime = _messages.IntegerField(11)
location = _messages.StringField(12)
selfLink = _messages.StringField(13)
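# Usage sketch: because LabelsValue maps unrecognized fields through
# AdditionalProperty entries, labels are built as explicit key/value pairs
# rather than a plain dict. All identifiers are hypothetical;
# DatasetReference is defined later in this module.
def _example_dataset():
  """Returns an illustrative Dataset message with one label."""
  labels = Dataset.LabelsValue(additionalProperties=[
      Dataset.LabelsValue.AdditionalProperty(
          key='department', value='receiving'),
  ])
  return Dataset(
      datasetReference=DatasetReference(
          projectId='my-project', datasetId='my_dataset'),
      defaultTableExpirationMs=3600000,  # the documented minimum: one hour
      labels=labels,
      location='US')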
class DatasetList(_messages.Message):
"""A DatasetList object.
Messages:
DatasetsValueListEntry: A DatasetsValueListEntry object.
Fields:
datasets: An array of the dataset resources in the project. Each resource
contains basic information. For full information about a particular
dataset resource, use the Datasets: get method. This property is omitted
when there are no datasets in the project.
etag: A hash value of the results page. You can use this property to
determine if the page has changed since the last request.
kind: The list type. This property always returns the value
"bigquery#datasetList".
nextPageToken: A token that can be used to request the next results page.
This property is omitted on the final results page.
"""
class DatasetsValueListEntry(_messages.Message):
"""A DatasetsValueListEntry object.
Messages:
LabelsValue: [Experimental] The labels associated with this dataset. You
can use these to organize and group your datasets.
Fields:
datasetReference: The dataset reference. Use this property to access
specific parts of the dataset's ID, such as project ID or dataset ID.
friendlyName: A descriptive name for the dataset, if one exists.
id: The fully-qualified, unique, opaque ID of the dataset.
kind: The resource type. This property always returns the value
"bigquery#dataset".
labels: [Experimental] The labels associated with this dataset. You can
use these to organize and group your datasets.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
"""[Experimental] The labels associated with this dataset. You can use
these to organize and group your datasets.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A string attribute.
"""
key = _messages.StringField(1)
value = _messages.StringField(2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
datasetReference = _messages.MessageField('DatasetReference', 1)
friendlyName = _messages.StringField(2)
id = _messages.StringField(3)
kind = _messages.StringField(4, default=u'bigquery#dataset')
labels = _messages.MessageField('LabelsValue', 5)
datasets = _messages.MessageField('DatasetsValueListEntry', 1, repeated=True)
etag = _messages.StringField(2)
kind = _messages.StringField(3, default=u'bigquery#datasetList')
nextPageToken = _messages.StringField(4)
class DatasetReference(_messages.Message):
"""A DatasetReference object.
Fields:
datasetId: [Required] A unique ID for this dataset, without the project
name. The ID must contain only letters (a-z, A-Z), numbers (0-9), or
underscores (_). The maximum length is 1,024 characters.
projectId: [Optional] The ID of the project containing this dataset.
"""
datasetId = _messages.StringField(1)
projectId = _messages.StringField(2)
class ErrorProto(_messages.Message):
"""A ErrorProto object.
Fields:
debugInfo: Debugging information. This property is internal to Google and
should not be used.
location: Specifies where the error occurred, if present.
message: A human-readable description of the error.
reason: A short error code that summarizes the error.
"""
debugInfo = _messages.StringField(1)
location = _messages.StringField(2)
message = _messages.StringField(3)
reason = _messages.StringField(4)
class ExplainQueryStage(_messages.Message):
"""A ExplainQueryStage object.
Fields:
computeRatioAvg: Relative amount of time the average shard spent on CPU-
bound tasks.
computeRatioMax: Relative amount of time the slowest shard spent on CPU-
bound tasks.
id: Unique ID for stage within plan.
name: Human-readable name for stage.
readRatioAvg: Relative amount of time the average shard spent reading
input.
readRatioMax: Relative amount of time the slowest shard spent reading
input.
recordsRead: Number of records read into the stage.
recordsWritten: Number of records written by the stage.
steps: List of operations within the stage in dependency order
(approximately chronological).
waitRatioAvg: Relative amount of time the average shard spent waiting to
be scheduled.
waitRatioMax: Relative amount of time the slowest shard spent waiting to
be scheduled.
writeRatioAvg: Relative amount of time the average shard spent on writing
output.
writeRatioMax: Relative amount of time the slowest shard spent on writing
output.
"""
computeRatioAvg = _messages.FloatField(1)
computeRatioMax = _messages.FloatField(2)
id = _messages.IntegerField(3)
name = _messages.StringField(4)
readRatioAvg = _messages.FloatField(5)
readRatioMax = _messages.FloatField(6)
recordsRead = _messages.IntegerField(7)
recordsWritten = _messages.IntegerField(8)
steps = _messages.MessageField('ExplainQueryStep', 9, repeated=True)
waitRatioAvg = _messages.FloatField(10)
waitRatioMax = _messages.FloatField(11)
writeRatioAvg = _messages.FloatField(12)
writeRatioMax = _messages.FloatField(13)
class ExplainQueryStep(_messages.Message):
"""A ExplainQueryStep object.
Fields:
kind: Machine-readable operation type.
substeps: Human-readable stage descriptions.
"""
kind = _messages.StringField(1)
substeps = _messages.StringField(2, repeated=True)
class ExternalDataConfiguration(_messages.Message):
"""A ExternalDataConfiguration object.
Fields:
autodetect: [Experimental] Try to detect schema and format options
automatically. Any option specified explicitly will be honored.
bigtableOptions: [Optional] Additional options if sourceFormat is set to
BIGTABLE.
compression: [Optional] The compression type of the data source. Possible
values include GZIP and NONE. The default value is NONE. This setting is
ignored for Google Cloud Bigtable, Google Cloud Datastore backups and
Avro formats.
csvOptions: Additional properties to set if sourceFormat is set to CSV.
googleSheetsOptions: [Optional] Additional options if sourceFormat is set
to GOOGLE_SHEETS.
ignoreUnknownValues: [Optional] Indicates if BigQuery should allow extra
values that are not represented in the table schema. If true, the extra
values are ignored. If false, records with extra columns are treated as
bad records, and if there are too many bad records, an invalid error is
returned in the job result. The default value is false. The sourceFormat
property determines what BigQuery treats as an extra value: CSV:
Trailing columns. JSON: Named values that don't match any column names.
Google Cloud Bigtable: This setting is ignored. Google Cloud Datastore
backups: This setting is ignored. Avro: This setting is ignored.
maxBadRecords: [Optional] The maximum number of bad records that BigQuery
can ignore when reading data. If the number of bad records exceeds this
value, an invalid error is returned in the job result. The default value
is 0, which requires that all records are valid. This setting is ignored
for Google Cloud Bigtable, Google Cloud Datastore backups and Avro
formats.
schema: [Optional] The schema for the data. Schema is required for CSV and
JSON formats. Schema is disallowed for Google Cloud Bigtable, Cloud
Datastore backups, and Avro formats.
sourceFormat: [Required] The data format. For CSV files, specify "CSV".
For Google sheets, specify "GOOGLE_SHEETS". For newline-delimited JSON,
specify "NEWLINE_DELIMITED_JSON". For Avro files, specify "AVRO". For
Google Cloud Datastore backups, specify "DATASTORE_BACKUP".
[Experimental] For Google Cloud Bigtable, specify "BIGTABLE". Please
note that reading from Google Cloud Bigtable is experimental and has to
be enabled for your project. Please contact Google Cloud Support to
enable this for your project.
sourceUris: [Required] The fully-qualified URIs that point to your data in
Google Cloud. For Google Cloud Storage URIs: Each URI can contain one
'*' wildcard character and it must come after the 'bucket' name. Size
limits related to load jobs apply to external data sources. For Google
Cloud Bigtable URIs: Exactly one URI can be specified and it has to be a
fully specified and valid HTTPS URL for a Google Cloud Bigtable table.
For Google Cloud Datastore backups, exactly one URI can be specified,
and it must end with '.backup_info'. Also, the '*' wildcard character is
not allowed.
"""
autodetect = _messages.BooleanField(1)
bigtableOptions = _messages.MessageField('BigtableOptions', 2)
compression = _messages.StringField(3)
csvOptions = _messages.MessageField('CsvOptions', 4)
googleSheetsOptions = _messages.MessageField('GoogleSheetsOptions', 5)
ignoreUnknownValues = _messages.BooleanField(6)
maxBadRecords = _messages.IntegerField(7, variant=_messages.Variant.INT32)
schema = _messages.MessageField('TableSchema', 8)
sourceFormat = _messages.StringField(9)
sourceUris = _messages.StringField(10, repeated=True)
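# Usage sketch: an external CSV data source on Google Cloud Storage, reusing
# the CsvOptions message defined above. The bucket name is hypothetical;
# schema autodetection is used instead of an explicit TableSchema.
def _example_external_data_configuration():
  """Returns an illustrative ExternalDataConfiguration message."""
  return ExternalDataConfiguration(
      sourceFormat='CSV',
      sourceUris=['gs://my-bucket/data/*.csv'],  # one '*', after the bucket
      csvOptions=CsvOptions(skipLeadingRows=1),
      maxBadRecords=10,
      autodetect=True)  # experimental: infer the schema from the data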
class GetQueryResultsResponse(_messages.Message):
"""A GetQueryResultsResponse object.
Fields:
cacheHit: Whether the query result was fetched from the query cache.
errors: [Output-only] All errors and warnings encountered during the
running of the job. Errors here do not necessarily mean that the job has
completed or was unsuccessful.
etag: A hash of this response.
jobComplete: Whether the query has completed or not. If rows or totalRows
are present, this will always be true. If this is false, totalRows will
not be available.
jobReference: Reference to the BigQuery Job that was created to run the
query. This field will be present even if the original request timed
out, in which case GetQueryResults can be used to read the results once
the query has completed. Since this API only returns the first page of
results, subsequent pages can be fetched via the same mechanism
(GetQueryResults).
kind: The resource type of the response.
numDmlAffectedRows: [Output-only, Experimental] The number of rows
affected by a DML statement. Present only for DML statements INSERT,
UPDATE or DELETE.
pageToken: A token used for paging results.
rows: An object with as many results as can be contained within the
maximum permitted reply size. To get any additional rows, you can call
GetQueryResults and specify the jobReference returned above. Present
only when the query completes successfully.
schema: The schema of the results. Present only when the query completes
successfully.
totalBytesProcessed: The total number of bytes processed for this query.
totalRows: The total number of rows in the complete query result set,
which can be more than the number of rows in this single page of
results. Present only when the query completes successfully.
"""
cacheHit = _messages.BooleanField(1)
errors = _messages.MessageField('ErrorProto', 2, repeated=True)
etag = _messages.StringField(3)
jobComplete = _messages.BooleanField(4)
jobReference = _messages.MessageField('JobReference', 5)
kind = _messages.StringField(6, default=u'bigquery#getQueryResultsResponse')
numDmlAffectedRows = _messages.IntegerField(7)
pageToken = _messages.StringField(8)
rows = _messages.MessageField('TableRow', 9, repeated=True)
schema = _messages.MessageField('TableSchema', 10)
totalBytesProcessed = _messages.IntegerField(11)
totalRows = _messages.IntegerField(12, variant=_messages.Variant.UINT64)
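# Usage sketch: draining paged query results. The client handle and its
# jobs.GetQueryResults method are assumptions (the usual apitools service
# wrapper); only the request/response messages are defined in this module.
def _example_drain_query_results(client, first_response):
  """Yields all rows across result pages; illustrative only."""
  response = first_response
  while True:
    for row in response.rows or []:  # rows may be empty until jobComplete
      yield row
    if not response.pageToken:
      break  # final page
    response = client.jobs.GetQueryResults(
        BigqueryJobsGetQueryResultsRequest(
            projectId=response.jobReference.projectId,
            jobId=response.jobReference.jobId,
            pageToken=response.pageToken))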
class GoogleSheetsOptions(_messages.Message):
"""A GoogleSheetsOptions object.
Fields:
skipLeadingRows: [Optional] The number of rows at the top of a sheet that
BigQuery will skip when reading the data. The default value is 0. This
property is useful if you have header rows that should be skipped. When
autodetect is on, behavior is the following: * skipLeadingRows
unspecified - Autodetect tries to detect headers in the first row. If
they are not detected, the row is read as data. Otherwise data is read
starting from the second row. * skipLeadingRows is 0 - Instructs
autodetect that there are no headers and data should be read starting
from the first row. * skipLeadingRows = N > 0 - Autodetect skips N-1
rows and tries to detect headers in row N. If headers are not detected,
row N is just skipped. Otherwise row N is used to extract column names
for the detected schema.
"""
skipLeadingRows = _messages.IntegerField(1)
class Job(_messages.Message):
"""A Job object.
Fields:
configuration: [Required] Describes the job configuration.
etag: [Output-only] A hash of this resource.
id: [Output-only] Opaque ID field of the job
jobReference: [Optional] Reference describing the unique-per-user name of
the job.
kind: [Output-only] The type of the resource.
selfLink: [Output-only] A URL that can be used to access this resource
again.
statistics: [Output-only] Information about the job, including starting
time and ending time of the job.
status: [Output-only] The status of this job. Examine this value when
polling an asynchronous job to see if the job is complete.
user_email: [Output-only] Email address of the user who ran the job.
"""
configuration = _messages.MessageField('JobConfiguration', 1)
etag = _messages.StringField(2)
id = _messages.StringField(3)
jobReference = _messages.MessageField('JobReference', 4)
kind = _messages.StringField(5, default=u'bigquery#job')
selfLink = _messages.StringField(6)
statistics = _messages.MessageField('JobStatistics', 7)
status = _messages.MessageField('JobStatus', 8)
user_email = _messages.StringField(9)
class JobCancelResponse(_messages.Message):
"""A JobCancelResponse object.
Fields:
job: The final state of the job.
kind: The resource type of the response.
"""
job = _messages.MessageField('Job', 1)
kind = _messages.StringField(2, default=u'bigquery#jobCancelResponse')
class JobConfiguration(_messages.Message):
"""A JobConfiguration object.
Fields:
copy: [Pick one] Copies a table.
dryRun: [Optional] If set, don't actually run this job. A valid query will
return a mostly empty response with some processing statistics, while an
invalid query will return the same error it would if it wasn't a dry
run. Behavior of non-query jobs is undefined.
extract: [Pick one] Configures an extract job.
load: [Pick one] Configures a load job.
query: [Pick one] Configures a query job.
"""
copy = _messages.MessageField('JobConfigurationTableCopy', 1)
dryRun = _messages.BooleanField(2)
extract = _messages.MessageField('JobConfigurationExtract', 3)
load = _messages.MessageField('JobConfigurationLoad', 4)
query = _messages.MessageField('JobConfigurationQuery', 5)
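# Usage sketch: exactly one of copy/extract/load/query is set per job. A
# dry-run query is validated and costed but never executed. The query text
# is hypothetical; JobConfigurationQuery is defined later in this module.
def _example_dry_run_configuration():
  """Returns an illustrative dry-run JobConfiguration."""
  return JobConfiguration(
      dryRun=True,  # validate and return statistics without running
      query=JobConfigurationQuery(query='SELECT 1'))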
class JobConfigurationExtract(_messages.Message):
"""A JobConfigurationExtract object.
Fields:
compression: [Optional] The compression type to use for exported files.
Possible values include GZIP and NONE. The default value is NONE.
destinationFormat: [Optional] The exported file format. Possible values
include CSV, NEWLINE_DELIMITED_JSON and AVRO. The default value is CSV.
Tables with nested or repeated fields cannot be exported as CSV.
destinationUri: [Pick one] DEPRECATED: Use destinationUris instead,
passing only one URI as necessary. The fully-qualified Google Cloud
Storage URI where the extracted table should be written.
destinationUris: [Pick one] A list of fully-qualified Google Cloud Storage
URIs where the extracted table should be written.
fieldDelimiter: [Optional] Delimiter to use between fields in the exported
data. Default is ','
printHeader: [Optional] Whether to print out a header row in the results.
Default is true.
sourceTable: [Required] A reference to the table being exported.
"""
compression = _messages.StringField(1)
destinationFormat = _messages.StringField(2)
destinationUri = _messages.StringField(3)
destinationUris = _messages.StringField(4, repeated=True)
fieldDelimiter = _messages.StringField(5)
printHeader = _messages.BooleanField(6, default=True)
sourceTable = _messages.MessageField('TableReference', 7)
class JobConfigurationLoad(_messages.Message):
"""A JobConfigurationLoad object.
Fields:
allowJaggedRows: [Optional] Accept rows that are missing trailing optional
columns. The missing values are treated as nulls. If false, records with
missing trailing columns are treated as bad records, and if there are
too many bad records, an invalid error is returned in the job result.
The default value is false. Only applicable to CSV, ignored for other
formats.
allowQuotedNewlines: Indicates if BigQuery should allow quoted data
sections that contain newline characters in a CSV file. The default
value is false.
autodetect: [Experimental] Indicates if we should automatically infer the
options and schema for CSV and JSON sources.
createDisposition: [Optional] Specifies whether the job is allowed to
create new tables. The following values are supported: CREATE_IF_NEEDED:
If the table does not exist, BigQuery creates the table. CREATE_NEVER:
The table must already exist. If it does not, a 'notFound' error is
returned in the job result. The default value is CREATE_IF_NEEDED.
Creation, truncation and append actions occur as one atomic update upon
job completion.
destinationTable: [Required] The destination table to load the data into.
encoding: [Optional] The character encoding of the data. The supported
values are UTF-8 or ISO-8859-1. The default value is UTF-8. BigQuery
decodes the data after the raw, binary data has been split using the
values of the quote and fieldDelimiter properties.
fieldDelimiter: [Optional] The separator for fields in a CSV file. The
separator can be any ISO-8859-1 single-byte character. To use a
character in the range 128-255, you must encode the character as UTF8.
BigQuery converts the string to ISO-8859-1 encoding, and then uses the
first byte of the encoded string to split the data in its raw, binary
state. BigQuery also supports the escape sequence "\t" to specify a tab
separator. The default value is a comma (',').
ignoreUnknownValues: [Optional] Indicates if BigQuery should allow extra
values that are not represented in the table schema. If true, the extra
values are ignored. If false, records with extra columns are treated as
bad records, and if there are too many bad records, an invalid error is
returned in the job result. The default value is false. The sourceFormat
property determines what BigQuery treats as an extra value: CSV:
Trailing columns. JSON: Named values that don't match any column names.
maxBadRecords: [Optional] The maximum number of bad records that BigQuery
can ignore when running the job. If the number of bad records exceeds
this value, an invalid error is returned in the job result. The default
value is 0, which requires that all records are valid.
projectionFields: [Experimental] If sourceFormat is set to
"DATASTORE_BACKUP", indicates which entity properties to load into
BigQuery from a Cloud Datastore backup. Property names are case
sensitive and must be top-level properties. If no properties are
specified, BigQuery loads all properties. If any named property isn't
found in the Cloud Datastore backup, an invalid error is returned in the
job result.
quote: [Optional] The value that is used to quote data sections in a CSV
file. BigQuery converts the string to ISO-8859-1 encoding, and then uses
the first byte of the encoded string to split the data in its raw,
binary state. The default value is a double-quote ('"'). If your data
does not contain quoted sections, set the property value to an empty
string. If your data contains quoted newline characters, you must also
set the allowQuotedNewlines property to true.
schema: [Optional] The schema for the destination table. The schema can be
omitted if the destination table already exists, or if you're loading
data from Google Cloud Datastore.
schemaInline: [Deprecated] The inline schema. For CSV schemas, specify as
"Field1:Type1[,Field2:Type2]*". For example, "foo:STRING, bar:INTEGER,
baz:FLOAT".
schemaInlineFormat: [Deprecated] The format of the schemaInline property.
schemaUpdateOptions: [Experimental] Allows the schema of the destination
table to be updated as a side effect of the load job. Schema update
options are supported in two cases: when writeDisposition is
WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the
destination table is a partition of a table, specified by partition
decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
schema. One or more of the following values are specified:
ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original
schema to nullable.
skipLeadingRows: [Optional] The number of rows at the top of a CSV file
that BigQuery will skip when loading the data. The default value is 0.
This property is useful if you have header rows in the file that should
be skipped.
sourceFormat: [Optional] The format of the data files. For CSV files,
specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". For
newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro,
specify "AVRO". The default value is CSV.
sourceUris: [Required] The fully-qualified URIs that point to your data in
Google Cloud Storage. Each URI can contain one '*' wildcard character
and it must come after the 'bucket' name.
writeDisposition: [Optional] Specifies the action that occurs if the
destination table already exists. The following values are supported:
WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
table data. WRITE_APPEND: If the table already exists, BigQuery appends
the data to the table. WRITE_EMPTY: If the table already exists and
contains data, a 'duplicate' error is returned in the job result. The
default value is WRITE_APPEND. Each action is atomic and only occurs if
BigQuery is able to complete the job successfully. Creation, truncation
and append actions occur as one atomic update upon job completion.
"""
allowJaggedRows = _messages.BooleanField(1)
allowQuotedNewlines = _messages.BooleanField(2)
autodetect = _messages.BooleanField(3)
createDisposition = _messages.StringField(4)
destinationTable = _messages.MessageField('TableReference', 5)
encoding = _messages.StringField(6)
fieldDelimiter = _messages.StringField(7)
ignoreUnknownValues = _messages.BooleanField(8)
maxBadRecords = _messages.IntegerField(9, variant=_messages.Variant.INT32)
projectionFields = _messages.StringField(10, repeated=True)
quote = _messages.StringField(11, default=u'"')
schema = _messages.MessageField('TableSchema', 12)
schemaInline = _messages.StringField(13)
schemaInlineFormat = _messages.StringField(14)
schemaUpdateOptions = _messages.StringField(15, repeated=True)
skipLeadingRows = _messages.IntegerField(16, variant=_messages.Variant.INT32)
sourceFormat = _messages.StringField(17)
sourceUris = _messages.StringField(18, repeated=True)
writeDisposition = _messages.StringField(19)
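# Usage sketch: a CSV load that appends to an existing table and tolerates a
# few bad records. Bucket and table names are hypothetical; TableReference
# is defined later in this module.
def _example_load_configuration():
  """Returns an illustrative JobConfigurationLoad message."""
  return JobConfigurationLoad(
      destinationTable=TableReference(
          projectId='my-project', datasetId='my_dataset', tableId='events'),
      sourceUris=['gs://my-bucket/events/*.csv'],
      sourceFormat='CSV',
      skipLeadingRows=1,  # skip the header row
      maxBadRecords=5,
      createDisposition='CREATE_IF_NEEDED',
      writeDisposition='WRITE_APPEND')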
class JobConfigurationQuery(_messages.Message):
"""A JobConfigurationQuery object.
Messages:
TableDefinitionsValue: [Optional] If querying an external data source
outside of BigQuery, describes the data format, location and other
properties of the data source. By defining these properties, the data
source can then be queried as if it were a standard BigQuery table.
Fields:
allowLargeResults: If true, allows the query to produce arbitrarily large
result tables at a slight cost in performance. Requires destinationTable
to be set.
createDisposition: [Optional] Specifies whether the job is allowed to
create new tables. The following values are supported: CREATE_IF_NEEDED:
If the table does not exist, BigQuery creates the table. CREATE_NEVER:
The table must already exist. If it does not, a 'notFound' error is
returned in the job result. The default value is CREATE_IF_NEEDED.
Creation, truncation and append actions occur as one atomic update upon
job completion.
defaultDataset: [Optional] Specifies the default dataset to use for
unqualified table names in the query.
destinationTable: [Optional] Describes the table where the query results
should be stored. If not present, a new table will be created to store
the results.
flattenResults: [Optional] Flattens all nested and repeated fields in the
query results. The default value is true. allowLargeResults must be true
if this is set to false.
maximumBillingTier: [Optional] Limits the billing tier for this job.
Queries that have resource usage beyond this tier will fail (without
incurring a charge). If unspecified, this will be set to your project
default.
maximumBytesBilled: [Optional] Limits the bytes billed for this job.
Queries that will have bytes billed beyond this limit will fail (without
incurring a charge). If unspecified, this will be set to your project
default.
preserveNulls: [Deprecated] This property is deprecated.
priority: [Optional] Specifies a priority for the query. Possible values
include INTERACTIVE and BATCH. The default value is INTERACTIVE.
query: [Required] BigQuery SQL query to execute.
schemaUpdateOptions: [Experimental] Allows the schema of the destination
table to be updated as a side effect of the query job. Schema update
options are supported in two cases: when writeDisposition is
WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the
destination table is a partition of a table, specified by partition
decorators. For normal tables, WRITE_TRUNCATE will always overwrite the
schema. One or more of the following values are specified:
ALLOW_FIELD_ADDITION: allow adding a nullable field to the schema.
ALLOW_FIELD_RELAXATION: allow relaxing a required field in the original
schema to nullable.
tableDefinitions: [Optional] If querying an external data source outside
of BigQuery, describes the data format, location and other properties of
the data source. By defining these properties, the data source can then
be queried as if it were a standard BigQuery table.
useLegacySql: [Experimental] Specifies whether to use BigQuery's legacy
SQL dialect for this query. The default value is true. If set to false,
the query will use BigQuery's standard SQL:
https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is
set to false, the values of allowLargeResults and flattenResults are
ignored; query will be run as if allowLargeResults is true and
flattenResults is false.
useQueryCache: [Optional] Whether to look for the result in the query
cache. The query cache is a best-effort cache that will be flushed
whenever tables in the query are modified. Moreover, the query cache is
only available when a query does not have a destination table specified.
The default value is true.
userDefinedFunctionResources: [Experimental] Describes user-defined
function resources used in the query.
writeDisposition: [Optional] Specifies the action that occurs if the
destination table already exists. The following values are supported:
WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
table data. WRITE_APPEND: If the table already exists, BigQuery appends
the data to the table. WRITE_EMPTY: If the table already exists and
contains data, a 'duplicate' error is returned in the job result. The
default value is WRITE_EMPTY. Each action is atomic and only occurs if
BigQuery is able to complete the job successfully. Creation, truncation
and append actions occur as one atomic update upon job completion.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class TableDefinitionsValue(_messages.Message):
"""[Optional] If querying an external data source outside of BigQuery,
describes the data format, location and other properties of the data
source. By defining these properties, the data source can then be queried
as if it were a standard BigQuery table.
Messages:
AdditionalProperty: An additional property for a TableDefinitionsValue
object.
Fields:
additionalProperties: Additional properties of type
TableDefinitionsValue
"""
class AdditionalProperty(_messages.Message):
"""An additional property for a TableDefinitionsValue object.
Fields:
key: Name of the additional property.
value: An ExternalDataConfiguration attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('ExternalDataConfiguration', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
allowLargeResults = _messages.BooleanField(1)
createDisposition = _messages.StringField(2)
defaultDataset = _messages.MessageField('DatasetReference', 3)
destinationTable = _messages.MessageField('TableReference', 4)
flattenResults = _messages.BooleanField(5, default=True)
maximumBillingTier = _messages.IntegerField(6, variant=_messages.Variant.INT32, default=1)
maximumBytesBilled = _messages.IntegerField(7)
preserveNulls = _messages.BooleanField(8)
priority = _messages.StringField(9)
query = _messages.StringField(10)
schemaUpdateOptions = _messages.StringField(11, repeated=True)
tableDefinitions = _messages.MessageField('TableDefinitionsValue', 12)
useLegacySql = _messages.BooleanField(13)
useQueryCache = _messages.BooleanField(14, default=True)
userDefinedFunctionResources = _messages.MessageField('UserDefinedFunctionResource', 15, repeated=True)
writeDisposition = _messages.StringField(16)
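# Usage sketch: a standard-SQL query written to a destination table. With
# useLegacySql=False, allowLargeResults and flattenResults are ignored, per
# the docstring above. Table names are hypothetical; TableReference is
# defined later in this module.
def _example_query_configuration():
  """Returns an illustrative JobConfigurationQuery message."""
  return JobConfigurationQuery(
      query='SELECT name FROM `my-project.my_dataset.names`',  # hypothetical
      useLegacySql=False,
      destinationTable=TableReference(
          projectId='my-project', datasetId='my_dataset', tableId='out'),
      writeDisposition='WRITE_TRUNCATE')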
class JobConfigurationTableCopy(_messages.Message):
"""A JobConfigurationTableCopy object.
Fields:
createDisposition: [Optional] Specifies whether the job is allowed to
create new tables. The following values are supported: CREATE_IF_NEEDED:
If the table does not exist, BigQuery creates the table. CREATE_NEVER:
The table must already exist. If it does not, a 'notFound' error is
returned in the job result. The default value is CREATE_IF_NEEDED.
Creation, truncation and append actions occur as one atomic update upon
job completion.
destinationTable: [Required] The destination table
sourceTable: [Pick one] Source table to copy.
sourceTables: [Pick one] Source tables to copy.
writeDisposition: [Optional] Specifies the action that occurs if the
destination table already exists. The following values are supported:
WRITE_TRUNCATE: If the table already exists, BigQuery overwrites the
table data. WRITE_APPEND: If the table already exists, BigQuery appends
the data to the table. WRITE_EMPTY: If the table already exists and
contains data, a 'duplicate' error is returned in the job result. The
default value is WRITE_EMPTY. Each action is atomic and only occurs if
BigQuery is able to complete the job successfully. Creation, truncation
and append actions occur as one atomic update upon job completion.
"""
createDisposition = _messages.StringField(1)
destinationTable = _messages.MessageField('TableReference', 2)
sourceTable = _messages.MessageField('TableReference', 3)
sourceTables = _messages.MessageField('TableReference', 4, repeated=True)
writeDisposition = _messages.StringField(5)
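# Usage sketch: consolidating two source tables into one destination table;
# sourceTable and sourceTables are mutually exclusive ("pick one"). Names
# are hypothetical; TableReference is defined later in this module.
def _example_copy_configuration():
  """Returns an illustrative JobConfigurationTableCopy message."""
  return JobConfigurationTableCopy(
      sourceTables=[
          TableReference(projectId='my-project', datasetId='my_dataset',
                         tableId='shard1'),
          TableReference(projectId='my-project', datasetId='my_dataset',
                         tableId='shard2'),
      ],
      destinationTable=TableReference(
          projectId='my-project', datasetId='my_dataset', tableId='merged'),
      writeDisposition='WRITE_APPEND')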
class JobList(_messages.Message):
"""A JobList object.
Messages:
JobsValueListEntry: A JobsValueListEntry object.
Fields:
etag: A hash of this page of results.
jobs: List of jobs that were requested.
kind: The resource type of the response.
nextPageToken: A token to request the next page of results.
"""
class JobsValueListEntry(_messages.Message):
"""A JobsValueListEntry object.
Fields:
configuration: [Full-projection-only] Specifies the job configuration.
errorResult: A result object that will be present only if the job has
failed.
id: Unique opaque ID of the job.
jobReference: Job reference uniquely identifying the job.
kind: The resource type.
state: Running state of the job. When the state is DONE, errorResult can
be checked to determine whether the job succeeded or failed.
statistics: [Output-only] Information about the job, including starting
time and ending time of the job.
status: [Full-projection-only] Describes the state of the job.
user_email: [Full-projection-only] Email address of the user who ran the
job.
"""
configuration = _messages.MessageField('JobConfiguration', 1)
errorResult = _messages.MessageField('ErrorProto', 2)
id = _messages.StringField(3)
jobReference = _messages.MessageField('JobReference', 4)
kind = _messages.StringField(5, default=u'bigquery#job')
state = _messages.StringField(6)
statistics = _messages.MessageField('JobStatistics', 7)
status = _messages.MessageField('JobStatus', 8)
user_email = _messages.StringField(9)
etag = _messages.StringField(1)
jobs = _messages.MessageField('JobsValueListEntry', 2, repeated=True)
kind = _messages.StringField(3, default=u'bigquery#jobList')
nextPageToken = _messages.StringField(4)
class JobReference(_messages.Message):
"""A JobReference object.
Fields:
jobId: [Required] The ID of the job. The ID must contain only letters
(a-z, A-Z), numbers (0-9), underscores (_), or dashes (-). The maximum
length is 1,024 characters.
projectId: [Required] The ID of the project containing this job.
"""
jobId = _messages.StringField(1)
projectId = _messages.StringField(2)
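# Illustrative sketch (hand-written): a job reference. The jobId is a
# placeholder that stays within the documented character set (letters,
# digits, underscores, dashes) and the 1,024-character limit.
def _example_job_reference():
  return JobReference(projectId='my-project',
                      jobId='copy_events_2016_01_01')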
class JobStatistics(_messages.Message):
"""A JobStatistics object.
Fields:
creationTime: [Output-only] Creation time of this job, in milliseconds
since the epoch. This field will be present on all jobs.
endTime: [Output-only] End time of this job, in milliseconds since the
epoch. This field will be present whenever a job is in the DONE state.
extract: [Output-only] Statistics for an extract job.
load: [Output-only] Statistics for a load job.
query: [Output-only] Statistics for a query job.
startTime: [Output-only] Start time of this job, in milliseconds since the
epoch. This field will be present when the job transitions from the
PENDING state to either RUNNING or DONE.
totalBytesProcessed: [Output-only] [Deprecated] Use the bytes processed in
the query statistics instead.
"""
creationTime = _messages.IntegerField(1)
endTime = _messages.IntegerField(2)
extract = _messages.MessageField('JobStatistics4', 3)
load = _messages.MessageField('JobStatistics3', 4)
query = _messages.MessageField('JobStatistics2', 5)
startTime = _messages.IntegerField(6)
totalBytesProcessed = _messages.IntegerField(7)
class JobStatistics2(_messages.Message):
"""A JobStatistics2 object.
Fields:
billingTier: [Output-only] Billing tier for the job.
cacheHit: [Output-only] Whether the query result was fetched from the
query cache.
numDmlAffectedRows: [Output-only, Experimental] The number of rows
affected by a DML statement. Present only for DML statements INSERT,
UPDATE or DELETE.
queryPlan: [Output-only, Experimental] Describes execution plan for the
query.
referencedTables: [Output-only, Experimental] Referenced tables for the
job. Queries that reference more than 50 tables will not have a complete
list.
schema: [Output-only, Experimental] The schema of the results. Present
only for successful dry run of non-legacy SQL queries.
totalBytesBilled: [Output-only] Total bytes billed for the job.
totalBytesProcessed: [Output-only] Total bytes processed for the job.
"""
billingTier = _messages.IntegerField(1, variant=_messages.Variant.INT32)
cacheHit = _messages.BooleanField(2)
numDmlAffectedRows = _messages.IntegerField(3)
queryPlan = _messages.MessageField('ExplainQueryStage', 4, repeated=True)
referencedTables = _messages.MessageField('TableReference', 5, repeated=True)
schema = _messages.MessageField('TableSchema', 6)
totalBytesBilled = _messages.IntegerField(7)
totalBytesProcessed = _messages.IntegerField(8)
class JobStatistics3(_messages.Message):
"""A JobStatistics3 object.
Fields:
inputFileBytes: [Output-only] Number of bytes of source data in a load
job.
inputFiles: [Output-only] Number of source files in a load job.
outputBytes: [Output-only] Size of the loaded data in bytes. Note that
while a load job is in the running state, this value may change.
outputRows: [Output-only] Number of rows imported in a load job. Note that
while an import job is in the running state, this value may change.
"""
inputFileBytes = _messages.IntegerField(1)
inputFiles = _messages.IntegerField(2)
outputBytes = _messages.IntegerField(3)
outputRows = _messages.IntegerField(4)
class JobStatistics4(_messages.Message):
"""A JobStatistics4 object.
Fields:
destinationUriFileCounts: [Output-only] Number of files per destination
URI or URI pattern specified in the extract configuration. These values
will be in the same order as the URIs specified in the 'destinationUris'
field.
"""
destinationUriFileCounts = _messages.IntegerField(1, repeated=True)
class JobStatus(_messages.Message):
"""A JobStatus object.
Fields:
errorResult: [Output-only] Final error result of the job. If present,
indicates that the job has completed and was unsuccessful.
errors: [Output-only] All errors encountered during the running of the
job. Errors here do not necessarily mean that the job has completed or
was unsuccessful.
state: [Output-only] Running state of the job.
"""
errorResult = _messages.MessageField('ErrorProto', 1)
errors = _messages.MessageField('ErrorProto', 2, repeated=True)
state = _messages.StringField(3)
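# Illustrative sketch (hand-written): deciding whether a finished job
# succeeded. Per the field docs above, a DONE job failed exactly when
# errorResult is present.
def _example_job_succeeded(job_status):
  return job_status.state == 'DONE' and job_status.errorResult is None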
@encoding.MapUnrecognizedFields('additionalProperties')
class JsonObject(_messages.Message):
"""Represents a single JSON object.
Messages:
AdditionalProperty: An additional property for a JsonObject object.
Fields:
additionalProperties: Additional properties of type JsonObject
"""
class AdditionalProperty(_messages.Message):
"""An additional property for a JsonObject object.
Fields:
key: Name of the additional property.
value: A JsonValue attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('JsonValue', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
JsonValue = extra_types.JsonValue
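# Illustrative sketch (hand-written): building the row {'name': 'alice',
# 'age': 30} as a JsonObject. The JsonValue field names (string_value,
# integer_value, ...) come from apitools' extra_types.JsonValue.
def _example_json_row():
  return JsonObject(additionalProperties=[
      JsonObject.AdditionalProperty(
          key='name', value=JsonValue(string_value='alice')),
      JsonObject.AdditionalProperty(
          key='age', value=JsonValue(integer_value=30)),
  ])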
class ProjectList(_messages.Message):
"""A ProjectList object.
Messages:
ProjectsValueListEntry: A ProjectsValueListEntry object.
Fields:
etag: A hash of the page of results.
kind: The type of list.
nextPageToken: A token to request the next page of results.
projects: Projects to which you have at least READ access.
totalItems: The total number of projects in the list.
"""
class ProjectsValueListEntry(_messages.Message):
"""A ProjectsValueListEntry object.
Fields:
friendlyName: A descriptive name for this project.
id: An opaque ID of this project.
kind: The resource type.
numericId: The numeric ID of this project.
projectReference: A unique reference to this project.
"""
friendlyName = _messages.StringField(1)
id = _messages.StringField(2)
kind = _messages.StringField(3, default=u'bigquery#project')
numericId = _messages.IntegerField(4, variant=_messages.Variant.UINT64)
projectReference = _messages.MessageField('ProjectReference', 5)
etag = _messages.StringField(1)
kind = _messages.StringField(2, default=u'bigquery#projectList')
nextPageToken = _messages.StringField(3)
projects = _messages.MessageField('ProjectsValueListEntry', 4, repeated=True)
totalItems = _messages.IntegerField(5, variant=_messages.Variant.INT32)
class ProjectReference(_messages.Message):
"""A ProjectReference object.
Fields:
projectId: [Required] ID of the project. Can be either the numeric ID or
the assigned ID of the project.
"""
projectId = _messages.StringField(1)
class QueryRequest(_messages.Message):
"""A QueryRequest object.
Fields:
defaultDataset: [Optional] Specifies the default datasetId and projectId
to assume for any unqualified table names in the query. If not set, all
table names in the query string must be qualified in the format
'datasetId.tableId'.
dryRun: [Optional] If set to true, BigQuery doesn't run the job. Instead,
if the query is valid, BigQuery returns statistics about the job such as
how many bytes would be processed. If the query is invalid, an error
is returned. The default value is false.
kind: The resource type of the request.
maxResults: [Optional] The maximum number of rows of data to return per
page of results. Setting this flag to a small value such as 1000 and
then paging through results might improve reliability when the query
result set is large. In addition to this limit, responses are also
limited to 10 MB. By default, there is no maximum row count, and only
the byte limit applies.
preserveNulls: [Deprecated] This property is deprecated.
query: [Required] A query string, following the BigQuery query syntax, of
the query to execute. Example: "SELECT count(f1) FROM
[myProjectId:myDatasetId.myTableId]".
timeoutMs: [Optional] How long to wait for the query to complete, in
milliseconds, before the request times out and returns. Note that this
is only a timeout for the request, not the query. If the query takes
longer to run than the timeout value, the call returns without any
results and with the 'jobComplete' flag set to false. You can call
GetQueryResults() to wait for the query to complete and read the
results. The default value is 10000 milliseconds (10 seconds).
useLegacySql: [Experimental] Specifies whether to use BigQuery's legacy
SQL dialect for this query. The default value is true. If set to false,
the query will use BigQuery's standard SQL:
https://cloud.google.com/bigquery/sql-reference/ When useLegacySql is
set to false, the values of allowLargeResults and flattenResults are
ignored; query will be run as if allowLargeResults is true and
flattenResults is false.
useQueryCache: [Optional] Whether to look for the result in the query
cache. The query cache is a best-effort cache that will be flushed
whenever tables in the query are modified. The default value is true.
"""
defaultDataset = _messages.MessageField('DatasetReference', 1)
dryRun = _messages.BooleanField(2)
kind = _messages.StringField(3, default=u'bigquery#queryRequest')
maxResults = _messages.IntegerField(4, variant=_messages.Variant.UINT32)
preserveNulls = _messages.BooleanField(5)
query = _messages.StringField(6)
timeoutMs = _messages.IntegerField(7, variant=_messages.Variant.UINT32)
useLegacySql = _messages.BooleanField(8)
useQueryCache = _messages.BooleanField(9, default=True)
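# Illustrative sketch (hand-written): a dry-run standard-SQL query. With
# dryRun set, the service validates the query and reports the bytes it
# would process instead of running it; defaultDataset lets the query use
# the unqualified table name. All IDs are placeholders.
def _example_dry_run_query():
  return QueryRequest(
      query='SELECT COUNT(*) FROM my_table',
      defaultDataset=DatasetReference(projectId='my-project',
                                      datasetId='my_dataset'),
      useLegacySql=False,
      dryRun=True,
      timeoutMs=10000)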
class QueryResponse(_messages.Message):
"""A QueryResponse object.
Fields:
cacheHit: Whether the query result was fetched from the query cache.
errors: [Output-only] All errors and warnings encountered during the
running of the job. Errors here do not necessarily mean that the job has
completed or was unsuccessful.
jobComplete: Whether the query has completed or not. If rows or totalRows
are present, this will always be true. If this is false, totalRows will
not be available.
jobReference: Reference to the Job that was created to run the query. This
field will be present even if the original request timed out, in which
case GetQueryResults can be used to read the results once the query has
completed. Since this API only returns the first page of results,
subsequent pages can be fetched via the same mechanism
(GetQueryResults).
kind: The resource type.
numDmlAffectedRows: [Output-only, Experimental] The number of rows
affected by a DML statement. Present only for DML statements INSERT,
UPDATE or DELETE.
pageToken: A token used for paging results.
rows: An object with as many results as can be contained within the
maximum permitted reply size. To get any additional rows, you can call
GetQueryResults and specify the jobReference returned above.
schema: The schema of the results. Present only when the query completes
successfully.
totalBytesProcessed: The total number of bytes processed for this query.
If this query was a dry run, this is the number of bytes that would be
processed if the query were run.
totalRows: The total number of rows in the complete query result set,
which can be more than the number of rows in this single page of
results.
"""
cacheHit = _messages.BooleanField(1)
errors = _messages.MessageField('ErrorProto', 2, repeated=True)
jobComplete = _messages.BooleanField(3)
jobReference = _messages.MessageField('JobReference', 4)
kind = _messages.StringField(5, default=u'bigquery#queryResponse')
numDmlAffectedRows = _messages.IntegerField(6)
pageToken = _messages.StringField(7)
rows = _messages.MessageField('TableRow', 8, repeated=True)
schema = _messages.MessageField('TableSchema', 9)
totalBytesProcessed = _messages.IntegerField(10)
totalRows = _messages.IntegerField(11, variant=_messages.Variant.UINT64)
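# Illustrative sketch (hand-written): consuming a QueryResponse. When the
# request timed out before the query finished, jobComplete is False and
# jobReference identifies the job to poll (e.g. via jobs.getQueryResults);
# otherwise rows holds the first page and pageToken, if set, requests the
# next one.
def _example_first_page(response):
  if not response.jobComplete:
    return None, response.jobReference
  return response.rows, response.pageToken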
class StandardQueryParameters(_messages.Message):
"""Query parameters accepted by all methods.
Enums:
AltValueValuesEnum: Data format for the response.
Fields:
alt: Data format for the response.
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters. Overrides userIp if both are provided.
trace: A tracing token of the form "token:<tokenid>" to include in API
requests.
userIp: IP address of the site where the request originates. Use this if
you want to enforce per-user limits.
"""
class AltValueValuesEnum(_messages.Enum):
"""Data format for the response.
Values:
json: Responses with Content-Type of application/json
"""
json = 0
alt = _messages.EnumField('AltValueValuesEnum', 1, default=u'json')
fields = _messages.StringField(2)
key = _messages.StringField(3)
oauth_token = _messages.StringField(4)
prettyPrint = _messages.BooleanField(5, default=True)
quotaUser = _messages.StringField(6)
trace = _messages.StringField(7)
userIp = _messages.StringField(8)
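# Illustrative sketch (hand-written): trimming a response with a partial-
# response field selector and tagging the call for server-side quota
# accounting. The selector uses the standard 'fields' syntax; the
# quotaUser string is an arbitrary placeholder.
def _example_global_params():
  return StandardQueryParameters(
      fields='nextPageToken,jobs(id,state)',
      quotaUser='analytics-worker-17')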
class Streamingbuffer(_messages.Message):
"""A Streamingbuffer object.
Fields:
estimatedBytes: [Output-only] A lower-bound estimate of the number of
bytes currently in the streaming buffer.
estimatedRows: [Output-only] A lower-bound estimate of the number of rows
currently in the streaming buffer.
oldestEntryTime: [Output-only] Contains the timestamp of the oldest entry
in the streaming buffer, in milliseconds since the epoch, if the
streaming buffer is available.
"""
estimatedBytes = _messages.IntegerField(1, variant=_messages.Variant.UINT64)
estimatedRows = _messages.IntegerField(2, variant=_messages.Variant.UINT64)
oldestEntryTime = _messages.IntegerField(3, variant=_messages.Variant.UINT64)
class Table(_messages.Message):
"""A Table object.
Fields:
creationTime: [Output-only] The time when this table was created, in
milliseconds since the epoch.
description: [Optional] A user-friendly description of this table.
etag: [Output-only] A hash of this resource.
expirationTime: [Optional] The time when this table expires, in
milliseconds since the epoch. If not present, the table will persist
indefinitely. Expired tables will be deleted and their storage
reclaimed.
externalDataConfiguration: [Optional] Describes the data format, location,
and other properties of a table stored outside of BigQuery. By defining
these properties, the data source can then be queried as if it were a
standard BigQuery table.
friendlyName: [Optional] A descriptive name for this table.
id: [Output-only] An opaque ID uniquely identifying the table.
kind: [Output-only] The type of the resource.
lastModifiedTime: [Output-only] The time when this table was last
modified, in milliseconds since the epoch.
location: [Output-only] The geographic location where the table resides.
This value is inherited from the dataset.
numBytes: [Output-only] The size of this table in bytes, excluding any
data in the streaming buffer.
numLongTermBytes: [Output-only] The number of bytes in the table that are
considered "long-term storage".
numRows: [Output-only] The number of rows of data in this table, excluding
any data in the streaming buffer.
schema: [Optional] Describes the schema of this table.
selfLink: [Output-only] A URL that can be used to access this resource
again.
streamingBuffer: [Output-only] Contains information regarding this table's
streaming buffer, if one is present. This field will be absent if the
table is not being streamed to or if there is no data in the streaming
buffer.
tableReference: [Required] Reference describing the ID of this table.
timePartitioning: [Experimental] If specified, configures time-based
partitioning for this table.
type: [Output-only] Describes the table type. The following values are
supported: TABLE: A normal BigQuery table. VIEW: A virtual table defined
by a SQL query. EXTERNAL: A table that references data stored in an
external storage system, such as Google Cloud Storage. The default value
is TABLE.
view: [Optional] The view definition.
"""
creationTime = _messages.IntegerField(1)
description = _messages.StringField(2)
etag = _messages.StringField(3)
expirationTime = _messages.IntegerField(4)
externalDataConfiguration = _messages.MessageField('ExternalDataConfiguration', 5)
friendlyName = _messages.StringField(6)
id = _messages.StringField(7)
kind = _messages.StringField(8, default=u'bigquery#table')
lastModifiedTime = _messages.IntegerField(9, variant=_messages.Variant.UINT64)
location = _messages.StringField(10)
numBytes = _messages.IntegerField(11)
numLongTermBytes = _messages.IntegerField(12)
numRows = _messages.IntegerField(13, variant=_messages.Variant.UINT64)
schema = _messages.MessageField('TableSchema', 14)
selfLink = _messages.StringField(15)
streamingBuffer = _messages.MessageField('Streamingbuffer', 16)
tableReference = _messages.MessageField('TableReference', 17)
timePartitioning = _messages.MessageField('TimePartitioning', 18)
type = _messages.StringField(19)
view = _messages.MessageField('ViewDefinition', 20)
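# Illustrative sketch (hand-written): a minimal Table resource for a table
# insert, combining a reference, a two-column schema and day partitioning.
# All IDs are placeholders.
def _example_table_resource():
  return Table(
      tableReference=TableReference(projectId='my-project',
                                    datasetId='my_dataset',
                                    tableId='events'),
      description='Raw event stream.',
      schema=TableSchema(fields=[
          TableFieldSchema(name='ts', type='TIMESTAMP', mode='REQUIRED'),
          TableFieldSchema(name='payload', type='STRING'),
      ]),
      timePartitioning=TimePartitioning(type='DAY'))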
class TableCell(_messages.Message):
"""A TableCell object.
Fields:
v: An extra_types.JsonValue attribute.
"""
v = _messages.MessageField('extra_types.JsonValue', 1)
class TableDataInsertAllRequest(_messages.Message):
"""A TableDataInsertAllRequest object.
Messages:
RowsValueListEntry: A RowsValueListEntry object.
Fields:
ignoreUnknownValues: [Optional] Accept rows that contain values that do
not match the schema. The unknown values are ignored. Default is false,
which treats unknown values as errors.
kind: The resource type of the response.
rows: The rows to insert.
skipInvalidRows: [Optional] Insert all valid rows of a request, even if
invalid rows exist. The default value is false, which causes the entire
request to fail if any invalid rows exist.
templateSuffix: [Experimental] If specified, treats the destination table
as a base template, and inserts the rows into an instance table named
"{destination}{templateSuffix}". BigQuery will manage creation of the
instance table, using the schema of the base template table. See
https://cloud.google.com/bigquery/streaming-data-into-bigquery#template-
tables for considerations when working with template tables.
"""
class RowsValueListEntry(_messages.Message):
"""A RowsValueListEntry object.
Fields:
insertId: [Optional] A unique ID for each row. BigQuery uses this
property to detect duplicate insertion requests on a best-effort
basis.
json: [Required] A JSON object that contains a row of data. The object's
properties and values must match the destination table's schema.
"""
insertId = _messages.StringField(1)
json = _messages.MessageField('JsonObject', 2)
ignoreUnknownValues = _messages.BooleanField(1)
kind = _messages.StringField(2, default=u'bigquery#tableDataInsertAllRequest')
rows = _messages.MessageField('RowsValueListEntry', 3, repeated=True)
skipInvalidRows = _messages.BooleanField(4)
templateSuffix = _messages.StringField(5)
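# Illustrative sketch (hand-written): wrapping JsonObject rows (see
# JsonObject above) for a streaming insert. The insertId values are
# placeholders; in practice they should be stable per logical row so that
# retries deduplicate on a best-effort basis.
def _example_insert_all_request(json_rows):
  return TableDataInsertAllRequest(
      rows=[
          TableDataInsertAllRequest.RowsValueListEntry(
              insertId='row-%d' % i, json=row)
          for i, row in enumerate(json_rows)],
      skipInvalidRows=True)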
class TableDataInsertAllResponse(_messages.Message):
"""A TableDataInsertAllResponse object.
Messages:
InsertErrorsValueListEntry: An InsertErrorsValueListEntry object.
Fields:
insertErrors: An array of errors for rows that were not inserted.
kind: The resource type of the response.
"""
class InsertErrorsValueListEntry(_messages.Message):
"""A InsertErrorsValueListEntry object.
Fields:
errors: Error information for the row indicated by the index property.
index: The index of the row that the error applies to.
"""
errors = _messages.MessageField('ErrorProto', 1, repeated=True)
index = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
insertErrors = _messages.MessageField('InsertErrorsValueListEntry', 1, repeated=True)
kind = _messages.StringField(2, default=u'bigquery#tableDataInsertAllResponse')
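# Illustrative sketch (hand-written): collecting the indexes of rejected
# rows from an insertAll response so they can be logged or retried.
def _example_rejected_row_indexes(response):
  return [entry.index for entry in response.insertErrors]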
class TableDataList(_messages.Message):
"""A TableDataList object.
Fields:
etag: A hash of this page of results.
kind: The resource type of the response.
pageToken: A token used for paging results. Providing this token instead
of the startIndex parameter can help you retrieve stable results when an
underlying table is changing.
rows: Rows of results.
totalRows: The total number of rows in the complete table.
"""
etag = _messages.StringField(1)
kind = _messages.StringField(2, default=u'bigquery#tableDataList')
pageToken = _messages.StringField(3)
rows = _messages.MessageField('TableRow', 4, repeated=True)
totalRows = _messages.IntegerField(5)
class TableFieldSchema(_messages.Message):
"""A TableFieldSchema object.
Fields:
description: [Optional] The field description. The maximum length is 16K
characters.
fields: [Optional] Describes the nested schema fields if the type property
is set to RECORD.
mode: [Optional] The field mode. Possible values include NULLABLE,
REQUIRED and REPEATED. The default value is NULLABLE.
name: [Required] The field name. The name must contain only letters (a-z,
A-Z), numbers (0-9), or underscores (_), and must start with a letter or
underscore. The maximum length is 128 characters.
type: [Required] The field data type. Possible values include STRING,
BYTES, INTEGER, FLOAT, BOOLEAN, TIMESTAMP or RECORD (where RECORD
indicates that the field contains a nested schema).
"""
description = _messages.StringField(1)
fields = _messages.MessageField('TableFieldSchema', 2, repeated=True)
mode = _messages.StringField(3)
name = _messages.StringField(4)
type = _messages.StringField(5)
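# Illustrative sketch (hand-written): a nested schema. A RECORD field
# carries its children in 'fields'; REPEATED makes it an array of structs.
def _example_record_field():
  return TableFieldSchema(
      name='addresses', type='RECORD', mode='REPEATED',
      fields=[
          TableFieldSchema(name='city', type='STRING'),
          TableFieldSchema(name='postal_code', type='STRING'),
      ])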
class TableList(_messages.Message):
"""A TableList object.
Messages:
TablesValueListEntry: A TablesValueListEntry object.
Fields:
etag: A hash of this page of results.
kind: The type of list.
nextPageToken: A token to request the next page of results.
tables: Tables in the requested dataset.
totalItems: The total number of tables in the dataset.
"""
class TablesValueListEntry(_messages.Message):
"""A TablesValueListEntry object.
Fields:
friendlyName: The user-friendly name for this table.
id: An opaque ID of the table.
kind: The resource type.
tableReference: A reference uniquely identifying the table.
type: The type of table. Possible values are: TABLE, VIEW.
"""
friendlyName = _messages.StringField(1)
id = _messages.StringField(2)
kind = _messages.StringField(3, default=u'bigquery#table')
tableReference = _messages.MessageField('TableReference', 4)
type = _messages.StringField(5)
etag = _messages.StringField(1)
kind = _messages.StringField(2, default=u'bigquery#tableList')
nextPageToken = _messages.StringField(3)
tables = _messages.MessageField('TablesValueListEntry', 4, repeated=True)
totalItems = _messages.IntegerField(5, variant=_messages.Variant.INT32)
class TableReference(_messages.Message):
"""A TableReference object.
Fields:
datasetId: [Required] The ID of the dataset containing this table.
projectId: [Required] The ID of the project containing this table.
tableId: [Required] The ID of the table. The ID must contain only letters
(a-z, A-Z), numbers (0-9), or underscores (_). The maximum length is
1,024 characters.
"""
datasetId = _messages.StringField(1)
projectId = _messages.StringField(2)
tableId = _messages.StringField(3)
class TableRow(_messages.Message):
"""A TableRow object.
Fields:
f: Represents a single row in the result set, consisting of one or more
fields.
"""
f = _messages.MessageField('TableCell', 1, repeated=True)
class TableSchema(_messages.Message):
"""A TableSchema object.
Fields:
fields: Describes the fields in a table.
"""
fields = _messages.MessageField('TableFieldSchema', 1, repeated=True)
class TimePartitioning(_messages.Message):
"""A TimePartitioning object.
Fields:
expirationMs: [Optional] Number of milliseconds for which to keep the
storage for a partition.
type: [Required] The only type supported is DAY, which will generate one
partition per day based on data loading time.
"""
expirationMs = _messages.IntegerField(1)
type = _messages.StringField(2)
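# Illustrative sketch (hand-written): day partitioning that drops each
# partition's storage after 90 days (90 * 24 * 60 * 60 * 1000 ms).
def _example_day_partitioning():
  return TimePartitioning(type='DAY', expirationMs=90 * 24 * 60 * 60 * 1000)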
class UserDefinedFunctionResource(_messages.Message):
"""A UserDefinedFunctionResource object.
Fields:
inlineCode: [Pick one] An inline resource that contains code for a user-
defined function (UDF). Providing an inline code resource is equivalent
to providing a URI for a file containing the same code.
resourceUri: [Pick one] A code resource to load from a Google Cloud
Storage URI (gs://bucket/path).
"""
inlineCode = _messages.StringField(1)
resourceUri = _messages.StringField(2)
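# Illustrative sketch (hand-written): the two mutually exclusive ways to
# supply UDF code; each resource sets exactly one of the [Pick one]
# fields. The function body and bucket path are placeholders.
def _example_udf_resources():
  return [
      UserDefinedFunctionResource(
          inlineCode='function f(row, emit) { emit(row); }'),
      UserDefinedFunctionResource(
          resourceUri='gs://my-bucket/udfs/helpers.js'),
  ]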
class ViewDefinition(_messages.Message):
"""A ViewDefinition object.
Fields:
query: [Required] A query that BigQuery executes when the view is
referenced.
useLegacySql: [Experimental] Specifies whether to use BigQuery's legacy
SQL for this view. The default value is true. If set to false, the view
will use BigQuery's standard SQL: https://cloud.google.com/bigquery/sql-
reference/ Queries and views that reference this view must use the same
flag value.
userDefinedFunctionResources: [Experimental] Describes user-defined
function resources used in the query.
"""
query = _messages.StringField(1)
useLegacySql = _messages.BooleanField(2)
userDefinedFunctionResources = _messages.MessageField('UserDefinedFunctionResource', 3, repeated=True)
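# Illustrative sketch (hand-written): a standard-SQL view. Per the field
# docs above, queries and views that reference it must also set
# useLegacySql to false. The table name is a placeholder.
def _example_view_definition():
  return ViewDefinition(
      query='SELECT name, value FROM my_dataset.my_table WHERE value > 0',
      useLegacySql=False)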