1610 lines
54 KiB
Python
1610 lines
54 KiB
Python
#!/usr/bin/python
|
|
# -*- coding: utf-8 -*-
|
|
#
|
|
# Copyright (C) 2017 Google
|
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
|
# ----------------------------------------------------------------------------
|
|
#
|
|
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
|
|
#
|
|
# ----------------------------------------------------------------------------
|
|
#
|
|
# This file is automatically generated by Magic Modules and manual
|
|
# changes will be clobbered when the file is regenerated.
|
|
#
|
|
# Please read more about how to change this file at
|
|
# https://www.github.com/GoogleCloudPlatform/magic-modules
|
|
#
|
|
# ----------------------------------------------------------------------------
|
|
|
|
from __future__ import absolute_import, division, print_function
|
|
|
|
__metaclass__ = type
|
|
|
|
################################################################################
|
|
# Documentation
|
|
################################################################################
|
|
|
|
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
|
|
|
|
DOCUMENTATION = '''
|
|
---
|
|
module: gcp_bigquery_table
|
|
description:
|
|
- A Table that belongs to a Dataset .
|
|
short_description: Creates a GCP Table
|
|
version_added: 2.8
|
|
author: Google Inc. (@googlecloudplatform)
|
|
requirements:
|
|
- python >= 2.6
|
|
- requests >= 2.18.4
|
|
- google-auth >= 1.3.0
|
|
options:
|
|
state:
|
|
description:
|
|
- Whether the given object should exist in GCP
|
|
choices:
|
|
- present
|
|
- absent
|
|
default: present
|
|
table_reference:
|
|
description:
|
|
- Reference describing the ID of this table.
|
|
required: false
|
|
suboptions:
|
|
dataset_id:
|
|
description:
|
|
- The ID of the dataset containing this table.
|
|
required: false
|
|
project_id:
|
|
description:
|
|
- The ID of the project containing this table.
|
|
required: false
|
|
table_id:
|
|
description:
|
|
- The ID of the table.
|
|
required: false
|
|
description:
|
|
description:
|
|
- A user-friendly description of the dataset.
|
|
required: false
|
|
friendly_name:
|
|
description:
|
|
- A descriptive name for this table.
|
|
required: false
|
|
labels:
|
|
description:
|
|
- The labels associated with this dataset. You can use these to organize and group
|
|
your datasets .
|
|
required: false
|
|
name:
|
|
description:
|
|
- Name of the table.
|
|
required: false
|
|
view:
|
|
description:
|
|
- The view definition.
|
|
required: false
|
|
suboptions:
|
|
use_legacy_sql:
|
|
description:
|
|
- Specifies whether to use BigQuery's legacy SQL for this view .
|
|
required: false
|
|
type: bool
|
|
user_defined_function_resources:
|
|
description:
|
|
- Describes user-defined function resources used in the query.
|
|
required: false
|
|
suboptions:
|
|
inline_code:
|
|
description:
|
|
- An inline resource that contains code for a user-defined function (UDF).
|
|
Providing an inline code resource is equivalent to providing a URI for
|
|
a file containing the same code.
|
|
required: false
|
|
resource_uri:
|
|
description:
|
|
- A code resource to load from a Google Cloud Storage URI (gs://bucket/path).
|
|
required: false
|
|
time_partitioning:
|
|
description:
|
|
- If specified, configures time-based partitioning for this table.
|
|
required: false
|
|
suboptions:
|
|
expiration_ms:
|
|
description:
|
|
- Number of milliseconds for which to keep the storage for a partition.
|
|
required: false
|
|
type:
|
|
description:
|
|
- The only type supported is DAY, which will generate one partition per day.
|
|
required: false
|
|
choices:
|
|
- DAY
|
|
schema:
|
|
description:
|
|
- Describes the schema of this table.
|
|
required: false
|
|
suboptions:
|
|
fields:
|
|
description:
|
|
- Describes the fields in a table.
|
|
required: false
|
|
suboptions:
|
|
description:
|
|
description:
|
|
- The field description. The maximum length is 1,024 characters.
|
|
required: false
|
|
fields:
|
|
description:
|
|
- Describes the nested schema fields if the type property is set to RECORD.
|
|
required: false
|
|
mode:
|
|
description:
|
|
- The field mode.
|
|
required: false
|
|
choices:
|
|
- NULLABLE
|
|
- REQUIRED
|
|
- REPEATED
|
|
name:
|
|
description:
|
|
- The field name.
|
|
required: false
|
|
type:
|
|
description:
|
|
- The field data type.
|
|
required: false
|
|
choices:
|
|
- STRING
|
|
- BYTES
|
|
- INTEGER
|
|
- FLOAT
|
|
- TIMESTAMP
|
|
- DATE
|
|
- TIME
|
|
- DATETIME
|
|
- RECORD
|
|
encryption_configuration:
|
|
description:
|
|
- Custom encryption configuration.
|
|
required: false
|
|
suboptions:
|
|
kms_key_name:
|
|
description:
|
|
- Describes the Cloud KMS encryption key that will be used to protect destination
|
|
BigQuery table. The BigQuery Service Account associated with your project
|
|
requires access to this encryption key.
|
|
required: false
|
|
expiration_time:
|
|
description:
|
|
- The time when this table expires, in milliseconds since the epoch. If not present,
|
|
the table will persist indefinitely.
|
|
required: false
|
|
external_data_configuration:
|
|
description:
|
|
- Describes the data format, location, and other properties of a table stored
|
|
outside of BigQuery. By defining these properties, the data source can then
|
|
be queried as if it were a standard BigQuery table.
|
|
required: false
|
|
suboptions:
|
|
autodetect:
|
|
description:
|
|
- Try to detect schema and format options automatically. Any option specified
|
|
explicitly will be honored.
|
|
required: false
|
|
type: bool
|
|
compression:
|
|
description:
|
|
- The compression type of the data source.
|
|
required: false
|
|
choices:
|
|
- GZIP
|
|
- NONE
|
|
ignore_unknown_values:
|
|
description:
|
|
- Indicates if BigQuery should allow extra values that are not represented
|
|
in the table schema .
|
|
required: false
|
|
type: bool
|
|
max_bad_records:
|
|
description:
|
|
- The maximum number of bad records that BigQuery can ignore when reading
|
|
data .
|
|
required: false
|
|
default: '0'
|
|
source_format:
|
|
description:
|
|
- The data format.
|
|
required: false
|
|
choices:
|
|
- CSV
|
|
- GOOGLE_SHEETS
|
|
- NEWLINE_DELIMITED_JSON
|
|
- AVRO
|
|
- DATASTORE_BACKUP
|
|
- BIGTABLE
|
|
source_uris:
|
|
description:
|
|
- 'The fully-qualified URIs that point to your data in Google Cloud. For Google
|
|
Cloud Storage URIs: Each URI can contain one ''*'' wildcard character and
|
|
it must come after the ''bucket'' name. Size limits related to load jobs
|
|
apply to external data sources. For Google Cloud Bigtable URIs: Exactly
|
|
one URI can be specified and it has to be a fully specified and valid HTTPS
|
|
URL for a Google Cloud Bigtable table. For Google Cloud Datastore backups,
|
|
exactly one URI can be specified. Also, the ''*'' wildcard character is
|
|
not allowed.'
|
|
required: false
|
|
schema:
|
|
description:
|
|
- The schema for the data. Schema is required for CSV and JSON formats.
|
|
required: false
|
|
suboptions:
|
|
fields:
|
|
description:
|
|
- Describes the fields in a table.
|
|
required: false
|
|
suboptions:
|
|
description:
|
|
description:
|
|
- The field description.
|
|
required: false
|
|
fields:
|
|
description:
|
|
- Describes the nested schema fields if the type property is set to
|
|
RECORD .
|
|
required: false
|
|
mode:
|
|
description:
|
|
- Field mode.
|
|
required: false
|
|
choices:
|
|
- NULLABLE
|
|
- REQUIRED
|
|
- REPEATED
|
|
name:
|
|
description:
|
|
- Field name.
|
|
required: false
|
|
type:
|
|
description:
|
|
- Field data type.
|
|
required: false
|
|
choices:
|
|
- STRING
|
|
- BYTES
|
|
- INTEGER
|
|
- FLOAT
|
|
- TIMESTAMP
|
|
- DATE
|
|
- TIME
|
|
- DATETIME
|
|
- RECORD
|
|
google_sheets_options:
|
|
description:
|
|
- Additional options if sourceFormat is set to GOOGLE_SHEETS.
|
|
required: false
|
|
suboptions:
|
|
skip_leading_rows:
|
|
description:
|
|
- The number of rows at the top of a Google Sheet that BigQuery will skip
|
|
when reading the data.
|
|
required: false
|
|
default: '0'
|
|
csv_options:
|
|
description:
|
|
- Additional properties to set if sourceFormat is set to CSV.
|
|
required: false
|
|
suboptions:
|
|
allow_jagged_rows:
|
|
description:
|
|
- Indicates if BigQuery should accept rows that are missing trailing optional
|
|
columns .
|
|
required: false
|
|
type: bool
|
|
allow_quoted_newlines:
|
|
description:
|
|
- Indicates if BigQuery should allow quoted data sections that contain
|
|
newline characters in a CSV file .
|
|
required: false
|
|
type: bool
|
|
encoding:
|
|
description:
|
|
- The character encoding of the data.
|
|
required: false
|
|
choices:
|
|
- UTF-8
|
|
- ISO-8859-1
|
|
field_delimiter:
|
|
description:
|
|
- The separator for fields in a CSV file.
|
|
required: false
|
|
quote:
|
|
description:
|
|
- The value that is used to quote data sections in a CSV file.
|
|
required: false
|
|
skip_leading_rows:
|
|
description:
|
|
- The number of rows at the top of a CSV file that BigQuery will skip
|
|
when reading the data.
|
|
required: false
|
|
default: '0'
|
|
bigtable_options:
|
|
description:
|
|
- Additional options if sourceFormat is set to BIGTABLE.
|
|
required: false
|
|
suboptions:
|
|
ignore_unspecified_column_families:
|
|
description:
|
|
- If field is true, then the column families that are not specified in
|
|
columnFamilies list are not exposed in the table schema .
|
|
required: false
|
|
type: bool
|
|
read_rowkey_as_string:
|
|
description:
|
|
- If field is true, then the rowkey column families will be read and converted
|
|
to string.
|
|
required: false
|
|
type: bool
|
|
column_families:
|
|
description:
|
|
- List of column families to expose in the table schema along with their
|
|
types.
|
|
required: false
|
|
suboptions:
|
|
columns:
|
|
description:
|
|
- Lists of columns that should be exposed as individual fields as
|
|
opposed to a list of (column name, value) pairs.
|
|
required: false
|
|
suboptions:
|
|
encoding:
|
|
description:
|
|
- The encoding of the values when the type is not STRING.
|
|
required: false
|
|
choices:
|
|
- TEXT
|
|
- BINARY
|
|
field_name:
|
|
description:
|
|
- If the qualifier is not a valid BigQuery field identifier, a
|
|
valid identifier must be provided as the column field name and
|
|
is used as field name in queries.
|
|
required: false
|
|
only_read_latest:
|
|
description:
|
|
- If this is set, only the latest version of value in this column
|
|
are exposed .
|
|
required: false
|
|
type: bool
|
|
qualifier_string:
|
|
description:
|
|
- Qualifier of the column.
|
|
required: true
|
|
type:
|
|
description:
|
|
- The type to convert the value in cells of this column.
|
|
required: false
|
|
choices:
|
|
- BYTES
|
|
- STRING
|
|
- INTEGER
|
|
- FLOAT
|
|
- BOOLEAN
|
|
encoding:
|
|
description:
|
|
- The encoding of the values when the type is not STRING.
|
|
required: false
|
|
choices:
|
|
- TEXT
|
|
- BINARY
|
|
family_id:
|
|
description:
|
|
- Identifier of the column family.
|
|
required: false
|
|
only_read_latest:
|
|
description:
|
|
- If this is set only the latest version of value are exposed for
|
|
all columns in this column family .
|
|
required: false
|
|
type: bool
|
|
type:
|
|
description:
|
|
- The type to convert the value in cells of this column family.
|
|
required: false
|
|
choices:
|
|
- BYTES
|
|
- STRING
|
|
- INTEGER
|
|
- FLOAT
|
|
- BOOLEAN
|
|
dataset:
|
|
description:
|
|
- Name of the dataset.
|
|
required: false
|
|
extends_documentation_fragment: gcp
|
|
'''
|
|
|
|
EXAMPLES = '''
|
|
- name: create a dataset
|
|
gcp_bigquery_dataset:
|
|
name: example_dataset
|
|
dataset_reference:
|
|
dataset_id: example_dataset
|
|
project: "{{ gcp_project }}"
|
|
auth_kind: "{{ gcp_cred_kind }}"
|
|
service_account_file: "{{ gcp_cred_file }}"
|
|
state: present
|
|
register: dataset
|
|
|
|
- name: create a table
|
|
gcp_bigquery_table:
|
|
name: example_table
|
|
dataset: example_dataset
|
|
table_reference:
|
|
dataset_id: example_dataset
|
|
project_id: "test_project"
|
|
table_id: example_table
|
|
project: "test_project"
|
|
auth_kind: "serviceaccount"
|
|
service_account_file: "/tmp/auth.pem"
|
|
state: present
|
|
'''
|
|
|
|
RETURN = '''
|
|
tableReference:
|
|
description:
|
|
- Reference describing the ID of this table.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
datasetId:
|
|
description:
|
|
- The ID of the dataset containing this table.
|
|
returned: success
|
|
type: str
|
|
projectId:
|
|
description:
|
|
- The ID of the project containing this table.
|
|
returned: success
|
|
type: str
|
|
tableId:
|
|
description:
|
|
- The ID of the table.
|
|
returned: success
|
|
type: str
|
|
creationTime:
|
|
description:
|
|
- The time when this dataset was created, in milliseconds since the epoch.
|
|
returned: success
|
|
type: int
|
|
description:
|
|
description:
|
|
- A user-friendly description of the dataset.
|
|
returned: success
|
|
type: str
|
|
friendlyName:
|
|
description:
|
|
- A descriptive name for this table.
|
|
returned: success
|
|
type: str
|
|
id:
|
|
description:
|
|
- An opaque ID uniquely identifying the table.
|
|
returned: success
|
|
type: str
|
|
labels:
|
|
description:
|
|
- The labels associated with this dataset. You can use these to organize and group
|
|
your datasets .
|
|
returned: success
|
|
type: dict
|
|
lastModifiedTime:
|
|
description:
|
|
- The time when this table was last modified, in milliseconds since the epoch.
|
|
returned: success
|
|
type: int
|
|
location:
|
|
description:
|
|
- The geographic location where the table resides. This value is inherited from
|
|
the dataset.
|
|
returned: success
|
|
type: str
|
|
name:
|
|
description:
|
|
- Name of the table.
|
|
returned: success
|
|
type: str
|
|
numBytes:
|
|
description:
|
|
- The size of this table in bytes, excluding any data in the streaming buffer.
|
|
returned: success
|
|
type: int
|
|
numLongTermBytes:
|
|
description:
|
|
- The number of bytes in the table that are considered "long-term storage".
|
|
returned: success
|
|
type: int
|
|
numRows:
|
|
description:
|
|
- The number of rows of data in this table, excluding any data in the streaming
|
|
buffer.
|
|
returned: success
|
|
type: int
|
|
type:
|
|
description:
|
|
- Describes the table type.
|
|
returned: success
|
|
type: str
|
|
view:
|
|
description:
|
|
- The view definition.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
useLegacySql:
|
|
description:
|
|
- Specifies whether to use BigQuery's legacy SQL for this view .
|
|
returned: success
|
|
type: bool
|
|
userDefinedFunctionResources:
|
|
description:
|
|
- Describes user-defined function resources used in the query.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
inlineCode:
|
|
description:
|
|
- An inline resource that contains code for a user-defined function (UDF).
|
|
Providing an inline code resource is equivalent to providing a URI for
|
|
a file containing the same code.
|
|
returned: success
|
|
type: str
|
|
resourceUri:
|
|
description:
|
|
- A code resource to load from a Google Cloud Storage URI (gs://bucket/path).
|
|
returned: success
|
|
type: str
|
|
timePartitioning:
|
|
description:
|
|
- If specified, configures time-based partitioning for this table.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
expirationMs:
|
|
description:
|
|
- Number of milliseconds for which to keep the storage for a partition.
|
|
returned: success
|
|
type: int
|
|
type:
|
|
description:
|
|
- The only type supported is DAY, which will generate one partition per day.
|
|
returned: success
|
|
type: str
|
|
streamingBuffer:
|
|
description:
|
|
- Contains information regarding this table's streaming buffer, if one is present.
|
|
This field will be absent if the table is not being streamed to or if there is
|
|
no data in the streaming buffer.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
estimatedBytes:
|
|
description:
|
|
- A lower-bound estimate of the number of bytes currently in the streaming buffer.
|
|
returned: success
|
|
type: int
|
|
estimatedRows:
|
|
description:
|
|
- A lower-bound estimate of the number of rows currently in the streaming buffer.
|
|
returned: success
|
|
type: int
|
|
oldestEntryTime:
|
|
description:
|
|
- Contains the timestamp of the oldest entry in the streaming buffer, in milliseconds
|
|
since the epoch, if the streaming buffer is available.
|
|
returned: success
|
|
type: int
|
|
schema:
|
|
description:
|
|
- Describes the schema of this table.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
fields:
|
|
description:
|
|
- Describes the fields in a table.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
description:
|
|
description:
|
|
- The field description. The maximum length is 1,024 characters.
|
|
returned: success
|
|
type: str
|
|
fields:
|
|
description:
|
|
- Describes the nested schema fields if the type property is set to RECORD.
|
|
returned: success
|
|
type: list
|
|
mode:
|
|
description:
|
|
- The field mode.
|
|
returned: success
|
|
type: str
|
|
name:
|
|
description:
|
|
- The field name.
|
|
returned: success
|
|
type: str
|
|
type:
|
|
description:
|
|
- The field data type.
|
|
returned: success
|
|
type: str
|
|
encryptionConfiguration:
|
|
description:
|
|
- Custom encryption configuration.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
kmsKeyName:
|
|
description:
|
|
- Describes the Cloud KMS encryption key that will be used to protect destination
|
|
BigQuery table. The BigQuery Service Account associated with your project
|
|
requires access to this encryption key.
|
|
returned: success
|
|
type: str
|
|
expirationTime:
|
|
description:
|
|
- The time when this table expires, in milliseconds since the epoch. If not present,
|
|
the table will persist indefinitely.
|
|
returned: success
|
|
type: int
|
|
externalDataConfiguration:
|
|
description:
|
|
- Describes the data format, location, and other properties of a table stored outside
|
|
of BigQuery. By defining these properties, the data source can then be queried
|
|
as if it were a standard BigQuery table.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
autodetect:
|
|
description:
|
|
- Try to detect schema and format options automatically. Any option specified
|
|
explicitly will be honored.
|
|
returned: success
|
|
type: bool
|
|
compression:
|
|
description:
|
|
- The compression type of the data source.
|
|
returned: success
|
|
type: str
|
|
ignoreUnknownValues:
|
|
description:
|
|
- Indicates if BigQuery should allow extra values that are not represented in
|
|
the table schema .
|
|
returned: success
|
|
type: bool
|
|
maxBadRecords:
|
|
description:
|
|
- The maximum number of bad records that BigQuery can ignore when reading data
|
|
.
|
|
returned: success
|
|
type: int
|
|
sourceFormat:
|
|
description:
|
|
- The data format.
|
|
returned: success
|
|
type: str
|
|
sourceUris:
|
|
description:
|
|
- 'The fully-qualified URIs that point to your data in Google Cloud. For Google
|
|
Cloud Storage URIs: Each URI can contain one ''*'' wildcard character and
|
|
it must come after the ''bucket'' name. Size limits related to load jobs apply
|
|
to external data sources. For Google Cloud Bigtable URIs: Exactly one URI
|
|
can be specified and it has to be a fully specified and valid HTTPS URL for a
|
|
Google Cloud Bigtable table. For Google Cloud Datastore backups, exactly one
|
|
URI can be specified. Also, the ''*'' wildcard character is not allowed.'
|
|
returned: success
|
|
type: list
|
|
schema:
|
|
description:
|
|
- The schema for the data. Schema is required for CSV and JSON formats.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
fields:
|
|
description:
|
|
- Describes the fields in a table.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
description:
|
|
description:
|
|
- The field description.
|
|
returned: success
|
|
type: str
|
|
fields:
|
|
description:
|
|
- Describes the nested schema fields if the type property is set to
|
|
RECORD .
|
|
returned: success
|
|
type: list
|
|
mode:
|
|
description:
|
|
- Field mode.
|
|
returned: success
|
|
type: str
|
|
name:
|
|
description:
|
|
- Field name.
|
|
returned: success
|
|
type: str
|
|
type:
|
|
description:
|
|
- Field data type.
|
|
returned: success
|
|
type: str
|
|
googleSheetsOptions:
|
|
description:
|
|
- Additional options if sourceFormat is set to GOOGLE_SHEETS.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
skipLeadingRows:
|
|
description:
|
|
- The number of rows at the top of a Google Sheet that BigQuery will skip
|
|
when reading the data.
|
|
returned: success
|
|
type: int
|
|
csvOptions:
|
|
description:
|
|
- Additional properties to set if sourceFormat is set to CSV.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
allowJaggedRows:
|
|
description:
|
|
- Indicates if BigQuery should accept rows that are missing trailing optional
|
|
columns .
|
|
returned: success
|
|
type: bool
|
|
allowQuotedNewlines:
|
|
description:
|
|
- Indicates if BigQuery should allow quoted data sections that contain newline
|
|
characters in a CSV file .
|
|
returned: success
|
|
type: bool
|
|
encoding:
|
|
description:
|
|
- The character encoding of the data.
|
|
returned: success
|
|
type: str
|
|
fieldDelimiter:
|
|
description:
|
|
- The separator for fields in a CSV file.
|
|
returned: success
|
|
type: str
|
|
quote:
|
|
description:
|
|
- The value that is used to quote data sections in a CSV file.
|
|
returned: success
|
|
type: str
|
|
skipLeadingRows:
|
|
description:
|
|
- The number of rows at the top of a CSV file that BigQuery will skip when
|
|
reading the data.
|
|
returned: success
|
|
type: int
|
|
bigtableOptions:
|
|
description:
|
|
- Additional options if sourceFormat is set to BIGTABLE.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
ignoreUnspecifiedColumnFamilies:
|
|
description:
|
|
- If field is true, then the column families that are not specified in columnFamilies
|
|
list are not exposed in the table schema .
|
|
returned: success
|
|
type: bool
|
|
readRowkeyAsString:
|
|
description:
|
|
- If field is true, then the rowkey column families will be read and converted
|
|
to string.
|
|
returned: success
|
|
type: bool
|
|
columnFamilies:
|
|
description:
|
|
- List of column families to expose in the table schema along with their
|
|
types.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
columns:
|
|
description:
|
|
- Lists of columns that should be exposed as individual fields as opposed
|
|
to a list of (column name, value) pairs.
|
|
returned: success
|
|
type: complex
|
|
contains:
|
|
encoding:
|
|
description:
|
|
- The encoding of the values when the type is not STRING.
|
|
returned: success
|
|
type: str
|
|
fieldName:
|
|
description:
|
|
- If the qualifier is not a valid BigQuery field identifier, a valid
|
|
identifier must be provided as the column field name and is used
|
|
as field name in queries.
|
|
returned: success
|
|
type: str
|
|
onlyReadLatest:
|
|
description:
|
|
- If this is set, only the latest version of value in this column
|
|
are exposed .
|
|
returned: success
|
|
type: bool
|
|
qualifierString:
|
|
description:
|
|
- Qualifier of the column.
|
|
returned: success
|
|
type: str
|
|
type:
|
|
description:
|
|
- The type to convert the value in cells of this column.
|
|
returned: success
|
|
type: str
|
|
encoding:
|
|
description:
|
|
- The encoding of the values when the type is not STRING.
|
|
returned: success
|
|
type: str
|
|
familyId:
|
|
description:
|
|
- Identifier of the column family.
|
|
returned: success
|
|
type: str
|
|
onlyReadLatest:
|
|
description:
|
|
- If this is set only the latest version of value are exposed for all
|
|
columns in this column family .
|
|
returned: success
|
|
type: bool
|
|
type:
|
|
description:
|
|
- The type to convert the value in cells of this column family.
|
|
returned: success
|
|
type: str
|
|
dataset:
|
|
description:
|
|
- Name of the dataset.
|
|
returned: success
|
|
type: str
|
|
'''
|
|
|
|
################################################################################
|
|
# Imports
|
|
################################################################################
|
|
|
|
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest, remove_nones_from_dict, replace_resource_dict
|
|
import json
|
|
|
|
################################################################################
|
|
# Main
|
|
################################################################################
|
|
|
|
|
|
def main():
    """Entry point: ensure the BigQuery table matches the requested state.

    Builds the GcpModule argument spec (mirroring the DOCUMENTATION block),
    then creates, updates, or deletes the table so the live resource agrees
    with ``state`` ('present'/'absent'), and exits reporting ``changed``.
    """

    # Argument spec is auto-generated from the API surface; each nested
    # dict mirrors one field of the BigQuery tables resource.
    module = GcpModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'absent'], type='str'),
            table_reference=dict(type='dict', options=dict(dataset_id=dict(type='str'), project_id=dict(type='str'), table_id=dict(type='str'))),
            description=dict(type='str'),
            friendly_name=dict(type='str'),
            labels=dict(type='dict'),
            name=dict(type='str'),
            # View definition (legacy-SQL flag plus any user-defined functions).
            view=dict(
                type='dict',
                options=dict(
                    use_legacy_sql=dict(type='bool'),
                    user_defined_function_resources=dict(
                        type='list', elements='dict', options=dict(inline_code=dict(type='str'), resource_uri=dict(type='str'))
                    ),
                ),
            ),
            time_partitioning=dict(type='dict', options=dict(expiration_ms=dict(type='int'), type=dict(type='str', choices=['DAY']))),
            # Table schema: a list of field descriptors.
            schema=dict(
                type='dict',
                options=dict(
                    fields=dict(
                        type='list',
                        elements='dict',
                        options=dict(
                            description=dict(type='str'),
                            fields=dict(type='list', elements='str'),
                            mode=dict(type='str', choices=['NULLABLE', 'REQUIRED', 'REPEATED']),
                            name=dict(type='str'),
                            type=dict(type='str', choices=['STRING', 'BYTES', 'INTEGER', 'FLOAT', 'TIMESTAMP', 'DATE', 'TIME', 'DATETIME', 'RECORD']),
                        ),
                    )
                ),
            ),
            encryption_configuration=dict(type='dict', options=dict(kms_key_name=dict(type='str'))),
            expiration_time=dict(type='int'),
            # Configuration for tables backed by data stored outside BigQuery
            # (GCS, Google Sheets, Bigtable, etc.).
            external_data_configuration=dict(
                type='dict',
                options=dict(
                    autodetect=dict(type='bool'),
                    compression=dict(type='str', choices=['GZIP', 'NONE']),
                    ignore_unknown_values=dict(type='bool'),
                    max_bad_records=dict(default=0, type='int'),
                    source_format=dict(type='str', choices=['CSV', 'GOOGLE_SHEETS', 'NEWLINE_DELIMITED_JSON', 'AVRO', 'DATASTORE_BACKUP', 'BIGTABLE']),
                    source_uris=dict(type='list', elements='str'),
                    schema=dict(
                        type='dict',
                        options=dict(
                            fields=dict(
                                type='list',
                                elements='dict',
                                options=dict(
                                    description=dict(type='str'),
                                    fields=dict(type='list', elements='str'),
                                    mode=dict(type='str', choices=['NULLABLE', 'REQUIRED', 'REPEATED']),
                                    name=dict(type='str'),
                                    type=dict(type='str', choices=['STRING', 'BYTES', 'INTEGER', 'FLOAT', 'TIMESTAMP', 'DATE', 'TIME', 'DATETIME', 'RECORD']),
                                ),
                            )
                        ),
                    ),
                    google_sheets_options=dict(type='dict', options=dict(skip_leading_rows=dict(default=0, type='int'))),
                    csv_options=dict(
                        type='dict',
                        options=dict(
                            allow_jagged_rows=dict(type='bool'),
                            allow_quoted_newlines=dict(type='bool'),
                            encoding=dict(type='str', choices=['UTF-8', 'ISO-8859-1']),
                            field_delimiter=dict(type='str'),
                            quote=dict(type='str'),
                            skip_leading_rows=dict(default=0, type='int'),
                        ),
                    ),
                    bigtable_options=dict(
                        type='dict',
                        options=dict(
                            ignore_unspecified_column_families=dict(type='bool'),
                            read_rowkey_as_string=dict(type='bool'),
                            column_families=dict(
                                type='list',
                                elements='dict',
                                options=dict(
                                    columns=dict(
                                        type='list',
                                        elements='dict',
                                        options=dict(
                                            encoding=dict(type='str', choices=['TEXT', 'BINARY']),
                                            field_name=dict(type='str'),
                                            only_read_latest=dict(type='bool'),
                                            # The only required sub-option in the whole spec.
                                            qualifier_string=dict(required=True, type='str'),
                                            type=dict(type='str', choices=['BYTES', 'STRING', 'INTEGER', 'FLOAT', 'BOOLEAN']),
                                        ),
                                    ),
                                    encoding=dict(type='str', choices=['TEXT', 'BINARY']),
                                    family_id=dict(type='str'),
                                    only_read_latest=dict(type='bool'),
                                    type=dict(type='str', choices=['BYTES', 'STRING', 'INTEGER', 'FLOAT', 'BOOLEAN']),
                                ),
                            ),
                        ),
                    ),
                ),
            ),
            dataset=dict(type='str'),
        )
    )

    # Default OAuth scope when the caller did not supply one.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/bigquery']

    state = module.params['state']
    kind = 'bigquery#table'

    # Look up the current remote state of the table (None when absent).
    fetch = fetch_resource(module, self_link(module), kind)
    changed = False

    if fetch:
        if state == 'present':
            # Resource exists: update only if desired state differs.
            if is_different(module, fetch):
                update(module, self_link(module), kind)
                fetch = fetch_resource(module, self_link(module), kind)
                changed = True
        else:
            # Resource exists but should be absent: remove it.
            delete(module, self_link(module), kind)
            fetch = {}
            changed = True
    else:
        if state == 'present':
            # Resource missing and should exist: create it.
            fetch = create(module, collection(module), kind)
            changed = True
        else:
            # Resource missing and should be absent: nothing to do.
            fetch = {}

    fetch.update({'changed': changed})

    module.exit_json(**fetch)
|
|
|
|
|
|
def create(module, link, kind):
    """POST the table described by the module params to the collection URL."""
    session = GcpSession(module, 'bigquery')
    response = session.post(link, resource_to_request(module))
    return return_if_object(module, response, kind)
|
|
|
|
|
|
def update(module, link, kind):
    """PUT the desired table state over the existing resource at *link*."""
    session = GcpSession(module, 'bigquery')
    response = session.put(link, resource_to_request(module))
    return return_if_object(module, response, kind)
|
|
|
|
|
|
def delete(module, link, kind):
    """DELETE the table resource at *link*."""
    session = GcpSession(module, 'bigquery')
    response = session.delete(link)
    return return_if_object(module, response, kind)
|
|
|
|
|
|
def resource_to_request(module):
    """Translate the Ansible module parameters into a BigQuery API request body.

    Nested parameters are converted through their generated Table* helper
    classes; unset values are dropped from the result so the API only sees
    fields the user actually provided.
    """
    params = module.params
    request = {
        u'kind': 'bigquery#table',
        u'tableReference': TableTablereference(params.get('table_reference', {}), module).to_request(),
        u'description': params.get('description'),
        u'friendlyName': params.get('friendly_name'),
        u'labels': params.get('labels'),
        u'name': params.get('name'),
        u'view': TableView(params.get('view', {}), module).to_request(),
        u'timePartitioning': TableTimepartitioning(params.get('time_partitioning', {}), module).to_request(),
        u'schema': TableSchema(params.get('schema', {}), module).to_request(),
        u'encryptionConfiguration': TableEncryptionconfiguration(params.get('encryption_configuration', {}), module).to_request(),
        u'expirationTime': params.get('expiration_time'),
        u'externalDataConfiguration': TableExternaldataconfiguration(params.get('external_data_configuration', {}), module).to_request(),
    }
    # Keep truthy values plus an explicit False; None, empty dicts and empty
    # strings are omitted entirely.
    return {key: value for key, value in request.items() if value or value is False}
|
|
|
|
|
|
def fetch_resource(module, link, kind, allow_not_found=True):
    """GET the resource at *link*; returns the parsed body, or None when absent."""
    session = GcpSession(module, 'bigquery')
    response = session.get(link)
    return return_if_object(module, response, kind, allow_not_found)
|
|
|
|
|
|
def self_link(module):
    """Return the canonical URL of this specific table resource."""
    return "https://www.googleapis.com/bigquery/v2/projects/{project}/datasets/{dataset}/tables/{name}".format(
        project=module.params['project'],
        dataset=module.params['dataset'],
        name=module.params['name'],
    )
|
|
|
|
|
|
def collection(module):
    """Return the URL of the tables collection for this project and dataset."""
    return "https://www.googleapis.com/bigquery/v2/projects/{project}/datasets/{dataset}/tables".format(
        project=module.params['project'],
        dataset=module.params['dataset'],
    )
|
|
|
|
|
|
def return_if_object(module, response, kind, allow_not_found=False):
    """Decode an HTTP response body into a dict, or fail the module.

    Returns None when the resource is absent (404, if tolerated) or the
    response has no content (204). Fails the module on HTTP errors,
    non-JSON bodies, and API-level error payloads.
    """
    status = response.status_code
    if status == 204 or (allow_not_found and status == 404):
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    # json.decoder.JSONDecodeError does not exist on older Pythons;
    # fall back to its base class ValueError there.
    except getattr(json.decoder, 'JSONDecodeError', ValueError):
        module.fail_json(msg="Invalid JSON response with error: %s" % response.text)

    errors = navigate_hash(result, ['error', 'errors'])
    if errors:
        module.fail_json(msg=errors)

    return result
|
|
|
|
|
|
def is_different(module, response):
    """Compare the user's requested state against the current API state.

    Only keys present on both sides are compared, so output-only response
    fields and unset request fields never cause a spurious diff.
    """
    request = resource_to_request(module)
    response = response_to_hash(module, response)

    # Keep only the response keys the request also specifies.
    response_vals = dict((k, v) for k, v in response.items() if k in request)

    # Keep only the request keys the response also reports.
    request_vals = dict((k, v) for k, v in request.items() if k in response)

    return GcpRequest(request_vals) != GcpRequest(response_vals)
|
|
|
|
|
|
# Remove unnecessary properties from the response.
|
|
# This is for doing comparisons with Ansible's current parameters.
|
|
def response_to_hash(module, response):
    """Map an API response onto the fields this module tracks.

    Scalar fields are copied verbatim; nested objects are normalized
    through their dedicated converter classes so the result can be
    compared against the request built from Ansible's parameters.
    """
    scalar_keys = (
        u'creationTime',
        u'description',
        u'friendlyName',
        u'id',
        u'labels',
        u'lastModifiedTime',
        u'location',
        u'name',
        u'numBytes',
        u'numLongTermBytes',
        u'numRows',
        u'type',
        u'expirationTime',
    )
    result = dict((key, response.get(key)) for key in scalar_keys)

    # Nested sub-resources each go through their own converter.
    result[u'tableReference'] = TableTablereference(response.get(u'tableReference', {}), module).from_response()
    result[u'view'] = TableView(response.get(u'view', {}), module).from_response()
    result[u'timePartitioning'] = TableTimepartitioning(response.get(u'timePartitioning', {}), module).from_response()
    result[u'streamingBuffer'] = TableStreamingbuffer(response.get(u'streamingBuffer', {}), module).from_response()
    result[u'schema'] = TableSchema(response.get(u'schema', {}), module).from_response()
    result[u'encryptionConfiguration'] = TableEncryptionconfiguration(response.get(u'encryptionConfiguration', {}), module).from_response()
    result[u'externalDataConfiguration'] = TableExternaldataconfiguration(response.get(u'externalDataConfiguration', {}), module).from_response()
    return result
|
|
|
|
|
|
class TableTablereference(object):
    """Converter for the `tableReference` sub-object.

    Translates between Ansible's snake_case params (to_request) and the
    BigQuery API's camelCase fields (from_response).
    """

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        payload = {
            u'datasetId': self.request.get('dataset_id'),
            u'projectId': self.request.get('project_id'),
            u'tableId': self.request.get('table_id'),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        payload = {
            u'datasetId': self.request.get(u'datasetId'),
            u'projectId': self.request.get(u'projectId'),
            u'tableId': self.request.get(u'tableId'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableView(object):
    """Converter for the `view` sub-object (SQL view configuration)."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        udf_resources = TableUserdefinedfunctionresourcesArray(
            self.request.get('user_defined_function_resources', []), self.module
        )
        payload = {
            u'useLegacySql': self.request.get('use_legacy_sql'),
            u'userDefinedFunctionResources': udf_resources.to_request(),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        udf_resources = TableUserdefinedfunctionresourcesArray(
            self.request.get(u'userDefinedFunctionResources', []), self.module
        )
        payload = {
            u'useLegacySql': self.request.get(u'useLegacySql'),
            u'userDefinedFunctionResources': udf_resources.from_response(),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableUserdefinedfunctionresourcesArray(object):
    """Converter for the list of user-defined function resources."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty list so iteration is always safe.
        self.request = request or []

    def to_request(self):
        return [self._request_for_item(entry) for entry in self.request]

    def from_response(self):
        return [self._response_from_item(entry) for entry in self.request]

    def _request_for_item(self, item):
        payload = {
            u'inlineCode': item.get('inline_code'),
            u'resourceUri': item.get('resource_uri'),
        }
        return remove_nones_from_dict(payload)

    def _response_from_item(self, item):
        payload = {
            u'inlineCode': item.get(u'inlineCode'),
            u'resourceUri': item.get(u'resourceUri'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableTimepartitioning(object):
    """Converter for the `timePartitioning` sub-object."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        payload = {
            u'expirationMs': self.request.get('expiration_ms'),
            u'type': self.request.get('type'),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        payload = {
            u'expirationMs': self.request.get(u'expirationMs'),
            u'type': self.request.get(u'type'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableStreamingbuffer(object):
    """Converter for the output-only `streamingBuffer` sub-object."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        payload = {
            u'estimatedBytes': self.request.get('estimated_bytes'),
            u'estimatedRows': self.request.get('estimated_rows'),
            u'oldestEntryTime': self.request.get('oldest_entry_time'),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        payload = {
            u'estimatedBytes': self.request.get(u'estimatedBytes'),
            u'estimatedRows': self.request.get(u'estimatedRows'),
            u'oldestEntryTime': self.request.get(u'oldestEntryTime'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableSchema(object):
    """Converter for the table `schema` sub-object (a list of fields)."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        fields = TableFieldsArray(self.request.get('fields', []), self.module)
        return remove_nones_from_dict({u'fields': fields.to_request()})

    def from_response(self):
        fields = TableFieldsArray(self.request.get(u'fields', []), self.module)
        return remove_nones_from_dict({u'fields': fields.from_response()})
|
|
|
|
|
|
class TableFieldsArray(object):
    """Converter for a list of schema field definitions."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty list so iteration is always safe.
        self.request = request or []

    def to_request(self):
        return [self._request_for_item(entry) for entry in self.request]

    def from_response(self):
        return [self._response_from_item(entry) for entry in self.request]

    def _request_for_item(self, item):
        payload = {
            u'description': item.get('description'),
            u'fields': item.get('fields'),
            u'mode': item.get('mode'),
            u'name': item.get('name'),
            u'type': item.get('type'),
        }
        return remove_nones_from_dict(payload)

    def _response_from_item(self, item):
        payload = {
            u'description': item.get(u'description'),
            u'fields': item.get(u'fields'),
            u'mode': item.get(u'mode'),
            u'name': item.get(u'name'),
            u'type': item.get(u'type'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableEncryptionconfiguration(object):
    """Converter for the `encryptionConfiguration` sub-object (KMS key)."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        return remove_nones_from_dict({u'kmsKeyName': self.request.get('kms_key_name')})

    def from_response(self):
        return remove_nones_from_dict({u'kmsKeyName': self.request.get(u'kmsKeyName')})
|
|
|
|
|
|
class TableExternaldataconfiguration(object):
    """Converter for the `externalDataConfiguration` sub-object.

    Scalar options are mapped directly; nested option groups (schema,
    Google Sheets, CSV, Bigtable) are delegated to their converters.
    """

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        payload = {
            u'autodetect': self.request.get('autodetect'),
            u'compression': self.request.get('compression'),
            u'ignoreUnknownValues': self.request.get('ignore_unknown_values'),
            u'maxBadRecords': self.request.get('max_bad_records'),
            u'sourceFormat': self.request.get('source_format'),
            u'sourceUris': self.request.get('source_uris'),
            u'schema': TableSchema(self.request.get('schema', {}), self.module).to_request(),
            u'googleSheetsOptions': TableGooglesheetsoptions(self.request.get('google_sheets_options', {}), self.module).to_request(),
            u'csvOptions': TableCsvoptions(self.request.get('csv_options', {}), self.module).to_request(),
            u'bigtableOptions': TableBigtableoptions(self.request.get('bigtable_options', {}), self.module).to_request(),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        payload = {
            u'autodetect': self.request.get(u'autodetect'),
            u'compression': self.request.get(u'compression'),
            u'ignoreUnknownValues': self.request.get(u'ignoreUnknownValues'),
            u'maxBadRecords': self.request.get(u'maxBadRecords'),
            u'sourceFormat': self.request.get(u'sourceFormat'),
            u'sourceUris': self.request.get(u'sourceUris'),
            u'schema': TableSchema(self.request.get(u'schema', {}), self.module).from_response(),
            u'googleSheetsOptions': TableGooglesheetsoptions(self.request.get(u'googleSheetsOptions', {}), self.module).from_response(),
            u'csvOptions': TableCsvoptions(self.request.get(u'csvOptions', {}), self.module).from_response(),
            u'bigtableOptions': TableBigtableoptions(self.request.get(u'bigtableOptions', {}), self.module).from_response(),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
# NOTE(review): the code generator emits this class twice; this second,
# identical definition rebinds the TableSchema name defined earlier in
# the file. Behavior is unchanged because both definitions match.
class TableSchema(object):
    """Converter for a `schema` sub-object (a list of fields)."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        fields = TableFieldsArray(self.request.get('fields', []), self.module)
        return remove_nones_from_dict({u'fields': fields.to_request()})

    def from_response(self):
        fields = TableFieldsArray(self.request.get(u'fields', []), self.module)
        return remove_nones_from_dict({u'fields': fields.from_response()})
|
|
|
|
|
|
# NOTE(review): the code generator emits this class twice; this second,
# identical definition rebinds the TableFieldsArray name defined earlier
# in the file. Behavior is unchanged because both definitions match.
class TableFieldsArray(object):
    """Converter for a list of schema field definitions."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty list so iteration is always safe.
        self.request = request or []

    def to_request(self):
        return [self._request_for_item(entry) for entry in self.request]

    def from_response(self):
        return [self._response_from_item(entry) for entry in self.request]

    def _request_for_item(self, item):
        payload = {
            u'description': item.get('description'),
            u'fields': item.get('fields'),
            u'mode': item.get('mode'),
            u'name': item.get('name'),
            u'type': item.get('type'),
        }
        return remove_nones_from_dict(payload)

    def _response_from_item(self, item):
        payload = {
            u'description': item.get(u'description'),
            u'fields': item.get(u'fields'),
            u'mode': item.get(u'mode'),
            u'name': item.get(u'name'),
            u'type': item.get(u'type'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableGooglesheetsoptions(object):
    """Converter for the `googleSheetsOptions` sub-object."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        return remove_nones_from_dict({u'skipLeadingRows': self.request.get('skip_leading_rows')})

    def from_response(self):
        return remove_nones_from_dict({u'skipLeadingRows': self.request.get(u'skipLeadingRows')})
|
|
|
|
|
|
class TableCsvoptions(object):
    """Converter for the `csvOptions` sub-object."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        payload = {
            u'allowJaggedRows': self.request.get('allow_jagged_rows'),
            u'allowQuotedNewlines': self.request.get('allow_quoted_newlines'),
            u'encoding': self.request.get('encoding'),
            u'fieldDelimiter': self.request.get('field_delimiter'),
            u'quote': self.request.get('quote'),
            u'skipLeadingRows': self.request.get('skip_leading_rows'),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        payload = {
            u'allowJaggedRows': self.request.get(u'allowJaggedRows'),
            u'allowQuotedNewlines': self.request.get(u'allowQuotedNewlines'),
            u'encoding': self.request.get(u'encoding'),
            u'fieldDelimiter': self.request.get(u'fieldDelimiter'),
            u'quote': self.request.get(u'quote'),
            u'skipLeadingRows': self.request.get(u'skipLeadingRows'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableBigtableoptions(object):
    """Converter for the `bigtableOptions` sub-object."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty dict so .get() lookups are always safe.
        self.request = request or {}

    def to_request(self):
        families = TableColumnfamiliesArray(self.request.get('column_families', []), self.module)
        payload = {
            u'ignoreUnspecifiedColumnFamilies': self.request.get('ignore_unspecified_column_families'),
            u'readRowkeyAsString': self.request.get('read_rowkey_as_string'),
            u'columnFamilies': families.to_request(),
        }
        return remove_nones_from_dict(payload)

    def from_response(self):
        families = TableColumnfamiliesArray(self.request.get(u'columnFamilies', []), self.module)
        payload = {
            u'ignoreUnspecifiedColumnFamilies': self.request.get(u'ignoreUnspecifiedColumnFamilies'),
            u'readRowkeyAsString': self.request.get(u'readRowkeyAsString'),
            u'columnFamilies': families.from_response(),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableColumnfamiliesArray(object):
    """Converter for the list of Bigtable column families."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty list so iteration is always safe.
        self.request = request or []

    def to_request(self):
        return [self._request_for_item(entry) for entry in self.request]

    def from_response(self):
        return [self._response_from_item(entry) for entry in self.request]

    def _request_for_item(self, item):
        payload = {
            u'columns': TableColumnsArray(item.get('columns', []), self.module).to_request(),
            u'encoding': item.get('encoding'),
            u'familyId': item.get('family_id'),
            u'onlyReadLatest': item.get('only_read_latest'),
            u'type': item.get('type'),
        }
        return remove_nones_from_dict(payload)

    def _response_from_item(self, item):
        payload = {
            u'columns': TableColumnsArray(item.get(u'columns', []), self.module).from_response(),
            u'encoding': item.get(u'encoding'),
            u'familyId': item.get(u'familyId'),
            u'onlyReadLatest': item.get(u'onlyReadLatest'),
            u'type': item.get(u'type'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
class TableColumnsArray(object):
    """Converter for the list of Bigtable columns within a family."""

    def __init__(self, request, module):
        self.module = module
        # Fall back to an empty list so iteration is always safe.
        self.request = request or []

    def to_request(self):
        return [self._request_for_item(entry) for entry in self.request]

    def from_response(self):
        return [self._response_from_item(entry) for entry in self.request]

    def _request_for_item(self, item):
        payload = {
            u'encoding': item.get('encoding'),
            u'fieldName': item.get('field_name'),
            u'onlyReadLatest': item.get('only_read_latest'),
            u'qualifierString': item.get('qualifier_string'),
            u'type': item.get('type'),
        }
        return remove_nones_from_dict(payload)

    def _response_from_item(self, item):
        payload = {
            u'encoding': item.get(u'encoding'),
            u'fieldName': item.get(u'fieldName'),
            u'onlyReadLatest': item.get(u'onlyReadLatest'),
            u'qualifierString': item.get(u'qualifierString'),
            u'type': item.get(u'type'),
        }
        return remove_nones_from_dict(payload)
|
|
|
|
|
|
# Standard Ansible module entry point: run only when executed directly.
if __name__ == '__main__':
    main()
|