#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------

from __future__ import absolute_import, division, print_function

__metaclass__ = type

################################################################################
# Documentation
################################################################################

DOCUMENTATION = '''
---
module: gcp_bigquery_dataset_info
description:
- Gather info for GCP Dataset
short_description: Gather info for GCP Dataset
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
  project:
    description:
    - The Google Cloud Platform project to use.
    type: str
  auth_kind:
    description:
    - The type of credential used.
    type: str
    required: true
    choices:
    - application
    - machineaccount
    - serviceaccount
  service_account_contents:
    description:
    - The contents of a Service Account JSON file, either in a dictionary or as a
      JSON string that represents it.
    type: jsonarg
  service_account_file:
    description:
    - The path of a Service Account JSON file if serviceaccount is selected as type.
    type: path
  service_account_email:
    description:
    - An optional service account email address if machineaccount is selected and
      the user does not wish to use the default email.
    type: str
  scopes:
    description:
    - Array of scopes to be used
    type: list
  env_type:
    description:
    - Specifies which Ansible environment you're running this module within.
    - This should not be set unless you know what you're doing.
    - This only alters the User Agent string for any API requests.
    type: str
notes:
- For authentication, you can set service_account_file using the C(GCP_SERVICE_ACCOUNT_FILE)
  env variable.
- For authentication, you can set service_account_contents using the C(GCP_SERVICE_ACCOUNT_CONTENTS)
  env variable.
- For authentication, you can set service_account_email using the C(GCP_SERVICE_ACCOUNT_EMAIL)
  env variable.
- For authentication, you can set auth_kind using the C(GCP_AUTH_KIND) env variable.
- For authentication, you can set scopes using the C(GCP_SCOPES) env variable.
- Environment variable values will only be used if the playbook values are not set.
- The I(service_account_email) and I(service_account_file) options are mutually exclusive.
'''

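# Illustrative only (not part of the generated module): the environment-variable
# authentication described in the notes above could be set up roughly like this
# before running a playbook; the values are placeholders.
#
#   export GCP_AUTH_KIND=serviceaccount
#   export GCP_SERVICE_ACCOUNT_FILE=/tmp/auth.pem
#   export GCP_SCOPES=https://www.googleapis.com/auth/bigquery
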
EXAMPLES = '''
- name: get info on a dataset
  gcp_bigquery_dataset_info:
    project: test_project
    auth_kind: serviceaccount
    service_account_file: "/tmp/auth.pem"
'''

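# A minimal sketch (not part of the generated module) of how the gathered data
# might be consumed in a playbook; it assumes the example task above is registered
# as "dataset_info" and uses field names documented in the RETURN block below.
#
#   - name: get info on a dataset
#     gcp_bigquery_dataset_info:
#       project: test_project
#       auth_kind: serviceaccount
#       service_account_file: "/tmp/auth.pem"
#     register: dataset_info
#
#   - name: show each dataset id
#     debug:
#       msg: "{{ item.datasetReference.datasetId }}"
#     loop: "{{ dataset_info.resources }}"
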
RETURN = '''
resources:
  description: List of resources
  returned: always
  type: complex
  contains:
    name:
      description:
      - Dataset name.
      returned: success
      type: str
    access:
      description:
      - An array of objects that define dataset access for one or more entities.
      returned: success
      type: complex
      contains:
        domain:
          description:
          - A domain to grant access to. Any users signed in with the domain specified
            will be granted the specified access.
          returned: success
          type: str
        groupByEmail:
          description:
          - An email address of a Google Group to grant access to.
          returned: success
          type: str
        role:
          description:
          - Describes the rights granted to the user specified by the other member
            of the access object. Primitive, Predefined and custom roles are supported.
            Predefined roles that have equivalent primitive roles are swapped by the
            API to their Primitive counterparts, and will show a diff post-create.
            See [official docs](U(https://cloud.google.com/bigquery/docs/access-control)).
          returned: success
          type: str
        specialGroup:
          description:
          - A special group to grant access to.
          - 'Possible values include: * `projectOwners`: Owners of the enclosing project.'
          - "* `projectReaders`: Readers of the enclosing project."
          - "* `projectWriters`: Writers of the enclosing project."
          - "* `allAuthenticatedUsers`: All authenticated BigQuery users."
          returned: success
          type: str
        userByEmail:
          description:
          - 'An email address of a user to grant access to. For example: fred@example.com.'
          returned: success
          type: str
        view:
          description:
          - A view from a different dataset to grant access to. Queries executed against
            that view will have read access to tables in this dataset. The role field
            is not required when this field is set. If that view is updated by any
            user, access to the view needs to be granted again via an update operation.
          returned: success
          type: complex
          contains:
            datasetId:
              description:
              - The ID of the dataset containing this table.
              returned: success
              type: str
            projectId:
              description:
              - The ID of the project containing this table.
              returned: success
              type: str
            tableId:
              description:
              - The ID of the table. The ID must contain only letters (a-z, A-Z),
                numbers (0-9), or underscores. The maximum length is 1,024 characters.
              returned: success
              type: str
    creationTime:
      description:
      - The time when this dataset was created, in milliseconds since the epoch.
      returned: success
      type: int
    datasetReference:
      description:
      - A reference that identifies the dataset.
      returned: success
      type: complex
      contains:
        datasetId:
          description:
          - A unique ID for this dataset, without the project name. The ID must contain
            only letters (a-z, A-Z), numbers (0-9), or underscores. The maximum length
            is 1,024 characters.
          returned: success
          type: str
        projectId:
          description:
          - The ID of the project containing this dataset.
          returned: success
          type: str
    defaultTableExpirationMs:
      description:
      - The default lifetime of all tables in the dataset, in milliseconds.
      - The minimum value is 3600000 milliseconds (one hour).
      - Once this property is set, all newly-created tables in the dataset will have
        an `expirationTime` property set to the creation time plus the value in this
        property, and changing the value will only affect new tables, not existing
        ones. When the `expirationTime` for a given table is reached, that table will
        be deleted automatically.
      - If a table's `expirationTime` is modified or removed before the table expires,
        or if you provide an explicit `expirationTime` when creating a table, that
        value takes precedence over the default expiration time indicated by this
        property.
      returned: success
      type: int
    defaultPartitionExpirationMs:
      description:
      - The default partition expiration for all partitioned tables in the dataset,
        in milliseconds.
      - Once this property is set, all newly-created partitioned tables in the dataset
        will have an `expirationMs` property in the `timePartitioning` settings set
        to this value, and changing the value will only affect new tables, not existing
        ones. The storage in a partition will have an expiration time of its partition
        time plus this value.
      - 'Setting this property overrides the use of `defaultTableExpirationMs` for
        partitioned tables: only one of `defaultTableExpirationMs` and `defaultPartitionExpirationMs`
        will be used for any new partitioned table. If you provide an explicit `timePartitioning.expirationMs`
        when creating or updating a partitioned table, that value takes precedence
        over the default partition expiration time indicated by this property.'
      returned: success
      type: int
    description:
      description:
      - A user-friendly description of the dataset.
      returned: success
      type: str
    etag:
      description:
      - A hash of the resource.
      returned: success
      type: str
    friendlyName:
      description:
      - A descriptive name for the dataset.
      returned: success
      type: str
    id:
      description:
      - The fully-qualified unique name of the dataset in the format projectId:datasetId.
        The dataset name without the project name is given in the datasetId field.
      returned: success
      type: str
    labels:
      description:
      - The labels associated with this dataset. You can use these to organize and
        group your datasets.
      returned: success
      type: dict
    lastModifiedTime:
      description:
      - The date when this dataset or any of its tables was last modified, in milliseconds
        since the epoch.
      returned: success
      type: int
    location:
      description:
      - The geographic location where the dataset should reside.
      - See [official docs](U(https://cloud.google.com/bigquery/docs/dataset-locations)).
      - There are two types of locations, regional or multi-regional. A regional location
        is a specific geographic place, such as Tokyo, and a multi-regional location
        is a large geographic area, such as the United States, that contains at least
        two geographic places.
      - 'Possible regional values include: `asia-east1`, `asia-northeast1`, `asia-southeast1`,
        `australia-southeast1`, `europe-north1`, `europe-west2` and `us-east4`.'
      - 'Possible multi-regional values: `EU` and `US`.'
      - The default value is multi-regional location `US`.
      - Changing this forces a new resource to be created.
      returned: success
      type: str
    defaultEncryptionConfiguration:
      description:
      - The default encryption key for all tables in the dataset. Once this property
        is set, all newly-created partitioned tables in the dataset will have encryption
        key set to this value, unless table creation request (or query) overrides
        the key.
      returned: success
      type: complex
      contains:
        kmsKeyName:
          description:
          - Describes the Cloud KMS encryption key that will be used to protect destination
            BigQuery table. The BigQuery Service Account associated with your project
            requires access to this encryption key.
          returned: success
          type: str
'''

################################################################################
# Imports
################################################################################

from ansible_collections.google.cloud.plugins.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json

################################################################################
# Main
################################################################################

def main():
    # GcpModule supplies the shared GCP auth options (project, auth_kind, scopes,
    # service account settings); this info module adds no arguments of its own.
    module = GcpModule(argument_spec=dict())

    # Default to the BigQuery scope when the caller has not supplied any scopes.
    if not module.params['scopes']:
        module.params['scopes'] = ['https://www.googleapis.com/auth/bigquery']

    return_value = {'resources': fetch_list(module, collection(module))}
    module.exit_json(**return_value)

def collection(module):
    # URL for listing all datasets in the configured project.
    return "https://www.googleapis.com/bigquery/v2/projects/{project}/datasets".format(**module.params)

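# fetch_list (below) hands the collection URL to GcpSession.list together with the
# return_if_object callback. GcpSession.list is expected to follow the API's page
# tokens and concatenate the per-page 'datasets' arrays into the single list that
# main() returns under the 'resources' key.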
def fetch_list(module, link):
    auth = GcpSession(module, 'bigquery')
    return auth.list(link, return_if_object, array_name='datasets')

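# return_if_object (below) validates each HTTP response before its JSON is used.
# The error check assumes the standard Google JSON error envelope, roughly:
#
#   {"error": {"code": 403, "message": "...", "errors": [{"reason": "...", "message": "..."}]}}
#
# navigate_hash(result, ['error', 'errors']) walks that nested structure, and the
# module fails with the inner error list when it is present.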
def return_if_object(module, response):
    # If not found, return nothing.
    if response.status_code == 404:
        return None

    # If no content, return nothing.
    if response.status_code == 204:
        return None

    try:
        module.raise_for_status(response)
        result = response.json()
    except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
        module.fail_json(msg="Invalid JSON response with error: %s" % inst)

    if navigate_hash(result, ['error', 'errors']):
        module.fail_json(msg=navigate_hash(result, ['error', 'errors']))

    return result

if __name__ == "__main__":
    main()