#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2017, Ryan Scott Brown <ryansb@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

DOCUMENTATION = r'''
---
module: terraform
short_description: Manages a Terraform deployment (and plans)
description:
  - Provides support for deploying resources with Terraform and pulling
    resource information back into Ansible.
options:
  state:
    choices: ['planned', 'present', 'absent']
    description:
      - Goal state of the given stage/project.
    type: str
    default: present
  binary_path:
    description:
      - The path of a terraform binary to use, relative to the I(project_path)
        unless you supply an absolute path.
    type: path
  project_path:
    description:
      - The path to the root of the Terraform directory with the
        vars.tf/main.tf/etc to use.
    type: path
    required: true
  plugin_paths:
    description:
      - List of paths containing Terraform plugin executable files.
      - Plugin executables can be downloaded from U(https://releases.hashicorp.com/).
      - When set, the plugin discovery and auto-download behavior of Terraform is disabled.
      - The directory structure in the plugin path can be tricky. The Terraform docs
        U(https://learn.hashicorp.com/tutorials/terraform/automate-terraform#pre-installed-plugins)
        show a simple directory of files, but actually, the directory structure
        has to follow the same structure you would see if Terraform auto-downloaded the plugins.
        See the examples below for a tree output of an example plugin directory.
    type: list
    elements: path
    version_added: 3.0.0
  workspace:
    description:
      - The terraform workspace to work with.
    type: str
    default: default
  purge_workspace:
    description:
      - Only works with I(state=absent).
      - If true, the workspace will be deleted after the "terraform destroy" action.
      - The 'default' workspace will not be deleted.
    default: false
    type: bool
  plan_file:
    description:
      - The path to an existing Terraform plan file to apply. If this is not
        specified, Ansible will build a new TF plan and execute it.
        Note that this option is required if I(state) has the C(planned) value.
    type: path
  state_file:
    description:
      - The path to an existing Terraform state file to use when building the plan.
        If this is not specified, the default `terraform.tfstate` will be used.
      - This option is ignored when I(plan_file) is specified.
    type: path
  variables_files:
    description:
      - The path to a variables file for Terraform to fill into the TF
        configurations. This can accept a list of paths to multiple variables files.
      - Up until Ansible 2.9, this option was usable as I(variables_file).
    type: list
    elements: path
    aliases: [ 'variables_file' ]
  variables:
    description:
      - A group of key-values to override template variables or those in
        variables files.
    type: dict
  targets:
    description:
      - A list of specific resources to target in this plan/application. The
        resources selected here will also auto-include any dependencies.
    type: list
    elements: str
  lock:
    description:
      - Enable statefile locking, if you use a service that accepts locks (such
        as S3+DynamoDB) to store your statefile.
    type: bool
    default: true
  lock_timeout:
    description:
      - How long to maintain the lock on the statefile, if you use a service
        that accepts locks (such as S3+DynamoDB).
    type: int
  force_init:
    description:
      - To avoid duplicating infra, if a state file can't be found this will
        force a `terraform init`. Generally, this should be turned off unless
        you intend to provision an entirely new Terraform deployment.
    default: false
    type: bool
  overwrite_init:
    description:
      - Run init even if C(.terraform/terraform.tfstate) already exists in I(project_path).
    default: true
    type: bool
    version_added: '3.2.0'
  backend_config:
    description:
      - A group of key-values to provide at init stage to the -backend-config parameter.
    type: dict
  backend_config_files:
    description:
      - The path to a configuration file to provide at init stage to the -backend-config parameter.
        This can accept a list of paths to multiple configuration files.
    type: list
    elements: path
    version_added: '0.2.0'
  init_reconfigure:
    description:
      - Forces backend reconfiguration during init.
    default: false
    type: bool
    version_added: '1.3.0'
  check_destroy:
    description:
      - Apply only when no resources are destroyed. Note that this only prevents "destroy" actions,
        but not "destroy and re-create" actions. This option is ignored when I(state=absent).
    type: bool
    default: false
    version_added: '3.3.0'
  parallelism:
    description:
      - Restrict concurrent operations when Terraform applies the plan.
    type: int
    version_added: '3.8.0'
notes:
  - To just run a `terraform plan`, use check mode.
requirements: [ "terraform" ]
author: "Ryan Scott Brown (@ryansb)"
'''

EXAMPLES = """
- name: Basic deploy of a service
  community.general.terraform:
    project_path: '{{ project_dir }}'
    state: present
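
# Illustrative sketch (the workspace name and resource address are placeholders,
# not values required by the module): apply only selected resources inside a
# dedicated workspace.
- name: Apply only targeted resources in a non-default workspace
  community.general.terraform:
    project_path: 'project/'
    state: present
    workspace: staging
    targets:
      - aws_instance.web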

- name: Define the backend configuration at init
  community.general.terraform:
    project_path: 'project/'
    state: "{{ state }}"
    force_init: true
    backend_config:
      region: "eu-west-1"
      bucket: "some-bucket"
      key: "random.tfstate"
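
# Illustrative sketch (variable names and file paths are placeholders): combine
# inline variables with one or more variables files.
- name: Deploy with inline variables and variables files
  community.general.terraform:
    project_path: 'project/'
    state: present
    variables:
      instance_count: "2"
      environment: "staging"
    variables_files:
      - /path/to/common.tfvars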

- name: Define the backend configuration with one or more files at init
  community.general.terraform:
    project_path: 'project/'
    state: "{{ state }}"
    force_init: true
    backend_config_files:
      - /path/to/backend_config_file_1
      - /path/to/backend_config_file_2
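
# Illustrative sketch (the plan path is a placeholder): with state=planned the
# module only builds a plan and does not apply it; plan_file is required in this mode.
- name: Build a Terraform plan without applying it
  community.general.terraform:
    project_path: 'project/'
    state: planned
    plan_file: /path/to/ansible-terraform.tfplan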

- name: Disable plugin discovery and auto-download by setting plugin_paths
  community.general.terraform:
    project_path: 'project/'
    state: "{{ state }}"
    force_init: true
    plugin_paths:
      - /path/to/plugins_dir_1
      - /path/to/plugins_dir_2

### Example directory structure for plugin_paths example
# $ tree /path/to/plugins_dir_1
# /path/to/plugins_dir_1/
# └── registry.terraform.io
#     └── hashicorp
#         └── vsphere
#             ├── 1.24.0
#             │   └── linux_amd64
#             │       └── terraform-provider-vsphere_v1.24.0_x4
#             └── 1.26.0
#                 └── linux_amd64
#                     └── terraform-provider-vsphere_v1.26.0_x4
"""

RETURN = """
outputs:
  type: complex
  description: A dictionary of all the TF outputs by their assigned name. Use `.outputs.MyOutputName.value` to access the value.
  returned: on success
  sample: '{"bukkit_arn": {"sensitive": false, "type": "string", "value": "arn:aws:s3:::tf-test-bukkit"}}'
  contains:
    sensitive:
      type: bool
      returned: always
      description: Whether Terraform has marked this value as sensitive
    type:
      type: str
      returned: always
      description: The type of the value (string, int, etc)
    value:
      type: str
      returned: always
      description: The value of the output as interpolated by Terraform
stdout:
  type: str
  description: Full `terraform` command stdout, in case you want to display it or examine the event log
  returned: always
  sample: ''
command:
  type: str
  description: Full `terraform` command built by this module, in case you want to re-run the command outside the module or debug a problem.
  returned: always
  sample: terraform apply ...
"""

import os
import json
import tempfile
from ansible.module_utils.six.moves import shlex_quote

from ansible.module_utils.basic import AnsibleModule

from ansible_collections.community.general.plugins.module_utils.version import LooseVersion

module = None


def get_version(bin_path):
    extract_version = module.run_command([bin_path, 'version', '-json'])
    terraform_version = (json.loads(extract_version[1]))['terraform_version']
    return terraform_version


def preflight_validation(bin_path, project_path, version, variables_args=None, plan_file=None):
    if project_path is None or '/' not in project_path:
        module.fail_json(msg="Path for Terraform project can not be None or ''.")
    if not os.path.exists(bin_path):
        module.fail_json(msg="Path for Terraform binary '{0}' doesn't exist on this host - check the path and try again please.".format(bin_path))
    if not os.path.isdir(project_path):
        module.fail_json(msg="Path for Terraform project '{0}' doesn't exist on this host - check the path and try again please.".format(project_path))
    # Terraform 0.15 dropped the -var/-var-file options from `terraform validate`,
    # so variable arguments are only forwarded to older releases.
    if LooseVersion(version) < LooseVersion('0.15.0'):
        rc, out, err = module.run_command([bin_path, 'validate'] + variables_args, check_rc=True, cwd=project_path)
    else:
        rc, out, err = module.run_command([bin_path, 'validate'], check_rc=True, cwd=project_path)


def _state_args(state_file):
    if state_file and os.path.exists(state_file):
        return ['-state', state_file]
    if state_file and not os.path.exists(state_file):
        module.fail_json(msg='Could not find state_file "{0}", check the path and try again.'.format(state_file))
    return []


def init_plugins(bin_path, project_path, backend_config, backend_config_files, init_reconfigure, plugin_paths):
    command = [bin_path, 'init', '-input=false']
    if backend_config:
        for key, val in backend_config.items():
            command.extend([
                '-backend-config',
                shlex_quote('{0}={1}'.format(key, val))
            ])
    if backend_config_files:
        for f in backend_config_files:
            command.extend(['-backend-config', f])
    if init_reconfigure:
        command.extend(['-reconfigure'])
    if plugin_paths:
        for plugin_path in plugin_paths:
            command.extend(['-plugin-dir', plugin_path])
    rc, out, err = module.run_command(command, check_rc=True, cwd=project_path)


def get_workspace_context(bin_path, project_path):
    workspace_ctx = {"current": "default", "all": []}
    command = [bin_path, 'workspace', 'list', '-no-color']
    rc, out, err = module.run_command(command, cwd=project_path)
    if rc != 0:
        module.warn("Failed to list Terraform workspaces:\r\n{0}".format(err))
    for item in out.split('\n'):
        stripped_item = item.strip()
        if not stripped_item:
            continue
        elif stripped_item.startswith('* '):
            workspace_ctx["current"] = stripped_item.replace('* ', '')
        else:
            workspace_ctx["all"].append(stripped_item)
    return workspace_ctx


def _workspace_cmd(bin_path, project_path, action, workspace):
    command = [bin_path, 'workspace', action, workspace, '-no-color']
    rc, out, err = module.run_command(command, check_rc=True, cwd=project_path)
    return rc, out, err


def create_workspace(bin_path, project_path, workspace):
    _workspace_cmd(bin_path, project_path, 'new', workspace)


def select_workspace(bin_path, project_path, workspace):
    _workspace_cmd(bin_path, project_path, 'select', workspace)


def remove_workspace(bin_path, project_path, workspace):
    _workspace_cmd(bin_path, project_path, 'delete', workspace)


def build_plan(command, project_path, variables_args, state_file, targets, state, apply_args, plan_path=None):
    if plan_path is None:
        f, plan_path = tempfile.mkstemp(suffix='.tfplan')

    local_command = command[:]

    plan_command = [command[0], 'plan']

    if state == "planned":
        for c in local_command[1:]:
            plan_command.append(c)

    if state == "present":
        for a in apply_args:
            local_command.remove(a)
        for c in local_command[1:]:
            plan_command.append(c)
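
    # `-detailed-exitcode` makes `terraform plan` exit 0 when there is nothing to
    # change and 2 when changes are pending; the exit-code handling below turns
    # that into the "changes needed" flag returned to the caller.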
    plan_command.extend(['-input=false', '-no-color', '-detailed-exitcode', '-out', plan_path])

    for t in targets:
        plan_command.extend(['-target', t])

    plan_command.extend(_state_args(state_file))

    rc, out, err = module.run_command(plan_command + variables_args, cwd=project_path)

    if rc == 0:
        # no changes
        return plan_path, False, out, err, plan_command if state == 'planned' else command
    elif rc == 1:
        # failure to plan
        module.fail_json(msg='Terraform plan could not be created\r\nSTDOUT: {0}\r\n\r\nSTDERR: {1}'.format(out, err))
    elif rc == 2:
        # changes, but successful
        return plan_path, True, out, err, plan_command if state == 'planned' else command

    module.fail_json(msg='Terraform plan failed with unexpected exit code {0}. \r\nSTDOUT: {1}\r\n\r\nSTDERR: {2}'.format(rc, out, err))


def main():
    global module
    module = AnsibleModule(
        argument_spec=dict(
            project_path=dict(required=True, type='path'),
            binary_path=dict(type='path'),
            plugin_paths=dict(type='list', elements='path'),
            workspace=dict(type='str', default='default'),
            purge_workspace=dict(type='bool', default=False),
            state=dict(default='present', choices=['present', 'absent', 'planned']),
            variables=dict(type='dict'),
            variables_files=dict(aliases=['variables_file'], type='list', elements='path'),
            plan_file=dict(type='path'),
            state_file=dict(type='path'),
            targets=dict(type='list', elements='str', default=[]),
            lock=dict(type='bool', default=True),
            lock_timeout=dict(type='int'),
            force_init=dict(type='bool', default=False),
            backend_config=dict(type='dict'),
            backend_config_files=dict(type='list', elements='path'),
            init_reconfigure=dict(type='bool', default=False),
            overwrite_init=dict(type='bool', default=True),
            check_destroy=dict(type='bool', default=False),
            parallelism=dict(type='int'),
        ),
        required_if=[('state', 'planned', ['plan_file'])],
        supports_check_mode=True,
    )

    project_path = module.params.get('project_path')
    bin_path = module.params.get('binary_path')
    plugin_paths = module.params.get('plugin_paths')
    workspace = module.params.get('workspace')
    purge_workspace = module.params.get('purge_workspace')
    state = module.params.get('state')
    variables = module.params.get('variables') or {}
    variables_files = module.params.get('variables_files')
    plan_file = module.params.get('plan_file')
    state_file = module.params.get('state_file')
    force_init = module.params.get('force_init')
    backend_config = module.params.get('backend_config')
    backend_config_files = module.params.get('backend_config_files')
    init_reconfigure = module.params.get('init_reconfigure')
    overwrite_init = module.params.get('overwrite_init')
    check_destroy = module.params.get('check_destroy')

    if bin_path is not None:
        command = [bin_path]
    else:
        command = [module.get_bin_path('terraform', required=True)]

    checked_version = get_version(command[0])
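
    # Pick the CLI argument sets per Terraform release: binaries older than 0.15
    # still use the legacy `-force` / `-auto-approve=true` spellings, while newer
    # ones take plain `-auto-approve` for both apply and destroy.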
    if LooseVersion(checked_version) < LooseVersion('0.15.0'):
        DESTROY_ARGS = ('destroy', '-no-color', '-force')
        APPLY_ARGS = ('apply', '-no-color', '-input=false', '-auto-approve=true')
    else:
        DESTROY_ARGS = ('destroy', '-no-color', '-auto-approve')
        APPLY_ARGS = ('apply', '-no-color', '-input=false', '-auto-approve')
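
    # Run `terraform init` only when requested, and skip it when a previous init
    # is detected (.terraform/terraform.tfstate exists) and overwrite_init is false.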
    if force_init:
        if overwrite_init or not os.path.isfile(os.path.join(project_path, ".terraform", "terraform.tfstate")):
            init_plugins(command[0], project_path, backend_config, backend_config_files, init_reconfigure, plugin_paths)
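
    # Record the workspace that is currently selected and switch to (or create)
    # the requested one; the original workspace is restored near the end of the run.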
    workspace_ctx = get_workspace_context(command[0], project_path)
    if workspace_ctx["current"] != workspace:
        if workspace not in workspace_ctx["all"]:
            create_workspace(command[0], project_path, workspace)
        else:
            select_workspace(command[0], project_path, workspace)

    if state == 'present':
        command.extend(APPLY_ARGS)
    elif state == 'absent':
        command.extend(DESTROY_ARGS)

    if state == 'present' and module.params.get('parallelism') is not None:
        command.append('-parallelism=%d' % module.params.get('parallelism'))

    variables_args = []
    for k, v in variables.items():
        variables_args.extend([
            '-var',
            '{0}={1}'.format(k, v)
        ])
    if variables_files:
        for f in variables_files:
            variables_args.extend(['-var-file', f])

    preflight_validation(command[0], project_path, checked_version, variables_args)

    if module.params.get('lock') is not None:
        if module.params.get('lock'):
            command.append('-lock=true')
        else:
            command.append('-lock=false')
    if module.params.get('lock_timeout') is not None:
        command.append('-lock-timeout=%ds' % module.params.get('lock_timeout'))

    for t in (module.params.get('targets') or []):
        command.extend(['-target', t])

    # we aren't sure if this plan will result in changes, so assume yes
    needs_application, changed = True, False

    out, err = '', ''

    if state == 'absent':
        command.extend(variables_args)
    elif state == 'present' and plan_file:
        if any([os.path.isfile(project_path + "/" + plan_file), os.path.isfile(plan_file)]):
            command.append(plan_file)
        else:
            module.fail_json(msg='Could not find plan_file "{0}", check the path and try again.'.format(plan_file))
    else:
        plan_file, needs_application, out, err, command = build_plan(command, project_path, variables_args, state_file,
                                                                     module.params.get('targets'), state, APPLY_ARGS, plan_file)
        if state == 'present' and check_destroy and '- destroy' in out:
            module.fail_json(msg="Aborting command because it would destroy some resources. "
                                 "Consider setting 'check_destroy' to false to suppress this error")
        command.append(plan_file)

    if needs_application and not module.check_mode and state != 'planned':
        rc, out, err = module.run_command(command, check_rc=False, cwd=project_path)
        if rc != 0:
            if workspace_ctx["current"] != workspace:
                select_workspace(command[0], project_path, workspace_ctx["current"])
            module.fail_json(msg=err.rstrip(), rc=rc, stdout=out,
                             stdout_lines=out.splitlines(), stderr=err,
                             stderr_lines=err.splitlines(),
                             cmd=' '.join(command))
        # check the command output to decide whether any changes were made during execution
        if (' 0 added, 0 changed' not in out and state != "absent") or ' 0 destroyed' not in out:
            changed = True

    outputs_command = [command[0], 'output', '-no-color', '-json'] + _state_args(state_file)
    rc, outputs_text, outputs_err = module.run_command(outputs_command, cwd=project_path)
    if rc == 1:
        module.warn("Could not get Terraform outputs. This usually means none have been defined.\nstdout: {0}\nstderr: {1}".format(outputs_text, outputs_err))
        outputs = {}
    elif rc != 0:
        module.fail_json(
            msg="Failure when getting Terraform outputs. "
                "Exited {0}.\nstdout: {1}\nstderr: {2}".format(rc, outputs_text, outputs_err),
            command=' '.join(outputs_command))
    else:
        outputs = json.loads(outputs_text)

    # Restore the Terraform workspace found when running the module
    if workspace_ctx["current"] != workspace:
        select_workspace(command[0], project_path, workspace_ctx["current"])
    if state == 'absent' and workspace != 'default' and purge_workspace is True:
        remove_workspace(command[0], project_path, workspace)

    module.exit_json(changed=changed, state=state, workspace=workspace, outputs=outputs, stdout=out, stderr=err, command=' '.join(command))


if __name__ == '__main__':
    main()