From cef9e4289656520e1e7b6e1c30bb14762cb899a4 Mon Sep 17 00:00:00 2001
From: Joseph Callen
Date: Fri, 5 Feb 2016 14:48:12 -0500
Subject: [PATCH] Resolves issue with vmware_host module for v2.0

When this module was written back in May 2015 we were using Ansible 1.9.x.
Being lazy, I stashed the vSphere objects that the other functions would
need in the module params. In 2.0, exit_json tries to jsonify those complex
objects and fails. This PR resolves that issue for the vmware_host module by
moving the module logic into a VMwareHost class that keeps those objects as
instance attributes instead of module params.

@kamsz reported this issue in https://github.com/ansible/ansible-modules-extras/pull/1568

Playbook
```
- name: Add Host
  local_action:
    module: vmware_host
    hostname: "{{ mgmt_ip_address }}"
    username: "{{ vcsa_user }}"
    password: "{{ vcsa_pass }}"
    datacenter_name: "{{ mgmt_vdc }}"
    cluster_name: "{{ mgmt_cluster }}"
    esxi_hostname: "{{ hostvars[item].hostname }}"
    esxi_username: "{{ esxi_username }}"
    esxi_password: "{{ site_passwd }}"
    state: present
  with_items: groups['foundation_esxi']
```

Module Testing
```
TASK [Add Host] ****************************************************************
task path: /opt/autodeploy/projects/emmet/site_deploy.yml:214
ESTABLISH LOCAL CONNECTION FOR USER: root
localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937 )" )
localhost PUT /tmp/tmppmr9i9 TO /root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/vmware_host
localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/vmware_host; rm -rf "/root/.ansible/tmp/ansible-tmp-1454693866.1-87710459703937/" > /dev/null 2>&1
localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834 )" )
localhost PUT /tmp/tmpVB81f2 TO /root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/vmware_host
localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/vmware_host; rm -rf "/root/.ansible/tmp/ansible-tmp-1454693943.8-75870536677834/" > /dev/null 2>&1
localhost EXEC ( umask 22 && mkdir -p "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563 )" && echo "$( echo $HOME/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563 )" )
localhost PUT /tmp/tmpFB7VQB TO /root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/vmware_host
localhost EXEC LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/vmware_host; rm -rf "/root/.ansible/tmp/ansible-tmp-1454693991.56-163414752982563/" > /dev/null 2>&1
changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-01) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp001", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-01", "result": "'vim.HostSystem:host-15'"}
changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-02) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp002", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-02", "result": "'vim.HostSystem:host-20'"}
changed: [foundation-vcsa -> localhost] => (item=foundation-esxi-03) => {"changed": true, "invocation": {"module_args": {"cluster_name": "Foundation", "datacenter_name": "Test-Lab", "esxi_hostname": "cscesxtmp003", "esxi_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "esxi_username": "root", "hostname": "172.27.0.100", "password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER", "state": "present", "username": "root"}, "module_name": "vmware_host"}, "item": "foundation-esxi-03", "result": "'vim.HostSystem:host-21'"}
```
---
 .../extras/cloud/vmware/vmware_host.py             | 199 +++++++++---------
 1 file changed, 98 insertions(+), 101 deletions(-)

diff --git a/lib/ansible/modules/extras/cloud/vmware/vmware_host.py b/lib/ansible/modules/extras/cloud/vmware/vmware_host.py
index dba7ce9a11..dd8e2f9eed 100644
--- a/lib/ansible/modules/extras/cloud/vmware/vmware_host.py
+++ b/lib/ansible/modules/extras/cloud/vmware/vmware_host.py
@@ -87,102 +87,118 @@ except ImportError:
     HAS_PYVMOMI = False
 
 
-def find_host_by_cluster_datacenter(module):
-    datacenter_name = module.params['datacenter_name']
-    cluster_name = module.params['cluster_name']
-    content = module.params['content']
-    esxi_hostname = module.params['esxi_hostname']
+class VMwareHost(object):
+    def __init__(self, module):
+        self.module = module
+        self.datacenter_name = module.params['datacenter_name']
+        self.cluster_name = module.params['cluster_name']
+        self.esxi_hostname = module.params['esxi_hostname']
+        self.esxi_username = module.params['esxi_username']
+        self.esxi_password = module.params['esxi_password']
+        self.state = module.params['state']
+        self.dc = None
+        self.cluster = None
+        self.host = None
+        self.content = connect_to_api(module)
-    dc = find_datacenter_by_name(content, datacenter_name)
-    cluster = find_cluster_by_name_datacenter(dc, cluster_name)
+    def process_state(self):
+        try:
+            # Currently state_update_dvs is not implemented.
+            host_states = {
+                'absent': {
+                    'present': self.state_remove_host,
+                    'absent': self.state_exit_unchanged,
+                },
+                'present': {
+                    'present': self.state_exit_unchanged,
+                    'absent': self.state_add_host,
+                }
+            }
-    for host in cluster.host:
-        if host.name == esxi_hostname:
-            return host, cluster
+            host_states[self.state][self.check_host_state()]()
-    return None, cluster
+        except vmodl.RuntimeFault as runtime_fault:
+            self.module.fail_json(msg=runtime_fault.msg)
+        except vmodl.MethodFault as method_fault:
+            self.module.fail_json(msg=method_fault.msg)
+        except Exception as e:
+            self.module.fail_json(msg=str(e))
+    def find_host_by_cluster_datacenter(self):
+        self.dc = find_datacenter_by_name(self.content, self.datacenter_name)
+        self.cluster = find_cluster_by_name_datacenter(self.dc, self.cluster_name)
-def add_host_to_vcenter(module):
-    cluster = module.params['cluster']
+        for host in self.cluster.host:
+            if host.name == self.esxi_hostname:
+                return host, self.cluster
-    host_connect_spec = vim.host.ConnectSpec()
-    host_connect_spec.hostName = module.params['esxi_hostname']
-    host_connect_spec.userName = module.params['esxi_username']
-    host_connect_spec.password = module.params['esxi_password']
-    host_connect_spec.force = True
-    host_connect_spec.sslThumbprint = ""
-    as_connected = True
-    esxi_license = None
-    resource_pool = None
+        return None, self.cluster
-    try:
-        task = cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
+    def add_host_to_vcenter(self):
+        host_connect_spec = vim.host.ConnectSpec()
+        host_connect_spec.hostName = self.esxi_hostname
+        host_connect_spec.userName = self.esxi_username
+        host_connect_spec.password = self.esxi_password
+        host_connect_spec.force = True
+        host_connect_spec.sslThumbprint = ""
+        as_connected = True
+        esxi_license = None
+        resource_pool = None
+
+        try:
+            task = self.cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
+            success, result = wait_for_task(task)
+            return success, result
+        except TaskError as add_task_error:
+            # This is almost certain to fail the first time.
+            # In order to get the sslThumbprint we first connect
+            # get the vim.fault.SSLVerifyFault then grab the sslThumbprint
+            # from that object.
+            #
+            # args is a tuple, selecting the first tuple
+            ssl_verify_fault = add_task_error.args[0]
+            host_connect_spec.sslThumbprint = ssl_verify_fault.thumbprint
+
+            task = self.cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
             success, result = wait_for_task(task)
             return success, result
-    except TaskError as add_task_error:
-        # This is almost certain to fail the first time.
-        # In order to get the sslThumbprint we first connect
-        # get the vim.fault.SSLVerifyFault then grab the sslThumbprint
-        # from that object.
-        #
-        # args is a tuple, selecting the first tuple
-        ssl_verify_fault = add_task_error.args[0]
-        host_connect_spec.sslThumbprint = ssl_verify_fault.thumbprint
-        task = cluster.AddHost_Task(host_connect_spec, as_connected, resource_pool, esxi_license)
-        success, result = wait_for_task(task)
-        return success, result
+    def state_exit_unchanged(self):
+        self.module.exit_json(changed=False)
+    def state_remove_host(self):
+        changed = True
+        result = None
+        if not self.module.check_mode:
+            if not self.host.runtime.inMaintenanceMode:
+                maintenance_mode_task = self.host.EnterMaintenanceMode_Task(300, True, None)
+                changed, result = wait_for_task(maintenance_mode_task)
-def state_exit_unchanged(module):
-    module.exit_json(changed=False)
+            if changed:
+                task = self.host.Destroy_Task()
+                changed, result = wait_for_task(task)
+            else:
+                raise Exception(result)
+        self.module.exit_json(changed=changed, result=str(result))
+    def state_update_host(self):
+        self.module.exit_json(changed=False, msg="Currently not implemented.")
-def state_remove_host(module):
-    host = module.params['host']
-    changed = True
-    result = None
-    if not module.check_mode:
-        if not host.runtime.inMaintenanceMode:
-            maintenance_mode_task = host.EnterMaintenanceMode_Task(300, True, None)
-            changed, result = wait_for_task(maintenance_mode_task)
+    def state_add_host(self):
+        changed = True
+        result = None
-        if changed:
-            task = host.Destroy_Task()
-            changed, result = wait_for_task(task)
+        if not self.module.check_mode:
+            changed, result = self.add_host_to_vcenter()
+        self.module.exit_json(changed=changed, result=str(result))
+
+    def check_host_state(self):
+        self.host, self.cluster = self.find_host_by_cluster_datacenter()
+
+        if self.host is None:
+            return 'absent'
         else:
-            raise Exception(result)
-    module.exit_json(changed=changed, result=str(result))
-
-
-def state_update_host(module):
-    module.exit_json(changed=False, msg="Currently not implemented.")
-
-
-def state_add_host(module):
-
-    changed = True
-    result = None
-
-    if not module.check_mode:
-        changed, result = add_host_to_vcenter(module)
-    module.exit_json(changed=changed, result=str(result))
-
-
-def check_host_state(module):
-
-    content = connect_to_api(module)
-    module.params['content'] = content
-
-    host, cluster = find_host_by_cluster_datacenter(module)
-
-    module.params['cluster'] = cluster
-    if host is None:
-        return 'absent'
-    else:
-        module.params['host'] = host
-        return 'present'
+            return 'present'
 
 
 def main():
@@ -199,27 +215,8 @@ def main():
     if not HAS_PYVMOMI:
         module.fail_json(msg='pyvmomi is required for this module')
 
-    try:
-        # Currently state_update_dvs is not implemented.
-        host_states = {
-            'absent': {
-                'present': state_remove_host,
-                'absent': state_exit_unchanged,
-            },
-            'present': {
-                'present': state_exit_unchanged,
-                'absent': state_add_host,
-            }
-        }
-
-        host_states[module.params['state']][check_host_state(module)](module)
-
-    except vmodl.RuntimeFault as runtime_fault:
-        module.fail_json(msg=runtime_fault.msg)
-    except vmodl.MethodFault as method_fault:
-        module.fail_json(msg=method_fault.msg)
-    except Exception as e:
-        module.fail_json(msg=str(e))
+    vmware_host = VMwareHost(module)
+    vmware_host.process_state()
 
 from ansible.module_utils.vmware import *
 from ansible.module_utils.basic import *
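
For anyone reviewing this without a vCenter handy, here is a minimal, self-contained sketch of the failure mode described above. It does not use pyVmomi or AnsibleModule; FakeHostSystem, exit_json_like and VMwareHostSketch are invented stand-ins for illustration only. The first half mimics the 1.9-era pattern of stashing a managed object in the params, which 2.0 then tries to jsonify as part of the echoed invocation; the second half shows the class-attribute pattern this patch switches to, where only str(result) reaches the JSON output.
```
import json


class FakeHostSystem(object):
    """Stand-in for a pyVmomi managed object such as vim.HostSystem."""
    def __init__(self, moid):
        self.moid = moid

    def __repr__(self):
        return "'vim.HostSystem:%s'" % self.moid


def exit_json_like(**result):
    """Rough stand-in for exit_json: the result must be JSON-serializable."""
    return json.dumps(result)


# Pre-2.0 pattern: smuggle the live object into the params dict.
params = {'esxi_hostname': 'cscesxtmp001'}
params['host'] = FakeHostSystem('host-15')

try:
    # In 2.0 the invocation (module args) is echoed back in the result,
    # so the non-serializable object reaches the JSON encoder and fails.
    exit_json_like(changed=True, invocation={'module_args': params})
except TypeError as e:
    print("jsonify failed as expected: %s" % e)


# Pattern used by this patch: keep the live object on the class instance
# and put only plain, JSON-friendly values into the result.
class VMwareHostSketch(object):
    def __init__(self, params):
        self.esxi_hostname = params['esxi_hostname']
        self.host = FakeHostSystem('host-15')

    def state_add_host(self):
        return exit_json_like(changed=True, result=str(self.host))


print(VMwareHostSketch({'esxi_hostname': 'cscesxtmp001'}).state_add_host())
```
Running the sketch should print the TypeError from the first half, then a result containing the same "'vim.HostSystem:host-15'" string that appears in the task output above.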