mirror of https://github.com/ansible-collections/community.general.git
synced 2024-09-14 20:13:21 +02:00

Merge pull request #12226 from mgedmin/py3k

Replace .iteritems() with six.iteritems()

commit ce97874523
28 changed files with 81 additions and 41 deletions
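Every hunk below applies the same compatibility pattern, so here is a minimal standalone sketch of it (the dictionary and its values are invented for illustration, not taken from the diff): dict.iteritems() exists only on Python 2, where it returns a lazy iterator; Python 3 removed it, so six.iteritems(d) is used to get an items iterator on either interpreter.

    # Hypothetical example, not taken from the diff: iterate a dict's items
    # portably across Python 2 and Python 3.
    from six import iteritems

    settings = {'retries': 3, 'timeout': 10}

    # Python 2 only -- raises AttributeError on Python 3:
    #     for key, value in settings.iteritems():

    # Portable spelling used throughout this commit:
    for key, value in iteritems(settings):
        print('%s=%s' % (key, value))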
@@ -37,6 +37,7 @@ import re
 from time import time
 import ConfigParser

+from six import iteritems
 from libcloud.compute.types import Provider
 from libcloud.compute.providers import get_driver
 import libcloud.security as sec
@@ -268,7 +269,7 @@ class LibcloudInventory(object):
         elif key == 'ec2_region':
             instance_vars[key] = value.name
         elif key == 'ec2_tags':
-            for k, v in value.iteritems():
+            for k, v in iteritems(value):
                 key = self.to_safe('ec2_tag_' + k)
                 instance_vars[key] = v
         elif key == 'ec2_groups':

@@ -72,6 +72,8 @@ try:
 except ImportError:
     import simplejson as json

+from six import iteritems
+
 # NOTE -- this file assumes Ansible is being accessed FROM the cobbler
 # server, so it does not attempt to login with a username and password.
 # this will be addressed in a future version of this script.
@@ -171,7 +173,7 @@ class CobblerInventory(object):
             interfaces = host['interfaces']
             # hostname is often empty for non-static IP hosts
             if dns_name == '':
-                for (iname, ivalue) in interfaces.iteritems():
+                for (iname, ivalue) in iteritems(interfaces):
                     if ivalue['management'] or not ivalue['static']:
                         this_dns_name = ivalue.get('dns_name', None)
                         if this_dns_name is not None and this_dns_name is not "":
@@ -203,7 +205,7 @@ class CobblerInventory(object):

             self.cache[dns_name] = host
             if "ks_meta" in host:
-                for key, value in host["ks_meta"].iteritems():
+                for key, value in iteritems(host["ks_meta"]):
                     self.cache[dns_name][key] = value

         self.write_to_cache(self.cache, self.cache_path_cache)

@@ -83,6 +83,8 @@ try:
 except ImportError:
     import simplejson as json

+from six import iteritems
+

 class CollinsDefaults(object):
     ASSETS_API_ENDPOINT = '%s/api/assets'
@@ -174,7 +176,7 @@ class CollinsInventory(object):
         # the CQL search feature as described here:
         # http://tumblr.github.io/collins/recipes.html
         attributes_query = [ '='.join(attr_pair)
-            for attr_pair in attributes.iteritems() ]
+            for attr_pair in iteritems(attributes) ]
         query_parameters = {
             'details': ['True'],
             'operation': [operation],

@@ -141,6 +141,7 @@ except ImportError as e:
     http://python-consul.readthedocs.org/en/latest/#installation'""")
     sys.exit(1)

+from six import iteritems


 class ConsulInventory(object):
@@ -187,7 +188,7 @@ class ConsulInventory(object):
         an 'available' or 'unavailable' grouping. The suffix for each group can be
         controlled from the config'''
         if self.config.has_config('availability'):
-            for service_name, service in node['Services'].iteritems():
+            for service_name, service in iteritems(node['Services']):
                 for node in self.consul_api.health.service(service_name)[1]:
                     for check in node['Checks']:
                         if check['ServiceName'] == service_name:

@@ -26,6 +26,7 @@ import re
 import os
 import ConfigParser
 from novaclient import client as nova_client
+from six import iteritems

 try:
     import json
@@ -194,7 +195,7 @@ if (len(sys.argv) == 2 and sys.argv[1] == '--list') or len(sys.argv) == 1:
         push(groups, server.name, access_ip)

         # Run through each metadata item and add instance to it
-        for key, value in server.metadata.iteritems():
+        for key, value in iteritems(server.metadata):
             composed_key = to_safe('tag_{0}_{1}'.format(key, value))
             push(groups, composed_key, access_ip)


@@ -25,6 +25,9 @@ import os
 import sys
 from optparse import OptionParser

+from six import iteritems
+
+
 class ProxmoxNodeList(list):
     def get_names(self):
         return [node['node'] for node in self]
@@ -32,7 +35,7 @@ class ProxmoxNodeList(list):
 class ProxmoxQemu(dict):
     def get_variables(self):
         variables = {}
-        for key, value in self.iteritems():
+        for key, value in iteritems(self):
             variables['proxmox_' + key] = value
         return variables


@@ -153,6 +153,8 @@ import warnings
 import collections
 import ConfigParser

+from six import iteritems
+
 from ansible.constants import get_config, mk_boolean

 try:
@@ -267,7 +269,7 @@ def _list_into_cache(regions):

         hostvars[server.name]['rax_region'] = region

-        for key, value in server.metadata.iteritems():
+        for key, value in iteritems(server.metadata):
             groups['%s_%s_%s' % (prefix, key, value)].append(server.name)

         groups['instance-%s' % server.id].append(server.name)

@@ -49,6 +49,8 @@ from optparse import OptionParser
 import subprocess
 import ConfigParser

+from six import iteritems
+
 try:
     import json
 except:
@@ -193,10 +195,10 @@ if options.list:
         sys.exit(2)

     if options.human:
-        for group, systems in groups.iteritems():
+        for group, systems in iteritems(groups):
             print('[%s]\n%s\n' % (group, '\n'.join(systems)))
     else:
-        final = dict( [ (k, list(s)) for k, s in groups.iteritems() ] )
+        final = dict( [ (k, list(s)) for k, s in iteritems(groups) ] )
         final["_meta"] = meta
         print(json.dumps( final ))
         #print(json.dumps(groups))
@@ -221,7 +223,7 @@ elif options.host:

     if options.human:
         print('Host: %s' % options.host)
-        for k, v in host_details.iteritems():
+        for k, v in iteritems(host_details):
             print(' %s: %s' % (k, '\n '.join(v.split(';'))))
     else:
         print( json.dumps( dict( ( key, ( value.split(';') if ';' in value else value) ) for key, value in host_details.items() ) ) )

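For readers skimming the inventory scripts, a self-contained sketch of the human-readable listing loop in the hunk above; the group data here is invented:

    # Hypothetical data; mirrors the --human output loop shown above.
    from six import iteritems

    groups = {'webservers': ['web1', 'web2'], 'dbservers': ['db1']}

    for group, systems in iteritems(groups):
        # Prints an INI-style section per group, one host per line.
        print('[%s]\n%s\n' % (group, '\n'.join(systems)))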
@@ -23,6 +23,7 @@ from ansible import utils
 import os
 import yaml
 import sys
+from six import iteritems

 class InventoryParserYaml(object):
     ''' Host inventory parser for ansible '''
@@ -176,7 +177,7 @@ if __name__ == "__main__":
         groupfh.write(yaml.dump(record.get_variables()))
         groupfh.close()

-    for (host_name, host_record) in yamlp._hosts.iteritems():
+    for (host_name, host_record) in iteritems(yamlp._hosts):
         hostfiledir = os.path.join(dirname, "host_vars")
         if not os.path.exists(hostfiledir):
             print "* creating: %s" % hostfiledir

@@ -34,6 +34,7 @@ import subprocess
 import cgi
 import warnings
 from jinja2 import Environment, FileSystemLoader
+from six import iteritems

 from ansible.utils import module_docs
 from ansible.utils.vars import merge_hash
@@ -292,7 +293,7 @@ def process_module(module, options, env, template, outputname, module_map, alias
         del doc['version_added']

     if 'options' in doc and doc['options']:
-        for (k,v) in doc['options'].iteritems():
+        for (k,v) in iteritems(doc['options']):
             # don't show version added information if it's too old to be called out
             if 'version_added' in doc['options'][k] and too_old(doc['options'][k]['version_added']):
                 del doc['options'][k]['version_added']

@@ -24,6 +24,8 @@ import termios
 import traceback
 import textwrap

+from six import iteritems
+
 from ansible import constants as C
 from ansible.errors import AnsibleError, AnsibleOptionsError
 from ansible.plugins import module_loader
@@ -101,7 +103,7 @@ class DocCLI(CLI):
         if doc is not None:

             all_keys = []
-            for (k,v) in doc['options'].iteritems():
+            for (k,v) in iteritems(doc['options']):
                 all_keys.append(k)
             all_keys = sorted(all_keys)
             doc['option_keys'] = all_keys

@@ -21,6 +21,8 @@ __metaclass__ = type

 import fnmatch

+from six import iteritems
+
 from ansible import constants as C

 from ansible.errors import *
@@ -322,7 +324,7 @@ class PlayIterator:
         self._host_states[host.name] = s

     def get_failed_hosts(self):
-        return dict((host, True) for (host, state) in self._host_states.iteritems() if state.run_state == self.ITERATING_COMPLETE and state.fail_state != self.FAILED_NONE)
+        return dict((host, True) for (host, state) in iteritems(self._host_states) if state.run_state == self.ITERATING_COMPLETE and state.fail_state != self.FAILED_NONE)

     def get_original_task(self, host, task):
         '''

@@ -20,6 +20,8 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

 from six.moves import queue
+from six import iteritems
+
 import multiprocessing
 import os
 import signal
@@ -157,7 +159,7 @@ class ResultProcess(multiprocessing.Process):
                     # if this task is registering facts, do that now
                     item = result_item.get('item', None)
                     if result._task.action in ('set_fact', 'include_vars'):
-                        for (key, value) in result_item['ansible_facts'].iteritems():
+                        for (key, value) in iteritems(result_item['ansible_facts']):
                             self._send_result(('set_host_var', result._host, result._task, item, key, value))
                     else:
                         self._send_result(('set_host_facts', result._host, result._task, item, result_item['ansible_facts']))

@@ -25,6 +25,8 @@ import subprocess
 import sys
 import time

+from six import iteritems
+
 from ansible import constants as C
 from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable
 from ansible.playbook.conditional import Conditional
@@ -289,7 +291,7 @@ class TaskExecutor:
         # And filter out any fields which were set to default(omit), and got the omit token value
         omit_token = variables.get('omit')
         if omit_token is not None:
-            self._task.args = dict(filter(lambda x: x[1] != omit_token, self._task.args.iteritems()))
+            self._task.args = dict(filter(lambda x: x[1] != omit_token, iteritems(self._task.args)))

         # Read some values from the task, so that we can modify them if need be
         retries = self._task.retries

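A standalone sketch of the omit-token filtering touched in the hunk above, using invented argument values; any task argument whose value equals the omit placeholder is dropped before the module runs:

    # Hypothetical values; illustrates the dict(filter(...)) idiom shown above.
    from six import iteritems

    omit_token = '__omit_place_holder__example'
    task_args = {'path': '/tmp/file', 'mode': omit_token}

    # Keep only the arguments whose value is not the omit placeholder.
    task_args = dict(filter(lambda x: x[1] != omit_token, iteritems(task_args)))
    print(task_args)  # {'path': '/tmp/file'}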
@@ -22,9 +22,10 @@ __metaclass__ = type
 import os
 import subprocess
 import sys

 from collections import Mapping
-
+from six import iteritems
+
 from ansible import constants as C
 from ansible.errors import *
 from ansible.inventory.host import Host
@@ -122,7 +123,7 @@ class InventoryScript:
                 raise AnsibleError("You defined a group \"%s\" with bad "
                     "data for variables:\n %s" % (group_name, data))

-            for k, v in data['vars'].iteritems():
+            for k, v in iteritems(data['vars']):
                 if group.name == all.name:
                     all.set_variable(k, v)
                 else:

@@ -25,6 +25,8 @@ import pipes
 import random
 import re

+from six import iteritems
+
 from ansible import constants as C
 from ansible.errors import AnsibleError
 from ansible.playbook.attribute import Attribute, FieldAttribute
@@ -308,7 +310,7 @@ class PlayContext(Base):

         # finally, use the MAGIC_VARIABLE_MAPPING dictionary to update this
         # connection info object with 'magic' variables from the variable list
-        for (attr, variable_names) in MAGIC_VARIABLE_MAPPING.iteritems():
+        for (attr, variable_names) in iteritems(MAGIC_VARIABLE_MAPPING):
             for variable_name in variable_names:
                 if variable_name in variables:
                     setattr(new_info, attr, variables[variable_name])

@@ -21,6 +21,8 @@ __metaclass__ = type

 import os

+from six import iteritems
+
 from ansible.errors import AnsibleParserError
 from ansible.parsing.splitter import split_args, parse_kv
 from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
@@ -95,7 +97,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
         if isinstance(ds, AnsibleBaseYAMLObject):
             new_ds.ansible_pos = ds.ansible_pos

-        for (k,v) in ds.iteritems():
+        for (k,v) in iteritems(ds):
             if k == 'include':
                 self._preprocess_include(ds, new_ds, k, v)
             else:

@@ -51,7 +51,7 @@ def hash_params(params):
         return params
     else:
         s = set()
-        for k,v in params.iteritems():
+        for k,v in iteritems(params):
             if isinstance(v, dict):
                 s.update((k, hash_params(v)))
             elif isinstance(v, list):
@@ -105,7 +105,7 @@ class Role(Base, Become, Conditional, Taggable):
             params['tags'] = role_include.tags
         hashed_params = hash_params(params)
         if role_include.role in play.ROLE_CACHE:
-            for (entry, role_obj) in play.ROLE_CACHE[role_include.role].iteritems():
+            for (entry, role_obj) in iteritems(play.ROLE_CACHE[role_include.role]):
                 if hashed_params == entry:
                     if parent_role:
                         role_obj.add_parent(parent_role)

@@ -181,7 +181,7 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
         for (key, value) in iteritems(ds):
             # use the list of FieldAttribute values to determine what is and is not
             # an extra parameter for this role (or sub-class of this role)
-            if key not in [attr_name for (attr_name, attr_value) in self._get_base_attributes().iteritems()]:
+            if key not in [attr_name for (attr_name, attr_value) in iteritems(self._get_base_attributes())]:
                 # this key does not match a field attribute, so it must be a role param
                 role_params[key] = value
             else:

@@ -19,7 +19,7 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

-from six import string_types
+from six import iteritems, string_types

 from ansible.errors import AnsibleError

@@ -118,7 +118,7 @@ class Task(Base, Conditional, Taggable, Become):
             return ds
         elif isinstance(ds, dict):
             buf = ""
-            for (k,v) in ds.iteritems():
+            for (k,v) in iteritems(ds):
                 if k.startswith('_'):
                     continue
                 buf = buf + "%s=%s " % (k,v)
@@ -180,7 +180,7 @@ class Task(Base, Conditional, Taggable, Become):
         else:
             new_ds['vars'] = dict()

-        for (k,v) in ds.iteritems():
+        for (k,v) in iteritems(ds):
             if k in ('action', 'local_action', 'args', 'connection') or k == action or k == 'shell':
                 # we don't want to re-assign these values, which were
                 # determined by the ModuleArgsParser() above

@@ -18,6 +18,7 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

+from six import iteritems

 from ansible.errors import AnsibleError
 from ansible.plugins.action import ActionBase
@@ -32,7 +33,7 @@ class ActionModule(ActionBase):
     def run(self, tmp=None, task_vars=dict()):
         facts = dict()
         if self._task.args:
-            for (k, v) in self._task.args.iteritems():
+            for (k, v) in iteritems(self._task.args):
                 k = self._templar.template(k)

                 if not isidentifier(k):

lib/ansible/plugins/cache/__init__.py (vendored, 2 changed lines)

@@ -60,7 +60,7 @@ class FactCache(MutableMapping):

     def copy(self):
         """ Return a primitive copy of the keys and values from the cache. """
-        return dict([(k, v) for (k, v) in self.iteritems()])
+        return dict([(k, v) for (k, v) in iteritems(self)])

     def keys(self):
         return self._plugin.keys()

@@ -39,6 +39,8 @@ import sys
 from termios import tcflush, TCIFLUSH
 from binascii import hexlify

+from six import iteritems
+
 from ansible import constants as C
 from ansible.errors import AnsibleError, AnsibleConnectionFailure, AnsibleFileNotFound
 from ansible.plugins.connections import ConnectionBase
@@ -306,8 +308,8 @@ class Connection(ConnectionBase):
     def _any_keys_added(self):

         added_any = False
-        for hostname, keys in self.ssh._host_keys.iteritems():
-            for keytype, key in keys.iteritems():
+        for hostname, keys in iteritems(self.ssh._host_keys):
+            for keytype, key in iteritems(keys):
                 added_this_time = getattr(key, '_added_by_ansible_this_time', False)
                 if added_this_time:
                     return True
@@ -327,18 +329,18 @@ class Connection(ConnectionBase):

         f = open(filename, 'w')

-        for hostname, keys in self.ssh._host_keys.iteritems():
+        for hostname, keys in iteritems(self.ssh._host_keys):

-            for keytype, key in keys.iteritems():
+            for keytype, key in iteritems(keys):

                 # was f.write
                 added_this_time = getattr(key, '_added_by_ansible_this_time', False)
                 if not added_this_time:
                     f.write("%s %s %s\n" % (hostname, keytype, key.get_base64()))

-        for hostname, keys in self.ssh._host_keys.iteritems():
+        for hostname, keys in iteritems(self.ssh._host_keys):

-            for keytype, key in keys.iteritems():
+            for keytype, key in iteritems(keys):
                 added_this_time = getattr(key, '_added_by_ansible_this_time', False)
                 if added_this_time:
                     f.write("%s %s %s\n" % (hostname, keytype, key.get_base64()))

@@ -38,6 +38,7 @@ import uuid
 import yaml
 from jinja2.filters import environmentfilter
 from distutils.version import LooseVersion, StrictVersion
+from six import iteritems

 from ansible import errors
 from ansible.parsing.yaml.dumper import AnsibleDumper
@@ -245,7 +246,7 @@ def combine(*terms, **kwargs):
     if recursive:
         return reduce(merge_hash, terms)
     else:
-        return dict(itertools.chain(*map(dict.iteritems, terms)))
+        return dict(itertools.chain(*map(iteritems, terms)))

 class FilterModule(object):
     ''' Ansible core jinja2 filters '''

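A small standalone sketch of the non-recursive branch of the combine filter touched above; chaining iteritems() over the input dicts builds one dict where later dicts win on key collisions (the sample dicts are invented):

    # Hypothetical dicts; shows the non-recursive merge used by combine() above.
    import itertools
    from six import iteritems

    terms = [{'a': 1, 'b': 2}, {'b': 20, 'c': 3}]

    merged = dict(itertools.chain(*map(iteritems, terms)))
    print(merged)  # {'a': 1, 'b': 20, 'c': 3} -- later dicts override earlier ones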
@@ -20,6 +20,8 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

 from six.moves import queue as Queue
+from six import iteritems
+
 import time

 from ansible import constants as C
@@ -207,7 +209,7 @@ class StrategyBase:
                 if task_result._task._role is not None and result[0] in ('host_task_ok', 'host_task_failed'):
                     # lookup the role in the ROLE_CACHE to make sure we're dealing
                     # with the correct object and mark it as executed
-                    for (entry, role_obj) in iterator._play.ROLE_CACHE[task_result._task._role._role_name].iteritems():
+                    for (entry, role_obj) in iteritems(iterator._play.ROLE_CACHE[task_result._task._role._role_name]):
                         if role_obj._uuid == task_result._task._role._uuid:
                             role_obj._had_task_run[host.name] = True

@@ -358,7 +360,7 @@ class StrategyBase:
                 groups[group_name] = []
             groups[group_name].append(host)

-        for group_name, hosts in groups.iteritems():
+        for group_name, hosts in iteritems(groups):
             new_group = self._inventory.get_group(group_name)
             if not new_group:
                 # create the new group and add it to inventory

@@ -19,6 +19,8 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

+from six import iteritems
+
 from ansible.errors import AnsibleError
 from ansible.executor.play_iterator import PlayIterator
 from ansible.playbook.block import Block
@@ -63,7 +65,7 @@ class StrategyModule(StrategyBase):
             lowest_cur_block = len(iterator._blocks)

             display.debug("counting tasks in each state of execution")
-            for (k, v) in host_tasks.iteritems():
+            for (k, v) in iteritems(host_tasks):
                 if v is None:
                     continue

@@ -19,6 +19,7 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

+from six import iteritems
 from jinja2.utils import missing

 __all__ = ['AnsibleJ2Vars']
@@ -46,7 +47,7 @@ class AnsibleJ2Vars:
         self._extras = extras
         self._locals = dict()
         if isinstance(locals, dict):
-            for key, val in locals.iteritems():
+            for key, val in iteritems(locals):
                 if key[:2] == 'l_' and val is not missing:
                     self._locals[key[2:]] = val

@@ -19,6 +19,8 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

+from six import iteritems
+
 from ansible.compat.tests import unittest
 from ansible.compat.tests.mock import patch, MagicMock

@@ -57,7 +59,7 @@ class TestVariableManager(unittest.TestCase):

         vars = v.get_vars(loader=fake_loader, use_cache=False)

-        for (key, val) in extra_vars.iteritems():
+        for (key, val) in iteritems(extra_vars):
             self.assertEqual(vars.get(key), val)

         self.assertIsNot(v.extra_vars, extra_vars)