Mirror of https://github.com/ansible-collections/community.general.git, synced 2024-09-14 20:13:21 +02:00
Cleaning up FIXMEs
parent 1e50d31cdc
commit 86de1429e5

32 changed files with 49 additions and 105 deletions
@@ -38,7 +38,7 @@ from ansible.utils.display import Display
 class SortedOptParser(optparse.OptionParser):
     '''Optparser which sorts the options by opt before outputting --help'''

-    #FIXME: epilog parsing: OptionParser.format_epilog = lambda self, formatter: self.epilog
+    # TODO: epilog parsing: OptionParser.format_epilog = lambda self, formatter: self.epilog

     def format_help(self, formatter=None, epilog=None):
         self.option_list.sort(key=operator.methodcaller('get_opt_string'))
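
For context on the epilog trick named in the comment above: optparse's default format_epilog re-flows the epilog text through textwrap, which collapses newlines. A minimal standalone sketch (not part of this commit) of the monkey-patch:

    import optparse

    # Returning the raw epilog bypasses the default textwrap re-flow,
    # so multi-line epilog text (e.g. usage examples) keeps its newlines.
    optparse.OptionParser.format_epilog = lambda self, formatter: self.epilog

    parser = optparse.OptionParser(epilog="Examples:\n  ansible all -m ping\n")
    parser.print_help()  # the epilog now prints with its line breaks intact
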
@@ -223,8 +223,8 @@ class CLI(object):
             async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False):
         ''' create an options parser for most ansible scripts '''

-        #FIXME: implemente epilog parsing
-        #OptionParser.format_epilog = lambda self, formatter: self.epilog
+        # TODO: implement epilog parsing
+        # OptionParser.format_epilog = lambda self, formatter: self.epilog

         # base opts
         parser = SortedOptParser(usage, version=CLI.version("%prog"))
@@ -181,7 +181,7 @@ class PlaybookExecutor:
                     self.display.display("No issues encountered")
             return result

-        # FIXME: this stat summary stuff should be cleaned up and moved
+        # TODO: this stat summary stuff should be cleaned up and moved
         # to a new method, if it even belongs here...
         self._display.banner("PLAY RECAP")

@@ -108,11 +108,11 @@ class ResultProcess(multiprocessing.Process):
                     self._send_result(('register_host_var', result._host, result._task.register, result._result))

                 # send callbacks, execute other options based on the result status
-                # FIXME: this should all be cleaned up and probably moved to a sub-function.
-                # the fact that this sometimes sends a TaskResult and other times
-                # sends a raw dictionary back may be confusing, but the result vs.
-                # results implementation for tasks with loops should be cleaned up
-                # better than this
+                # TODO: this should all be cleaned up and probably moved to a sub-function.
+                # the fact that this sometimes sends a TaskResult and other times
+                # sends a raw dictionary back may be confusing, but the result vs.
+                # results implementation for tasks with loops should be cleaned up
+                # better than this
                 if result.is_unreachable():
                     self._send_result(('host_unreachable', result))
                 elif result.is_failed():
@@ -163,8 +163,8 @@ class ResultProcess(multiprocessing.Process):
             except (KeyboardInterrupt, IOError, EOFError):
                 break
             except:
-                # FIXME: we should probably send a proper callback here instead of
-                # simply dumping a stack trace on the screen
+                # TODO: we should probably send a proper callback here instead of
+                # simply dumping a stack trace on the screen
                 traceback.print_exc()
                 break

@@ -131,7 +131,6 @@ class WorkerProcess(multiprocessing.Process):
                         task_result = TaskResult(host, task, dict(unreachable=True))
                         self._rslt_q.put(task_result, block=False)
                     except:
-                        # FIXME: most likely an abort, catch those kinds of errors specifically
                         break
             except Exception as e:
                 if isinstance(e, (IOError, EOFError, KeyboardInterrupt)) and not isinstance(e, TemplateNotFound):
@@ -144,7 +143,6 @@ class WorkerProcess(multiprocessing.Process):
                 except:
                     debug("WORKER EXCEPTION: %s" % e)
                     debug("WORKER EXCEPTION: %s" % traceback.format_exc())
-                    # FIXME: most likely an abort, catch those kinds of errors specifically
                     break

         debug("WORKER PROCESS EXITING")
@@ -341,7 +341,6 @@ class TaskExecutor:
         result = None
         for attempt in range(retries):
             if attempt > 0:
-                # FIXME: this should use the self._display.callback/message passing mechanism
                 self._display.display("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result), color="red")
                 result['attempts'] = attempt + 1

@@ -480,8 +479,6 @@ class TaskExecutor:
         correct connection object from the list of connection plugins
         '''

-        # FIXME: calculation of connection params/auth stuff should be done here
-
         if not self._play_context.remote_addr:
             self._play_context.remote_addr = self._host.address

@@ -117,8 +117,6 @@ class Inventory(object):

         self._vars_plugins = [ x for x in vars_loader.all(self) ]

-        # FIXME: shouldn't be required, since the group/host vars file
-        # management will be done in VariableManager
         # get group vars from group_vars/ files and vars plugins
         for group in self.groups.values():
             group.vars = combine_vars(group.vars, self.get_group_variables(group.name))
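
For context: combine_vars is the merge primitive used throughout Inventory. Under the default hash_behaviour=replace setting it amounts to a shallow dict merge where the second argument wins; a minimal sketch of that idea (an approximation, not the actual implementation):

    def combine_vars_sketch(a, b):
        # shallow merge: keys from b override keys from a
        result = a.copy()
        result.update(b)
        return result

    print(combine_vars_sketch({'x': 1, 'y': 1}, {'y': 2}))  # {'x': 1, 'y': 2}
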
@@ -648,11 +646,11 @@ class Inventory(object):
         if dir_name != self._playbook_basedir:
             self._playbook_basedir = dir_name
             # get group vars from group_vars/ files
-            # FIXME: excluding the new_pb_basedir directory may result in group_vars
-            # files loading more than they should, however with the file caching
-            # we do this shouldn't be too much of an issue. Still, this should
-            # be fixed at some point to allow a "first load" to touch all of the
-            # directories, then later runs only touch the new basedir specified
+            # TODO: excluding the new_pb_basedir directory may result in group_vars
+            # files loading more than they should, however with the file caching
+            # we do this shouldn't be too much of an issue. Still, this should
+            # be fixed at some point to allow a "first load" to touch all of the
+            # directories, then later runs only touch the new basedir specified
             for group in self.groups.values():
                 #group.vars = combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True))
                 group.vars = combine_vars(group.vars, self.get_group_vars(group))
@@ -708,7 +706,6 @@ class Inventory(object):
             if _basedir == self._playbook_basedir and scan_pass != 1:
                 continue

-            # FIXME: these should go to VariableManager
             if group and host is None:
                 # load vars in dir/group_vars/name_of_group
                 base_path = os.path.realpath(os.path.join(basedir, "group_vars/%s" % group.name))
@@ -243,7 +243,6 @@ class ModuleArgsParser:
             # this is the 'extra gross' scenario detailed above, so we grab
             # the args and pass them in as additional arguments, which can/will
             # be overwritten via dict updates from the other arg sources below
-            # FIXME: add test cases for this
             additional_args = self._task_ds.get('args', dict())

         # We can have one of action, local_action, or module specified
@@ -360,7 +360,7 @@ class VaultFile(object):

         _, self.tmpfile = tempfile.mkstemp()

-    ### FIXME:
+    ### TODO:
     # __del__ can be problematic in python... For this use case, make
     # VaultFile a context manager instead (implement __enter__ and __exit__)
     def __del__(self):
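
A minimal sketch of the context-manager shape the comment above suggests (hypothetical, with placeholder bodies; the real class does vault work with the tempfile):

    import os
    import tempfile

    class VaultFileSketch(object):
        def __enter__(self):
            # acquire the temp file on entry rather than in __init__
            _, self.tmpfile = tempfile.mkstemp()
            return self

        def __exit__(self, exc_type, exc_value, tb):
            # cleanup runs deterministically, even when an exception is
            # raised, unlike __del__ which may run late or not at all
            os.remove(self.tmpfile)
            return False  # do not suppress exceptions

    # usage: with VaultFileSketch() as vf: ... work with vf.tmpfile ...
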
@@ -183,8 +183,6 @@ class Base:

         # Walk all attributes in the class. We sort them based on their priority
         # so that certain fields can be loaded before others, if they are dependent.
-        # FIXME: we currently don't do anything with private attributes but
-        # may later decide to filter them out of 'ds' here.
         base_attributes = self._get_base_attributes()
         for name, attr in sorted(base_attributes.items(), key=operator.itemgetter(1)):
             # copy the value over unless a _load_field method is defined
@@ -233,7 +233,6 @@ class Play(Base, Taggable, Become):
                 vars_prompts.append(prompt_data)
         return vars_prompts

-    # FIXME: post_validation needs to ensure that become/su/sudo have only 1 set
     def _compile_roles(self):
         '''
         Handles the role compilation step, returning a flat list of tasks
@@ -472,7 +472,8 @@ class PlayContext(Base):
         In case users need to access from the play, this is a legacy from runner.
         '''

-        #FIXME: remove password? possibly add become/sudo settings
+        # TODO: should we be setting the more generic values here rather than
+        # the more specific _ssh_ ones?
         for special_var in ['ansible_connection', 'ansible_ssh_host', 'ansible_ssh_pass', 'ansible_ssh_port', 'ansible_ssh_user', 'ansible_ssh_private_key_file', 'ansible_ssh_pipelining']:
             if special_var not in variables:
                 for prop, varnames in MAGIC_VARIABLE_MAPPING.items():
@@ -107,8 +107,6 @@ class PlaybookInclude(Base, Conditional, Taggable):
             else:
                 # some basic error checking, to make sure vars are properly
                 # formatted and do not conflict with k=v parameters
-                # FIXME: we could merge these instead, but controlling the order
-                # in which they're encountered could be difficult
                 if k == 'vars':
                     if 'vars' in new_ds:
                         raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
@@ -129,8 +127,6 @@ class PlaybookInclude(Base, Conditional, Taggable):
         if len(items) == 0:
             raise AnsibleParserError("include statements must specify the file name to include", obj=ds)
         else:
-            # FIXME/TODO: validate that items[0] is a file, which also
-            # exists and is readable
             new_ds['include'] = items[0]
             if len(items) > 1:
                 # rejoin the parameter portion of the arguments and
@@ -139,7 +135,6 @@ class PlaybookInclude(Base, Conditional, Taggable):
         if 'tags' in params:
             new_ds['tags'] = params.pop('tags')
         if 'vars' in new_ds:
-            # FIXME: see fixme above regarding merging vars
             raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
         new_ds['vars'] = params

@@ -37,10 +37,10 @@ from ansible.utils.vars import combine_vars

 __all__ = ['Role', 'hash_params']

-# FIXME: this should be a utility function, but can't be a member of
-# the role due to the fact that it would require the use of self
-# in a static method. This is also used in the base class for
-# strategies (ansible/plugins/strategy/__init__.py)
+# TODO: this should be a utility function, but can't be a member of
+# the role due to the fact that it would require the use of self
+# in a static method. This is also used in the base class for
+# strategies (ansible/plugins/strategy/__init__.py)
 def hash_params(params):
     if not isinstance(params, dict):
         return params
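
The elided body of hash_params has to turn a params dict into something hashable so role instances can be compared and de-duplicated. A minimal sketch of that idea (assuming hashable values; the real function also has to cope with nested containers):

    def hash_params_sketch(params):
        # dicts are unhashable, so convert to a frozenset of item pairs
        if not isinstance(params, dict):
            return params
        return frozenset(params.items())

    # frozensets compare by value, so equal param dicts hash identically
    assert hash(hash_params_sketch({'a': 1})) == hash(hash_params_sketch({'a': 1}))
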
|
@ -128,7 +128,6 @@ class Role(Base, Become, Conditional, Taggable):
|
|||
return r
|
||||
|
||||
except RuntimeError:
|
||||
# FIXME: needs a better way to access the ds in the role include
|
||||
raise AnsibleError("A recursion loop was detected with the roles specified. Make sure child roles do not have dependencies on parent roles", obj=role_include._ds)
|
||||
|
||||
def _load_role_data(self, role_include, parent_role=None):
|
||||
|
@@ -244,7 +243,6 @@ class Role(Base, Become, Conditional, Taggable):
         return self._parents

     def get_default_vars(self):
-        # FIXME: get these from dependent roles too
         default_vars = dict()
         for dep in self.get_all_dependencies():
             default_vars = combine_vars(default_vars, dep.get_default_vars())
@@ -70,6 +70,9 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
         if isinstance(ds, dict):
             ds = super(RoleDefinition, self).preprocess_data(ds)

+        # save the original ds for use later
+        self._ds = ds
+
         # we create a new data structure here, using the same
         # object used internally by the YAML parsing code so we
         # can preserve file:line:column information if it exists
@@ -97,9 +100,6 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
         # we store the role path internally
         self._role_path = role_path

-        # save the original ds for use later
-        self._ds = ds
-
         # and return the cleaned-up data structure
         return new_ds

@@ -176,10 +176,7 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
             if self._loader.path_exists(role_path):
                 return (role_name, role_path)

-        # FIXME: make the parser smart about list/string entries in
-        # the yaml so the error line/file can be reported here
-
-        raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)))
+        raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)), obj=self._ds)

     def _split_role_params(self, ds):
         '''
@@ -36,7 +36,8 @@ __all__ = ['RoleInclude']
 class RoleInclude(RoleDefinition):

     """
-    FIXME: docstring
+    A derivative of RoleDefinition, used by playbook code when a role
+    is included for execution in a play.
     """

     _delegate_to = FieldAttribute(isa='string')
@@ -50,7 +50,8 @@ except ImportError:
 class RoleRequirement(RoleDefinition):

     """
-    FIXME: document various ways role specs can be specified
+    Helper class for Galaxy, which is used to parse both dependencies
+    specified in meta/main.yml and requirements.yml files.
     """

     def __init__(self):
@@ -232,13 +232,12 @@ class PluginLoader:
             # they can have any suffix
             suffix = ''

-        ### FIXME:
-        # Instead of using the self._paths cache (PATH_CACHE) and
-        # self._searched_paths we could use an iterator. Before enabling that
-        # we need to make sure we don't want to add additional directories
-        # (add_directory()) once we start using the iterator. Currently, it
-        # looks like _get_paths() never forces a cache refresh so if we expect
-        # additional directories to be added later, it is buggy.
+        # TODO: Instead of using the self._paths cache (PATH_CACHE) and
+        # self._searched_paths we could use an iterator. Before enabling that
+        # we need to make sure we don't want to add additional directories
+        # (add_directory()) once we start using the iterator. Currently, it
+        # looks like _get_paths() never forces a cache refresh so if we expect
+        # additional directories to be added later, it is buggy.
         for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
             try:
                 full_paths = (os.path.join(path, f) for f in os.listdir(path))
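
A minimal sketch of the iterator approach the TODO describes (hypothetical names, not the plugin loader's actual API): yield candidate plugin files lazily instead of pre-populating a path cache.

    import os

    def iter_plugin_files(search_paths, searched):
        # lazily walk each not-yet-searched directory
        for path in (p for p in search_paths if p not in searched and os.path.isdir(p)):
            searched.add(path)
            for f in os.listdir(path):
                yield os.path.join(path, f)

    # caveat from the TODO: directories added after iteration begins are missed
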
@@ -88,12 +88,10 @@ class ActionBase:
             module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type)
             if module_path:
                 break
-        else: # This is a for-else: http://bit.ly/1ElPkyg
-            # FIXME: Why is it necessary to look for the windows version?
-            # Shouldn't all modules be installed?
-            #
+        else:
             # Use Windows version of ping module to check module paths when
-            # using a connection that supports .ps1 suffixes.
+            # using a connection that supports .ps1 suffixes. We check specifically
+            # for win_ping here, otherwise the code would look for ping.ps1
             if '.ps1' in self._connection.module_implementation_preferences:
                 ping_module = 'win_ping'
             else:
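
The removed comment's link pointed at Python's for-else construct, which the surrounding loop relies on: the else block runs only when the loop finishes without hitting break. A standalone illustration:

    # else on a for loop runs only if the loop was never broken out of
    for suffix in ('.py', '.ps1'):
        if suffix == '.ps1':
            print("found, stopping early")
            break
    else:
        print("exhausted without break")  # skipped here because of the break
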
@@ -138,8 +136,6 @@ class ActionBase:
         Determines if a temp path should be created before the action is executed.
         '''

-        # FIXME: modified from original, needs testing? Since this is now inside
-        # the action plugin, it should make it just this simple
         return getattr(self, 'TRANSFERS_FILES', False)

     def _late_needs_tmp_path(self, tmp, module_style):
@@ -158,8 +154,6 @@ class ActionBase:
                 return True
         return False

-    # FIXME: return a datastructure in this function instead of raising errors -
-    # the new executor pipeline handles it much better that way
     def _make_tmp_path(self):
         '''
         Create and return a temporary path on a remote box.
@@ -199,8 +193,6 @@ class ActionBase:
                 output = output + u": %s" % result['stdout']
             raise AnsibleConnectionFailure(output)

-        # FIXME: do we still need to do this?
-        #rc = self._connection._shell.join_path(utils.last_non_blank_line(result['stdout']).strip(), '')
         rc = self._connection._shell.join_path(result['stdout'].strip(), u'').splitlines()[-1]

         # Catch failure conditions, files should never be
@@ -57,14 +57,10 @@ class ActionModule(ActionBase):
         source = os.path.expanduser(source)

         if copy:
-            # FIXME: the original file stuff needs to be reworked
-            if '_original_file' in task_vars:
-                source = self._loader.path_dwim_relative(task_vars['_original_file'], 'files', source)
+            if self._task._role is not None:
+                source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
             else:
-                if self._task._role is not None:
-                    source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
-                else:
-                    source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)
+                source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)

         remote_checksum = self._remote_checksum(dest, all_vars=task_vars)
         if remote_checksum != '3':
lib/ansible/plugins/cache/redis.py (vendored)
@@ -17,7 +17,6 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

-# FIXME: can we store these as something else before we ship it?
 import sys
 import time
 import json
@@ -95,7 +94,7 @@ class CacheModule(BaseCacheModule):
         self.delete(key)

     def copy(self):
-        # FIXME: there is probably a better way to do this in redis
+        # TODO: there is probably a better way to do this in redis
         ret = dict()
         for key in self.keys():
             ret[key] = self.get(key)
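
One plausible "better way" the TODO hints at (an assumption, not something this commit implements): fetch all values in a single MGET round trip instead of one GET per key, e.g. with the redis-py client this plugin builds on:

    import redis

    def copy_all(conn, keys):
        # one MGET round trip instead of N individual GETs;
        # values come back in the same order as the keys
        values = conn.mget(keys)
        return dict(zip(keys, values))

    # usage sketch: copy_all(redis.StrictRedis(), list_of_cache_keys)
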
@@ -40,9 +40,6 @@ class CallbackBase:
     custom actions.
     '''

-    # FIXME: the list of functions here needs to be updated once we have
-    # finalized the list of callback methods used in the default callback
-
     def __init__(self, display):
         self._display = display
         if self._display.verbosity >= 4:
@@ -166,10 +166,6 @@ class Connection(ConnectionBase):

         super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)

-        # FIXME:
-        #if sudoable and self..become and self.runner.become_method not in self.become_methods_supported:
-        #    raise AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
-
         if in_data:
             raise AnsibleError("Internal Error: this module does not support optimized module pipelining")

@@ -137,9 +137,7 @@ class Connection(ConnectionBase):
         ## Next, additional arguments based on the configuration.

         # sftp batch mode allows us to correctly catch failed transfers, but can
-        # be disabled if the client side doesn't support the option. FIXME: is
-        # this still a real concern?
-
+        # be disabled if the client side doesn't support the option.
         if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
             self._command += ['-b', '-']

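
For illustration of why batch mode matters (a standalone sketch with hypothetical host and paths, not the plugin's code): with -b -, sftp reads subcommands from stdin and exits nonzero when a transfer fails, which makes errors detectable; without it, failures can pass silently.

    import subprocess

    # '-b -' puts sftp in batch mode, reading commands from stdin
    proc = subprocess.Popen(['sftp', '-b', '-', 'user@host'],
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate(b'put /tmp/src /tmp/dest\n')
    if proc.returncode != 0:
        raise RuntimeError('sftp transfer failed: %s' % err)
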
@@ -63,7 +63,6 @@ class LookupModule(LookupBase):

     def run(self, terms, variables, **kwargs):

-        ### FIXME: Is this needed now that listify is run on all lookup plugin terms?
         if not isinstance(terms, list):
             raise AnsibleError("with_flattened expects a list")

@@ -27,7 +27,6 @@ class LookupModule(LookupBase):

     def run(self, terms, variables, **kwargs):

-        ### FIXME: Is this needed now that listify is run on all lookup plugin terms?
         if not isinstance(terms, list):
             raise AnsibleError("with_indexed_items expects a list")

@@ -47,9 +47,9 @@ except ImportError:

 __all__ = ['StrategyBase']

-# FIXME: this should probably be in the plugins/__init__.py, with
-# a smarter mechanism to set all of the attributes based on
-# the loaders created there
+# TODO: this should probably be in the plugins/__init__.py, with
+# a smarter mechanism to set all of the attributes based on
+# the loaders created there
 class SharedPluginLoaderObj:
     '''
     A simple object to make pass the various plugin loaders to
@@ -327,7 +327,6 @@ class StrategyBase:
                     allgroup.add_host(new_host)

             # Set/update the vars for this host
-            # FIXME: probably should have a set vars method for the host?
             new_vars = host_info.get('host_vars', dict())
             new_host.vars = self._inventory.get_host_vars(new_host)
             new_host.vars.update(new_vars)
@@ -351,7 +350,6 @@ class StrategyBase:

             # clear pattern caching completely since it's unpredictable what
             # patterns may have referenced the group
-            # FIXME: is this still required?
             self._inventory.clear_pattern_cache()

     def _add_group(self, host, result_item):
@@ -109,7 +109,6 @@ class StrategyModule(StrategyBase):
                     # since they do not use k=v pairs, so get that
                     meta_action = task.args.get('_raw_params')
                     if meta_action == 'noop':
-                        # FIXME: issue a callback for the noop here?
                         continue
                     elif meta_action == 'flush_handlers':
                         # FIXME: in the 'free' mode, flushing handlers should result in
@@ -170,8 +169,6 @@ class StrategyModule(StrategyBase):
                 results = self._wait_on_pending_results(iterator)
                 host_results.extend(results)
             except Exception as e:
-                # FIXME: ctrl+c can cause some failures here, so catch them
-                # with the appropriate error type
                 pass

         # run the base class run() method, which executes the cleanup function
@@ -15,7 +15,6 @@
 # You should have received a copy of the GNU General Public License
 # along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

-# FIXME: copied mostly from old code, needs py3 improvements
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

@@ -227,7 +226,6 @@ class Display:
         except OSError:
             self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.")

-        #FIXME: make this dynamic on tty size (look and ansible-doc)
         msg = msg.strip()
         star_len = (79 - len(msg))
         if star_len < 0:
@@ -28,12 +28,11 @@ from ansible.template.safe_eval import safe_eval

 __all__ = ['listify_lookup_plugin_terms']

-#FIXME: probably just move this into lookup plugin base class
 def listify_lookup_plugin_terms(terms, templar, loader, fail_on_undefined=False, convert_bare=True):

     if isinstance(terms, string_types):
         stripped = terms.strip()
-        #FIXME: warn/deprecation on bare vars in with_ so we can eventually remove fail on undefined override
+        # TODO: warn/deprecation on bare vars in with_ so we can eventually remove fail on undefined override
         terms = templar.template(terms, convert_bare=convert_bare, fail_on_undefined=fail_on_undefined)
     else:
         terms = templar.template(terms, fail_on_undefined=fail_on_undefined)
@@ -16,7 +16,6 @@
 # You should have received a copy of the GNU General Public License
 # along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

-# FIXME: tags should auto-apply to role dependencies
 - name: cleanup old files
   shell: rm -rf {{output_dir}}/*

@@ -32,7 +32,6 @@ from ansible.parsing.yaml.objects import AnsibleMapping
 class TestDataLoader(unittest.TestCase):

     def setUp(self):
-        # FIXME: need to add tests that utilize vault_password
         self._loader = DataLoader()

     def tearDown(self):
@@ -70,8 +70,6 @@ class TestTemplar(unittest.TestCase):
         self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
         self.assertEqual(templar.template("{{var_list}}"), [1])
         self.assertEqual(templar.template(1, convert_bare=True), 1)
-        #FIXME: lookup ignores fake file and returns error
-        #self.assertEqual(templar.template("{{lookup('file', '/path/to/my_file.txt')}}"), "foo")

         # force errors
         self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{bad_var}}")