2014-10-02 19:07:05 +02:00
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
2014-10-16 01:22:54 +02:00
# Make coding more python3-ish
from __future__ import ( absolute_import , division , print_function )
__metaclass__ = type
2015-04-13 21:37:25 +02:00
from six . moves import StringIO
2015-07-27 04:29:56 +02:00
import base64
2014-11-14 23:14:08 +01:00
import json
import os
import random
2015-07-27 04:29:56 +02:00
import stat
2015-07-14 06:23:17 +02:00
import sys
2014-11-14 23:14:08 +01:00
import tempfile
import time
from ansible import constants as C
from ansible . errors import AnsibleError
2015-02-10 21:35:34 +01:00
from ansible . executor . module_common import modify_module
2014-11-14 23:14:08 +01:00
from ansible . parsing . utils . jsonify import jsonify
2015-05-11 18:22:41 +02:00
from ansible . utils . unicode import to_bytes
2014-11-14 23:14:08 +01:00
# Use the display object already configured by the ansible CLI when we are
# running under it; otherwise (e.g. API use) fall back to a fresh instance.
try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()
class ActionBase:

    '''
    This class is the base class for all action plugins, and defines
    code common to all actions. The base class handles the connection
    by putting/getting files and executing commands based on the current
    action in use.
    '''

    def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
        # the task being executed and the connection to the target host
        self._task = task
        self._connection = connection
        self._play_context = play_context
        # loaders/templar shared with the executor
        self._loader = loader
        self._templar = templar
        self._shared_loader_obj = shared_loader_obj
        # module-level display (shared with the CLI process when present)
        self._display = display

        # subclasses set this to False when they cannot honor check mode
        self._supports_check_mode = True
def _configure_module ( self , module_name , module_args , task_vars = dict ( ) ) :
2014-11-14 23:14:08 +01:00
'''
Handles the loading and templating of the module code through the
2015-02-10 21:35:34 +01:00
modify_module ( ) function .
2014-11-14 23:14:08 +01:00
'''
# Search module path(s) for named module.
module_suffixes = getattr ( self . _connection , ' default_suffixes ' , None )
2015-07-24 18:39:54 +02:00
# Check to determine if PowerShell modules are supported, and apply
# some fixes (hacks) to module name + args.
if module_suffixes and ' .ps1 ' in module_suffixes :
# Use Windows versions of stat/file/copy modules when called from
# within other action plugins.
if module_name in ( ' stat ' , ' file ' , ' copy ' ) and self . _task . action != module_name :
module_name = ' win_ %s ' % module_name
# Remove extra quotes surrounding path parameters before sending to module.
if module_name in ( ' win_stat ' , ' win_file ' , ' win_copy ' , ' slurp ' ) and module_args and hasattr ( self . _connection . _shell , ' _unquote ' ) :
for key in ( ' src ' , ' dest ' , ' path ' ) :
if key in module_args :
module_args [ key ] = self . _connection . _shell . _unquote ( module_args [ key ] )
2015-05-02 06:48:11 +02:00
module_path = self . _shared_loader_obj . module_loader . find_plugin ( module_name , module_suffixes )
2014-11-14 23:14:08 +01:00
if module_path is None :
2015-07-24 18:39:54 +02:00
# Use Windows version of ping module to check module paths when
# using a connection that supports .ps1 suffixes.
if module_suffixes and ' .ps1 ' in module_suffixes :
ping_module = ' win_ping '
else :
ping_module = ' ping '
module_path2 = self . _shared_loader_obj . module_loader . find_plugin ( ping_module , module_suffixes )
2014-11-14 23:14:08 +01:00
if module_path2 is not None :
raise AnsibleError ( " The module %s was not found in configured module paths " % ( module_name ) )
else :
raise AnsibleError ( " The module %s was not found in configured module paths. " \
" Additionally, core modules are missing. If this is a checkout, " \
" run ' git submodule update --init --recursive ' to correct this problem. " % ( module_name ) )
# insert shared code and arguments into the module
2015-06-04 21:43:07 +02:00
( module_data , module_style , module_shebang ) = modify_module ( module_path , module_args , task_vars = task_vars )
2014-11-14 23:14:08 +01:00
return ( module_style , module_shebang , module_data )
def _compute_environment_string ( self ) :
'''
Builds the environment string to be used when executing the remote task .
'''
2015-07-21 19:52:51 +02:00
final_environment = dict ( )
if self . _task . environment is not None :
environments = self . _task . environment
if not isinstance ( environments , list ) :
environments = [ environments ]
for environment in environments :
2015-08-25 16:15:32 +02:00
if environment is None :
continue
2015-07-28 21:34:10 +02:00
if not isinstance ( environment , dict ) :
raise AnsibleError ( " environment must be a dictionary, received %s ( %s ) " % ( environment , type ( environment ) ) )
2015-07-21 19:52:51 +02:00
# very deliberatly using update here instead of combine_vars, as
# these environment settings should not need to merge sub-dicts
final_environment . update ( environment )
return self . _connection . _shell . env_prefix ( * * final_environment )
2014-11-14 23:14:08 +01:00
def _early_needs_tmp_path ( self ) :
'''
Determines if a temp path should be created before the action is executed .
'''
# FIXME: modified from original, needs testing? Since this is now inside
# the action plugin, it should make it just this simple
return getattr ( self , ' TRANSFERS_FILES ' , False )
2015-04-13 18:35:20 +02:00
2014-11-14 23:14:08 +01:00
def _late_needs_tmp_path ( self , tmp , module_style ) :
'''
Determines if a temp path is required after some early actions have already taken place .
'''
if tmp and " tmp " in tmp :
# tmp has already been created
return False
2015-07-23 21:11:10 +02:00
if not self . _connection . has_pipelining or not C . ANSIBLE_SSH_PIPELINING or C . DEFAULT_KEEP_REMOTE_FILES or self . _play_context . become :
2014-11-14 23:14:08 +01:00
# tmp is necessary to store the module source code
# or we want to keep the files on the target system
return True
if module_style != " new " :
# even when conn has pipelining, old style modules need tmp to store arguments
return True
return False
# FIXME: return a datastructure in this function instead of raising errors -
# the new executor pipeline handles it much better that way
def _make_tmp_path ( self ) :
'''
Create and return a temporary path on a remote box .
'''
basefile = ' ansible-tmp- %s - %s ' % ( time . time ( ) , random . randint ( 0 , 2 * * 48 ) )
use_system_tmp = False
2015-07-21 18:12:22 +02:00
if self . _play_context . become and self . _play_context . become_user != ' root ' :
2014-11-14 23:14:08 +01:00
use_system_tmp = True
tmp_mode = None
2015-07-21 18:12:22 +02:00
if self . _play_context . remote_user != ' root ' or self . _play_context . become and self . _play_context . become_user != ' root ' :
2015-07-28 09:24:23 +02:00
tmp_mode = 0755
2014-11-14 23:14:08 +01:00
2015-06-29 21:41:51 +02:00
cmd = self . _connection . _shell . mkdtemp ( basefile , use_system_tmp , tmp_mode )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " executing _low_level_execute_command to create the tmp path " )
2014-11-14 23:14:08 +01:00
result = self . _low_level_execute_command ( cmd , None , sudoable = False )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " done with creation of tmp path " )
2014-11-14 23:14:08 +01:00
# error handling on this seems a little aggressive?
if result [ ' rc ' ] != 0 :
if result [ ' rc ' ] == 5 :
output = ' Authentication failure. '
2015-04-16 01:32:44 +02:00
elif result [ ' rc ' ] == 255 and self . _connection . transport in ( ' ssh ' , ) :
2015-06-06 06:16:35 +02:00
2015-07-21 18:12:22 +02:00
if self . _play_context . verbosity > 3 :
2015-06-06 06:16:35 +02:00
output = ' SSH encountered an unknown error. The output was: \n %s ' % ( result [ ' stdout ' ] + result [ ' stderr ' ] )
else :
output = ' SSH encountered an unknown error during the connection. We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue '
2014-11-14 23:14:08 +01:00
elif ' No space left on device ' in result [ ' stderr ' ] :
output = result [ ' stderr ' ]
else :
output = ' Authentication or permission failure. In some cases, you may have been able to authenticate and did not have permissions on the remote directory. Consider changing the remote temp path in ansible.cfg to a path rooted in " /tmp " . Failed command was: %s , exited with result %d ' % ( cmd , result [ ' rc ' ] )
if ' stdout ' in result and result [ ' stdout ' ] != ' ' :
output = output + " : %s " % result [ ' stdout ' ]
raise AnsibleError ( output )
# FIXME: do we still need to do this?
2015-06-29 21:41:51 +02:00
#rc = self._connection._shell.join_path(utils.last_non_blank_line(result['stdout']).strip(), '')
rc = self . _connection . _shell . join_path ( result [ ' stdout ' ] . strip ( ) , ' ' ) . splitlines ( ) [ - 1 ]
2014-11-14 23:14:08 +01:00
# Catch failure conditions, files should never be
# written to locations in /.
if rc == ' / ' :
2015-04-15 01:13:27 +02:00
raise AnsibleError ( ' failed to resolve remote temporary directory from %s : ` %s ` returned empty string ' % ( basefile , cmd ) )
2014-11-14 23:14:08 +01:00
return rc
def _remove_tmp_path ( self , tmp_path ) :
''' Remove a temporary path we created. '''
2015-02-09 23:54:44 +01:00
if tmp_path and " -tmp- " in tmp_path :
2015-06-29 21:41:51 +02:00
cmd = self . _connection . _shell . remove ( tmp_path , recurse = True )
2014-11-14 23:14:08 +01:00
# If we have gotten here we have a working ssh configuration.
# If ssh breaks we could leave tmp directories out on the remote system.
2015-07-23 16:24:50 +02:00
self . _display . debug ( " calling _low_level_execute_command to remove the tmp path " )
2014-11-14 23:14:08 +01:00
self . _low_level_execute_command ( cmd , None , sudoable = False )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " done removing the tmp path " )
2014-11-14 23:14:08 +01:00
def _transfer_data ( self , remote_path , data ) :
'''
Copies the module data out to the temporary module path .
'''
2015-05-11 18:22:41 +02:00
if isinstance ( data , dict ) :
2014-11-14 23:14:08 +01:00
data = jsonify ( data )
afd , afile = tempfile . mkstemp ( )
afo = os . fdopen ( afd , ' w ' )
try :
2015-05-11 18:22:41 +02:00
data = to_bytes ( data , errors = ' strict ' )
2014-11-14 23:14:08 +01:00
afo . write ( data )
2015-04-13 18:35:20 +02:00
except Exception as e :
2015-02-09 23:54:44 +01:00
#raise AnsibleError("failure encoding into utf-8: %s" % str(e))
raise AnsibleError ( " failure writing module data to temporary file for transfer: %s " % str ( e ) )
2014-11-14 23:14:08 +01:00
afo . flush ( )
afo . close ( )
try :
self . _connection . put_file ( afile , remote_path )
finally :
os . unlink ( afile )
return remote_path
def _remote_chmod ( self , tmp , mode , path , sudoable = False ) :
'''
Issue a remote chmod command
'''
2015-06-29 21:41:51 +02:00
cmd = self . _connection . _shell . chmod ( mode , path )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " calling _low_level_execute_command to chmod the remote path " )
2015-02-09 23:54:44 +01:00
res = self . _low_level_execute_command ( cmd , tmp , sudoable = sudoable )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " done with chmod call " )
2015-02-09 23:54:44 +01:00
return res
2014-11-14 23:14:08 +01:00
2015-08-15 18:00:13 +02:00
def _remote_checksum ( self , tmp , path , all_vars ) :
2014-11-14 23:14:08 +01:00
'''
Takes a remote checksum and returns 1 if no file
'''
2015-08-15 18:00:13 +02:00
python_interp = all_vars . get ( ' ansible_python_interpreter ' , ' python ' )
2015-06-29 21:41:51 +02:00
cmd = self . _connection . _shell . checksum ( path , python_interp )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " calling _low_level_execute_command to get the remote checksum " )
2014-11-14 23:14:08 +01:00
data = self . _low_level_execute_command ( cmd , tmp , sudoable = True )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " done getting the remote checksum " )
2014-11-14 23:14:08 +01:00
# FIXME: implement this function?
#data2 = utils.last_non_blank_line(data['stdout'])
try :
2015-01-02 14:51:15 +01:00
data2 = data [ ' stdout ' ] . strip ( ) . splitlines ( ) [ - 1 ]
2014-11-14 23:14:08 +01:00
if data2 == ' ' :
# this may happen if the connection to the remote server
# failed, so just return "INVALIDCHECKSUM" to avoid errors
return " INVALIDCHECKSUM "
else :
return data2 . split ( ) [ 0 ]
except IndexError :
2015-08-07 21:25:36 +02:00
self . _display . warning ( " Calculating checksum failed unusually, please report this to " + \
" the list so it can be fixed \n command: %s \n ---- \n output: %s \n ---- \n " ) % ( cmd , data )
2014-11-14 23:14:08 +01:00
# this will signal that it changed and allow things to keep going
return " INVALIDCHECKSUM "
def _remote_expand_user ( self , path , tmp ) :
''' takes a remote path and performs tilde expansion on the remote host '''
2015-07-24 18:39:54 +02:00
if not path . startswith ( ' ~ ' ) : # FIXME: Windows paths may start with "~ instead of just ~
2014-11-14 23:14:08 +01:00
return path
2015-07-24 18:39:54 +02:00
# FIXME: Can't use os.path.sep for Windows paths.
2014-11-14 23:14:08 +01:00
split_path = path . split ( os . path . sep , 1 )
expand_path = split_path [ 0 ]
if expand_path == ' ~ ' :
2015-07-21 18:12:22 +02:00
if self . _play_context . become and self . _play_context . become_user :
expand_path = ' ~ %s ' % self . _play_context . become_user
2014-11-14 23:14:08 +01:00
2015-06-29 21:41:51 +02:00
cmd = self . _connection . _shell . expand_user ( expand_path )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " calling _low_level_execute_command to expand the remote user path " )
2014-11-14 23:14:08 +01:00
data = self . _low_level_execute_command ( cmd , tmp , sudoable = False )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " done expanding the remote user path " )
2014-11-14 23:14:08 +01:00
#initial_fragment = utils.last_non_blank_line(data['stdout'])
initial_fragment = data [ ' stdout ' ] . strip ( ) . splitlines ( ) [ - 1 ]
if not initial_fragment :
# Something went wrong trying to expand the path remotely. Return
# the original string
return path
if len ( split_path ) > 1 :
2015-06-29 21:41:51 +02:00
return self . _connection . _shell . join_path ( initial_fragment , * split_path [ 1 : ] )
2014-11-14 23:14:08 +01:00
else :
return initial_fragment
def _filter_leading_non_json_lines ( self , data ) :
'''
Used to avoid random output from SSH at the top of JSON output , like messages from
tcagetattr , or where dropbear spews MOTD on every single command ( which is nuts ) .
need to filter anything which starts not with ' { ' , ' [ ' , ' , ' = ' or is an empty line.
filter only leading lines since multiline JSON is valid .
'''
2015-04-13 22:31:16 +02:00
filtered_lines = StringIO ( )
2014-11-14 23:14:08 +01:00
stop_filtering = False
for line in data . splitlines ( ) :
if stop_filtering or line . startswith ( ' { ' ) or line . startswith ( ' [ ' ) :
stop_filtering = True
filtered_lines . write ( line + ' \n ' )
return filtered_lines . getvalue ( )
2015-06-04 21:43:07 +02:00
def _execute_module ( self , module_name = None , module_args = None , tmp = None , task_vars = dict ( ) , persist_files = False , delete_remote_tmp = True ) :
2014-11-14 23:14:08 +01:00
'''
Transfer and run a module along with its arguments .
'''
# if a module name was not specified for this execution, use
# the action from the task
if module_name is None :
module_name = self . _task . action
if module_args is None :
module_args = self . _task . args
2015-01-28 15:55:18 +01:00
# set check mode in the module arguments, if required
2015-07-21 18:12:22 +02:00
if self . _play_context . check_mode and not self . _task . always_run :
2015-01-28 15:55:18 +01:00
if not self . _supports_check_mode :
raise AnsibleError ( " check mode is not supported for this operation " )
module_args [ ' _ansible_check_mode ' ] = True
# set no log in the module arguments, if required
2015-07-21 18:12:22 +02:00
if self . _play_context . no_log :
2015-01-28 15:55:18 +01:00
module_args [ ' _ansible_no_log ' ] = True
2015-07-23 16:24:50 +02:00
self . _display . debug ( " in _execute_module ( %s , %s ) " % ( module_name , module_args ) )
2014-11-14 23:14:08 +01:00
2015-06-04 21:43:07 +02:00
( module_style , shebang , module_data ) = self . _configure_module ( module_name = module_name , module_args = module_args , task_vars = task_vars )
2014-11-14 23:14:08 +01:00
if not shebang :
raise AnsibleError ( " module is missing interpreter line " )
# a remote tmp path may be necessary and not already created
remote_module_path = None
2015-01-02 14:51:15 +01:00
if not tmp and self . _late_needs_tmp_path ( tmp , module_style ) :
2014-11-14 23:14:08 +01:00
tmp = self . _make_tmp_path ( )
2015-07-24 18:39:54 +02:00
if tmp :
2015-06-29 21:41:51 +02:00
remote_module_path = self . _connection . _shell . join_path ( tmp , module_name )
2014-11-14 23:14:08 +01:00
2015-01-28 15:55:18 +01:00
# FIXME: async stuff here?
2014-11-14 23:14:08 +01:00
#if (module_style != 'new' or async_jid is not None or not self._connection._has_pipelining or not C.ANSIBLE_SSH_PIPELINING or C.DEFAULT_KEEP_REMOTE_FILES):
if remote_module_path :
2015-07-23 16:24:50 +02:00
self . _display . debug ( " transferring module to remote " )
2014-11-14 23:14:08 +01:00
self . _transfer_data ( remote_module_path , module_data )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " done transferring module to remote " )
2014-11-14 23:14:08 +01:00
environment_string = self . _compute_environment_string ( )
2015-07-21 18:12:22 +02:00
if tmp and " tmp " in tmp and self . _play_context . become and self . _play_context . become_user != ' root ' :
2014-11-14 23:14:08 +01:00
# deal with possible umask issues once sudo'ed to other user
self . _remote_chmod ( tmp , ' a+r ' , remote_module_path )
cmd = " "
in_data = None
# FIXME: all of the old-module style and async stuff has been removed from here, and
# might need to be re-added (unless we decide to drop support for old-style modules
# at this point and rework things to support non-python modules specifically)
2015-07-23 21:11:10 +02:00
if self . _connection . has_pipelining and C . ANSIBLE_SSH_PIPELINING and not C . DEFAULT_KEEP_REMOTE_FILES :
2014-11-14 23:14:08 +01:00
in_data = module_data
else :
if remote_module_path :
cmd = remote_module_path
rm_tmp = None
if tmp and " tmp " in tmp and not C . DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp :
2015-07-21 18:12:22 +02:00
if not self . _play_context . become or self . _play_context . become_user == ' root ' :
2014-11-14 23:14:08 +01:00
# not sudoing or sudoing to root, so can cleanup files in the same step
rm_tmp = tmp
2015-06-29 21:41:51 +02:00
cmd = self . _connection . _shell . build_module_command ( environment_string , shebang , cmd , rm_tmp )
2014-11-14 23:14:08 +01:00
cmd = cmd . strip ( )
sudoable = True
if module_name == " accelerate " :
# always run the accelerate module as the user
# specified in the play, not the sudo_user
sudoable = False
2015-07-23 16:24:50 +02:00
self . _display . debug ( " calling _low_level_execute_command() for command %s " % cmd )
2014-11-14 23:14:08 +01:00
res = self . _low_level_execute_command ( cmd , tmp , sudoable = sudoable , in_data = in_data )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " _low_level_execute_command returned ok " )
2014-11-14 23:14:08 +01:00
if tmp and " tmp " in tmp and not C . DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp :
2015-07-21 18:12:22 +02:00
if self . _play_context . become and self . _play_context . become_user != ' root ' :
2014-11-14 23:14:08 +01:00
# not sudoing to root, so maybe can't delete files as that other user
# have to clean up temp files as original user in a second step
2015-06-29 21:41:51 +02:00
cmd2 = self . _connection . _shell . remove ( tmp , recurse = True )
2014-11-14 23:14:08 +01:00
self . _low_level_execute_command ( cmd2 , tmp , sudoable = False )
2015-04-21 16:48:13 +02:00
try :
data = json . loads ( self . _filter_leading_non_json_lines ( res . get ( ' stdout ' , ' ' ) ) )
except ValueError :
# not valid json, lets try to capture error
data = dict ( failed = True , parsed = False )
if ' stderr ' in res and res [ ' stderr ' ] . startswith ( ' Traceback ' ) :
2015-06-20 04:58:53 +02:00
data [ ' exception ' ] = res [ ' stderr ' ]
2015-04-21 16:48:13 +02:00
else :
data [ ' msg ' ] = res . get ( ' stdout ' , ' ' )
if ' stderr ' in res :
data [ ' msg ' ] + = res [ ' stderr ' ]
# pre-split stdout into lines, if stdout is in the data and there
# isn't already a stdout_lines value there
if ' stdout ' in data and ' stdout_lines ' not in data :
data [ ' stdout_lines ' ] = data . get ( ' stdout ' , ' ' ) . splitlines ( )
2014-11-14 23:14:08 +01:00
2015-01-15 23:56:54 +01:00
# store the module invocation details back into the result
2015-07-15 16:20:55 +02:00
if self . _task . async != 0 :
2015-07-14 06:23:17 +02:00
data [ ' invocation ' ] = dict (
module_args = module_args ,
module_name = module_name ,
)
2015-01-15 23:56:54 +01:00
2015-07-23 16:24:50 +02:00
self . _display . debug ( " done with _execute_module ( %s , %s ) " % ( module_name , module_args ) )
2014-11-14 23:14:08 +01:00
return data
2015-07-13 22:23:14 +02:00
def _low_level_execute_command ( self , cmd , tmp , sudoable = True , in_data = None , executable = None ) :
2014-11-14 23:14:08 +01:00
'''
This is the function which executes the low level shell command , which
may be commands to create / remove directories for temporary files , or to
run the module code or python directly when pipelining .
'''
2015-07-13 22:23:14 +02:00
if executable is not None :
cmd = executable + ' -c ' + cmd
2015-07-23 16:24:50 +02:00
self . _display . debug ( " in _low_level_execute_command() ( %s ) " % ( cmd , ) )
2014-11-14 23:14:08 +01:00
if not cmd :
# this can happen with powershell modules when there is no analog to a Windows command (like chmod)
2015-07-23 16:24:50 +02:00
self . _display . debug ( " no command, exiting _low_level_execute_command() " )
2014-11-14 23:14:08 +01:00
return dict ( stdout = ' ' , stderr = ' ' )
2015-08-07 22:21:02 +02:00
if sudoable and self . _play_context . become :
self . _display . debug ( " using become for this command " )
2015-07-21 18:12:22 +02:00
cmd = self . _play_context . make_become_cmd ( cmd , executable = executable )
2015-06-24 18:12:54 +02:00
2015-07-23 16:24:50 +02:00
self . _display . debug ( " executing the command %s through the connection " % cmd )
2015-06-15 06:09:25 +02:00
rc , stdin , stdout , stderr = self . _connection . exec_command ( cmd , tmp , in_data = in_data , sudoable = sudoable )
2015-07-23 16:24:50 +02:00
self . _display . debug ( " command execution done " )
2014-11-14 23:14:08 +01:00
if not isinstance ( stdout , basestring ) :
out = ' ' . join ( stdout . readlines ( ) )
else :
out = stdout
if not isinstance ( stderr , basestring ) :
err = ' ' . join ( stderr . readlines ( ) )
else :
err = stderr
2015-07-23 16:24:50 +02:00
self . _display . debug ( " done with _low_level_execute_command() ( %s ) " % ( cmd , ) )
2015-06-06 06:16:35 +02:00
if rc is None :
rc = 0
2015-07-24 18:39:54 +02:00
return dict ( rc = rc , stdout = out , stdout_lines = out . splitlines ( ) , stderr = err )
2015-07-16 01:47:59 +02:00
def _get_first_available_file ( self , faf , of = None , searchdir = ' files ' ) :
2015-07-23 16:24:50 +02:00
self . _display . deprecated ( " first_available_file, use with_first_found or lookup( ' first_found ' ,...) instead " )
2015-07-16 01:47:59 +02:00
for fn in faf :
fn_orig = fn
fnt = self . _templar . template ( fn )
if self . _task . _role is not None :
lead = self . _task . _role . _role_path
else :
lead = fnt
fnd = self . _loader . path_dwim_relative ( lead , searchdir , fnt )
if not os . path . exists ( fnd ) and of is not None :
if self . _task . _role is not None :
lead = self . _task . _role . _role_path
else :
lead = of
fnd = self . _loader . path_dwim_relative ( lead , searchdir , of )
if os . path . exists ( fnd ) :
return fnd
return None
2015-07-27 04:29:56 +02:00
def _get_diff_data ( self , tmp , destination , source , task_vars , source_file = True ) :
diff = { }
self . _display . debug ( " Going to peek to see if file has changed permissions " )
peek_result = self . _execute_module ( module_name = ' file ' , module_args = dict ( path = destination , diff_peek = True ) , task_vars = task_vars , persist_files = True )
if not ( ' failed ' in peek_result and peek_result [ ' failed ' ] ) or peek_result . get ( ' rc ' , 0 ) == 0 :
if peek_result [ ' state ' ] == ' absent ' :
diff [ ' before ' ] = ' '
elif peek_result [ ' appears_binary ' ] :
diff [ ' dst_binary ' ] = 1
elif peek_result [ ' size ' ] > C . MAX_FILE_SIZE_FOR_DIFF :
diff [ ' dst_larger ' ] = C . MAX_FILE_SIZE_FOR_DIFF
else :
self . _display . debug ( " Slurping the file %s " % source )
dest_result = self . _execute_module ( module_name = ' slurp ' , module_args = dict ( path = destination ) , task_vars = task_vars , persist_files = True )
if ' content ' in dest_result :
dest_contents = dest_result [ ' content ' ]
if dest_result [ ' encoding ' ] == ' base64 ' :
dest_contents = base64 . b64decode ( dest_contents )
else :
raise AnsibleError ( " unknown encoding in content option, failed: %s " % dest_result )
diff [ ' before_header ' ] = destination
diff [ ' before ' ] = dest_contents
if source_file :
self . _display . debug ( " Reading local copy of the file %s " % source )
try :
src = open ( source )
src_contents = src . read ( 8192 )
st = os . stat ( source )
except Exception as e :
raise AnsibleError ( " Unexpected error while reading source ( %s ) for diff: %s " % ( source , str ( e ) ) )
if " \x00 " in src_contents :
diff [ ' src_binary ' ] = 1
elif st [ stat . ST_SIZE ] > C . MAX_FILE_SIZE_FOR_DIFF :
diff [ ' src_larger ' ] = C . MAX_FILE_SIZE_FOR_DIFF
else :
diff [ ' after_header ' ] = source
2015-08-16 07:42:07 +02:00
diff [ ' after ' ] = src_contents
2015-07-27 04:29:56 +02:00
else :
self . _display . debug ( " source of file passed in " )
diff [ ' after_header ' ] = ' dynamically generated '
diff [ ' after ' ] = source
return diff