# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
2014-10-02 19:07:05 +02:00
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import json
import os
import pipes
import random
import re
import stat
import tempfile
import time

from abc import ABCMeta, abstractmethod

from ansible.compat.six import binary_type, text_type, iteritems, with_metaclass

from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleConnectionFailure
from ansible.executor.module_common import modify_module
from ansible.parsing.utils.jsonify import jsonify
from ansible.utils.unicode import to_bytes, to_unicode

try:
    from __main__ import display
except ImportError:
    from ansible.utils.display import Display
    display = Display()

class ActionBase(with_metaclass(ABCMeta, object)):

    '''
    This class is the base class for all action plugins, and defines
    code common to all actions. The base class handles the connection
    by putting/getting files and executing commands based on the current
    action in use.
    '''

    def __init__(self, task, connection, play_context, loader, templar, shared_loader_obj):
        self._task              = task
        self._connection        = connection
        self._play_context      = play_context
        self._loader            = loader
        self._templar           = templar
        self._shared_loader_obj = shared_loader_obj

        # Backwards compat: self._display isn't really needed, just import the global display and use that.
        self._display           = display

        self._supports_check_mode = True

    @abstractmethod
    def run(self, tmp=None, task_vars=None):
        """ Action Plugins should implement this method to perform their
        tasks.  Everything else in this base class is a helper method for the
        action plugin to do that.

        :kwarg tmp: Temporary directory.  Sometimes an action plugin sets up
            a temporary directory and then calls another module.  This parameter
            allows us to reuse the same directory for both.
        :kwarg task_vars: The variables (host vars, group vars, config vars,
            etc) associated with this task.
        :returns: dictionary of results from the module

        Implementors of action modules may find the following variables especially useful:

        * Module parameters.  These are stored in self._task.args
        """

        # store the module invocation details into the results
        results = {}
        if self._task.async == 0:
            results['invocation'] = dict(
                module_name=self._task.action,
                module_args=self._task.args,
            )

        return results
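
    # Illustrative sketch (an addition, not part of the original file): a concrete
    # action plugin would typically extend run() along these lines and then lean
    # on the helper methods below; the class name and arguments are hypothetical.
    #
    #   class ActionModule(ActionBase):
    #       def run(self, tmp=None, task_vars=None):
    #           results = super(ActionModule, self).run(tmp, task_vars)
    #           results.update(self._execute_module(task_vars=task_vars, tmp=tmp))
    #           return results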

    def _remote_file_exists(self, path):
        '''
        Checks whether the given path exists on the remote host.
        '''
        cmd = self._connection._shell.exists(path)
        result = self._low_level_execute_command(cmd=cmd, sudoable=True)
        if result['rc'] == 0:
            return True
        return False

    def _configure_module(self, module_name, module_args, task_vars=None):
        '''
        Handles the loading and templating of the module code through the
        modify_module() function.
        '''
        if task_vars is None:
            task_vars = dict()

        # Search module path(s) for named module.
        for mod_type in self._connection.module_implementation_preferences:
            # Check to determine if PowerShell modules are supported, and apply
            # some fixes (hacks) to module name + args.
            if mod_type == '.ps1':
                # win_stat, win_file, and win_copy are not just like their
                # python counterparts but they are compatible enough for our
                # internal usage
                if module_name in ('stat', 'file', 'copy') and self._task.action != module_name:
                    module_name = 'win_%s' % module_name

                # Remove extra quotes surrounding path parameters before sending to module.
                if module_name in ('win_stat', 'win_file', 'win_copy', 'slurp') and module_args and hasattr(self._connection._shell, '_unquote'):
                    for key in ('src', 'dest', 'path'):
                        if key in module_args:
                            module_args[key] = self._connection._shell._unquote(module_args[key])

            module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type)
            if module_path:
                break
        else:  # This is a for-else: http://bit.ly/1ElPkyg
            # Use Windows version of ping module to check module paths when
            # using a connection that supports .ps1 suffixes. We check specifically
            # for win_ping here, otherwise the code would look for ping.ps1
            if '.ps1' in self._connection.module_implementation_preferences:
                ping_module = 'win_ping'
            else:
                ping_module = 'ping'
            module_path2 = self._shared_loader_obj.module_loader.find_plugin(ping_module, self._connection.module_implementation_preferences)
            if module_path2 is not None:
                raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
            else:
                raise AnsibleError("The module %s was not found in configured module paths. "
                                   "Additionally, core modules are missing. If this is a checkout, "
                                   "run 'git submodule update --init --recursive' to correct this problem." % (module_name))

        # insert shared code and arguments into the module
        (module_data, module_style, module_shebang) = modify_module(module_path, module_args, task_vars=task_vars)

        return (module_style, module_shebang, module_data)
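
    # Illustrative call (hypothetical values): _execute_module() below unpacks the
    # return value as
    #   (module_style, shebang, module_data) = self._configure_module('ping', {}, task_vars)
    # where module_style is e.g. 'new', 'old' or 'non_native_want_json'.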

    def _compute_environment_string(self):
        '''
        Builds the environment string to be used when executing the remote task.
        '''

        final_environment = dict()
        if self._task.environment is not None:
            environments = self._task.environment
            if not isinstance(environments, list):
                environments = [environments]

            # the environments as inherited need to be reversed, to make
            # sure we merge in the parent's values first so those in the
            # block then task 'win' in precedence
            environments.reverse()
            for environment in environments:
                if environment is None:
                    continue
                temp_environment = self._templar.template(environment)
                if not isinstance(temp_environment, dict):
                    raise AnsibleError("environment must be a dictionary, received %s (%s)" % (temp_environment, type(temp_environment)))
                # very deliberately using update here instead of combine_vars, as
                # these environment settings should not need to merge sub-dicts
                final_environment.update(temp_environment)

        final_environment = self._templar.template(final_environment)
        return self._connection._shell.env_prefix(**final_environment)
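
    # Illustrative result (assuming the default sh-style shell plugin): a task
    # environment such as {'http_proxy': 'http://proxy.example.com:3128'} is
    # rendered into a prefix along the lines of
    #   http_proxy=http://proxy.example.com:3128
    # which is prepended to the remote command line.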

    def _early_needs_tmp_path(self):
        '''
        Determines if a temp path should be created before the action is executed.
        '''
        return getattr(self, 'TRANSFERS_FILES', False)

    def _late_needs_tmp_path(self, tmp, module_style):
        '''
        Determines if a temp path is required after some early actions have already taken place.
        '''
        if tmp and "tmp" in tmp:
            # tmp has already been created
            return False
        if not self._connection.has_pipelining or not self._play_context.pipelining or C.DEFAULT_KEEP_REMOTE_FILES or self._play_context.become_method == 'su':
            # tmp is necessary to store the module source code
            # or we want to keep the files on the target system
            return True
        if module_style != "new":
            # even when conn has pipelining, old style modules need tmp to store arguments
            return True
        return False

    def _make_tmp_path(self, remote_user):
        '''
        Create and return a temporary path on a remote box.
        '''

        basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
        use_system_tmp = False

        if self._play_context.become and self._play_context.become_user not in ('root', remote_user):
            use_system_tmp = True

        tmp_mode = 0o700

        cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode)
        result = self._low_level_execute_command(cmd, sudoable=False)

        # error handling on this seems a little aggressive?
        if result['rc'] != 0:
            if result['rc'] == 5:
                output = 'Authentication failure.'
            elif result['rc'] == 255 and self._connection.transport in ('ssh',):
                if self._play_context.verbosity > 3:
                    output = u'SSH encountered an unknown error. The output was:\n%s%s' % (result['stdout'], result['stderr'])
                else:
                    output = (u'SSH encountered an unknown error during the connection. '
                              'We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue')
            elif u'No space left on device' in result['stderr']:
                output = result['stderr']
            else:
                output = ('Authentication or permission failure. '
                          'In some cases, you may have been able to authenticate and did not have permissions on the remote directory. '
                          'Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp". '
                          'Failed command was: %s, exited with result %d' % (cmd, result['rc']))
            if 'stdout' in result and result['stdout'] != u'':
                output = output + u": %s" % result['stdout']

            raise AnsibleConnectionFailure(output)

        try:
            rc = self._connection._shell.join_path(result['stdout'].strip(), u'').splitlines()[-1]
        except IndexError:
            # stdout was empty or just space, set to / to trigger error in next if
            rc = '/'

        # Catch failure conditions, files should never be
        # written to locations in /.
        if rc == '/':
            raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))

        return rc
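
    # Illustrative return value (the exact path depends on the shell plugin and
    # the configured remote tmp setting), e.g. something like
    #   /home/user/.ansible/tmp/ansible-tmp-1458673446.2-288486443234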

    def _remove_tmp_path(self, tmp_path):
        '''Remove a temporary path we created. '''

        if tmp_path and "-tmp-" in tmp_path:
            cmd = self._connection._shell.remove(tmp_path, recurse=True)
            # If we have gotten here we have a working ssh configuration.
            # If ssh breaks we could leave tmp directories out on the remote system.
            self._low_level_execute_command(cmd, sudoable=False)

    def _transfer_file(self, local_path, remote_path):
        '''
        Copies a local file to the given path on the remote host via the connection plugin.
        '''
        self._connection.put_file(local_path, remote_path)
        return remote_path

    def _transfer_data(self, remote_path, data):
        '''
        Copies the module data out to the temporary module path.
        '''

        if isinstance(data, dict):
            data = jsonify(data)

        afd, afile = tempfile.mkstemp()
        afo = os.fdopen(afd, 'w')
        try:
            data = to_bytes(data, errors='strict')
            afo.write(data)
        except Exception as e:
            raise AnsibleError("failure writing module data to temporary file for transfer: %s" % str(e))

        afo.flush()
        afo.close()

        try:
            self._transfer_file(afile, remote_path)
        finally:
            os.unlink(afile)

        return remote_path

    def _fixup_perms(self, remote_path, remote_user, execute=False, recursive=True):
        """
        If the become_user is unprivileged and different from the
        remote_user then we need to make the files we've uploaded readable by them.
        """
        if remote_path is None:
            # Sometimes code calls us naively -- it has a var which could
            # contain a path to a tmp dir but doesn't know if it needs to
            # exist or not.  If there's no path, then there's no need for us
            # to do work
            self._display.debug('_fixup_perms called with remote_path==None. Sure this is correct?')
            return remote_path

        if self._play_context.become and self._play_context.become_user not in ('root', remote_user):
            # Unprivileged user that's different than the ssh user.  Let's get
            # to work!
            if remote_user == 'root':
                # SSh'ing as root, therefore we can chown
                res = self._remote_chown(remote_path, self._play_context.become_user, recursive=recursive)
                if res['rc'] != 0:
                    raise AnsibleError('Failed to set owner on remote files (rc: {0}, err: {1})'.format(res['rc'], res['stderr']))
                if execute:
                    # root can read things that don't have read bit but can't
                    # execute them.
                    res = self._remote_chmod('u+x', remote_path, recursive=recursive)
                    if res['rc'] != 0:
                        raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], res['stderr']))
            else:
                if execute:
                    mode = 'rx'
                else:
                    mode = 'rX'

                # Try to use fs acls to solve this problem
                res = self._remote_set_user_facl(remote_path, self._play_context.become_user, mode, recursive=recursive, sudoable=False)
                if res['rc'] != 0:
                    if C.ALLOW_WORLD_READABLE_TMPFILES:
                        # fs acls failed -- do things this insecure way only
                        # if the user opted in in the config file
                        self._display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user which may be insecure. For information on securing this, see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
                        res = self._remote_chmod('a+%s' % mode, remote_path, recursive=recursive)
                        if res['rc'] != 0:
                            raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], res['stderr']))
                    else:
                        raise AnsibleError('Failed to set permissions on the temporary files Ansible needs to create when becoming an unprivileged user. For information on working around this, see https://docs.ansible.com/ansible/become.html#becoming-an-unprivileged-user')
        elif execute:
            # Can't depend on the file being transferred with execute
            # permissions.  Only need user perms because no become was
            # used here
            res = self._remote_chmod('u+x', remote_path, recursive=recursive)
            if res['rc'] != 0:
                raise AnsibleError('Failed to set file mode on remote files (rc: {0}, err: {1})'.format(res['rc'], res['stderr']))

        return remote_path

    def _remote_chmod(self, mode, path, recursive=True, sudoable=False):
        '''
        Issue a remote chmod command
        '''
        cmd = self._connection._shell.chmod(mode, path, recursive=recursive)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _remote_chown(self, path, user, group=None, recursive=True, sudoable=False):
        '''
        Issue a remote chown command
        '''
        cmd = self._connection._shell.chown(path, user, group, recursive=recursive)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _remote_set_user_facl(self, path, user, mode, recursive=True, sudoable=False):
        '''
        Issue a remote call to setfacl
        '''
        cmd = self._connection._shell.set_user_facl(path, user, mode, recursive=recursive)
        res = self._low_level_execute_command(cmd, sudoable=sudoable)
        return res

    def _execute_remote_stat(self, path, all_vars, follow, tmp=None):
        '''
        Get information from remote file.
        '''
        module_args = dict(
            path=path,
            follow=follow,
            get_md5=False,
            get_checksum=True,
            checksum_algo='sha1',
        )
        mystat = self._execute_module(module_name='stat', module_args=module_args, task_vars=all_vars, tmp=tmp, delete_remote_tmp=(tmp is None))

        if 'failed' in mystat and mystat['failed']:
            raise AnsibleError('Failed to get information on remote file (%s): %s' % (path, mystat['msg']))

        if not mystat['stat']['exists']:
            # empty might be matched, 1 should never match, also backwards compatible
            mystat['stat']['checksum'] = '1'

        # happens sometimes when it is a dir and not on bsd
        if 'checksum' not in mystat['stat']:
            mystat['stat']['checksum'] = ''

        return mystat['stat']
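
    # Illustrative shape of the returned dict (a subset of the stat module's
    # 'stat' result); callers below rely on keys such as 'exists', 'isdir' and
    # 'checksum', e.g.
    #   {'exists': True, 'isdir': False, 'checksum': 'da39a3ee5e6b4b0d3255bfef95601890afd80709', ...}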

    def _remote_checksum(self, path, all_vars):
        '''
        Produces a remote checksum given a path.
        Returns a number 0-4 for specific errors instead of a checksum, and also ensures it is different:
        0 = unknown error
        1 = file does not exist, this might not be an error
        2 = permissions issue
        3 = its a directory, not a file
        4 = stat module failed, likely due to not finding python
        '''
        x = "0"  # unknown error has occurred
        try:
            remote_stat = self._execute_remote_stat(path, all_vars, follow=False)
            if remote_stat['exists'] and remote_stat['isdir']:
                x = "3"  # its a directory not a file
            else:
                x = remote_stat['checksum']  # if 1, file is missing
        except AnsibleError as e:
            errormsg = to_unicode(e)
            if errormsg.endswith('Permission denied'):
                x = "2"  # cannot read file
            elif errormsg.endswith('MODULE FAILURE'):
                x = "4"  # python not found or module uncaught exception
        finally:
            return x

    def _remote_expand_user(self, path):
        ''' takes a remote path and performs tilde expansion on the remote host '''
        if not path.startswith('~'):  # FIXME: Windows paths may start with "~ instead of just ~
            return path

        # FIXME: Can't use os.path.sep for Windows paths.
        split_path = path.split(os.path.sep, 1)
        expand_path = split_path[0]
        if expand_path == '~':
            if self._play_context.become and self._play_context.become_user:
                expand_path = '~%s' % self._play_context.become_user

        cmd = self._connection._shell.expand_user(expand_path)
        data = self._low_level_execute_command(cmd, sudoable=False)
        #initial_fragment = utils.last_non_blank_line(data['stdout'])
        initial_fragment = data['stdout'].strip().splitlines()[-1]

        if not initial_fragment:
            # Something went wrong trying to expand the path remotely.  Return
            # the original string
            return path

        if len(split_path) > 1:
            return self._connection._shell.join_path(initial_fragment, *split_path[1:])
        else:
            return initial_fragment
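
    # Illustrative expansion (hypothetical host): with path='~/.ansible/tmp' and
    # become_user='deploy', the remote shell is asked to expand '~deploy' and the
    # method returns something like '/home/deploy/.ansible/tmp'.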

    def _filter_leading_non_json_lines(self, data):
        '''
        Used to avoid random output from SSH at the top of JSON output, like messages from
        tcagetattr, or where dropbear spews MOTD on every single command (which is nuts).

        Need to filter anything which does not start with '{' or '[', and filter only
        leading lines since multiline JSON is valid.
        '''
        idx = 0
        for line in data.splitlines(True):
            if line.startswith((u'{', u'[')):
                break
            idx = idx + len(line)

        return data[idx:]
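
    # For example (illustrative):
    #   self._filter_leading_non_json_lines(u'spurious motd text\n{"changed": false}\n')
    # returns u'{"changed": false}\n'.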

    def _strip_success_message(self, data):
        '''
        Removes the BECOME-SUCCESS message from the data.
        '''
        if data.strip().startswith('BECOME-SUCCESS-'):
            data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
        return data
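
    # For example (illustrative): u'BECOME-SUCCESS-abc123\n{"changed": false}' is
    # reduced to u'{"changed": false}'.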

    def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True):
        '''
        Transfer and run a module along with its arguments.
        '''
        if task_vars is None:
            task_vars = dict()

        # if a module name was not specified for this execution, use
        # the action from the task
        if module_name is None:
            module_name = self._task.action
        if module_args is None:
            module_args = self._task.args

        # set check mode in the module arguments, if required
        if self._play_context.check_mode:
            if not self._supports_check_mode:
                raise AnsibleError("check mode is not supported for this operation")
            module_args['_ansible_check_mode'] = True
        else:
            module_args['_ansible_check_mode'] = False

        # Get the connection user for permission checks
        remote_user = task_vars.get('ansible_ssh_user') or self._play_context.remote_user

        # set no log in the module arguments, if required
        module_args['_ansible_no_log'] = self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG

        # set debug in the module arguments, if required
        module_args['_ansible_debug'] = C.DEFAULT_DEBUG

        # let module know we are in diff mode
        module_args['_ansible_diff'] = self._play_context.diff

        # let module know our verbosity
        module_args['_ansible_verbosity'] = self._display.verbosity

        (module_style, shebang, module_data) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
        if not shebang:
            raise AnsibleError("module (%s) is missing interpreter line" % module_name)

        # a remote tmp path may be necessary and not already created
        remote_module_path = None
        args_file_path = None
        if not tmp and self._late_needs_tmp_path(tmp, module_style):
            tmp = self._make_tmp_path(remote_user)

        if tmp:
            remote_module_filename = self._connection._shell.get_remote_filename(module_name)
            remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
            if module_style in ['old', 'non_native_want_json']:
                # we'll also need a temp file to hold our module arguments
                args_file_path = self._connection._shell.join_path(tmp, 'args')

        if remote_module_path or module_style != 'new':
            display.debug("transferring module to remote")
            self._transfer_data(remote_module_path, module_data)
            if module_style == 'old':
                # we need to dump the module args to a k=v string in a file on
                # the remote system, which can be read and parsed by the module
                args_data = ""
                for k, v in iteritems(module_args):
                    args_data += '%s="%s" ' % (k, pipes.quote(text_type(v)))
                self._transfer_data(args_file_path, args_data)
            elif module_style == 'non_native_want_json':
                self._transfer_data(args_file_path, json.dumps(module_args))
            display.debug("done transferring module to remote")

        environment_string = self._compute_environment_string()

        # Fix permissions of the tmp path and tmp files.  This should be
        # called after all files have been transferred.
        self._fixup_perms(tmp, remote_user, recursive=True)

        cmd = ""
        in_data = None

        if self._connection.has_pipelining and self._play_context.pipelining and not C.DEFAULT_KEEP_REMOTE_FILES and module_style == 'new':
            in_data = module_data
        else:
            if remote_module_path:
                cmd = remote_module_path

        rm_tmp = None
        if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
            if not self._play_context.become or self._play_context.become_user == 'root':
                # not sudoing or sudoing to root, so can cleanup files in the same step
                rm_tmp = tmp

        cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp)
        cmd = cmd.strip()

        sudoable = True
        if module_name == "accelerate":
            # always run the accelerate module as the user
            # specified in the play, not the sudo_user
            sudoable = False

        res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)

        if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
            if self._play_context.become and self._play_context.become_user != 'root':
                # not sudoing to root, so maybe can't delete files as that other user
                # have to clean up temp files as original user in a second step
                cmd2 = self._connection._shell.remove(tmp, recurse=True)
                res2 = self._low_level_execute_command(cmd2, sudoable=False)
                if res2['rc'] != 0:
                    display.warning('Error deleting remote temporary files (rc: {0}, stderr: {1})'.format(res2['rc'], res2['stderr']))

        try:
            data = json.loads(self._filter_leading_non_json_lines(res.get('stdout', u'')))
        except ValueError:
            # not valid json, lets try to capture error
            data = dict(failed=True, parsed=False)
            data['msg'] = "MODULE FAILURE"
            data['module_stdout'] = res.get('stdout', u'')
            if 'stderr' in res:
                data['module_stderr'] = res['stderr']
                if res['stderr'].startswith(u'Traceback'):
                    data['exception'] = res['stderr']

        # pre-split stdout into lines, if stdout is in the data and there
        # isn't already a stdout_lines value there
        if 'stdout' in data and 'stdout_lines' not in data:
            data['stdout_lines'] = data.get('stdout', u'').splitlines()

        display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
        return data
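
    # Illustrative failure shape: when the module output cannot be parsed as JSON,
    # callers receive something along the lines of
    #   {'failed': True, 'parsed': False, 'msg': 'MODULE FAILURE', 'module_stdout': u'...'}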

    def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=None, encoding_errors='replace'):
        '''
        This is the function which executes the low level shell command, which
        may be commands to create/remove directories for temporary files, or to
        run the module code or python directly when pipelining.

        :kwarg encoding_errors: If the value returned by the command isn't
            utf-8 then we have to figure out how to transform it to unicode.
            If the value is just going to be displayed to the user (or
            discarded) then the default of 'replace' is fine.  If the data is
            used as a key or is going to be written back out to a file
            verbatim, then this won't work.  May have to use some sort of
            replacement strategy (python3 could use surrogateescape)
        '''

        display.debug("_low_level_execute_command(): starting")
        if not cmd:
            # this can happen with powershell modules when there is no analog to a Windows command (like chmod)
            display.debug("_low_level_execute_command(): no command, exiting")
            return dict(stdout='', stderr='')

        allow_same_user = C.BECOME_ALLOW_SAME_USER
        same_user = self._play_context.become_user == self._play_context.remote_user
        if sudoable and self._play_context.become and (allow_same_user or not same_user):
            display.debug("_low_level_execute_command(): using become for this command")
            cmd = self._play_context.make_become_cmd(cmd, executable=executable)

        if self._connection.allow_executable:
            if executable is None:
                executable = self._play_context.executable
            if executable:
                cmd = executable + ' -c ' + pipes.quote(cmd)

        display.debug("_low_level_execute_command(): executing: %s" % (cmd,))
        rc, stdout, stderr = self._connection.exec_command(cmd, in_data=in_data, sudoable=sudoable)

        # stdout and stderr may be either a file-like or a bytes object.
        # Convert either one to a text type
        if isinstance(stdout, binary_type):
            out = to_unicode(stdout, errors=encoding_errors)
        elif not isinstance(stdout, text_type):
            out = to_unicode(b''.join(stdout.readlines()), errors=encoding_errors)
        else:
            out = stdout

        if isinstance(stderr, binary_type):
            err = to_unicode(stderr, errors=encoding_errors)
        elif not isinstance(stderr, text_type):
            err = to_unicode(b''.join(stderr.readlines()), errors=encoding_errors)
        else:
            err = stderr

        if rc is None:
            rc = 0

        # be sure to remove the BECOME-SUCCESS message now
        out = self._strip_success_message(out)

        display.debug("_low_level_execute_command() done: rc=%d, stdout=%s, stderr=%s" % (rc, stdout, stderr))
        return dict(rc=rc, stdout=out, stdout_lines=out.splitlines(), stderr=err)
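
    # Illustrative return value: callers normally get a plain dict such as
    #   {'rc': 0, 'stdout': u'...', 'stdout_lines': [u'...'], 'stderr': u''}
    # so failures surface through 'rc' and 'stderr' rather than exceptions; note
    # the early no-command return above only carries 'stdout' and 'stderr'.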

    def _get_first_available_file(self, faf, of=None, searchdir='files'):

        display.deprecated("first_available_file, use with_first_found or lookup('first_found',...) instead")
        for fn in faf:
            fnt = self._templar.template(fn)
            if self._task._role is not None:
                lead = self._task._role._role_path
            else:
                lead = fnt
            fnd = self._loader.path_dwim_relative(lead, searchdir, fnt)

            if not os.path.exists(fnd) and of is not None:
                if self._task._role is not None:
                    lead = self._task._role._role_path
                else:
                    lead = of
                fnd = self._loader.path_dwim_relative(lead, searchdir, of)

            if os.path.exists(fnd):
                return fnd

        return None

    def _get_diff_data(self, destination, source, task_vars, source_file=True):

        diff = {}
        display.debug("Going to peek to see if file has changed permissions")
        peek_result = self._execute_module(module_name='file', module_args=dict(path=destination, diff_peek=True), task_vars=task_vars, persist_files=True)

        if not ('failed' in peek_result and peek_result['failed']) or peek_result.get('rc', 0) == 0:

            if peek_result['state'] == 'absent':
                diff['before'] = ''
            elif peek_result['appears_binary']:
                diff['dst_binary'] = 1
            elif C.MAX_FILE_SIZE_FOR_DIFF > 0 and peek_result['size'] > C.MAX_FILE_SIZE_FOR_DIFF:
                diff['dst_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
            else:
                display.debug("Slurping the file %s" % source)
                dest_result = self._execute_module(module_name='slurp', module_args=dict(path=destination), task_vars=task_vars, persist_files=True)
                if 'content' in dest_result:
                    dest_contents = dest_result['content']
                    if dest_result['encoding'] == 'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise AnsibleError("unknown encoding in content option, failed: %s" % dest_result)
                    diff['before_header'] = destination
                    diff['before'] = dest_contents

            if source_file:
                st = os.stat(source)
                if C.MAX_FILE_SIZE_FOR_DIFF > 0 and st[stat.ST_SIZE] > C.MAX_FILE_SIZE_FOR_DIFF:
                    diff['src_larger'] = C.MAX_FILE_SIZE_FOR_DIFF
                else:
                    display.debug("Reading local copy of the file %s" % source)
                    try:
                        src = open(source)
                        src_contents = src.read()
                    except Exception as e:
                        raise AnsibleError("Unexpected error while reading source (%s) for diff: %s" % (source, str(e)))

                    if "\x00" in src_contents:
                        diff['src_binary'] = 1
                    else:
                        diff['after_header'] = source
                        diff['after'] = src_contents
            else:
                display.debug("source of file passed in")
                diff['after_header'] = 'dynamically generated'
                diff['after'] = source

        if self._play_context.no_log:
            if 'before' in diff:
                diff["before"] = ""
            if 'after' in diff:
                diff["after"] = " [[ Diff output has been hidden because 'no_log: true' was specified for this result ]]"

        return diff