Mirror of https://github.com/ansible-collections/community.general.git, synced 2024-09-14 20:13:21 +02:00
Move a path being passed around as a byte string to being passed around as a text string. (#17190)
This is enough to get a minimal copy module working on python3. We have to modify the dataloader's path_dwim_relative_stack and everything that calls it to use text paths instead of byte string paths.
commit 313d4b2c9e
parent 20bde8f549
12 changed files with 94 additions and 58 deletions
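The whole diff applies one convention: paths travel through Ansible as text strings, are converted to bytes only at the point where a filesystem call is made, and any path that comes back from the filesystem is converted to text again before it is returned. Below is a minimal sketch of that boundary pattern; the simplified to_bytes/to_unicode helpers and the find_existing function are illustrative stand-ins, not code from this commit.

# Sketch of the text-in / bytes-at-the-boundary convention used throughout this commit.
# The helpers below are simplified stand-ins for ansible.utils.unicode.to_bytes/to_unicode,
# and find_existing() is a hypothetical example function.
import os

def to_bytes(obj, errors='strict'):
    return obj.encode('utf-8', errors) if isinstance(obj, str) else obj

def to_unicode(obj, errors='strict'):
    return obj.decode('utf-8', errors) if isinstance(obj, bytes) else obj

def find_existing(candidates):
    '''Return the first existing path from candidates as a text string, or None.'''
    for candidate in candidates:                # text strings come in
        b_candidate = to_bytes(candidate)       # bytes only for the os call
        if os.path.exists(b_candidate):
            return to_unicode(b_candidate)      # text string goes back out
    return None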
@@ -36,7 +36,7 @@ from ansible.parsing.yaml.loader import AnsibleLoader
 from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleUnicode
 from ansible.module_utils.basic import is_executable
 from ansible.utils.path import unfrackpath
-from ansible.utils.unicode import to_unicode, to_bytes
+from ansible.utils.unicode import to_unicode, to_bytes, to_str

 try:
     from __main__ import display
@@ -279,45 +279,56 @@ class DataLoader():
     def path_dwim_relative_stack(self, paths, dirname, source):
         '''
         find one file in first path in stack taking roles into account and adding play basedir as fallback

+        :arg paths: A list of text strings which are the paths to look for the filename in.
+        :arg dirname: A text string representing a directory.  The directory
+            is prepended to the source to form the path to search for.
+        :arg source: A text string which is the filename to search for
+        :rtype: A text string
+        :returns: An absolute path to the filename ``source``
         '''
+        b_dirname = to_bytes(dirname)
+        b_source = to_bytes(source)
+
         result = None
         if not source:
             display.warning('Invalid request to find a file that matches an empty string or "null" value')
         elif source.startswith('~') or source.startswith(os.path.sep):
             # path is absolute, no relative needed, check existence and return source
-            test_path = to_bytes(unfrackpath(source), errors='strict')
-            if os.path.exists(test_path):
+            test_path = unfrackpath(b_source)
+            if os.path.exists(to_bytes(test_path, errors='strict')):
                 result = test_path
         else:
             search = []
             for path in paths:
                 upath = unfrackpath(path)
-                mydir = os.path.dirname(upath)
+                b_upath = to_bytes(upath, errors='strict')
+                b_mydir = os.path.dirname(b_upath)

                 # if path is in role and 'tasks' not there already, add it into the search
-                if upath.endswith('tasks') and os.path.exists(to_bytes(os.path.join(upath, 'main.yml'), errors='strict')) \
-                    or os.path.exists(to_bytes(os.path.join(upath, 'tasks/main.yml'), errors='strict')) \
-                    or os.path.exists(to_bytes(os.path.join(os.path.dirname(upath), 'tasks/main.yml'), errors='strict')):
-                    if mydir.endswith('tasks'):
-                        search.append(os.path.join(os.path.dirname(mydir), dirname, source))
-                        search.append(os.path.join(mydir, source))
+                if b_upath.endswith(b'tasks') and os.path.exists(os.path.join(b_upath, b'main.yml')) \
+                    or os.path.exists(os.path.join(b_upath, b'tasks/main.yml')) \
+                    or os.path.exists(os.path.join(b_mydir, b'tasks/main.yml')):
+                    if b_mydir.endswith(b'tasks'):
+                        search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
+                        search.append(os.path.join(b_mydir, b_source))
                     else:
-                        search.append(os.path.join(upath, dirname, source))
-                        search.append(os.path.join(upath, 'tasks', source))
-                elif dirname not in source.split('/'):
+                        search.append(os.path.join(b_upath, b_dirname, b_source))
+                        search.append(os.path.join(b_upath, b'tasks', b_source))
+                elif b_dirname not in b_source.split(b'/'):
                     # don't add dirname if user already is using it in source
-                    search.append(os.path.join(upath, dirname, source))
-                    search.append(os.path.join(upath, source))
+                    search.append(os.path.join(b_upath, b_dirname, b_source))
+                    search.append(os.path.join(b_upath, b_source))

             # always append basedir as last resort
-            search.append(os.path.join(self.get_basedir(), dirname, source))
-            search.append(os.path.join(self.get_basedir(), source))
+            search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
+            search.append(os.path.join(to_bytes(self.get_basedir()), b_source))

-            display.debug('search_path:\n\t' + '\n\t'.join(search))
-            for candidate in search:
-                display.vvvvv('looking for "%s" at "%s"' % (source, candidate))
-                if os.path.exists(to_bytes(candidate, errors='strict')):
-                    result = candidate
+            display.debug(u'search_path:\n\t%s' % to_unicode(b'\n\t'.join(search), errors='replace'))
+            for b_candidate in search:
+                display.vvvvv(u'looking for "%s" at "%s"' % (source, to_unicode(b_candidate)))
+                if os.path.exists(b_candidate):
+                    result = to_unicode(b_candidate)
                     break

         return result
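For callers, the visible contract of path_dwim_relative_stack() stays text-to-text: paths, dirname, and source go in as text strings and the returned match (or None) comes back as a text string, with all byte handling kept inside the method. A hedged usage sketch against the Ansible internals of this era, with invented playbook paths:

# Illustrative only: assumes DataLoader and AnsibleError live at these import
# paths in the Ansible tree of this commit; the directory layout is made up.
from ansible.errors import AnsibleError
from ansible.parsing.dataloader import DataLoader

loader = DataLoader()
path_stack = [u'/srv/playbooks/roles/web/tasks', u'/srv/playbooks']

found = loader.path_dwim_relative_stack(path_stack, u'files', u'motd.txt')
if found is None:
    raise AnsibleError("Unable to find 'motd.txt' in expected paths.")
# found is a text string such as u'/srv/playbooks/roles/web/files/motd.txt'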
@@ -370,10 +381,11 @@ class DataLoader():
         """

         if not file_path or not isinstance(file_path, string_types):
-            raise AnsibleParserError("Invalid filename: '%s'" % str(file_path))
+            raise AnsibleParserError("Invalid filename: '%s'" % to_str(file_path))

-        if not self.path_exists(file_path) or not self.is_file(file_path):
-            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_path)
+        b_file_path = to_bytes(file_path, errors='strict')
+        if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
+            raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % to_str(file_path))

         if not self._vault:
             self._vault = VaultLib(password="")
@@ -398,7 +410,7 @@ class DataLoader():
             return real_path

         except (IOError, OSError) as e:
-            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (real_path, str(e)))
+            raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_str(real_path), to_str(e)))

     def cleanup_tmp_file(self, file_path):
         """
@@ -37,7 +37,7 @@ from ansible.errors import AnsibleError, AnsibleConnectionFailure
 from ansible.executor.module_common import modify_module
 from ansible.release import __version__
 from ansible.parsing.utils.jsonify import jsonify
-from ansible.utils.unicode import to_bytes, to_unicode
+from ansible.utils.unicode import to_bytes, to_str, to_unicode

 try:
     from __main__ import display
@@ -844,7 +844,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
         result = self._loader.path_dwim_relative_stack(path_stack, dirname, needle)

         if result is None:
-            raise AnsibleError("Unable to find '%s' in expected paths." % needle)
+            raise AnsibleError("Unable to find '%s' in expected paths." % to_str(needle))

         return result

@@ -27,7 +27,7 @@ from ansible.errors import AnsibleError
 from ansible.plugins.action import ActionBase
 from ansible.utils.boolean import boolean
 from ansible.utils.hashing import checksum_s
-from ansible.utils.unicode import to_str
+from ansible.utils.unicode import to_str, to_unicode


 class ActionModule(ActionBase):
@@ -42,10 +42,10 @@ class ActionModule(ActionBase):
        delimit_me = False
        add_newline = False

-       for f in sorted(os.listdir(src_path)):
+       for f in (to_unicode(p, errors='strict') for p in sorted(os.listdir(src_path))):
            if compiled_regexp and not compiled_regexp.search(f):
                continue
-           fragment = "%s/%s" % (src_path, f)
+           fragment = u"%s/%s" % (src_path, f)
            if not os.path.isfile(fragment) or (ignore_hidden and os.path.basename(fragment).startswith('.')):
                continue

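The generator expression above matters mostly on Python 2, where os.listdir() returns byte strings for a byte path and can return byte strings even for a text path when an entry does not decode; normalizing each name with to_unicode(..., errors='strict') keeps the regexp match and the later path join in text space. A standalone sketch of the same normalization, with a simplified to_unicode stand-in:

# Sketch: normalize os.listdir() results to text before filtering and joining.
# to_unicode() here is a simplified stand-in for the ansible.utils.unicode helper.
import os

def to_unicode(obj, errors='strict'):
    return obj.decode('utf-8', errors) if isinstance(obj, bytes) else obj

def list_fragments(src_path):
    # Sort the raw listing first, then decode each name, mirroring the diff above.
    return [to_unicode(name) for name in sorted(os.listdir(src_path))]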
@@ -119,7 +119,7 @@ class ActionModule(ActionBase):

        if not os.path.isdir(src):
            result['failed'] = True
-           result['msg'] = "Source (%s) is not a directory" % src
+           result['msg'] = u"Source (%s) is not a directory" % src
            return result

        _re = None
@@ -18,10 +18,12 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

 import json
+import pipes
 import random

 from ansible import constants as C
 from ansible.plugins.action import ActionBase
+from ansible.compat.six import iteritems
 from ansible.utils.unicode import to_unicode

 class ActionModule(ActionBase):
@@ -27,7 +27,7 @@ from ansible.errors import AnsibleError
 from ansible.plugins.action import ActionBase
 from ansible.utils.boolean import boolean
 from ansible.utils.hashing import checksum
-from ansible.utils.unicode import to_bytes, to_str
+from ansible.utils.unicode import to_bytes, to_str, to_unicode


 class ActionModule(ActionBase):
@@ -96,7 +96,7 @@ class ActionModule(ActionBase):
                source = self._find_needle('files', source)
            except AnsibleError as e:
                result['failed'] = True
-               result['msg'] = to_str(e)
+               result['msg'] = to_unicode(e)
                return result

        # A list of source file tuples (full_path, relative_path) which will try to copy to the destination
@@ -111,7 +111,7 @@ class ActionModule(ActionBase):
            sz = len(source.rsplit('/', 1)[0]) + 1

            # Walk the directory and append the file tuples to source_files.
-           for base_path, sub_folders, files in os.walk(source):
+           for base_path, sub_folders, files in os.walk(to_bytes(source)):
                for file in files:
                    full_path = os.path.join(base_path, file)
                    rel_path = full_path[sz:]
@@ -93,19 +93,20 @@ class ActionModule(ActionBase):
                dest = os.path.join(dest, base)

        # template the source data locally & get ready to transfer
+       b_source = to_bytes(source)
        try:
-           with open(source, 'r') as f:
+           with open(b_source, 'r') as f:
                template_data = to_unicode(f.read())

            try:
-               template_uid = pwd.getpwuid(os.stat(source).st_uid).pw_name
+               template_uid = pwd.getpwuid(os.stat(b_source).st_uid).pw_name
            except:
-               template_uid = os.stat(source).st_uid
+               template_uid = os.stat(b_source).st_uid

            temp_vars = task_vars.copy()
            temp_vars['template_host'] = os.uname()[1]
            temp_vars['template_path'] = source
-           temp_vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(source))
+           temp_vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(b_source))
            temp_vars['template_uid'] = template_uid
            temp_vars['template_fullpath'] = os.path.abspath(source)
            temp_vars['template_run_date'] = datetime.datetime.now()
@@ -118,7 +119,7 @@ class ActionModule(ActionBase):
            )
            temp_vars['ansible_managed'] = time.strftime(
                managed_str,
-               time.localtime(os.path.getmtime(source))
+               time.localtime(os.path.getmtime(b_source))
            )

            # Create a new searchpath list to assign to the templar environment's file
@@ -38,7 +38,7 @@ class LookupModule(LookupBase):

            # Find the file in the expected search path
            lookupfile = self.find_file_in_search_path(variables, 'files', term)
-           display.vvvv("File lookup using %s as file" % lookupfile)
+           display.vvvv(u"File lookup using %s as file" % lookupfile)
            try:
                if lookupfile:
                    contents, show_data = self._loader._get_file_contents(lookupfile)
@@ -22,6 +22,7 @@ import glob

 from ansible.plugins.lookup import LookupBase
 from ansible.errors import AnsibleFileNotFound
+from ansible.module_utils.unicode import to_bytes

 class LookupModule(LookupBase):

|
||||||
except AnsibleFileNotFound:
|
except AnsibleFileNotFound:
|
||||||
dwimmed_path = None
|
dwimmed_path = None
|
||||||
if dwimmed_path:
|
if dwimmed_path:
|
||||||
globbed = glob.glob(os.path.join(dwimmed_path, term_file))
|
globbed = glob.glob(to_bytes(os.path.join(dwimmed_path, term_file), errors='strict'))
|
||||||
ret.extend(g for g in globbed if os.path.isfile(g))
|
ret.extend(to_unicode(g, errors='strict') for g in globbed if os.path.isfile(g))
|
||||||
return ret
|
return ret
|
||||||
|
|
|
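The same boundary rule shows up here: glob.glob() is given a byte pattern, so every match comes back as bytes and has to be decoded before it joins the text results the lookup returns. A simplified sketch using plain encode/decode where the real code uses the to_bytes/to_unicode helpers with errors='strict'; glob_files() is a hypothetical helper, not code from this commit.

# Sketch: glob on a byte pattern, hand text strings back to the caller.
import glob
import os

def glob_files(dwimmed_path, term_file):
    # Build the pattern as bytes for the filesystem call...
    b_pattern = os.path.join(dwimmed_path, term_file).encode('utf-8')
    # ...and decode each match back to text before returning it.
    return [g.decode('utf-8') for g in glob.glob(b_pattern) if os.path.isfile(g)]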
@@ -58,13 +58,13 @@ class LookupModule(LookupBase):

    def read_properties(self, filename, key, dflt, is_regexp):
        config = StringIO()
-       config.write(u'[java_properties]\n' + open(filename).read())
+       config.write(u'[java_properties]\n' + open(to_bytes(filename, errors='strict')).read())
        config.seek(0, os.SEEK_SET)
        self.cp.readfp(config)
        return self.get_value(key, 'java_properties', dflt, is_regexp)

    def read_ini(self, filename, key, section, dflt, is_regexp):
-       self.cp.readfp(open(filename))
+       self.cp.readfp(open(to_bytes(filename, errors='strict')))
        return self.get_value(key, section, dflt, is_regexp)

    def get_value(self, key, section, dflt, is_regexp):
@@ -21,6 +21,7 @@ import shelve

 from ansible.errors import AnsibleError
 from ansible.plugins.lookup import LookupBase
+from ansible.utils.unicode import to_bytes, to_unicode

 class LookupModule(LookupBase):

@@ -29,7 +30,7 @@ class LookupModule(LookupBase):
        """
        Read the value of "key" from a shelve file
        """
-       d = shelve.open(shelve_filename)
+       d = shelve.open(to_bytes(shelve_filename))
        res = d.get(key, None)
        d.close()
        return res
@@ -65,7 +66,7 @@ class LookupModule(LookupBase):
                if res is None:
                    raise AnsibleError("Key %s not found in shelve file %s" % (key, file))
                # Convert the value read to string
-               ret.append(str(res))
+               ret.append(to_unicode(res))
                break
            else:
                raise AnsibleError("Could not locate shelve file in lookup: %s" % file)
@@ -43,7 +43,7 @@ class LookupModule(LookupBase):
            lookupfile = self.find_file_in_search_path(variables, 'templates', term)
            display.vvvv("File lookup using %s as file" % lookupfile)
            if lookupfile:
-               with open(lookupfile, 'r') as f:
+               with open(to_bytes(lookupfile, errors='strict'), 'r') as f:
                    template_data = to_unicode(f.read())

                    # set jinja2 internal search path for includes
@@ -20,29 +20,48 @@ __metaclass__ = type
 import os
 from errno import EEXIST
 from ansible.errors import AnsibleError
-from ansible.utils.unicode import to_bytes, to_str
+from ansible.utils.unicode import to_bytes, to_str, to_unicode
+from ansible.compat.six import PY2

 __all__ = ['unfrackpath', 'makedirs_safe']

 def unfrackpath(path):
     '''
-    returns a path that is free of symlinks, environment
+    Returns a path that is free of symlinks, environment
     variables, relative path traversals and symbols (~)
-    example:
+
+    :arg path: A byte or text string representing a path to be canonicalized
+    :raises UnicodeDecodeError: If the canonicalized version of the path
+        contains non-utf8 byte sequences.
+    :rtype: A text string (unicode on python2, str on python3).
+    :returns: An absolute path with symlinks, environment variables, and tilde
+        expanded.  Note that this does not check whether a path exists.
+
+    example::
         '$HOME/../../var/mail' becomes '/var/spool/mail'
     '''
-    return os.path.normpath(os.path.realpath(os.path.expanduser(os.path.expandvars(to_bytes(path, errors='strict')))))
+    canonical_path = os.path.normpath(os.path.realpath(os.path.expanduser(os.path.expandvars(to_bytes(path, errors='strict')))))
+    if PY2:
+        return to_unicode(canonical_path, errors='strict')
+    return to_unicode(canonical_path, errors='surrogateescape')

 def makedirs_safe(path, mode=None):
-    '''Safe way to create dirs in muliprocess/thread environments'''
+    '''Safe way to create dirs in muliprocess/thread environments.
+
+    :arg path: A byte or text string representing a directory to be created
+    :kwarg mode: If given, the mode to set the directory to
+    :raises AnsibleError: If the directory cannot be created and does not already exists.
+    :raises UnicodeDecodeError: if the path is not decodable in the utf-8 encoding.
+    '''
+
     rpath = unfrackpath(path)
-    if not os.path.exists(rpath):
+    b_rpath = to_bytes(rpath)
+    if not os.path.exists(b_rpath):
         try:
             if mode:
-                os.makedirs(rpath, mode)
+                os.makedirs(b_rpath, mode)
             else:
-                os.makedirs(rpath)
+                os.makedirs(b_rpath)
         except OSError as e:
             if e.errno != EEXIST:
-                raise AnsibleError("Unable to create local directories(%s): %s" % (rpath, to_str(e)))
+                raise AnsibleError("Unable to create local directories(%s): %s" % (to_str(rpath), to_str(e)))
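After this change the two helpers have a clearer contract: unfrackpath() always hands back a text string (decoded strictly on Python 2, with surrogateescape on Python 3), and makedirs_safe() converts to bytes itself and tolerates the directory already existing because EEXIST is swallowed. A hedged usage sketch against the ansible.utils.path module shown above; the example path is invented.

# Usage sketch only; assumes the ansible.utils.path API as it stands after this commit.
from ansible.utils.path import makedirs_safe, unfrackpath

cache_dir = unfrackpath(u'~/.ansible/tmp')   # text string, e.g. u'/home/user/.ansible/tmp'
makedirs_safe(cache_dir, mode=0o700)         # no error if the directory already exists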