mirror of https://github.com/ansible-collections/community.general.git
* Removing irrelevant s3 call
* Fix exception handling
* Make s3_sync pep8 and remove from legacy file
This commit is contained in:
parent 3f62dfda92
commit d63af51f90

2 changed files with 44 additions and 38 deletions
lib/ansible/modules/cloud/amazon/s3_sync.py

@@ -191,6 +191,7 @@ import datetime
 from dateutil import tz
 import hashlib
 import fnmatch
+import traceback

 # import module snippets
 from ansible.module_utils.basic import AnsibleModule
@@ -207,6 +208,7 @@ try:
 except ImportError:
     HAS_BOTO3 = False
+

 def boto_exception(err):
     '''generic error message handler'''
     if hasattr(err, 'error_message'):
@@ -242,6 +244,8 @@ def boto_exception(err):
 # along with calculate_multipart_etag. If not, see <http://www.gnu.org/licenses/>.

 DEFAULT_CHUNK_SIZE = 5 * 1024 * 1024
+
+
 def calculate_multipart_etag(source_path, chunk_size=DEFAULT_CHUNK_SIZE):

     """
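For context on what calculate_multipart_etag does: S3 computes a multipart ETag as the MD5 of the concatenated per-part MD5 digests, suffixed with the part count. A minimal standalone sketch of that convention (the helper name is illustrative, not the module's exact code):

import hashlib

DEFAULT_CHUNK_SIZE = 5 * 1024 * 1024  # S3 multipart parts default to 5 MB


def multipart_etag_sketch(source_path, chunk_size=DEFAULT_CHUNK_SIZE):
    '''Return '"md5hex-N"' for multipart-sized files, '"md5hex"' otherwise.'''
    md5s = []
    with open(source_path, 'rb') as fp:
        for chunk in iter(lambda: fp.read(chunk_size), b''):
            md5s.append(hashlib.md5(chunk))
    if not md5s:
        # empty file: ETag is the MD5 of empty content
        md5s.append(hashlib.md5())
    if len(md5s) == 1:
        # small file: the ETag is just the quoted MD5 of the content
        return '"{0}"'.format(md5s[0].hexdigest())
    # multipart: MD5 of the concatenated binary digests, plus part count
    digests = b''.join(m.digest() for m in md5s)
    return '"{0}-{1}"'.format(hashlib.md5(digests).hexdigest(), len(md5s))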
@@ -307,13 +311,14 @@ def gather_files(fileroot, include=None, exclude=None):
                 'fullpath': fullpath,
                 'chopped_path': chopped_path,
                 'modified_epoch': f_modified_epoch,
-                'bytes': f_size
+                'bytes': f_size,
             })
     # dirpath = path *to* the directory
     # dirnames = subdirs *in* our directory
     # filenames
     return ret
+

 def calculate_s3_path(filelist, key_prefix=''):
     ret = []
     for fileentry in filelist:
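The fields appended here are the entry shape the rest of the module consumes. A hedged sketch of the core walk in gather_files, with the include/exclude filtering elided (the function name is illustrative):

import os


def gather_files_sketch(fileroot):
    ret = []
    for (dirpath, dirnames, filenames) in os.walk(fileroot):
        for fn in filenames:
            fullpath = os.path.join(dirpath, fn)
            fstat = os.stat(fullpath)
            ret.append({
                'fullpath': fullpath,
                # path relative to the file root, used to build the S3 key
                'chopped_path': os.path.relpath(fullpath, start=fileroot),
                'modified_epoch': fstat.st_mtime,
                'bytes': fstat.st_size,
            })
    return ret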
@@ -323,6 +328,7 @@ def calculate_s3_path(filelist, key_prefix=''):
         ret.append(retentry)
     return ret
+

 def calculate_local_etag(filelist, key_prefix=''):
     '''Really, "calculate md5", but since AWS uses their own format, we'll just call
     it a "local etag". TODO optimization: only calculate if remote key exists.'''
@@ -334,6 +340,7 @@ def calculate_local_etag(filelist, key_prefix=''):
         ret.append(retentry)
     return ret
+

 def determine_mimetypes(filelist, override_map):
     ret = []
     for fileentry in filelist:
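determine_mimetypes maps each file to a Content-Type. A minimal sketch of the usual approach, assuming an override map keyed by extension like the module's mime_map option (e.g. {'.yml': 'application/text'}); the helper name is illustrative:

import mimetypes
import os


def guess_mime_sketch(path, override_map=None):
    # the override map wins when its extension matches; otherwise fall
    # back to the stdlib's extension-based guess
    _, ext = os.path.splitext(path)
    if override_map and ext in override_map:
        return override_map[ext]
    return mimetypes.guess_type(path)[0] or 'application/octet-stream'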
@@ -357,6 +364,7 @@ def determine_mimetypes(filelist, override_map):

     return ret
+

 def head_s3(s3, bucket, s3keys):
     retkeys = []
     for entry in s3keys:
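head_s3 collects the remote state for each candidate key. A hedged sketch of the underlying boto3 call: head_object returns metadata (including the ETag) without fetching the body, and a 404 means the key does not exist yet (the helper name is illustrative):

import botocore.exceptions


def head_key_sketch(s3, bucket, key):
    try:
        resp = s3.head_object(Bucket=bucket, Key=key)
        return resp.get('ETag')
    except botocore.exceptions.ClientError as err:
        if err.response['Error']['Code'] == '404':
            # key absent: caller should schedule an upload
            return None
        raise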
@@ -377,6 +385,7 @@ def head_s3(s3, bucket, s3keys):
         retkeys.append(retentry)
     return retkeys
+

 def filter_list(s3, bucket, s3filelist, strategy):
     keeplist = list(s3filelist)

@@ -429,6 +438,7 @@ def filter_list(s3, bucket, s3filelist, strategy):
     # prune 'please skip' entries, if any.
     return [x for x in keeplist if not x.get('skip_flag')]
+

 def upload_files(s3, bucket, filelist, params):
     ret = []
     for entry in filelist:
@@ -437,6 +447,7 @@ def upload_files(s3, bucket, filelist, params):
         }
         if params.get('permission'):
             args['ACL'] = params['permission']
+        # if this fails exception is caught in main()
         s3.upload_file(entry['fullpath'], bucket, entry['s3_path'], ExtraArgs=args, Callback=None, Config=None)
         ret.append(entry)
     return ret
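With no per-file try/except, upload errors now propagate to main(). A minimal sketch of the boto3 call this loop wraps, under the assumption of a plain boto3 client (names are illustrative):

import boto3


def upload_one_sketch(bucket, entry, permission=None):
    s3 = boto3.client('s3')
    extra = {'ContentType': entry['mime_type']}
    if permission:
        extra['ACL'] = permission
    # a botocore ClientError raised here is left for the caller to handle,
    # mirroring the "caught in main()" comment above
    s3.upload_file(entry['fullpath'], bucket, entry['s3_path'], ExtraArgs=extra)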
@@ -450,8 +461,8 @@ def main():
         bucket=dict(required=True),
         key_prefix=dict(required=False, default=''),
         file_root=dict(required=True, type='path'),
-        permission = dict(required=False, choices=['private', 'public-read', 'public-read-write', 'authenticated-read', 'aws-exec-read', 'bucket-owner-read',
-                          'bucket-owner-full-control']),
+        permission=dict(required=False, choices=['private', 'public-read', 'public-read-write', 'authenticated-read',
+                                                 'aws-exec-read', 'bucket-owner-read', 'bucket-owner-full-control']),
         retries=dict(required=False),
         mime_map=dict(required=False, type='dict'),
         exclude=dict(required=False, default=".*"),
@@ -469,13 +480,10 @@ def main():
     result = {}
     mode = module.params['mode']

     try:
         region, ec2_url, aws_connect_kwargs = ansible.module_utils.ec2.get_aws_connection_info(module, boto3=True)
-        if not region:
-            module.fail_json(msg="Region must be specified")
         s3 = ansible.module_utils.ec2.boto3_conn(module, conn_type='client', resource='s3', region=region, endpoint=ec2_url, **aws_connect_kwargs)
-        s3.list_buckets()
     except botocore.exceptions.NoCredentialsError as e:
         module.fail_json(msg=str(e))
     if mode == 'push':
         try:
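The dropped s3.list_buckets() probe only proved the s3:ListAllMyBuckets permission, which a sync-only IAM policy may not grant; creating a boto3 client performs no network I/O, and credential problems still surface on the first real call. A hedged illustration (the bucket name and framing are hypothetical):

import boto3
import botocore.exceptions


def connect_sketch(region=None):
    # no network I/O happens here, so no probe call is needed
    s3 = boto3.client('s3', region_name=region)
    try:
        s3.head_bucket(Bucket='example-bucket')  # hypothetical bucket
    except botocore.exceptions.ClientError as err:
        # e.g. 403 AccessDenied or 404 NotFound, details in err.response
        print(err.response['Error']['Code'])
    return s3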
@@ -490,10 +498,9 @@ def main():
         if result.get('uploads') and len(result.get('uploads')):
             result['changed'] = True
         # result.update(filelist=actionable_filelist)
-    except Exception as err:
+    except botocore.exceptions.ClientError as err:
         error_msg = boto_exception(err)
-        import traceback  # traces get swallowed by Ansible.
-        module.fail_json(msg=error_msg, traceback=traceback.format_exc().splitlines())
+        module.fail_json(msg=error_msg, exception=traceback.format_exc(), **camel_dict_to_snake_dict(err.response))

     module.exit_json(**result)
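Catching botocore.exceptions.ClientError instead of bare Exception keeps real bugs loud while still reporting AWS failures cleanly: err.response carries the AWS error payload, and camel_dict_to_snake_dict converts its CamelCase keys into Ansible-style result keys. A minimal sketch of the pattern (the function name is illustrative; the import path matches this module's vintage):

import traceback

import botocore.exceptions
from ansible.module_utils.ec2 import camel_dict_to_snake_dict


def fail_with_aws_details_sketch(module, err):
    # err is a botocore.exceptions.ClientError caught by the caller
    module.fail_json(msg=str(err),
                     exception=traceback.format_exc(),
                     **camel_dict_to_snake_dict(err.response))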
test/sanity/pep8/legacy-files.txt

@@ -213,7 +213,6 @@ lib/ansible/modules/cloud/amazon/s3.py
 lib/ansible/modules/cloud/amazon/s3_bucket.py
 lib/ansible/modules/cloud/amazon/s3_lifecycle.py
 lib/ansible/modules/cloud/amazon/s3_logging.py
-lib/ansible/modules/cloud/amazon/s3_sync.py
 lib/ansible/modules/cloud/amazon/s3_website.py
 lib/ansible/modules/cloud/amazon/sns_topic.py
 lib/ansible/modules/cloud/amazon/sts_assume_role.py