
Merge pull request #3415 from neomantra/digital_ocean_inv

digital_ocean.py enhancements
Michael DeHaan 2013-07-11 05:36:38 -07:00
commit 2e8a78b3e0

digital_ocean.py

@ -14,6 +14,12 @@ location is /tmp/ansible-digital_ocean.cache).
 The --pretty (-p) option pretty-prints the output for better human readability.

+----
+Although the cache stores all the information received from DigitalOcean,
+the cache is not used for current droplet information (in --list, --host,
+--all, and --droplets). This is so that accurate droplet information is always
+found. You can force this script to use the cache with --force-cache.
+
 ----
 Configuration is read from `digital_ocean.ini`, then from environment variables,
 then and command-line arguments.
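
As a rough illustration of the lookup order this docstring describes (digital_ocean.ini first, then the DO_CLIENT_ID/DO_API_KEY environment variables, then command-line flags), the precedence could be sketched as below. The ini section and option names and the --api-key flag are assumptions made for the example; only --client-id/-c appears later in this diff.

import os
import argparse

try:
    import configparser                      # Python 3
except ImportError:
    import ConfigParser as configparser      # Python 2

def resolve_credentials(argv=None):
    client_id = api_key = None

    # 1. Lowest precedence: digital_ocean.ini (section/option names assumed)
    config = configparser.ConfigParser()
    if config.read('digital_ocean.ini') and config.has_section('digital_ocean'):
        if config.has_option('digital_ocean', 'client_id'):
            client_id = config.get('digital_ocean', 'client_id')
        if config.has_option('digital_ocean', 'api_key'):
            api_key = config.get('digital_ocean', 'api_key')

    # 2. Environment variables override the ini file
    client_id = os.environ.get('DO_CLIENT_ID', client_id)
    api_key = os.environ.get('DO_API_KEY', api_key)

    # 3. Command-line arguments override everything else
    parser = argparse.ArgumentParser()
    parser.add_argument('--client-id', '-c')
    parser.add_argument('--api-key', '-a')
    args, _ = parser.parse_known_args(argv)
    if args.client_id:
        client_id = args.client_id
    if args.api_key:
        api_key = args.api_key

    return client_id, api_key
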
@ -178,12 +184,21 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
         self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
         self.cache_refreshed = False

-        if self.args.refresh_cache or not self.is_cache_valid():
+        if not self.args.force_cache and self.args.refresh_cache or not self.is_cache_valid():
             self.load_all_data_from_digital_ocean()
         else:
             self.load_from_cache()
             if len(self.data) == 0:
+                if self.args.force_cache:
+                    print '''Cache is empty and --force-cache was specified'''
+                    sys.exit(-1)
                 self.load_all_data_from_digital_ocean()
+            else:
+                # We always get fresh droplets for --list, --host, --all, and --droplets
+                # unless --force-cache is specified
+                if not self.args.force_cache and (
+                    self.args.list or self.args.host or self.args.all or self.args.droplets):
+                    self.load_droplets_from_digital_ocean()

         # Pick the json_data to print based on the CLI command
         if self.args.droplets: json_data = { 'droplets': self.data['droplets'] }
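
A note on how Python parses the new cache condition above: "and" binds tighter than "or", so it groups as (not force_cache and refresh_cache) or (not cache_valid). The tiny self-contained check below, with hypothetical flag values, shows the equivalence and its practical effect.

# "and" binds tighter than "or", so the condition in the diff is equivalent
# to the explicitly parenthesized form below.
force_cache = True        # hypothetical: --force-cache was given
refresh_cache = False     # hypothetical: --refresh-cache was not
cache_valid = False       # hypothetical: stands in for self.is_cache_valid()

original = not force_cache and refresh_cache or not cache_valid
explicit = ((not force_cache) and refresh_cache) or (not cache_valid)
assert original == explicit

# True here: a stale or missing cache still triggers a full API reload
# even when --force-cache is given; the empty-cache guard in the else
# branch is only reached when the cache file is considered valid.
print(original)
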
@ -199,9 +214,9 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
             json_data = self.inventory

         if self.args.pretty:
-            print json.dumps( json_data, sort_keys=True, indent=2 )
+            print json.dumps(json_data, sort_keys=True, indent=2)
         else:
-            print json.dumps( json_data )
+            print json.dumps(json_data)
         # That's all she wrote...
@ -242,7 +257,7 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
         parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet')

         parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON')
-        parser.add_argument('--droplets', action='store_true', help='List Droplets as JSON')
+        parser.add_argument('--droplets','-d', action='store_true', help='List Droplets as JSON')
         parser.add_argument('--regions', action='store_true', help='List Regions as JSON')
         parser.add_argument('--images', action='store_true', help='List Images as JSON')
         parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
@ -253,7 +268,8 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
         parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
         parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')

-        parser.add_argument('--refresh-cache', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')
+        parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
+        parser.add_argument('--refresh-cache','-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')

         parser.add_argument('--env','-e', action='store_true', help='Display DO_CLIENT_ID and DO_API_KEY')
         parser.add_argument('--client-id','-c', action='store', help='DigitalOcean Client ID')
@ -266,6 +282,11 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
         if self.args.cache_path: self.cache_path = self.args.cache_path
         if self.args.cache_max_age: self.cache_max_age = self.args.cache_max_age

+        # Make --list default if none of the other commands are specified
+        if (not self.args.droplets and not self.args.regions and not self.args.images and
+            not self.args.sizes and not self.args.ssh_keys and not self.args.domains and
+            not self.args.all and not self.args.host):
+            self.args.list = True

     ###########################################################################
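
The net effect of the block just added is that a bare invocation of the script behaves like --list. A minimal, stripped-down sketch of that fallback (this two-flag parser is illustrative, not the script's full parser):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--list', action='store_true')
parser.add_argument('--droplets', '-d', action='store_true')
args = parser.parse_args([])      # simulate running with no arguments

# Make --list the default when no other command flag was given
if not args.droplets:
    args.list = True

print(args.list)   # True: a plain ./digital_ocean.py acts like --list
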
@ -274,33 +295,43 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
     def load_all_data_from_digital_ocean(self):
         ''' Use dopy to get all the information from DigitalOcean and save data in cache files '''
-        manager = DoManager( self.client_id, self.api_key )
+        manager = DoManager(self.client_id, self.api_key)

         self.data = {}
-        self.data['droplets'] = self.sanitize_list( manager.all_active_droplets() )
-        self.data['regions'] = self.sanitize_list( manager.all_regions() )
-        self.data['images'] = self.sanitize_list( manager.all_images() )
-        self.data['sizes'] = self.sanitize_list( manager.sizes() )
-        self.data['ssh_keys'] = self.sanitize_list( manager.all_ssh_keys() )
-        self.data['domains'] = self.sanitize_list( manager.all_domains() )
+        self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
+        self.data['regions'] = self.sanitize_list(manager.all_regions())
+        self.data['images'] = self.sanitize_list(manager.all_images(filter=None))
+        self.data['sizes'] = self.sanitize_list(manager.sizes())
+        self.data['ssh_keys'] = self.sanitize_list(manager.all_ssh_keys())
+        self.data['domains'] = self.sanitize_list(manager.all_domains())

         self.index = {}
-        self.index['region_to_name'] = self.build_index( self.data['regions'], 'id', 'name' )
-        self.index['size_to_name'] = self.build_index( self.data['sizes'], 'id', 'name' )
-        self.index['image_to_name'] = self.build_index( self.data['images'], 'id', 'name' )
-        self.index['image_to_distro'] = self.build_index( self.data['images'], 'id', 'distribution' )
-        self.index['host_to_droplet'] = self.build_index( self.data['droplets'], 'ip_address', 'id', False )
+        self.index['region_to_name'] = self.build_index(self.data['regions'], 'id', 'name')
+        self.index['size_to_name'] = self.build_index(self.data['sizes'], 'id', 'name')
+        self.index['image_to_name'] = self.build_index(self.data['images'], 'id', 'name')
+        self.index['image_to_distro'] = self.build_index(self.data['images'], 'id', 'distribution')
+        self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)

         self.build_inventory()
         self.write_to_cache()

+    def load_droplets_from_digital_ocean(self):
+        ''' Use dopy to get droplet information from DigitalOcean and save data in cache files '''
+        manager = DoManager(self.client_id, self.api_key)
+        self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
+        self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)
+        self.build_inventory()
+        self.write_to_cache()
+
     def build_index(self, source_seq, key_from, key_to, use_slug=True):
         dest_dict = {}
         for item in source_seq:
             name = (use_slug and item.has_key('slug')) and item['slug'] or item[key_to]
-            dest_dict[item[key_from]] = name
+            key = item[key_from]
+            dest_dict[key] = name
         return dest_dict
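
For a sense of what build_index returns, here is the same logic rerun on made-up region data (has_key is swapped for the "in" operator only so the snippet also runs on Python 3):

# Hypothetical input: two regions, one with a slug and one without.
regions = [
    {'id': 1, 'name': 'New York 1', 'slug': 'nyc1'},
    {'id': 2, 'name': 'Amsterdam 1'},           # no slug -> falls back to 'name'
]

def build_index(source_seq, key_from, key_to, use_slug=True):
    dest_dict = {}
    for item in source_seq:
        # prefer the slug when present, exactly as the and/or idiom above does
        name = (use_slug and 'slug' in item) and item['slug'] or item[key_to]
        dest_dict[item[key_from]] = name
    return dest_dict

print(build_index(regions, 'id', 'name'))
# {1: 'nyc1', 2: 'Amsterdam 1'}
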
@ -313,23 +344,23 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
             dest = droplet['ip_address']
             self.inventory[droplet['id']] = [dest]

-            self.push( self.inventory, droplet['name'], dest )
-            self.push( self.inventory, 'region_'+droplet['region_id'], dest )
-            self.push( self.inventory, 'image_' +droplet['image_id'], dest )
-            self.push( self.inventory, 'size_' +droplet['size_id'], dest )
-            self.push( self.inventory, 'status_'+droplet['status'], dest )
+            self.push(self.inventory, droplet['name'], dest)
+            self.push(self.inventory, 'region_'+droplet['region_id'], dest)
+            self.push(self.inventory, 'image_' +droplet['image_id'], dest)
+            self.push(self.inventory, 'size_' +droplet['size_id'], dest)
+            self.push(self.inventory, 'status_'+droplet['status'], dest)

             region_name = self.index['region_to_name'][droplet['region_id']]
-            self.push( self.inventory, 'region_'+region_name, dest )
+            self.push(self.inventory, 'region_'+region_name, dest)

             size_name = self.index['size_to_name'][droplet['size_id']]
-            self.push( self.inventory, 'size_'+size_name, dest )
+            self.push(self.inventory, 'size_'+size_name, dest)

             image_name = self.index['image_to_name'][droplet['image_id']]
-            self.push( self.inventory, 'image_'+image_name, dest )
+            self.push(self.inventory, 'image_'+image_name, dest)

             distro_name = self.index['image_to_distro'][droplet['image_id']]
-            self.push( self.inventory, 'distro_'+distro_name, dest )
+            self.push(self.inventory, 'distro_'+distro_name, dest)

     def load_droplet_variables_for_host(self):
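
As a rough picture of what build_inventory accumulates, the inventory for a single hypothetical droplet would have the shape below. Every ID, name, and address is invented for illustration; only the group-name prefixes (region_, image_, size_, status_, distro_) come from the code above.

# Hypothetical result for one droplet: id 12345, name 'web1', ip 198.51.100.10,
# region_id 4 ('nyc1'), size_id 66 ('512MB'), image_id 284203
# ('Ubuntu 12.04 x64', distribution 'Ubuntu'), status 'active'.
inventory = {
    12345: ['198.51.100.10'],                       # droplet id -> [ip]
    'web1': ['198.51.100.10'],                      # droplet name
    'region_4': ['198.51.100.10'],                  # region_<region_id>
    'region_nyc1': ['198.51.100.10'],               # region_<region name>
    'image_284203': ['198.51.100.10'],              # image_<image_id>
    'image_Ubuntu 12.04 x64': ['198.51.100.10'],    # image_<image name>
    'size_66': ['198.51.100.10'],                   # size_<size_id>
    'size_512MB': ['198.51.100.10'],                # size_<size name>
    'status_active': ['198.51.100.10'],             # status_<status>
    'distro_Ubuntu': ['198.51.100.10'],             # distro_<distribution>
}
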
@ -338,6 +369,7 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
         if not host in self.index['host_to_droplet']:
             # try updating cache
-            self.load_all_data_from_digital_ocean()
+            if not self.args.force_cache:
+                self.load_all_data_from_digital_ocean()
             if not host in self.index['host_to_droplet']:
                 # host might not exist anymore
@ -347,13 +379,13 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
         if self.cache_refreshed:
             for drop in self.data['droplets']:
                 if drop['ip_address'] == host:
-                    droplet = drop
+                    droplet = self.sanitize_dict(drop)
                     break
         else:
             # Cache wasn't refreshed this run, so hit DigitalOcean API
-            manager = DoManager( self.client_id, self.api_key )
+            manager = DoManager(self.client_id, self.api_key)
             droplet_id = self.index['host_to_droplet'][host]
-            droplet = self.sanitize_dict( manager.show_droplet( droplet_id ) )
+            droplet = self.sanitize_dict(manager.show_droplet(droplet_id))

         if not droplet:
             return {}
@ -395,7 +427,7 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
         cache = open(self.cache_filename, 'r')
         json_data = cache.read()
         cache.close()
-        data = json.loads( json_data )
+        data = json.loads(json_data)

         self.data = data['data']
         self.inventory = data['inventory']
@ -405,7 +437,7 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
     def write_to_cache(self):
         ''' Writes data in JSON format to a file '''
         data = { 'data': self.data, 'index': self.index, 'inventory': self.inventory }
-        json_data = json.dumps( data, sort_keys=True, indent=2 )
+        json_data = json.dumps(data, sort_keys=True, indent=2)

         cache = open(self.cache_filename, 'w')
         cache.write(json_data)
@ -441,7 +473,7 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)'''
     def sanitize_list(self, seq):
         new_seq = []
         for d in seq:
-            new_seq.append( self.sanitize_dict(d) )
+            new_seq.append(self.sanitize_dict(d))
         return new_seq