From 0d3e015dd105d32395995c3e583ee8e9f8fb18f1 Mon Sep 17 00:00:00 2001 From: Aleksey Zhukov Date: Thu, 7 May 2015 22:53:10 +0300 Subject: [PATCH] Update DigitalOcean dynamic inventory to API v2 --- plugins/inventory/digital_ocean.py | 299 +++++++---------------------- 1 file changed, 74 insertions(+), 225 deletions(-) diff --git a/plugins/inventory/digital_ocean.py b/plugins/inventory/digital_ocean.py index 1c3eccd21e..29c4856efb 100755 --- a/plugins/inventory/digital_ocean.py +++ b/plugins/inventory/digital_ocean.py @@ -68,10 +68,7 @@ When run against a specific host, this script returns the following variables: usage: digital_ocean.py [-h] [--list] [--host HOST] [--all] [--droplets] [--regions] [--images] [--sizes] [--ssh-keys] [--domains] [--pretty] - [--cache-path CACHE_PATH] - [--cache-max_age CACHE_MAX_AGE] - [--refresh-cache] [--client-id CLIENT_ID] - [--api-key API_KEY] + [--api-token API_TOKEN] Produce an Ansible Inventory file based on DigitalOcean credentials @@ -89,16 +86,8 @@ optional arguments: --ssh-keys List SSH keys as JSON --domains List Domains as JSON --pretty, -p Pretty-print results - --cache-path CACHE_PATH - Path to the cache files (default: .) - --cache-max_age CACHE_MAX_AGE - Maximum age of the cached items (default: 0) - --refresh-cache Force refresh of cache by making API requests to - DigitalOcean (default: False - use cache files) - --client-id CLIENT_ID, -c CLIENT_ID - DigitalOcean Client ID - --api-key API_KEY, -a API_KEY - DigitalOcean API Key + --api-token API_TOKEN, -a API_TOKEN + DigitalOcean API Token ``` ''' @@ -157,11 +146,6 @@ class DigitalOceanInventory(object): # DigitalOceanInventory data self.data = {} # All DigitalOcean data self.inventory = {} # Ansible Inventory - self.index = {} # Various indices of Droplet metadata - - # Define defaults - self.cache_path = '.' - self.cache_max_age = 0 # Read settings, environment variables, and CLI arguments self.read_settings() @@ -169,49 +153,40 @@ class DigitalOceanInventory(object): self.read_cli_args() # Verify credentials were set - if not hasattr(self, 'client_id') or not hasattr(self, 'api_key'): - print '''Could not find values for DigitalOcean client_id and api_key. -They must be specified via either ini file, command line argument (--client-id and --api-key), -or environment variables (DO_CLIENT_ID and DO_API_KEY)''' + if not hasattr(self, 'api_token'): + print '''Could not find values for DigitalOcean api_token. 
+They must be specified via either ini file, command line argument (--api-token), +or environment variables (DO_API_TOKEN)''' sys.exit(-1) # env command, show DigitalOcean credentials if self.args.env: - print "DO_CLIENT_ID=%s DO_API_KEY=%s" % (self.client_id, self.api_key) + print "DO_API_TOKEN=%s" % self.api_token sys.exit(0) - # Manage cache - self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache" - self.cache_refreshed = False - - if not self.args.force_cache and self.args.refresh_cache or not self.is_cache_valid(): - self.load_all_data_from_digital_ocean() - else: - self.load_from_cache() - if len(self.data) == 0: - if self.args.force_cache: - print '''Cache is empty and --force-cache was specified''' - sys.exit(-1) - self.load_all_data_from_digital_ocean() - else: - # We always get fresh droplets for --list, --host, --all, and --droplets - # unless --force-cache is specified - if not self.args.force_cache and ( - self.args.list or self.args.host or self.args.all or self.args.droplets): - self.load_droplets_from_digital_ocean() + self.manager = DoManager(None, self.api_token, api_version=2) # Pick the json_data to print based on the CLI command - if self.args.droplets: json_data = { 'droplets': self.data['droplets'] } - elif self.args.regions: json_data = { 'regions': self.data['regions'] } - elif self.args.images: json_data = { 'images': self.data['images'] } - elif self.args.sizes: json_data = { 'sizes': self.data['sizes'] } - elif self.args.ssh_keys: json_data = { 'ssh_keys': self.data['ssh_keys'] } - elif self.args.domains: json_data = { 'domains': self.data['domains'] } - elif self.args.all: json_data = self.data - - elif self.args.host: json_data = self.load_droplet_variables_for_host() + if self.args.droplets: + json_data = self.load_from_digital_ocean('droplets') + elif self.args.regions: + json_data = self.load_from_digital_ocean('regions') + elif self.args.images: + json_data = self.load_from_digital_ocean('images') + elif self.args.sizes: + json_data = self.load_from_digital_ocean('sizes') + elif self.args.ssh_keys: + json_data = self.load_from_digital_ocean('ssh_keys') + elif self.args.domains: + json_data = self.load_from_digital_ocean('domains') + elif self.args.all: + json_data = self.load_from_digital_ocean() + elif self.args.host: + json_data = self.load_droplet_variables_for_host() else: # '--list' this is last to make it default - json_data = self.inventory + self.data = self.load_from_digital_ocean('droplets') + self.build_inventory() + json_data = self.inventory if self.args.pretty: print json.dumps(json_data, sort_keys=True, indent=2) @@ -230,10 +205,8 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)''' config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini') # Credentials - if config.has_option('digital_ocean', 'client_id'): - self.client_id = config.get('digital_ocean', 'client_id') - if config.has_option('digital_ocean', 'api_key'): - self.api_key = config.get('digital_ocean', 'api_key') + if config.has_option('digital_ocean', 'api_token'): + self.api_token = config.get('digital_ocean', 'api_token') # Cache related if config.has_option('digital_ocean', 'cache_path'): @@ -245,8 +218,10 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)''' def read_environment(self): ''' Reads the settings from environment variables ''' # Setup credentials - if os.getenv("DO_CLIENT_ID"): self.client_id = os.getenv("DO_CLIENT_ID") - if os.getenv("DO_API_KEY"): self.api_key = os.getenv("DO_API_KEY") + if 
os.getenv("DO_API_TOKEN"): + self.api_token = os.getenv("DO_API_TOKEN") + if os.getenv("DO_API_KEY"): + self.api_token = os.getenv("DO_API_KEY") def read_cli_args(self): @@ -266,73 +241,42 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)''' parser.add_argument('--pretty','-p', action='store_true', help='Pretty-print results') - parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)') - parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)') - parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache') - parser.add_argument('--refresh-cache','-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)') - - parser.add_argument('--env','-e', action='store_true', help='Display DO_CLIENT_ID and DO_API_KEY') - parser.add_argument('--client-id','-c', action='store', help='DigitalOcean Client ID') - parser.add_argument('--api-key','-a', action='store', help='DigitalOcean API Key') + parser.add_argument('--env','-e', action='store_true', help='Display DO_API_TOKEN') + parser.add_argument('--api-token','-a', action='store', help='DigitalOcean API Token') self.args = parser.parse_args() - if self.args.client_id: self.client_id = self.args.client_id - if self.args.api_key: self.api_key = self.args.api_key - if self.args.cache_path: self.cache_path = self.args.cache_path - if self.args.cache_max_age: self.cache_max_age = self.args.cache_max_age + if self.args.api_token: + self.api_token = self.args.api_token # Make --list default if none of the other commands are specified - if (not self.args.droplets and not self.args.regions and not self.args.images and - not self.args.sizes and not self.args.ssh_keys and not self.args.domains and - not self.args.all and not self.args.host): - self.args.list = True + if (not self.args.droplets and not self.args.regions and + not self.args.images and not self.args.sizes and + not self.args.ssh_keys and not self.args.domains and + not self.args.all and not self.args.host): + self.args.list = True ########################################################################### # Data Management ########################################################################### - def load_all_data_from_digital_ocean(self): - ''' Use dopy to get all the information from DigitalOcean and save data in cache files ''' - manager = DoManager(self.client_id, self.api_key) - - self.data = {} - self.data['droplets'] = self.sanitize_list(manager.all_active_droplets()) - self.data['regions'] = self.sanitize_list(manager.all_regions()) - self.data['images'] = self.sanitize_list(manager.all_images(filter=None)) - self.data['sizes'] = self.sanitize_list(manager.sizes()) - self.data['ssh_keys'] = self.sanitize_list(manager.all_ssh_keys()) - self.data['domains'] = self.sanitize_list(manager.all_domains()) - - self.index = {} - self.index['region_to_name'] = self.build_index(self.data['regions'], 'id', 'name') - self.index['size_to_name'] = self.build_index(self.data['sizes'], 'id', 'name') - self.index['image_to_name'] = self.build_index(self.data['images'], 'id', 'name') - self.index['image_to_distro'] = self.build_index(self.data['images'], 'id', 'distribution') - self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False) - - self.build_inventory() - - self.write_to_cache() - - - def load_droplets_from_digital_ocean(self): - 
''' Use dopy to get droplet information from DigitalOcean and save data in cache files ''' - manager = DoManager(self.client_id, self.api_key) - self.data['droplets'] = self.sanitize_list(manager.all_active_droplets()) - self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False) - self.build_inventory() - self.write_to_cache() - - - def build_index(self, source_seq, key_from, key_to, use_slug=True): - dest_dict = {} - for item in source_seq: - name = (use_slug and item.has_key('slug')) and item['slug'] or item[key_to] - key = item[key_from] - dest_dict[key] = name - return dest_dict + def load_from_digital_ocean(self, resource=None): + '''Get JSON from DigitalOcean API''' + json_data = {} + if resource == 'droplets' or resource is None: + json_data['droplets'] = self.manager.all_active_droplets() + if resource == 'regions' or resource is None: + json_data['regions'] = self.manager.all_regions() + if resource == 'images' or resource is None: + json_data['images'] = self.manager.all_images(filter=None) + if resource == 'sizes' or resource is None: + json_data['sizes'] = self.manager.sizes() + if resource == 'ssh_keys' or resource is None: + json_data['ssh_keys'] = self.manager.all_ssh_keys() + if resource == 'domains' or resource is None: + json_data['domains'] = self.manager.all_domains() + return json_data def build_inventory(self): @@ -345,107 +289,27 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)''' self.inventory[droplet['id']] = [dest] self.push(self.inventory, droplet['name'], dest) - self.push(self.inventory, 'region_'+droplet['region_id'], dest) - self.push(self.inventory, 'image_' +droplet['image_id'], dest) - self.push(self.inventory, 'size_' +droplet['size_id'], dest) - self.push(self.inventory, 'status_'+droplet['status'], dest) + self.push(self.inventory, 'region_' + droplet['region']['slug'], dest) + self.push(self.inventory, 'image_' + str(droplet['image']['id']), dest) + self.push(self.inventory, 'size_' + droplet['size']['slug'], dest) - region_name = self.index['region_to_name'].get(droplet['region_id']) - if region_name: - self.push(self.inventory, 'region_'+region_name, dest) + image_slug = droplet['image']['slug'] + if image_slug: + self.push(self.inventory, 'image_' + self.to_safe(image_slug), dest) + else: + image_name = droplet['image']['name'] + if image_name: + self.push(self.inventory, 'image_' + self.to_safe(image_name), dest) - size_name = self.index['size_to_name'].get(droplet['size_id']) - if size_name: - self.push(self.inventory, 'size_'+size_name, dest) - - image_name = self.index['image_to_name'].get(droplet['image_id']) - if image_name: - self.push(self.inventory, 'image_'+image_name, dest) - - distro_name = self.index['image_to_distro'].get(droplet['image_id']) - if distro_name: - self.push(self.inventory, 'distro_'+distro_name, dest) + self.push(self.inventory, 'distro_' + self.to_safe(droplet['image']['distribution']), dest) + self.push(self.inventory, 'status_' + droplet['status'], dest) def load_droplet_variables_for_host(self): '''Generate a JSON response to a --host call''' - host = self.to_safe(str(self.args.host)) + host = int(self.args.host) - if not host in self.index['host_to_droplet']: - # try updating cache - if not self.args.force_cache: - self.load_all_data_from_digital_ocean() - if not host in self.index['host_to_droplet']: - # host might not exist anymore - return {} - - droplet = None - if self.cache_refreshed: - for drop in self.data['droplets']: - if drop['ip_address'] == host: - 
droplet = self.sanitize_dict(drop) - break - else: - # Cache wasn't refreshed this run, so hit DigitalOcean API - manager = DoManager(self.client_id, self.api_key) - droplet_id = self.index['host_to_droplet'][host] - droplet = self.sanitize_dict(manager.show_droplet(droplet_id)) - - if not droplet: - return {} - - # Put all the information in a 'do_' namespace - info = {} - for k, v in droplet.items(): - info['do_'+k] = v - - # Generate user-friendly variables (i.e. not the ID's) - if droplet.has_key('region_id'): - info['do_region'] = self.index['region_to_name'].get(droplet['region_id']) - if droplet.has_key('size_id'): - info['do_size'] = self.index['size_to_name'].get(droplet['size_id']) - if droplet.has_key('image_id'): - info['do_image'] = self.index['image_to_name'].get(droplet['image_id']) - info['do_distro'] = self.index['image_to_distro'].get(droplet['image_id']) - - return info - - - - ########################################################################### - # Cache Management - ########################################################################### - - def is_cache_valid(self): - ''' Determines if the cache files have expired, or if it is still valid ''' - if os.path.isfile(self.cache_filename): - mod_time = os.path.getmtime(self.cache_filename) - current_time = time() - if (mod_time + self.cache_max_age) > current_time: - return True - return False - - - def load_from_cache(self): - ''' Reads the data from the cache file and assigns it to member variables as Python Objects''' - cache = open(self.cache_filename, 'r') - json_data = cache.read() - cache.close() - data = json.loads(json_data) - - self.data = data['data'] - self.inventory = data['inventory'] - self.index = data['index'] - - - def write_to_cache(self): - ''' Writes data in JSON format to a file ''' - data = { 'data': self.data, 'index': self.index, 'inventory': self.inventory } - json_data = json.dumps(data, sort_keys=True, indent=2) - - cache = open(self.cache_filename, 'w') - cache.write(json_data) - cache.close() + return self.manager.show_droplet(host) @@ -456,7 +320,7 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)''' def push(self, my_dict, key, element): ''' Pushed an element onto an array that may not have been defined in the dict ''' if key in my_dict: - my_dict[key].append(element); + my_dict[key].append(element) else: my_dict[key] = [element] @@ -466,21 +330,6 @@ or environment variables (DO_CLIENT_ID and DO_API_KEY)''' return re.sub("[^A-Za-z0-9\-\.]", "_", word) - def sanitize_dict(self, d): - new_dict = {} - for k, v in d.items(): - if v != None: - new_dict[self.to_safe(str(k))] = self.to_safe(str(v)) - return new_dict - - - def sanitize_list(self, seq): - new_seq = [] - for d in seq: - new_seq.append(self.sanitize_dict(d)) - return new_seq - - ########################################################################### # Run the script
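
For reviewers trying the change out: after this patch the script no longer keeps a local cache. Every run instantiates DoManager against API v2 and pulls fresh data, and --host now takes the numeric droplet id rather than an IP address. The snippet below is a minimal sketch of the dopy calls the reworked script relies on; it is not part of the diff, the token is read from DO_API_TOKEN, and the droplet id in the last line is a made-up placeholder.

```
# Sketch only (not part of the patch): exercises the same dopy calls the
# updated inventory script uses.  Assumes a dopy release that accepts the
# api_version=2 argument and a valid token in DO_API_TOKEN.
import os
from dopy.manager import DoManager

manager = DoManager(None, os.getenv('DO_API_TOKEN'), api_version=2)

for droplet in manager.all_active_droplets():
    # In the v2 payload region and size are keyed by slug and the image by id,
    # which is what build_inventory() turns into group names above.
    print("%s region_%s size_%s status_%s" % (droplet['name'],
                                              droplet['region']['slug'],
                                              droplet['size']['slug'],
                                              droplet['status']))

# --host now resolves through show_droplet() with a droplet id:
# print(manager.show_droplet(3164494))   # 3164494 is a hypothetical example id
```

Typical invocation: DO_API_TOKEN=<token> ./digital_ocean.py --list --pretty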