def load_droplets_from_digital_ocean(self):
    ''' Use dopy to get droplet information from DigitalOcean and save
    data in cache files '''
    manager = DoManager(None, self.access_token, api_version=2)
    self.data['droplets'] = manager.all_active_droplets()
    self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)
    self.build_inventory()
    self.write_to_cache()
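A quick orientation for the snippets that follow: dopy's DoManager is constructed in two ways across these examples. A minimal sketch of both forms, assuming the credentials are exported as DO_API_TOKEN (API v2) or DO_CLIENT_ID/DO_API_KEY (legacy API v1):

import os
from dopy.manager import DoManager

# API v2: the first positional argument (client_id) is unused, so pass None
# and select the API version explicitly.
manager = DoManager(None, os.getenv('DO_API_TOKEN'), api_version=2)

# Legacy API v1: client_id and api_key credentials, no api_version argument.
# manager = DoManager(os.getenv('DO_CLIENT_ID'), os.getenv('DO_API_KEY'))

# The call surface used in the snippets is the same either way,
# e.g. list the active droplets.
for droplet in manager.all_active_droplets():
    print(droplet['id'], droplet['name'])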
def load_droplet_variables_for_host(self):
    '''Generate a JSON response to a --host call'''
    host = self.to_safe(str(self.args.host))

    if host not in self.index['host_to_droplet']:
        # try updating cache
        if not self.args.force_cache:
            self.load_all_data_from_digital_ocean()
        if host not in self.index['host_to_droplet']:
            # host might not exist anymore
            return {}

    droplet = None
    if self.cache_refreshed:
        for drop in self.data['droplets']:
            if drop['ip_address'] == host:
                droplet = self.sanitize_dict(drop)
                break
    else:
        # Cache wasn't refreshed this run, so hit DigitalOcean API
        manager = DoManager(None, self.api_token, api_version=2)
        droplet_id = self.index['host_to_droplet'][host]
        droplet = self.sanitize_dict(manager.show_droplet(droplet_id))

    if not droplet:
        return {}

    # Put all the information in a 'do_' namespace
    info = {}
    for k, v in droplet.items():
        info['do_' + k] = v
    return info
def execute(self):
    api_key = os.getenv('DIGITALOCEAN_TOKEN')
    client = DoManager(None, api_key, 2)
    regions = [region["slug"] for region in client.all_regions()]
    return regions
def load_droplets_from_digital_ocean(self):
    ''' Use dopy to get droplet information from DigitalOcean and save
    data in cache files '''
    manager = DoManager(self.client_id, self.api_key)
    self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
    self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)
    self.build_inventory()
    self.write_to_cache()
def execute(self):
    selected_region = self.region
    api_key = os.getenv('DIGITALOCEAN_TOKEN')
    client = DoManager(None, api_key, 2)
    if not self.instance_families:
        instances = [
            item["slug"]
            for item in client.sizes()
            if selected_region in item["regions"]
        ]
    else:
        selected_family = self.instance_families
        instances = [
            instance
            for instance in [
                item["slug"].split("-", 1)
                for item in client.sizes()
                if selected_region in item["regions"]
            ]
            if len(instance) > 1
            if instance[0] in selected_family
        ]
        instances.sort()
        instances = ['-'.join([s[0], s[1]]) for s in instances]
    return instances
def load_all_data_from_digital_ocean(self):
    ''' Use dopy to get all the information from DigitalOcean '''
    manager = DoManager(self.client_id, self.api_key)
    self.data = {}
    self.data['regions'] = self.sanitize_list(manager.all_regions())
    self.data['images'] = self.sanitize_list(manager.all_images(filter=None))
    self.data['sizes'] = self.sanitize_list(manager.sizes())
def load_all_data_from_digital_ocean(self):
    ''' Use dopy to get all the information from DigitalOcean and save
    data in cache files '''
    manager = DoManager(self.client_id, self.api_key)

    self.data = {}
    self.data['droplets'] = self.sanitize_list(manager.all_active_droplets())
    self.data['regions'] = self.sanitize_list(manager.all_regions())
    self.data['images'] = self.sanitize_list(manager.all_images(filter=None))
    self.data['sizes'] = self.sanitize_list(manager.sizes())
    self.data['ssh_keys'] = self.sanitize_list(manager.all_ssh_keys())
    self.data['domains'] = self.sanitize_list(manager.all_domains())

    self.index = {}
    self.index['region_to_name'] = self.build_index(self.data['regions'], 'id', 'name')
    self.index['size_to_name'] = self.build_index(self.data['sizes'], 'id', 'name')
    self.index['image_to_name'] = self.build_index(self.data['images'], 'id', 'name')
    self.index['image_to_distro'] = self.build_index(self.data['images'], 'id', 'distribution')
    self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)

    self.build_inventory()
    self.write_to_cache()
class DigitalOceanBase(Inventory):
    address_field = 'ip_address'

    def __init__(self, settings_file='digital_ocean.ini'):
        super(DigitalOceanBase, self).__init__()
        self.config = Settings(prefix='DO', section='digital_ocean', filename=settings_file)
        self.data_params = {}
        self.do = DoManager(self.config.get('client_id'), self.config.get('api_key'))

    @file_cache('/tmp/ansible-droplets-cache.json')
    def get_data(self):
        if not self.data:
            self.data = self.do.all_active_droplets()
        return self.data

    @file_cache('/tmp/ansible-data-params-cache.json', timeout=3600)
    def get_data_params(self):
        if not self.data_params:
            def _convert(data):
                ret = {}
                for datum in data:
                    # Ensure keys are always strings because JSON keys are always strings
                    ret[str(datum['id'])] = datum
                return ret

            self.data_params['regions'] = _convert(self.do.all_regions())
            self.data_params['images'] = _convert(self.do.all_images())
            self.data_params['ssh_keys'] = _convert(self.do.all_ssh_keys())
            self.data_params['sizes'] = _convert(self.do.sizes())
        return self.data_params

    def get_data_param(self, host, part, find_key, display_template):
        try:
            datum = self.get_data_params()[part][str(host[find_key])]
        except KeyError:
            pass
        else:
            return (display_template.format(**datum), self.get_address(host))
def load_droplet_variables_for_host(self):
    '''Generate a JSON response to a --host call'''
    host = self.to_safe(str(self.args.host))

    if host not in self.index['host_to_droplet']:
        # try updating cache
        if not self.args.force_cache:
            self.load_all_data_from_digital_ocean()
        if host not in self.index['host_to_droplet']:
            # host might not exist anymore
            return {}

    droplet = None
    if self.cache_refreshed:
        for drop in self.data['droplets']:
            if drop['ip_address'] == host:
                droplet = self.sanitize_dict(drop)
                break
    else:
        # Cache wasn't refreshed this run, so hit DigitalOcean API
        manager = DoManager(self.client_id, self.api_key)
        droplet_id = self.index['host_to_droplet'][host]
        droplet = self.sanitize_dict(manager.show_droplet(droplet_id))

    if not droplet:
        return {}

    # Put all the information in a 'do_' namespace
    info = {}
    for k, v in droplet.items():
        info['do_' + k] = v

    # Generate user-friendly variables (i.e. not the IDs)
    if 'region_id' in droplet:
        info['do_region'] = self.index['region_to_name'].get(droplet['region_id'])
    if 'size_id' in droplet:
        info['do_size'] = self.index['size_to_name'].get(droplet['size_id'])
    if 'image_id' in droplet:
        info['do_image'] = self.index['image_to_name'].get(droplet['image_id'])
        info['do_distro'] = self.index['image_to_distro'].get(droplet['image_id'])

    return info
def load_all_data_from_digital_ocean(self):
    ''' Use dopy to get all the information from DigitalOcean and save
    data in cache files '''
    manager = DoManager(None, self.access_token, api_version=2)

    self.data = {}
    self.data['droplets'] = manager.all_active_droplets()
    self.data['regions'] = manager.all_regions()
    self.data['images'] = manager.all_images(filter=None)
    self.data['sizes'] = manager.sizes()
    self.data['ssh_keys'] = manager.all_ssh_keys()
    self.data['domains'] = manager.all_domains()

    self.index = {}
    self.index['region_to_name'] = self.build_index(self.data['regions'], 'name', 'slug')
    self.index['size_to_name'] = self.build_index(self.data['sizes'], 'memory', 'slug')
    self.index['image_to_name'] = self.build_index(self.data['images'], 'id', 'name')
    self.index['image_to_distro'] = self.build_index(self.data['images'], 'id', 'distribution')
    self.index['host_to_droplet'] = self.build_index(self.data['droplets'], 'ip_address', 'id', False)

    self.build_inventory()
    self.write_to_cache()
def setup(cls, client_id, api_key):
    cls.manager = DoManager(client_id, api_key)
class DigitalOceanInventory(object):

    ###########################################################################
    # Main execution path
    ###########################################################################

    def __init__(self):
        ''' Main execution path '''

        # DigitalOceanInventory data
        self.data = {}       # All DigitalOcean data
        self.inventory = {}  # Ansible Inventory

        # Define defaults
        self.cache_path = '.'
        self.cache_max_age = 0
        self.use_private_network = False
        self.group_variables = {}

        # Read settings, environment variables, and CLI arguments
        self.read_settings()
        self.read_environment()
        self.read_cli_args()

        # Verify credentials were set
        if not hasattr(self, 'api_token'):
            print('''Could not find values for DigitalOcean api_token.
They must be specified via either ini file, command line argument (--api-token),
or environment variables (DO_API_TOKEN)''')
            sys.exit(-1)

        # env command, show DigitalOcean credentials
        if self.args.env:
            print("DO_API_TOKEN=%s" % self.api_token)
            sys.exit(0)

        # Manage cache
        self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
        self.cache_refreshed = False

        if self.is_cache_valid():
            self.load_from_cache()
            if len(self.data) == 0:
                if self.args.force_cache:
                    print('''Cache is empty and --force-cache was specified''')
                    sys.exit(-1)

        self.manager = DoManager(None, self.api_token, api_version=2)

        # Pick the json_data to print based on the CLI command
        if self.args.droplets:
            self.load_from_digital_ocean('droplets')
            json_data = {'droplets': self.data['droplets']}
        elif self.args.regions:
            self.load_from_digital_ocean('regions')
            json_data = {'regions': self.data['regions']}
        elif self.args.images:
            self.load_from_digital_ocean('images')
            json_data = {'images': self.data['images']}
        elif self.args.sizes:
            self.load_from_digital_ocean('sizes')
            json_data = {'sizes': self.data['sizes']}
        elif self.args.ssh_keys:
            self.load_from_digital_ocean('ssh_keys')
            json_data = {'ssh_keys': self.data['ssh_keys']}
        elif self.args.domains:
            self.load_from_digital_ocean('domains')
            json_data = {'domains': self.data['domains']}
        elif self.args.all:
            self.load_from_digital_ocean()
            json_data = self.data
        elif self.args.host:
            json_data = self.load_droplet_variables_for_host()
        else:    # '--list' this is last to make it default
            self.load_from_digital_ocean('droplets')
            self.build_inventory()
            json_data = self.inventory

        if self.cache_refreshed:
            self.write_to_cache()

        if self.args.pretty:
            print(json.dumps(json_data, sort_keys=True, indent=2))
        else:
            print(json.dumps(json_data))
        # That's all she wrote...
    ###########################################################################
    # Script configuration
    ###########################################################################

    def read_settings(self):
        ''' Reads the settings from the digital_ocean.ini file '''
        config = ConfigParser.SafeConfigParser()
        config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini')

        # Credentials
        if config.has_option('digital_ocean', 'api_token'):
            self.api_token = config.get('digital_ocean', 'api_token')

        # Cache related
        if config.has_option('digital_ocean', 'cache_path'):
            self.cache_path = config.get('digital_ocean', 'cache_path')
        if config.has_option('digital_ocean', 'cache_max_age'):
            self.cache_max_age = config.getint('digital_ocean', 'cache_max_age')

        # Private IP Address
        if config.has_option('digital_ocean', 'use_private_network'):
            self.use_private_network = config.getboolean('digital_ocean', 'use_private_network')

        # Group variables
        if config.has_option('digital_ocean', 'group_variables'):
            self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables'))

    def read_environment(self):
        ''' Reads the settings from environment variables '''
        # Setup credentials
        if os.getenv("DO_API_TOKEN"):
            self.api_token = os.getenv("DO_API_TOKEN")
        if os.getenv("DO_API_KEY"):
            self.api_token = os.getenv("DO_API_KEY")

    def read_cli_args(self):
        ''' Command line argument processing '''
        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')

        parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)')
        parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet')
        parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON')
        parser.add_argument('--droplets', '-d', action='store_true', help='List Droplets as JSON')
        parser.add_argument('--regions', action='store_true', help='List Regions as JSON')
        parser.add_argument('--images', action='store_true', help='List Images as JSON')
        parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
        parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON')
        parser.add_argument('--domains', action='store_true', help='List Domains as JSON')
        parser.add_argument('--pretty', '-p', action='store_true', help='Pretty-print results')
        parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
        parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
        parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
        parser.add_argument('--refresh-cache', '-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')
        parser.add_argument('--env', '-e', action='store_true', help='Display DO_API_TOKEN')
        parser.add_argument('--api-token', '-a', action='store', help='DigitalOcean API Token')

        self.args = parser.parse_args()

        if self.args.api_token:
            self.api_token = self.args.api_token

        # Make --list default if none of the other commands are specified
        if (not self.args.droplets and not self.args.regions and
                not self.args.images and not self.args.sizes and
                not self.args.ssh_keys and not self.args.domains and
                not self.args.all and not self.args.host):
            self.args.list = True
    ###########################################################################
    # Data Management
    ###########################################################################

    def load_from_digital_ocean(self, resource=None):
        '''Get JSON from DigitalOcean API'''
        if self.args.force_cache:
            return
        # We always get fresh droplets
        if self.is_cache_valid() and not (resource == 'droplets' or resource is None):
            return
        if self.args.refresh_cache:
            resource = None

        if resource == 'droplets' or resource is None:
            self.data['droplets'] = self.manager.all_active_droplets()
            self.cache_refreshed = True
        if resource == 'regions' or resource is None:
            self.data['regions'] = self.manager.all_regions()
            self.cache_refreshed = True
        if resource == 'images' or resource is None:
            self.data['images'] = self.manager.all_images(filter=None)
            self.cache_refreshed = True
        if resource == 'sizes' or resource is None:
            self.data['sizes'] = self.manager.sizes()
            self.cache_refreshed = True
        if resource == 'ssh_keys' or resource is None:
            self.data['ssh_keys'] = self.manager.all_ssh_keys()
            self.cache_refreshed = True
        if resource == 'domains' or resource is None:
            self.data['domains'] = self.manager.all_domains()
            self.cache_refreshed = True

    def build_inventory(self):
        '''Build Ansible inventory of droplets'''
        self.inventory = {}

        # add all droplets by id and name
        for droplet in self.data['droplets']:
            # when using private_networking, the API reports the private one in "ip_address".
            if 'private_networking' in droplet['features'] and not self.use_private_network:
                for net in droplet['networks']['v4']:
                    if net['type'] == 'public':
                        dest = net['ip_address']
                    else:
                        continue
            else:
                dest = droplet['ip_address']

            dest = {'hosts': [dest], 'vars': self.group_variables}

            self.inventory[droplet['id']] = dest
            self.inventory[droplet['name']] = dest
            self.inventory['region_' + droplet['region']['slug']] = dest
            self.inventory['image_' + str(droplet['image']['id'])] = dest
            self.inventory['size_' + droplet['size']['slug']] = dest

            image_slug = droplet['image']['slug']
            if image_slug:
                self.inventory['image_' + self.to_safe(image_slug)] = dest
            else:
                image_name = droplet['image']['name']
                if image_name:
                    self.inventory['image_' + self.to_safe(image_name)] = dest

            self.inventory['distro_' + self.to_safe(droplet['image']['distribution'])] = dest
            self.inventory['status_' + droplet['status']] = dest

    def load_droplet_variables_for_host(self):
        '''Generate a JSON response to a --host call'''
        host = int(self.args.host)
        droplet = self.manager.show_droplet(host)

        # Put all the information in a 'do_' namespace
        info = {}
        for k, v in droplet.items():
            info['do_' + k] = v
        return {'droplet': info}

    ###########################################################################
    # Cache Management
    ###########################################################################

    def is_cache_valid(self):
        ''' Determines if the cache files have expired, or if it is still valid '''
        if os.path.isfile(self.cache_filename):
            mod_time = os.path.getmtime(self.cache_filename)
            current_time = time()
            if (mod_time + self.cache_max_age) > current_time:
                return True
        return False

    def load_from_cache(self):
        ''' Reads the data from the cache file and assigns it to member variables as Python Objects '''
        try:
            cache = open(self.cache_filename, 'r')
            json_data = cache.read()
            cache.close()
            data = json.loads(json_data)
        except IOError:
            data = {'data': {}, 'inventory': {}}

        self.data = data['data']
        self.inventory = data['inventory']

    def write_to_cache(self):
        ''' Writes data in JSON format to a file '''
        data = {'data': self.data, 'inventory': self.inventory}
        json_data = json.dumps(data, sort_keys=True, indent=2)

        cache = open(self.cache_filename, 'w')
        cache.write(json_data)
        cache.close()

    ###########################################################################
    # Utilities
    ###########################################################################

    def push(self, my_dict, key, element):
        ''' Pushes an element onto an array that may not have been defined in the dict '''
        if key in my_dict:
            my_dict[key].append(element)
        else:
            my_dict[key] = [element]

    def to_safe(self, word):
        ''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
        return re.sub(r"[^A-Za-z0-9\-\.]", "_", word)
from dopy.manager import DoManager
import os

API_VERSION = 2
SIZE = '512mb'
REGION = 'nyc2'
PURPOSE = 'wordpress'
VERSION = '1'
IMAGE = 'ubuntu-14-04-x64'

USER_DATA = '''#cloud-config
users:
  - name: ansible
    ssh-authorized-keys:
      - ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDOd/iTQUd6Hb/KTpDjjPJotOWHfTH71U5L7x9Y4y9bo7Zvtp/m1WZyR55Ut6uBfGHscD5WRNv6VFsRIHRRjiHP+pGkB4piSUNuOduEOL/FCzmytQLmg7mYZZRGHLYXIoFJV/kdXmjexXSySxJSSp5X5EcH/pLWcKhRK9HiX4IGOBZfNwxaL/VODxU989jNXuKPnF6XfuNVf9p7JYkc4zaDy4752pPCWU2oTfq6y5Ll0vqoSpb62gCUf94CYU5eQddIeZEutTi2UiuqbsA7sEvsZpp/iXXvkaCAWMNRY6VMy5AUavDpNk4tZ/ITcieCWYdfDPCBDcfXwgANlVorHGaR gdunn@san
    sudo: ['ALL=(ALL) NOPASSWD:ALL']
    groups: sudo
    shell: /bin/bash
runcmd:
  - sed -i -e '/^PermitRootLogin/s/^.*$/PermitRootLogin no/' /etc/ssh/sshd_config
  - sed -i -e '$aAllowUsers ansible' /etc/ssh/sshd_config
  - restart ssh
'''

do = DoManager(None, os.environ['DO_API_TOKEN'], API_VERSION)
results = do.new_droplet('{0}-{1}-{2}'.format(REGION, PURPOSE, VERSION), SIZE, IMAGE, REGION, user_data=USER_DATA)

for i in results.keys():
    print('{0}: {1}'.format(i, results[i]))
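new_droplet returns as soon as the create request is accepted, so the droplet is usually not reachable yet. A minimal polling sketch that reuses the `do` manager above; it assumes the returned dict carries the droplet's 'id', and that show_droplet reports 'status' and 'ip_address' as in the other snippets in this collection:

import time

droplet_id = results['id']
while True:
    droplet = do.show_droplet(droplet_id)
    if droplet['status'] == 'active':
        print('Droplet {0} is active at {1}'.format(droplet_id, droplet['ip_address']))
        break
    time.sleep(20)  # wait a bit before polling again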
def setup(cls, api_token):
    cls.manager = DoManager(None, api_token, api_version=2)
    DomainRecord.manager = cls.manager
""" dependencias: sudo pip install dopy pyopenssl ndg-httpsclient pyasn1 """ import os from dopy.manager import DoManager import urllib3.contrib.pyopenssl urllib3.contrib.pyopenssl.inject_into_urllib3() cliend_id = os.getenv("DO_CLIENT_ID") api_key = os.getenv("DO_API_KEY") do = DoManager(cliend_id, api_key) keys = do.all_ssh_keys() print "Nome da chave ssh\tid" for key in keys: print "%s\t%d" % (key["name"], key["id"]) print "Nome da imagem\tid" imgs = do.all_images() for img in imgs: if img["slug"] == "ubuntu-14-04-x64": print "%s\t%d" % (img["name"], img["id"]) print "Nome da regiao\tid" regions = do.all_regions() for region in regions: if region["slug"] == "nyc2": print "%s\t%d" % (region["slug"], region["id"])
logger = logging.getLogger("game_events")
logging.basicConfig(level=logging.INFO)  # Configures logging.

client = discord.Client(loop=loop)  # Defines the Discord client.

config = yaml.load(open("config.yaml", "r"))  # Loads the config.

do_client = DoManager(None, config["DIGITALOCEAN_API_KEY"], api_version=2)  # Defines the DigitalOcean client.


class BrandedEmbed(discord.Embed):
    def __init__(self, user, **kwargs):
        super().__init__(**kwargs)
        self.set_author(name=user.name, icon_url=user.avatar_url)
        self.set_footer(text=f"{client.user.name} - Created by JakeMakesStuff#0001")
class DigitalOceanInventory(object):

    ###########################################################################
    # Main execution path
    ###########################################################################

    def __init__(self):
        """ Main execution path """

        # DigitalOceanInventory data
        self.data = {}       # All DigitalOcean data
        self.inventory = {}  # Ansible Inventory

        # Define defaults
        self.cache_path = "."
        self.cache_max_age = 0

        # Read settings, environment variables, and CLI arguments
        self.read_settings()
        self.read_environment()
        self.read_cli_args()

        # Verify credentials were set
        if not hasattr(self, "api_token"):
            print """Could not find values for DigitalOcean api_token.
They must be specified via either ini file, command line argument (--api-token),
or environment variables (DO_API_TOKEN)"""
            sys.exit(-1)

        # env command, show DigitalOcean credentials
        if self.args.env:
            print "DO_API_TOKEN=%s" % self.api_token
            sys.exit(0)

        # Manage cache
        self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
        self.cache_refreshed = False

        if self.is_cache_valid():
            self.load_from_cache()
            if len(self.data) == 0:
                if self.args.force_cache:
                    print """Cache is empty and --force-cache was specified"""
                    sys.exit(-1)

        self.manager = DoManager(None, self.api_token, api_version=2)

        # Pick the json_data to print based on the CLI command
        if self.args.droplets:
            self.load_from_digital_ocean("droplets")
            json_data = {"droplets": self.data["droplets"]}
        elif self.args.regions:
            self.load_from_digital_ocean("regions")
            json_data = {"regions": self.data["regions"]}
        elif self.args.images:
            self.load_from_digital_ocean("images")
            json_data = {"images": self.data["images"]}
        elif self.args.sizes:
            self.load_from_digital_ocean("sizes")
            json_data = {"sizes": self.data["sizes"]}
        elif self.args.ssh_keys:
            self.load_from_digital_ocean("ssh_keys")
            json_data = {"ssh_keys": self.data["ssh_keys"]}
        elif self.args.domains:
            self.load_from_digital_ocean("domains")
            json_data = {"domains": self.data["domains"]}
        elif self.args.all:
            self.load_from_digital_ocean()
            json_data = self.data
        elif self.args.host:
            json_data = self.load_droplet_variables_for_host()
        else:    # '--list' this is last to make it default
            self.load_from_digital_ocean("droplets")
            self.build_inventory()
            json_data = self.inventory

        if self.cache_refreshed:
            self.write_to_cache()

        if self.args.pretty:
            print json.dumps(json_data, sort_keys=True, indent=2)
        else:
            print json.dumps(json_data)
        # That's all she wrote...
    ###########################################################################
    # Script configuration
    ###########################################################################

    def read_settings(self):
        """ Reads the settings from the digital_ocean.ini file """
        config = ConfigParser.SafeConfigParser()
        config.read(os.path.dirname(os.path.realpath(__file__)) + "/digital_ocean.ini")

        # Credentials
        if config.has_option("digital_ocean", "api_token"):
            self.api_token = config.get("digital_ocean", "api_token")

        # Cache related
        if config.has_option("digital_ocean", "cache_path"):
            self.cache_path = config.get("digital_ocean", "cache_path")
        if config.has_option("digital_ocean", "cache_max_age"):
            self.cache_max_age = config.getint("digital_ocean", "cache_max_age")

    def read_environment(self):
        """ Reads the settings from environment variables """
        # Setup credentials
        if os.getenv("DO_API_TOKEN"):
            self.api_token = os.getenv("DO_API_TOKEN")
        if os.getenv("DO_API_KEY"):
            self.api_token = os.getenv("DO_API_KEY")

    def read_cli_args(self):
        """ Command line argument processing """
        parser = argparse.ArgumentParser(description="Produce an Ansible Inventory file based on DigitalOcean credentials")

        parser.add_argument("--list", action="store_true", help="List all active Droplets as Ansible inventory (default: True)")
        parser.add_argument("--host", action="store", help="Get all Ansible inventory variables about a specific Droplet")
        parser.add_argument("--all", action="store_true", help="List all DigitalOcean information as JSON")
        parser.add_argument("--droplets", "-d", action="store_true", help="List Droplets as JSON")
        parser.add_argument("--regions", action="store_true", help="List Regions as JSON")
        parser.add_argument("--images", action="store_true", help="List Images as JSON")
        parser.add_argument("--sizes", action="store_true", help="List Sizes as JSON")
        parser.add_argument("--ssh-keys", action="store_true", help="List SSH keys as JSON")
        parser.add_argument("--domains", action="store_true", help="List Domains as JSON")
        parser.add_argument("--pretty", "-p", action="store_true", help="Pretty-print results")
        parser.add_argument("--cache-path", action="store", help="Path to the cache files (default: .)")
        parser.add_argument("--cache-max_age", action="store", help="Maximum age of the cached items (default: 0)")
        parser.add_argument("--force-cache", action="store_true", default=False, help="Only use data from the cache")
        parser.add_argument("--refresh-cache", "-r", action="store_true", default=False, help="Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)")
        parser.add_argument("--env", "-e", action="store_true", help="Display DO_API_TOKEN")
        parser.add_argument("--api-token", "-a", action="store", help="DigitalOcean API Token")

        self.args = parser.parse_args()

        if self.args.api_token:
            self.api_token = self.args.api_token

        # Make --list default if none of the other commands are specified
        if (
            not self.args.droplets
            and not self.args.regions
            and not self.args.images
            and not self.args.sizes
            and not self.args.ssh_keys
            and not self.args.domains
            and not self.args.all
            and not self.args.host
        ):
            self.args.list = True

    ###########################################################################
    # Data Management
    ###########################################################################

    def load_from_digital_ocean(self, resource=None):
        """Get JSON from DigitalOcean API"""
        if self.args.force_cache:
            return
        # We always get fresh droplets
        if self.is_cache_valid() and not (resource == "droplets" or resource is None):
            return
        if self.args.refresh_cache:
            resource = None

        if resource == "droplets" or resource is None:
            self.data["droplets"] = self.manager.all_active_droplets()
            self.cache_refreshed = True
        if resource == "regions" or resource is None:
            self.data["regions"] = self.manager.all_regions()
            self.cache_refreshed = True
        if resource == "images" or resource is None:
            self.data["images"] = self.manager.all_images(filter=None)
            self.cache_refreshed = True
        if resource == "sizes" or resource is None:
            self.data["sizes"] = self.manager.sizes()
            self.cache_refreshed = True
        if resource == "ssh_keys" or resource is None:
            self.data["ssh_keys"] = self.manager.all_ssh_keys()
            self.cache_refreshed = True
        if resource == "domains" or resource is None:
            self.data["domains"] = self.manager.all_domains()
            self.cache_refreshed = True

    def build_inventory(self):
        """Build Ansible inventory of droplets"""
        self.inventory = {}

        # add all droplets by id and name
        for droplet in self.data["droplets"]:
            dest = droplet["ip_address"]

            self.inventory[droplet["id"]] = [dest]
            self.push(self.inventory, droplet["name"], dest)
            self.push(self.inventory, "region_" + droplet["region"]["slug"], dest)
            self.push(self.inventory, "image_" + str(droplet["image"]["id"]), dest)
            self.push(self.inventory, "size_" + droplet["size"]["slug"], dest)

            image_slug = droplet["image"]["slug"]
            if image_slug:
                self.push(self.inventory, "image_" + self.to_safe(image_slug), dest)
            else:
                image_name = droplet["image"]["name"]
                if image_name:
                    self.push(self.inventory, "image_" + self.to_safe(image_name), dest)

            self.push(self.inventory, "distro_" + self.to_safe(droplet["image"]["distribution"]), dest)
            self.push(self.inventory, "status_" + droplet["status"], dest)

    def load_droplet_variables_for_host(self):
        """Generate a JSON response to a --host call"""
        host = int(self.args.host)
        droplet = self.manager.show_droplet(host)

        # Put all the information in a 'do_' namespace
        info = {}
        for k, v in droplet.items():
            info["do_" + k] = v
        return {"droplet": info}

    ###########################################################################
    # Cache Management
    ###########################################################################

    def is_cache_valid(self):
        """ Determines if the cache files have expired, or if it is still valid """
        if os.path.isfile(self.cache_filename):
            mod_time = os.path.getmtime(self.cache_filename)
            current_time = time()
            if (mod_time + self.cache_max_age) > current_time:
                return True
        return False

    def load_from_cache(self):
        """ Reads the data from the cache file and assigns it to member variables as Python Objects """
        try:
            cache = open(self.cache_filename, "r")
            json_data = cache.read()
            cache.close()
            data = json.loads(json_data)
        except IOError:
            data = {"data": {}, "inventory": {}}

        self.data = data["data"]
        self.inventory = data["inventory"]

    def write_to_cache(self):
        """ Writes data in JSON format to a file """
        data = {"data": self.data, "inventory": self.inventory}
        json_data = json.dumps(data, sort_keys=True, indent=2)

        cache = open(self.cache_filename, "w")
        cache.write(json_data)
        cache.close()

    ###########################################################################
    # Utilities
    ###########################################################################

    def push(self, my_dict, key, element):
        """ Pushes an element onto an array that may not have been defined in the dict """
        if key in my_dict:
            my_dict[key].append(element)
        else:
            my_dict[key] = [element]

    def to_safe(self, word):
        """ Converts 'bad' characters in a string to underscores so they can be used as Ansible groups """
        return re.sub(r"[^A-Za-z0-9\-\.]", "_", word)
""" dependencias: sudo pip install dopy pyopenssl ndg-httpsclient pyasn1 """ import os from dopy.manager import DoManager import urllib3.contrib.pyopenssl urllib3.contrib.pyopenssl.inject_into_urllib3() cliend_id = os.getenv("DO_CLIENT_ID") api_key=os.getenv("DO_API_KEY") do = DoManager(cliend_id, api_key) keys = do.all_ssh_keys() print "Nome da chave ssh\tid" for key in keys: print "%s\t%d" % (key["name"], key["id"]) print "Nome da imagem\tid" imgs = do.all_images() for img in imgs: if img["slug"] == "ubuntu-14-04-x64": print "%s\t%d" % (img["name"], img["id"]) print "Nome da regiao\tid" regions = do.all_regions() for region in regions: if region["slug"] == "nyc2":
class DigitalOceanInventory(object):

    ###########################################################################
    # Main execution path
    ###########################################################################

    def __init__(self):
        ''' Main execution path '''

        # DigitalOceanInventory data
        self.data = {}       # All DigitalOcean data
        self.inventory = {}  # Ansible Inventory

        # Define defaults
        self.cache_path = '.'
        self.cache_max_age = 0
        self.use_private_network = False
        self.group_variables = {}

        # Read settings, environment variables, and CLI arguments
        self.read_settings()
        self.read_environment()
        self.read_cli_args()

        # Verify credentials were set
        if not hasattr(self, 'api_token'):
            sys.stderr.write('''Could not find values for DigitalOcean api_token.
They must be specified via either ini file, command line argument (--api-token),
or environment variables (DO_API_TOKEN)\n''')
            sys.exit(-1)

        # env command, show DigitalOcean credentials
        if self.args.env:
            print("DO_API_TOKEN=%s" % self.api_token)
            sys.exit(0)

        # Manage cache
        self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache"
        self.cache_refreshed = False

        if self.is_cache_valid():
            self.load_from_cache()
            if len(self.data) == 0:
                if self.args.force_cache:
                    sys.stderr.write('''Cache is empty and --force-cache was specified\n''')
                    sys.exit(-1)

        self.manager = DoManager(None, self.api_token, api_version=2)

        # Pick the json_data to print based on the CLI command
        if self.args.droplets:
            self.load_from_digital_ocean('droplets')
            json_data = {'droplets': self.data['droplets']}
        elif self.args.regions:
            self.load_from_digital_ocean('regions')
            json_data = {'regions': self.data['regions']}
        elif self.args.images:
            self.load_from_digital_ocean('images')
            json_data = {'images': self.data['images']}
        elif self.args.sizes:
            self.load_from_digital_ocean('sizes')
            json_data = {'sizes': self.data['sizes']}
        elif self.args.ssh_keys:
            self.load_from_digital_ocean('ssh_keys')
            json_data = {'ssh_keys': self.data['ssh_keys']}
        elif self.args.domains:
            self.load_from_digital_ocean('domains')
            json_data = {'domains': self.data['domains']}
        elif self.args.all:
            self.load_from_digital_ocean()
            json_data = self.data
        elif self.args.host:
            json_data = self.load_droplet_variables_for_host()
        else:    # '--list' this is last to make it default
            self.load_from_digital_ocean('droplets')
            self.build_inventory()
            json_data = self.inventory

        if self.cache_refreshed:
            self.write_to_cache()

        if self.args.pretty:
            print(json.dumps(json_data, sort_keys=True, indent=2))
        else:
            print(json.dumps(json_data))
        # That's all she wrote...
    ###########################################################################
    # Script configuration
    ###########################################################################

    def read_settings(self):
        ''' Reads the settings from the digital_ocean.ini file '''
        config = ConfigParser.SafeConfigParser()
        config.read(os.path.dirname(os.path.realpath(__file__)) + '/digital_ocean.ini')

        # Credentials
        if config.has_option('digital_ocean', 'api_token'):
            self.api_token = config.get('digital_ocean', 'api_token')

        # Cache related
        if config.has_option('digital_ocean', 'cache_path'):
            self.cache_path = config.get('digital_ocean', 'cache_path')
        if config.has_option('digital_ocean', 'cache_max_age'):
            self.cache_max_age = config.getint('digital_ocean', 'cache_max_age')

        # Private IP Address
        if config.has_option('digital_ocean', 'use_private_network'):
            self.use_private_network = config.getboolean('digital_ocean', 'use_private_network')

        # Group variables
        if config.has_option('digital_ocean', 'group_variables'):
            self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables'))

    def read_environment(self):
        ''' Reads the settings from environment variables '''
        # Setup credentials
        if os.getenv("DO_API_TOKEN"):
            self.api_token = os.getenv("DO_API_TOKEN")
        if os.getenv("DO_API_KEY"):
            self.api_token = os.getenv("DO_API_KEY")

    def read_cli_args(self):
        ''' Command line argument processing '''
        parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials')

        parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)')
        parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet')
        parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON')
        parser.add_argument('--droplets', '-d', action='store_true', help='List Droplets as JSON')
        parser.add_argument('--regions', action='store_true', help='List Regions as JSON')
        parser.add_argument('--images', action='store_true', help='List Images as JSON')
        parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON')
        parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON')
        parser.add_argument('--domains', action='store_true', help='List Domains as JSON')
        parser.add_argument('--pretty', '-p', action='store_true', help='Pretty-print results')
        parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)')
        parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)')
        parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache')
        parser.add_argument('--refresh-cache', '-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)')
        parser.add_argument('--env', '-e', action='store_true', help='Display DO_API_TOKEN')
        parser.add_argument('--api-token', '-a', action='store', help='DigitalOcean API Token')

        self.args = parser.parse_args()

        if self.args.api_token:
            self.api_token = self.args.api_token

        # Make --list default if none of the other commands are specified
        if (not self.args.droplets and not self.args.regions and
                not self.args.images and not self.args.sizes and
                not self.args.ssh_keys and not self.args.domains and
                not self.args.all and not self.args.host):
            self.args.list = True
    ###########################################################################
    # Data Management
    ###########################################################################

    def load_from_digital_ocean(self, resource=None):
        '''Get JSON from DigitalOcean API'''
        if self.args.force_cache and os.path.isfile(self.cache_filename):
            return
        # We always get fresh droplets
        if self.is_cache_valid() and not (resource == 'droplets' or resource is None):
            return
        if self.args.refresh_cache:
            resource = None

        if resource == 'droplets' or resource is None:
            self.data['droplets'] = self.manager.all_active_droplets()
            self.cache_refreshed = True
        if resource == 'regions' or resource is None:
            self.data['regions'] = self.manager.all_regions()
            self.cache_refreshed = True
        if resource == 'images' or resource is None:
            self.data['images'] = self.manager.all_images(filter=None)
            self.cache_refreshed = True
        if resource == 'sizes' or resource is None:
            self.data['sizes'] = self.manager.sizes()
            self.cache_refreshed = True
        if resource == 'ssh_keys' or resource is None:
            self.data['ssh_keys'] = self.manager.all_ssh_keys()
            self.cache_refreshed = True
        if resource == 'domains' or resource is None:
            self.data['domains'] = self.manager.all_domains()
            self.cache_refreshed = True

    def build_inventory(self):
        '''Build Ansible inventory of droplets'''
        self.inventory = {
            'all': {
                'hosts': [],
                'vars': self.group_variables
            },
            '_meta': {'hostvars': {}}
        }

        # add all droplets by id and name
        for droplet in self.data['droplets']:
            # when using private_networking, the API reports the private one in "ip_address".
            if 'private_networking' in droplet['features'] and not self.use_private_network:
                for net in droplet['networks']['v4']:
                    if net['type'] == 'public':
                        dest = net['ip_address']
                    else:
                        continue
            else:
                dest = droplet['ip_address']

            self.inventory['all']['hosts'].append(dest)

            self.inventory[droplet['id']] = [dest]
            self.inventory[droplet['name']] = [dest]

            # groups that are always present
            for group in ('region_' + droplet['region']['slug'],
                          'image_' + str(droplet['image']['id']),
                          'size_' + droplet['size']['slug'],
                          'distro_' + self.to_safe(droplet['image']['distribution']),
                          'status_' + droplet['status']):
                if group not in self.inventory:
                    self.inventory[group] = {'hosts': [], 'vars': {}}
                self.inventory[group]['hosts'].append(dest)

            # groups that are not always present
            for group in (droplet['image']['slug'], droplet['image']['name']):
                if group:
                    image = 'image_' + self.to_safe(group)
                    if image not in self.inventory:
                        self.inventory[image] = {'hosts': [], 'vars': {}}
                    self.inventory[image]['hosts'].append(dest)

            if droplet['tags']:
                for tag in droplet['tags']:
                    if tag not in self.inventory:
                        self.inventory[tag] = {'hosts': [], 'vars': {}}
                    self.inventory[tag]['hosts'].append(dest)

            # hostvars
            info = self.do_namespace(droplet)
            self.inventory['_meta']['hostvars'][dest] = info

    def load_droplet_variables_for_host(self):
        '''Generate a JSON response to a --host call'''
        host = int(self.args.host)
        droplet = self.manager.show_droplet(host)
        info = self.do_namespace(droplet)
        return {'droplet': info}

    ###########################################################################
    # Cache Management
    ###########################################################################

    def is_cache_valid(self):
        ''' Determines if the cache files have expired, or if it is still valid '''
        if os.path.isfile(self.cache_filename):
            mod_time = os.path.getmtime(self.cache_filename)
            current_time = time()
            if (mod_time + self.cache_max_age) > current_time:
                return True
        return False

    def load_from_cache(self):
        ''' Reads the data from the cache file and assigns it to member variables as Python Objects '''
        try:
            cache = open(self.cache_filename, 'r')
            json_data = cache.read()
            cache.close()
            data = json.loads(json_data)
        except IOError:
            data = {'data': {}, 'inventory': {}}

        self.data = data['data']
        self.inventory = data['inventory']

    def write_to_cache(self):
        ''' Writes data in JSON format to a file '''
        data = {'data': self.data, 'inventory': self.inventory}
        json_data = json.dumps(data, sort_keys=True, indent=2)

        cache = open(self.cache_filename, 'w')
        cache.write(json_data)
        cache.close()

    ###########################################################################
    # Utilities
    ###########################################################################

    def push(self, my_dict, key, element):
        ''' Pushes an element onto an array that may not have been defined in the dict '''
        if key in my_dict:
            my_dict[key].append(element)
        else:
            my_dict[key] = [element]

    def to_safe(self, word):
        ''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
        return re.sub(r"[^A-Za-z0-9\-\.]", "_", word)

    def do_namespace(self, data):
        ''' Returns a copy of the dictionary with all the keys put in a 'do_' namespace '''
        info = {}
        for k, v in data.items():
            info['do_' + k] = v
        return info
def setup_manager_hook(app):
    global manager
    manager = DoManager(None, token, api_version=2)
def setup(cls, api_token):
    cls.manager = DoManager(None, api_token, api_version=2)
# MAX_AGE in seconds of a VM
MAX_AGE = get_config(config, 'do_clean', 'max_age', None, 10800, integer=True)
# CHECK_FREQ in seconds of the DO VM status
CHECK_FREQ = get_config(config, 'do_clean', 'check_freq', None, 300, integer=True)
# DONT touch those Droplets (use droplet ids). ex.
# DONT = [ 123456, 654321 ]
RAW_DONT = get_config(config, 'do_clean', 'dont', None, '')
try:
    DONT = [int(val.strip()) for val in RAW_DONT.split(',') if val]
except Exception as e:
    print "Invalid format for DONT - %s" % e
    sys.exit(1)

if __name__ == '__main__':
    do = DoManager(client_id, api_key, api_version=api_version)
    while True:
        try:
            droplets = do.all_active_droplets()
            for droplet in droplets:
                if droplet.get('id') in DONT:
                    # Don't kill meeeeee
                    print 'Not killing %s (%s) - part of the DONT' % (droplet.get('name'), droplet.get('id'))
                    continue
                created = datetime.strptime(droplet.get('created_at'), "%Y-%m-%dT%H:%M:%SZ")
                now = datetime.now()
                diff = now - created
                # total_seconds() so droplets older than a day are still caught
                if diff.total_seconds() > MAX_AGE:
                    print 'gonna destroy droplet - %s (%s: %s)' % (droplet.get('name'), droplet.get('id'), droplet.get('ip_address'))
                    do.destroy_droplet(droplet.get('id'))
                    print 'destroyed droplet - %s (%s: %s)' % (droplet.get('name'), droplet.get('id'), droplet.get('ip_address'))
        except Exception as e:
            print 'Error while checking droplets - %s' % e
        time.sleep(CHECK_FREQ)
from os import getenv

from dopy.manager import DoManager

NAME = "serenata-update"


def destroy_droplet(manager):
    droplet_id = None
    for droplet in manager.all_active_droplets():
        if droplet["name"] == NAME:
            droplet_id = droplet["id"]
            break

    if not droplet_id:
        print("Droplet {} not found.".format(NAME))
        return

    output = manager.destroy_droplet(droplet_id)
    print("Droplet {} ({}) deleted.".format(NAME, droplet_id))
    return output


if __name__ == "__main__":
    manager = DoManager(None, getenv("DO_API_TOKEN"), api_version=2)
    destroy_droplet(manager)
class DigitalOceanProvider(FactoryProvider):
    NAME = 'digital-ocean'

    DEFAULT_IMAGE_SLUG = "ubuntu-14-04-x64"  # Ubuntu latest LTS 64bit
    DEFAULT_SIZE_SLUG = "1gb"
    DEFAULT_REGION_SLUG = "ams3"

    def __init__(self):
        super(DigitalOceanProvider, self).__init__(self.NAME, DOGeneral, DOExt)
        if (not self.env.initialized) or (not self.env.general) or (not self.env.general.api_token):
            g = self._input_general_env_conf()
        else:
            g = self.env.general
        self.manager = DoManager(None, g.api_token, api_version=2)

    def _input_general_env_conf(self):
        print ('\nThe DigitalOcean API (v2) token is not configured.')
        print ('If you don\'t have it please visit '
               'https://cloud.digitalocean.com/settings/applications and generate one.')
        api_token = io.input_value('api token')
        do_general = DOGeneral(api_token)
        self.env.set_general(do_general)
        return do_general

    def define_box(self):
        ip = None
        ext = DOExt()
        if io.input_yes_no('register an existing droplet'):
            droplet_id = io.input_value('droplet id')
            droplet_info = self.manager.show_droplet(droplet_id)
            ext.id = droplet_id
            name = droplet_info['name']
            created = droplet_info['created_at']
            print ('\nFound droplet \'{0}\' (created {1})'.format(name, created))
            ip = io.xstr(droplet_info['ip_address'])
            print ('IP: %s' % ip)
            ext.image = droplet_info['image']['id']
            print ('Image: %s' % ext.image)
            ext.size = droplet_info['size']['slug']
            print ('Size: %s' % ext.size)
            ext.region = droplet_info['region']['slug']
            print ('Region: %s\n' % ext.region)
            playbook = io.input_path('playbook path')
            hostname = self.fetch_box_hosts(playbook)
            user = io.input_value('remote user', self.active_user)
        else:
            playbook = io.input_path('playbook path')
            hostname = self.fetch_box_hosts(playbook)
            name = io.input_value('box name', self.suggest_name(hostname))
            user = io.input_value('remote user', self.active_user)

            all_images = self.manager.all_images()
            print ('\nAvailable images: \n%s' % self._print_object_id_name(all_images))
            default_image = next((img for img in all_images if img['slug'] and self.DEFAULT_IMAGE_SLUG in img['slug']), None)
            if default_image:
                image_desc = '{0} - {1} {2}'.format(default_image['id'], default_image['distribution'], default_image['name'])
                ext.image = io.input_value('image', default_image['id'], image_desc)
            else:
                ext.image = io.input_value('image')

            all_sizes = self.manager.sizes()
            sizes_slug = [o['slug'] for o in all_sizes]
            print ('\nAvailable sizes: \n%s' % '\n'.join(sizes_slug))
            ext.size = io.input_choice('size', self.DEFAULT_SIZE_SLUG, choices=sizes_slug)

            all_regions = self.manager.all_regions()
            regions_slug = [o['slug'] for o in all_regions]
            print ('\nAvailable regions: \n%s' % '\n'.join(regions_slug))
            ext.region = io.input_choice('region', self.DEFAULT_REGION_SLUG, choices=regions_slug)

            ext.keys = self._input_ssh_keys()
        return Box(name, playbook, hostname, ip, user, extra=ext)

    @staticmethod
    def _print_object_id_name(objs):
        def _dist_if_exists(prop):
            return prop['distribution'] + ' ' if 'distribution' in prop else ''
        return '\n'.join([str(o['id']) + ' -> ' + _dist_if_exists(o) + o['name'] for o in objs])

    def redefine_box(self, previous_box):
        playbook = io.input_path('playbook path', previous_box.playbook)
        hostname = self.fetch_box_hosts(playbook)
        ip = previous_box.ip
        user = io.input_value('remote user', previous_box.remote_user)
        if not previous_box.extra.id:
            ext = DOExt()
            ext.id = previous_box.extra.id

            all_images = self.manager.all_images()
            print ('\nAvailable images: \n%s' % self._print_object_id_name(all_images))
            prev_image = next((img for img in all_images if previous_box.extra.image == img['id']), None)
            if prev_image:
                prev_image_desc = '{0} - {1} {2}'.format(prev_image['id'], prev_image['distribution'], prev_image['name'])
                ext.image = io.input_value('image', prev_image['id'], prev_image_desc)
            else:
                ext.image = io.input_value('image')

            all_sizes = self.manager.sizes()
            sizes_slug = [o['slug'] for o in all_sizes]
            print ('\nAvailable sizes: \n%s' % '\n'.join(sizes_slug))
            ext.size = io.input_value('size', previous_box.extra.size)

            all_regions = self.manager.all_regions()
            regions_slug = [o['slug'] for o in all_regions]
            print ('\nAvailable regions: \n%s' % '\n'.join(regions_slug))
            ext.region = io.input_value('region', previous_box.extra.region)

            ext.keys = self._input_ssh_keys(previous_box.extra.keys)
        else:
            ext = previous_box.extra
        return Box(previous_box.name, playbook, hostname, ip, user, extra=ext)

    def add_box(self, box):
        if not box.ip:
            self.create(box)
        SimpleProvider.add_box(self, box)

    def create(self, box):
        ext = box.extra
        if not ext.id:
            if not ext.keys:
                print ('\nNo valid keys are defined.')
                print ('Please run `reconfigure {0}` to provide them.'.format(box.name))
                return False
            print ('\nCreating instance \'{0}\' ...'.format(box.name))
            droplet = self.manager.new_droplet(
                name=box.name,
                size_id=ext.size,
                image_id=ext.image,
                region_id=ext.region,
                ssh_key_ids=ext.keys.split(',')
            )
            box.extra.id = droplet['id']
            box.ip = self._wait_to_be_active(box.extra.id)
        else:
            info = self.manager.show_droplet(ext.id)
            print ('Droplet {0} already exists - status: {1}.'.format(ext.id, info['status']))
        create_user(box, self.loader)

    def start(self, box):
        box_id = box.extra.id
        print ('Starting droplet %s ...' % box_id)
        self.manager.power_on_droplet(box_id)
        self._wait_to_be_active(box_id)

    def stop(self, box):
        box_id = box.extra.id
        print ('Stopping droplet %s ...' % box_id)
        self.manager.power_off_droplet(box_id)

    def destroy(self, box):
        if io.input_yes_no('destroy the droplet \'{0}\''.format(box.name)):
            box_id = box.extra.id
            box.ip = None
            box.extra.id = None
            print ('Destroying droplet %s ...' % box_id)
            self.manager.destroy_droplet(box_id, scrub_data=True)

    def rebuild(self, box):
        ext = box.extra
        print ('Rebuilding droplet %s ...' % ext.id)
        self.manager.rebuild_droplet(ext.id, ext.image)
        self._wait_to_be_active(ext.id)
        create_user(box, self.loader)

    def status(self, box):
        print (self.manager.show_droplet(box.extra.id)['status'])

    def _wait_to_be_active(self, droplet_id, wait_timeout=300):
        end_time = time.time() + wait_timeout
        while time.time() < end_time:
            print ('Waiting for droplet %s to be active ...' % droplet_id)
            time.sleep(min(20, end_time - time.time()))
            droplet = self.manager.show_droplet(droplet_id)
            if droplet['status'] == 'active':
                droplet_ip_address = droplet['ip_address']
                if not droplet_ip_address:
                    raise DoError('No ip is found.', droplet_id)
                print ('\nDroplet %s is now active with ip %s\n' % (droplet_id, droplet_ip_address))
                time.sleep(10)  # Wait for some network latency ...
                return droplet_ip_address
        raise DoError('Wait for droplet running timeout', droplet_id)

    def _input_ssh_keys(self, previous=None):
        all_keys = self.manager.all_ssh_keys()
        print ('\nAvailable keys: \n%s' % self._print_object_id_name(all_keys))
        if not previous:
            default_keys = ','.join([str(k['id']) for k in all_keys])
        else:
            default_keys = previous
        return io.input_value('keys', default_keys)
class DigitalOcean(Cloud):
    def __init__(self, credentials):
        super().__init__(credentials)
        self.name = 'DigitalOcean'
        self.do_session = DoManager(None, self.access_key, api_version=2)

    def get_session(self):
        pass

    def get_cloud_info(self, params):
        vals = {
            'images': self.get_distribution_images(params),
            'regions': self.get_regions(),
            'sizes': self.get_sizes()
        }
        return vals

    def get_distribution_images(self, params):
        # data = self.do_session.all_images(params)
        data = ['ubuntu-14-04-x64']
        if data:
            return data
        return False

    def get_regions(self):
        regions = self.do_session.all_regions()
        if regions:
            return regions
        return False

    def get_sizes(self):
        sizes = self.do_session.sizes()
        if sizes:
            return sizes
        return False

    def get_assets(self):
        droplets = self.do_session.all_active_droplets()
        if droplets:
            return droplets
        return False

    @staticmethod
    def get_js_ip_filter():
        code = """
        function() {
            var droplets_ip = []
            db[collection].find(query).forEach(function(doc) {
                doc.plays.forEach(function(play) {
                    play.tasks.forEach(function(task) {
                        if(task.hosts.localhost.results) {
                            task.hosts.localhost.results.forEach(function(result) {
                                if(result.droplet) {
                                    droplets_ip.push(result.droplet.ip_address);
                                }
                            });
                        }
                    });
                });
            });
            if(droplets_ip.length > 0){
                return droplets_ip;
            }
            return false;
        }
        """
        return code
def setup(cls, client_id, api_key):
    cls.manager = DoManager(client_id, api_key)
    DomainRecord.manager = cls.manager