def do_provision(self, token, instance_id):
    """Fake a provisioning run: wait five seconds, then publish a random
    public IP and two dummy endpoints for the instance.

    :param token: auth token used for internal API calls
    :param instance_id: id of the instance being provisioned
    """
    pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
    log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='provisioning')

    self.logger.info('faking provisioning')
    log_uploader.info('dummy provisioning for 5 seconds\n')
    time.sleep(5)
    log_uploader.info('dummy provisioning completed\n')

    # fabricate a plausible-looking dotted-quad address
    octets = tuple(randint(1, 254) for _ in range(4))
    public_ip = '%s.%s.%s.%s' % octets

    endpoints = [
        {'name': 'SSH', 'access': 'ssh cloud-user@%s' % public_ip},
        {'name': 'Some Web Interface', 'access': 'http://%s/service-x' % public_ip},
    ]
    patch_body = {
        'public_ip': public_ip,
        'instance_data': json.dumps({'endpoints': endpoints}),
    }
    pbclient.do_instance_patch(instance_id, patch_body)
def periodic_update():
    """ Runs periodic updates.
    In particular sets old instances up for deprovisioning after they are past
    their maximum_lifetime and sets instances up for up updates.

    Both deletion and update events are not guaranteed to take place
    immediately. If there are more than max_updates_per_run instances a random
    sample of that many updates and deletions will take place to ensure the
    task is safe to run and won't slow down other tasks.
    """
    # cap on how many deprovisions/updates are dispatched per run so a single
    # invocation stays bounded (was a hard-coded magic number before)
    max_updates_per_run = 10

    token = get_token()
    pbclient = PBClient(token, local_config['INTERNAL_API_BASE_URL'], ssl_verify=False)
    instances = pbclient.get_instances()

    deprovision_list = []
    update_list = []
    for instance in instances:
        logger.debug('checking instance for actions %s' % instance['name'])

        # a running instance whose lifetime has been exhausted is deprovisioned
        deprovision_required = False
        if instance.get('state') == Instance.STATE_RUNNING:
            if not instance.get('lifetime_left') and instance.get('maximum_lifetime'):
                deprovision_required = True

        if deprovision_required:
            deprovision_list.append(instance)
        elif instance.get('state') != Instance.STATE_FAILED:
            update_list.append(instance)

    if len(deprovision_list) > max_updates_per_run:
        deprovision_list = random.sample(deprovision_list, max_updates_per_run)
    for instance in deprovision_list:
        logger.info(
            'deprovisioning triggered for %s (reason: maximum lifetime exceeded)'
            % instance.get('id'))
        pbclient.do_instance_patch(instance['id'], {'to_be_deleted': True})
        run_update.delay(instance.get('id'))

    if len(update_list) > max_updates_per_run:
        update_list = random.sample(update_list, max_updates_per_run)
    for instance in update_list:
        run_update.delay(instance.get('id'))
def do_provision(self, token, instance_id):
    """Dummy provisioning: sleep briefly to simulate work, then patch the
    instance with a randomly generated public IP and example endpoints.

    :param token: auth token for the internal API
    :param instance_id: id of the instance to provision
    """
    pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
    log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='provisioning')

    self.logger.info('faking provisioning')
    log_uploader.info('dummy provisioning for 5 seconds\n')
    time.sleep(5)
    log_uploader.info('dummy provisioning completed\n')

    # random dotted-quad stand-in for a real floating IP
    public_ip = '%s.%s.%s.%s' % (
        randint(1, 254), randint(1, 254), randint(1, 254), randint(1, 254))

    instance_data = {
        'endpoints': [
            {'name': 'SSH', 'access': 'ssh cloud-user@%s' % public_ip},
            {'name': 'Some Web Interface', 'access': 'http://%s/service-x' % public_ip},
        ]
    }

    patch_body = {'public_ip': public_ip, 'instance_data': json.dumps(instance_data)}
    pbclient.do_instance_patch(instance_id, patch_body)
def deprovision(self, token, instance_id):
    """Drive the deprovisioning state machine for one instance.

    Marks the instance DELETING, delegates the real teardown to the
    subclass hook do_deprovision(), then records the deprovisioning
    timestamp and the DELETED state. Any failure flips the instance to
    FAILED and re-raises the original exception.

    :param token: auth token for the internal API
    :param instance_id: id of the instance to tear down
    :raises Exception: whatever do_deprovision() (or a patch call) raised
    """
    self.logger.debug('starting deprovisioning')
    pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
    try:
        pbclient.do_instance_patch(instance_id, {'state': Instance.STATE_DELETING})
        self.logger.debug('calling subclass do_deprovision')
        self.do_deprovision(token, instance_id)

        self.logger.debug('finishing deprovisioning')
        # NOTE(review): utcnow() is naive; presumably the API expects UTC — confirm
        pbclient.do_instance_patch(instance_id, {'deprovisioned_at': datetime.datetime.utcnow()})
        pbclient.do_instance_patch(instance_id, {'state': Instance.STATE_DELETED})
    except Exception as e:
        self.logger.exception('do_deprovision raised %s' % e)
        pbclient.do_instance_patch(instance_id, {'state': Instance.STATE_FAILED})
        # bare raise preserves the original traceback (raise e would rewrite it)
        raise
def provision(self, token, instance_id):
    """Drive the provisioning state machine for one instance.

    Marks the instance PROVISIONING, delegates the real work to the
    subclass hook do_provision(), then records the resulting state
    (defaulting to RUNNING when the hook returns nothing). Any failure
    flips the instance to FAILED and re-raises the original exception.

    :param token: auth token for the internal API
    :param instance_id: id of the instance to provision
    :raises Exception: whatever do_provision() (or a patch call) raised
    """
    self.logger.debug('starting provisioning')
    pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
    try:
        pbclient.do_instance_patch(instance_id, {'state': Instance.STATE_PROVISIONING})
        self.logger.debug('calling subclass do_provision')
        new_state = self.do_provision(token, instance_id)
        # hooks that return None implicitly mean a successful, running instance
        if not new_state:
            new_state = Instance.STATE_RUNNING
        pbclient.do_instance_patch(instance_id, {'state': new_state})
    except Exception as e:
        self.logger.exception('do_provision raised %s' % e)
        pbclient.do_instance_patch(instance_id, {'state': Instance.STATE_FAILED})
        # bare raise preserves the original traceback (raise e would rewrite it)
        raise
def do_provision(self, token, instance_id):
    """Provision an OpenStack server for the given instance.

    Fetches the instance and blueprint config from the internal API,
    validates the exposed-ports definition, requires the user to have a
    public key, launches the server and patches the instance with the
    resulting connection data.

    :param token: auth token for the internal API
    :param instance_id: id of the instance to provision
    :raises RuntimeError: on a bad exposed-ports definition or a missing
        user public key (the instance is patched to FAILED first)
    """
    self.logger.debug("do_provision %s" % instance_id)
    pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
    instance = pbclient.get_instance_description(instance_id)
    instance_name = instance['name']
    instance_user = instance['user_id']

    # fetch config
    blueprint_config = pbclient.get_blueprint_description(instance['blueprint_id'])
    config = blueprint_config['config']

    log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='provisioning')
    log_uploader.info("Provisioning OpenStack instance (%s)\n" % instance_id)

    # validate the exposed-ports string before doing any expensive work
    ports_str = config['exposed_ports']
    if ports_str:
        try:
            parse_ports_string(ports_str)
        except Exception:  # was a bare except: — would also swallow SystemExit etc.
            error = 'Incorrect exposed ports definition in blueprint'
            error_body = {'state': Instance.STATE_FAILED, 'error_msg': error}
            pbclient.do_instance_patch(instance_id, error_body)
            self.logger.debug(error)
            raise RuntimeError(error)

    # fetch user public key
    key_data = pbclient.get_user_key_data(instance_user).json()
    if not key_data:
        error = 'user\'s public key is missing'
        error_body = {'state': Instance.STATE_FAILED, 'error_msg': error}
        pbclient.do_instance_patch(instance_id, error_body)
        self.logger.debug(error)
        raise RuntimeError(error)

    oss = self.get_oss()
    result = oss.provision_instance(
        instance_name,
        config['image'],
        config['flavor'],
        public_key=key_data[0]['public_key'],
        userdata=config.get('userdata'))

    # provisioning errors are reported via the log uploader, not raised
    if 'error' in result:
        log_uploader.warn('Provisioning failed %s' % result['error'])
        return

    ip = result['address_data']['public_ip']
    instance_data = {
        'server_id': result['server_id'],
        'floating_ip': ip,
        'allocated_from_pool': result['address_data']['allocated_from_pool'],
        'security_group_id': result['security_group'],
        'endpoints': [
            {'name': 'SSH', 'access': 'ssh cloud-user@%s' % ip},
        ]
    }

    log_uploader.info("Publishing server data\n")
    pbclient.do_instance_patch(instance_id, {
        'instance_data': json.dumps(instance_data),
        'public_ip': ip
    })
    log_uploader.info("Provisioning complete\n")
def do_provision(self, token, instance_id):
    """Provision an OpenStack server for the given instance.

    Variant that reads the blueprint's 'full_config' and passes an
    optional network id ('openstack_net_id', default 'auto') through to
    the OpenStack service. Validates exposed ports and the user's public
    key before launching, then patches the instance with connection data.

    :param token: auth token for the internal API
    :param instance_id: id of the instance to provision
    :raises RuntimeError: on a bad exposed-ports definition or a missing
        user public key (the instance is patched to FAILED first)
    """
    self.logger.debug("do_provision %s" % instance_id)
    pbclient = PBClient(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
    instance = pbclient.get_instance_description(instance_id)
    instance_name = instance['name']
    instance_user = instance['user_id']

    # fetch config
    blueprint_config = pbclient.get_blueprint_description(instance['blueprint_id'])
    config = blueprint_config['full_config']

    log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='provisioning')
    log_uploader.info("Provisioning OpenStack instance (%s)\n" % instance_id)

    # validate the exposed-ports string before doing any expensive work
    ports_str = config['exposed_ports']
    if ports_str:
        try:
            parse_ports_string(ports_str)
        except Exception:  # was a bare except: — would also swallow SystemExit etc.
            error = 'Incorrect exposed ports definition in blueprint'
            error_body = {'state': Instance.STATE_FAILED, 'error_msg': error}
            pbclient.do_instance_patch(instance_id, error_body)
            self.logger.debug(error)
            raise RuntimeError(error)

    # fetch user public key
    key_data = pbclient.get_user_key_data(instance_user).json()
    if not key_data:
        error = 'user\'s public key is missing'
        error_body = {'state': Instance.STATE_FAILED, 'error_msg': error}
        pbclient.do_instance_patch(instance_id, error_body)
        self.logger.debug(error)
        raise RuntimeError(error)

    oss = self.get_oss()
    result = oss.provision_instance(
        instance_name,
        config['image'],
        config['flavor'],
        nics=config.get('openstack_net_id', 'auto'),
        public_key=key_data[0]['public_key'],
        userdata=config.get('userdata'))

    # provisioning errors are reported via the log uploader, not raised
    if 'error' in result:
        log_uploader.warn('Provisioning failed %s' % result['error'])
        return

    ip = result['address_data']['public_ip']
    instance_data = {
        'server_id': result['server_id'],
        'floating_ip': ip,
        'allocated_from_pool': result['address_data']['allocated_from_pool'],
        'security_group_id': result['security_group'],
        'endpoints': [
            {'name': 'SSH', 'access': 'ssh cloud-user@%s' % ip},
        ]
    }

    log_uploader.info("Publishing server data\n")
    pbclient.do_instance_patch(
        instance_id,
        {'instance_data': json.dumps(instance_data), 'public_ip': ip})
    log_uploader.info("Provisioning complete\n")