def build_bootstrap_aws(input_config):
    init_application()
    try:
        yaml_conf = input_config.read()
        # safe_load avoids arbitrary object construction from untrusted YAML
        # (bare yaml.load without a Loader is deprecated and removed in PyYAML 6)
        payload = yaml.safe_load(yaml_conf)
        base_config = bootstrapper_utils.build_base_configs(payload)

        # the s3 path pulls its credentials from the environment
        required_env = {'AWS_LOCATION', 'AWS_ACCESS_KEY', 'AWS_SECRET_KEY'}
        if not required_env.issubset(os.environ):
            print('s3 bootstrap type requires AWS_LOCATION, AWS_ACCESS_KEY, and AWS_SECRET_KEY')
            sys.exit(1)

        location = os.environ['AWS_LOCATION']
        access_key = os.environ['AWS_ACCESS_KEY']
        secret_key = os.environ['AWS_SECRET_KEY']

        message = archive_utils.create_s3_bucket(base_config, payload['hostname'],
                                                 location, access_key, secret_key)
        print(message)
    except IOError as ioe:
        print('Could not read from input')
        print(ioe)
        sys.exit(1)
    except yaml.YAMLError as ye:
        print('Could not load YAML input')
        print(ye)
        sys.exit(2)
    except (BadRequest, RequiredParametersError):
        sys.exit(3)
    except TemplateNotFoundError:
        print('Could not load templates!')
        sys.exit(4)
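# Illustrative usage sketch for the CLI path above (not part of the application
# itself; the file name 'bootstrap.yaml' and the credential values are
# placeholder assumptions -- the YAML must define at least 'hostname'):
#
#   import os
#
#   os.environ['AWS_LOCATION'] = 'us-east-1'
#   os.environ['AWS_ACCESS_KEY'] = '...'
#   os.environ['AWS_SECRET_KEY'] = '...'
#
#   with open('bootstrap.yaml') as input_config:
#       build_bootstrap_aws(input_config)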
def bootstrap_aws():
    try:
        # accept parameters either as a JSON body or as form fields
        input_params = request.get_json() or request.form.to_dict()
        base_config = bootstrapper_utils.build_base_configs(input_params)
        response = archive_utils.create_s3_bucket(base_config,
                                                  input_params['hostname'],
                                                  input_params['aws_location'],
                                                  input_params['aws_key'],
                                                  input_params['aws_secret'])
        return jsonify(response=response)
    except KeyError as ke:
        print(ke)
        abort(400, 'Invalid input parameters! Not all required parameters are present')
    except (BadRequest, RequiredParametersError):
        abort(400, 'Invalid input parameters for basic configuration!')
    except TemplateNotFoundError:
        print('Could not load templates!')
        abort(500, 'Could not load template!')
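# Example client call for the endpoint above (a sketch only -- the route path
# '/bootstrap_aws' and all parameter values are assumptions; check the actual
# route registration for this handler):
#
#   import requests
#
#   params = {
#       'hostname': 'panos-01',       # hypothetical firewall hostname
#       'aws_location': 'us-east-1',
#       'aws_key': '...',
#       'aws_secret': '...',
#   }
#   resp = requests.post('http://localhost:5000/bootstrap_aws', json=params)
#   print(resp.json())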
def generate_bootstrap_package():
    """
    Main function to build a bootstrap archive. You must post the following params:
    hostname: we cannot build an archive without at least a hostname
    deployment_type: openstack, kvm, vmware, etc.
    archive_type: zip, iso, tgz, s3, azure, gcp
    You must also supply all the variables required by the included templates
    :return: binary package containing variable-interpolated templates
    """
    input_params = dict()
    try:
        input_params = bootstrapper_utils.normalize_input_params(request)
        base_config = bootstrapper_utils.build_base_configs(input_params)
    except (BadRequest, RequiredParametersError):
        err_string = '\nRequired variables: hostname'
        err_string += '\nOptional variables: '
        vs = bootstrapper_utils.get_bootstrap_variables(input_params)
        for v in vs:
            err_string += '%s ' % v
        print('aborting due to bad request, invalid params')
        abort(400, 'Invalid input parameters %s' % err_string)
    except TemplateNotFoundError:
        print('aborting, could not load templates!')
        abort(500, 'Could not load template!')

    # if the desired deployment type is openstack, add the heat templates as well
    if input_params.get('deployment_type') == 'openstack':
        print('Including openstack')
        try:
            base_config = bootstrapper_utils.build_openstack_heat(base_config,
                                                                  input_params,
                                                                  archive=True)
        except RequiredParametersError:
            abort(400, 'Could not parse JSON data')

    if 'hostname' not in input_params:
        abort(400, 'No hostname found in posted data')

    # the optional 'archive_type' parameter selects the output format; default is zip
    archive_type = input_params.get('archive_type', 'zip')

    if archive_type == 'iso':
        # user has specified they want an ISO built
        archive = archive_utils.create_iso(base_config, input_params['hostname'])
        mime_type = 'application/iso-image'
    elif archive_type == 'tgz':
        archive = archive_utils.create_tgz(base_config, input_params['hostname'])
        mime_type = 'application/gzip'
    elif archive_type == 's3':
        required_keys = {'aws_location', 'aws_secret', 'aws_key'}
        if not required_keys.issubset(input_params):
            r = jsonify(message=f"Not all required keys for archive_type: {archive_type} "
                                f"are present. Required are {required_keys}",
                        success=False, status_code=400)
            r.status_code = 400
            return r
        response = archive_utils.create_s3_bucket(base_config,
                                                  input_params['hostname'],
                                                  input_params['aws_location'],
                                                  input_params['aws_key'],
                                                  input_params['aws_secret'])
        return jsonify(response=response)
    elif archive_type == 'azure':
        required_keys = {'azure_account_name', 'azure_account_key'}
        if not required_keys.issubset(input_params):
            r = jsonify(message=f"Not all required keys for archive_type: {archive_type} "
                                f"are present. Required are {required_keys}",
                        success=False, status_code=400)
            r.status_code = 400
            return r
        response = archive_utils.create_azure_fileshare(base_config,
                                                        input_params['hostname'],
                                                        input_params['azure_account_name'],
                                                        input_params['azure_account_key'])
        return jsonify(response=response)
    elif archive_type == 'gcp':
        required_keys = {'project_id', 'access_token'}
        if not required_keys.issubset(input_params):
            r = jsonify(message=f"Not all required keys for archive_type: {archive_type} "
                                f"are present. Required are {required_keys}",
                        success=False, status_code=400)
            r.status_code = 400
            return r
        response = archive_utils.create_gcp_bucket(base_config,
                                                   input_params['hostname'],
                                                   input_params['project_id'],
                                                   input_params['access_token'])
        return jsonify(response=response)
    else:
        # no ISO required, just make a zip
        archive = archive_utils.create_archive(base_config, input_params['hostname'])
        mime_type = 'application/zip'

    print('archive path is: %s' % archive)
    if archive is None:
        print('Aborting with no archive created')
        abort(500, 'Could not create archive! Check bootstrapper logs for more information')

    return send_file(archive, mimetype=mime_type, as_attachment=True)
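# Example client call for generate_bootstrap_package (illustrative only -- the
# route path '/generate_bootstrap_package' and all parameter values are
# assumptions; the endpoint streams the archive back via send_file):
#
#   import requests
#
#   params = {
#       'hostname': 'panos-01',      # hypothetical firewall hostname
#       'deployment_type': 'kvm',
#       'archive_type': 'iso',       # or 'zip', 'tgz', 's3', 'azure', 'gcp'
#   }
#   resp = requests.post('http://localhost:5000/generate_bootstrap_package',
#                        json=params)
#   with open('panos-01.iso', 'wb') as f:
#       f.write(resp.content)        # binary archive returned as an attachment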