def _prepare_and_submit_blueprint(cls, file_server_root, app_dir,
                                  blueprint_id, visibility):
    """Parse, publish and store an uploaded blueprint.

    Extracts the application file, publishes the blueprint via the
    resource manager (which also DSL-parses it), then moves the
    extracted app directory on the file server under a directory named
    after the blueprint id and processes its plugins.

    :raises InvalidBlueprintError: when DSL parsing fails; the
        extracted app directory is removed in that case.
    """
    args = get_args_and_verify_arguments([
        Argument('private_resource', type=boolean),
        Argument('visibility'),
        Argument('application_file_name', default='')
    ])
    app_file_name = cls._extract_application_file(
        file_server_root, app_dir, args.application_file_name)

    # add to blueprints manager (will also dsl_parse it)
    try:
        blueprint = get_resource_manager().publish_blueprint(
            app_dir,
            app_file_name,
            file_server_root,
            blueprint_id,
            args.private_resource,
            visibility)

        # moving the app directory in the file server to be under a
        # directory named after the blueprint id
        tenant_dir = os.path.join(
            file_server_root,
            FILE_SERVER_BLUEPRINTS_FOLDER,
            current_tenant.name)
        mkdirs(tenant_dir)
        shutil.move(os.path.join(file_server_root, app_dir),
                    os.path.join(tenant_dir, blueprint.id))
        cls._process_plugins(file_server_root, blueprint.id)
        return blueprint
    # `as ex` instead of the py2-only `except X, ex` form; format the
    # exception itself since BaseException.message is py2-only
    except manager_exceptions.DslParseException as ex:
        shutil.rmtree(os.path.join(file_server_root, app_dir))
        raise manager_exceptions.InvalidBlueprintError(
            'Invalid blueprint - {0}'.format(ex))
def _prepare_and_process_doc(self, data_id, file_server_root,
                             archive_target_path, **kwargs):
    """Validate an uploaded plugin archive and register the plugin.

    Accepts either a wagon file or a zip that contains one. The plugin
    is stored with its archive name prefixed by INSTALLING_PREFIX until
    installation completes; the unprefixed name is returned as the
    destination path.

    :return: (new plugin, destination archive name)
    :raises InvalidPluginError: for a non-wagon/zip input or a broken
        archive
    :raises ConflictError: when the same plugin archive already exists
        or is currently being installed
    """
    # support previous implementation
    wagon_target_path = archive_target_path

    # handle the archive_target_path, which may be zip or wagon
    if not self._is_wagon_file(archive_target_path):
        if not zipfile.is_zipfile(archive_target_path):
            raise manager_exceptions.InvalidPluginError(
                'input can be only a wagon or a zip file.')
        archive_name = unzip(archive_target_path,
                             logger=current_app.logger)
        os.remove(archive_target_path)
        shutil.move(archive_name, archive_target_path)
        try:
            wagon_target_path, _ = \
                self._verify_archive(archive_target_path)
        # `err`, not `re`: don't shadow the `re` module; str(err)
        # instead of the py2-only BaseException.message
        except RuntimeError as err:
            raise manager_exceptions.InvalidPluginError(str(err))

    args = get_args_and_verify_arguments([
        Argument('title'),
        Argument('private_resource', type=boolean),
        Argument('visibility')])

    visibility = kwargs.get(_VISIBILITY, None)
    new_plugin = self._create_plugin_from_archive(data_id,
                                                  args.title,
                                                  wagon_target_path,
                                                  args.private_resource,
                                                  visibility)

    filter_by_name = {'package_name': new_plugin.package_name}
    sm = get_resource_manager().sm
    plugins = sm.list(Plugin, filters=filter_by_name)

    for plugin in plugins:
        if plugin.archive_name == new_plugin.archive_name:
            raise manager_exceptions.ConflictError(
                'a plugin archive by the name of {archive_name} already '
                'exists for package with name {package_name} and version '
                '{version}'.format(archive_name=new_plugin.archive_name,
                                   package_name=new_plugin.package_name,
                                   version=new_plugin.package_version))
        if is_plugin_installing(new_plugin, plugin):
            raise manager_exceptions.ConflictError(
                'a plugin archive by the name of {archive_name} for '
                'package with name {package_name} and version {version} '
                'is currently being installed'.format(
                    archive_name=new_plugin.archive_name,
                    package_name=new_plugin.package_name,
                    version=new_plugin.package_version))

    # mark the archive as installing until the upload completes
    dest_path = new_plugin.archive_name
    new_plugin.archive_name = '{0}{1}'.format(INSTALLING_PREFIX,
                                              new_plugin.archive_name)
    sm.put(new_plugin)
    return new_plugin, dest_path
def receive_uploaded_data(self, data_id=None, **kwargs):
    """Accept an uploaded blueprint, from the request or via a URL.

    Exactly one transport may be used: a URL query parameter, the raw
    request body, multi-part form data, or chunked transfer.

    :param data_id: the blueprint id being uploaded
    :return: (new blueprint, 201)
    :raises BadParametersError: if a URL is combined with another
        transport
    """
    blueprint_url = None
    visibility = kwargs.get(_VISIBILITY, None)
    labels = kwargs.get('labels', None)
    override_failed_blueprint = kwargs.get('override_failed', False)
    args = get_args_and_verify_arguments([
        Argument('private_resource', type=boolean),
        Argument('application_file_name', default='')
    ])

    # Handle importing blueprint through url
    if self._get_data_url_key() in request.args:
        if request.data or \
                'Transfer-Encoding' in request.headers or \
                'blueprint_archive' in request.files:
            raise manager_exceptions.BadParametersError(
                "Can pass {0} as only one of: URL via query parameters, "
                "request body, multi-form or "
                "chunked.".format(self._get_kind()))
        blueprint_url = request.args[self._get_data_url_key()]

    # resolve the effective visibility (private_resource is the legacy
    # flag; the visibility kwarg takes precedence when given)
    visibility = get_resource_manager().get_resource_visibility(
        Blueprint, data_id, visibility, args.private_resource)

    new_blueprint = self._prepare_and_process_doc(
        data_id,
        visibility,
        blueprint_url,
        application_file_name=args.application_file_name,
        override_failed_blueprint=override_failed_blueprint,
        labels=labels)
    return new_blueprint, 201
def get(self, _include=None, **kwargs):
    """List nodes, optionally narrowed to one deployment and node id."""
    args = get_args_and_verify_arguments([
        Argument('deployment_id', required=False),
        Argument('node_id', required=False),
    ])
    dep_id = args.get('deployment_id')
    wanted_node = args.get('node_id')

    if dep_id and wanted_node:
        # exact lookup of a single node; empty result when it's missing
        try:
            return [get_node(dep_id, wanted_node)]
        except manager_exceptions.NotFoundError:
            return []

    dep_filter = ResourceManager.create_filters_dict(deployment_id=dep_id)
    return get_storage_manager().list(
        models.Node, filters=dep_filter, include=_include).items
def delete(self, deployment_id, **kwargs):
    """Delete a deployment by id; in db mode also purge its files."""
    args = get_args_and_verify_arguments([
        Argument('ignore_live_nodes', type=boolean, default=False),
        Argument('delete_db_mode', type=boolean, default=False),
        Argument('delete_logs', type=boolean, default=False),
    ])
    bypass_maintenance = is_bypass_maintenance_mode()
    deployment = get_resource_manager().delete_deployment(
        deployment_id,
        bypass_maintenance,
        args.ignore_live_nodes,
        args.delete_db_mode,
        args.delete_logs)

    if args.delete_db_mode:
        # Delete deployment resources from file server
        dep_folder = os.path.join(
            config.instance.file_server_root,
            FILE_SERVER_DEPLOYMENTS_FOLDER,
            utils.current_tenant.name,
            deployment.id)
        if os.path.exists(dep_folder):
            shutil.rmtree(dep_folder)

    return deployment, 200
def put(self, deployment_id, **kwargs):
    """Create a deployment and start its create-environment execution.

    If starting the environment execution conflicts with an already
    running execution, the just-created deployment is rolled back
    (deleted) and the error re-raised.

    :return: (new deployment, 201)
    """
    validate_inputs({'deployment_id': deployment_id})
    request_schema = self.create_request_schema()
    request_dict = get_json_and_verify_params(request_schema)
    blueprint_id = request_dict['blueprint_id']
    bypass_maintenance = is_bypass_maintenance_mode()
    args = get_args_and_verify_arguments(
        [Argument('private_resource', type=boolean, default=False)]
    )
    skip_plugins_validation = self.get_skip_plugin_validation_flag(
        request_dict)
    rm = get_resource_manager()
    sm = get_storage_manager()
    blueprint = sm.get(models.Blueprint, blueprint_id)
    # drop leftovers of a previous failed create attempt for this id
    rm.cleanup_failed_deployment(deployment_id)
    if not skip_plugins_validation:
        rm.check_blueprint_plugins_installed(blueprint.plan)
    deployment = rm.create_deployment(
        blueprint,
        deployment_id,
        private_resource=args.private_resource,
        visibility=None,
    )
    try:
        rm.execute_workflow(deployment.make_create_environment_execution(
            inputs=request_dict.get('inputs', {}),
        ), bypass_maintenance=bypass_maintenance)
    except manager_exceptions.ExistingRunningExecutionError:
        # roll back: don't leave a deployment without its environment
        rm.delete_deployment(deployment)
        raise
    return deployment, 201
def put(self, deployment_id, **kwargs):
    """Create a deployment under the given id."""
    rest_utils.validate_inputs({'deployment_id': deployment_id})
    request_dict = rest_utils.get_json_and_verify_params(
        self.create_request_schema())
    bypass_maintenance = is_bypass_maintenance_mode()
    args = rest_utils.get_args_and_verify_arguments(
        [Argument('private_resource', type=boolean)])
    visibility = rest_utils.get_visibility_parameter(
        optional=True, valid_values=VisibilityState.STATES)

    new_deployment = get_resource_manager().create_deployment(
        request_dict['blueprint_id'],
        deployment_id,
        inputs=request_dict.get('inputs', {}),
        bypass_maintenance=bypass_maintenance,
        private_resource=args.private_resource,
        visibility=visibility,
        skip_plugins_validation=self.get_skip_plugin_validation_flag(
            request_dict),
        site_name=_get_site_name(request_dict))
    return new_deployment, 201
def put(self, deployment_id, **kwargs):
    """Create a deployment, optionally with runtime-only evaluation."""
    rest_utils.validate_inputs({'deployment_id': deployment_id})
    body = rest_utils.get_json_and_verify_params(
        self.create_request_schema())
    bypass_maintenance = is_bypass_maintenance_mode()
    args = rest_utils.get_args_and_verify_arguments(
        [Argument('private_resource', type=boolean)])
    visibility = rest_utils.get_visibility_parameter(
        optional=True, valid_values=VisibilityState.STATES)

    deployment = get_resource_manager().create_deployment(
        body['blueprint_id'],
        deployment_id,
        inputs=body.get('inputs', {}),
        bypass_maintenance=bypass_maintenance,
        private_resource=args.private_resource,
        visibility=visibility,
        skip_plugins_validation=self.get_skip_plugin_validation_flag(body),
        site_name=_get_site_name(body),
        runtime_only_evaluation=body.get('runtime_only_evaluation', False))
    return deployment, 201
def patch(self, schedule_id, **kwargs):
    """Update scheduling parameters of an existing execution schedule.

    Only fields present in the request JSON are changed; the recurrence
    rule and next occurrence are recomputed afterwards.
    NOTE(review): responds 201 to a PATCH — 200 would be conventional;
    kept as-is since clients may depend on it. Confirm before changing.
    """
    deployment_id = get_args_and_verify_arguments([
        Argument('deployment_id', type=text_type, required=True)
    ])['deployment_id']
    sm = get_storage_manager()
    schedule = sm.get(
        models.ExecutionSchedule,
        None,
        filters={'id': schedule_id, 'deployment_id': deployment_id}
    )
    slip = request.json.get('slip')
    stop_on_fail = request.json.get('stop_on_fail')
    enabled = request.json.get('enabled')
    since = request.json.get('since')
    until = request.json.get('until')
    if since:
        schedule.since = parse_datetime_multiple_formats(since)
    if until:
        schedule.until = parse_datetime_multiple_formats(until)
    # `is not None` so falsy values (0, False) still update the field
    if slip is not None:
        schedule.slip = slip
    if stop_on_fail is not None:
        schedule.stop_on_fail = verify_and_convert_bool('stop_on_fail',
                                                        stop_on_fail)
    if enabled is not None:
        schedule.enabled = verify_and_convert_bool('enabled', enabled)
    schedule.rule = compute_rule_from_scheduling_params(
        request.json, existing_rule=schedule.rule)
    schedule.next_occurrence = schedule.compute_next_occurrence()
    sm.update(schedule)
    return schedule, 201
def put(self, blueprint_id, **kwargs):
    """Upload a blueprint under an explicit id.

    Fails fast on a duplicate id, except when the existing blueprint is
    in a failed-upload state — then the upload overrides it. When
    `async_upload` is set the call returns before processing finishes.
    """
    rest_utils.validate_inputs({'blueprint_id': blueprint_id})
    args = get_args_and_verify_arguments([
        Argument('async_upload', type=boolean, default=False),
        Argument('labels')
    ])
    async_upload = args.async_upload
    visibility = rest_utils.get_visibility_parameter(
        optional=True,
        is_argument=True,
        valid_values=VisibilityState.STATES)
    labels = self._get_labels_from_args(args)
    # Fail fast if trying to upload a duplicate blueprint.
    # Allow overriding an existing blueprint which failed to upload
    current_tenant = request.headers.get('tenant')
    override_failed = False
    if visibility == VisibilityState.GLOBAL:
        # a global blueprint id must be unique across all tenants
        existing_duplicates = get_storage_manager().list(
            models.Blueprint, filters={'id': blueprint_id})
        if existing_duplicates:
            if existing_duplicates[0].state in \
                    BlueprintUploadState.FAILED_STATES:
                override_failed = True
            else:
                raise IllegalActionError(
                    "Can't set or create the resource `{0}`, it's "
                    "visibility can't be global because it also exists in "
                    "other tenants".format(blueprint_id))
    else:
        # non-global: id must be unique only within the current tenant
        existing_duplicates = get_storage_manager().list(
            models.Blueprint,
            filters={'id': blueprint_id, 'tenant_name': current_tenant})
        if existing_duplicates:
            if existing_duplicates[0].state in \
                    BlueprintUploadState.FAILED_STATES:
                override_failed = True
            else:
                raise ConflictError(
                    'blueprint with id={0} already exists on tenant {1} '
                    'or with global visibility'.format(
                        blueprint_id, current_tenant))
    response = UploadedBlueprintsManager().\
        receive_uploaded_data(data_id=blueprint_id,
                              visibility=visibility,
                              override_failed=override_failed,
                              labels=labels)
    if not async_upload:
        # wait for the upload to finish and return the full blueprint
        sm = get_storage_manager()
        blueprint, _ = response
        response = rest_utils.get_uploaded_blueprint(sm, blueprint)
    return response
def get(self, _include=None, **kwargs):
    """List deployment modifications, optionally per deployment."""
    args = get_args_and_verify_arguments(
        [Argument('deployment_id', required=False)])
    filters = ResourceManager.create_filters_dict(
        deployment_id=args.deployment_id)
    result = get_storage_manager().list(
        models.DeploymentModification,
        filters=filters,
        include=_include)
    return result.items
def delete(self, blueprint_id, **kwargs):
    """Delete a blueprint by id; `force` removes it even if in use."""
    params = get_args_and_verify_arguments(
        [Argument('force', type=boolean, default=False)])
    deleted = get_resource_manager().delete_blueprint(
        blueprint_id, force=params.force)
    return deleted, 200
def delete(self, blueprint_id, **kwargs):
    """Delete the blueprint with the given id.

    The `force` query argument deletes it even when still referenced.
    """
    force_flag = get_args_and_verify_arguments(
        [Argument('force', type=boolean, default=False)]).force
    removed = get_resource_manager().delete_blueprint(
        blueprint_id, force=force_flag)
    return removed, 200
def get(self, _include=None, pagination=None, **kwargs):
    """List operations of the tasks graph with the given graph id."""
    args = get_args_and_verify_arguments(
        # text_type instead of the py2-only `unicode` builtin, matching
        # the Argument declarations used elsewhere in this file
        [Argument('graph_id', type=text_type, required=True)])
    sm = get_storage_manager()
    graph_id = args.get('graph_id')
    tasks_graph = sm.list(models.TasksGraph, filters={'id': graph_id})[0]
    return sm.list(models.Operation,
                   filters={'tasks_graph': tasks_graph},
                   pagination=pagination,
                   include=_include)
def get(self, _include=None, **kwargs):
    """List deployment modifications for an optional deployment id."""
    query = get_args_and_verify_arguments(
        [Argument('deployment_id', required=False)])
    dep_filter = ResourceManager.create_filters_dict(
        deployment_id=query.deployment_id)
    listing = get_storage_manager().list(
        models.DeploymentModification,
        filters=dep_filter,
        include=_include)
    return listing.items
def delete(self, schedule_id):
    """Delete an execution schedule scoped to its deployment."""
    args = get_args_and_verify_arguments([
        Argument('deployment_id', type=text_type, required=True)])
    sm = get_storage_manager()
    schedule = sm.get(
        models.ExecutionSchedule,
        None,
        filters={'id': schedule_id,
                 'deployment_id': args['deployment_id']})
    sm.delete(schedule)
    return None, 204
def get(self, _include=None, **kwargs):
    """List executions, optionally including system workflows."""
    args = get_args_and_verify_arguments([
        Argument('deployment_id', required=False),
        Argument('include_system_workflows', type=boolean, default=False),
    ])
    filters = ResourceManager.create_filters_dict(
        deployment_id=args.deployment_id)
    executions = get_resource_manager().list_executions(
        is_include_system_workflows=args.include_system_workflows,
        include=_include,
        filters=filters)
    return executions.items
def get(self, pagination=None):
    """List agents filtered by deployment, nodes and install methods."""
    args = get_args_and_verify_arguments([
        Argument('deployment_id', required=False),
        Argument('node_ids', required=False, action='append'),
        Argument('node_instance_ids', required=False, action='append'),
        Argument('install_methods', required=False, action='append'),
    ])
    rm = get_resource_manager()
    return rm.list_agents(
        deployment_id=args.get('deployment_id'),
        node_ids=args.get('node_ids'),
        node_instance_ids=args.get('node_instance_ids'),
        install_method=args.get('install_methods'))
def get(self, _include=None, pagination=None, **kwargs):
    """List operations of the tasks graph identified by `graph_id`."""
    args = get_args_and_verify_arguments([
        # text_type replaces the py2-only `unicode` builtin, matching
        # the Argument declarations used elsewhere in this file
        Argument('graph_id', type=text_type, required=True)
    ])
    sm = get_storage_manager()
    graph_id = args.get('graph_id')
    tasks_graph = sm.list(models.TasksGraph, filters={'id': graph_id})[0]
    return sm.list(
        models.Operation,
        filters={'tasks_graph': tasks_graph},
        pagination=pagination,
        include=_include
    )
def get(self, schedule_id, _include=None, **kwargs):
    """Get an execution schedule by id within its deployment."""
    args = get_args_and_verify_arguments([
        Argument('deployment_id', type=text_type, required=True)])
    return get_storage_manager().get(
        models.ExecutionSchedule,
        None,
        filters={'id': schedule_id,
                 'deployment_id': args['deployment_id']},
        include=_include)
def get(self, _include=None, **kwargs):
    """List executions; system workflows are included only on request."""
    query = get_args_and_verify_arguments([
        Argument('deployment_id', required=False),
        Argument('include_system_workflows', type=boolean, default=False),
    ])
    dep_filter = ResourceManager.create_filters_dict(
        deployment_id=query.deployment_id)
    listing = get_resource_manager().list_executions(
        is_include_system_workflows=query.include_system_workflows,
        include=_include,
        filters=dep_filter)
    return listing.items
def get(self, _include=None, pagination=None, **kwargs):
    """List tasks graphs by execution id and graph name."""
    args = get_args_and_verify_arguments([
        Argument('execution_id', type=text_type, required=True),
        Argument('name', type=text_type, required=True),
    ])
    sm = get_storage_manager()
    execution = sm.list(
        models.Execution, filters={'id': args.get('execution_id')})[0]
    return sm.list(
        models.TasksGraph,
        filters={'execution': execution, 'name': args.get('name')},
        pagination=pagination)
def get(self, _include=None, pagination=None, **kwargs):
    """List tasks graphs by execution id and graph name."""
    args = get_args_and_verify_arguments([
        # text_type replaces the py2-only `unicode` builtin, matching
        # the sibling implementation of this endpoint in the file
        Argument('execution_id', type=text_type, required=True),
        Argument('name', type=text_type, required=True)
    ])
    sm = get_storage_manager()
    execution_id = args.get('execution_id')
    name = args.get('name')
    execution = sm.list(models.Execution, filters={'id': execution_id})[0]
    return sm.list(
        models.TasksGraph,
        filters={'execution': execution, 'name': name},
        pagination=pagination
    )
def get(self, pagination=None, _include=None):
    """List managers; `hostname` narrows the result to one manager.

    :param hostname: optional hostname to return only a specific manager
    :param _include: optional, what columns to include in the response
        (not applied to the hostname-filtered query, as before)
    """
    args = rest_utils.get_args_and_verify_arguments(
        [Argument('hostname', type=text_type, required=False)])
    hostname = args.get('hostname')
    sm = get_storage_manager()
    if hostname:
        return sm.list(models.Manager,
                       None,
                       filters={'hostname': hostname})
    return sm.list(models.Manager, include=_include)
def put(self, schedule_id, **kwargs):
    """Create an execution schedule for a deployment workflow.

    Requires `workflow_id` and `since` in the request body; `until`,
    `parameters`, `slip` and `stop_on_fail` are optional, and the
    recurrence fields are consumed by
    compute_rule_from_scheduling_params.

    :return: (new schedule, 201)
    :raises BadParametersError: when `parameters` is not a dict
    """
    validate_inputs({'schedule_id': schedule_id})
    deployment_id = get_args_and_verify_arguments([
        Argument('deployment_id', type=text_type, required=True)
    ])['deployment_id']
    request_dict = get_json_and_verify_params({'workflow_id', 'since'})
    workflow_id = request_dict['workflow_id']
    execution_arguments = self._get_execution_arguments(request_dict)
    parameters = request_dict.get('parameters', None)
    if parameters is not None and not isinstance(parameters, dict):
        raise manager_exceptions.BadParametersError(
            "parameters: expected a dict, but got: {0}".format(parameters))
    rm = get_resource_manager()
    deployment = rm.sm.get(models.Deployment, deployment_id)
    # validate the workflow exists in the deployment before scheduling
    rm._verify_workflow_in_deployment(workflow_id, deployment,
                                      deployment_id)
    since = request_dict['since']
    until = request_dict.get('until')
    if since:
        since = parse_datetime_multiple_formats(since)
    if until:
        until = parse_datetime_multiple_formats(until)
    rule = compute_rule_from_scheduling_params(request_dict)
    slip = request_dict.get('slip', 0)
    stop_on_fail = verify_and_convert_bool(
        'stop_on_fail', request_dict.get('stop_on_fail', False))
    now = get_formatted_timestamp()
    schedule = models.ExecutionSchedule(
        id=schedule_id,
        deployment=deployment,
        created_at=now,
        since=since,
        until=until,
        rule=rule,
        slip=slip,
        workflow_id=workflow_id,
        parameters=parameters,
        execution_arguments=execution_arguments,
        stop_on_fail=stop_on_fail,
    )
    schedule.next_occurrence = schedule.compute_next_occurrence()
    return rm.sm.put(schedule), 201
def get(self, _include=None, **kwargs):
    """List node instances filtered by deployment and/or node name."""
    args = get_args_and_verify_arguments([
        Argument('deployment_id', type=str, required=False),
        Argument('node_name', type=str, required=False),
    ])
    filters = ResourceManager.create_filters_dict(
        deployment_id=args.get('deployment_id'),
        node_id=args.get('node_name'))
    return get_storage_manager().list(
        models.NodeInstance, filters=filters, include=_include).items
def get(self, pagination=None):
    """List agents matching the optional query filters."""
    query = get_args_and_verify_arguments([
        Argument('deployment_id', required=False),
        Argument('node_ids', required=False, action='append'),
        Argument('node_instance_ids', required=False, action='append'),
        Argument('install_methods', required=False, action='append'),
    ])
    return get_resource_manager().list_agents(
        deployment_id=query.get('deployment_id'),
        node_ids=query.get('node_ids'),
        node_instance_ids=query.get('node_instance_ids'),
        install_method=query.get('install_methods'))
def _prepare_and_process_doc(self, data_id, file_server_root,
                             archive_target_path, **kwargs):
    """Validate an uploaded plugin archive and register the plugin.

    Accepts either a wagon file or a zip that contains one.

    :return: (new plugin, its archive name)
    :raises InvalidPluginError: for a non-wagon/zip input or a broken
        archive
    :raises ConflictError: when the same plugin archive already exists
    """
    # support previous implementation
    wagon_target_path = archive_target_path

    # handle the archive_target_path, which may be zip or wagon
    if not self._is_wagon_file(archive_target_path):
        if not zipfile.is_zipfile(archive_target_path):
            raise manager_exceptions.InvalidPluginError(
                'input can be only a wagon or a zip file.')
        archive_name = unzip(archive_target_path,
                             logger=current_app.logger)
        os.remove(archive_target_path)
        shutil.move(archive_name, archive_target_path)
        try:
            wagon_target_path, _ = \
                self._verify_archive(archive_target_path)
        # `err`, not `re`: don't shadow the `re` module; str(err)
        # instead of the py2-only BaseException.message
        except RuntimeError as err:
            raise manager_exceptions.InvalidPluginError(str(err))

    args = get_args_and_verify_arguments([
        Argument('private_resource', type=boolean),
        Argument('visibility')])

    visibility = kwargs.get(_VISIBILITY, None)
    new_plugin = self._create_plugin_from_archive(data_id,
                                                  wagon_target_path,
                                                  args.private_resource,
                                                  visibility)

    filter_by_name = {'package_name': new_plugin.package_name}
    sm = get_resource_manager().sm
    plugins = sm.list(Plugin, filters=filter_by_name)

    for plugin in plugins:
        if plugin.archive_name == new_plugin.archive_name:
            raise manager_exceptions.ConflictError(
                'a plugin archive by the name of {archive_name} already '
                'exists for package with name {package_name} and version '
                '{version}'.format(archive_name=new_plugin.archive_name,
                                   package_name=new_plugin.package_name,
                                   version=new_plugin.package_version))

    sm.put(new_plugin)
    return new_plugin, new_plugin.archive_name
def _prepare_and_submit_blueprint(cls, file_server_root, app_dir,
                                  blueprint_id, visibility):
    """Validate an uploaded blueprint without publishing it.

    :raises InvalidBlueprintError: when DSL parsing fails.
    """
    args = get_args_and_verify_arguments(
        [Argument('application_file_name', default='')])
    app_file_name = cls._extract_application_file(
        file_server_root, app_dir, args.application_file_name)
    try:
        # add to blueprints manager (will also dsl_parse it)
        get_resource_manager().validate_blueprint(
            app_dir, app_file_name, file_server_root)
    except manager_exceptions.DslParseException as ex:
        raise manager_exceptions.InvalidBlueprintError(
            'Invalid blueprint - {0}'.format(ex))
    return {}
def get(self, _include=None, **kwargs):
    """List node instances for an optional deployment and node name."""
    args = get_args_and_verify_arguments([
        Argument('deployment_id', required=False),
        Argument('node_name', required=False),
    ])
    instance_filters = ResourceManager.create_filters_dict(
        deployment_id=args.get('deployment_id'),
        node_id=args.get('node_name'))
    listing = get_storage_manager().list(
        models.NodeInstance,
        filters=instance_filters,
        include=_include)
    return listing.items
def get(self):
    """Return the Manager config, optionally filtered to one scope.

    A scope (e.g. "rest" or "mgmtworker") limits the settings to a
    single Manager component; the authorization config is attached when
    no scope or the 'authorization' scope is requested.
    """
    args = rest_utils.get_args_and_verify_arguments(
        [Argument('scope', type=text_type, required=False)])
    scope = args.get('scope')
    result = {'metadata': {}}
    result['items'] = self._get_items(scope) if scope else self._get_items()
    if not scope or scope == 'authorization':
        result['authorization'] = self._authorization_config
    return result
def put(self, deployment_id, **kwargs):
    """Create a deployment from the blueprint named in the body."""
    request_dict = get_json_and_verify_params(
        self.create_request_schema())
    bypass_maintenance = is_bypass_maintenance_mode()
    args = get_args_and_verify_arguments(
        [Argument('private_resource', type=types.boolean, default=False)])
    new_deployment = get_resource_manager().create_deployment(
        request_dict['blueprint_id'],
        deployment_id,
        inputs=request_dict.get('inputs', {}),
        bypass_maintenance=bypass_maintenance,
        private_resource=args.private_resource,
        skip_plugins_validation=self.get_skip_plugin_validation_flag(
            request_dict))
    return new_deployment, 201
def post(self, **kwargs):
    """Create the provider context, or update it when ?update=true.

    :return: ({'status': 'ok'}, 200 on update / 201 on create)
    :raises ResolverInstantiationError: when the resolver in the
        provided context cannot be instantiated
    """
    request_dict = get_json_and_verify_params({'context', 'name'})
    args = get_args_and_verify_arguments(
        [Argument('update', type=boolean, default=False)])
    update = args['update']
    context = dict(id=PROVIDER_CONTEXT_ID,
                   name=request_dict['name'],
                   context=request_dict['context'])
    status_code = 200 if update else 201
    try:
        get_resource_manager().update_provider_context(update, context)
        return dict(status='ok'), status_code
    # `as ex` instead of the py2-only `except X, ex` form
    except dsl_parser_utils.ResolverInstantiationError as ex:
        raise manager_exceptions.ResolverInstantiationError(str(ex))
def get(self):
    """Get the Manager config, optionally filtered to a scope.

    Scope can be eg. "rest" or "mgmtworker", for filtering out the
    settings only for a single Manager component.
    """
    args = rest_utils.get_args_and_verify_arguments([
        # text_type replaces the py2-only `unicode` builtin, matching
        # the sibling implementation of this endpoint in the file
        Argument('scope', type=text_type, required=False)
    ])
    scope = args.get('scope')
    result = {'metadata': {}}
    if scope:
        result['items'] = self._get_items(scope)
    else:
        result['items'] = self._get_items()
    if not scope or scope == 'authorization':
        result['authorization'] = self._authorization_config
    return result
def delete(self, deployment_id, **kwargs):
    """Delete deployment by id.

    Marks the deployment status as in-progress, validates the delete
    (optionally forced), then runs the delete-environment workflow and
    removes the deployment's source plugins. Responds 204 right away;
    the actual removal is carried out by the workflow.
    """
    args = get_args_and_verify_arguments([
        Argument('force', type=boolean, default=False),
        Argument('delete_logs', type=boolean, default=False)
    ])
    bypass_maintenance = is_bypass_maintenance_mode()
    sm = get_storage_manager()
    dep = sm.get(models.Deployment, deployment_id)
    dep.deployment_status = DeploymentState.IN_PROGRESS
    sm.update(dep, modified_attrs=('deployment_status',))
    rm = get_resource_manager()
    # may raise when the deployment is still in use, unless forced
    rm.check_deployment_delete(dep, force=args.force)
    delete_execution = dep.make_delete_environment_execution(
        delete_logs=args.delete_logs)
    rm.execute_workflow(
        delete_execution, bypass_maintenance=bypass_maintenance)
    workflow_executor.delete_source_plugins(dep.id)
    return None, 204
def delete(self, deployment_id, **kwargs):
    """Delete a deployment and its file-server resources."""
    args = get_args_and_verify_arguments(
        [Argument('ignore_live_nodes', type=types.boolean, default=False)])
    bypass_maintenance = is_bypass_maintenance_mode()
    deployment = get_resource_manager().delete_deployment(
        deployment_id, bypass_maintenance, args.ignore_live_nodes)

    # Delete deployment resources from file server, if present
    dep_folder = os.path.join(
        config.instance.file_server_root,
        FILE_SERVER_DEPLOYMENTS_FOLDER,
        current_app.config[CURRENT_TENANT_CONFIG].name,
        deployment.id)
    if os.path.exists(dep_folder):
        shutil.rmtree(dep_folder)

    return deployment, 200
def _prepare_and_submit_blueprint(cls, file_server_root, app_dir,
                                  blueprint_id, visibility):
    """Parse, publish and store an uploaded blueprint.

    Extracts the application file, publishes the blueprint via the
    resource manager (which also DSL-parses it), moves the extracted
    app directory on the file server under a directory named after the
    blueprint id, and processes its plugins.

    :raises InvalidBlueprintError: when DSL parsing fails; the
        extracted app directory is removed in that case.
    """
    args = get_args_and_verify_arguments([
        Argument('private_resource', type=boolean),
        Argument('visibility'),
        Argument('application_file_name', default='')])
    app_file_name = cls._extract_application_file(
        file_server_root, app_dir, args.application_file_name)

    # add to blueprints manager (will also dsl_parse it)
    try:
        blueprint = get_resource_manager().publish_blueprint(
            app_dir,
            app_file_name,
            file_server_root,
            blueprint_id,
            args.private_resource,
            visibility
        )

        # moving the app directory in the file server to be under a
        # directory named after the blueprint id
        tenant_dir = os.path.join(
            file_server_root,
            FILE_SERVER_BLUEPRINTS_FOLDER,
            current_tenant.name)
        mkdirs(tenant_dir)
        shutil.move(os.path.join(file_server_root, app_dir),
                    os.path.join(tenant_dir, blueprint.id))
        cls._process_plugins(file_server_root, blueprint.id)
        return blueprint
    # `as ex` instead of the py2-only `except X, ex` form; format the
    # exception itself since BaseException.message is py2-only
    except manager_exceptions.DslParseException as ex:
        shutil.rmtree(os.path.join(file_server_root, app_dir))
        raise manager_exceptions.InvalidBlueprintError(
            'Invalid blueprint - {0}'.format(ex))
def receive_uploaded_data(self, data_id=None, **kwargs): blueprint_url = None # avoid clashing with existing blueprint names blueprint_id = data_id + uuid.uuid4().hex[:16] args = get_args_and_verify_arguments( [Argument('application_file_name', default='')]) # Handle importing blueprint through url if self._get_data_url_key() in request.args: if request.data or \ 'Transfer-Encoding' in request.headers or \ 'blueprint_archive' in request.files: raise manager_exceptions.BadParametersError( "Can pass {0} as only one of: URL via query parameters, " "request body, multi-form or " "chunked.".format(self._get_kind())) blueprint_url = request.args[self._get_data_url_key()] self._prepare_and_process_doc( blueprint_id, blueprint_url, application_file_name=args.application_file_name) return "", 204
def put(self, deployment_id, **kwargs):
    """Create a deployment; visibility may be private or tenant."""
    request_dict = get_json_and_verify_params(
        self.create_request_schema())
    bypass_maintenance = is_bypass_maintenance_mode()
    args = get_args_and_verify_arguments(
        [Argument('private_resource', type=types.boolean)])
    visibility = rest_utils.get_visibility_parameter(
        optional=True,
        valid_values=[VisibilityState.PRIVATE, VisibilityState.TENANT])
    new_deployment = get_resource_manager().create_deployment(
        request_dict['blueprint_id'],
        deployment_id,
        inputs=request_dict.get('inputs', {}),
        bypass_maintenance=bypass_maintenance,
        private_resource=args.private_resource,
        visibility=visibility,
        skip_plugins_validation=self.get_skip_plugin_validation_flag(
            request_dict))
    return new_deployment, 201
def put(self, deployment_id, **kwargs):
    """Create a deployment with default (None) visibility."""
    validate_inputs({'deployment_id': deployment_id})
    request_dict = get_json_and_verify_params(
        self.create_request_schema())
    bypass_maintenance = is_bypass_maintenance_mode()
    args = get_args_and_verify_arguments(
        [Argument('private_resource', type=boolean, default=False)])
    created = get_resource_manager().create_deployment(
        request_dict['blueprint_id'],
        deployment_id,
        private_resource=args.private_resource,
        visibility=None,
        inputs=request_dict.get('inputs', {}),
        bypass_maintenance=bypass_maintenance,
        skip_plugins_validation=self.get_skip_plugin_validation_flag(
            request_dict))
    return created, 201
def post(self, id, phase):
    """Drive one phase of a plugin update.

    Phases:
    1. (PHASES.INITIAL) Creates a temporary blueprint and executes a
       deployment update (plugins only) for all deployments of the
       given blueprint; `id` is the blueprint ID.
    2. (PHASES.FINAL) Updates the original blueprint plan and deletes
       the temporary one; `id` is the plugin update ID.

    :param id: blueprint ID (initial phase) or plugin update ID (final)
    :param phase: either PHASES.INITIAL or PHASES.FINAL (internal)
    """
    if phase == PHASES.INITIAL:
        force_args = rest_utils.get_args_and_verify_arguments([
            Argument('force', type=boolean, required=False, default=False)
        ])
        return get_plugins_updates_manager().initiate_plugins_update(
            blueprint_id=id, force=force_args.get('force'))
    if phase == PHASES.FINAL:
        return get_plugins_updates_manager().finalize(
            plugins_update_id=id)