def get(self, blueprint_id):
    """Download the blueprint's tar.gz archive via an nginx X-Accel redirect."""
    # Ensure the blueprint exists before serving its archive.
    get_blueprints_manager().get_blueprint(blueprint_id, {'id'})
    blueprint_path = '{0}/{1}/{2}/{2}.tar.gz'.format(
        config.instance().file_server_resources_uri,
        config.instance().file_server_uploaded_blueprints_folder,
        blueprint_id)
    local_path = os.path.join(
        config.instance().file_server_root,
        config.instance().file_server_uploaded_blueprints_folder,
        blueprint_id,
        '%s.tar.gz' % blueprint_id)
    response = make_response()
    # Headers instructing the front-end server to stream the file itself.
    headers = {
        'Content-Description': 'File Transfer',
        'Cache-Control': 'no-cache',
        'Content-Type': 'application/octet-stream',
        'Content-Disposition': 'attachment; filename=%s.tar.gz' % blueprint_id,
        'Content-Length': os.path.getsize(local_path),
        'X-Accel-Redirect': blueprint_path,
        'X-Accel-Buffering': 'yes',
    }
    for header_name, header_value in headers.items():
        response.headers[header_name] = header_value
    return response
def _handle_adding_node_instance(self, raw_instances, dep_update):
    """Handles adding a node instance

    :param raw_instances: raw node-instance dicts; each may carry a
        'modification' key describing how the deployment update affects it
    :param dep_update: the deployment update object (provides deployment_id)
    :return: the added and related node instances
    """
    added_instances = []
    add_related_instances = []
    for raw_node_instance in raw_instances:
        if raw_node_instance.get('modification') == 'added':
            # Newly added instance: seed the storage-level fields it needs
            # before batch creation below.
            changes = {
                'deployment_id': dep_update.deployment_id,
                'version': None,
                'state': None,
                'runtime_properties': {}
            }
            raw_node_instance.update(changes)
            added_instances.append(raw_node_instance)
        else:
            # Instance only related to the change: persist its (possibly
            # modified) raw data in place.
            # NOTE(review): placement of the update call inside this branch
            # is inferred from the flattened source — confirm it should not
            # also run for 'added' instances.
            add_related_instances.append(raw_node_instance)
            self._update_node_instance(raw_node_instance)
    # Create all newly added instances in one batch.
    get_blueprints_manager()._create_deployment_node_instances(
        dep_update.deployment_id,
        added_instances
    )
    return {
        CHANGE_TYPE.AFFECTED: added_instances,
        CHANGE_TYPE.RELATED: add_related_instances
    }
def add(self, ctx, current_entities):
    """Create a new node for the deployment and refresh its relationship targets."""
    get_blueprints_manager()._create_deployment_nodes(
        deployment_id=ctx.deployment_id,
        blueprint_id=None,
        plan=ctx.deployment_plan,
        node_ids=ctx.raw_node_id)
    current_entities[ctx.raw_node_id] = ctx.storage_node.to_dict()
    # Update new node relationships target nodes. Since any relationship
    # with target interface requires the target node to hold a plugin
    # which supports the operation, we should update the mapping for
    # this plugin under the target node.
    for relationship in ctx.raw_node.get(ctx.RELATIONSHIPS, []):
        target_id = relationship['target_id']
        target_node = self.sm.get_node(ctx.deployment_id, target_id)
        raw_target = deployment_update_utils.get_raw_node(
            ctx.deployment_plan, target_id)
        target_node.plugins = raw_target['plugins']
        self.sm.update_entity(target_node)
        current_entities[target_id] = target_node.to_dict()
    return ctx.raw_node_id
def patch(self, deployment_id):
    """Start or finish a deployment modification depending on 'stage'."""
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body('stage', request_json)
    stage = request_json['stage']
    if stage == 'start':
        verify_parameter_in_request_body('nodes', request_json,
                                         param_type=dict, optional=True)
        nodes = request_json.get('nodes', {})
        modification = get_blueprints_manager().\
            start_deployment_modification(deployment_id, nodes)
        node_instances = modification['node_instances']
        modified_nodes = modification['modified_nodes']
    elif stage == 'finish':
        verify_parameter_in_request_body('modification', request_json,
                                         param_type=dict)
        get_blueprints_manager().finish_deployment_modification(
            deployment_id, request_json['modification'])
        node_instances = {}
        modified_nodes = {}
    else:
        raise manager_exceptions.UnknownModificationStageError(
            'Unknown modification stage: {0}'.format(stage))
    return responses.DeploymentModification(
        deployment_id=deployment_id,
        node_instances=node_instances,
        modified_nodes=modified_nodes)
def get(self, blueprint_id, **kwargs):
    """Download the blueprint's archive, whichever supported type it was uploaded as."""
    # Ensure the blueprint exists before touching the file system.
    get_blueprints_manager().get_blueprint(blueprint_id, {"id"})
    archive_type = None
    local_path = None
    # Probe the file system for each supported archive extension.
    for candidate in SUPPORTED_ARCHIVE_TYPES:
        candidate_path = os.path.join(
            config.instance().file_server_root,
            config.instance().file_server_uploaded_blueprints_folder,
            blueprint_id,
            "{0}.{1}".format(blueprint_id, candidate),
        )
        if os.path.isfile(candidate_path):
            archive_type = candidate
            local_path = candidate_path
            break
    if archive_type is None:
        raise RuntimeError("Could not find blueprint's archive; "
                           "Blueprint ID: {0}".format(blueprint_id))
    blueprint_path = "{0}/{1}/{2}/{2}.{3}".format(
        config.instance().file_server_resources_uri,
        config.instance().file_server_uploaded_blueprints_folder,
        blueprint_id,
        archive_type,
    )
    return make_streaming_response(blueprint_id,
                                   blueprint_path,
                                   os.path.getsize(local_path),
                                   archive_type)
def _handle_adding_node_instance(instances, dep_update):
    """Handles adding a node instance

    Partitions the raw instances into newly added ones (created in storage)
    and ones merely related to the change.

    :param instances: raw node-instance dicts
    :param dep_update: the deployment update holding the deployment id
    :return: the added and related node instances
    """
    partitions = {True: [], False: []}
    for node_instance in instances:
        is_added = node_instance.get('modification', 'related') == 'added'
        if is_added:
            # Seed the storage-level fields required for creation.
            node_instance.update({
                'deployment_id': dep_update.deployment_id,
                'version': None,
                'state': None,
                'runtime_properties': {}
            })
        partitions[is_added].append(node_instance)
    added_instances = partitions[True]
    get_blueprints_manager()._create_deployment_node_instances(
        dep_update.deployment_id, added_instances)
    return {
        NODE_MOD_TYPES.AFFECTED: added_instances,
        NODE_MOD_TYPES.RELATED: partitions[False]
    }
def get(self, _include=None):
    """List executions, optionally restricted to a single deployment."""
    deployment_id = request.args.get('deployment_id')
    if deployment_id:
        # Raises if the deployment does not exist.
        get_blueprints_manager().get_deployment(deployment_id,
                                                include=['id'])
    execution_list = get_blueprints_manager().executions_list(
        deployment_id=deployment_id, include=_include)
    return [responses.Execution(**execution.to_dict())
            for execution in execution_list]
def patch(self, snapshot_id):
    """Update snapshot status by id."""
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body('status', request_json)
    status = request_json['status']
    error = request_json.get('error', '')
    get_blueprints_manager().update_snapshot_status(snapshot_id, status,
                                                    error)
def patch(self, execution_id, **kwargs):
    """Update execution status by id and return the updated execution."""
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body("status", request_json)
    status = request_json["status"]
    error = request_json.get("error", "")
    get_blueprints_manager().update_execution_status(execution_id, status,
                                                     error)
    return get_storage_manager().get_execution(execution_id)
def get(self, _include=None, **kwargs):
    """List executions, optionally including system workflows."""
    deployment_id = request.args.get("deployment_id")
    if deployment_id:
        # Raises if the deployment does not exist.
        get_blueprints_manager().get_deployment(deployment_id,
                                                include=["id"])
    include_system = verify_and_convert_bool(
        "include_system_workflows",
        request.args.get("include_system_workflows", "false"))
    filters = BlueprintsManager.create_filters_dict(
        deployment_id=deployment_id)
    result = get_blueprints_manager().executions_list(
        is_include_system_workflows=include_system,
        include=_include,
        filters=filters)
    return result.items
def get(self, execution_id, _include=None):
    """Get execution by id."""
    manager = get_blueprints_manager()
    execution = manager.get_execution(execution_id, include=_include)
    return responses.Execution(**execution.to_dict())
def post(self):
    """Execute a workflow.

    Expects a JSON body with 'deployment_id' and 'workflow_id', and
    optionally 'parameters' (dict), 'allow_custom_parameters' and 'force'
    (boolean-like strings).

    :return: the created execution and HTTP 201
    :raises manager_exceptions.BadParametersError: if 'parameters' is not
        a dict
    """
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body('deployment_id', request_json)
    verify_parameter_in_request_body('workflow_id', request_json)
    allow_custom_parameters = verify_and_convert_bool(
        'allow_custom_parameters',
        request_json.get('allow_custom_parameters', 'false'))
    force = verify_and_convert_bool('force',
                                    request_json.get('force', 'false'))
    # Consistently read from the already-bound request_json instead of
    # re-reading request.json.
    deployment_id = request_json['deployment_id']
    workflow_id = request_json['workflow_id']
    parameters = request_json.get('parameters', None)
    # isinstance is the idiomatic type check (was: __class__ is not dict).
    if parameters is not None and not isinstance(parameters, dict):
        raise manager_exceptions.BadParametersError(
            "request body's 'parameters' field must be a dict but"
            " is of type {0}".format(parameters.__class__.__name__))
    execution = get_blueprints_manager().execute_workflow(
        deployment_id, workflow_id, parameters=parameters,
        allow_custom_parameters=allow_custom_parameters, force=force)
    return responses.Execution(**execution.to_dict()), 201
def get(self, _include=None, **kwargs):
    """List uploaded blueprints."""
    return get_blueprints_manager().blueprints_list(include=_include).items
def get(self, blueprint_id, _include=None):
    """Get blueprint by id."""
    manager = get_blueprints_manager()
    blueprint = manager.get_blueprint(blueprint_id, _include)
    return responses.BlueprintState(**blueprint.to_dict())
def post(self, **kwargs):
    """Execute a workflow.

    Expects a JSON body with 'deployment_id' and 'workflow_id', and
    optionally 'parameters' (dict), 'allow_custom_parameters' and 'force'
    (boolean-like strings).

    :return: the created execution and HTTP 201
    :raises manager_exceptions.BadParametersError: if 'parameters' is not
        a dict
    """
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body("deployment_id", request_json)
    verify_parameter_in_request_body("workflow_id", request_json)
    allow_custom_parameters = verify_and_convert_bool(
        "allow_custom_parameters",
        request_json.get("allow_custom_parameters", "false")
    )
    force = verify_and_convert_bool("force",
                                    request_json.get("force", "false"))
    # Consistently read from the already-bound request_json instead of
    # re-reading request.json.
    deployment_id = request_json["deployment_id"]
    workflow_id = request_json["workflow_id"]
    parameters = request_json.get("parameters", None)
    # isinstance is the idiomatic type check (was: __class__ is not dict).
    if parameters is not None and not isinstance(parameters, dict):
        raise manager_exceptions.BadParametersError(
            "request body's 'parameters' field must be a dict but"
            " is of type {0}".format(parameters.__class__.__name__)
        )
    execution = get_blueprints_manager().execute_workflow(
        deployment_id,
        workflow_id,
        parameters=parameters,
        allow_custom_parameters=allow_custom_parameters,
        force=force,
    )
    return execution, 201
def _prepare_and_submit_blueprint(self, file_server_root,
                                  application_dir, blueprint_id):
    """Parse, publish and relocate an uploaded blueprint.

    :param file_server_root: root directory of the file server
    :param application_dir: directory (under the root) holding the upload
    :param blueprint_id: id to publish the blueprint under
    :return: the published blueprint
    :raises manager_exceptions.InvalidBlueprintError: if DSL parsing fails
        (the uploaded directory is removed in that case)
    """
    application_file = self._extract_application_file(
        file_server_root, application_dir)
    file_server_base_url = config.instance().file_server_base_uri
    dsl_path = '{0}/{1}'.format(file_server_base_url, application_file)
    alias_mapping = '{0}/{1}'.format(file_server_base_url,
                                     'cloudify/alias-mappings.yaml')
    resources_base = file_server_base_url + '/'
    # add to blueprints manager (will also dsl_parse it)
    try:
        blueprint = get_blueprints_manager().publish_blueprint(
            dsl_path, alias_mapping, resources_base, blueprint_id)
        # moving the app directory in the file server to be under a
        # directory named after the blueprint id
        shutil.move(
            os.path.join(file_server_root, application_dir),
            os.path.join(file_server_root,
                         config.instance().file_server_blueprints_folder,
                         blueprint.id))
        self._process_plugins(file_server_root, blueprint.id)
        return blueprint
    except DslParseException as ex:
        # 'except ... as ex' replaces the legacy Python-2-only
        # 'except ..., ex' form and is valid on Python 2.6+ and 3.
        shutil.rmtree(os.path.join(file_server_root, application_dir))
        raise manager_exceptions.InvalidBlueprintError(
            'Invalid blueprint - {0}'.format(ex.args))
def get(self, _include=None, filters=None, pagination=None, **kwargs):
    """List deployments."""
    manager = get_blueprints_manager()
    return manager.deployments_list(include=_include,
                                    filters=filters,
                                    pagination=pagination)
def post(self, deployment_id):
    """Execute a workflow on the given deployment.

    Expects a JSON body with 'workflow_id', and optionally 'parameters'
    (dict), 'allow_custom_parameters' and 'force' (boolean-like strings).

    :return: the created execution and HTTP 201
    :raises manager_exceptions.BadParametersError: if 'parameters' is not
        a dict
    """
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body('workflow_id', request_json)
    allow_custom_parameters = verify_and_convert_bool(
        'allow_custom_parameters',
        request_json.get('allow_custom_parameters', 'false'))
    force = verify_and_convert_bool(
        'force', request_json.get('force', 'false'))
    # Consistently read from the already-bound request_json instead of
    # re-reading request.json.
    workflow_id = request_json['workflow_id']
    parameters = request_json.get('parameters', None)
    # isinstance is the idiomatic type check (was: __class__ is not dict).
    if parameters is not None and not isinstance(parameters, dict):
        raise manager_exceptions.BadParametersError(
            "request body's 'parameters' field must be a dict but"
            " is of type {0}".format(parameters.__class__.__name__))
    execution = get_blueprints_manager().execute_workflow(
        deployment_id, workflow_id, parameters=parameters,
        allow_custom_parameters=allow_custom_parameters, force=force)
    return responses.Execution(**execution.to_dict()), 201
def get(self, blueprint_id, _include=None):
    """Get blueprint by id."""
    blueprint = get_blueprints_manager().get_blueprint(blueprint_id,
                                                       _include)
    blueprint_dict = blueprint.to_dict()
    return responses.BlueprintState(**blueprint_dict)
def get(self, plugin_id, **kwargs):
    """Download plugin archive."""
    # Verify plugin exists; raises if it does not.
    plugin = get_blueprints_manager().get_plugin(plugin_id)
    archive_name = plugin.archive_name
    # attempting to find the archive file on the file system
    local_path = _get_plugin_archive_path(plugin_id, archive_name)
    if not os.path.isfile(local_path):
        raise RuntimeError("Could not find plugins archive; "
                           "Plugin ID: {0}".format(plugin_id))
    plugin_path = '{0}/{1}/{2}/{3}'.format(
        config.instance().file_server_resources_uri,
        'plugins',
        plugin_id,
        archive_name)
    archive_size = os.path.getsize(local_path)
    return make_streaming_response(plugin_id, plugin_path, archive_size,
                                   'tar.gz')
def _prepare_and_submit_blueprint(self, file_server_root, application_dir,
                                  blueprint_id):
    """Parse, publish and relocate an uploaded blueprint.

    :param file_server_root: root directory of the file server
    :param application_dir: directory (under the root) holding the upload
    :param blueprint_id: id to publish the blueprint under
    :return: the published blueprint
    :raises manager_exceptions.InvalidBlueprintError: if DSL parsing fails
        (the uploaded directory is removed in that case)
    """
    application_file = self._extract_application_file(file_server_root,
                                                      application_dir)
    file_server_base_url = config.instance().file_server_base_uri
    dsl_path = '{0}/{1}'.format(file_server_base_url, application_file)
    alias_mapping = '{0}/{1}'.format(file_server_base_url,
                                     'cloudify/alias-mappings.yaml')
    resources_base = file_server_base_url + '/'
    # add to blueprints manager (will also dsl_parse it)
    try:
        blueprint = get_blueprints_manager().publish_blueprint(
            dsl_path, alias_mapping, resources_base, blueprint_id)
        # moving the app directory in the file server to be under a
        # directory named after the blueprint id
        shutil.move(os.path.join(file_server_root, application_dir),
                    os.path.join(
                        file_server_root,
                        config.instance().file_server_blueprints_folder,
                        blueprint.id))
        self._process_plugins(file_server_root, blueprint.id)
        return blueprint
    except DslParseException as ex:
        # 'except ... as ex' replaces the legacy Python-2-only
        # 'except ..., ex' form and is valid on Python 2.6+ and 3.
        shutil.rmtree(os.path.join(file_server_root, application_dir))
        raise manager_exceptions.InvalidBlueprintError(
            'Invalid blueprint - {0}'.format(ex.args))
def get(self, deployment_id, _include=None):
    """List deployment executions."""
    # Raises if the deployment does not exist.
    get_storage_manager().get_deployment(deployment_id, include=['id'])
    executions = get_blueprints_manager().get_deployment_executions(
        deployment_id, include=_include)
    return [responses.Execution(**execution.to_dict())
            for execution in executions]
def get(self, _include=None, filters=None, pagination=None, **kwargs):
    """List executions, optionally including system workflows."""
    deployment_id = request.args.get('deployment_id')
    if deployment_id:
        # Raises if the deployment does not exist.
        get_blueprints_manager().get_deployment(deployment_id,
                                                include=['id'])
    include_system = verify_and_convert_bool(
        '_include_system_workflows',
        request.args.get('_include_system_workflows', 'false'))
    return get_blueprints_manager().executions_list(
        filters=filters,
        pagination=pagination,
        is_include_system_workflows=include_system,
        include=_include)
def get(self, _include=None, filters=None, pagination=None, sort=None,
        **kwargs):
    """List uploaded blueprints."""
    manager = get_blueprints_manager()
    return manager.blueprints_list(include=_include,
                                   filters=filters,
                                   pagination=pagination,
                                   sort=sort)
def _prepare_and_process_doc(self, data_id, file_server_root,
                             archive_target_path, **kwargs):
    """Create the snapshot model in the UPLOADED state; returns (model, None)."""
    snapshot = get_blueprints_manager().create_snapshot_model(
        data_id, status=models.Snapshot.UPLOADED)
    return snapshot, None
def get(self, deployment_id, _include=None):
    """Get deployment by id."""
    deployment = get_blueprints_manager().get_deployment(
        deployment_id, include=_include)
    deployment_dict = _replace_workflows_field_for_deployment_response(
        deployment.to_dict())
    return responses.Deployment(**deployment_dict)
def post(self, **kwargs):
    """Create provider context.

    Expects a JSON body with 'name' and 'context'; the 'update' query arg
    selects update (200) vs. create (201) semantics.

    :raises manager_exceptions.ResolverInstantiationError: if the import
        resolver cannot be instantiated from the given context
    """
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body("context", request_json)
    verify_parameter_in_request_body("name", request_json)
    # Consistently read from the already-bound request_json.
    context = models.ProviderContext(name=request_json["name"],
                                     context=request_json["context"])
    update = verify_and_convert_bool("update",
                                     request.args.get("update", "false"))
    status_code = 200 if update else 201
    try:
        get_blueprints_manager().update_provider_context(update, context)
        return dict(status="ok"), status_code
    except dsl_parser_utils.ResolverInstantiationError as ex:
        # 'except ... as ex' replaces the legacy Python-2-only
        # 'except ..., ex' form and is valid on Python 2.6+ and 3.
        raise manager_exceptions.ResolverInstantiationError(str(ex))
def delete(self, filters=None, pagination=None, sort=None,
           range_filters=None, **kwargs):
    """Delete events/logs connected to a certain Deployment ID."""
    query = self._build_query(filters=filters,
                              pagination=pagination,
                              sort=sort,
                              range_filters=range_filters)
    raw_events = ManagerElasticsearch.search_events(body=query)
    metadata = ManagerElasticsearch.build_list_result_metadata(query,
                                                               raw_events)
    deletion_info = ManagerElasticsearch.extract_info_for_deletion(
        raw_events)
    get_blueprints_manager().delete_events(deletion_info)
    # We don't really want to return all of the deleted events, so it's a
    # bit of a hack to only return the number of events to delete - if any
    # of the events weren't deleted, we'd have gotten an error from the
    # method above
    return ListResult([len(deletion_info)], metadata)
def get(self, _include=None, filters=None, pagination=None, sort=None,
        **kwargs):
    """List snapshots."""
    manager = get_blueprints_manager()
    return manager.snapshots_list(include=_include,
                                  filters=filters,
                                  pagination=pagination,
                                  sort=sort)
def delete(self, deployment_id, **kwargs):
    """Delete deployment by id."""
    args = self._args_parser.parse_args()
    ignore_live_nodes = verify_and_convert_bool(
        "ignore_live_nodes", args["ignore_live_nodes"])
    deleted = get_blueprints_manager().delete_deployment(
        deployment_id, ignore_live_nodes)
    return deleted, 200
def delete(self, plugin_id, **kwargs):
    """Delete plugin by ID."""
    # Verify plugin exists; raises if it does not.
    plugin = get_blueprints_manager().get_plugin(plugin_id)
    archive_path = _get_plugin_archive_path(plugin_id, plugin.archive_name)
    # Best-effort removal of the plugin's directory on the file server.
    shutil.rmtree(os.path.dirname(archive_path), ignore_errors=True)
    get_storage_manager().delete_plugin(plugin_id)
    return plugin
def post(self, **kwargs):
    """Start a deployment modification."""
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body("deployment_id", request_json)
    verify_parameter_in_request_body("context", request_json,
                                     param_type=dict, optional=True)
    verify_parameter_in_request_body("nodes", request_json,
                                     param_type=dict, optional=True)
    deployment_id = request_json["deployment_id"]
    context = request_json.get("context", {})
    nodes = request_json.get("nodes", {})
    modification = get_blueprints_manager().start_deployment_modification(
        deployment_id, nodes, context)
    return modification, 201
def get(self, snapshot_id):
    """Download a snapshot archive as a streaming response."""
    # Verify the snapshot exists; raises otherwise.
    get_blueprints_manager().get_snapshot(snapshot_id)
    archive_name = '{0}.zip'.format(snapshot_id)
    snapshot_path = os.path.join(_get_snapshot_path(snapshot_id),
                                 archive_name)
    snapshot_uri = '{0}/{1}/{2}/{2}.zip'.format(
        config.instance().file_server_resources_uri,
        config.instance().file_server_snapshots_folder,
        snapshot_id)
    return make_streaming_response(snapshot_id,
                                   snapshot_uri,
                                   os.path.getsize(snapshot_path),
                                   'zip')
def get(self, _include=None):
    """List deployments."""
    deployments = get_blueprints_manager().deployments_list(
        include=_include)
    result = []
    for deployment in deployments:
        deployment_dict = _replace_workflows_field_for_deployment_response(
            deployment.to_dict())
        result.append(responses.Deployment(**deployment_dict))
    return result
def put(self, snapshot_id):
    """Create a snapshot; returns the triggering execution and HTTP 201."""
    verify_json_content_type()
    request_json = request.json
    include_metrics = verify_and_convert_bool(
        'include_metrics', request_json.get('include_metrics', 'false'))
    include_credentials = verify_and_convert_bool(
        'include_credentials',
        request_json.get('include_credentials', 'true'))
    manager = get_blueprints_manager()
    execution = manager.create_snapshot(snapshot_id,
                                        include_metrics,
                                        include_credentials)
    return execution, 201
def post(self, **kwargs):
    """
    Upload a plugin

    Receives the uploaded plugin data under a fresh UUID, then installs
    it; on failure the plugin record is force-removed and the error is
    re-raised with the original traceback preserved.
    """
    plugin, code = UploadedPluginsManager().receive_uploaded_data(
        str(uuid4()))
    try:
        get_blueprints_manager().install_plugin(plugin)
    except manager_exceptions.ExecutionTimeout:
        tp, ex, tb = sys.exc_info()
        # Re-raise as a timeout-specific error, keeping the original
        # traceback via the Python-2 three-argument raise.
        raise manager_exceptions.PluginInstallationTimeout(
            'Timed out during plugin installation. ({0}: {1})'
            .format(tp.__name__, ex)), None, tb
    except Exception:
        # Any other failure: roll back by force-removing the plugin
        # record, then re-raise with the original traceback.
        get_blueprints_manager().remove_plugin(
            plugin_id=plugin.id, force=True)
        tp, ex, tb = sys.exc_info()
        raise manager_exceptions.PluginInstallationError(
            'Failed during plugin installation. ({0}: {1})'
            .format(tp.__name__, ex)), None, tb
    return plugin, code
def post(self, snapshot_id):
    """Restore a snapshot; returns the triggering execution and HTTP 200."""
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body('recreate_deployments_envs',
                                     request_json)
    recreate_deployments_envs = verify_and_convert_bool(
        'recreate_deployments_envs',
        request_json['recreate_deployments_envs'])
    force = verify_and_convert_bool('force', request_json['force'])
    manager = get_blueprints_manager()
    execution = manager.restore_snapshot(snapshot_id,
                                         recreate_deployments_envs,
                                         force)
    return execution, 200
def _add_node(self, ctx, current_nodes):
    """
    handles adding a node

    :param ctx: node-handler context for the added node
    :param current_nodes: mapping updated in place with new/changed nodes
    :return: the new node
    """
    get_blueprints_manager()._create_deployment_nodes(
        deployment_id=ctx.deployment_id,
        blueprint_id='N/A',
        plan=ctx.blueprint,
        node_ids=ctx.raw_node_id
    )
    current_nodes[ctx.raw_node_id] = ctx.storage_node.to_dict()
    # Any relationship with a target interface requires the target node to
    # hold the plugin supporting the operation, so refresh the plugins
    # mapping on every target node of the new node's relationships.
    for relationship in ctx.raw_node.get('relationships', []):
        target_id = relationship['target_id']
        raw_target = utils.get_raw_node(ctx.blueprint, target_id)
        self.sm.update_node(
            deployment_id=ctx.deployment_id,
            node_id=target_id,
            changes={'plugins': raw_target['plugins']})
        current_nodes[target_id] = \
            self.sm.get_node(ctx.deployment_id, target_id).to_dict()
    return ctx.raw_node_id
def delete(self, deployment_id):
    """Delete deployment by id."""
    args = self._args_parser.parse_args()
    ignore_live_nodes = verify_and_convert_bool(
        'ignore_live_nodes', args['ignore_live_nodes'])
    deployment = get_blueprints_manager().delete_deployment(
        deployment_id, ignore_live_nodes)
    # not using '_replace_workflows_field_for_deployment_response'
    # method since the object returned only contains the deployment's id
    return responses.Deployment(**deployment.to_dict()), 200
def get_running_executions():
    """Return summary dicts for all executions that have not yet ended."""
    executions = get_blueprints_manager().list_executions(
        is_include_system_workflows=True).items
    return [
        {
            'id': execution.id,
            'status': execution.status,
            'deployment_id': execution.deployment_id,
            'workflow_id': execution.workflow_id
        }
        for execution in executions
        if execution.status not in models.Execution.END_STATES
    ]
def get(self, snapshot_id):
    """Download a snapshot archive; failed snapshots are not downloadable."""
    snapshot = get_blueprints_manager().get_snapshot(snapshot_id)
    if snapshot.status == models.Snapshot.FAILED:
        raise manager_exceptions.SnapshotActionError(
            'Failed snapshot cannot be downloaded')
    archive_name = '{0}.zip'.format(snapshot_id)
    snapshot_path = os.path.join(_get_snapshot_path(snapshot_id),
                                 archive_name)
    snapshot_uri = '{0}/{1}/{2}/{2}.zip'.format(
        config.instance().file_server_resources_uri,
        config.instance().file_server_snapshots_folder,
        snapshot_id)
    return make_streaming_response(snapshot_id,
                                   snapshot_uri,
                                   os.path.getsize(snapshot_path),
                                   'zip')
def put(self, deployment_id):
    """Create a deployment.

    Expects a JSON body with 'blueprint_id' and, optionally, 'inputs'
    (dict).

    :return: the created deployment and HTTP 201
    """
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body('blueprint_id', request_json)
    verify_parameter_in_request_body('inputs', request_json,
                                     param_type=dict, optional=True)
    # Consistently read from the already-bound request_json instead of
    # re-reading request.json.
    blueprint_id = request_json['blueprint_id']
    deployment = get_blueprints_manager().create_deployment(
        blueprint_id, deployment_id, inputs=request_json.get('inputs', {}))
    return responses.Deployment(
        **_replace_workflows_field_for_deployment_response(
            deployment.to_dict())), 201
def post(self, snapshot_id):
    """Restore a snapshot; returns the triggering execution and HTTP 200."""
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body('recreate_deployments_envs',
                                     request_json)
    recreate_deployments_envs = verify_and_convert_bool(
        'recreate_deployments_envs',
        request_json['recreate_deployments_envs'])
    bypass_maintenance = is_bypass_maintenance_mode()
    force = verify_and_convert_bool('force', request_json['force'])
    # Fall back to a 300-second timeout when none was supplied.
    default_timeout_sec = 300
    timeout = convert_to_int(
        request_json.get('timeout', default_timeout_sec))
    execution = get_blueprints_manager().restore_snapshot(
        snapshot_id, recreate_deployments_envs, force,
        bypass_maintenance, timeout)
    return execution, 200
def get(self, **kwargs):
    """Report the maintenance-mode status based on the state file contents."""
    maintenance_file_path = get_maintenance_file_path()
    if not os.path.isfile(maintenance_file_path):
        return {'status': NOT_IN_MAINTENANCE_MODE}
    with open(maintenance_file_path, 'r') as f:
        state = f.read()
    if state == MAINTENANCE_MODE_ACTIVE:
        return {'status': MAINTENANCE_MODE_ACTIVE}
    if state == ACTIVATING_MAINTENANCE_MODE:
        executions = get_blueprints_manager().executions_list(
            is_include_system_workflows=True).items
        # Still activating while any execution has not reached an end state.
        if any(execution.status not in models.Execution.END_STATES
               for execution in executions):
            return {'status': ACTIVATING_MAINTENANCE_MODE}
        write_maintenance_state(MAINTENANCE_MODE_ACTIVE)
        return {'status': MAINTENANCE_MODE_ACTIVE}
def post(self, execution_id):
    """
    Apply execution action (cancel, force-cancel) by id

    :return: the (possibly updated) execution and HTTP 201
    :raises manager_exceptions.BadParametersError: if 'action' is not one
        of the valid cancel actions
    """
    verify_json_content_type()
    request_json = request.json
    verify_parameter_in_request_body('action', request_json)
    # Consistently read from the already-bound request_json.
    action = request_json['action']
    valid_actions = ['cancel', 'force-cancel']
    if action not in valid_actions:
        raise manager_exceptions.BadParametersError(
            'Invalid action: {0}, Valid action values are: {1}'.format(
                action, valid_actions))
    # The validation above guarantees 'action' is one of the two cancel
    # variants, so the original redundant membership re-check (which could
    # otherwise fall through and return None) is dropped.
    return get_blueprints_manager().cancel_execution(
        execution_id, action == 'force-cancel'), 201
def delete(self, blueprint_id):
    """
    Delete blueprint by id
    """
    # Note: The current delete semantics are such that if a deployment
    # for the blueprint exists, the deletion operation will fail.
    # However, there is no handling of possible concurrency issue with
    # regard to that matter at the moment.
    blueprint = get_blueprints_manager().delete_blueprint(blueprint_id)
    # Remove both copies of the blueprint's resources from the file
    # server: the processed copy and the originally uploaded one.
    file_server_root = config.instance().file_server_root
    for folder in (config.instance().file_server_blueprints_folder,
                   config.instance().file_server_uploaded_blueprints_folder):
        shutil.rmtree(os.path.join(file_server_root, folder, blueprint.id))
    return responses.BlueprintState(**blueprint.to_dict()), 200