def test_model_serialization(self):
    """Serialize a Deployment via to_response() and round-trip the
    result back through the model constructor, verifying key fields.
    """
    now = utils.get_formatted_timestamp()
    blueprint = models.Blueprint(id='blueprint-id',
                                 created_at=now,
                                 updated_at=now,
                                 description=None,
                                 plan={'name': 'my-bp'},
                                 main_file_name='aaa')
    self.sm.put(blueprint)
    now2 = utils.get_formatted_timestamp()
    dep = models.Deployment(id='dep-id',
                            created_at=now2,
                            updated_at=now2,
                            permalink=None,
                            description=None,
                            workflows={},
                            inputs={},
                            policy_types={},
                            policy_triggers={},
                            groups={},
                            scaling_groups={},
                            outputs={},
                            capabilities={})
    dep.blueprint = blueprint
    self.sm.put(dep)

    serialized_dep = dep.to_response()
    self.assertEqual(35, len(serialized_dep))
    self.assertEqual(dep.id, serialized_dep['id'])
    self.assertEqual(dep.created_at, serialized_dep['created_at'])
    self.assertEqual(dep.updated_at, serialized_dep['updated_at'])
    self.assertEqual(dep.blueprint_id, serialized_dep['blueprint_id'])
    self.assertEqual(dep.permalink, serialized_dep['permalink'])
    self.assertEqual(dep.tenant.name, serialized_dep['tenant_name'])
    # assertIsNone is the idiomatic form of assertEqual(x, None)
    self.assertIsNone(dep.description)

    # `blueprint_id` isn't a regular column, but a relationship
    serialized_dep.pop('blueprint_id')
    serialized_dep.pop('tenant_name')
    serialized_dep.pop('created_by')
    serialized_dep.pop('site_name')
    serialized_dep.pop('latest_execution_status')
    serialized_dep.pop('environment_type')
    serialized_dep.pop('latest_execution_total_operations')
    serialized_dep.pop('latest_execution_finished_operations')
    # Deprecated columns, for backwards compatibility -
    # was added to the response
    serialized_dep.pop('resource_availability')
    serialized_dep.pop('private_resource')

    deserialized_dep = models.Deployment(**serialized_dep)
    self.assertEqual(dep.id, deserialized_dep.id)
    self.assertEqual(dep.created_at, deserialized_dep.created_at)
    self.assertEqual(dep.updated_at, deserialized_dep.updated_at)
    self.assertEqual(dep.permalink, deserialized_dep.permalink)
    self.assertEqual(dep.description, deserialized_dep.description)
def test_model_serialization(self):
    """Serialize a Deployment via to_response() and round-trip the
    result back through the model constructor, verifying key fields.

    Uses assertEqual throughout: ``assertEquals`` is a deprecated
    alias removed in Python 3.12.
    """
    now = utils.get_formatted_timestamp()
    blueprint = models.Blueprint(id='blueprint-id',
                                 created_at=now,
                                 updated_at=now,
                                 description=None,
                                 plan={'name': 'my-bp'},
                                 main_file_name='aaa')
    self.sm.put(blueprint)
    now2 = utils.get_formatted_timestamp()
    dep = models.Deployment(id='dep-id',
                            created_at=now2,
                            updated_at=now2,
                            permalink=None,
                            description=None,
                            workflows={},
                            inputs={},
                            policy_types={},
                            policy_triggers={},
                            groups={},
                            scaling_groups={},
                            outputs={},
                            capabilities={})
    dep.blueprint = blueprint
    self.sm.put(dep)

    serialized_dep = dep.to_response()
    self.assertEqual(20, len(serialized_dep))
    self.assertEqual(dep.id, serialized_dep['id'])
    self.assertEqual(dep.created_at, serialized_dep['created_at'])
    self.assertEqual(dep.updated_at, serialized_dep['updated_at'])
    self.assertEqual(dep.blueprint_id, serialized_dep['blueprint_id'])
    self.assertEqual(dep.permalink, serialized_dep['permalink'])
    self.assertEqual(dep.tenant.name, serialized_dep['tenant_name'])
    self.assertIsNone(dep.description)

    # `blueprint_id` isn't a regular column, but a relationship
    serialized_dep.pop('blueprint_id')
    serialized_dep.pop('tenant_name')
    serialized_dep.pop('created_by')
    serialized_dep.pop('site_name')
    # Deprecated columns, for backwards compatibility -
    # was added to the response
    serialized_dep.pop('resource_availability')
    serialized_dep.pop('private_resource')

    deserialized_dep = models.Deployment(**serialized_dep)
    self.assertEqual(dep.id, deserialized_dep.id)
    self.assertEqual(dep.created_at, deserialized_dep.created_at)
    self.assertEqual(dep.updated_at, deserialized_dep.updated_at)
    self.assertEqual(dep.permalink, deserialized_dep.permalink)
    self.assertEqual(dep.description, deserialized_dep.description)
def test_model_serialization(self):
    """Serialize a Deployment via to_response() and round-trip the
    result back through the model constructor, verifying key fields.

    Uses assertEqual throughout: ``assertEquals`` is a deprecated
    alias removed in Python 3.12.
    """
    now = utils.get_formatted_timestamp()
    blueprint = models.Blueprint(id='blueprint-id',
                                 created_at=now,
                                 updated_at=now,
                                 description=None,
                                 plan={'name': 'my-bp'},
                                 main_file_name='aaa')
    self.sm.put(blueprint)
    now2 = utils.get_formatted_timestamp()
    dep = models.Deployment(id='dep-id',
                            created_at=now2,
                            updated_at=now2,
                            permalink=None,
                            description=None,
                            workflows={},
                            inputs={},
                            policy_types={},
                            policy_triggers={},
                            groups={},
                            scaling_groups={},
                            outputs={})
    dep.blueprint = blueprint
    self.sm.put(dep)

    serialized_dep = dep.to_response()
    self.assertEqual(18, len(serialized_dep))
    self.assertEqual(dep.id, serialized_dep['id'])
    self.assertEqual(dep.created_at, serialized_dep['created_at'])
    self.assertEqual(dep.updated_at, serialized_dep['updated_at'])
    self.assertEqual(dep.blueprint_id, serialized_dep['blueprint_id'])
    self.assertEqual(dep.permalink, serialized_dep['permalink'])
    self.assertEqual(dep.tenant.name, serialized_dep['tenant_name'])
    self.assertIsNone(dep.description)

    # `blueprint_id` isn't a regular column, but a relationship
    serialized_dep.pop('blueprint_id')
    serialized_dep.pop('tenant_name')
    serialized_dep.pop('created_by')
    serialized_dep.pop('resource_availability')

    deserialized_dep = models.Deployment(**serialized_dep)
    self.assertEqual(dep.id, deserialized_dep.id)
    self.assertEqual(dep.created_at, deserialized_dep.created_at)
    self.assertEqual(dep.updated_at, deserialized_dep.updated_at)
    self.assertEqual(dep.permalink, deserialized_dep.permalink)
    self.assertEqual(dep.description, deserialized_dep.description)
def mock_execute_task(execution_id, **_):
    """Mark the given execution as finished with the mocked task state."""
    storage = get_storage_manager()
    execution = storage.get(models.Execution, execution_id)
    execution.error = ''
    execution.ended_at = utils.get_formatted_timestamp()
    execution.status = task_state()
    storage.update(execution)
def _create_agent(self, name, state, request_dict):
    """Build a new Agent model from the request payload and persist it."""
    now = utils.get_formatted_timestamp()
    password = request_dict.get('rabbitmq_password')
    if password:
        password = encrypt(password)
    # TODO: remove these fields from the runtime properties
    agent = models.Agent(
        id=name,
        name=name,
        ip=request_dict.get('ip'),
        install_method=request_dict.get('install_method'),
        system=request_dict.get('system'),
        state=state,
        version=request_dict.get('version'),
        rabbitmq_username=request_dict.get('rabbitmq_username'),
        rabbitmq_password=password,
        rabbitmq_exchange=request_dict.get('rabbitmq_exchange'),
        created_at=now,
        updated_at=now,
    )
    sm = get_storage_manager()
    agent.node_instance = sm.get(
        models.NodeInstance,
        request_dict.get('node_instance_id'),
    )
    return sm.put(agent)
def _get_mock_schedule(schedule_id='default', next_occurrence=None,
                       rule=None, slip=0, stop_on_fail=False,
                       latest_execution=None, enabled=True):
    """Build an in-memory ExecutionSchedule (with its blueprint and
    deployment) for use in tests.

    ``rule`` defaults to ``{'recurrence': '1 min'}`` and is created
    fresh on every call — the previous mutable default dict was shared
    between calls and could leak mutations across tests.
    """
    if rule is None:
        rule = {'recurrence': '1 min'}
    now = utils.get_formatted_timestamp()
    blueprint = models.Blueprint(id='mock-bp', created_at=now,
                                 updated_at=now, main_file_name='abcd',
                                 plan={})
    deployment = models.Deployment(
        id='mock-depl',
        created_at=now,
        updated_at=now,
    )
    deployment.blueprint = blueprint
    schedule = models.ExecutionSchedule(
        _storage_id=1,
        id=schedule_id,
        deployment=deployment,
        created_at=now,
        since=now,
        until=None,
        rule=rule,
        slip=slip,
        workflow_id='install',
        parameters=None,
        execution_arguments={},
        stop_on_fail=stop_on_fail,
        next_occurrence=next_occurrence or now,
        latest_execution=latest_execution,
        enabled=enabled)
    return schedule
def _execute_workflow(self,
                      deployment_update,
                      workflow_id,
                      parameters=None,
                      allow_custom_parameters=False,
                      force=False):
    """Executes the specified workflow on the update's deployment.

    :param deployment_update: the DeploymentUpdate being applied; its
        plan supplies the workflow plugins to install, and the new
        execution is attached to it.
    :param workflow_id: id of a workflow declared on the deployment.
    :param parameters: execution parameters to merge with the
        workflow's declared parameters.
    :param allow_custom_parameters: whether parameters not declared by
        the workflow are accepted.
    :param force: accepted for interface compatibility; not referenced
        in this body.
    :return: the newly created (PENDING) Execution model.
    :raises NonexistentWorkflowError: if the deployment does not
        declare the requested workflow.
    """
    deployment_id = deployment_update.deployment_id
    deployment = self.sm.get(models.Deployment, deployment_id)
    blueprint_id = deployment.blueprint_id

    if workflow_id not in deployment.workflows:
        raise manager_exceptions.NonexistentWorkflowError(
            'Workflow {0} does not exist in deployment {1}'
            .format(workflow_id, deployment_id))
    workflow = deployment.workflows[workflow_id]

    execution_parameters = \
        ResourceManager._merge_and_validate_execution_parameters(
            workflow, workflow_id, parameters, allow_custom_parameters)

    execution_id = str(uuid.uuid4())

    # Persist the execution (and link it to the update) before
    # dispatching, so its state is visible as PENDING.
    new_execution = models.Execution(
        id=execution_id,
        status=ExecutionState.PENDING,
        created_at=utils.get_formatted_timestamp(),
        workflow_id=workflow_id,
        error='',
        parameters=ResourceManager._get_only_user_execution_parameters(
            execution_parameters),
        is_system_workflow=False)

    if deployment:
        new_execution.set_deployment(deployment)
    deployment_update.execution = new_execution
    self.sm.put(new_execution)

    # executing the user workflow
    workflow_plugins = \
        deployment_update.deployment_plan[
            constants.WORKFLOW_PLUGINS_TO_INSTALL]
    workflow_executor.execute_workflow(
        workflow_id,
        workflow,
        workflow_plugins=workflow_plugins,
        blueprint_id=blueprint_id,
        deployment_id=deployment_id,
        execution_id=execution_id,
        execution_parameters=execution_parameters)

    return new_execution
def _create_plugin_from_archive(self,
                                plugin_id,
                                archive_path,
                                private_resource,
                                visibility):
    """Read the package metadata out of the plugin archive and build a
    Plugin model (not yet persisted), resolving its visibility first.
    """
    package_json = self._load_plugin_package_json(archive_path)
    os_props = package_json.get('build_server_os_properties')
    plugin_info = {'package_name': package_json.get('package_name'),
                   'archive_name': package_json.get('archive_name')}
    visibility = get_resource_manager().get_resource_visibility(
        Plugin,
        plugin_id,
        visibility,
        private_resource,
        plugin_info,
    )
    return Plugin(
        id=plugin_id,
        package_name=package_json.get('package_name'),
        package_version=package_json.get('package_version'),
        archive_name=package_json.get('archive_name'),
        package_source=package_json.get('package_source'),
        supported_platform=package_json.get('supported_platform'),
        distribution=os_props.get('distribution'),
        distribution_version=os_props.get('distribution_version'),
        distribution_release=os_props.get('distribution_release'),
        wheels=package_json.get('wheels'),
        excluded_wheels=package_json.get('excluded_wheels'),
        supported_py_versions=package_json.get(
            'supported_python_versions'),
        uploaded_at=get_formatted_timestamp(),
        visibility=visibility,
    )
def test_storage_serialization_and_response(self):
    """A stored DeploymentUpdate should come back from the REST client
    with the same response attributes."""
    now = utils.get_formatted_timestamp()
    sm = get_storage_manager()
    deployment_update = models.DeploymentUpdate(
        deployment_id='deployment-id',
        deployment_plan={'name': 'my-bp'},
        state='staged',
        id='depup-id',
        steps=[],
        deployment_update_nodes=None,
        deployment_update_node_instances=None,
        deployment_update_deployment=None,
        modified_entity_ids=None,
        execution_id='execution-id',
        created_at=now)
    sm.put_deployment_update(deployment_update)

    depup_from_client = self.client.deployment_updates.get('depup-id')
    response_attrs = ('id', 'state', 'deployment_id', 'steps',
                      'execution_id', 'created_at')
    for attr in response_attrs:
        self.assertEqual(getattr(depup_from_client, attr),
                         getattr(deployment_update, attr))
def put(self, key, **kwargs):
    """
    Create a new secret, or update an existing one when the
    update_if_exists flag is set.
    """
    secret_params = self._get_secret_params(key)
    encrypted_value = encrypt(secret_params['value'])
    sm = get_storage_manager()
    now = utils.get_formatted_timestamp()
    try:
        return sm.put(models.Secret(
            id=key,
            value=encrypted_value,
            created_at=now,
            updated_at=now,
            visibility=secret_params['visibility'],
            is_hidden_value=secret_params['is_hidden_value'],
        ))
    except ConflictError:
        # The key already exists; overwrite only when explicitly asked.
        secret = sm.get(models.Secret, key)
        if secret and secret_params['update_if_exists']:
            secret.value = encrypted_value
            secret.updated_at = now
            return sm.update(secret, validate_global=True)
        raise
def _create_agent(self, name, state, request_dict):
    """Persist a new Agent model populated from the request payload."""
    created = utils.get_formatted_timestamp()
    encrypted_password = request_dict.get('rabbitmq_password')
    if encrypted_password:
        encrypted_password = encrypt(encrypted_password)
    # TODO: remove these fields from the runtime properties
    new_agent = models.Agent(
        id=name,
        name=name,
        ip=request_dict.get('ip'),
        install_method=request_dict.get('install_method'),
        system=request_dict.get('system'),
        state=state,
        version=request_dict.get('version'),
        rabbitmq_username=request_dict.get('rabbitmq_username'),
        rabbitmq_password=encrypted_password,
        rabbitmq_exchange=request_dict.get('rabbitmq_exchange'),
        created_at=created,
        updated_at=created,
    )
    sm = get_storage_manager()
    new_agent.node_instance = sm.get(
        models.NodeInstance, request_dict.get('node_instance_id'))
    return sm.put(new_agent)
def put(self):
    """Creates an inter-deployment dependency.

    :param dependency_creator: a string representing the entity that
        is responsible for this dependency (e.g. an intrinsic function
        blueprint path, 'node_instances.some_node_instance', etc.).
    :param source_deployment: source deployment that depends on the
        target deployment.
    :param target_deployment: the deployment that the source deployment
        depends on.
    :return: an InterDeploymentDependency object containing the
        information of the dependency.
    """
    sm = get_storage_manager()
    params = self._get_put_dependency_params(sm)
    timestamp = utils.get_formatted_timestamp()

    # Refuse dependencies that would close a cycle in the graph.
    dep_graph = rest_utils.RecursiveDeploymentDependencies(sm)
    source_id = str(params[SOURCE_DEPLOYMENT].id)
    target_id = str(params[TARGET_DEPLOYMENT].id)
    dep_graph.create_dependencies_graph()
    dep_graph.assert_no_cyclic_dependencies(source_id, target_id)

    dependency = models.InterDeploymentDependencies(
        id=str(uuid.uuid4()),
        dependency_creator=params[DEPENDENCY_CREATOR],
        source_deployment=params[SOURCE_DEPLOYMENT],
        target_deployment=params[TARGET_DEPLOYMENT],
        created_at=timestamp)
    return sm.put(dependency)
def post(self, maintenance_action, **_):
    """Toggle maintenance mode.

    ``activate`` writes a maintenance-state file (304 if one already
    exists); ``deactivate`` removes it (304 if already deactivated).
    Any other action raises BadParametersError.
    """
    maintenance_file_path = get_maintenance_file_path()

    if maintenance_action == 'activate':
        # Already activating/activated: report the stored state, 304.
        if os.path.isfile(maintenance_file_path):
            state = utils.read_json_file(maintenance_file_path)
            return state, 304

        now = utils.get_formatted_timestamp()
        try:
            user = current_user.username
        except AttributeError:
            # No authenticated user in this context.
            user = ''
        remaining_executions = get_running_executions()
        # Stay in ACTIVATING until running executions drain.
        status = MAINTENANCE_MODE_ACTIVATING \
            if remaining_executions else MAINTENANCE_MODE_ACTIVATED
        activated_at = '' if remaining_executions else now
        utils.mkdirs(config.instance.maintenance_folder)
        new_state = prepare_maintenance_dict(
            status=status,
            activation_requested_at=now,
            activated_at=activated_at,
            remaining_executions=remaining_executions,
            requested_by=user)
        utils.write_dict_to_json_file(maintenance_file_path, new_state)
        return new_state

    if maintenance_action == 'deactivate':
        if not os.path.isfile(maintenance_file_path):
            return prepare_maintenance_dict(
                MAINTENANCE_MODE_DEACTIVATED), 304
        os.remove(maintenance_file_path)
        return prepare_maintenance_dict(MAINTENANCE_MODE_DEACTIVATED)

    valid_actions = ['activate', 'deactivate']
    raise BadParametersError('Invalid action: {0}, Valid action '
                             'values are: {1}'.format(
                                 maintenance_action, valid_actions))
def _create_plugin_from_archive(self,
                                plugin_id,
                                archive_path,
                                private_resource,
                                visibility):
    """Parse the plugin archive's package metadata and return an
    unsaved Plugin model with its visibility resolved."""
    parsed = self._load_plugin_package_json(archive_path)
    build_props = parsed.get('build_server_os_properties')
    info = {
        'package_name': parsed.get('package_name'),
        'archive_name': parsed.get('archive_name'),
    }
    visibility = get_resource_manager().get_resource_visibility(
        Plugin, plugin_id, visibility, private_resource, info)
    plugin_fields = dict(
        id=plugin_id,
        package_name=parsed.get('package_name'),
        package_version=parsed.get('package_version'),
        archive_name=parsed.get('archive_name'),
        package_source=parsed.get('package_source'),
        supported_platform=parsed.get('supported_platform'),
        distribution=build_props.get('distribution'),
        distribution_version=build_props.get('distribution_version'),
        distribution_release=build_props.get('distribution_release'),
        wheels=parsed.get('wheels'),
        excluded_wheels=parsed.get('excluded_wheels'),
        supported_py_versions=parsed.get('supported_python_versions'),
        uploaded_at=get_formatted_timestamp(),
        visibility=visibility,
    )
    return Plugin(**plugin_fields)
def post(self, maintenance_action, **_):
    """Toggle maintenance mode.

    ``activate`` writes a maintenance-state file (304 if one already
    exists); ``deactivate`` removes it (304 if already deactivated).
    Any other action raises BadParametersError.
    """
    maintenance_file_path = get_maintenance_file_path()

    if maintenance_action == 'activate':
        # Already activating/activated: report the stored state, 304.
        if os.path.isfile(maintenance_file_path):
            state = utils.read_json_file(maintenance_file_path)
            return state, 304

        now = utils.get_formatted_timestamp()
        try:
            user = current_user.username
        except AttributeError:
            # No authenticated user in this context.
            user = ''
        remaining_executions = get_running_executions()
        # Stay in ACTIVATING until running executions drain.
        status = MAINTENANCE_MODE_ACTIVATING \
            if remaining_executions else MAINTENANCE_MODE_ACTIVATED
        activated_at = '' if remaining_executions else now
        utils.mkdirs(config.instance.maintenance_folder)
        new_state = prepare_maintenance_dict(
            status=status,
            activation_requested_at=now,
            activated_at=activated_at,
            remaining_executions=remaining_executions,
            requested_by=user)
        utils.write_dict_to_json_file(maintenance_file_path, new_state)
        return new_state

    if maintenance_action == 'deactivate':
        if not os.path.isfile(maintenance_file_path):
            return prepare_maintenance_dict(
                MAINTENANCE_MODE_DEACTIVATED), 304
        os.remove(maintenance_file_path)
        return prepare_maintenance_dict(MAINTENANCE_MODE_DEACTIVATED)

    valid_actions = ['activate', 'deactivate']
    raise BadParametersError(
        'Invalid action: {0}, Valid action '
        'values are: {1}'.format(maintenance_action, valid_actions))
def put(self, key, **kwargs):
    """
    Create a new secret, or — when update_if_exists is set — update
    the existing secret stored under the same key.
    """
    secret_params = self._get_secret_params(key)
    encrypted_value = self._encrypt_secret_value(secret_params['value'])
    sm = get_storage_manager()
    now = utils.get_formatted_timestamp()
    try:
        return sm.put(models.Secret(
            id=key,
            value=encrypted_value,
            created_at=now,
            updated_at=now,
            visibility=secret_params['visibility'],
            is_hidden_value=secret_params['is_hidden_value']))
    except ConflictError:
        # The key already exists; overwrite only when explicitly asked.
        existing = sm.get(models.Secret, key)
        if existing and secret_params['update_if_exists']:
            get_resource_manager().validate_modification_permitted(
                existing)
            existing.value = encrypted_value
            existing.updated_at = now
            return sm.update(existing)
        raise
def _prepare_and_process_doc(self, data_id, blueprint_url,
                             application_file_name):
    """Create a temporary Blueprint row and run a validate-only upload.

    On ExistingRunningExecutionError the temporary row and any archive
    already uploaded to the file server are cleaned up before
    re-raising.
    """
    # Put a temporary blueprint entry in DB
    rm = get_resource_manager()
    now = get_formatted_timestamp()
    temp_blueprint = rm.sm.put(
        Blueprint(plan=None,
                  id=data_id,
                  description=None,
                  created_at=now,
                  updated_at=now,
                  main_file_name=None,
                  visibility=None,
                  state=BlueprintUploadState.VALIDATING))

    if not blueprint_url:
        # The archive arrived in the request body; stage it on the
        # file server so the upload execution can read it.
        self.upload_archive_to_file_server(data_id)

    try:
        temp_blueprint.upload_execution = rm.upload_blueprint(
            data_id,
            application_file_name,
            blueprint_url,
            config.instance.file_server_root,   # for the import resolver
            validate_only=True,
        )
    except manager_exceptions.ExistingRunningExecutionError:
        rm.sm.delete(temp_blueprint)
        self.cleanup_blueprint_archive_from_file_server(
            data_id, current_tenant.name)
        raise
def description(description: str, orig_blueprint_id: str) -> str:
    """Return *description* with a provenance note appended, or just
    the note when no description was given."""
    timestamp = utils.get_formatted_timestamp()
    note = f'copied from {orig_blueprint_id} at {timestamp} on ' \
        'plugins update'
    if not description:
        return note
    return f'{description}\n{note}'
def finalize(self, plugins_update_id):
    """Executes the following procedure:

    * Updates the original blueprint plan
    * Changes all the deployments' blueprint back from the temp
      blueprint to the original one
    * Deletes the temporary blueprint
    * Updates the plugins update state

    :param plugins_update_id: id of the PluginsUpdate to finalize.
        (The previous signature defaulted this parameter to the
        *builtin* ``id`` function — an accidental default that could
        never resolve to a real update — so the default was removed.)
    """
    plugins_update = self.sm.get(models.PluginsUpdate, plugins_update_id)
    self._validate_plugins_update_state(plugins_update)
    self._validate_execution_status(plugins_update)
    plugins_update.state = STATES.FINALIZING
    self.sm.update(plugins_update)

    # Copy the updated plan back onto the original blueprint.
    plugins_update.blueprint.plan = plugins_update.temp_blueprint.plan
    self.sm.update(plugins_update.blueprint)

    # Point the updated deployments back at the original blueprint.
    updated_deployments = self._get_deployments_to_update(
        plugins_update.temp_blueprint_id)
    for dep in updated_deployments:
        dep.blueprint = plugins_update.blueprint
        dep.updated_at = utils.get_formatted_timestamp()
        self.sm.update(dep)

    self.sm.delete(plugins_update.temp_blueprint)
    plugins_update.state = STATES.SUCCESSFUL
    self.sm.update(plugins_update)
    return plugins_update
def update_deployment_dependencies_from_plan(deployment_id,
                                             deployment_plan,
                                             storage_manager,
                                             dep_plan_filter_func,
                                             curr_dependencies=None):
    """Sync InterDeploymentDependencies rows with the plan's
    inter-deployment functions.

    New dependency creators get a fresh row; creators already present
    in *curr_dependencies* have their target updated (when it changed
    and is known), with the dependency graph checked for cycles and
    kept in sync.

    :param deployment_id: id of the source deployment.
    :param deployment_plan: parsed plan; its INTER_DEPLOYMENT_FUNCTIONS
        entry (created here if missing) maps creators to target ids.
    :param storage_manager: storage manager used for all model access.
    :param dep_plan_filter_func: predicate selecting which dependency
        creators from the plan are handled by this call.
    :param curr_dependencies: mapping of creator -> existing
        InterDeploymentDependencies model; defaults to empty.
    :return: the filtered creator -> target-deployment-id mapping.
    """
    curr_dependencies = {} if curr_dependencies is None \
        else curr_dependencies
    new_dependencies = deployment_plan.setdefault(
        INTER_DEPLOYMENT_FUNCTIONS, {})
    new_dependencies_dict = {
        creator: target
        for creator, target in new_dependencies.items()
        if dep_plan_filter_func(creator)
    }
    dep_graph = RecursiveDeploymentDependencies(storage_manager)
    dep_graph.create_dependencies_graph()
    for dependency_creator, target_deployment_id \
            in new_dependencies_dict.items():
        target_deployment = storage_manager.get(
            models.Deployment, target_deployment_id) \
            if target_deployment_id else None
        source_deployment = storage_manager.get(
            models.Deployment, deployment_id)
        if dependency_creator not in curr_dependencies:
            # First time this creator is seen: record a new dependency.
            now = get_formatted_timestamp()
            storage_manager.put(models.InterDeploymentDependencies(
                dependency_creator=dependency_creator,
                source_deployment=source_deployment,
                target_deployment=target_deployment,
                created_at=now,
                id=str(uuid.uuid4())
            ))
            continue
        if not target_deployment_id:
            # New target deployment is unknown, keep the current value
            continue
        curr_target_deployment = \
            curr_dependencies[dependency_creator].target_deployment
        # NOTE(review): this compares the stored target (an object per
        # the `.id` access below) with an id string — confirm the
        # model's __eq__ supports that, otherwise this can never match.
        if curr_target_deployment == target_deployment_id:
            continue
        curr_dependencies[dependency_creator].target_deployment = \
            target_deployment
        storage_manager.update(curr_dependencies[dependency_creator])
        # verify that the new dependency doesn't create a cycle,
        # and update the dependencies graph accordingly
        if not hasattr(source_deployment, 'id'):
            # upcoming: handle the case of external dependencies
            continue
        source_id = source_deployment.id
        target_id = target_deployment.id
        old_target_id = curr_target_deployment.id
        dep_graph.assert_no_cyclic_dependencies(source_id, target_id)
        if target_deployment not in new_dependencies_dict.values():
            dep_graph.remove_dependency_from_graph(source_id,
                                                   old_target_id)
        dep_graph.add_dependency_to_graph(source_id, target_id)
    return new_dependencies_dict
def _execute_workflow(self,
                      deployment_update,
                      workflow_id,
                      parameters=None,
                      allow_custom_parameters=False,
                      force=False):
    """Executes the specified workflow on the update's deployment.

    :param deployment_update: the DeploymentUpdate being applied; its
        plan supplies the workflow plugins to install.
    :param workflow_id: id of a workflow declared on the deployment.
    :param parameters: execution parameters to merge with the
        workflow's declared parameters.
    :param allow_custom_parameters: whether parameters not declared by
        the workflow are accepted.
    :param force: accepted for interface compatibility; not referenced
        in this body.
    :return: the newly created (PENDING) Execution model.
    :raises NonexistentWorkflowError: if the deployment does not
        declare the requested workflow.
    """
    deployment_id = deployment_update.deployment_id
    deployment = self.sm.get_deployment(deployment_id)
    blueprint_id = deployment.blueprint_id

    if workflow_id not in deployment.workflows:
        raise manager_exceptions.NonexistentWorkflowError(
            'Workflow {0} does not exist in deployment {1}'
            .format(workflow_id, deployment_id))
    workflow = deployment.workflows[workflow_id]

    execution_parameters = \
        BlueprintsManager._merge_and_validate_execution_parameters(
            workflow, workflow_id, parameters, allow_custom_parameters)

    execution_id = str(uuid.uuid4())

    # Persist the execution before dispatching, so its state is
    # visible as PENDING.
    new_execution = models.Execution(
        id=execution_id,
        status=models.Execution.PENDING,
        created_at=utils.get_formatted_timestamp(),
        blueprint_id=blueprint_id,
        workflow_id=workflow_id,
        deployment_id=deployment_id,
        error='',
        parameters=BlueprintsManager._get_only_user_execution_parameters(
            execution_parameters),
        is_system_workflow=False)
    self.sm.put_execution(new_execution.id, new_execution)

    # executing the user workflow
    workflow_plugins = \
        deployment_update.deployment_plan[
            constants.WORKFLOW_PLUGINS_TO_INSTALL]
    self.workflow_client.execute_workflow(
        workflow_id,
        workflow,
        workflow_plugins=workflow_plugins,
        blueprint_id=blueprint_id,
        deployment_id=deployment_id,
        execution_id=execution_id,
        execution_parameters=execution_parameters)

    return new_execution
def _stage_plugin_update(self, blueprint, force):
    """Create and persist a new PluginsUpdate entry for *blueprint*."""
    new_update = models.PluginsUpdate(
        id=str(uuid.uuid4()),
        created_at=utils.get_formatted_timestamp(),
        forced=force,
    )
    new_update.set_blueprint(blueprint)
    return self.sm.put(new_update)
def mock_send_mgmtworker_task(message, **_):
    """Pretend the mgmtworker ran the execution: mark it finished."""
    sm = get_storage_manager()
    execution = sm.get(models.Execution, message['id'])
    execution.error = ''
    execution.ended_at = utils.get_formatted_timestamp()
    execution.status = task_state()
    sm.update(execution)
def _put_site(self, name='test_site'):
    """Store a tenant-visible Site with fixed test coordinates."""
    site = models.Site()
    site.id = name
    site.name = name
    site.visibility = VisibilityState.TENANT
    site.latitude = 42
    site.longitude = 43
    site.created_at = utils.get_formatted_timestamp()
    self.sm.put(site)
def _put_mock_blueprint(self):
    """Store and return a minimal blueprint with a random id."""
    now = utils.get_formatted_timestamp()
    blueprint = models.Blueprint(
        id=str(uuid.uuid4()),
        created_at=now,
        updated_at=now,
        main_file_name='abcd',
        plan={},
    )
    return self.sm.put(blueprint)
def _get_mock_deployment(deployment_id, blueprint):
    """Return an unsaved Deployment linked to *blueprint*."""
    timestamp = utils.get_formatted_timestamp()
    deployment = models.Deployment(id=deployment_id,
                                   created_at=timestamp,
                                   updated_at=timestamp)
    deployment.blueprint = blueprint
    return deployment
def _put_deployment_dependency(source_deployment, target_deployment,
                               dependency_creator, sm):
    """Persist an InterDeploymentDependencies row for the given pair."""
    dependency = models.InterDeploymentDependencies(
        id=str(uuid.uuid4()),
        dependency_creator=dependency_creator,
        source_deployment=source_deployment,
        target_deployment=target_deployment,
        created_at=utils.get_formatted_timestamp())
    sm.put(dependency)
def finalize(self, plugins_update_id):
    """Executes the following procedure:

    * Updates the original blueprint plan
    * Changes all the deployments' blueprint back from the temp
      blueprint to the original one
    * Deletes the temporary blueprint
    * Updates the plugins update state
    """
    plugins_update = self.sm.get(models.PluginsUpdate, plugins_update_id)
    self._validate_plugins_update_state(plugins_update)
    self._validate_execution_status(plugins_update)
    plugins_update.state = STATES.FINALIZING
    self.sm.update(plugins_update)
    updated_deployments = self._get_deployments_to_update(
        plugins_update.temp_blueprint_id)
    # Deployments still pointing at the original blueprint were not
    # migrated by the update execution — log them.
    not_updated_deployments = self._get_deployments_to_update(
        plugins_update.blueprint_id)
    if not_updated_deployments:
        current_app.logger.error(
            "These deployments were not updated during plugins update "
            "ID %s, execution ID %s: %s",
            plugins_update_id,
            plugins_update.execution.id,
            ', '.join(dep.id for dep in not_updated_deployments))
    if updated_deployments:
        # instantiate the updated blueprint (temp_blueprint)
        self._copy_blueprint_files(plugins_update.blueprint,
                                   plugins_update.temp_blueprint)
        plugins_update.temp_blueprint.is_hidden = False
        self.sm.update(plugins_update.temp_blueprint)
    else:
        # Nothing was migrated: drop the temp blueprint and report
        # the failure (``_raise_error`` — presumably raises; confirm).
        self.sm.delete(plugins_update.temp_blueprint)
        self._raise_error(plugins_update)
    # Copy the updated plan back onto the original blueprint.
    plugins_update.blueprint.plan = plugins_update.temp_blueprint.plan
    self.sm.update(plugins_update.blueprint)
    # Point the migrated deployments back at the original blueprint.
    updated_deployments = self._get_deployments_to_update(
        plugins_update.temp_blueprint_id)
    for dep in updated_deployments:
        dep.blueprint = plugins_update.blueprint
        dep.updated_at = utils.get_formatted_timestamp()
        self.sm.update(dep)
    self.sm.delete(plugins_update.temp_blueprint)
    plugins_update.state = STATES.SUCCESSFUL
    return self.sm.update(plugins_update)
def stage_deployment_update(self,
                            deployment_id,
                            app_dir,
                            app_blueprint,
                            additional_inputs,
                            new_blueprint_id=None,
                            preview=False,
                            runtime_only_evaluation=False,
                            auto_correct_types=False,
                            reevaluate_active_statuses=False):
    """Create (stage) a DeploymentUpdate for the given deployment.

    Parses the blueprint, merges the deployment's old inputs with
    *additional_inputs*, evaluates the plan, and persists a
    DeploymentUpdate model carrying old/new inputs and blueprints.

    :return: the persisted DeploymentUpdate model.
    """
    # validate no active updates are running for a deployment_id
    if reevaluate_active_statuses:
        self.reevaluate_updates_statuses_per_deployment(deployment_id)
    self.validate_no_active_updates_per_deployment(deployment_id)

    # enables reverting to original blueprint resources
    deployment = self.sm.get(models.Deployment, deployment_id)
    old_blueprint = deployment.blueprint
    runtime_only_evaluation = (runtime_only_evaluation or
                               deployment.runtime_only_evaluation)
    parsed_deployment = get_parsed_deployment(old_blueprint,
                                              app_dir,
                                              app_blueprint)

    # Updating the new inputs with the deployment inputs
    # (overriding old values and adding new ones)
    old_inputs = copy.deepcopy(deployment.inputs)
    new_inputs = {k: old_inputs[k]
                  for k in parsed_deployment.inputs if k in old_inputs}
    new_inputs.update(additional_inputs)

    # applying intrinsic functions
    plan = get_deployment_plan(parsed_deployment, new_inputs,
                               runtime_only_evaluation,
                               auto_correct_types)

    deployment_update_id = '{0}-{1}'.format(deployment.id, uuid.uuid4())
    deployment_update = models.DeploymentUpdate(
        id=deployment_update_id,
        deployment_plan=plan,
        runtime_only_evaluation=runtime_only_evaluation,
        created_at=get_formatted_timestamp()
    )
    deployment_update.set_deployment(deployment)
    deployment_update.preview = preview
    deployment_update.old_inputs = old_inputs
    deployment_update.new_inputs = new_inputs
    if new_blueprint_id:
        # Blueprint switch: the new blueprint must be fully uploaded.
        new_blueprint = self.sm.get(models.Blueprint, new_blueprint_id)
        verify_blueprint_uploaded_state(new_blueprint)
        deployment_update.old_blueprint = old_blueprint
        deployment_update.new_blueprint = new_blueprint
    self.sm.put(deployment_update)
    return deployment_update
def patch(self, filters_model, filter_id, filtered_resource):
    """Update a filter by its ID

    This function updates the filter rules and visibility
    """
    rest_utils.validate_inputs({'filter_id': filter_id})
    if not request.json:
        raise manager_exceptions.IllegalActionError(
            'Update a filter request must include at least one parameter '
            'to update')

    request_dict = rest_utils.get_json_and_verify_params(
        {'filter_rules': {
            'type': list,
            'optional': True
        }})
    filter_rules = request_dict.get('filter_rules')
    visibility = rest_utils.get_visibility_parameter(
        optional=True, valid_values=VisibilityState.STATES)

    storage_manager = get_storage_manager()
    filter_elem = storage_manager.get(filters_model, filter_id)
    # System filters are read-only through the REST API.
    _verify_not_a_system_filter(filter_elem, 'update')
    if visibility:
        get_resource_manager().validate_visibility_value(
            filters_model, filter_elem, visibility)
        filter_elem.visibility = visibility
    if filter_rules:
        new_filter_rules = create_filter_rules_list(
            filter_rules, filtered_resource)
        new_attrs_filter_rules = _get_filter_rules_by_type(
            new_filter_rules, 'attribute')
        new_labels_filter_rules = _get_filter_rules_by_type(
            new_filter_rules, 'label')
        if new_attrs_filter_rules:
            if new_labels_filter_rules:
                # New rules contain both types: replace everything.
                filter_elem.value = new_filter_rules
            else:
                # Only attribute rules arrived: keep the existing
                # label rules and replace the attribute rules.
                filter_elem.value = (filter_elem.labels_filter_rules +
                                     new_filter_rules)
        elif new_labels_filter_rules:
            # Only label rules arrived: keep the existing attribute
            # rules and replace the label rules.
            filter_elem.value = (filter_elem.attrs_filter_rules +
                                 new_filter_rules)
        else:  # Should not get here
            raise manager_exceptions.BadParametersError(
                'Unknown filter rules type')

    filter_elem.updated_at = get_formatted_timestamp()
    return storage_manager.update(filter_elem)
def _prepare_and_process_doc(self, data_id, visibility, blueprint_url,
                             application_file_name,
                             override_failed_blueprint,
                             labels=None):
    """Create (or reset) a Blueprint row and kick off its upload.

    When *override_failed_blueprint* is set the existing row is reset
    in place; otherwise a fresh PENDING row is inserted. On
    ExistingRunningExecutionError the row is marked FAILED_UPLOADING
    and any uploaded archive is cleaned up before re-raising.
    """
    # Put a new blueprint entry in DB
    now = get_formatted_timestamp()
    rm = get_resource_manager()
    if override_failed_blueprint:
        # Reuse the failed row: wipe its derived fields and restart.
        new_blueprint = rm.sm.get(Blueprint, data_id)
        new_blueprint.plan = None
        new_blueprint.description = None
        new_blueprint.created_at = now
        new_blueprint.updated_at = now
        new_blueprint.main_file_name = None
        new_blueprint.visibility = visibility
        new_blueprint.state = BlueprintUploadState.PENDING
        rm.sm.update(new_blueprint)
    else:
        new_blueprint = rm.sm.put(
            Blueprint(plan=None,
                      id=data_id,
                      description=None,
                      created_at=now,
                      updated_at=now,
                      main_file_name=None,
                      visibility=visibility,
                      state=BlueprintUploadState.PENDING))

    if not blueprint_url:
        # Archive came in the request body: stage it on the file server.
        new_blueprint.state = BlueprintUploadState.UPLOADING
        rm.sm.update(new_blueprint)
        self.upload_archive_to_file_server(data_id)

    try:
        new_blueprint.upload_execution = rm.upload_blueprint(
            data_id,
            application_file_name,
            blueprint_url,
            config.instance.file_server_root,   # for the import resolver
            labels=labels)
        rm.sm.update(new_blueprint)
    except manager_exceptions.ExistingRunningExecutionError as e:
        new_blueprint.state = BlueprintUploadState.FAILED_UPLOADING
        new_blueprint.error = str(e)
        new_blueprint.error_traceback = traceback.format_exc()
        rm.sm.update(new_blueprint)
        self.cleanup_blueprint_archive_from_file_server(
            data_id, current_tenant.name)
        raise
    return new_blueprint
def _add_blueprint(self, blueprint_id=None):
    """Store a minimal blueprint and return the stored instance.

    A random ``blueprint-<uuid>`` id is generated when none is given.
    """
    blueprint_id = blueprint_id or 'blueprint-{0}'.format(uuid.uuid4())
    timestamp = utils.get_formatted_timestamp()
    return self.sm.put(models.Blueprint(
        id=blueprint_id,
        created_at=timestamp,
        updated_at=timestamp,
        description=None,
        plan={'name': 'my-bp'},
        main_file_name='aaa',
    ))
def _put_system_filter(self):
    """Insert a system filter directly through the storage manager.

    System filters cannot (and should not) be created via the
    rest-service, hence the direct storage access.
    """
    sm = get_storage_manager()
    timestamp = get_formatted_timestamp()
    system_filter = self.filters_model(
        id='csys-test-filter',
        value=self.FILTER_RULES,
        created_at=timestamp,
        updated_at=timestamp,
        visibility=VisibilityState.TENANT,
        is_system_filter=True,
    )
    sm.put(system_filter)
def put_agent(self, agent_name='agent_1', instance_id='node_instance_1',
              deployment_id='deployment_1'):
    """Store a test Agent attached to a (possibly new) node instance.

    :return: the stored agent, as returned by the storage manager
    """
    node_instance = self._get_or_create_node_instance(
        instance_id, deployment_id)
    agent = Agent(id=agent_name,
                  name=agent_name,
                  ip='127.0.0.1',
                  install_method='remote',
                  system='centos core',
                  version='4.5.5',
                  visibility='tenant',
                  state=AgentState.CREATING,
                  # NOTE(review): '******'.format(agent_name) is a no-op
                  # (the literal has no placeholders), so the username is
                  # always the literal string. This looks like a redacted
                  # format template - confirm the intended value.
                  rabbitmq_username='******'.format(agent_name),
                  rabbitmq_password=encrypt(
                      AMQPManager._generate_user_password()),
                  rabbitmq_exchange=agent_name,
                  created_at=get_formatted_timestamp(),
                  updated_at=get_formatted_timestamp())
    agent.node_instance = node_instance
    return self.sm.put(agent)
def _get_event(execution_id, message="Starting 'install' workflow execution"):
    """Build a workflow-started event dict for the given execution id."""
    event = {
        'message': {'text': message, 'arguments': None},
        'event_type': 'workflow_started',
        'context': {'execution_id': execution_id},
        'timestamp': get_formatted_timestamp(),
    }
    return event
def put(self, name):
    """Create a new site called ``name`` and persist it.

    Visibility defaults to tenant-wide when the request omits it.
    """
    request_dict = self._validate_site_params(name)
    site = models.Site()
    site.id = name
    site.name = name
    site.latitude = request_dict.get('latitude')
    site.longitude = request_dict.get('longitude')
    requested_visibility = request_dict['visibility']
    if requested_visibility:
        site.visibility = requested_visibility
    else:
        site.visibility = VisibilityState.TENANT
    site.created_at = utils.get_formatted_timestamp()
    return get_storage_manager().put(site)
def test_all_results_query(self):
    """get_all_results=True must bypass the 1000-result page cap.

    1001 secrets are stored; a capped listing would return only 1000,
    so the full listing has to contain all 1001.
    """
    now = utils.get_formatted_timestamp()
    for i in range(1, 1002):
        secret = models.Secret(id='secret_{}'.format(i),
                               value='value',
                               created_at=now,
                               updated_at=now,
                               visibility=VisibilityState.TENANT)
        self.sm.put(secret)
    secret_list = self.sm.list(models.Secret,
                               include=['id', 'created_at'],
                               get_all_results=True)
    # Was assertEquals(1000, ...): assertEquals is a deprecated alias
    # (removed in Python 3.12), and 1000 is off by one - range(1, 1002)
    # stores 1001 secrets, and get_all_results must return them all.
    self.assertEqual(1001, len(secret_list))
def _add_execution(self, deployment, execution_id=None):
    """Store a terminated execution attached to ``deployment``.

    Generates a random ``execution-<uuid>`` id when none is supplied.
    """
    execution_id = execution_id or 'execution-{0}'.format(uuid.uuid4())
    execution = models.Execution(
        id=execution_id,
        status=ExecutionState.TERMINATED,
        workflow_id='',
        created_at=utils.get_formatted_timestamp(),
        error='',
        parameters={},
        is_system_workflow=False,
    )
    execution.deployment = deployment
    return self.sm.put(execution)
def put_agent(self, agent_name='agent_1', instance_id='node_instance_1',
              deployment_id='deployment_1'):
    """Store a test Agent attached to a (possibly new) node instance.

    :return: the stored agent, as returned by the storage manager
    """
    node_instance = self._get_or_create_node_instance(instance_id,
                                                      deployment_id)
    agent = Agent(
        id=agent_name,
        name=agent_name,
        ip='127.0.0.1',
        install_method='remote',
        system='centos core',
        version='4.5.5',
        visibility='tenant',
        state=AgentState.CREATING,
        # NOTE(review): '******'.format(agent_name) is a no-op (no
        # placeholders in the literal), so the username is always the
        # literal string. Looks like a redacted format template -
        # confirm the intended value.
        rabbitmq_username='******'.format(agent_name),
        rabbitmq_password=encrypt(generate_user_password()),
        rabbitmq_exchange=agent_name,
        created_at=get_formatted_timestamp(),
        updated_at=get_formatted_timestamp()
    )
    agent.node_instance = node_instance
    return self.sm.put(agent)
def _get_log(execution_id, message='Test log'):
    """Build a debug-level log entry dict for the given execution id."""
    context = {
        'execution_id': execution_id,
        'node_id': 'vm_7j36my',
        'operation': 'cloudify.interfaces.cloudify_agent.create',
    }
    return {
        'context': context,
        'level': 'debug',
        'logger': 'ctx.a13973d5-3866-4054-baa1-479e242fff75',
        'message': {'text': message},
        'timestamp': get_formatted_timestamp(),
    }
def test_all_results_query(self):
    """get_all_results=True must bypass the 1000-result page cap.

    1001 secrets are stored; a capped listing would return only 1000,
    so the full listing has to contain all 1001.
    """
    now = utils.get_formatted_timestamp()
    for i in range(1, 1002):
        secret = models.Secret(id='secret_{}'.format(i),
                               value='value',
                               created_at=now,
                               updated_at=now,
                               visibility=VisibilityState.TENANT)
        self.sm.put(secret)
    secret_list = self.sm.list(
        models.Secret,
        include=['id', 'created_at'],
        get_all_results=True
    )
    # Was assertEquals(1000, ...): assertEquals is a deprecated alias
    # (removed in Python 3.12), and 1000 is off by one - range(1, 1002)
    # stores 1001 secrets, and get_all_results must return them all.
    self.assertEqual(1001, len(secret_list))
def _create_execution(self, execution_id):
    """Store a terminated execution owned by the admin user in the
    default tenant."""
    creator = self.sm.get(models.User, 0)
    tenant = self.sm.get(models.Tenant, 0)
    execution = models.Execution(
        id=execution_id,
        status='terminated',
        created_at=get_formatted_timestamp(),
        workflow_id='test',
        error='',
        parameters={},
        is_system_workflow=False,
    )
    execution.creator = creator
    execution.tenant = tenant
    self.sm.put(execution)
def _create_plugin_from_archive(self, plugin_id, archive_path):
    """Build a (not yet stored) Plugin model from a plugin archive.

    :param plugin_id: id to assign to the new plugin
    :param archive_path: path to the plugin archive whose package.json
        supplies the plugin metadata (missing keys become None fields)
    :return: an unsaved models.Plugin populated from the metadata
    """
    plugin = self._load_plugin_package_json(archive_path)
    # Guard: a package.json without 'build_server_os_properties' made
    # plugin.get(...) return None, and the .get() calls below would then
    # raise AttributeError; fall back to an empty dict instead.
    build_props = plugin.get('build_server_os_properties') or {}
    now = utils.get_formatted_timestamp()
    return models.Plugin(
        id=plugin_id,
        package_name=plugin.get('package_name'),
        package_version=plugin.get('package_version'),
        archive_name=plugin.get('archive_name'),
        package_source=plugin.get('package_source'),
        supported_platform=plugin.get('supported_platform'),
        distribution=build_props.get('distribution'),
        distribution_version=build_props.get('distribution_version'),
        distribution_release=build_props.get('distribution_release'),
        wheels=plugin.get('wheels'),
        excluded_wheels=plugin.get('excluded_wheels'),
        supported_py_versions=plugin.get('supported_python_versions'),
        uploaded_at=now)
def _execute_workflow(self, deployment_update, workflow_id,
                      parameters=None, allow_custom_parameters=False):
    """Create an execution for ``workflow_id`` and dispatch it.

    :param deployment_update: the deployment update whose deployment the
        workflow runs against; the new execution is linked back to it
    :param workflow_id: id of a workflow declared on the deployment
    :param parameters: user-supplied execution parameters
    :param allow_custom_parameters: whether parameters not declared by
        the workflow are accepted
    :raises NonexistentWorkflowError: if the deployment does not declare
        ``workflow_id``
    :return: the new (pending) execution model
    """
    deployment = deployment_update.deployment
    if workflow_id not in deployment.workflows:
        raise manager_exceptions.NonexistentWorkflowError(
            'Workflow {0} does not exist in deployment {1}'.format(
                workflow_id, deployment.id))
    workflow = deployment.workflows[workflow_id]
    execution_parameters = \
        ResourceManager._merge_and_validate_execution_parameters(
            workflow, workflow_id, parameters, allow_custom_parameters)
    execution_id = str(uuid.uuid4())
    new_execution = models.Execution(
        id=execution_id,
        status=ExecutionState.PENDING,
        created_at=utils.get_formatted_timestamp(),
        workflow_id=workflow_id,
        error='',
        # Only user-facing parameters are persisted on the execution.
        parameters=ResourceManager._get_only_user_execution_parameters(
            execution_parameters),
        is_system_workflow=False)
    # NOTE(review): `deployment` is already dereferenced above, so this
    # guard is always true here.
    if deployment:
        new_execution.set_deployment(deployment)
        deployment_update.execution = new_execution
    self.sm.put(new_execution)
    # executing the user workflow
    workflow_plugins = deployment_update.deployment_plan[
        constants.WORKFLOW_PLUGINS_TO_INSTALL]
    workflow_executor.execute_workflow(
        workflow_id,
        workflow,
        workflow_plugins=workflow_plugins,
        blueprint_id=deployment.blueprint_id,
        deployment_id=deployment.id,
        execution_id=execution_id,
        execution_parameters=execution_parameters,
        execution_creator=current_user
    )
    return new_execution
def _add_deployment(self, blueprint, deployment_id=None):
    """Store a minimal deployment attached to ``blueprint``.

    Generates a random ``deployment-<uuid>`` id when none is supplied.
    """
    deployment_id = deployment_id or 'deployment-{0}'.format(uuid.uuid4())
    timestamp = utils.get_formatted_timestamp()
    deployment = models.Deployment(
        id=deployment_id,
        created_at=timestamp,
        updated_at=timestamp,
        permalink=None,
        description=None,
        workflows={},
        inputs={},
        policy_types={},
        policy_triggers={},
        groups={},
        scaling_groups={},
        outputs={},
    )
    deployment.blueprint = blueprint
    return self.sm.put(deployment)
def test_store_load_delete_blueprint(self):
    """Blueprint round-trip: put, list, get, delete, then empty list."""
    now = utils.get_formatted_timestamp()
    blueprint = models.Blueprint(id='blueprint-id',
                                 created_at=now,
                                 updated_at=now,
                                 description=None,
                                 plan={'name': 'my-bp'},
                                 main_file_name='aaa')
    self.sm.put(blueprint)
    blueprint_from_list = self.sm.list(models.Blueprint)[0]
    blueprint_restored = self.sm.get(models.Blueprint, 'blueprint-id')
    bp_from_delete = self.sm.delete(blueprint_restored)
    # assertEquals is a deprecated unittest alias (removed in
    # Python 3.12); use assertEqual instead.
    self.assertEqual(blueprint.to_dict(), blueprint_from_list.to_dict())
    self.assertEqual(blueprint.to_dict(), blueprint_restored.to_dict())
    # in bp returned from delete operation only 'id' is guaranteed to
    # return
    self.assertEqual(blueprint.id, bp_from_delete.id)
    blueprints_list = self.sm.list(models.Blueprint)
    self.assertEqual(0, len(blueprints_list))
def _add_deployment_update(self, deployment, execution,
                           deployment_update_id=None):
    """Store a staged deployment update for ``deployment``.

    The update is linked to ``execution`` when one is given; a random
    ``deployment_update-<uuid>`` id is generated when none is supplied.
    """
    deployment_update_id = (
        deployment_update_id or
        'deployment_update-{0}'.format(uuid.uuid4()))
    dep_update = models.DeploymentUpdate(
        deployment_plan={'name': 'my-bp'},
        state='staged',
        id=deployment_update_id,
        deployment_update_nodes=None,
        deployment_update_node_instances=None,
        deployment_update_deployment=None,
        modified_entity_ids=None,
        created_at=utils.get_formatted_timestamp(),
    )
    dep_update.deployment = deployment
    if execution:
        dep_update.execution = execution
    return self.sm.put(dep_update)
def test_fields_query(self):
    """`include` must restrict a get() to exactly the listed columns."""
    now = utils.get_formatted_timestamp()
    blueprint = models.Blueprint(id='blueprint-id',
                                 created_at=now,
                                 updated_at=now,
                                 description=None,
                                 plan={'name': 'my-bp'},
                                 main_file_name='aaa')
    self.sm.put(blueprint)
    blueprint_restored = self.sm.get(
        models.Blueprint,
        'blueprint-id',
        include=['id', 'created_at']
    )
    # assertEquals is a deprecated unittest alias (removed in
    # Python 3.12); use assertEqual instead.
    self.assertEqual('blueprint-id', blueprint_restored.id)
    self.assertEqual(now, blueprint_restored.created_at)
    # Columns that were not requested must not be present at all.
    self.assertFalse(hasattr(blueprint_restored, 'updated_at'))
    self.assertFalse(hasattr(blueprint_restored, 'plan'))
    self.assertFalse(hasattr(blueprint_restored, 'main_file_name'))
def maintenance_mode_handler():
    """Request hook enforcing maintenance mode.

    Returning None lets the request proceed; returning a response
    short-circuits it.  While ACTIVATING, flips the persisted state to
    ACTIVATED once no executions are running; while ACTIVATED, rejects
    any endpoint that is not explicitly allowed.
    """
    # failed to route the request - this is a 404. Abort early.
    if not request.endpoint:
        return
    # enabling internal requests
    if _is_internal_request() and is_bypass_maintenance_mode():
        return
    # Removing v*/ from the endpoint
    index = request.endpoint.find('/')
    request_endpoint = request.endpoint[index+1:]
    # Maintenance state is persisted as a JSON status file; its absence
    # means maintenance mode is off.
    maintenance_file = os.path.join(
        config.instance().maintenance_folder,
        MAINTENANCE_MODE_STATUS_FILE)
    if os.path.isfile(maintenance_file):
        state = utils.read_json_file(maintenance_file)
        if state['status'] == MAINTENANCE_MODE_ACTIVATING:
            running_executions = get_running_executions()
            if not running_executions:
                # Nothing is running any more - complete the transition
                # to ACTIVATED and persist the new state.
                now = utils.get_formatted_timestamp()
                state = prepare_maintenance_dict(
                    MAINTENANCE_MODE_ACTIVATED,
                    activated_at=now,
                    remaining_executions=None,
                    requested_by=state['requested_by'],
                    activation_requested_at=state[
                        'activation_requested_at'])
                utils.write_dict_to_json_file(maintenance_file, state)
            else:
                # Still draining executions - delegate the decision.
                return _handle_activating_mode(
                    state=state,
                    request_endpoint=request_endpoint)
        # Some endpoints (e.g. maintenance status itself) stay allowed.
        if _check_allowed_endpoint(request_endpoint):
            return
        if state['status'] == MAINTENANCE_MODE_ACTIVATED:
            return _maintenance_mode_error()
def test_list_system_executions(self):
    """System-workflow executions are hidden unless explicitly requested."""
    (blueprint_id, deployment_id, blueprint_response,
     deployment_response) = self.put_deployment(self.DEPLOYMENT_ID)

    # manually pushing a system workflow execution to the storage
    system_wf_execution_id = 'mock_execution_id'
    system_wf_id = 'mock_system_workflow_id'
    system_wf_execution = models.Execution(
        id=system_wf_execution_id,
        status=models.Execution.TERMINATED,
        deployment_id=deployment_id,
        workflow_id=system_wf_id,
        blueprint_id=blueprint_id,
        created_at=utils.get_formatted_timestamp(),
        error='',
        parameters=dict(),
        is_system_workflow=True)
    storage_manager._get_instance().put_execution(
        system_wf_execution_id, system_wf_execution)

    # listing only non-system workflow executions
    executions = self.client.executions.list(deployment_id=deployment_id)
    # expecting 1 execution (create_deployment_environment)
    # (assertEquals is a deprecated unittest alias, removed in
    # Python 3.12 - use assertEqual)
    self.assertEqual(1, len(executions))
    self.assertEqual('create_deployment_environment',
                     executions[0]['workflow_id'])

    # listing all executions
    executions = self.client.executions.list(
        deployment_id=deployment_id, include_system_workflows=True)
    executions.sort(key=lambda e: e.created_at)
    # expecting 2 executions
    self.assertEqual(2, len(executions))
    self.assertEqual('create_deployment_environment',
                     executions[0]['workflow_id'])
    self.assertEqual(system_wf_id, executions[1]['workflow_id'])
    return deployment_id, system_wf_id
inputs = copy.deepcopy(deployment.inputs) inputs.update(additional_inputs) # applying intrinsic functions try: prepared_plan = tasks.prepare_deployment_plan(plan, inputs=inputs) except parser_exceptions.MissingRequiredInputError, e: raise manager_exceptions.MissingRequiredDeploymentInputError( str(e)) except parser_exceptions.UnknownInputError, e: raise manager_exceptions.UnknownDeploymentInputError(str(e)) deployment_update = \ models.DeploymentUpdate(deployment_id, prepared_plan, created_at=utils.get_formatted_timestamp()) self.sm.put_deployment_update(deployment_update) return deployment_update def create_deployment_update_step(self, deployment_update_id, action, entity_type, entity_id): """Create deployment update step :param deployment_update_id: :param action: add/remove/modify :param entity_type: add/relationship :param entity_id: :return:
def _test_deployment_modification(self, modified_nodes, expected_compute,
                                  expected_db, expected_webserver,
                                  modification_type, expected_total,
                                  deployment_id=None, rollback=False):
    """Run a deployment modification workflow and verify its effects.

    Deploys dsl/deployment_modification.yaml (unless ``deployment_id``
    is given), runs the finish/rollback modification workflow with
    ``modified_nodes``, then checks the recorded modification object,
    per-node instance counts, plugin-reported instances and
    relationships against the ``expected_*`` dicts.

    :return: the deployment id, so tests can chain further
        modifications on the same deployment
    """
    if not deployment_id:
        dsl_path = resource("dsl/deployment_modification.yaml")
        test_id = str(uuid.uuid4())
        deployment, _ = deploy(dsl_path,
                               deployment_id=test_id,
                               blueprint_id='b_{0}'.format(test_id))
        deployment_id = deployment.id
    nodes_before_modification = {
        node.id: node
        for node in self.client.nodes.list(deployment_id)
    }
    before_modifications = self.client.deployment_modifications.list(
        deployment_id)
    workflow_id = 'deployment_modification_{0}'.format(
        'rollback' if rollback else 'finish')
    execution = execute_workflow(
        workflow_id, deployment_id,
        parameters={'nodes': modified_nodes})
    after_modifications = self.client.deployment_modifications.list(
        deployment_id)
    # Exactly one new modification must have been recorded by the run.
    new_modifications = [
        m for m in after_modifications
        if m.id not in [m2.id for m2 in before_modifications]]
    self.assertEqual(len(new_modifications), 1)
    modification = list(new_modifications)[0]
    self.assertEqual(self.client.deployment_modifications.get(
        modification.id), modification)
    expected_status = DeploymentModification.ROLLEDBACK if rollback \
        else DeploymentModification.FINISHED
    self.assertEqual(modification.status, expected_status)
    self.assertEqual(modification.deployment_id, deployment_id)
    self.assertEqual(modification.modified_nodes, modified_nodes)
    self.assertDictContainsSubset({
        'workflow_id': workflow_id,
        'execution_id': execution.id,
        'deployment_id': deployment_id,
        'blueprint_id': 'b_{0}'.format(deployment_id)},
        modification.context)
    # created_at/ended_at must be ordered and recent (30s of slack).
    created_at = dateutil.parser.parse(modification.created_at)
    ended_at = dateutil.parser.parse(modification.ended_at)
    self.assertTrue(
        dateutil.parser.parse(utils.get_formatted_timestamp()) -
        datetime.timedelta(seconds=30)
        <= created_at <= ended_at <=
        dateutil.parser.parse(utils.get_formatted_timestamp()))
    for node_id, modified_node in modified_nodes.items():
        node = self.client.nodes.get(deployment_id, node_id)
        if rollback:
            # Rollback restores the pre-modification instance counts.
            self.assertEqual(
                node.planned_number_of_instances,
                nodes_before_modification[
                    node.id].planned_number_of_instances)
            self.assertEqual(
                node.number_of_instances,
                nodes_before_modification[
                    node.id].number_of_instances)
        else:
            self.assertEqual(node.planned_number_of_instances,
                             modified_node['instances'])
            self.assertEqual(node.number_of_instances,
                             modified_node['instances'])
    state = self.get_plugin_data('testmockoperations',
                                 deployment_id)['state']
    compute_instances = self._get_instances(state, 'compute')
    db_instances = self._get_instances(state, 'db')
    webserver_instances = self._get_instances(state, 'webserver')

    # existence
    self.assertEqual(expected_compute['existence'], len(compute_instances))
    self.assertEqual(expected_db['existence'], len(db_instances))
    self.assertEqual(expected_webserver['existence'],
                     len(webserver_instances))

    # modification
    self.assertEqual(expected_compute['modification'],
                     len([i for i in compute_instances
                          if i['modification'] == modification_type]))
    self.assertEqual(expected_db['modification'],
                     len([i for i in db_instances
                          if i['modification'] == modification_type]))
    self.assertEqual(expected_webserver['modification'],
                     len([i for i in webserver_instances
                          if i['modification'] == modification_type]))

    # relationships
    if compute_instances:
        self.assertEqual(expected_compute['relationships'],
                         len(compute_instances[0]['relationships']))
    if db_instances:
        self.assertEqual(expected_db['relationships'],
                         len(db_instances[0]['relationships']))
    if webserver_instances:
        self.assertEqual(expected_webserver['relationships'],
                         len(webserver_instances[0]['relationships']))

    node_instances = self.client.node_instances.list(deployment_id)
    self.assertEqual(expected_total, len(node_instances))
    # NOTE(review): `modification` here shadows the modification object
    # checked above; it is not used after this loop, but a rename would
    # be clearer.
    for node_id, modification in modified_nodes.items():
        if rollback:
            self.assertEqual(
                nodes_before_modification[
                    node_id].number_of_instances,
                self.client.nodes.get(
                    deployment_id, node_id).number_of_instances)
        else:
            self.assertEqual(
                modification['instances'],
                self.client.nodes.get(
                    deployment_id, node_id).number_of_instances)
    for node_instance in node_instances:
        relationships_count = len(node_instance.relationships)
        if node_instance.node_id == 'compute':
            self.assertEqual(relationships_count,
                             expected_compute['total_relationships'])
        if node_instance.node_id == 'db':
            self.assertEqual(relationships_count,
                             expected_db['total_relationships'])
        if node_instance.node_id == 'webserver':
            self.assertEqual(relationships_count,
                             expected_webserver['total_relationships'])
    return deployment_id
def test_get_blueprint_deployments(self):
    """Filtering deployments by blueprint_id returns only that
    blueprint's deployments."""
    now = utils.get_formatted_timestamp()
    blueprint = models.Blueprint(id='blueprint-id',
                                 created_at=now,
                                 updated_at=now,
                                 description=None,
                                 plan={'name': 'my-bp'},
                                 main_file_name='aaa')
    another_blueprint = models.Blueprint(id='another-blueprint-id',
                                         created_at=now,
                                         updated_at=now,
                                         description=None,
                                         plan={'name': 'my-bp'},
                                         main_file_name='aaa')
    self.sm.put(blueprint)
    self.sm.put(another_blueprint)

    def _store_deployment(dep_id, parent_blueprint):
        # Helper: build and store a minimal deployment under the given
        # blueprint (replaces three near-identical inline copies).
        deployment = models.Deployment(id=dep_id,
                                       created_at=now,
                                       updated_at=now,
                                       permalink=None,
                                       description=None,
                                       workflows={},
                                       inputs={},
                                       policy_types={},
                                       policy_triggers={},
                                       groups={},
                                       scaling_groups={},
                                       outputs={})
        deployment.blueprint = parent_blueprint
        self.sm.put(deployment)
        return deployment

    deployment1 = _store_deployment('dep-1', blueprint)
    deployment2 = _store_deployment('dep-2', blueprint)
    # dep-3 belongs to a different blueprint and must be filtered out.
    _store_deployment('dep-3', another_blueprint)

    filters_bp = {'blueprint_id': 'blueprint-id'}
    blueprint_deployments = \
        self.sm.list(models.Deployment, filters=filters_bp)
    # assertEquals is a deprecated unittest alias (removed in
    # Python 3.12); use assertEqual instead.
    self.assertEqual(2, len(blueprint_deployments))
    # Listing order is not guaranteed - normalize before comparing.
    if blueprint_deployments[0].id != deployment1.id:
        blueprint_deployments[0], blueprint_deployments[1] = \
            blueprint_deployments[1], blueprint_deployments[0]
    self.assertEqual(deployment1.to_dict(),
                     blueprint_deployments[0].to_dict())
    self.assertEqual(deployment2.to_dict(),
                     blueprint_deployments[1].to_dict())
def finalize(self, dep_update):
    """Stamp the updated deployment with a fresh timestamp and persist it."""
    target = dep_update.deployment
    target.updated_at = utils.get_formatted_timestamp()
    self.sm.update(target)
def _update_agent(self, name, state):
    """Set a new state on the agent called ``name`` and persist it."""
    sm = get_storage_manager()
    agent = sm.get(models.Agent, name)
    agent.state = state
    agent.updated_at = utils.get_formatted_timestamp()
    return sm.update(agent)