def create_storage(self, blueprint_path, blueprint_plan, deployment_plan,
                   blueprint_id, deployment_id, main_file_name=None):
    resource_storage = application_resource_storage(
        FileSystemResourceDriver(local_resource_storage()))
    model_storage = application_model_storage(
        FileSystemModelDriver(local_model_storage()))
    resource_storage.setup()
    model_storage.setup()
    storage_manager = StorageManager(
        model_storage=model_storage,
        resource_storage=resource_storage,
        blueprint_path=blueprint_path,
        blueprint_id=blueprint_id,
        blueprint_plan=blueprint_plan,
        deployment_id=deployment_id,
        deployment_plan=deployment_plan)
    storage_manager.create_blueprint_storage(blueprint_path, main_file_name=main_file_name)
    storage_manager.create_nodes_storage()
    storage_manager.create_deployment_storage()
    storage_manager.create_node_instances_storage()
def simple(tmpdir, inmemory=False, context_kwargs=None, topology=None):
    initiator = init_inmemory_model_storage if inmemory else None
    initiator_kwargs = {} if inmemory else dict(base_dir=tmpdir)
    topology = topology or create_simple_topology_two_nodes

    model_storage = aria.application_model_storage(
        sql_mapi.SQLAlchemyModelAPI,
        initiator=initiator,
        initiator_kwargs=initiator_kwargs)
    resource_storage = aria.application_resource_storage(
        filesystem_rapi.FileSystemResourceAPI,
        api_kwargs=dict(directory=os.path.join(tmpdir, 'resources')))

    service_id = topology(model_storage)
    execution = models.create_execution(model_storage.service.get(service_id))
    model_storage.execution.put(execution)

    final_kwargs = dict(
        name='simple_context',
        model_storage=model_storage,
        resource_storage=resource_storage,
        service_id=service_id,
        workflow_name=models.WORKFLOW_NAME,
        execution_id=execution.id,
        task_max_attempts=models.TASK_MAX_ATTEMPTS,
        task_retry_interval=models.TASK_RETRY_INTERVAL)
    final_kwargs.update(context_kwargs or {})
    return context.workflow.WorkflowContext(**final_kwargs)
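# A minimal usage sketch (not from the source) of the `simple` helper above:
# it assumes `tmpdir` is a filesystem path string and that the default
# two-node topology is acceptable. With `inmemory=True` the model storage is
# initiated in memory while resources still land under `tmpdir`; the overridden
# context name is passed through `context_kwargs`.
workflow_ctx = simple('/tmp/aria-simple-ctx',
                      inmemory=True,
                      context_kwargs=dict(name='my_test_context'))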
def __init__(self, root_dir=None):
    self._root_dir = root_dir or os.path.join(os.path.expanduser('~'), '.arest')
    models_dir = os.path.join(self._root_dir, 'models')
    resource_dir = os.path.join(self._root_dir, 'resources')
    plugins_dir = os.path.join(self._root_dir, 'plugins')
    self._create_paths(models_dir, resource_dir, plugins_dir)

    # Create a model storage
    self._model_storage = aria_.application_model_storage(
        api=sql_mapi.SQLAlchemyModelAPI,
        initiator_kwargs={'base_dir': models_dir}
    )
    self._resource_storage = aria_.application_resource_storage(
        api=filesystem_rapi.FileSystemResourceAPI,
        api_kwargs={'directory': resource_dir}
    )
    self._plugin_manager = plugin.PluginManager(
        model=self._model_storage,
        plugins_dir=plugins_dir
    )
    self._core = aria_core.Core(
        model_storage=self.model,
        resource_storage=self.resource,
        plugin_manager=self._plugin_manager
    )
def resource_storage(tmpdir):
    result = tmpdir.join('resources')
    result.mkdir()
    resource_storage = application_resource_storage(
        filesystem_rapi.FileSystemResourceAPI,
        api_kwargs=dict(directory=str(result)))
    yield resource_storage
    shutil.rmtree(str(result))
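# A hypothetical pytest test consuming the generator fixture above; it assumes
# the function is registered with @pytest.fixture, so pytest runs the code up
# to `yield` as setup and the shutil.rmtree cleanup afterwards.
def test_resource_storage_setup(resource_storage):
    # The fixture hands the test an initialized resource storage backed by a
    # temporary 'resources' directory that is removed after the test finishes.
    assert resource_storage is not None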
def instantiate_from_dict(cls, model_storage=None, resource_storage=None, **kwargs):
    if model_storage:
        model_storage = aria.application_model_storage(**model_storage)
    if resource_storage:
        resource_storage = aria.application_resource_storage(**resource_storage)

    return cls(model_storage=model_storage,
               resource_storage=resource_storage,
               destroy_session=True,
               **kwargs)
def operation_context_from_dict(context_dict):
    context_cls = context_dict['context_cls']
    context = context_dict['context']

    model_storage = context['model_storage']
    if model_storage:
        api_cls = model_storage['api_cls']
        api_kwargs = _deserialize_sql_mapi_kwargs(model_storage.get('api_kwargs', {}))
        context['model_storage'] = aria.application_model_storage(api=api_cls,
                                                                  api_kwargs=api_kwargs)

    resource_storage = context['resource_storage']
    if resource_storage:
        api_cls = resource_storage['api_cls']
        api_kwargs = _deserialize_file_rapi_kwargs(resource_storage.get('api_kwargs', {}))
        context['resource_storage'] = aria.application_resource_storage(api=api_cls,
                                                                        api_kwargs=api_kwargs)

    return context_cls(**context)
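# An illustrative (assumed) shape for `context_dict` as consumed above; the
# real serialized form is produced elsewhere and may carry additional keys.
# Every placeholder value below is hypothetical.
example_context_dict = {
    'context_cls': None,  # the operation-context class to instantiate
    'context': {
        'model_storage': {
            'api_cls': None,   # e.g. a SQLAlchemy-backed model API class
            'api_kwargs': {},  # deserialized via _deserialize_sql_mapi_kwargs
        },
        'resource_storage': {
            'api_cls': None,   # e.g. a file-system resource API class
            'api_kwargs': {},  # deserialized via _deserialize_file_rapi_kwargs
        },
        # any remaining entries are forwarded unchanged to context_cls(**context)
    },
}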
def simple(mapi_kwargs, resources_dir=None, **kwargs):
    model_storage = aria.application_model_storage(SQLAlchemyModelAPI, api_kwargs=mapi_kwargs)
    deployment_id = create_simple_topology_two_nodes(model_storage)

    # pytest tmpdir
    if resources_dir:
        resource_storage = aria.application_resource_storage(
            FileSystemResourceAPI,
            api_kwargs={'directory': resources_dir})
    else:
        resource_storage = None

    final_kwargs = dict(
        name='simple_context',
        model_storage=model_storage,
        resource_storage=resource_storage,
        deployment_id=deployment_id,
        workflow_name=models.WORKFLOW_NAME,
        task_max_attempts=models.TASK_MAX_ATTEMPTS,
        task_retry_interval=models.TASK_RETRY_INTERVAL)
    final_kwargs.update(kwargs)
    return context.workflow.WorkflowContext(**final_kwargs)
def __call__(self, args_namespace):
    super(ExecuteCommand, self).__call__(args_namespace)
    parameters = (self.parse_inputs(args_namespace.parameters)
                  if args_namespace.parameters else {})
    resource_storage = application_resource_storage(
        FileSystemResourceDriver(local_resource_storage()))
    model_storage = application_model_storage(
        FileSystemModelDriver(local_model_storage()))
    deployment = model_storage.deployment.get(args_namespace.deployment_id)

    try:
        workflow = deployment.workflows[args_namespace.workflow_id]
    except KeyError:
        raise ValueError(
            'Workflow {0} does not exist; existing workflows are: {1}'.format(
                args_namespace.workflow_id,
                deployment.workflows.keys()))

    workflow_parameters = self._merge_and_validate_execution_parameters(
        workflow, args_namespace.workflow_id, parameters)
    workflow_context = WorkflowContext(
        name=args_namespace.workflow_id,
        model_storage=model_storage,
        resource_storage=resource_storage,
        deployment_id=args_namespace.deployment_id,
        workflow_id=args_namespace.workflow_id,
        parameters=workflow_parameters,
    )
    workflow_function = self._load_workflow_handler(workflow['operation'])
    tasks_graph = workflow_function(workflow_context, **workflow_context.parameters)

    executor = ThreadExecutor()
    workflow_engine = Engine(executor=executor,
                             workflow_context=workflow_context,
                             tasks_graph=tasks_graph)
    workflow_engine.execute()
    executor.close()
def resource_storage(self):
    if not self._resource_storage:
        api_kwargs = {'directory': self.resource_storage_dir}
        self._resource_storage = aria.application_resource_storage(
            api=FileSystemResourceAPI,
            api_kwargs=api_kwargs)
    return self._resource_storage
def resource(self):
    if self._resource is None:
        self._resource = application_resource_storage(
            storage.filesystem_rapi.FileSystemResourceAPI,
            api_kwargs=dict(directory=self._tmpdir))
    return self._resource
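# A self-contained sketch of the lazy-initialization pattern used by
# `resource_storage` and `resource` above: the first access builds the storage
# object, later accesses return the cached instance. All names here are
# illustrative and not taken from the source.
class _LazyResourceHolder(object):
    def __init__(self):
        self._resource = None

    @property
    def resource(self):
        if self._resource is None:
            self._resource = object()  # stands in for the resource storage
        return self._resource

_holder = _LazyResourceHolder()
assert _holder.resource is _holder.resource  # same cached instance on reuse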