def projects_create(project_name, organization_name, format_):
    """Create a new project. The created project will automatically become the current project. When only one
    organization exists, it will automatically be selected. When multiple organizations exist and the
    `<organization_name>` option is not provided, the user will be prompted to choose the organization.
    No organization yet? Please, use the user interface and follow the registration process or contact sales.
    """

    client = init_client()

    # Resolve the organization: auto-select when exactly one exists, otherwise ask the user
    if not organization_name:
        organizations = client.organizations_list()
        if len(organizations) == 1 and hasattr(organizations[0], 'name'):
            organization_name = organizations[0].name
        else:
            organization_name = click.prompt('Organization name')

    new_project = api.ProjectCreate(name=project_name, organization_name=organization_name)
    created = client.projects_create(data=new_project)
    client.api_client.close()

    # Make the newly created project the current default project
    user_config = Config()
    user_config.set('default.project', created.name)
    user_config.write()

    print_item(created, row_attrs=LIST_ITEMS, fmt=format_)
def schedules_create(schedule_name, object_type, object_name, object_version, data, format_, **kwargs):
    """
    Create a new request schedule.

    - For express mode deployments, direct requests will be made
    - For batch mode deployments, batch requests will be made
    - For pipelines, batch requests will be made
    """

    project_name = get_current_project(error=True)
    client = init_client()

    # Request data only needs JSON parsing when the scheduled object expects structured input
    scheduled_object = get_schedule_object(client, project_name, object_type, object_name)
    if scheduled_object.input_type == STRUCTURED_TYPE:
        data = parse_json(data)

    new_schedule = api.ScheduleCreate(
        name=schedule_name,
        object_type=object_type,
        object_name=object_name,
        version=object_version,
        request_data=data,
        **kwargs
    )
    created = client.request_schedules_create(project_name=project_name, data=new_schedule)
    client.api_client.close()

    print_item(created, LIST_ITEMS, rename=RENAME_COLUMNS, fmt=format_)
def projects_get(project_name, format_):
    """Get the details of a project."""

    client = init_client()
    project = client.projects_get(project_name=project_name)
    client.api_client.close()

    print_item(project, row_attrs=LIST_ITEMS, fmt=format_)
def schedules_update(schedule_name, new_name, data, format_, **kwargs):
    """Update a request schedule."""

    project_name = get_current_project(error=True)
    client = init_client()

    if data is not None:
        # Look up the scheduled object to know whether the new request data must be parsed as JSON
        current = client.request_schedules_get(project_name=project_name, schedule_name=schedule_name)
        scheduled_object = get_schedule_object(client, project_name, current.object_type, current.object_name)
        if scheduled_object.input_type == STRUCTURED_TYPE:
            data = parse_json(data)

    # Only pass along the options that were explicitly provided
    updated_schedule = api.ScheduleUpdate(
        name=new_name,
        request_data=data,
        **{key: value for key, value in kwargs.items() if value is not None}
    )
    response = client.request_schedules_update(
        project_name=project_name, schedule_name=schedule_name, data=updated_schedule
    )
    client.api_client.close()

    print_item(response, LIST_ITEMS, rename=RENAME_COLUMNS, fmt=format_)
def exports_get(export_id, output_path, quiet, format_):
    """
    Get the details of an export.

    If you specify the `<output_path>` option, this location will be used to store the export details in a yaml file.
    You can either specify the `<output_path>` as file or directory. If the specified `<output_path>` is a directory,
    the settings will be stored in `export.yaml`.
    """

    project_name = get_current_project(error=True)

    client = init_client()
    export = client.exports_get(project_name=project_name, export_id=export_id)
    client.api_client.close()

    if output_path is None:
        # Print the details to the terminal
        print_item(item=export, row_attrs=GET_ITEMS, fmt=format_)
    else:
        # Write the details to a yaml file
        dictionary = format_yaml(
            item=export,
            required_front=['id', 'deployments', 'pipelines', 'environment_variables'],
            as_str=False
        )
        yaml_file = write_yaml(output_path, dictionary, default_file_name="export.yaml")
        if not quiet:
            click.echo('Export details are stored in: %s' % yaml_file)
def env_vars_create(env_var_name, env_var_value, secret, deployment_name, version_name, yaml_file, format_):
    """
    Create an environment variable.

    \b
    - When deployment_name and version_name are provided: the environment variable will be created on deployment
    version level.
    - When a deployment name is provided, but not a version name: the environment variable will be created on
    deployment level.
    - When no deployment_name nor a version name is provided: the environment variable will be created on
    project level.

    \b
    It is possible to create multiple environment variables at ones by passing a yaml file.
    The structure of this file is assumed to look like:
    ```
    environment_variables:
      - name: env_var_1
        value: value_1
      - name: env_var_2
        value: value_2
        secret: true
      - name: env_var_3
        value: value_3
        secret: true
    ```
    The 'secret' parameter is optional, and is `false` by default.
    """

    project_name = get_current_project(error=True)

    # Validate the combination of input options
    if not yaml_file and not env_var_name:
        raise Exception(
            "Please, specify the environment variable in either a yaml file or as a command argument"
        )
    if yaml_file and (env_var_name or env_var_value or secret):
        raise Exception(
            "Please, use either a yaml file or command options, not both")
    if version_name and not deployment_name:
        raise Exception("Missing option <deployment_name>")

    if yaml_file:
        # Create (possibly multiple) environment variables defined in the yaml file
        yaml_content = read_yaml(yaml_file, required_fields=['environment_variables'])
        check_required_fields(
            input_dict=yaml_content, list_name='environment_variables', required_fields=['name', 'value']
        )
        # The 'secret' field is optional and defaults to False
        items = [
            create_env_var(
                project_name, deployment_name, version_name,
                env_var['name'], env_var['value'], env_var.get('secret', False)
            )
            for env_var in yaml_content['environment_variables']
        ]
        print_list(items, LIST_ITEMS, fmt=format_)
    else:
        # Create a single environment variable from the command options
        item = create_env_var(project_name, deployment_name, version_name, env_var_name, env_var_value, secret)
        print_item(item, LIST_ITEMS, fmt=format_)
def pipelines_create(pipeline_name, yaml_file, format_):
    """
    Create a new pipeline.

    \b
    Define the pipeline parameters using a yaml file.
    For example:
    ```
    pipeline_name: my-pipeline-name
    pipeline_description: Pipeline created via command line.
    pipeline_labels:
      my-key-1: my-label-1
      my-key-2: my-label-2
    input_type: structured
    input_fields:
      - name: my-pipeline-param1
        data_type: int
    output_type: structured
    output_fields:
      - name: my-pipeline-output1
        data_type: int
    ```

    Possible input/output types: [structured, plain]. Possible data_types: [blob, int, string, double, bool,
    array_string, array_int, array_double].
    """

    client = init_client()
    project_name = get_current_project(error=True)

    content = read_yaml(yaml_file, required_fields=PIPELINE_REQUIRED_FIELDS)
    assert 'pipeline_name' in content or pipeline_name, \
        'Please, specify the pipeline name in either the yaml file or as a command argument'

    # Split the yaml content into pipeline, input and output field definitions
    pipeline_fields, input_fields, output_fields = define_pipeline(content, pipeline_name)
    new_pipeline = api.PipelineCreate(**pipeline_fields, **input_fields, **output_fields)
    created = client.pipelines_create(project_name=project_name, data=new_pipeline)
    client.api_client.close()

    print_item(
        created,
        row_attrs=LIST_ITEMS,
        required_front=['name', 'description', 'input_type'],
        optional=[
            'input_fields name', 'input_fields data_type', 'output_type',
            'output_fields name', 'output_fields data_type', 'creation_date', 'last_updated'
        ],
        rename={'name': 'pipeline_name', 'description': 'pipeline_description'},
        fmt=format_
    )
def pipelines_get(pipeline_name, output_path, quiet, format_):
    """
    Get the pipeline settings, like, input_type and input_fields.

    If you specify the <output_path> option, this location will be used to store the pipeline structure in a yaml file.
    You can either specify the <output_path> as file or directory. If the specified <output_path> is a directory, the
    settings will be stored in `pipeline.yaml`.
    """

    project_name = get_current_project(error=True)

    client = init_client()
    pipeline = client.pipelines_get(project_name=project_name, pipeline_name=pipeline_name)
    client.api_client.close()

    if output_path is None:
        # Print the details to the terminal
        print_item(
            pipeline,
            row_attrs=LIST_ITEMS,
            required_front=['name', 'description', 'input_type'],
            optional=[
                'input_fields name', 'input_fields data_type', 'output_type',
                'output_fields name', 'output_fields data_type', 'creation_date',
                'last_updated', 'default_version'
            ],
            rename={'name': 'pipeline_name', 'description': 'pipeline_description'},
            fmt=format_
        )
    else:
        # Write the reusable settings to a yaml file
        dictionary = format_yaml(
            pipeline,
            required_front=['name', 'description', 'input_type'],
            optional=[
                'input_fields name', 'input_fields data_type', 'output_type',
                'output_fields name', 'output_fields data_type'
            ],
            rename={'name': 'pipeline_name', 'description': 'pipeline_description'},
            as_str=False
        )
        yaml_file = write_yaml(output_path, dictionary, default_file_name="pipeline.yaml")
        if not quiet:
            click.echo('Pipeline file is stored in: %s' % yaml_file)
def builds_get(deployment_name, version_name, build_id, format_):
    """Get the build of a deployment version."""

    project_name = get_current_project(error=True)

    client = init_client()
    build = client.builds_get(
        project_name=project_name,
        deployment_name=deployment_name,
        version=version_name,
        build_id=build_id
    )
    client.api_client.close()

    print_item(build, row_attrs=LIST_ITEMS, fmt=format_)
def revisions_upload(deployment_name, version_name, zip_path, format_):
    """Create a revision of a deployment version by uploading a ZIP.

    Please, specify the deployment package `<zip_path>` that should be uploaded.
    """

    project_name = get_current_project(error=True)

    client = init_client()
    uploaded_revision = client.revisions_file_upload(
        project_name=project_name,
        deployment_name=deployment_name,
        version=version_name,
        file=zip_path
    )
    client.api_client.close()

    print_item(uploaded_revision, row_attrs=['revision', 'build'], fmt=format_)
def schedules_get(schedule_name, format_):
    """Get a request schedule."""

    project_name = get_current_project(error=True)

    client = init_client()
    schedule = client.request_schedules_get(project_name=project_name, schedule_name=schedule_name)
    client.api_client.close()

    print_item(schedule, row_attrs=LIST_ITEMS, rename=RENAME_COLUMNS, fmt=format_)
def revisions_get(deployment_name, version_name, revision_id, format_):
    """Get a revision of a deployment version."""

    project_name = get_current_project(error=True)

    client = init_client()
    revision = client.revisions_get(
        project_name=project_name,
        deployment_name=deployment_name,
        version=version_name,
        revision_id=revision_id
    )
    client.api_client.close()

    print_item(revision, row_attrs=LIST_ITEMS, fmt=format_)
def blobs_create(input_path, ttl, format_):
    """Upload a new blob."""

    project_name = get_current_project(error=True)

    # Normalize the given path before uploading
    absolute_input_path = abs_path(input_path)

    client = init_client()
    blob = client.blobs_create(project_name=project_name, file=absolute_input_path, blob_ttl=ttl)
    client.api_client.close()

    print_item(blob, LIST_ITEMS, rename={'ttl': 'time_to_live'}, fmt=format_)
def imports_create(zip_path, skip_confirmation, format_):
    """
    Create a new import by uploading a ZIP.

    Please, specify the import file `<zip_path>` that should be uploaded.
    """

    project_name = get_current_project(error=True)

    client = init_client()
    created_import = client.imports_create(
        project_name=project_name, file=zip_path, skip_confirmation=skip_confirmation
    )
    client.api_client.close()

    print_item(created_import, row_attrs=LIST_ITEMS, fmt=format_)
def env_vars_get(env_var_id, deployment_name, version_name, format_):
    """
    Get an environment variable.

    \b
    - When deployment_name and version_name are provided: the environment variable will be collected on deployment
    version level.
    - When a deployment name is provided, but not a version name: the environment variable will be collected on
    deployment level.
    - When no deployment_name nor a version name is provided: the environment variable will be collected on
    project level.
    """

    project_name = get_current_project(error=True)

    if version_name and not deployment_name:
        raise Exception("Missing option <deployment_name>")

    client = init_client()
    try:
        # Pick the narrowest scope for which options were given: version > deployment > project
        if version_name:
            item = client.deployment_version_environment_variables_get(
                project_name=project_name, deployment_name=deployment_name, version=version_name, id=env_var_id
            )
        elif deployment_name:
            item = client.deployment_environment_variables_get(
                project_name=project_name, deployment_name=deployment_name, id=env_var_id
            )
        else:
            item = client.project_environment_variables_get(project_name=project_name, id=env_var_id)
    except api.exceptions.ApiException as e:
        # A 404 typically means the variable exists on a different level; hint before re-raising
        if hasattr(e, "status") and e.status == 404:
            click.echo("%s %s" % (click.style('Warning:', fg='yellow'), WARNING_MSG))
        raise e
    finally:
        # Bug fix: previously the client was only closed on success, leaking the
        # connection whenever the API call raised
        client.api_client.close()

    print_item(item, LIST_ITEMS, fmt=format_)
def deployments_get(deployment_name, output_path, quiet, format_):
    """
    Get the deployment settings, like, input_type and output_type.

    If you specify the `<output_path>` option, this location will be used to store the deployment settings in a yaml
    file. You can either specify the `<output_path>` as file or directory. If the specified `<output_path>` is a
    directory, the settings will be stored in `deployment.yaml`.
    """

    project_name = get_current_project(error=True)

    client = init_client()
    deployment = client.deployments_get(project_name=project_name, deployment_name=deployment_name)
    client.api_client.close()

    if output_path is None:
        # Print the details to the terminal
        print_item(
            item=deployment,
            row_attrs=LIST_ITEMS,
            required_front=['id', 'name', 'project', 'description', 'labels', 'input_type', 'output_type'],
            optional=['input_fields name', 'input_fields data_type', 'output_fields name', 'output_fields data_type'],
            required_end=['creation_date', 'last_updated', 'default_version'],
            rename={'name': 'deployment_name', 'description': 'deployment_description', 'labels': 'deployment_labels'},
            fmt=format_
        )
    else:
        # Write the reusable settings to a yaml file
        dictionary = format_yaml(
            item=deployment,
            required_front=['name', 'description', 'labels', 'input_type', 'output_type'],
            optional=['input_fields name', 'input_fields data_type', 'output_fields name', 'output_fields data_type'],
            rename={'name': 'deployment_name', 'description': 'deployment_description', 'labels': 'deployment_labels'},
            as_str=False
        )
        yaml_file = write_yaml(output_path, dictionary, default_file_name="deployment.yaml")
        if not quiet:
            click.echo('Deployment file is stored in: %s' % yaml_file)
def logs_get(log_id, format_):
    """
    \b
    Get more details of a log:
    - date
    - deployment_name
    - deployment_version_name
    - pipeline_name
    - pipeline_version_name
    - pipeline_object_name
    - request_id
    - pipeline_request_id
    - system (boolean)
    """

    project_name = get_current_project(error=True)

    client = init_client()
    # Filter on the given log id and fetch at most one entry
    log_filters = api.LogsCreate(filters={}, id=log_id, limit=1)
    logs = client.projects_log_list(project_name=project_name, data=log_filters)
    client.api_client.close()

    # Bug fix: an unknown log id previously crashed with a bare IndexError on `[0]`
    if not logs:
        raise Exception("Log not found")
    log = logs[0]

    print_item(
        log,
        row_attrs=['id', 'date', 'log'],
        required_front=['id', 'date', 'system'],
        optional=[
            'deployment_request_id', 'pipeline_request_id', 'deployment_name', 'deployment_version',
            'pipeline_name', 'pipeline_version', 'pipeline_object_name', 'build_id'
        ],
        required_end=['log'],
        rename={'deployment_version': 'deployment_version_name', 'pipeline_version': 'pipeline_version_name'},
        fmt=format_
    )
def pipeline_versions_get(pipeline_name, version_name, output_path, quiet, format_):
    """
    Get the pipeline version structure: input_type, version, objects and connections between the objects
    (attachments).

    If you specify the `<output_path>` option, this location will be used to store the pipeline version settings in a
    yaml file. You can either specify the `<output_path>` as file or directory. If the specified `<output_path>` is a
    directory, the settings will be stored in `pipeline_version.yaml`.

    \b
    Example of yaml content:
    ```
    pipeline_name: my-pipeline-name
    input_type: structured
    input_fields:
      - name: my-pipeline-param1
        data_type: int
    output_type: structured
    output_fields:
      - name: my-pipeline-output1
        data_type: int
    version_name: my-version-name
    version_description: Version created via command line.
    version_labels:
      my-key-1: my-label-1
      my-key-2: my-label-2
    request_retention_mode: none
    request_retention_time: 604800
    objects:
      - name: object1
        reference_name: my-deployment-name
        reference_version: my-deployment-version
    attachments:
      - destination_name: object1
        sources:
          - source_name: pipeline_start
            mapping:
              - source_field_name: my-pipeline-param1
                destination_field_name: my-deployment-param1
    ```
    """

    project_name = get_current_project(error=True)

    # Get pipeline version structure - pipeline, objects and attachments details
    client = init_client()
    version = client.pipeline_versions_get(
        project_name=project_name, pipeline_name=pipeline_name, version=version_name
    )
    pipeline = client.pipelines_get(project_name=project_name, pipeline_name=pipeline_name)
    client.api_client.close()

    # The version object itself does not carry the pipeline-level input/output structure;
    # copy it from the pipeline so both the yaml output and the printed view include it
    setattr(version, 'input_type', pipeline.input_type)
    setattr(version, 'input_fields', pipeline.input_fields)
    setattr(version, 'output_type', pipeline.output_type)
    setattr(version, 'output_fields', pipeline.output_fields)

    if output_path is not None:
        # Store only reusable settings
        dictionary = format_yaml(
            item=version,
            required_front=[
                'pipeline', 'input_type', 'input_fields', 'output_type', 'output_fields',
                'version', *PIPELINE_VERSION_FIELDS
            ],
            optional=[
                'objects name', 'objects reference_name', 'objects version',
                'attachments destination_name', 'attachments sources source_name',
                'attachments sources mapping', 'input_fields name', 'input_fields data_type',
                'output_fields name', 'output_fields data_type'
            ],
            rename={
                'pipeline': 'pipeline_name', 'version': 'version_name',
                'objects version': 'reference_version', **PIPELINE_VERSION_FIELDS_RENAMED
            },
            as_str=False)
        yaml_file = write_yaml(output_path, dictionary, default_file_name="pipeline_version.yaml")
        if not quiet:
            click.echo('Pipeline version file stored in: %s' % yaml_file)
    else:
        # Print the full version details, including creation/update timestamps
        print_item(item=version,
                   row_attrs=LIST_ITEMS,
                   required_front=[
                       'pipeline', 'input_type', 'input_fields', 'output_type', 'output_fields',
                       'version', *PIPELINE_VERSION_FIELDS
                   ],
                   optional=[
                       'creation_date', 'last_updated', 'objects name', 'objects reference_name',
                       'objects version', 'attachments destination_name',
                       'attachments sources source_name', 'attachments sources mapping',
                       'input_fields name', 'input_fields data_type',
                       'output_fields name', 'output_fields data_type'
                   ],
                   rename={
                       'creation_date': 'version_creation_date', 'last_updated': 'version_last_updated',
                       'pipeline': 'pipeline_name', 'version': 'version_name',
                       'objects version': 'reference_version', **PIPELINE_VERSION_FIELDS_RENAMED
                   },
                   fmt=format_)
def pipeline_versions_create(pipeline_name, version_name, yaml_file, format_, **kwargs):
    """
    Create a version of a pipeline.

    \b
    It is possible to define the parameters using a yaml file.
    For example:
    ```
    pipeline_name: my-pipeline-name
    version_name: my-pipeline-version
    version_description: Version created via command line.
    version_labels:
      my-key-1: my-label-1
      my-key-2: my-label-2
    request_retention_mode: none
    request_retention_time: 604800
    objects:
      - name: object1
        reference_name: my-deployment-name
        reference_version: my-deployment-version
    attachments:
      - destination_name: object1
        sources:
          - source_name: pipeline_start
            mapping:
              - source_field_name: my-pipeline-param1
                destination_field_name: my-deployment-param1
    ```

    Those parameters can also be provided as command options. If both a `<yaml_file>` is set and options are given,
    the options defined by `<yaml_file>` will be overwritten by the specified command options. The version name can
    either be passed as command argument or specified inside the yaml file using `<version_name>`.
    """

    project_name = get_current_project(error=True)
    yaml_content = read_yaml(yaml_file, required_fields=[])
    client = init_client()

    assert 'pipeline_name' in yaml_content or pipeline_name, \
        'Please, specify the pipeline name in either the yaml file or as a command argument'
    assert 'version_name' in yaml_content or version_name, \
        'Please, specify the version name in either the yaml file or as a command argument'

    # Fall back to the yaml values for names that were not given on the command line
    pipeline_name = set_dict_default(pipeline_name, yaml_content, 'pipeline_name')
    version_name = set_dict_default(version_name, yaml_content, 'version_name')

    # Define the pipeline version
    fields = set_pipeline_version_defaults(kwargs, yaml_content, None)
    # Rename objects reference version
    fields = rename_pipeline_object_reference_version(content=fields)

    new_version = api.PipelineVersionCreate(
        version=version_name, **{field: fields[field] for field in PIPELINE_VERSION_FIELDS}
    )
    response = client.pipeline_versions_create(
        project_name=project_name, pipeline_name=pipeline_name, data=new_version
    )
    client.api_client.close()

    print_item(
        item=response,
        row_attrs=LIST_ITEMS,
        rename={'pipeline': 'pipeline_name', 'version': 'version_name', **PIPELINE_VERSION_FIELDS_RENAMED},
        fmt=format_
    )
def versions_create(deployment_name, version_name, yaml_file, format_, **kwargs):
    """Create a version of a deployment.

    \b
    It is possible to define the parameters using a yaml file.
    For example:
    ```
    deployment_name: my-deployment-name
    version_name: my-deployment-version
    version_description: Version created via command line.
    version_labels:
      my-key-1: my-label-1
      my-key-2: my-label-2
    language: python3.7
    instance_type: 2048mb
    minimum_instances: 0
    maximum_instances: 1
    maximum_idle_time: 300
    request_retention_mode: none
    request_retention_time: 604800
    deployment_mode: express
    ```

    Provide either deployment mode 'express' or 'batch', default is 'express'.

    Those parameters can also be provided as command options. If both a `<yaml_file>` is set and options are given,
    the options defined by `<yaml_file>` will be overwritten by the specified command options. The version name can
    either be passed as command argument or specified inside the yaml file using `<version_name>`.
    """

    project_name = get_current_project(error=True)
    yaml_content = read_yaml(yaml_file, required_fields=[])
    client = init_client()

    assert 'deployment_name' in yaml_content or deployment_name, \
        'Please, specify the deployment name in either the yaml file or as a command argument'
    assert 'version_name' in yaml_content or version_name, \
        'Please, specify the version name in either the yaml file or as a command argument'

    # Merge yaml values and command options into a single set of version fields
    kwargs = define_deployment_version(kwargs, yaml_content, extra_yaml_fields=['deployment_file'])

    # Warn about the deprecated 'memory_allocation' parameter, unless json output was requested
    if format_ != 'json' and kwargs['memory_allocation'] and not kwargs['instance_type']:
        click.secho(
            "Deprecation warning: parameter 'memory_allocation' is deprecated, use 'instance_type' instead",
            fg='red'
        )

    # Fall back to the yaml values for names that were not given on the command line
    deployment_name = set_dict_default(deployment_name, yaml_content, 'deployment_name')
    version_name = set_dict_default(version_name, yaml_content, 'version_name')

    new_version = api.DeploymentVersionCreate(
        version=version_name, **{field: kwargs[field] for field in DEPLOYMENT_VERSION_FIELDS}
    )
    response = client.deployment_versions_create(
        project_name=project_name, deployment_name=deployment_name, data=new_version
    )
    update_deployment_file(client, project_name, deployment_name, version_name, kwargs['deployment_file'])
    client.api_client.close()

    print_item(
        item=response,
        row_attrs=LIST_ITEMS,
        rename={'deployment': 'deployment_name', 'version': 'version_name', **DEPLOYMENT_VERSION_FIELDS_RENAMED},
        fmt=format_
    )
def versions_get(deployment_name, version_name, output_path, quiet, format_):
    """Get the version of a deployment.

    If you specify the `<output_path>` option, this location will be used to store the deployment version settings in
    a yaml file. You can either specify the `<output_path>` as file or directory. If the specified `<output_path>` is
    a directory, the settings will be stored in `version.yaml`.

    \b
    Example of yaml content:
    ```
    deployment_name: my-deployment
    version_name: my-version
    version_description: Version created via command line.
    version_labels:
      my-key-1: my-label-1
      my-key-2: my-label-2
    language: python3.7
    instance_type: 2048mb
    minimum_instances: 0
    maximum_instances: 5
    maximum_idle_time: 300
    request_retention_mode: none
    request_retention_time: 604800
    deployment_mode: express
    ```
    """

    project_name = get_current_project(error=True)

    # Show version details
    client = init_client()
    version = client.deployment_versions_get(
        project_name=project_name, deployment_name=deployment_name, version=version_name
    )
    client.api_client.close()

    if output_path is None:
        # Print the details to the terminal
        print_item(
            item=version,
            row_attrs=LIST_ITEMS,
            rename={'deployment': 'deployment_name', 'version': 'version_name', **DEPLOYMENT_VERSION_FIELDS_RENAMED},
            fmt=format_
        )
    else:
        # Store only reusable settings
        dictionary = format_yaml(
            item=version,
            required_front=['version', 'deployment', *DEPLOYMENT_VERSION_FIELDS],
            rename={'deployment': 'deployment_name', 'version': 'version_name', **DEPLOYMENT_VERSION_FIELDS_RENAMED},
            as_str=False
        )
        yaml_file = write_yaml(output_path, dictionary, default_file_name="version.yaml")
        if not quiet:
            click.echo('Version file stored in: %s' % yaml_file)
def imports_confirm(import_id, yaml_file, format_):
    """
    Confirm (and update) an import by selecting the objects in the import.

    \b
    Define the import object selection using a yaml file.
    For example:
    ```
    deployments:
      deployment-1:
        description: My deployment
        labels:
          my-key-1: my-label-1
          my-key-2: my-label-2
        input_type: structured
        output_type: structured
        input_fields:
          - name: input
            data_type: int
        output_fields:
          - name: output
            data_type: int
        default_version: v1
        versions:
          v1:
            zip: "deployments/deployment_deployment-1/versions/deployment_deployment-1_version_v1.zip"
            description:
            language: python3.8
            deployment_mode: express
            maximum_idle_time: 300
            minimum_instances: 0
            maximum_instances: 5
            memory_allocation: 512
            request_retention_mode: full
            request_retention_time: 604800
            environment_variables:
              deployment_version_env_var_1:
                value: env_var_value_1
                secret: True
              deployment_version_env_var_2:
                value: env_var_value_2
                secret: False
          v2: {}
    pipelines:
      pipeline-1:
        description: My pipeline
        labels:
          my-key-1: my-label-1
          my-key-2: my-label-2
        input_type: structured
        output_type: structured
        input_fields:
          - name: input
            data_type: int
        output_fields:
          - name: output
            data_type: int
        default_version: v1
        versions:
          v1:
            description:
            labels:
            request_retention_mode: full
            request_retention_time: 604800
            objects:
              - name: obj-1
                reference_name: deployment-1
                reference_version: v1
            attachments:
              - sources:
                  - mapping:
                      - source_field_name: input
                        destination_field_name: input
                    source_name: pipeline_start
                destination_name: obj-1
              - sources:
                  - mapping:
                      - source_field_name: input
                        destination_field_name: output
                    source_name: obj-1
                destination_name: pipeline_end
          v2: {}
    ```
    """

    project_name = get_current_project(error=True)
    yaml_content = read_yaml(yaml_file)
    client = init_client()

    # Each selection section is optional; when present it must be a mapping
    deployments = yaml_content.get('deployments', {})
    if not isinstance(deployments, dict):
        raise Exception(
            "Deployments field should be a dictionary with deployment names as key and deployment and version "
            "details as value"
        )

    pipelines = yaml_content.get('pipelines', {})
    if not isinstance(pipelines, dict):
        raise Exception(
            "Pipelines field should be a dictionary with pipelines names as key and pipeline and version "
            "details as value"
        )

    environment_variables = yaml_content.get('environment_variables', {})
    if not isinstance(environment_variables, dict):
        raise Exception(
            "Environment_variables field should be a dictionary with environment variable name as key and variable "
            "details as value"
        )

    import_update_data = api.ImportUpdate(
        deployments=deployments, pipelines=pipelines, environment_variables=environment_variables
    )
    response = client.imports_update(project_name=project_name, import_id=import_id, data=import_update_data)
    client.api_client.close()

    print_item(item=response, row_attrs=GET_ITEMS, fmt=format_)
def exports_create(yaml_file, format_):
    """
    Create a new export.

    \b
    Define the export objects parameters using a yaml file.
    For example:
    ```
    deployments:
      deployment-1:
        versions:
          v1:
            environment_variables:
              deployment_version_env_var_1:
                include_value: True
              deployment_version_env_var_2:
                include_value: False
          v2: {}
        environment_variables:
          deployment_env_var:
            include_value: False
    pipelines:
      pipeline-1:
        versions:
          v1: {}
          v2: {}
    environment_variables:
      project_env_var:
        include_value: False
    ```
    """

    project_name = get_current_project(error=True)
    yaml_content = read_yaml(yaml_file)
    client = init_client()

    # Each selection section is optional; when present it must be a mapping
    deployments = yaml_content.get('deployments', {})
    if not isinstance(deployments, dict):
        raise Exception(
            "Deployments field should be a dictionary with deployment names as key and versions as value"
        )

    pipelines = yaml_content.get('pipelines', {})
    if not isinstance(pipelines, dict):
        raise Exception(
            "Pipelines field should be a dictionary with pipeline names as key and versions as value"
        )

    environment_variables = yaml_content.get('environment_variables', {})
    if not isinstance(environment_variables, dict):
        raise Exception(
            "Environment_variables field should be a dictionary with environment variable name as key and details "
            "of whether to include the variable value as value"
        )

    export = api.ExportCreate(
        deployments=deployments, pipelines=pipelines, environment_variables=environment_variables
    )
    response = client.exports_create(project_name=project_name, data=export)
    client.api_client.close()

    print_item(item=response, row_attrs=LIST_ITEMS, fmt=format_)
def deployments_create(deployment_name, yaml_file, format_):
    """
    Create a new deployment.

    \b
    Define the deployment parameters using a yaml file.
    For example:
    ```
    deployment_name: my-deployment-name
    deployment_description: Deployment created via command line.
    deployment_labels:
      my-key-1: my-label-1
      my-key-2: my-label-2
    input_type: structured
    input_fields:
      - name: param1
        data_type: int
      - name: param2
        data_type: string
    output_type: plain
    ```

    The deployment name can either be passed as argument or specified inside the yaml file. If it is both passed as
    argument and specified inside the yaml file, the value passed as argument is used.

    Possible input/output types: [structured, plain]. Possible data_types: [blob, int, string, double, bool,
    array_string, array_int, array_double].
    """

    project_name = get_current_project(error=True)
    yaml_content = read_yaml(yaml_file, required_fields=DEPLOYMENT_REQUIRED_FIELDS)
    client = init_client()

    assert 'deployment_name' in yaml_content or deployment_name, 'Please, specify the deployment name in either the ' \
                                                                 'yaml file or as a command argument'

    # The command argument takes precedence over the yaml value
    deployment_name = set_dict_default(deployment_name, yaml_content, 'deployment_name')
    description = set_dict_default(None, yaml_content, 'deployment_description')

    if 'input_fields' in yaml_content and isinstance(yaml_content['input_fields'], list):
        input_fields = [
            api.DeploymentInputFieldCreate(name=item['name'], data_type=item['data_type'])
            for item in yaml_content['input_fields']
        ]
    else:
        input_fields = None

    if 'output_fields' in yaml_content and isinstance(yaml_content['output_fields'], list):
        # Bug fix: output fields were previously built with DeploymentInputFieldCreate;
        # they must use the output field model
        output_fields = [
            api.DeploymentOutputFieldCreate(name=item['name'], data_type=item['data_type'])
            for item in yaml_content['output_fields']
        ]
    else:
        output_fields = None

    labels = yaml_content['deployment_labels'] if 'deployment_labels' in yaml_content else {}

    deployment = api.DeploymentCreate(
        name=deployment_name,
        description=description,
        input_type=yaml_content['input_type'],
        output_type=yaml_content['output_type'],
        input_fields=input_fields,
        output_fields=output_fields,
        labels=labels
    )
    response = client.deployments_create(project_name=project_name, data=deployment)
    client.api_client.close()

    print_item(
        item=response,
        row_attrs=LIST_ITEMS,
        required_front=['id', 'name', 'project', 'description', 'labels', 'input_type', 'output_type'],
        optional=['input_fields name', 'input_fields data_type', 'output_fields name', 'output_fields data_type'],
        required_end=['creation_date', 'last_updated'],
        rename={'name': 'deployment_name', 'description': 'deployment_description', 'labels': 'deployment_labels'},
        fmt=format_
    )