from werkzeug.datastructures import FileStorage


def upload_component(uploadfile: FileStorage,
                     name=None,
                     existing_id=None):  # noqa: E501
    """upload_component

    :param uploadfile: The component to upload. Maximum size of 32MB is supported.
    :type uploadfile: werkzeug.datastructures.FileStorage
    :param name: A name for this component, optional
    :type name: str
    :param existing_id: The ID of a component to be replaced, INTERNAL USE ONLY
    :type existing_id: str

    :rtype: ApiComponent
    """
    yaml_file_content = get_yaml_file_content_from_uploadfile(uploadfile)

    return _upload_component_yaml(yaml_file_content, name, existing_id)
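
# A minimal usage sketch: exercising the handler directly (e.g. from a test)
# by wrapping a local YAML file in a FileStorage object. The file name and
# component name below are illustrative assumptions, not part of this module.
#
#     with open("my_component.yaml", "rb") as f:
#         fs = FileStorage(stream=f, filename="my_component.yaml",
#                          content_type="application/x-yaml")
#         component = upload_component(fs, name="my-component")

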
def upload_model(uploadfile: FileStorage,
                 name=None,
                 existing_id=None):  # noqa: E501
    """upload_model

    :param uploadfile: The model to upload. Maximum size of 32MB is supported.
    :type uploadfile: werkzeug.datastructures.FileStorage
    :param name: A name for this model, optional
    :type name: str
    :param existing_id: The model identifier of the model to be replaced, INTERNAL USE ONLY
    :type existing_id: str

    :rtype: ApiModel
    """
    yaml_file_content = get_yaml_file_content_from_uploadfile(uploadfile)

    # Forward existing_id (documented above) so model replacement works,
    # mirroring _upload_component_yaml.
    return _upload_model_yaml(yaml_file_content, name, existing_id)


def upload_dataset(uploadfile: FileStorage,
                   name=None,
                   existing_id=None):  # noqa: E501
    """upload_dataset

    :param uploadfile: The dataset YAML file to upload. Can be a GZip-compressed TAR file (.tgz, .tar.gz) or a YAML file (.yaml, .yml). Maximum size is 32MB.
    :type uploadfile: werkzeug.datastructures.FileStorage
    :param name: A name for this dataset, optional
    :type name: str
    :param existing_id: The ID of a dataset to be replaced, INTERNAL USE ONLY
    :type existing_id: str

    :rtype: ApiDataset
    """
    yaml_file_content = get_yaml_file_content_from_uploadfile(uploadfile)

    # Forward existing_id (documented above) so dataset replacement works.
    return _upload_dataset_yaml(yaml_file_content, name, existing_id)
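
# A minimal usage sketch: per the docstring, the dataset endpoint also accepts
# a GZip-compressed TAR archive. The archive name and content type here are
# illustrative assumptions.
#
#     with open("my_dataset.tgz", "rb") as f:
#         fs = FileStorage(stream=f, filename="my_dataset.tgz",
#                          content_type="application/gzip")
#         dataset = upload_dataset(fs, name="my-dataset")

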
def upload_pipeline(uploadfile: FileStorage,
                    name=None,
                    description=None,
                    labels=None,
                    annotations=None):  # noqa: E501
    """upload_pipeline

    :param uploadfile: The pipeline to upload. Maximum size of 32MB is supported.
    :type uploadfile: werkzeug.datastructures.FileStorage
    :param name: A name for this pipeline, optional
    :type name: str
    :param description: A description for this pipeline, optional
    :type description: str
    :param labels: A string representation of a JSON dictionary of labels describing this pipeline, optional. See https://kubernetes.io/docs/concepts/overview/working-with-objects/labels
    :type labels: str
    :param annotations: A string representation of a JSON dictionary of annotations describing this pipeline, optional. See https://kubernetes.io/docs/concepts/overview/working-with-objects/annotations
    :type annotations: str

    :rtype: ApiPipelineExtended
    """
    yaml_file_content = get_yaml_file_content_from_uploadfile(uploadfile)

    return _upload_pipeline_yaml(yaml_file_content, name, description, labels, annotations)
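
# A minimal usage sketch: calling the upload endpoint over HTTP with
# `requests`. The URL path and the form/query parameter names are assumptions
# based on this handler's signature; note that `labels` and `annotations` are
# passed as JSON *strings*, as documented above.
#
#     import json
#     import requests
#
#     with open("my_pipeline.yaml", "rb") as f:
#         resp = requests.post(
#             "http://localhost:8080/apis/v1alpha1/pipelines/upload",
#             files={"uploadfile": f},
#             params={"name": "my-pipeline",
#                     "description": "a demo pipeline",
#                     "labels": json.dumps({"team": "mlx"})})
#     pipeline = resp.json()

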
def upload_notebook(uploadfile: FileStorage,
                    name=None,
                    enterprise_github_token=None,
                    existing_id=None):  # noqa: E501
    """upload_notebook

    :param uploadfile: The notebook to upload. Maximum size of 32MB is supported.
    :type uploadfile: werkzeug.datastructures.FileStorage
    :param name: A name for this notebook, optional
    :type name: str
    :param enterprise_github_token: Optional GitHub API token providing read-access to notebooks stored on Enterprise GitHub accounts.
    :type enterprise_github_token: str
    :param existing_id: The ID of a notebook to be replaced, INTERNAL USE ONLY
    :type existing_id: str

    :rtype: ApiNotebook
    """
    yaml_file_content = get_yaml_file_content_from_uploadfile(uploadfile)

    # Forward existing_id (documented above) so notebook replacement works;
    # assumes _upload_notebook_yaml accepts it after the token, like its
    # sibling helpers.
    return _upload_notebook_yaml(yaml_file_content, name,
                                 enterprise_github_token, existing_id)
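
# A minimal usage sketch: uploading a notebook definition whose source lives
# on an Enterprise GitHub instance. The token is a placeholder and only needs
# read access to the referenced repository.
#
#     with open("my_notebook.yaml", "rb") as f:
#         fs = FileStorage(stream=f, filename="my_notebook.yaml")
#         notebook = upload_notebook(fs, name="my-notebook",
#                                    enterprise_github_token="<token>")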