Example #1
def main():
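    # Note: the "::debug::" and "::error::" prefixes used throughout are GitHub
    # Actions workflow commands; they route a message to the debug log or attach
    # an error annotation to the workflow run.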
    # Loading input values
    print("::debug::Loading input values")
    model_name = os.environ.get("INPUT_MODEL_NAME", default=None)
    model_version = os.environ.get("INPUT_MODEL_VERSION", default=None)

    # Casting input values
    print("::debug::Casting input values")
    try:
        model_version = int(model_version)
    except TypeError as exception:
        print(f"::debug::Could not cast model version to int: {exception}")
        model_version = None
    except ValueError as exception:
        print(f"::debug::Could not cast model version to int: {exception}")
        model_version = None

    # Loading azure credentials
    print("::debug::Loading azure credentials")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS"
        )
        raise AMLConfigurationException(
            "Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-compute/blob/master/README.md"
        )

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=azure_credentials,
                  schema=azure_credentials_schema,
                  input_name="AZURE_CREDENTIALS")

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Loading parameters file
    print("::debug::Loading parameters file")
    parameters_file = os.environ.get("INPUT_PARAMETERS_FILE",
                                     default="deploy.json")
    parameters_file_path = os.path.join(".cloud", ".azure", parameters_file)
    try:
        with open(parameters_file_path) as f:
            parameters = json.load(f)
    except FileNotFoundError:
        print(
            f"::debug::Could not find parameter file in {parameters_file_path}. Please provide a parameter file in your repository  if you do not want to use default settings (e.g. .cloud/.azure/deploy.json)."
        )
        parameters = {}

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=parameters,
                  schema=parameters_schema,
                  input_name="PARAMETERS_FILE")

    # Loading Workspace
    print("::debug::Loading AML Workspace")
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=azure_credentials.get("tenantId", ""),
        service_principal_id=azure_credentials.get("clientId", ""),
        service_principal_password=azure_credentials.get("clientSecret", ""))
    config_file_path = os.environ.get("GITHUB_WORKSPACE",
                                      default=".cloud/.azure")
    config_file_name = "aml_arm_config.json"
    try:
        ws = Workspace.from_config(path=config_file_path,
                                   _file_name=config_file_name,
                                   auth=sp_auth)
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}"
        )
        raise AuthenticationException
    except AuthenticationError as exception:
        print(f"::error::Microsoft REST Authentication Error: {exception}")
        raise AuthenticationError
    except AdalError as exception:
        print(
            f"::error::Active Directory Authentication Library Error: {exception}"
        )
        raise AdalError
    except ProjectSystemException as exception:
        print(f"::error::Workspace authorizationfailed: {exception}")
        raise ProjectSystemException

    # Loading deployment target
    print("::debug::Loading deployment target")
    try:
        deployment_target = ComputeTarget(workspace=ws,
                                          name=parameters.get(
                                              "deployment_compute_target", ""))
    except ComputeTargetException:
        deployment_target = None
    except TypeError:
        deployment_target = None

    # Loading model
    print("::debug::Loading model")
    try:
        model = Model(workspace=ws, name=model_name, version=model_version)
    except WebserviceException as exception:
        print(
            f"::error::Could not load model with provided details: {exception}"
        )
        raise AMLConfigurationException(
            f"Could not load model with provided details: {exception}")

    # Creating inference config
    print("::debug::Creating inference config")
    if os.environ.get("CONTAINER_REGISTRY_ADRESS", None) is not None:
        container_registry = ContainerRegistry()
        container_registry.address = os.environ.get(
            "CONTAINER_REGISTRY_ADRESS", None)
        container_registry.username = os.environ.get(
            "CONTAINER_REGISTRY_USERNAME", None)
        container_registry.password = os.environ.get(
            "CONTAINER_REGISTRY_PASSWORD", None)
    else:
        container_registry = None
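    # A dedicated registry object is only built when CONTAINER_REGISTRY_ADRESS is
    # set; otherwise the InferenceConfig below is created without a custom registry.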

    try:
        inference_config = InferenceConfig(
            entry_script=parameters.get("inference_entry_script", "score.py"),
            runtime=parameters.get("runtime", "python"),
            conda_file=parameters.get("conda_file", "environment.yml"),
            extra_docker_file_steps=parameters.get("extra_docker_file_steps",
                                                   None),
            source_directory=parameters.get("inference_source_directory",
                                            "code/deploy/"),
            enable_gpu=parameters.get("enable_gpu", None),
            description=parameters.get("description", None),
            base_image=parameters.get("base_image", None),
            base_image_registry=container_registry,
            cuda_version=parameters.get("cuda_version", None))
    except WebserviceException as exception:
        print(
            f"::debug::Failed to create InferenceConfig. Trying to create no code deployment: {exception}"
        )
        inference_config = None
    except TypeError as exception:
        print(
            f"::debug::Failed to create InferenceConfig. Trying to create no code deployment: {exception}"
        )
        inference_config = None
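    # With inference_config set to None, Model.deploy falls back to a no-code
    # deployment, which generally requires the model to have been registered with
    # a supported framework and a resource configuration.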

    # Loading run config
    print("::debug::Loading run config")
    model_resource_config = model.resource_configuration
    cpu_cores = get_resource_config(config=parameters.get("cpu_cores", None),
                                    resource_config=model_resource_config,
                                    config_name="cpu")
    memory_gb = get_resource_config(config=parameters.get("memory_gb", None),
                                    resource_config=model_resource_config,
                                    config_name="memory_in_gb")
    gpu_cores = get_resource_config(config=parameters.get("gpu_cores", None),
                                    resource_config=model_resource_config,
                                    config_name="gpu")

    # Creating deployment config
    print("::debug::Creating deployment config")
    if type(deployment_target) is AksCompute:
        deployment_config = AksWebservice.deploy_configuration(
            autoscale_enabled=parameters.get("autoscale_enabled", None),
            autoscale_min_replicas=parameters.get("autoscale_min_replicas",
                                                  None),
            autoscale_max_replicas=parameters.get("autoscale_max_replicas",
                                                  None),
            autoscale_refresh_seconds=parameters.get(
                "autoscale_refresh_seconds", None),
            autoscale_target_utilization=parameters.get(
                "autoscale_target_utilization", None),
            collect_model_data=parameters.get("model_data_collection_enabled",
                                              None),
            auth_enabled=parameters.get("authentication_enabled", None),
            cpu_cores=cpu_cores,
            memory_gb=memory_gb,
            enable_app_insights=parameters.get("app_insights_enabled", None),
            scoring_timeout_ms=parameters.get("scoring_timeout_ms", None),
            replica_max_concurrent_requests=parameters.get(
                "replica_max_concurrent_requests", None),
            max_request_wait_time=parameters.get("max_request_wait_time",
                                                 None),
            num_replicas=parameters.get("num_replicas", None),
            primary_key=os.environ.get("PRIMARY_KEY", None),
            secondary_key=os.environ.get("SECONDARY_KEY", None),
            tags=parameters.get("tags", None),
            properties=parameters.get("properties", None),
            description=parameters.get("description", None),
            gpu_cores=gpu_cores,
            period_seconds=parameters.get("period_seconds", None),
            initial_delay_seconds=parameters.get("initial_delay_seconds",
                                                 None),
            timeout_seconds=parameters.get("timeout_seconds", None),
            success_threshold=parameters.get("success_threshold", None),
            failure_threshold=parameters.get("failure_threshold", None),
            namespace=parameters.get("namespace", None),
            token_auth_enabled=parameters.get("token_auth_enabled", None))
    else:
        deployment_config = AciWebservice.deploy_configuration(
            cpu_cores=cpu_cores,
            memory_gb=memory_gb,
            tags=parameters.get("tags", None),
            properties=parameters.get("properties", None),
            description=parameters.get("description", None),
            location=parameters.get("location", None),
            auth_enabled=parameters.get("authentication_enabled", None),
            ssl_enabled=parameters.get("ssl_enabled", None),
            enable_app_insights=parameters.get("app_insights_enabled", None),
            ssl_cert_pem_file=parameters.get("ssl_cert_pem_file", None),
            ssl_key_pem_file=parameters.get("ssl_key_pem_file", None),
            ssl_cname=parameters.get("ssl_cname", None),
            dns_name_label=parameters.get("dns_name_label", None),
            primary_key=os.environ.get("PRIMARY_KEY", None),
            secondary_key=os.environ.get("SECONDARY_KEY", None),
            collect_model_data=parameters.get("model_data_collection_enabled",
                                              None),
            cmk_vault_base_url=os.environ.get("CMK_VAULT_BASE_URL", None),
            cmk_key_name=os.environ.get("CMK_KEY_NAME", None),
            cmk_key_version=os.environ.get("CMK_KEY_VERSION", None))

    # Deploying model
    print("::debug::Deploying model")
    try:
        # Default service name
        repository_name = os.environ.get("GITHUB_REPOSITORY").split("/")[-1]
        branch_name = os.environ.get("GITHUB_REF").split("/")[-1]
        default_service_name = f"{repository_name}-{branch_name}".lower(
        ).replace("_", "-")[:32]

        service = Model.deploy(workspace=ws,
                               name=parameters.get("name",
                                                   default_service_name),
                               models=[model],
                               inference_config=inference_config,
                               deployment_config=deployment_config,
                               deployment_target=deployment_target,
                               overwrite=True)
        service.wait_for_deployment(show_output=True)
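        # wait_for_deployment blocks until the deployment completes and, with
        # show_output=True, streams the deployment output to the log.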
    except WebserviceException as exception:
        print(f"::error::Model deployment failed with exception: {exception}")
        # `service` is only defined if Model.deploy succeeded before the failure
        service_logs = service.get_logs() if "service" in locals() else None
        raise AMLDeploymentException(
            f"Model deployment failed. Logs: {service_logs} \nException: {exception}"
        )

    # Checking status of service
    print("::debug::Checking status of service")
    if service.state != "Healthy":
        service_logs = service.get_logs()
        print(
            f"::error::Model deployment failed with state '{service.state}': {service_logs}"
        )
        raise AMLDeploymentException(
            f"Model deployment failed with state '{service.state}': {service_logs}"
        )

    if parameters.get("test_enabled", False):
        # Testing service
        print("::debug::Testing service")
        root = os.environ.get("GITHUB_WORKSPACE", default=None)
        test_file_path = parameters.get("test_file_path", "code/test/test.py")
        test_file_function_name = parameters.get("test_file_function_name",
                                                 "main")

        print("::debug::Adding root to system path")
        sys.path.insert(1, f"{root}")

        print("::debug::Importing module")
        test_file_path = f"{test_file_path}.py" if not test_file_path.endswith(
            ".py") else test_file_path
        try:
            test_spec = importlib.util.spec_from_file_location(
                name="testmodule", location=test_file_path)
            test_module = importlib.util.module_from_spec(spec=test_spec)
            test_spec.loader.exec_module(test_module)
            test_function = getattr(test_module, test_file_function_name, None)
        except ModuleNotFoundError as exception:
            print(
                f"::error::Could not load python script in your repository which defines theweb service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
            raise AMLConfigurationException(
                f"Could not load python script in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
        except FileNotFoundError as exception:
            print(
                f"::error::Could not load python script or function in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
            raise AMLConfigurationException(
                f"Could not load python script or function in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
        except AttributeError as exception:
            print(
                f"::error::Could not load python script or function in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
            raise AMLConfigurationException(
                f"Could not load python script or function in your repository which defines the web service tests (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )

        # Load experiment config
        print("::debug::Loading experiment config")
        try:
            test_function(service)
        except TypeError as exception:
            print(
                f"::error::Could not load experiment config from your module (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
            raise AMLConfigurationException(
                f"Could not load experiment config from your module (Script: /{test_file_path}, Function: {test_file_function_name}()): {exception}"
            )
        except Exception as exception:
            print(
                f"::error::The webservice tests did not complete successfully: {exception}"
            )
            raise AMLDeploymentException(
                f"The webservice tests did not complete successfully: {exception}"
            )

    # Deleting service if desired
    if parameters.get("delete_service_after_deployment", False):
        service.delete()
    else:
        # Create outputs
        print("::debug::Creating outputs")
        print(f"::set-output name=service_scoring_uri::{service.scoring_uri}")
        print(f"::set-output name=service_swagger_uri::{service.swagger_uri}")
    print(
        "::debug::Successfully finished Azure Machine Learning Deploy Action")
Example #2
def main():
    # # Loading input values
    # print("::debug::Loading input values")
    template_file = os.environ.get("INPUT_ARMTEMPLATE_FILE",
                                   default="arm_deploy.json")
    template_params_file = os.environ.get("INPUT_ARMTEMPLATEPARAMS_FILE",
                                          default="")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    resource_group = os.environ.get("INPUT_RESOURCE_GROUP", default=None)
    mapped_params = os.environ.get("INPUT_MAPPED_PARAMS", default="{}")
    deployment_mode = os.environ.get("INPUT_DEPLOYMENT_MODE",
                                     default="Incremental")

    deploy_enum = get_deploy_mode_obj(deployment_mode)
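    # get_deploy_mode_obj presumably maps the input string to the corresponding
    # DeploymentMode value of the resource management SDK ("Incremental" or "Complete").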
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS"
        )
        raise AMLConfigurationException(
            f"Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md"
        )

    try:
        mapped_params = json.loads(mapped_params)
    except JSONDecodeError:
        print(
            "::error::Incorrect mapped parameters Format , please put mapped parameters strings like this {\"patToken\":\"${{secrets.PAT_TOKEN}}\", .... }"
        )
        raise AMLConfigurationException(
            f"Incorrect or poorly formed mapped params. See setup in https://github.com/Azure/aml_configure/blob/master/README.md"
        )

    if not resource_group:
        raise AMLConfigurationException(f"A resource group must be provided")
    # Checking provided parameters
    print("::debug::Checking provided parameters")
    required_parameters_provided(
        parameters=azure_credentials,
        keys=["tenantId", "clientId", "clientSecret"],
        message=
        "Required parameter(s) not found in your azure credentials saved in AZURE_CREDENTIALS secret for logging in to the workspace. Please provide a value for the following key(s): "
    )

    # # Loading parameters file
    # print("::debug::Loading parameters file")
    template_file_file_path = os.path.join(".cloud", ".azure", template_file)

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    #mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Login User on CLI
    tenant_id = azure_credentials.get("tenantId", "")
    service_principal_id = azure_credentials.get("clientId", "")
    service_principal_password = azure_credentials.get("clientSecret", "")
    subscriptionId = azure_credentials.get("subscriptionId", "")

    parameters = get_template_parameters(template_params_file, mapped_params)
    credentials = None
    try:
        credentials = ServicePrincipalCredentials(
            client_id=service_principal_id,
            secret=service_principal_password,
            tenant=tenant_id)
    except Exception as ex:
        raise CredentialsVerificationError(ex)

    client = None
    try:
        client = ResourceManagementClient(credentials, subscriptionId)
    except Exception as ex:
        raise ResourceManagementError(ex)

    template = None
    with open(template_file_file_path, 'r') as template_file_fd:
        template = json.load(template_file_fd)

    deployment_properties = {
        'properties': {
            'mode': deploy_enum,
            'template': template,
            'parameters': parameters
        }
    }
    deployment_async_operation = None
    try:
        validate = client.deployments.validate(resource_group, "azure-sample",
                                               deployment_properties)
        validate.wait()
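        # Validating the template against the resource group first lets an invalid
        # template fail before create_or_update touches any resources.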
    except Exception as ex:
        raise ActionDeploymentError(ex)
    try:
        deployment_async_operation = client.deployments.create_or_update(
            resource_group, 'azure-sample', deployment_properties)
        deployment_async_operation.wait()
    except Exception as ex:
        raise ActionDeploymentError(ex)
    print("Deployment done")
    print(deployment_async_operation)
    print("next----------")
    print(deployment_async_operation.result())
    print("next----------")
    print(deployment_async_operation.result().properties)
Example #3
def main():
    # Loading input values
    print("::debug::Loading input values")
    experiment_name = os.environ.get("INPUT_EXPERIMENT_NAME", default=None)
    run_id = os.environ.get("INPUT_RUN_ID", default=None)

    # Loading azure credentials
    print("::debug::Loading azure credentials")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS"
        )
        raise AMLConfigurationException(
            "Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md"
        )

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=azure_credentials,
                  schema=azure_credentials_schema,
                  input_name="AZURE_CREDENTIALS")

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Loading parameters file
    print("::debug::Loading parameters file")
    parameters_file = os.environ.get("INPUT_PARAMETERS_FILE",
                                     default="registermodel.json")
    parameters_file_path = os.path.join(".cloud", ".azure", parameters_file)
    try:
        with open(parameters_file_path) as f:
            parameters = json.load(f)
    except FileNotFoundError:
        print(
            f"::debug::Could not find parameter file in {parameters_file_path}. Please provide a parameter file in your repository if you do not want to use default settings (e.g. .cloud/.azure/registermodel.json)."
        )
        parameters = {}

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=parameters,
                  schema=parameters_schema,
                  input_name="PARAMETERS_FILE")

    # Define target cloud
    if azure_credentials.get(
            "resourceManagerEndpointUrl",
            "").startswith("https://management.usgovcloudapi.net"):
        cloud = "AzureUSGovernment"
    elif azure_credentials.get(
            "resourceManagerEndpointUrl",
            "").startswith("https://management.chinacloudapi.cn"):
        cloud = "AzureChinaCloud"
    else:
        cloud = "AzureCloud"

    # Loading Workspace
    print("::debug::Loading AML Workspace")
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=azure_credentials.get("tenantId", ""),
        service_principal_id=azure_credentials.get("clientId", ""),
        service_principal_password=azure_credentials.get("clientSecret", ""),
        cloud=cloud)
    config_file_path = os.environ.get("GITHUB_WORKSPACE",
                                      default=".cloud/.azure")
    config_file_name = "aml_arm_config.json"
    try:
        ws = Workspace.from_config(path=config_file_path,
                                   _file_name=config_file_name,
                                   auth=sp_auth)
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}"
        )
        raise AuthenticationException
    except AuthenticationError as exception:
        print(f"::error::Microsoft REST Authentication Error: {exception}")
        raise AuthenticationError
    except AdalError as exception:
        print(
            f"::error::Active Directory Authentication Library Error: {exception}"
        )
        raise AdalError
    except ProjectSystemException as exception:
        print(f"::error::Workspace authorization failed: {exception}")
        raise ProjectSystemException

    # Define default model name
    repository_name = os.environ.get("GITHUB_REPOSITORY").split("/")[-1]
    branch_name = os.environ.get("GITHUB_REF").split("/")[-1]
    default_model_name = f"{repository_name}-{branch_name}"
    print(
        f"::debug::experiment_name: '{experiment_name}' and run_id: '{run_id}'"
    )
    if not experiment_name or not run_id:
        # Registering model from local GitHub workspace
        print("::debug::Registering model from local GitHub workspace")
        local_model = True

        # Defining model path
        print("::debug::Defining model path")
        model_file_name = parameters.get("model_file_name", "model.pkl")
        if len(splitall(model_file_name)) > 1:
            model_path = model_file_name
        else:
            directory = os.environ.get("GITHUB_WORKSPACE", default=None)
            model_paths = []
            for root, dirs, files in os.walk(directory):
                for filename in files:
                    if filename == model_file_name:
                        path = os.path.join(root, filename)
                        model_paths.append(path)
            model_path = model_paths[0]
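            # If several files match, the first hit from the directory walk wins;
            # an empty match list would raise an IndexError here.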
    else:
        # Registering model from AML run
        print("::debug::Registering model from AML run")
        local_model = False

        # Loading experiment
        print("::debug::Loading experiment")
        try:
            experiment = Experiment(workspace=ws, name=experiment_name)
        except UserErrorException as exception:
            print(f"::error::Loading experiment failed: {exception}")
            raise AMLConfigurationException(
                "Could not load experiment. Please your experiment name as input parameter."
            )

        # Loading run by run id
        print("::debug::Loading run by run id")
        try:
            run = Run(experiment=experiment, run_id=run_id)
        except KeyError as exception:
            print(f"::error::Loading run failed: {exception}")
            raise AMLConfigurationException(
                "Could not load run. Please add your run id as input parameter."
            )

        # Loading best run
        print("::debug::Loading best run")
        best_run = get_best_run(experiment=experiment,
                                run=run,
                                pipeline_child_run_name=parameters.get(
                                    "pipeline_child_run_name",
                                    "model_training"))

        # Comparing metrics of runs
        print("::debug::Comparing metrics of runs")
        # Default model name
        if not parameters.get("force_registration", False):
            compare_metrics(workspace=ws,
                            run=best_run,
                            model_name=parameters.get("model_name",
                                                      default_model_name)[:32],
                            metrics_max=parameters.get("metrics_max", []),
                            metrics_min=parameters.get("metrics_min", []))

        # Defining model path
        print("::debug::Defining model path")
        model_file_name = parameters.get("model_file_name", "model.pkl")

        print(f"::debug:: best run file names: {best_run.get_file_names()}")
        if len(splitall(model_file_name)) > 1:
            model_path = model_file_name
        else:
            model_path = [
                file_name for file_name in best_run.get_file_names()
                if model_file_name in os.path.split(file_name)[-1]
            ][0]

    # Defining model framework
    print("::debug::Defining model framework")
    model_framework = get_model_framework(
        name=parameters.get("model_framework", None))

    # Defining datasets
    print("::debug::Defining datasets")
    datasets = []
    for dataset_name in parameters.get("datasets", []):
        dataset = get_dataset(workspace=ws, name=dataset_name)
        if dataset is not None:
            datasets.append((f"{dataset_name}", dataset))
    input_dataset = get_dataset(workspace=ws,
                                name=parameters.get("sample_input_dataset",
                                                    None))
    output_dataset = get_dataset(workspace=ws,
                                 name=parameters.get("sample_output_dataset",
                                                     None))

    # Defining resource configuration
    print("::debug::Defining resource configuration")
    cpu = parameters.get("cpu_cores", None)
    memory = parameters.get("memory_gb", None)
    resource_configuration = ResourceConfiguration(
        cpu=cpu, memory_in_gb=memory) if (cpu is not None
                                          and memory is not None) else None

    if local_model:
        try:
            model = Model.register(
                workspace=ws,
                model_path=model_path,
                model_name=parameters.get("model_name",
                                          default_model_name)[:32],
                tags=parameters.get("model_tags", None),
                properties=parameters.get("model_properties", None),
                description=parameters.get("model_description", None),
                datasets=datasets,
                model_framework=model_framework,
                model_framework_version=parameters.get(
                    "model_framework_version", None),
                child_paths=[],
                sample_input_dataset=input_dataset,
                sample_output_dataset=output_dataset,
                resource_configuration=resource_configuration)
        except TypeError as exception:
            print(f"::error::Model could not be registered: {exception}")
            raise AMLConfigurationException("Model could not be registered")
        except WebserviceException as exception:
            print(f"::error::Model could not be registered: {exception}")
            raise AMLConfigurationException("Model could not be registered")
    else:
        try:
            model = best_run.register_model(
                model_name=parameters.get("model_name",
                                          default_model_name)[:32],
                model_path=model_path,
                tags=parameters.get("model_tags", None),
                properties=parameters.get("model_properties", None),
                model_framework=model_framework,
                model_framework_version=parameters.get(
                    "model_framework_version", None),
                description=parameters.get("model_description", None),
                datasets=datasets,
                sample_input_dataset=input_dataset,
                sample_output_dataset=output_dataset,
                resource_configuration=resource_configuration)
        except ModelPathNotFoundException as exception:
            print(
                f"::error::Model name not found in outputs folder. Please provide the correct model file name and make sure that the model was saved by the run: {exception}"
            )
            raise AMLConfigurationException(
                "Model name not found in outputs folder. Please provide the correct model file name and make sure that the model was saved by the run."
            )
        except WebserviceException as exception:
            print(f"::error::Model could not be registered: {exception}")
            raise AMLConfigurationException("Model could not be registered")

    # Create outputs
    print("::debug::Creating outputs")
    print(f"::set-output name=model_name::{model.name}")
    print(f"::set-output name=model_version::{model.version}")
    print(f"::set-output name=model_id::{model.id}")
    print(
        "::debug::Successfully completed Azure Machine Learning Register Model Action"
    )
Example #4
def main():
    # Loading azure credentials
    print("::debug::Loading azure credentials")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS. The JSON should include the following keys: 'tenantId', 'clientId', 'clientSecret' and 'subscriptionId'."
        )
        raise AMLConfigurationException(
            "Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md"
        )

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=azure_credentials,
                  schema=azure_credentials_schema,
                  input_name="AZURE_CREDENTIALS")

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Loading parameters file
    print("::debug::Loading parameters file")
    parameters_file = os.environ.get("INPUT_PARAMETERS_FILE",
                                     default="workspace.json")
    parameters_file_path = os.path.join(".cloud", ".azure", parameters_file)
    try:
        with open(parameters_file_path) as f:
            parameters = json.load(f)
    except FileNotFoundError:
        print(
            f"::debug::Could not find parameter file in {parameters_file_path}. Please provide a parameter file in your repository if you do not want to use default settings (e.g. .cloud/.azure/workspace.json)."
        )
        parameters = {}

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=parameters,
                  schema=parameters_schema,
                  input_name="PARAMETERS_FILE")

    # Define target cloud
    if azure_credentials.get(
            "resourceManagerEndpointUrl",
            "").startswith("https://management.usgovcloudapi.net"):
        cloud = "AzureUSGovernment"
    elif azure_credentials.get(
            "resourceManagerEndpointUrl",
            "").startswith("https://management.chinacloudapi.cn"):
        cloud = "AzureChinaCloud"
    else:
        cloud = "AzureCloud"

    # Loading Workspace
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=azure_credentials.get("tenantId", ""),
        service_principal_id=azure_credentials.get("clientId", ""),
        service_principal_password=azure_credentials.get("clientSecret", ""),
        cloud=cloud)
    try:
        print("::debug::Loading existing Workspace")
        # Default workspace and resource group name
        repository_name = str(
            os.environ.get("GITHUB_REPOSITORY")).split("/")[-1]

        ws = Workspace.get(
            name=parameters.get("name", repository_name),
            subscription_id=azure_credentials.get("subscriptionId", ""),
            resource_group=parameters.get("resource_group", repository_name),
            auth=sp_auth)
        print("::debug::Successfully loaded existing Workspace")
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}"
        )
        raise AuthenticationException
    except AuthenticationError as exception:
        print(f"::error::Microsoft REST Authentication Error: {exception}")
        raise AuthenticationError
    except AdalError as exception:
        print(
            f"::error::Active Directory Authentication Library Error: {exception}"
        )
        raise AdalError
    except (WorkspaceException, ProjectSystemException) as exception:
        print(f"::debug::Loading existing Workspace failed: {exception}")
        if parameters.get("create_workspace", False):
            try:
                print("::debug::Creating new Workspace")
                ws = Workspace.create(
                    name=parameters.get("name", repository_name),
                    subscription_id=azure_credentials.get(
                        "subscriptionId", ""),
                    resource_group=parameters.get("resource_group",
                                                  repository_name),
                    location=parameters.get("location", None),
                    create_resource_group=parameters.get(
                        "create_resource_group", True),
                    sku=parameters.get("sku", "basic"),
                    friendly_name=parameters.get("friendly_name", None),
                    storage_account=parameters.get("storage_account", None),
                    key_vault=parameters.get("key_vault", None),
                    app_insights=parameters.get("app_insights", None),
                    container_registry=parameters.get("container_registry",
                                                      None),
                    cmk_keyvault=parameters.get("cmk_key_vault", None),
                    resource_cmk_uri=parameters.get("resource_cmk_uri", None),
                    hbi_workspace=parameters.get("hbi_workspace", None),
                    auth=sp_auth,
                    exist_ok=True,
                    show_output=True)
            except WorkspaceException as exception:
                print(f"::error::Creating new Workspace failed: {exception}")
                raise AMLConfigurationException(
                    f"Creating new Workspace failed with 'WorkspaceException': {exception}."
                )
        else:
            print(
                f"::error::Loading existing Workspace failed with 'WorkspaceException' and new Workspace will not be created because parameter 'create_workspace' was not defined or set to false in your parameter file: {exception}"
            )
            raise AMLConfigurationException(
                "Loading existing Workspace failed with 'WorkspaceException' and new Workspace will not be created because parameter 'create_workspace' was not defined or set to false in your parameter file."
            )

    # Write Workspace ARM properties to config file
    print("::debug::Writing Workspace ARM properties to config file")
    config_file_path = os.environ.get("GITHUB_WORKSPACE",
                                      default=".cloud/.azure")
    config_file_name = "aml_arm_config.json"
    ws.write_config(path=config_file_path, file_name=config_file_name)
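    # The other actions in this set (compute, run, deploy, register model) later
    # load this aml_arm_config.json via Workspace.from_config, as shown in the
    # surrounding examples.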
    print(
        "::debug::Successfully finished Azure Machine Learning Workspace Action"
    )
Example #5
def main():
    # # Loading input values
    print("::debug::Loading input values")

    template_file = os.environ.get("INPUT_ARMTEMPLATE_FILE",
                                   default="deploy.json")
    template_params_file = os.environ.get("INPUT_ARMTEMPLATEPARAMS_FILE",
                                          default="deploy.params.json")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    resource_group = os.environ.get("INPUT_RESOURCE_GROUP",
                                    default="newresource_group")

    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS"
        )
        raise AMLConfigurationException(
            f"Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md"
        )
    print("1--------------------------------------")
    # Checking provided parameters
    print("::debug::Checking provided parameters")
    required_parameters_provided(
        parameters=azure_credentials,
        keys=["tenantId", "clientId", "clientSecret"],
        message=
        "Required parameter(s) not found in your azure credentials saved in AZURE_CREDENTIALS secret for logging in to the workspace. Please provide a value for the following key(s): "
    )
    print("2--------------------------------------")

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))
    print("3--------------------------------------")

    # Loading parameters file
    print("::debug::Loading parameters file")
    template_file_file_path = os.path.join(".cloud", ".azure", template_file)
    template_params_file_path = os.path.join(".cloud", ".azure",
                                             template_params_file)
    print("4--------------------------------------")

    tenant_id = azure_credentials.get("tenantId", "")
    service_principal_id = azure_credentials.get("clientId", "")
    service_principal_password = azure_credentials.get("clientSecret", "")

    command = (
        "az login --service-principal --username {APP_ID} --password \"{PASSWORD}\" --tenant {TENANT_ID}"
    ).format(APP_ID=service_principal_id,
             PASSWORD=service_principal_password,
             TENANT_ID=tenant_id)
    try:
        app_create = subprocess.check_output(command, shell=True)
        print(app_create)
    except Exception as ex:
        print(ex)
    print(
        deploy_functionApp(template_file_file_path, template_params_file_path,
                           resource_group))
Example #6
def main():
    # # Loading input values
    # print("::debug::Loading input values")
    template_file = os.environ.get("INPUT_ARMTEMPLATE_FILE",
                                   default="deploy.json")
    template_params_file = os.environ.get("INPUT_ARMTEMPLATEPARAMS_FILE",
                                          default="deploy.params.json")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    resource_group = os.environ.get("INPUT_RESOURCE_GROUP",
                                    default="newresource_group")
    repo_PatToken = os.environ.get("INPUT_PATTOKEN", default="")
    self_repoName = os.environ.get("INPUT_GITREPO", default="")

    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS"
        )
        raise AMLConfigurationException(
            f"Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md"
        )

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    required_parameters_provided(
        parameters=azure_credentials,
        keys=["tenantId", "clientId", "clientSecret"],
        message=
        "Required parameter(s) not found in your azure credentials saved in AZURE_CREDENTIALS secret for logging in to the workspace. Please provide a value for the following key(s): "
    )

    # # Loading parameters file
    # print("::debug::Loading parameters file")
    template_file_file_path = os.path.join(".cloud", ".azure", template_file)
    template_params_file_path = os.path.join(".cloud", ".azure",
                                             template_params_file)

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    #mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Login User on CLI
    tenant_id = azure_credentials.get("tenantId", "")
    service_principal_id = azure_credentials.get("clientId", "")
    service_principal_password = azure_credentials.get("clientSecret", "")
    subscriptionId = azure_credentials.get("subscriptionId", "")
    command = (
        "az login --service-principal --username {APP_ID} --password \"{PASSWORD}\" --tenant {TENANT_ID}"
    ).format(APP_ID=service_principal_id,
             PASSWORD=service_principal_password,
             TENANT_ID=tenant_id)
    try:
        login_result = subprocess.check_output(command, shell=True)
        print(login_result)
    except Exception as ex:
        print(ex)
        return

    success = False
    try:
        jsonobject = None
        with open(template_params_file_path, "r") as f:
            jsonobject = json.load(f)
        jsonobject["parameters"]["subscriptionID"]["value"] = subscriptionId
        jsonobject["parameters"]["repo_name"]["value"] = self_repoName
        jsonobject["parameters"]["pat_token"]["value"] = repo_PatToken
        with open(template_params_file_path, "w") as f:
            json.dump(jsonobject, f)
        success = True
    except Exception as ex:
        print(f"::error::Error while updating parameters: {ex}")
        return
    if success:
        print(
            deploy_functionApp(template_file_file_path,
                               template_params_file_path, resource_group))
Example #7
def main():
    # Loading azure credentials
    print("::debug::Loading azure credentials")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS. The JSON should include the following keys: 'tenantId', 'clientId', 'clientSecret' and 'subscriptionId'."
        )
        raise AMLConfigurationException(
            "Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md"
        )

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=azure_credentials,
                  schema=azure_credentials_schema,
                  input_name="AZURE_CREDENTIALS")

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Loading parameters file
    print("::debug::Loading parameters file")
    parameters_file = os.environ.get("INPUT_PARAMETERS_FILE",
                                     default="compute.json")
    parameters_file_path = os.path.join(".cloud", ".azure", parameters_file)
    try:
        with open(parameters_file_path) as f:
            parameters = json.load(f)
    except FileNotFoundError:
        print(
            f"::debug::Could not find parameter file in {parameters_file_path}. Please provide a parameter file in your repository if you do not want to use default settings (e.g. .cloud/.azure/compute.json)."
        )
        parameters = {}

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=parameters,
                  schema=parameters_schema,
                  input_name="PARAMETERS_FILE")

    # Define target cloud
    if azure_credentials.get(
            "resourceManagerEndpointUrl",
            "").startswith("https://management.usgovcloudapi.net"):
        cloud = "AzureUSGovernment"
    elif azure_credentials.get(
            "resourceManagerEndpointUrl",
            "").startswith("https://management.chinacloudapi.cn"):
        cloud = "AzureChinaCloud"
    else:
        cloud = "AzureCloud"

    # Loading Workspace
    print("::debug::Loading AML Workspace")
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=azure_credentials.get("tenantId", ""),
        service_principal_id=azure_credentials.get("clientId", ""),
        service_principal_password=azure_credentials.get("clientSecret", ""),
        cloud=cloud)
    config_file_path = os.environ.get("GITHUB_WORKSPACE",
                                      default=".cloud/.azure")
    config_file_name = "aml_arm_config.json"
    try:
        ws = Workspace.from_config(path=config_file_path,
                                   _file_name=config_file_name,
                                   auth=sp_auth)
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}"
        )
        raise AuthenticationException
    except AuthenticationError as exception:
        print(f"::error::Microsoft REST Authentication Error: {exception}")
        raise AuthenticationError
    except AdalError as exception:
        print(
            f"::error::Active Directory Authentication Library Error: {exception}"
        )
        raise AdalError
    except ProjectSystemException as exception:
        print(f"::error::Workspace authorizationfailed: {exception}")
        raise ProjectSystemException

    # Loading compute target
    try:
        # Default compute target name
        repository_name = os.environ.get("GITHUB_REPOSITORY").split(
            "/")[-1][:16]  # names can be max 16 characters

        print("::debug::Loading existing compute target")
        compute_target = ComputeTarget(workspace=ws,
                                       name=parameters.get(
                                           "name", repository_name))
        print(
            f"::debug::Found compute target with same name. Not updating the compute target: {compute_target.serialize()}"
        )
    except ComputeTargetException:
        print(
            "::debug::Could not find existing compute target with provided name"
        )

        # Checking provided parameters
        print("::debug::Checking provided parameters")
        required_parameters_provided(
            parameters=parameters,
            keys=["compute_type"],
            message=
            "Required parameter(s) not found in your parameters file for creating a compute target. Please provide a value for the following key(s): "
        )

        print("::debug::Creating new compute target")
        compute_type = parameters.get("compute_type", "")
        print(f"::debug::Compute type listed is{compute_type}")
        if compute_type == "amlcluster":
            compute_target = create_aml_cluster(workspace=ws,
                                                parameters=parameters)
            print(
                f"::debug::Successfully created AML cluster: {compute_target.serialize()}"
            )
        elif compute_type == "akscluster":
            compute_target = create_aks_cluster(workspace=ws,
                                                parameters=parameters)
            print(
                f"::debug::Successfully created AKS cluster: {compute_target.serialize()}"
            )
        else:
            print(f"::error::Compute type '{compute_type}' is not supported")
            raise AMLConfigurationException(
                f"Compute type '{compute_type}' is not supported.")
    print(
        "::debug::Successfully finished Azure Machine Learning Compute Action")
Example #8
def main():
    # Loading azure credentials
    print("::debug::Loading azure credentials")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS"
        )
        raise AMLConfigurationException(
            "Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md"
        )

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=azure_credentials,
                  schema=azure_credentials_schema,
                  input_name="AZURE_CREDENTIALS")

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Loading parameters file
    print("::debug::Loading parameters file")
    parameters_file = os.environ.get("INPUT_PARAMETERS_FILE",
                                     default="run.json")
    parameters_file_path = os.path.join(".cloud", ".azure", parameters_file)
    try:
        with open(parameters_file_path) as f:
            parameters = json.load(f)
    except FileNotFoundError:
        print(
            f"::debug::Could not find parameter file in {parameters_file_path}. Please provide a parameter file in your repository if you do not want to use default settings (e.g. .cloud/.azure/run.json)."
        )
        parameters = {}

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(data=parameters,
                  schema=parameters_schema,
                  input_name="PARAMETERS_FILE")

    # Define target cloud
    if azure_credentials.get(
            "resourceManagerEndpointUrl",
            "").startswith("https://management.usgovcloudapi.net"):
        cloud = "AzureUSGovernment"
    elif azure_credentials.get(
            "resourceManagerEndpointUrl",
            "").startswith("https://management.chinacloudapi.cn"):
        cloud = "AzureChinaCloud"
    else:
        cloud = "AzureCloud"

    # Loading Workspace
    print("::debug::Loading AML Workspace")
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=azure_credentials.get("tenantId", ""),
        service_principal_id=azure_credentials.get("clientId", ""),
        service_principal_password=azure_credentials.get("clientSecret", ""),
        cloud=cloud)
    config_file_path = os.environ.get("GITHUB_WORKSPACE",
                                      default=".cloud/.azure")
    config_file_name = "aml_arm_config.json"
    try:
        ws = Workspace.from_config(path=config_file_path,
                                   _file_name=config_file_name,
                                   auth=sp_auth)
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}"
        )
        raise AuthenticationException
    except AuthenticationError as exception:
        print(f"::error::Microsoft REST Authentication Error: {exception}")
        raise AuthenticationError
    except AdalError as exception:
        print(
            f"::error::Active Directory Authentication Library Error: {exception}"
        )
        raise AdalError
    except ProjectSystemException as exception:
        print(f"::error::Workspace authorizationfailed: {exception}")
        raise ProjectSystemException

    # Create experiment
    print("::debug::Creating experiment")
    try:
        # Default experiment name
        repository_name = os.environ.get("GITHUB_REPOSITORY").split("/")[-1]
        branch_name = os.environ.get("GITHUB_REF").split("/")[-1]
        default_experiment_name = f"{repository_name}-{branch_name}"

        experiment = Experiment(
            workspace=ws,
            name=parameters.get("experiment_name",
                                default_experiment_name)[:36])
    except TypeError as exception:
        experiment_name = parameters.get("experiment", None)
        print(
            f"::error::Could not create an experiment with the specified name {experiment_name}: {exception}"
        )
        raise AMLExperimentConfigurationException(
            f"Could not create an experiment with the specified name {experiment_name}: {exception}"
        )
    except UserErrorException as exception:
        experiment_name = parameters.get("experiment", None)
        print(
            f"::error::Could not create an experiment with the specified name {experiment_name}: {exception}"
        )
        raise AMLExperimentConfigurationException(
            f"Could not create an experiment with the specified name {experiment_name}: {exception}"
        )

    # Loading run config
    print("::debug::Loading run config")
    run_config = None
    if run_config is None:
        # Loading run config from runconfig yaml file
        print("::debug::Loading run config from runconfig yaml file")
        run_config = load_runconfig_yaml(runconfig_yaml_file=parameters.get(
            "runconfig_yaml_file", "code/train/run_config.yml"))
    if run_config is None:
        # Loading run config from pipeline yaml file
        print("::debug::Loading run config from pipeline yaml file")
        run_config = load_pipeline_yaml(workspace=ws,
                                        pipeline_yaml_file=parameters.get(
                                            "pipeline_yaml_file",
                                            "code/train/pipeline.yml"))
    if run_config is None:
        # Loading run config from python runconfig file
        print("::debug::Loading run config from python runconfig file")
        run_config = load_runconfig_python(
            workspace=ws,
            runconfig_python_file=parameters.get("runconfig_python_file",
                                                 "code/train/run_config.py"),
            runconfig_python_function_name=parameters.get(
                "runconfig_python_function_name", "main"))
    if run_config is None:
        # Loading values for errors
        pipeline_yaml_file = parameters.get("pipeline_yaml_file",
                                            "code/train/pipeline.yml")
        runconfig_yaml_file = parameters.get("runconfig_yaml_file",
                                             "code/train/run_config.yml")
        runconfig_python_file = parameters.get("runconfig_python_file",
                                               "code/train/run_config.py")
        runconfig_python_function_name = parameters.get(
            "runconfig_python_function_name", "main")

        print(
            f"::error::Error when loading runconfig yaml definition in your repository (Path: /{runconfig_yaml_file})."
        )
        print(
            f"::error::Error when loading pipeline yaml definition in your repository (Path: /{pipeline_yaml_file})."
        )
        print(
            f"::error::Error when loading python script or function in your repository which defines the experiment config (Script path: '/{runconfig_python_file}', Function: '{runconfig_python_function_name}()')."
        )
        print(
            "::error::You have to provide either a yaml definition for your run, a yaml definition of your pipeline, or a python script that returns a runconfig (Pipeline, ScriptRunConfig, AutoMLConfig, Estimator, etc.). Please read the documentation for more details."
        )
        raise AMLExperimentConfigurationException(
            "You have to provide either a yaml definition for your run, a yaml definition of your pipeline, or a python script that returns a runconfig. Please read the documentation for more details."
        )

    # Submit run config
    print("::debug::Submitting experiment config")
    try:
        # Defining default tags
        print("::debug::Defining default tags")
        default_tags = {
            "GITHUB_ACTOR": os.environ.get("GITHUB_ACTOR"),
            "GITHUB_REPOSITORY": os.environ.get("GITHUB_REPOSITORY"),
            "GITHUB_SHA": os.environ.get("GITHUB_SHA"),
            "GITHUB_REF": os.environ.get("GITHUB_REF")
        }

        run = experiment.submit(config=run_config,
                                tags=dict(parameters.get("tags", {}),
                                          **default_tags))
    except AzureMLException as exception:
        print(
            f"::error::Could not submit experiment config. Your script passed object of type {type(run_config)}. Object must be correctly configured and of type e.g. estimator, pipeline, etc.: {exception}"
        )
        raise AMLExperimentConfigurationException(
            f"Could not submit experiment config. Your script passed object of type {type(run_config)}. Object must be correctly configured and of type e.g. estimator, pipeline, etc.: {exception}"
        )
    except TypeError as exception:
        print(
            f"::error::Could not submit experiment config. Your script passed object of type {type(run_config)}. Object must be correctly configured and of type e.g. estimator, pipeline, etc.: {exception}"
        )
        raise AMLExperimentConfigurationException(
            f"Could not submit experiment config. Your script passed object of type {type(run_config)}. Object must be correctly configured and of type e.g. estimator, pipeline, etc.: {exception}"
        )

    # Create outputs
    print("::debug::Creating outputs")
    print(f"::set-output name=experiment_name::{run.experiment.name}")
    print(f"::set-output name=run_id::{run.id}")
    print(f"::set-output name=run_url::{run.get_portal_url()}")

    # Waiting for run to complete
    print("::debug::Waiting for run to complete")
    if parameters.get("wait_for_completion", True):
        run.wait_for_completion(show_output=True)

        # Creating additional outputs of finished run
        run_metrics = run.get_metrics(recursive=True)
        print(f"::set-output name=run_metrics::{run_metrics}")
        run_metrics_markdown = convert_to_markdown(run_metrics)
        print(
            f"::set-output name=run_metrics_markdown::{run_metrics_markdown}")

        # Download artifacts if enabled
        if parameters.get("download_artifacts", False):
            # Defining artifacts folder
            print("::debug::Defining artifacts folder")
            root_path = os.environ.get("GITHUB_WORKSPACE", default=None)
            folder_name = f"aml_artifacts_{run.id}"
            artifact_path = os.path.join(root_path, folder_name)

            # Downloading artifacts
            print("::debug::Downloading artifacts")
            run.download_files(
                output_directory=os.path.join(artifact_path, "parent"))
            children = run.get_children(recursive=True)
            for i, child in enumerate(children):
                child.download_files(
                    output_directory=os.path.join(artifact_path, f"child_{i}"))

            # Creating additional outputs
            print(f"::set-output name=artifact_path::{artifact_path}")

    # Publishing pipeline
    print("::debug::Publishing pipeline")
    if type(run) is PipelineRun and parameters.get("publish_pipeline", False):
        # Default pipeline name
        repository_name = os.environ.get("GITHUB_REPOSITORY").split("/")[-1]
        branch_name = os.environ.get("GITHUB_REF").split("/")[-1]
        default_pipeline_name = f"{repository_name}-{branch_name}"

        published_pipeline = run.publish_pipeline(
            name=parameters.get("pipeline_name", default_pipeline_name),
            description="Pipeline registered by GitHub Run Action",
            version=parameters.get("pipeline_version", None),
            continue_on_step_failure=parameters.get(
                "pipeline_continue_on_step_failure", False))

        # Creating additional outputs
        print(
            f"::set-output name=published_pipeline_id::{published_pipeline.id}"
        )
        print(
            f"::set-output name=published_pipeline_status::{published_pipeline.status}"
        )
        print(
            f"::set-output name=published_pipeline_endpoint::{published_pipeline.endpoint}"
        )
    elif parameters.get("publish_pipeline", False):
        print(
            "::error::Could not register pipeline because you did not pass a pipeline to the action"
        )

    print("::debug::Successfully finished Azure Machine Learning Train Action")
Beispiel #9
0
def main():
    # # Loading input values
    # print("::debug::Loading input values")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default='{}')
    resource_group = os.environ.get("INPUT_RESOURCE_GROUP", default="")
    pattoken = os.environ.get("INPUT_PATTOKEN", default="")
    provider_type = os.environ.get("INPUT_PROVIDER_TYPE", default="")
    events_to_subscribe = os.environ.get("INPUT_EVENTS_TO_SUBSCRIBE",
                                         default="")

    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print(
            "::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS"
        )
        raise AMLConfigurationException(
            f"Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md"
        )

    if not resource_group:
        raise AMLConfigurationException(f"A resource group must be provided")

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    required_parameters_provided(
        parameters=azure_credentials,
        keys=["tenantId", "clientId", "clientSecret"],
        message=
        "Required parameter(s) not found in your azure credentials saved in AZURE_CREDENTIALS secret for logging in to the workspace. Please provide a value for the following key(s): "
    )

    # # Loading parameters file
    # print("::debug::Loading parameters file")

    template_file_file_path = os.path.join("/code", "func_deploy.json")

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Login User on CLI
    tenant_id = azure_credentials.get("tenantId", "")
    service_principal_id = azure_credentials.get("clientId", "")
    service_principal_password = azure_credentials.get("clientSecret", "")
    subscriptionId = azure_credentials.get("subscriptionId", "")

    credentials = None
    try:
        credentials = ServicePrincipalCredentials(
            client_id=service_principal_id,
            secret=service_principal_password,
            tenant=tenant_id)
    except Exception as ex:
        raise CredentialsVerificationError(ex)

    ####################### Authentication Done ###################################

    # repository name
    repository_name = os.environ.get("GITHUB_REPOSITORY",
                                     "azureeventgridsample")
    functionAppName = repository_name.replace(
        "/", "")  # create a unique function app name
    functionAppName = functionAppName.replace("_", "").replace("-", "")[:32]
    functionFolder = 'fappdeploy'
    functionGitHubURL = "https://github.com/Ayaz43/function_app.git"
    functionGitHubBranch = "master"
    functionName = "generic_triggers"
    patToken = pattoken
    parameters = {
        'functionAppName': functionAppName,
        'functionFolder': functionFolder,
        'functionGitHubURL': functionGitHubURL,
        'functionGitHubBranch': functionGitHubBranch,
        'patToken': patToken,
        'ownerName': functionAppName
    }

    parameters = {k: {'value': v} for k, v in parameters.items()}

    client = None
    try:
        client = ResourceManagementClient(credentials, subscriptionId)
    except Exception as ex:
        raise ResourceManagementError(ex)

    template = None
    with open(template_file_file_path, 'r') as template_file_fd:
        template = json.load(template_file_fd)

    deployment_properties = {
        'properties': {
            'mode': DeploymentMode.incremental,
            'template': template,
            'parameters': parameters
        }
    }

    try:
        validate = client.deployments.validate(resource_group, "azure-sample",
                                               deployment_properties)
        validate.wait()

    except Exception as ex:
        raise ActionDeploymentError(ex)
    try:
        deployment_async_operation = client.deployments.create_or_update(
            resource_group, 'azure-sample', deployment_properties)
        deployment_async_operation.wait()
    except Exception as ex:
        raise ActionDeploymentError(ex)

    deployment_result = deployment_async_operation.result()

    # Read deployment outputs
    code = deployment_result.properties.outputs['hostKey']['value']
    functionAppName = deployment_result.properties.outputs['functionAppName'][
        'value']

    function_url = "https://{}.azurewebsites.net/api/{}?code={}&repoName={}".format(
        functionAppName, functionName, code, repository_name)
    resource_id = "/subscriptions/{}/resourceGroups/{}/providers/{}".format(
        subscriptionId, resource_group, provider_type)

    event_grid_client = EventGridManagementClient(credentials, subscriptionId)
    event_subscription_name = 'EventSubscription1'

    destination = WebHookEventSubscriptionDestination(
        endpoint_url=function_url)

    included_events = get_events_list(events_to_subscribe)
    event_filter = EventSubscriptionFilter(
        # By default, "All" event types are included
        included_event_types=included_events,
        is_subject_case_sensitive=False,
        subject_begins_with='',
        subject_ends_with='')

    event_subscription_info = EventSubscription(destination=destination,
                                                filter=event_filter)

    event_subscription_async_poller = event_grid_client.event_subscriptions.create_or_update(
        resource_id,
        event_subscription_name,
        event_subscription_info,
    )

    event_subscription = event_subscription_async_poller.result()  # type: EventSubscription
    print(
        f"::set-output name=destination_url::{event_subscription.destination.endpoint_base_url}"
    )
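
The get_events_list helper used above is not shown. A minimal sketch, assuming INPUT_EVENTS_TO_SUBSCRIBE is a comma-separated list of event type names and that an empty input should fall back to the "All" event types default mentioned in the filter comment:

def get_events_list(events_to_subscribe):
    # Hypothetical helper: split the comma-separated input into event type
    # names; return None for empty input so EventSubscriptionFilter keeps
    # its default of including all event types.
    events = [event.strip() for event in events_to_subscribe.split(",") if event.strip()]
    return events if events else None
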
Beispiel #10
0
def main():
    # Loading input values
    print("::debug::Loading input values")
    parameters_file = os.environ.get("INPUT_PARAMETERS_FILE", default="run.json")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print("::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS")
        raise AMLConfigurationException(f"Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md")

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    required_parameters_provided(
        parameters=azure_credentials,
        keys=["tenantId", "clientId", "clientSecret"],
        message="Required parameter(s) not found in your azure credentials saved in AZURE_CREDENTIALS secret for logging in to the workspace. Please provide a value for the following key(s): "
    )

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Loading parameters file
    print("::debug::Loading parameters file")
    parameters_file_path = os.path.join(".cloud", ".azure", parameters_file)
    try:
        with open(parameters_file_path) as f:
            parameters = json.load(f)
    except FileNotFoundError:
        print(f"::debug::Could not find parameter file in {parameters_file_path}. Please provide a parameter file in your repository if you do not want to use default settings (e.g. .cloud/.azure/run.json).")
        parameters = [{}]  # We want to run at least once with default values.

    # Loading Workspace
    print("::debug::Loading AML Workspace")
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=azure_credentials.get("tenantId", ""),
        service_principal_id=azure_credentials.get("clientId", ""),
        service_principal_password=azure_credentials.get("clientSecret", "")
    )
    config_file_path = os.environ.get("GITHUB_WORKSPACE", default=".cloud/.azure")
    config_file_name = "aml_arm_config.json"
    try:
        ws = Workspace.from_config(
            path=config_file_path,
            _file_name=config_file_name,
            auth=sp_auth
        )
    except AuthenticationException as exception:
        print(f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}")
        raise AuthenticationException
    except AuthenticationError as exception:
        print(f"::error::Microsoft REST Authentication Error: {exception}")
        raise AuthenticationError
    except AdalError as exception:
        print(f"::error::Active Directory Authentication Library Error: {exception}")
        raise AdalError
    except ProjectSystemException as exception:
        print(f"::error::Workspace authorizationfailed: {exception}")
        raise ProjectSystemException

    submittedRuns_for_wait = []
    for parameter in parameters:
        run, wait_for_completion = submitRun(ws, parameter)

        # Collect the submitted runs so we can wait for them to complete later.
        if wait_for_completion is True:
            submittedRuns_for_wait.append(run)

    postRun(submittedRuns_for_wait)
    print("submission over")
Beispiel #11
0
def main():
    # Loading azure credentials
    print("::debug::Loading azure credentials")
    azure_credentials = os.environ.get("INPUT_AZURE_CREDENTIALS", default="{}")
    try:
        azure_credentials = json.loads(azure_credentials)
    except JSONDecodeError:
        print("::error::Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS")
        raise AMLConfigurationException(
            "Incorrect or poorly formed output from azure credentials saved in AZURE_CREDENTIALS secret. See setup in https://github.com/Azure/aml-workspace/blob/master/README.md")

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(
        data=azure_credentials,
        schema=azure_credentials_schema,
        input_name="AZURE_CREDENTIALS"
    )

    # Mask values
    print("::debug::Masking parameters")
    mask_parameter(parameter=azure_credentials.get("tenantId", ""))
    mask_parameter(parameter=azure_credentials.get("clientId", ""))
    mask_parameter(parameter=azure_credentials.get("clientSecret", ""))
    mask_parameter(parameter=azure_credentials.get("subscriptionId", ""))

    # Loading parameters file
    print("::debug::Loading parameters file")
    parameters_file = os.environ.get(
        "INPUT_PARAMETERS_FILE", default="run.json")
    parameters_file_path = os.path.join(".cloud", ".azure", parameters_file)
    try:
        with open(parameters_file_path) as f:
            parameters = json.load(f)
    except FileNotFoundError:
        print(
            f"::debug::Could not find parameter file in {parameters_file_path}. Please provide a parameter file in your repository if you do not want to use default settings (e.g. .cloud/.azure/run.json).")
        parameters = {}

    # Checking provided parameters
    print("::debug::Checking provided parameters")
    validate_json(
        data=parameters,
        schema=parameters_schema,
        input_name="PARAMETERS_FILE"
    )

    # Define target cloud
    if azure_credentials.get("resourceManagerEndpointUrl", "").startswith("https://management.usgovcloudapi.net"):
        cloud = "AzureUSGovernment"
    elif azure_credentials.get("resourceManagerEndpointUrl", "").startswith("https://management.chinacloudapi.cn"):
        cloud = "AzureChinaCloud"
    else:
        cloud = "AzureCloud"

    # Loading Workspace
    print("::debug::Loading AML Workspace")
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=azure_credentials.get("tenantId", ""),
        service_principal_id=azure_credentials.get("clientId", ""),
        service_principal_password=azure_credentials.get("clientSecret", ""),
        cloud=cloud
    )
    config_file_path = os.environ.get(
        "GITHUB_WORKSPACE", default=".cloud/.azure")
    config_file_name = "aml_arm_config.json"
    try:
        ws = Workspace.from_config(
            path=config_file_path,
            _file_name=config_file_name,
            auth=sp_auth
        )
    except AuthenticationException as exception:
        print(
            f"::error::Could not retrieve user token. Please paste output of `az ad sp create-for-rbac --name <your-sp-name> --role contributor --scopes /subscriptions/<your-subscriptionId>/resourceGroups/<your-rg> --sdk-auth` as value of secret variable: AZURE_CREDENTIALS: {exception}")
        raise AuthenticationException
    except AuthenticationError as exception:
        print(f"::error::Microsoft REST Authentication Error: {exception}")
        raise AuthenticationError
    except AdalError as exception:
        print(
            f"::error::Active Directory Authentication Library Error: {exception}")
        raise AdalError
    except ProjectSystemException as exception:
        print(f"::error::Workspace authorizationfailed: {exception}")
        raise ProjectSystemException

    # Create experiment
    print("::debug::Creating experiment")
    try:
        # Default experiment name
        repository_name = os.environ.get("GITHUB_REPOSITORY").split("/")[-1]
        branch_name = os.environ.get("GITHUB_REF").split("/")[-1]
        default_experiment_name = f"{repository_name}-{branch_name}"

        experiment = Experiment(
            workspace=ws,
            name=parameters.get("experiment_name",
                                default_experiment_name)[:36]
        )
    except TypeError as exception:
        experiment_name = parameters.get("experiment", None)
        print(
            f"::error::Could not create an experiment with the specified name {experiment_name}: {exception}")
        raise AMLExperimentConfigurationException(
            f"Could not create an experiment with the specified name {experiment_name}: {exception}")
    except UserErrorException as exception:
        experiment_name = parameters.get("experiment", None)
        print(
            f"::error::Could not create an experiment with the specified name {experiment_name}: {exception}")
        raise AMLExperimentConfigurationException(
            f"Could not create an experiment with the specified name {experiment_name}: {exception}")

    # Reading the dataset
    print("::debug::Reading the dataset")
    ds_workspace = Workspace(
        'aeefecca-6f22-4523-93ba-bd49686ea0ce', 'mshack-azureml', 'ms-hack')
    train = Dataset.get_by_name(ds_workspace, name='HistogramTrain')
    test = Dataset.get_by_name(ds_workspace, name='HistogramTest')
    label_column_name = 'class'

    # Setting AutoML config
    print("::debug::Setting AutoML config")

    automl_settings = {
        "primary_metric": 'accuracy',
        "verbosity": logging.INFO,
        "enable_stack_ensemble": True
    }
    compute_target = ComputeTarget(ws, 'githubcluster')
    automl_config = AutoMLConfig(task='classification',
                                 debug_log='automl_errors.log',
                                 compute_target=compute_target,
                                 training_data=train,
                                 validation_data=test,
                                 experiment_timeout_hours=.25,
                                 label_column_name=label_column_name,
                                 **automl_settings
                                 )

    # Submit run config
    print("::debug::Submitting experiment config")
    try:
        # Defining default tags
        print("::debug::Defining default tags")
        default_tags = {
            "GITHUB_ACTOR": os.environ.get("GITHUB_ACTOR"),
            "GITHUB_REPOSITORY": os.environ.get("GITHUB_REPOSITORY"),
            "GITHUB_SHA": os.environ.get("GITHUB_SHA"),
            "GITHUB_REF": os.environ.get("GITHUB_REF")
        }

        run = experiment.submit(
            automl_config,
            tags=dict(parameters.get("tags", {}), **default_tags)
        )
        best_run, fitted_model = run.get_output(metric='accuracy')
    except AzureMLException as exception:
        print(
            f"::error::Could not submit experiment config. Your script passed object of type {type(automl_config)}. Object must be correctly configured and of type e.g. estimator, pipeline, etc.: {exception}")
        raise AMLExperimentConfigurationException(
            f"Could not submit experiment config. Your script passed object of type {type(automl_config)}. Object must be correctly configured and of type e.g. estimator, pipeline, etc.: {exception}")
    except TypeError as exception:
        print(
            f"::error::Could not submit experiment config. Your script passed object of type {type(automl_config)}. Object must be correctly configured and of type e.g. estimator, pipeline, etc.: {exception}")
        raise AMLExperimentConfigurationException(
            f"Could not submit experiment config. Your script passed object of type {type(automl_config)}. Object must be correctly configured and of type e.g. estimator, pipeline, etc.: {exception}")

    # Create outputs
    print("::debug::Creating outputs")
    print(f"::set-output name=experiment_name::{best_run.experiment.name}")
    print(f"::set-output name=run_id::{best_run.id}")
    print(f"::set-output name=run_url::{best_run.get_portal_url()}")

    # Waiting for run to complete
    print("::debug::Waiting for run to complete")
    if parameters.get("wait_for_completion", True):
        run.wait_for_completion(show_output=True)

        # Creating additional outputs of finished run
        run_metrics = run.get_metrics() if type(
            run) is HyperDriveRun else run.get_metrics(recursive=True)
        # run_metrics = run.get_metrics(recursive=True)  # Not working at the moment because HyperDriveRun throws an error
        print(f"::set-output name=run_metrics::{run_metrics}")
        run_metrics_markdown = convert_to_markdown(run_metrics)
        print(
            f"::set-output name=run_metrics_markdown::{run_metrics_markdown}")

        # Download artifacts if enabled
        if parameters.get("download_artifacts", False):
            # Defining artifacts folder
            print("::debug::Defining artifacts folder")
            root_path = os.environ.get("GITHUB_WORKSPACE", default=None)
            folder_name = f"aml_artifacts_{run.id}"
            artifact_path = os.path.join(root_path, folder_name)

            # Downloading artifacts
            print("::debug::Downloading artifacts")
            run.download_files(
                output_directory=os.path.join(artifact_path, "parent"))
            children = run.get_children(recursive=True)
            for i, child in enumerate(children):
                child.download_files(output_directory=os.path.join(
                    artifact_path, f"child_{i}"))

            # Creating additional outputs
            print(f"::set-output name=artifact_path::{artifact_path}")

    # Publishing pipeline
    print("::debug::Publishing pipeline")
    if type(run) is PipelineRun and parameters.get("pipeline_publish", False):
        # Default pipeline name
        repository_name = os.environ.get("GITHUB_REPOSITORY").split("/")[-1]
        branch_name = os.environ.get("GITHUB_REF").split("/")[-1]
        default_pipeline_name = f"{repository_name}-{branch_name}"

        published_pipeline = run.publish_pipeline(
            name=parameters.get("pipeline_name", default_pipeline_name),
            description="Pipeline registered by GitHub Run Action",
            version=parameters.get("pipeline_version", None),
            continue_on_step_failure=parameters.get(
                "pipeline_continue_on_step_failure", False)
        )

        # Creating additional outputs
        print(
            f"::set-output name=published_pipeline_id::{published_pipeline.id}")
        print(
            f"::set-output name=published_pipeline_status::{published_pipeline.status}")
        print(
            f"::set-output name=published_pipeline_endpoint::{published_pipeline.endpoint}")
    elif parameters.get("pipeline_publish", False):
        print("::error::Could not register pipeline because you did not pass a pipeline to the action")

    print("::debug::Successfully finished Azure Machine Learning Train Action")