Example #1
def deploy_webservice_from_image(amls_config, workspace, image):
    """
    Deploy an AMLS docker image in AMLS' ACI

    :param amls_config:
    :param workspace:
    :param image:
    :return:
    """
    aciconfig = AciWebservice.deploy_configuration(
        cpu_cores=1,
        memory_gb=1,
        tags=amls_config['tags'],
        description=amls_config['description'])

    try:
        Webservice(workspace=workspace, name=amls_config['name']) \
            .delete()
        logger.info(f"Deleted existing webservice {amls_config['name']}")
    except WebserviceException:
        # No need to delete
        pass

    logger.info(f"Creating webservice {amls_config['name']}")
    service = Webservice.deploy_from_image(deployment_config=aciconfig,
                                           image=image,
                                           name=amls_config['name'],
                                           workspace=workspace)
    service.wait_for_deployment(show_output=True)
    return service
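A minimal usage sketch for the helper above (not part of the original snippet); the workspace config, image name and amls_config values are placeholders chosen to match the keys the function reads.

from azureml.core import Workspace
from azureml.core.image import Image

ws = Workspace.from_config()                    # assumes config.json in the working directory
image = Image(ws, name='my-scoring-image')      # hypothetical, previously built image
amls_config = {
    'name': 'my-aci-service',                   # hypothetical service name
    'tags': {'stage': 'dev'},
    'description': 'Example ACI deployment',
}
service = deploy_webservice_from_image(amls_config, ws, image)
print(service.scoring_uri)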
Example #2
def update_service(aml_interface):
    inference_config = get_inference_config(aml_interface)
    service = Webservice(name=DEPLOYMENT_SERVICE_NAME,
                         workspace=aml_interface.workspace)
    model = aml_interface.workspace.models.get(MODEL_NAME)
    service.update(models=[model], inference_config=inference_config)
    print(service.state)
    print(service.scoring_uri)
Example #3
def deploy_service(ws, model, inference_config, service_name, compute_target):
    tags = {'model': '{}:{}'.format(model.name, model.version)}

    try:
        service = Webservice(ws, service_name)
        print("Service {} exists, update it".format(service_name))
        service.update(models=[model],
                       inference_config=inference_config,
                       tags=tags)
    except Exception:
        print('deploy a new service {}'.format(service_name))
        deployment_config = AksWebservice.deploy_configuration(
            cpu_cores=1,
            memory_gb=2,
            tags=tags,
            collect_model_data=True,
            enable_app_insights=True)
        service = Model.deploy(ws, service_name, [model], inference_config,
                               deployment_config, compute_target)

    service.wait_for_deployment(show_output=True)

    token = None
    if service.auth_enabled:
        token = service.get_keys()[0]
    elif service.token_auth_enabled:
        token = service.get_token()[0]

    return service.scoring_uri, token
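A short sketch (not from the original project) of calling the endpoint returned by deploy_service above; ws, model, inference_config and compute_target are assumed to come from earlier setup, and the payload shape is a placeholder.

import json
import requests

scoring_uri, token = deploy_service(ws, model, inference_config,
                                    'my-aks-service', compute_target)
headers = {'Content-Type': 'application/json',
           'Authorization': 'Bearer {}'.format(token)}
payload = json.dumps({'data': [[0.1, 0.2, 0.3]]})   # hypothetical feature row
response = requests.post(scoring_uri, data=payload, headers=headers)
print(response.status_code, response.text)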
Example #4
def createWebservice(workspace, container_image, service_name, replica_count,
                     cores_count, compute_target):
    '''
        TODO: Should allow for an overwrite flag.

        Attach an azureml.core.webservice.Webservice for a given container image on an AKS cluster.

        If a Webservice already exists (by name) in the given workspace, it is returned instead.


        PARAMS:
            workspace        : azureml.core.Workspace               : Existing AMLS workspace
            container_image  : azureml.core.image.ContainerImage    : Existing container image to deploy
            service_name     : String                               : Name of the webservice (deployment) in the AMLS workspace.
            replica_count    : int                                  : Number of requested instances of the container on the cluster.
            cores_count      : int                                  : Number of cores to allocate to each container.
            compute_target   : azureml.core.compute.AksCompute      : AKS cluster to create the service on.

        RETURNS:
            azureml.core.webservice.Webservice

    '''
    web_service = None

    services = Webservice.list(workspace=workspace,
                               image_name=container_image.name)
    if len(services) > 0:
        for svc in services:
            if svc.name == service_name:
                print("Returning existing deployed web service ....",
                      service_name)
                web_service = svc
                break

    if web_service is None:
        print("Creating new web service.....", service_name)
        aks_config = AksWebservice.deploy_configuration(
            num_replicas=replica_count, cpu_cores=cores_count)

        web_service = Webservice.deploy_from_image(
            workspace=workspace,
            name=service_name,
            image=container_image,
            deployment_config=aks_config,
            deployment_target=compute_target,
        )

        web_service.wait_for_deployment(show_output=True)

    return web_service
Example #5
def main():
    # get workspace
    ws = load_workspace()
    model = Model.register(ws,
                           model_name='pytorch_mnist',
                           model_path='model.pth')

    # create dep file
    myenv = CondaDependencies()
    myenv.add_pip_package('numpy')
    myenv.add_pip_package('torch')
    with open('pytorchmnist.yml', 'w') as f:
        print('Writing out {}'.format('pytorchmnist.yml'))
        f.write(myenv.serialize_to_string())
        print('Done!')

    # create image
    image_config = ContainerImage.image_configuration(
        execution_script="score.py",
        runtime="python",
        conda_file="pytorchmnist.yml",
        dependencies=['./models.py'])

    image = Image.create(ws, 'pytorchmnist', [model], image_config)
    image.wait_for_creation(show_output=True)

    # create service
    aciconfig = AciWebservice.deploy_configuration(
        cpu_cores=1, memory_gb=1, description='simple MNIST digit detection')
    service = Webservice.deploy_from_image(workspace=ws,
                                           image=image,
                                           name='pytorchmnist-svc',
                                           deployment_config=aciconfig)
    service.wait_for_deployment(show_output=True)
Example #6
    def __deploy_service(self, image, compute):
        service_config = self.__config['deploy']
        services = AksWebservice.list(self.__ws)
        service = next(
            (svc for svc in services if svc.name == service_config['name']),
            None)
        if service:
            service.update(auth_enabled=service_config['auth'])
            service.wait_for_deployment(show_output=True)
            return service
        aks_config = AksWebservice.deploy_configuration(
            auth_enabled=True,
            max_request_wait_time=75000,
            replica_max_concurrent_requests=100,
            autoscale_enabled=False,
            num_replicas=15)
        aks_service_name = service_config['name']
        aks_service = Webservice.deploy_from_image(
            workspace=self.__ws,
            name=aks_service_name,
            image=image,
            deployment_config=aks_config,
            deployment_target=compute)

        aks_service.wait_for_deployment(show_output=True)
        print(aks_service.state)
        return aks_service
Example #7
def teardown_service(subscription_id, resource_group, workspace_name, workspace_region):

    yield

    # connect to workspace
    ws = azureml_utils.get_or_create_workspace(
        subscription_id=subscription_id,
        resource_group=resource_group,
        workspace_name=workspace_name,
        workspace_region=workspace_region,
    )

    # connect to aci_service
    aci_service = Webservice(workspace=ws, name="aci-test-service")

    # delete aci_service
    aci_service.delete()
Example #8
def deleteWebService(ws, args):
    services = Webservice.list(workspace=ws, model_name=args.modelName)
    if len(services) == 0:
        print("Webservice is not deployed.")
    else:
        print("Webservice is deployed")
        services[0].delete()
        print("Deleted webservice")
Example #9
File: utils.py  Project: grecoe/AMLSSDK
def getWebservice(workspace, webserviceName):
    webservice = None
    services = Webservice.list(workspace)
    for svc in services:
        if svc.name == webserviceName:
            webservice = svc
            break
    return webservice
Example #10
def get_result(players):
    import os
    import azureml
    from azureml.core import Workspace
    from azureml.core.webservice import Webservice
    from azureml.core.authentication import ServicePrincipalAuthentication

    print("getting results")
    filename = os.path.join(app.static_folder, 'champion.json')
    with open(filename) as json_file:
        champions = json.load(json_file)['data']
    X = [create_feature_row(players, champions)]

    # Check core SDK version number
    print("SDK version:", azureml.core.VERSION)
    workspace = "league-ws-deploy"
    subscription_id = "79451499-b2c0-4513-8dea-ef7f37173fbb"
    resource_grp = "league"

    svc_pr = ServicePrincipalAuthentication(
        tenant_id="1f0113ce-bee6-43b0-9e26-61617eced2e4",
        service_principal_id="4c9cfeac-dda9-4298-af3c-d51003c7438b",
        service_principal_password="******")

    ws = Workspace(workspace_name=workspace,
                   subscription_id=subscription_id,
                   resource_group=resource_grp,
                   auth=svc_pr)

    ws.get_details()

    print('Workspace name: ' + ws.name,
          'Azure region: ' + ws.location,
          'Subscription id: ' + ws.subscription_id,
          'Resource group: ' + ws.resource_group,
          sep='\n')

    print("Send to server to predict")
    sample = json.dumps({"data": X})
    sample = bytes(sample, encoding='utf8')
    service = Webservice(workspace=ws, name='lrmpredictfinal6')
    # predict using the deployed model
    result = service.run(input_data=sample)
    return result[0][1]
Example #11
def deploy(workspace,
           name,
           model,
           script,
           source_directory,
           environment=None,
           target='local',
           cpu_cores=1,
           memory_gb=1,
           compute_target_name=None):
    inference_config = InferenceConfig(entry_script=script,
                                       source_directory=source_directory,
                                       environment=environment)

    if target == 'local':
        deployment_config = LocalWebservice.deploy_configuration(port=8890)
    elif target == 'aci':
        deployment_config = AciWebservice.deploy_configuration(
            cpu_cores=cpu_cores, memory_gb=memory_gb)
    elif target == 'aks':
        if compute_target_name is None:
            print("compute_target_name required when target='aks'")
            return None
        deployment_config = AksWebservice.deploy_configuration(
            cpu_cores=cpu_cores,
            memory_gb=memory_gb,
            compute_target_name=compute_target_name,
            auth_enabled=False)
    else:
        print("Unknown target '{}'; expected 'local', 'aci' or 'aks'".format(target))
        return None

    try:
        service = Webservice(workspace, name)
    except WebserviceException:
        service = None

    if service is None:
        service = Model.deploy(workspace, name, [model], inference_config,
                               deployment_config)
    else:
        print(
            "Existing service with that name found, updating InferenceConfig\n"
            "If you meant to redeploy or change the deployment option, first "
            "delete the existing service.")
        service.update(models=[model], inference_config=inference_config)
    return service
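Hypothetical usage of the deploy() helper above for an ACI target; the model name, entry script, source directory and environment are placeholders, and the workspace is assumed to already exist.

from azureml.core import Environment, Workspace
from azureml.core.model import Model

ws = Workspace.from_config()
model = Model(ws, name='my-model')                  # previously registered model (hypothetical name)
env = Environment.get(ws, name='AzureML-Minimal')   # any registered or curated environment works
service = deploy(ws, 'my-aci-service', model, 'score.py', './src',
                 environment=env, target='aci', cpu_cores=1, memory_gb=1)
print(service.scoring_uri)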
Example #12
    def deploy(self, model_id, locally):
        if locally:
            self.ctx.log('Local deployment step is not required for Azure..')
            return {'model_id': model_id}

        ws = AzureProject(self.ctx)._get_ws()
        experiment_name = self.ctx.config.get('experiment/name', None)
        if experiment_name is None:
            raise AzureException('Please specify Experiment name...')

        iteration, run_id = self._get_iteration(model_id)

        experiment = Experiment(ws, experiment_name)
        experiment_run = AutoMLRun(experiment=experiment, run_id=run_id)
        model_run = AutoMLRun(experiment=experiment, run_id=model_id)
        model_name = model_run.properties['model_name']
        self.ctx.log('Registering model: %s' % model_name)

        description = '%s-%s' % (model_name, iteration)
        model = experiment_run.register_model(model_name=model_name,
                                              iteration=iteration,
                                              description=description,
                                              tags=None)

        script_file_name = '.azureml/score_script.py'
        model_run.download_file('outputs/scoring_file_v_1_0_0.py',
                                script_file_name)

        # Deploying ACI Service
        aci_service_name = self._aci_service_name(model_name)
        self.ctx.log('Deploying AciWebservice %s ...' % aci_service_name)

        inference_config = InferenceConfig(
            environment=model_run.get_environment(),
            entry_script=script_file_name)

        aciconfig = AciWebservice.deploy_configuration(
            cpu_cores=1,
            memory_gb=2,
            tags={'type': "inference-%s" % aci_service_name},
            description="inference-%s" % aci_service_name)

        # Remove any existing service under the same name.
        try:
            Webservice(ws, aci_service_name).delete()
            self.ctx.log('Remove any existing service under the same name...')
        except WebserviceException:
            pass

        aci_service = Model.deploy(ws, aci_service_name, [model],
                                   inference_config, aciconfig)
        aci_service.wait_for_deployment(True)
        self.ctx.log('%s state %s' %
                     (aci_service_name, str(aci_service.state)))

        return {'model_id': model_id, 'aci_service_name': aci_service_name}
Example #13
def deploy_new_web_service(workspace, service_name, aciconfig, image_config,
                           model):
    service = Webservice.deploy_from_model(workspace=workspace,
                                           name=service_name,
                                           deployment_config=aciconfig,
                                           models=[model],
                                           image_config=image_config)

    service.wait_for_deployment(show_output=True)
    print('The URI to access the web service is: ', service.scoring_uri)
Example #14
def deployModelAsWebService(
        ws,
        model_folder_path="models",
        model_name="component_compliance",
        scoring_script_filename="scoring_service.py",
        conda_packages=['numpy', 'pandas'],
        pip_packages=['azureml-sdk', 'onnxruntime'],
        conda_file="dependencies.yml",
        runtime="python",
        cpu_cores=1,
        memory_gb=1,
        tags={'name': 'scoring'},
        description='Compliance classification web service.',
        service_name="complianceservice"):
    # note that for model_path we supply the name of the outputs folder without a trailing slash;
    # this ensures both the model and the custom estimators get uploaded.
    print("Registering and uploading model...")
    registered_model = Model.register(model_path=model_folder_path,
                                      model_name=model_name,
                                      workspace=ws)

    # create a Conda dependencies environment file
    print("Creating conda dependencies file locally...")
    from azureml.core.conda_dependencies import CondaDependencies
    mycondaenv = CondaDependencies.create(conda_packages=conda_packages,
                                          pip_packages=pip_packages)
    with open(conda_file, "w") as f:
        f.write(mycondaenv.serialize_to_string())

    # create container image configuration
    print("Creating container image configuration...")
    from azureml.core.image import ContainerImage
    image_config = ContainerImage.image_configuration(
        execution_script=scoring_script_filename,
        runtime=runtime,
        conda_file=conda_file)

    # create ACI configuration
    print("Creating ACI configuration...")
    from azureml.core.webservice import AciWebservice, Webservice
    aci_config = AciWebservice.deploy_configuration(cpu_cores=cpu_cores,
                                                    memory_gb=memory_gb,
                                                    tags=tags,
                                                    description=description)

    # deploy the webservice to ACI
    print("Deploying webservice to ACI...")
    webservice = Webservice.deploy_from_model(workspace=ws,
                                              name=service_name,
                                              deployment_config=aci_config,
                                              models=[registered_model],
                                              image_config=image_config)
    webservice.wait_for_deployment(show_output=True)

    return webservice
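A hypothetical smoke test against the webservice returned above; ws is assumed to be an existing workspace, and the payload shape depends entirely on scoring_service.py, so the feature row here is a placeholder.

import json

webservice = deployModelAsWebService(ws, model_folder_path='models',
                                     service_name='complianceservice')
sample = json.dumps({'data': [[0.0, 1.0, 2.0]]})    # placeholder input row
print(webservice.run(input_data=sample))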
Example #15
def run(model_path, model_name):
    auth_args = {
        'tenant_id': os.environ['TENANT_ID'],
        'service_principal_id': os.environ['SERVICE_PRINCIPAL_ID'],
        'service_principal_password': os.environ['SERVICE_PRINCIPAL_PASSWORD']
    }

    ws_args = {
        'auth': ServicePrincipalAuthentication(**auth_args),
        'subscription_id': os.environ['SUBSCRIPTION_ID'],
        'resource_group': os.environ['RESOURCE_GROUP']
    }

    ws = Workspace.get(os.environ['WORKSPACE_NAME'], **ws_args)

    print(ws.get_details())

    print('\nSaving model {} to {}'.format(model_path, model_name))
    model = Model.register(ws, model_name=model_name, model_path=model_path)
    print('Done!')

    print('Checking for existing service {}'.format(model_name))
    service_name = 'simplemnist-svc'
    if model_name in ws.webservices:
        print('Found it!\nRemoving Existing service...')
        ws.webservices[model_name].delete()
        print('Done!')
    else:
        print('Not found, creating new one!')

    # image configuration
    image_config = ContainerImage.image_configuration(
        execution_script="score.py",
        runtime="python",
        conda_file="environment.yml")

    # deployment configuration
    aciconfig = AciWebservice.deploy_configuration(cpu_cores=1,
                                                   memory_gb=1,
                                                   description=model_name)

    # deploy
    service = Webservice.deploy_from_model(workspace=ws,
                                           name=model_name,
                                           models=[model],
                                           image_config=image_config,
                                           deployment_config=aciconfig)

    service.wait_for_deployment(show_output=True)

    # print logs
    print(service.get_logs())

    print('Done!')
Example #16
def deploy_new_webservice(ws, compute_name, webservice_name, image):
    aks_target = ws.compute_targets[compute_name]
    aks_config = AksWebservice.deploy_configuration(collect_model_data=True,
                                                    enable_app_insights=True)
    service = Webservice.deploy_from_image(workspace=ws,
                                           name=webservice_name,
                                           image=image,
                                           deployment_config=aks_config,
                                           deployment_target=aks_target)
    service.wait_for_deployment(show_output=True)
    print(service.state)
Example #17
File: model.py  Project: chrinide/a2ml
    def _deploy_remotly(self, model_id, model_run, ws, experiment):
        from azureml.core.model import Model
        from azureml.core.model import InferenceConfig
        from azureml.core.webservice import Webservice
        from azureml.core.webservice import AciWebservice
        from azureml.exceptions import WebserviceException
        from azureml.train.automl.run import AutoMLRun

        # ws, experiment = self._get_experiment()
        iteration, run_id = self._get_iteration(model_id)

        experiment_run = AutoMLRun(experiment=experiment, run_id=run_id)
        model_name = model_run.properties['model_name']
        self.ctx.log('Registering model: %s' % model_id)

        description = '%s-%s' % (model_name, iteration)
        model = experiment_run.register_model(
            model_name=model_name, iteration=iteration,
            description=description, tags=None)

        script_file_name = '.azureml/score_script.py'
        model_run.download_file(
            'outputs/scoring_file_v_1_0_0.py', script_file_name)

        self._edit_score_script(script_file_name)

        # Deploying ACI Service
        aci_service_name = self._aci_service_name(model_name)
        self.ctx.log('Deploying AciWebservice %s ...' % aci_service_name)

        inference_config = InferenceConfig(
            environment=model_run.get_environment(),
            entry_script=script_file_name)

        aciconfig = AciWebservice.deploy_configuration(
            cpu_cores=1,
            memory_gb=2,
            tags={'type': "inference-%s" % aci_service_name},
            description="inference-%s" % aci_service_name)

        # Remove any existing service under the same name.
        try:
            Webservice(ws, aci_service_name).delete()
            self.ctx.log('Remove any existing service under the same name...')
        except WebserviceException:
            pass

        aci_service = Model.deploy(
            ws, aci_service_name, [model], inference_config, aciconfig)
        aci_service.wait_for_deployment(True)
        self.ctx.log('%s state %s' % (aci_service_name, str(aci_service.state)))

        return {'model_id': model_id, 'aci_service_name': aci_service_name}
Example #18
def deploy_image():
    ws = get_workspace()
    azure_image = get_image()
    aci_config = AciWebservice.deploy_configuration(cpu_cores=1,
                                                    memory_gb=1,
                                                    tags={'method': 'sklearn'},
                                                    description='Worst model',
                                                    location=LOCATION)
    webservice = Webservice.deploy_from_image(image=azure_image,
                                              workspace=ws,
                                              name=MODEL_NAME,
                                              deployment_config=aci_config)
    webservice.wait_for_deployment(show_output=True)
Example #19
def deploy_to_aci(model_image, workspace, dev_webservice_name):
    from azureml.core.webservice import AciWebservice, Webservice
    # Deploy a model image to ACI
    print("Deploying to ACI...")
    # make sure dev_webservice_name is unique and doesn't already exist; otherwise it needs to be replaced
    dev_webservice_deployment_config = AciWebservice.deploy_configuration()
    dev_webservice = Webservice.deploy_from_image(
        name=dev_webservice_name,
        image=model_image,
        deployment_config=dev_webservice_deployment_config,
        workspace=workspace)
    dev_webservice.wait_for_deployment()
    print("Deployment to ACI successfully complete")
    return dev_webservice
Example #20
def deploy(aciconfig, envfile, name, model):
    # configure the image
    image_config = ContainerImage.image_configuration(
        execution_script="./score.py", runtime="python", conda_file=envfile)

    service = Webservice.deploy_from_model(workspace=ws,
                                           name=name,
                                           deployment_config=aciconfig,
                                           models=[model],
                                           image_config=image_config)

    service.wait_for_deployment(show_output=True)

    print(service.scoring_uri)
Example #21
def main():
    ws = AzureMLUtils.get_workspace()
    print("Workspace lookup successful")
    # read command line parameters
    service_name = getRuntimeArgs()

    # look up service
    service = Webservice(ws, service_name)

    # look up scoring uri
    scoring_uri = service.scoring_uri

    # get the first api key
    if service.compute_type == "AKS":
        api_key = service.get_keys()[0]
    elif service.compute_type == "ACI":
        api_key = "dummy"
    else:
        raise Exception("Unknown compute type")

    # These lines are needed for Azure DevOps to set the api key and scoring uri as pipeline variables.
    print("##vso[task.setvariable variable=TMP_SCORING_URI]{}".format(scoring_uri))
    print("##vso[task.setvariable variable=TMP_API_KEY]{}".format(api_key))
Example #22
class AzureMLService():

    def __init__(self, ws:Workspace, service_name: str):
        self.__ws = ws
        self.__azure_service = Webservice(ws, service_name)

    def make_request(self, inference_dataset_name):

        inference_dataset = Dataset.get_by_name(self.__ws, inference_dataset_name)
        df = inference_dataset.to_pandas_dataframe()

        body = json.dumps({'data': json.loads(df.to_json(orient='values'))})
        result = self.__azure_service.run(body)
        print(result)
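Hypothetical usage of the AzureMLService wrapper above; the workspace config, service name and dataset name are placeholders.

from azureml.core import Workspace

ws = Workspace.from_config()
client = AzureMLService(ws, 'my-deployed-service')   # hypothetical deployed service name
client.make_request('my-inference-dataset')          # hypothetical registered dataset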
Example #23
def deploy_container_instance(workspace, endpoint_name, inference_config,
                              model_azure):
    # Remove any existing service under the same name.
    try:
        Webservice(workspace, endpoint_name).delete()
    except WebserviceException:
        pass

    deployment_config = AciWebservice.deploy_configuration(cpu_cores=1,
                                                           memory_gb=1)
    service = Model.deploy(workspace, endpoint_name, [model_azure],
                           inference_config, deployment_config)
    service.wait_for_deployment(show_output=True)
    print('The API {} was generated in state {}'.format(service.scoring_uri,
                                                         service.state))
    return service.scoring_uri
Example #24
def deploy_to_aks(workspace, model_image, aks_target, prod_webservice_name="dsswe-mprodm"):
    from azureml.core.webservice import Webservice, AksWebservice
    # Deploy a model image to AKS
    print("Deploying to AKS...")
    # Set configuration and service name
    prod_webservice_deployment_config = AksWebservice.deploy_configuration()
    # Deploy from image
    prod_webservice = Webservice.deploy_from_image(workspace=workspace,
                                                   name=prod_webservice_name,
                                                   image=model_image,
                                                   deployment_config=prod_webservice_deployment_config,
                                                   deployment_target=aks_target)
    # Wait for the deployment to complete
    prod_webservice.wait_for_deployment(show_output=True)
    print("Deployment to AKS completed sucessfully")
    return prod_webservice
Example #25
def deployWebservice(ws, args, folders):
    # this section requires that processing is done in the directory where the execution script and the conda_file reside
    os.chdir(folders.script_folder)
    model = Model(ws, args.modelName)
    aciconfig = AciWebservice.deploy_configuration(cpu_cores=args.cpuCores,
                                                   memory_gb=args.memoryGB)
    # configure the image
    image_config = ContainerImage.image_configuration(
        execution_script=args.scoringScript,
        runtime="python",
        conda_file=args.environmentFileName)
    service = Webservice.deploy_from_model(workspace=ws,
                                           name=args.webserviceName,
                                           deployment_config=aciconfig,
                                           models=[model],
                                           image_config=image_config)
    service.wait_for_deployment(show_output=True)
    return service.scoring_uri
Example #26
def deploy_image():
    ws = get_workspace()
    azure_image = get_image()

    # Set the web service configuration (using default here with app insights)
    aks_config = AksWebservice.deploy_configuration(enable_app_insights=True)

    # Unique service name
    service_name = AKS_NAME

    aks_target = get_cluster()

    # Webservice creation using single command
    aks_service = Webservice.deploy_from_image(workspace=ws,
                                               name=service_name,
                                               deployment_config=aks_config,
                                               image=azure_image,
                                               deployment_target=aks_target)

    aks_service.wait_for_deployment(show_output=True)
Example #27
def deploy_service(execution_script,
                   conda_file,
                   aciconfig,
                   service_name,
                   model,
                   workspace,
                   runtime="python"):

    image_config = ContainerImage.image_configuration(
        execution_script=execution_script,
        runtime=runtime,
        conda_file=conda_file)

    service = Webservice.deploy_from_model(workspace=workspace,
                                           name=service_name,
                                           deployment_config=aciconfig,
                                           models=[model],
                                           image_config=image_config)
    service.wait_for_deployment(show_output=True)
    print(service.scoring_uri)
    return service
Example #28
def run_deployment() -> None:
    """Train the model to Azure."""

    # Load an existing Azure ML workspace. You can also create one with Workspace.create (see the commented-out call below).
    azure_workspace = Workspace.get(
        name=model_config.WORKSPACE_NAME,
        subscription_id=model_config.SUBSCRIPTION_ID,
        resource_group=model_config.RESOURCE_GROUP)
    # azure_workspace = Workspace.create(name=model_config.WORKSPACE_NAME,
    #                                subscription_id=model_config.SUBSCRIPTION_ID,
    #                                resource_group=model_config.RESOURCE_GROUP,
    #                                location=model_config.LOCATION,
    #                                create_resource_group=False,
    #                                exist_ok=True)

    # Build an Azure ML container image for deployment
    model_path = config.S3_MODEL_PATH
    azure_image, azure_model = mlflow.azureml.build_image(
        model_uri=model_path,
        workspace=azure_workspace,
        description="Loan classification model 1",
        synchronous=True)
    # If your image build failed, you can access build logs at the following URI:
    print("Access the following URI for build logs: {}".format(
        azure_image.image_build_log_uri))

    # Deploy the container image to ACI
    webservice_deployment_config = AciWebservice.deploy_configuration()
    webservice = Webservice.deploy_from_image(
        image=azure_image,
        workspace=azure_workspace,
        name=model_config.DEPLOYMENT_NAME,
        deployment_config=webservice_deployment_config)
    webservice.wait_for_deployment()

    # After the image deployment completes, requests can be posted via HTTP to the new ACI
    # webservice's scoring URI.
    print("Scoring URI is: %s", webservice.scoring_uri)
Example #29
def azureml_build_deploy(runid, workspace, model_name, image_name,
                         deploy_name):
    # Build an Azure ML container image for an MLflow model
    azure_image, azure_model = mlflow.azureml.build_image(
        model_uri='runs:/{}/{}'.format(runid, MODEL_SAVE_PATH),
        workspace=workspace,
        model_name=model_name,
        image_name=image_name,
        synchronous=True)

    # Deploy the image to Azure Container Instances (ACI) for real-time serving
    aci_config = AciWebservice.deploy_configuration()
    deployment_stub = ''.join(
        [random.choice(string.ascii_lowercase) for i in range(5)])
    print("Deploying as " + deploy_name + "-" + deployment_stub)
    webservice = Webservice.deploy_from_image(image=azure_image,
                                              workspace=workspace,
                                              name=deploy_name + "-" +
                                              deployment_stub,
                                              deployment_config=aci_config)

    webservice.wait_for_deployment()

    return webservice
Example #30
from azureml.core import Workspace
from azureml.core.webservice import Webservice

# Requires the config to be downloaded first to the current working directory
ws = Workspace.from_config()

# Set with the deployment name
name = "automl-voting-ensemble"

# load existing web service
service = Webservice(name=name, workspace=ws)
service.update(enable_app_insights=True)
logs = service.get_logs()

for line in logs.split('\n'):
    print(line)