def create_aml_environment(aml_interface):
    aml_env = Environment(name=AML_ENVIRONMENT_NAME)
    conda_dep = CondaDependencies()
    conda_dep.add_pip_package("numpy==1.18.2")
    conda_dep.add_pip_package("pandas==1.0.3")
    conda_dep.add_pip_package("scikit-learn==0.22.2.post1")
    conda_dep.add_pip_package("joblib==0.14.1")
    conda_dep.add_pip_package("azure-storage-blob==12.3.0")

    aml_env.environment_variables[AZURE_STORAGE_ACCOUNT_NAME] = os.getenv(
        AZURE_STORAGE_ACCOUNT_NAME)
    aml_env.environment_variables[AZURE_STORAGE_ACCOUNT_KEY] = os.getenv(
        AZURE_STORAGE_ACCOUNT_KEY)
    aml_env.environment_variables[MODEL_NAME_VARIABLE] = MODEL_NAME

    # Log variable names only; the values include the storage account key.
    logger.info(
        "Set environment variables on compute environment: %s",
        list(aml_env.environment_variables))

    whl_filepath = retrieve_whl_filepath()
    whl_url = Environment.add_private_pip_wheel(
        workspace=aml_interface.workspace,
        file_path=whl_filepath,
        exist_ok=True)
    conda_dep.add_pip_package(whl_url)
    aml_env.python.conda_dependencies = conda_dep
    aml_env.docker.enabled = True
    return aml_env
Example #2
    def get_run_cfg(ws, pip_packages, conda_packages, ext_wheels, gpu=True):
        '''
        get_run_cfg - Builds and returns the AMLS run configuration.

        :param ws: AMLS workspace, used to register private wheels
        :param pip_packages: list of pip package specifiers
        :param conda_packages: list of conda package specifiers
        :param ext_wheels: list of local paths to private wheel files
        :param gpu: use the default GPU base image when True, CPU otherwise
        :returns: AMLS run configuration
        :rtype: RunConfiguration object
        '''
        conda_dep = CondaDependencies()
        for pip_package in pip_packages:
            conda_dep.add_pip_package(pip_package)
        for conda_package in conda_packages:
            conda_dep.add_conda_package(conda_package)
        for whl_path in ext_wheels:
            whl_url = Environment.add_private_pip_wheel(workspace=ws,
                                                        file_path=whl_path,
                                                        exist_ok=True)
            conda_dep.add_pip_package(whl_url)
        run_cfg = RunConfiguration(conda_dependencies=conda_dep)
        run_cfg.environment.docker.enabled = True
        run_cfg.environment.docker.gpu_support = gpu
        if gpu:
            run_cfg.environment.docker.base_image = DEFAULT_GPU_IMAGE
        else:
            run_cfg.environment.docker.base_image = DEFAULT_CPU_IMAGE
        run_cfg.environment.spark.precache_packages = False
        return run_cfg
Example #3
def save_conda_dependencies(amls_config, filename):
    conda_dependencies = CondaDependencies()
    for dependency in amls_config['conda_dependencies']:
        conda_dependencies.add_pip_package(dependency)

    with open(filename, "w") as f:
        f.write(conda_dependencies.serialize_to_string())
Example #4
def main():
    # get workspace
    ws = load_workspace()
    model = Model.register(ws,
                           model_name='pytorch_mnist',
                           model_path='model.pth')

    # create dep file
    myenv = CondaDependencies()
    myenv.add_pip_package('numpy')
    myenv.add_pip_package('torch')
    with open('pytorchmnist.yml', 'w') as f:
        print('Writing out {}'.format('pytorchmnist.yml'))
        f.write(myenv.serialize_to_string())
        print('Done!')

    # create image
    image_config = ContainerImage.image_configuration(
        execution_script="score.py",
        runtime="python",
        conda_file="pytorchmnist.yml",
        dependencies=['./models.py'])

    image = Image.create(ws, 'pytorchmnist', [model], image_config)
    image.wait_for_creation(show_output=True)

    # create service
    aciconfig = AciWebservice.deploy_configuration(
        cpu_cores=1, memory_gb=1, description='simple MNIST digit detection')
    service = Webservice.deploy_from_image(workspace=ws,
                                           image=image,
                                           name='pytorchmnist-svc',
                                           deployment_config=aciconfig)
    service.wait_for_deployment(show_output=True)
Example #5
def main():
    try:
        ws = connectToWorkspace(TENANT_ID, APP_ID, SP_PASSWORD,
                                SUBSCRIPTION_ID, RESOURCE_GROUP,
                                WORKSPACE_NAME)
    except ProjectSystemException as err:
        print('Authentication did not work.')
        return json.dumps('ProjectSystemException')
    except Exception as err:
        print(err)
        sys.exit()
    print("connect")
    model = Model.register(model_path=os.path.join(
        os.getcwd(), "retailai_recommendation_model.zip"),
                           model_name="retailai_recommendation_model",
                           description="Retail.AI Item-Based Recommender",
                           workspace=ws)
    print("model registered")

    myenv = Environment.get(ws, name='AzureML-PySpark-MmlSpark-0.15')
    myenv.name = "myenv"
    conda_dep = CondaDependencies()
    conda_dep.add_pip_package("azureml-defaults")
    conda_dep.add_pip_package("azure-storage-file-datalake")
    myenv.python.conda_dependencies = conda_dep
    print("Environment Configured")
    inference_config = InferenceConfig(entry_script='score.py',
                                       environment=myenv)

    aks_target_name = AKS_CLUSTER_NAME

    try:
        aks_target = AksCompute(ws, aks_target_name)
        print(aks_target)
    except ComputeTargetException as err:
        aks_target = attachAksComputeToWorkspace(ws, RESOURCE_GROUP,
                                                 AKS_CLUSTER_NAME,
                                                 aks_target_name, True)
        print(aks_target)
    except Exception as err:
        print(err)
        sys.exit()
    try:
        deployToAks(ws, aks_target, "retail-ai-item-recommender", model,
                    inference_config, True)
    except Exception as err:
        print(err)
        sys.exit()
Example #6
    def deploy(self):
        myenv = CondaDependencies()
        myenv.add_pip_package("azureml-sdk")
        myenv.add_pip_package("joblib")
        myenv.add_pip_package("tensorflow")
        myenv.add_pip_package("Pillow")
        myenv.add_pip_package("azureml-dataprep[pandas,fuse]>=1.1.14")

        with open("diagnoz_env.yml", "w") as f:
            f.write(myenv.serialize_to_string())

        huml_env = Environment.from_conda_specification(
            name="diagnoz_env", file_path="diagnoz_env.yml")

        inference_config = InferenceConfig(entry_script="score.py",
                                           source_directory='.',
                                           environment=huml_env)
        print("files for deployment:")
        for root, dir_, files in os.walk(os.getcwd()):
            print("dir_", dir_)
            for filename in files:
                print("filename :", filename)

        aciconfig = AciWebservice.deploy_configuration(
            cpu_cores=1,
            memory_gb=1,
            tags={
                "data": "cancer-data",
                "method": "tensorflow"
            },
            description='Predicting cancer with tensorflow')

        try:
            AciWebservice(self.ws, self.config.DEPLOY_SERVICE_NAME).delete()
            print("webservice deleted")
        except WebserviceException:
            pass

        model = self.ws.models[self.config.MODEL_NAME]

        service = Model.deploy(workspace=self.ws,
                               name=self.config.DEPLOY_SERVICE_NAME,
                               models=[model],
                               inference_config=inference_config,
                               deployment_config=aciconfig)

        service.wait_for_deployment(show_output=True)
        print("deployment succeeded")
Example #7
    def conda_dependencies(self):
        """
        Get module conda dependencies

        :return: CondaDependencies instance
        """
        cd = CondaDependencies()
        for c in self._get_value('CondaDependencies/CondaChannels'):
            cd.add_channel(c)
        for c in self._get_value('CondaDependencies/CondaPackages'):
            cd.add_conda_package(c)
        for p in self._get_value('CondaDependencies/PipPackages'):
            cd.add_pip_package(p)
        for p in self._get_value('CondaDependencies/PipOptions'):
            cd.set_pip_option(p)
        return cd
Example #8
def get_inference_config(environment_name, conda_file, entry_script):
    # Create the environment
    env = Environment(name=environment_name)

    conda_dep = CondaDependencies(conda_file)

    # Define the packages needed by the model and scripts
    conda_dep.add_pip_package("azureml-defaults")
    conda_dep.add_pip_package("xgboost")

    # Adds dependencies to PythonSection of myenv
    env.python.conda_dependencies = conda_dep

    inference_config = InferenceConfig(entry_script=entry_script,
                                       environment=env)

    return inference_config
Example #9
def create_aml_environment(aml_interface):
    aml_env = Environment(name=AML_ENV_NAME)
    conda_dep = CondaDependencies()
    conda_dep.add_pip_package("numpy==1.18.2")
    conda_dep.add_pip_package("pandas==1.0.3")
    conda_dep.add_pip_package("scikit-learn==0.22.2.post1")
    conda_dep.add_pip_package("joblib==0.14.1")
    whl_filepath = retrieve_whl_filepath()
    whl_url = Environment.add_private_pip_wheel(
        workspace=aml_interface.workspace,
        file_path=whl_filepath,
        exist_ok=True)
    conda_dep.add_pip_package(whl_url)
    aml_env.python.conda_dependencies = conda_dep
    aml_env.docker.enabled = True
    return aml_env
Example #10
def get_config(entry_script):
    # Create the environment
    env = Environment(name="tensorflow_env")

    conda_dep = CondaDependencies()

    # Define the packages needed by the model and scripts
    conda_dep.add_conda_package("tensorflow")

    # You must list azureml-defaults as a pip dependency
    conda_dep.add_pip_package("azureml-defaults")
    conda_dep.add_pip_package("keras")
    conda_dep.add_pip_package("pandas")

    # Adds dependencies to PythonSection of myenv
    env.python.conda_dependencies = conda_dep

    inference_config = InferenceConfig(entry_script=entry_script,
                                       environment=env)

    print('Endpoint configuration returned')
    return inference_config
Example #11
    # Unpack the generator and look through the list to find your desired model
    model, = (m for m in model_list
              if m.version == model_version and m.name == model_name)
    print(
        'Model picked: {} \nModel Description: {} \nModel Version: {}'.format(
            model.name, model.description, model.version))

    dependencies = CondaDependencies()
    dependencies.add_conda_package("numpy")
    dependencies.add_conda_package("matplotlib")
    dependencies.add_conda_package("scikit-learn")
    dependencies.add_conda_package("tensorflow")
    dependencies.add_conda_package("keras")
    dependencies.add_conda_package("scikit-image")
    dependencies.add_pip_package("pynacl==1.2.1")

    os.makedirs("./score/", exist_ok=True)
    with open("./score/dependencies.yml", "w") as f:
        f.write(dependencies.serialize_to_string())

    original_dir = os.getcwd()
    # Change directory since the docker container expects things at the top-level directory
    os.chdir("./score")
    image_config = ContainerImage.image_configuration(
        execution_script="score.py",
        runtime="python",
        conda_file="dependencies.yml",
        description="Image with Uploaded Model")

    # Image Name can only include alphanumeric or '.' and '-'
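    # The example stops at the comment above; a hypothetical sanitizer that
    # enforces the stated constraint (this helper is illustrative, not part
    # of the original source):
    import re
    safe_image_name = re.sub(r"[^0-9a-zA-Z.\-]", "-", "Uploaded Model Image").lower()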
Example #12
    def deploy(self):

        try:
            AciWebservice(self.ws, self.DEPLOY_SERVICE_NAME).delete()
            print("webservice deleted")
        except WebserviceException:
            pass

        conda_dep = CondaDependencies()
        conda_dep.add_pip_package("joblib")
        conda_dep.add_pip_package("torch")
        conda_dep.add_pip_package("torchvision")
        conda_dep.add_pip_package("azureml-sdk")
        conda_dep.add_pip_package("azure-storage-blob")
        conda_dep.add_pip_package("PyYAML")
        conda_dep.add_pip_package("scikit-learn")
        conda_dep.add_pip_package("matplotlib")
        conda_dep.add_pip_package("opencensus-ext-azure")

        shoes_designer_env_file = "shoes_designer_env.yml"
        with open(shoes_designer_env_file, "w") as f:
            f.write(conda_dep.serialize_to_string())

        shoes_designer_env = Environment.from_conda_specification(
            name="shoes_designer_env", file_path=shoes_designer_env_file)

        inference_config = InferenceConfig(entry_script="score.py",
                                           environment=shoes_designer_env)

        aciconfig = AciWebservice.deploy_configuration(
            cpu_cores=1,
            memory_gb=2,
            tags={"method": "torch"},
            description='Generate shoes with torch')

        model = self.ws.models[self.MODEL_NAME]

        service = Model.deploy(workspace=self.ws,
                               name=self.DEPLOY_SERVICE_NAME,
                               models=[model],
                               inference_config=inference_config,
                               deployment_config=aciconfig,
                               overwrite=True)
        service.wait_for_deployment(show_output=True)

        print("deployment succeeded")

        return service
Example #13
from azureml.core.model import InferenceConfig
from azureml.core.environment import Environment
from azureml.core.conda_dependencies import CondaDependencies

# Create the environment
myenv = Environment(name="myenv")
conda_dep = CondaDependencies()

# Define the packages needed by the model and scripts
conda_dep.add_conda_package("tensorflow")
conda_dep.add_conda_package("numpy")
conda_dep.add_conda_package("scikit-learn")
# You must list azureml-defaults as a pip dependency
conda_dep.add_pip_package("azureml-defaults")
conda_dep.add_pip_package("keras")
conda_dep.add_pip_package("gensim")

# Adds dependencies to PythonSection of myenv
myenv.python.conda_dependencies = conda_dep

inference_config = InferenceConfig(entry_script="score.py", environment=myenv)
Example #14
    parser.add_argument('--workspace_name', help='the workspace name of aml')
    parser.add_argument('--compute_target',
                        help='the compute cluster name of aml')
    parser.add_argument('--docker_image', help='the docker image of job')
    parser.add_argument('--experiment_name', help='the experiment name')
    parser.add_argument('--script_dir', help='script directory')
    parser.add_argument('--script_name', help='script name')
    args = parser.parse_args()

    ws = Workspace(args.subscription_id, args.resource_group,
                   args.workspace_name)
    compute_target = ComputeTarget(workspace=ws, name=args.compute_target)
    experiment = Experiment(ws, args.experiment_name)
    run_config = RunConfiguration()
    dependencies = CondaDependencies()
    dependencies.add_pip_package("azureml-sdk")
    dependencies.add_pip_package("azureml")
    run_config.environment.python.conda_dependencies = dependencies
    run_config.environment.docker.enabled = True
    run_config.environment.docker.base_image = args.docker_image
    run_config.target = compute_target
    run_config.node_count = 1
    config = ScriptRunConfig(source_directory=args.script_dir,
                             script=args.script_name,
                             run_config=run_config)
    run = experiment.submit(config)
    print(run.get_details()["runId"])
    while True:
        line = sys.stdin.readline().rstrip()
        if line == 'update_status':
            print('status:' + run.get_status())
Example #15
from azureml.core.conda_dependencies import CondaDependencies

myenv = CondaDependencies()
myenv.add_pip_package("numpy")
myenv.add_pip_package("azureml-core")
myenv.add_pip_package("keras")

with open("myenv.yml", "w") as f:
    f.write(myenv.serialize_to_string())
Example #16
# Inference Configuration
from azureml.core.model import InferenceConfig
from azureml.core.environment import Environment
from azureml.core.conda_dependencies import CondaDependencies

# Create the environment
myenv = Environment(name="myenv")
conda_dep = CondaDependencies()

# Define the packages needed by the model and scripts
conda_dep.add_conda_package("python=3.6.2")
conda_dep.add_conda_package("numpy=1.18.5")
conda_dep.add_conda_package("scikit-learn")
conda_dep.add_conda_package("pip")
# You must list azureml-defaults as a pip dependency
conda_dep.add_pip_package("azureml-defaults")
conda_dep.add_pip_package("tensorflow==2.3.0")
conda_dep.add_pip_package("keras==2.4.3")

# Adds dependencies to PythonSection of myenv
myenv.python.conda_dependencies = conda_dep

inference_config = InferenceConfig(entry_script="./source_dir/score.py",
                                   environment=myenv)

# Deployment config
deployment_config = AciWebservice.deploy_configuration(cpu_cores=1,
                                                       memory_gb=1)
# deployment_config = LocalWebservice.deploy_configuration(port=7000)

# deploy model
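# The snippet ends at the comment above; a minimal sketch of the deployment
# call, assuming a workspace `ws` and a registered `model` from the
# surrounding project (both elided here), with an illustrative service name:
from azureml.core.model import Model

service = Model.deploy(workspace=ws,
                       name='tensorflow-svc',
                       models=[model],
                       inference_config=inference_config,
                       deployment_config=deployment_config)
service.wait_for_deployment(show_output=True)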
Example #17
# Get the latest model
model = ws.models['diabetes']
print(model.name, 'version', model.version)

# Create a folder for the web service files
folder_name = 'diabetes_service'
experiment_folder = './' + folder_name
os.makedirs(folder_name, exist_ok=True)
print(folder_name, 'folder created.')

# Create a container config yml file

# Add the dependencies for our model (AzureML defaults is already included)
myenv = CondaDependencies()
myenv.add_pip_package("scikit-learn")
# myenv.add_pip_package("azureml-sdk[automl]") # Required for AutoML models

# Save the environment config as a .yml file
env_file = folder_name + "/diabetes_env.yml"
with open(env_file, "w") as f:
    f.write(myenv.serialize_to_string())
print("Saved dependency info in", env_file)

# Print the .yml file
with open(env_file, "r") as f:
    print(f.read())

# Deploy the web service

# Configure the scoring environment
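# The example is cut off here; a plausible continuation, assuming a score.py
# entry script inside the service folder created above:
from azureml.core.environment import Environment
from azureml.core.model import InferenceConfig

service_env = Environment.from_conda_specification(name='diabetes-env',
                                                   file_path=env_file)
inference_config = InferenceConfig(entry_script='score.py',
                                   source_directory=folder_name,
                                   environment=service_env)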
Example #18
print(f'Workspace: {ws.name}')

experiment_name = 'train-bert-ner-on-amlcompute'
experiment = Experiment(workspace=ws, name=experiment_name)

supported_vms = AmlCompute.supported_vmsizes(workspace=ws)
# print(supported_vms)

project_folder = './ner'

bert_env = Environment("bert_aml_env")

conda_dep = CondaDependencies()
conda_dep.set_python_version('3.7.3')
conda_dep.add_pip_package("torch")
conda_dep.add_pip_package("adal")
conda_dep.add_pip_package("cloudpickle")
conda_dep.add_pip_package("docker")
conda_dep.add_pip_package("numpy")
conda_dep.add_pip_package("scipy")
conda_dep.add_pip_package("tokenizers")
conda_dep.add_pip_package("transformers")
conda_dep.add_pip_package("matplotlib")
conda_dep.add_pip_package("apex==0.9.10dev")
conda_dep.add_pip_package("pandas")
conda_dep.add_pip_package("pillow")
conda_dep.add_pip_package("requests")
conda_dep.add_pip_package("scikit-learn")
conda_dep.add_pip_package("tqdm")
bert_env.docker.enabled = True
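
# The snippet stops before the dependency object is attached to the
# environment; presumably the next step is:
bert_env.python.conda_dependencies = conda_dep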
Example #19
from azureml.core.model import InferenceConfig
from azureml.core.environment import Environment
if 'temp' not in os.listdir():
    os.mkdir('temp')
df_test.to_json("temp/test_sample.json")  # save data for external tests

# Create the environment
myenv = Environment(name="mortgage_score_env_hd")
conda_dep = CondaDependencies()

# Define the packages needed by the model and scripts
conda_dep.add_conda_package("numpy")
conda_dep.add_conda_package("pip")
conda_dep.add_conda_package("scikit-learn=0.20.3")
# You must list azureml-defaults as a pip dependency
conda_dep.add_pip_package("azureml-defaults==1.11.0")
conda_dep.add_pip_package("azureml-core")
conda_dep.add_pip_package("azureml-automl-runtime")
conda_dep.add_pip_package("packaging")
conda_dep.add_pip_package("azureml-explain-model==1.11.0")
conda_dep.add_pip_package("inference-schema")
# scikit-learn>=0.19.0,<=0.20.3
conda_dep.add_conda_package("pandas")
conda_dep.add_conda_package("py-xgboost")
# Save environment also locally to disk so we can test the score script directly by creating a local environment
conda_dep.save('temp/mortgage_score_env.yml')
myenv.python.conda_dependencies = conda_dep
# -

webservice_config = AciWebservice.deploy_configuration(cpu_cores=1,
Example #20
# Create workspace from config file
ws = Workspace.from_config()

# Create experiment to submit training
experiment_name = 'road-segmentation-train'
experiment = Experiment(ws, EXPERIMENT_NAME)

# Create the environment
tf_env = Environment(ENV_NAME)
tf_env.docker.enabled = True
tf_env.docker.base_image = BASE_IMAGE

# Define additional packages to be installed
conda_dep = CondaDependencies()
conda_dep.add_pip_package('tensorflow-gpu==2.3.0')
conda_dep.add_pip_package('pillow')

# Add packages to the environment
tf_env.python.conda_dependencies = conda_dep

# Create the configuration of an experiment
aml_run_config = RunConfiguration()
aml_run_config.environment = tf_env
# The name of a custom environment must not start with 'AzureML'
# https://github.com/MicrosoftDocs/azure-docs/issues/65770#issuecomment-724536550
aml_run_config.environment.name = 'road-segmentation-GPU'

# Create the compute target
compute_target = createAmlCompute(ws, CLUSTER_NAME, VM_SIZE)
Example #21
print('create Batch AI run config')

rc = RunConfiguration(project, "dask_run_config")
rc.environment.docker.enabled = True
rc.prepare_environment = True
rc.target = batchai_cluster_name
rc.environment.python.user_managed_dependencies = False
rc.batchai.node_count = 2

# create a new CondaDependencies obj
cd = CondaDependencies()
# add scikit-learn as a conda dependency
cd.add_conda_package('dask')
cd.add_conda_package('joblib')
cd.add_pip_package('azureml-contrib-daskonbatch')

# overwrite the default conda_dependencies.yml file
cd.save_to_file(project_dir=project_folder, file_name='conda_dependencies.yml')

print()
print('##################################################')
print('submitting {} for a batch ai run...'.format(train_script))
print('##################################################')
print()

print("prepare run...")
prep = Run.prepare_compute_target(project_object=project, run_config=rc)

print(helpers.get_run_history_url(prep))
Example #22
from azureml.pipeline.core.graph import PipelineParameter

data_path = DataPath(datastore=mydatastore, path_on_datastore='rawdata')
datapath1_pipeline_param = PipelineParameter(name="input_datapath",
                                             default_value=data_path)
datapath_input = (datapath1_pipeline_param,
                  DataPathComputeBinding(mode='mount'))

string_pipeline_param = PipelineParameter(name="input_string",
                                          default_value='sample_string1')

compute_config = RunConfiguration()
compute_config.target = "cpu-cluster"

dependencies = CondaDependencies()
dependencies.add_pip_package("adal==0.4.7")
compute_config.environment.python.conda_dependencies = dependencies

StepToWriteDateFile = PythonScriptStep(
    name='StepToWriteDateFile',
    script_name="./DataIngest/StepToWriteDateFile.py",
    arguments=["--arg1", string_pipeline_param, "--arg2", datapath_input],
    inputs=[datapath_input],
    runconfig=compute_config,
    #compute_target='manishautomlstuff',
    source_directory='.')
print("StepToWriteDateFile created")

mydatastore = Datastore.get(workspace, 'billingdatablobstorage')
run = Run.get_context()
runId = run.id
Example #23
from azureml.core import Workspace
from azureml.core.model import Model
from azureml.core.webservice import AciWebservice
from azureml.core.webservice import Webservice
from azureml.core.image import ContainerImage
from azureml.core.conda_dependencies import CondaDependencies

ws = Workspace.from_config()

myenv = CondaDependencies()
myenv.add_pip_package("tensorflow==1.12.0")
myenv.add_pip_package("keras==2.2.4")
myenv.add_pip_package("numpy")

with open("dlenv.yml", "w") as f:
    f.write(myenv.serialize_to_string())

model = Model.register(model_path = "tf_mnist_model.h5",
                       model_name = "tf_mnist_model",
                       tags = {"key": "1"},
                       description = "MNIST Prediction",
                       workspace = ws)

aciconfig = AciWebservice.deploy_configuration(cpu_cores=1,
                                               memory_gb=1,
                                               tags={"data": "MNIST", "method": "tf"},
                                               description='Predict MNIST with tf')
# configure the image
image_config = ContainerImage.image_configuration(execution_script="score.py",
                                                  runtime="python",
                                                  conda_file="dlenv.yml")
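
# The example stops after the image configuration; the matching build-and-deploy
# calls, following the pattern of the other snippets on this page (image and
# service names are illustrative):
image = ContainerImage.create(name="dlenv-image",
                              models=[model],
                              image_config=image_config,
                              workspace=ws)
image.wait_for_creation(show_output=True)

service = Webservice.deploy_from_image(workspace=ws,
                                       image=image,
                                       name='tf-mnist-svc',
                                       deployment_config=aciconfig)
service.wait_for_deployment(show_output=True)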
Example #24
ws = Workspace.create(name='fashiondeeplearning',
                      subscription_id='02e39ba6-b26e-47cd-a81e-90c4c236aabb',
                      resource_group='myresourcegroup',
                      create_resource_group=True,
                      location='westeurope')

model = Model.register(model_path = "fashion.onnx",
                       model_name = "FashionDLModel",
                       description = "Fashion Keras Model",
                       workspace = ws)

myenv = CondaDependencies()
myenv.add_pip_package("numpy")
myenv.add_pip_package("azureml-core")
myenv.add_pip_package("onnxruntime")

with open("myenv.yml","w") as f:
    f.write(myenv.serialize_to_string())

image_config = ContainerImage.image_configuration(execution_script = "score.py",
                                                  runtime = "python",
                                                  conda_file = "myenv.yml",
                                                  description = "test"
                                                 )
image = ContainerImage.create(name = "myonnxmodelimage",
                              models = [model],
                              image_config = image_config,
                              workspace = ws)
Example #25
def build_pipeline_steps(automlconfig: AutoMLConfig,
                         data: Dataset,
                         target_column: str,
                         compute_target: ComputeTarget,
                         group_column_names: list,
                         time_column_name: str,
                         deploy: bool,
                         service_name: str = 'grouping-demo') -> StepSequence:
    steps = []

    metrics_output_name = 'metrics_{}'
    best_model_output_name = 'best_model_{}'
    count = 0
    model_names = []

    # get all automl configs by group
    configs = _get_configs(automlconfig, data, target_column, compute_target, group_column_names)

    # build a runconfig for register model
    register_config = RunConfiguration()
    cd = CondaDependencies()
    cd.add_pip_package('azureml-pipeline')
    register_config.environment.python.conda_dependencies = cd

    # create each automl step end-to-end (train, register)
    for group_name, conf in configs.items():
        # create automl metrics output
        metrics_data = PipelineData(
            name='metrics_data_{}'.format(group_name),
            pipeline_output_name=metrics_output_name.format(group_name),
            training_output=TrainingOutput(type='Metrics'))
        # create automl model output
        model_data = PipelineData(
            name='model_data_{}'.format(group_name),
            pipeline_output_name=best_model_output_name.format(group_name),
            training_output=TrainingOutput(type='Model', metric=conf.user_settings['primary_metric']))

        automl_step = AutoMLStep(
            name='automl_{}'.format(group_name),
            automl_config=conf,
            outputs=[metrics_data, model_data],
            allow_reuse=True)
        steps.append(automl_step)

        # pass the group name as a parameter to the register step ->
        # this will become the name of the model for this group.
        group_name_param = PipelineParameter("group_name_{}".format(count), default_value=group_name)
        count += 1

        reg_model_step = PythonScriptStep(
            'register.py',
            name='register_{}'.format(group_name),
            arguments=["--model_name", group_name_param, "--model_path", model_data],
            inputs=[model_data],
            compute_target=compute_target,
            runconfig=register_config,
            source_directory="register",
            allow_reuse=True
        )
        steps.append(reg_model_step)
        model_names.append(group_name)

    final_steps = steps
    if deploy:
        # modify the conda dependencies to ensure we pick up correct
        # versions of azureml-defaults and azureml-train-automl
        cd = CondaDependencies.create(pip_packages=['azureml-defaults', 'azureml-train-automl'])
        automl_deps = CondaDependencies(conda_dependencies_file_path='deploy/myenv.yml')
        cd._merge_dependencies(automl_deps)
        cd.save('deploy/myenv.yml')

        # add deployment step
        pp_group_column_names = PipelineParameter(
            "group_column_names",
            default_value="#####".join(list(reversed(group_column_names))))

        pp_model_names = PipelineParameter(
            "model_names",
            default_value=json.dumps(model_names))

        pp_service_name = PipelineParameter(
            "service_name",
            default_value=service_name)

        deployment_step = PythonScriptStep(
            'deploy.py',
            name='service_deploy',
            arguments=["--group_column_names", pp_group_column_names,
                       "--model_names", pp_model_names,
                       "--service_name", pp_service_name,
                       "--time_column_name", time_column_name],
            compute_target=compute_target,
            runconfig=RunConfiguration(),
            source_directory="deploy"
        )
        final_steps = StepSequence(steps=[steps, deployment_step])

    return final_steps
Example #26
def main():

    ws = Workspace.from_config()

    conda = CondaDependencies()
    conda.add_conda_package("python==3.5")
    conda.add_pip_package("h5py==2.8.0")
    conda.add_pip_package("html5lib==1.0.1")
    conda.add_pip_package("keras==2.2.0")
    conda.add_pip_package("Keras-Applications==1.0.2")
    conda.add_pip_package("Keras-Preprocessing==1.0.1")
    conda.add_pip_package("matplotlib==2.2.2")
    conda.add_pip_package("numpy==1.14.5")
    conda.add_pip_package("opencv-python==3.3.0.9")
    conda.add_pip_package("pandas==0.23.3")
    conda.add_pip_package("Pillow==5.2.0")
    conda.add_pip_package("requests==2.19.1")
    conda.add_pip_package("scikit-image==0.14.0")
    conda.add_pip_package("scikit-learn==0.19.2")
    conda.add_pip_package("scipy==1.1.0")
    conda.add_pip_package("sklearn==0.0")
    conda.add_pip_package("tensorflow==1.9.0")
    conda.add_pip_package("urllib3==1.23")
    conda.add_pip_package("azureml-sdk")

    with open("environment.yml", "w") as f:
        f.write(conda.serialize_to_string())

    with open("environment.yml", "r") as f:
        print(f.read())

    image_config = ContainerImage.image_configuration(
        execution_script="score.py",
        runtime="python",
        conda_file="environment.yml",
        docker_file="Dockerfile",
        dependencies=DEPENDENCIES)

    webservices = Webservice.list(ws, compute_type='ACI')

    image = ContainerImage.create(name="ai-bootcamp",
                                  models=[],
                                  image_config=image_config,
                                  workspace=ws)

    image.wait_for_creation(show_output=True)

    webservices_list = []
    for webservice in webservices:
        webservices_list.append(webservice.name)

    service_name = webservices_list[0]

    aciwebservice = AciWebservice(ws, service_name)

    aciwebservice.update(image=image)
Example #27
# # Describe your environment
# Each modelling process may require a unique set of packages. Therefore we need to create a dependency file providing instructions to AML on how to construct a Docker image that can support the models and any other objects required for inferencing. In the following cell, we create an environment dependency file, myenv.yml, that specifies which libraries are needed by the scoring script. You can create this file manually, or use the CondaDependencies class to create it for you.
#
# Next, we use this environment file to describe the Docker container that we need to create in order to deploy our model. This container is created using our environment description and includes our scoring script.
#

# In[28]:

from azureml.core.conda_dependencies import CondaDependencies
from azureml.core.environment import Environment

env = Environment(name="env")

myenv = CondaDependencies()
myenv.add_pip_package("numpy")
myenv.add_pip_package("azureml-core")
myenv.add_pip_package("scikit-learn")

# Adds dependencies to PythonSection of myenv
env.python.conda_dependencies = myenv

print(myenv.serialize_to_string())

with open("myenv.yml", "w") as f:
    f.write(myenv.serialize_to_string())

# # Create an image configuration
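
# The code cell for this section is not shown in this excerpt; a minimal
# sketch using the (since-deprecated) ContainerImage API, assuming the
# score.py entry script and the myenv.yml written above:
from azureml.core.image import ContainerImage

image_config = ContainerImage.image_configuration(execution_script="score.py",
                                                  runtime="python",
                                                  conda_file="myenv.yml")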

# # Deploy your webservice
# The final step to deploying your webservice is to call Webservice.deploy_from_model(). This function uses the deployment and image configurations created above to perform the following:
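
# The enumerated steps and the original call are not shown in this excerpt; a
# minimal sketch of Webservice.deploy_from_model, assuming `ws`, a registered
# `model`, and the image_config above (the service name is illustrative):
from azureml.core.webservice import AciWebservice, Webservice

aci_config = AciWebservice.deploy_configuration(cpu_cores=1, memory_gb=1)
service = Webservice.deploy_from_model(workspace=ws,
                                       name='sklearn-svc',
                                       models=[model],
                                       image_config=image_config,
                                       deployment_config=aci_config)
service.wait_for_deployment(show_output=True)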
Example #28
#!/usr/bin/env python
# coding: utf-8

# In[1]:

from azureml.core.conda_dependencies import CondaDependencies

myenv = CondaDependencies()
myenv.add_conda_package("pytorch")
myenv.add_conda_package("torchvision")
myenv.add_channel("pytorch")
myenv.add_pip_package("transformers")
myenv.add_pip_package("tokenizers")

env_file = "env_pytorch.yml"

with open(env_file, "w") as f:
    f.write(myenv.serialize_to_string())
print("Saved dependency info in", env_file)

with open(env_file, "r") as f:
    print(f.read())

# In[ ]:

# In[2]:

from azureml.core.environment import Environment

# In[3]:
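
# The notebook is truncated here; cell [3] plausibly builds an Environment
# from the saved spec, e.g.:
pytorch_env = Environment.from_conda_specification(name="pytorch-env",
                                                   file_path=env_file)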
Example #29
environment_variables = {
    'POSTGRES_PASSWORD': os.environ['POSTGRES_PASSWORD'],
    'POSTGRES_HOSTNAME': 'ackbar-postgres.postgres.database.azure.com',
    'AZURE_STORAGE_CONNECTION_STRING': os.environ['AZURE_STORAGE_CONNECTION_STRING']
}
env = Environment(name='env', environment_variables=environment_variables)
conda = CondaDependencies()
conda.add_conda_package('psycopg2')
conda.add_conda_package('numpy')
conda.add_conda_package('Pillow')
# have to use pip to install azure packages...
conda.add_pip_package('azure-storage-blob')
env.python.conda_dependencies = conda
run_config = RunConfiguration()
run_config.environment = env

PROJECT = 'caltech'

prepare_step = PythonScriptStep(
    script_name='prepare.py',
    arguments=['--output', batch_input, '--project', PROJECT],
    inputs=[],
    outputs=[batch_input],
    compute_target=compute_target,
    source_directory='pipeline',
    runconfig=run_config,
    params=environment_variables,
Example #30
from azureml.core.conda_dependencies import CondaDependencies

myenv = CondaDependencies()

myenv.add_pip_package("numpy")
myenv.add_pip_package("scikit-learn")
# myenv.add_conda_package("nltk")


with open("aml_config/myenv.yml", "w") as f:
    f.write(myenv.serialize_to_string())