Example #1
def get_environment(
    ws,
    environment_name,
    docker_image="todrabas/aml_rapids:latest",
    python_interpreter="/opt/conda/envs/rapids/bin/python",
    conda_packages=["matplotlib"],
):
    if environment_name not in ws.environments:
        env = Environment(name=environment_name)
        env.docker.enabled = True
        env.docker.base_image = docker_image

        env.python.interpreter_path = python_interpreter
        env.python.user_managed_dependencies = True

        conda_dep = CondaDependencies()

        for conda_package in conda_packages:
            conda_dep.add_conda_package(conda_package)

        env.python.conda_dependencies = conda_dep
        env.register(workspace=ws)
    else:
        env = ws.environments[environment_name]

    return env
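# A minimal usage sketch of get_environment (not from the original source);
# "rapids-env" is a hypothetical environment name.
from azureml.core import Workspace

ws = Workspace.from_config()
env = get_environment(ws, "rapids-env")
print(env.name)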
Example #2
def get_run_cfg(ws, pip_packages, conda_packages, ext_wheels, gpu=True):
    '''
    get_run_cfg - Retrieves the AMLS run configuration.

    :param ws: AMLS workspace
    :param pip_packages: list of pip packages to install
    :param conda_packages: list of conda packages to install
    :param ext_wheels: list of local paths to private pip wheels
    :param gpu: whether to target the default GPU base image

    :returns: AMLS run configuration
    :rtype: RunConfiguration object
    '''
    conda_dep = CondaDependencies()
    for pip_package in pip_packages:
        conda_dep.add_pip_package(pip_package)
    for conda_package in conda_packages:
        conda_dep.add_conda_package(conda_package)
    for whl_path in ext_wheels:
        whl_url = Environment.add_private_pip_wheel(workspace=ws,
                                                    file_path=whl_path,
                                                    exist_ok=True)
        conda_dep.add_pip_package(whl_url)
    run_cfg = RunConfiguration(conda_dependencies=conda_dep)
    run_cfg.environment.docker.enabled = True
    run_cfg.environment.docker.gpu_support = gpu
    if gpu:
        run_cfg.environment.docker.base_image = DEFAULT_GPU_IMAGE
    else:
        run_cfg.environment.docker.base_image = DEFAULT_CPU_IMAGE
    run_cfg.environment.spark.precache_packages = False
    return run_cfg
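# A hedged usage sketch of get_run_cfg (not from the original source); `ws` is an
# existing Workspace, and the package lists and wheel path are placeholders.
run_cfg = get_run_cfg(ws,
                      pip_packages=['azureml-defaults'],
                      conda_packages=['numpy'],
                      ext_wheels=['dist/mypkg-0.1-py3-none-any.whl'],
                      gpu=False)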
Example #3
def createOrGetEnvironment(ws, login_config, app_config):
    environment_name = login_config["aml_compute"]["environment_name"]
    python_interpreter = login_config["aml_compute"]["python_interpreter"]
    conda_packages = login_config["aml_compute"]["conda_packages"]

    ### CREATE OR RETRIEVE THE ENVIRONMENT
    if environment_name not in ws.environments:
        logger.info(f"Creating {environment_name} environment...")
        env = Environment(name=environment_name)
        env.docker.enabled = login_config["aml_compute"]["docker_enabled"]
        env.docker.base_image = None
        env.docker.base_dockerfile = f'FROM {app_config["base_dockerfile"]}'
        env.python.interpreter_path = python_interpreter
        env.python.user_managed_dependencies = True
        conda_dep = CondaDependencies()

        for conda_package in conda_packages:
            conda_dep.add_conda_package(conda_package)

        env.python.conda_dependencies = conda_dep
        env.register(workspace=ws)
    else:
        logger.info(f"    Environment {environment_name} found...")
        env = ws.environments[environment_name]

    return env
Example #4
def main():

    ws = Workspace.from_config()

    conda = CondaDependencies()
    conda.add_conda_package("python==3.5")
    conda.add_pip_package("h5py==2.8.0")
    conda.add_pip_package("html5lib==1.0.1")
    conda.add_pip_package("keras==2.2.0")
    conda.add_pip_package("Keras-Applications==1.0.2")
    conda.add_pip_package("Keras-Preprocessing==1.0.1")
    conda.add_pip_package("matplotlib==2.2.2")
    conda.add_pip_package("numpy==1.14.5")
    conda.add_pip_package("opencv-python==3.3.0.9")
    conda.add_pip_package("pandas==0.23.3")
    conda.add_pip_package("Pillow==5.2.0")
    conda.add_pip_package("requests==2.19.1")
    conda.add_pip_package("scikit-image==0.14.0")
    conda.add_pip_package("scikit-learn==0.19.2")
    conda.add_pip_package("scipy==1.1.0")
    conda.add_pip_package("sklearn==0.0")
    conda.add_pip_package("tensorflow==1.9.0")
    conda.add_pip_package("urllib3==1.23")
    conda.add_pip_package("azureml-sdk")

    with open("environment.yml", "w") as f:
        f.write(conda.serialize_to_string())

    with open("environment.yml", "r") as f:
        print(f.read())

    image_config = ContainerImage.image_configuration(
        execution_script="score.py",
        runtime="python",
        conda_file="environment.yml",
        docker_file="Dockerfile",
        dependencies=DEPENDENCIES)

    webservices = Webservice.list(ws, compute_type='ACI')  # assumes azureml.core.webservice.Webservice is imported

    image = ContainerImage.create(name="ai-bootcamp",
                                  models=[],
                                  image_config=image_config,
                                  workspace=ws)

    image.wait_for_creation(show_output=True)

    webservices_list = [svc.name for svc in webservices]

    service_name = webservices_list[0]

    aciwebservice = AciWebservice(ws, service_name)

    aciwebservice.update(image=image)
Example #5
def create_yaml_file():
    myenv = CondaDependencies()
    myenv.add_conda_package("scikit-learn")
    myenv.add_conda_package("pandas")

    with open("myenv.yml", "w") as f:
        f.write(myenv.serialize_to_string())

    with open("myenv.yml", "r") as f:
        print(f.read())
Example #6
    def conda_dependencies(self):
        """
        Get module conda dependencies

        :return: CondaDependencies instance
        """
        cd = CondaDependencies()
        for c in self._get_value('CondaDependencies/CondaChannels'):
            cd.add_channel(c)
        for c in self._get_value('CondaDependencies/CondaPackages'):
            cd.add_conda_package(c)
        for p in self._get_value('CondaDependencies/PipPackages'):
            cd.add_pip_package(p)
        for p in self._get_value('CondaDependencies/PipOptions'):
            cd.set_pip_option(p)
        return cd
Example #7
def get_config(entry_script):
    # Create the environment
    env = Environment(name="tensorflow_env")

    conda_dep = CondaDependencies()

    # Define the packages needed by the model and scripts
    conda_dep.add_conda_package("tensorflow")

    # You must list azureml-defaults as a pip dependency
    conda_dep.add_pip_package("azureml-defaults")
    conda_dep.add_pip_package("keras")
    conda_dep.add_pip_package("pandas")

    # Adds dependencies to PythonSection of myenv
    env.python.conda_dependencies = conda_dep

    inference_config = InferenceConfig(entry_script=entry_script,
                                       environment=env)

    print('Endpoint configuration returned')
    return inference_config
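# The returned InferenceConfig is typically paired with a deployment configuration.
# A minimal ACI sketch (not from the original source), assuming a workspace `ws`
# and an already registered model object `model`:
from azureml.core.model import Model
from azureml.core.webservice import AciWebservice

deployment_config = AciWebservice.deploy_configuration(cpu_cores=1, memory_gb=1)
service = Model.deploy(ws, "tensorflow-service", [model],
                       get_config("score.py"), deployment_config)
service.wait_for_deployment(show_output=True)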
Example #8
def create_env():
    myenv = CondaDependencies()
    myenv.add_conda_package("pytorch")
    myenv.add_conda_package("numpy")
    myenv.add_conda_package("torchvision=0.4.1")

    with open("./myenv.yml", "w") as f:
        f.write(myenv.serialize_to_string())

    return "./myenv.yml"
Example #9
from azureml.core import Workspace, Environment
from azureml.core.conda_dependencies import CondaDependencies

ws = Workspace.from_config()

# Start from minimal
my_env = Environment.get(workspace=ws, name="AzureML-Minimal")

# Rename the env
my_env.name = "gensim-environment"

# Add dependencies
conda_deps = CondaDependencies()
conda_deps.add_conda_package("gensim")
conda_deps.add_conda_package("nltk")

my_env.python.conda_dependencies = conda_deps

# Register the env
my_env.register(workspace=ws)
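# Once registered, the environment can be retrieved by name in a later session;
# a minimal sketch (not from the original source):
restored_env = Environment.get(workspace=ws, name="gensim-environment")
print(restored_env.name)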
Example #10
# Shows output of the run on stdout.
run.wait_for_completion(show_output=True)

# Editing a run configuration property on-fly.
run_config = RunConfiguration.load(project_object=project,
                                   run_config_name="local")

# Use a new conda environment that is to be created from the conda_dependencies.yml file
run_config.environment.python.user_managed_dependencies = False

# Automatically create the conda environment before the run
run_config.prepare_environment = True

from azureml.core.conda_dependencies import CondaDependencies
cd = CondaDependencies()
cd.add_conda_package('scikit-learn')
cd.save_to_file(project_dir=project_folder, file_name='conda_dependencies.yml')

print()
print('##################################################')
print("Submitting {} for a local conda run...".format(train_script))
print('##################################################')
print()
run = Run.submit(project_object=project,
                 run_config=run_config,
                 script_to_run=train_script)

print(helpers.get_run_history_url(run))

# Shows output of the run on stdout.
run.wait_for_completion(show_output=True)
Example #11
def main():
    parser = argparse.ArgumentParser(
        description="NGC Set Up on AzureML Compute Cluster")
    parser.add_argument("--config_file",
                        type=str,
                        help="location of config file")
    args = parser.parse_args()
    config_file = args.config_file

    print(config_file)
    configdata = ngccontent.get_config(config_file)
    subscription_id = configdata["azureml_user"]["subscription_id"]
    resource_group = configdata["azureml_user"]["resource_group"]
    workspace_name = configdata["azureml_user"]["workspace_name"]

    ws = Workspace(workspace_name=workspace_name,
                   subscription_id=subscription_id,
                   resource_group=resource_group)

    verify = f'''
    Subscription ID: {subscription_id}
    Resource Group: {resource_group}
    Workspace: {workspace_name}'''
    print(verify)

    ### vnet settings
    vnet_rg = ws.resource_group
    vnet_name = configdata["aml_compute"]["vnet_name"]
    subnet_name = configdata["aml_compute"]["subnet_name"]

    ### azure ml names
    ct_name = configdata["aml_compute"]["ct_name"]
    exp_name = configdata["aml_compute"]["exp_name"]

    ### trust but verify
    verify = f'''
    vNET RG: {vnet_rg}
    vNET name: {vnet_name}
    vNET subnet name: {subnet_name}
    Compute target: {ct_name}
    Experiment name: {exp_name}'''
    print(verify)

    if configdata["aml_compute"]["vm_name"] in configdata["supported_vm_sizes"]:
        vm_name = configdata["aml_compute"]["vm_name"]
        gpus_per_node = configdata["supported_vm_sizes"][vm_name]

        print(
            "Setting up compute target {ct_name} with vm_size: {vm_name} with {gpus_per_node} GPUs"
            .format(ct_name=ct_name,
                    vm_name=vm_name,
                    gpus_per_node=gpus_per_node))

        if ct_name not in ws.compute_targets:
            config = AmlCompute.provisioning_configuration(
                vm_size=vm_name,
                min_nodes=configdata["aml_compute"]["min_nodes"],
                max_nodes=configdata["aml_compute"]["max_nodes"],
                vnet_resourcegroup_name=vnet_rg,
                vnet_name=vnet_name,
                subnet_name=subnet_name,
                idle_seconds_before_scaledown=configdata["aml_compute"]
                ["idle_seconds_before_scaledown"],
                remote_login_port_public_access='Enabled')
            ct = ComputeTarget.create(ws, ct_name, config)
            ct.wait_for_completion(show_output=True)
        else:
            print("Loading Pre-existing Compute Target {ct_name}".format(
                ct_name=ct_name))
            ct = ws.compute_targets[ct_name]
    else:
        vm_name = configdata["aml_compute"]["vm_name"]
        print("Unsupported vm_size {vm_size}".format(vm_size=vm_name))
        print("The specified vm size must be one of ...")
        for azure_gpu_vm_size in configdata["supported_vm_sizes"].keys():
            print("... " + azure_gpu_vm_size)
        raise Exception(
            "{vm_size} does not support Pascal or above GPUs".format(
                vm_size=vm_name))

    environment_name = configdata["aml_compute"]["environment_name"]
    python_interpreter = configdata["aml_compute"]["python_interpreter"]
    conda_packages = configdata["aml_compute"]["conda_packages"]

    if environment_name not in ws.environments:
        env = Environment(name=environment_name)
        env.docker.enabled = configdata["aml_compute"]["docker_enabled"]
        env.docker.base_image = None
        env.docker.base_dockerfile = "FROM {dockerfile}".format(
            dockerfile=configdata["ngc_content"]["base_dockerfile"])
        env.python.interpreter_path = python_interpreter
        env.python.user_managed_dependencies = True
        conda_dep = CondaDependencies()

        for conda_package in conda_packages:
            conda_dep.add_conda_package(conda_package)

        env.python.conda_dependencies = conda_dep
        env.register(workspace=ws)
    else:
        env = ws.environments[environment_name]

    amlcluster = AzureMLComputeCluster(
        workspace=ws,
        compute_target=ct,
        initial_node_count=1,
        experiment_name=configdata["aml_compute"]["exp_name"],
        environment_definition=env,
        use_gpu=True,
        n_gpus_per_node=1,
        jupyter=True,
        jupyter_port=configdata["aml_compute"]["jupyter_port"],
        dashboard_port=9001,
        scheduler_port=9002,
        scheduler_idle_timeout=1200,
        worker_death_timeout=30,
        additional_ports=[],
        datastores=[],
        telemetry_opt_out=True,
        asynchronous=False)

    print(amlcluster.jupyter_link)
    print('Exiting script')
Example #12
model = Model.register(model_path = filename,
                       model_name = "textblob",
                       tags = {"key": "2"},
                       description = "Sentiment Prediction",
                       workspace = ws)



aciconfig = AciWebservice.deploy_configuration(cpu_cores=1, 
                                               memory_gb=1, 
                                               tags={"data": "sentiment",  "method" : "textblob"}, 
                                               description='Predict Sentiment Score')


textblobenv = CondaDependencies()
textblobenv.add_conda_package("scikit-learn")
# textblobenv.add_conda_package("textblob")
# textblobenv.add_conda_package("pickle")
# textblobenv.add_conda_package("dill")

with open("textblobenv.yml","w") as f:
    f.write(textblobenv.serialize_to_string())
with open("textblobenv.yml","r") as f:
    print(f.read())

#############################
%%writefile score.py

import json
import numpy as np
import os
Example #13
        },
        description=
        "Output labels of the retrained Inception V3 model with flower photos",
        workspace=ws)

    print("Checking AKS state...")
    status = aks_target.get_status()
    while status not in ('Succeeded', 'Failed'):
        print('current status: {} - waiting...'.format(status))
        time.sleep(30)
        status = aks_target.get_status()

    print("Creating image and service configuration...")
    myenv = CondaDependencies()
    myenv.add_tensorflow_conda_package(core_type='cpu')
    myenv.add_conda_package("numpy")

    with open(os.path.join(project_folder, "myenv.yml"), "w") as f:
        f.write(myenv.serialize_to_string())

    shutil.copy("./scripts/score_flowers.py", './')

    # configure the image
    image_config = ContainerImage.image_configuration(
        execution_script="score_flowers.py",
        runtime="python",
        conda_file=os.path.join(project_folder, "myenv.yml"))

    aks_config = AksWebservice.deploy_configuration(
        cpu_cores=1,
        memory_gb=1,
Example #14
                                   output_name='classification_data',
                                   is_directory=True)

compute_target = ws.compute_targets['cpu-cluster']

environment_variables = {
    'POSTGRES_PASSWORD': os.environ['POSTGRES_PASSWORD'],
    'POSTGRES_HOSTNAME': 'ackbar-postgres.postgres.database.azure.com',
    'AZURE_STORAGE_CONNECTION_STRING': os.environ['AZURE_STORAGE_CONNECTION_STRING'],
}
env = Environment(name='env', environment_variables=environment_variables)
conda = CondaDependencies()
conda.add_conda_package('psycopg2')
conda.add_conda_package('numpy')
conda.add_conda_package('Pillow')
# have to use pip to install azure packages...
conda.add_pip_package('azure-storage-blob')
env.python.conda_dependencies = conda
run_config = RunConfiguration()
run_config.environment = env

PROJECT = 'caltech'

prepare_step = PythonScriptStep(
    script_name='prepare.py',
    arguments=['--output', batch_input, '--project', PROJECT],
    inputs=[],
    outputs=[batch_input],
Example #15
        x = F.dropout(x, training=self.training)
        x = x.view(-1, 32 * 32 * 24)
        x = self.fc(x)
        return F.log_softmax(x, dim=1)


'''
Create an environment file

The web service will be hosted in a container, and the container will need to install any Python dependencies when it gets initialized. In this case, our scoring code requires the torch and torchvision Python libraries, so we'll create a .yml file that tells the container host to install these into the environment along with the default libraries used by Azure ML.
'''

from azureml.core.conda_dependencies import CondaDependencies 

myenv = CondaDependencies()
myenv.add_conda_package("pytorch")
myenv.add_conda_package("torchvision")
myenv.add_channel("pytorch")

env_file = "env_pytorch.yml"

with open(env_file,"w") as f:
    f.write(myenv.serialize_to_string())
print("Saved dependency info in", env_file)

with open(env_file,"r") as f:
    print(f.read())

'''
Deploy the web service
Example #16
# Prepare environment config
from azureml.core.conda_dependencies import CondaDependencies
from azureml.core.model import InferenceConfig
from azureml.core.environment import Environment
if 'temp' not in os.listdir():
    os.mkdir('temp')
df_test.to_json("temp/test_sample.json")  # save data for external tests

# Create the environment
myenv = Environment(name="mortgage_score_env_hd")
conda_dep = CondaDependencies()

# Define the packages needed by the model and scripts
conda_dep.add_conda_package("numpy")
conda_dep.add_conda_package("pip")
conda_dep.add_conda_package("scikit-learn=0.20.3")
# You must list azureml-defaults as a pip dependency
conda_dep.add_pip_package("azureml-defaults==1.11.0")
conda_dep.add_pip_package("azureml-core")
conda_dep.add_pip_package("azureml-automl-runtime")
conda_dep.add_pip_package("packaging")
conda_dep.add_pip_package("azureml-explain-model==1.11.0")
conda_dep.add_pip_package("inference-schema")
conda_dep.add_conda_package("numpy")
# scikit-learn>=0.19.0,<=0.20.3
conda_dep.add_conda_package("pandas")
conda_dep.add_conda_package("py-xgboost")
# Save environment also locally to disk so we can test the score script directly by creating a local environment
conda_dep.save('temp/mortgage_score_env.yml')
Example #17
model = Model.register(model_name=MODEL_NAME,
                       model_path=MODEL_PATH,
                       workspace=ws,
                       description=MODEL_DESCRIPTION)
print(model.name, model.id, model.version, sep='\t')

#Set the image
aciconfig = AciWebservice.deploy_configuration(cpu_cores=CPU_CORES,
                                               memory_gb=MEMORY_GB,
                                               description=SERVICE_DESCRIPTION,
                                               auth_enabled=AUTH_ENABLED)

if CONDA_FILE_URL == '' and DOCKER_FILE_URL == '':
    from azureml.core.conda_dependencies import CondaDependencies
    myenv = CondaDependencies()
    myenv.add_conda_package("scikit-learn")
    #myenv.add_pip_package("joblib")
    with open("myenv.yml", "w") as f:
        f.write(myenv.serialize_to_string())
    # configure the image
    image_config = ContainerImage.image_configuration(
        execution_script=EXECUTION_SCRIPT_PATH,
        runtime="python",
        conda_file="myenv.yml")
elif CONDA_FILE_URL != '' and DOCKER_FILE_URL == '':
    wget.download(CONDA_FILE_URL, CONDA_FILE_PATH)
    image_config = ContainerImage.image_configuration(
        execution_script=EXECUTION_SCRIPT_PATH,
        runtime="python",
        conda_file=CONDA_FILE_PATH)
elif DOCKER_FILE_URL != '':
Example #18
# register model 
from azureml.core import Run
model = Model.register(ws, model_name='objectmodelchal5', model_path='objectmodelchal5.h5')
print(model.name, model.id, model.version, sep = '\t')


# ## YAML EXPORT



from azureml.core.conda_dependencies import CondaDependencies 

myenv = CondaDependencies()
myenv.add_conda_package("keras")
myenv.add_conda_package("pillow")
myenv.add_conda_package("numpy")
myenv.add_conda_package("requests")
myenv.add_conda_package("h5py")

with open("myenv.yml","w") as f:
    f.write(myenv.serialize_to_string())




model=Model(ws, 'objectmodelchal5')
model.download(target_dir = '.')
Example #19
                                              provisioning_config)
    compute_target.wait_for_provisioning(show_output=True)

print('create Batch AI run config')

rc = RunConfiguration(project, "dask_run_config")
rc.environment.docker.enabled = True
rc.prepare_environment = True
rc.target = batchai_cluster_name
rc.environment.python.user_managed_dependencies = False
rc.batchai.node_count = 2

# create a new CondaDependencies obj
cd = CondaDependencies()
# add scikit-learn as a conda dependency
cd.add_conda_package('dask')
cd.add_conda_package('joblib')
cd.add_pip_package('azureml-contrib-daskonbatch')

# overwrite the default conda_dependencies.yml file
cd.save_to_file(project_dir=project_folder, file_name='conda_dependencies.yml')

print()
print('##################################################')
print('submitting {} for a batch ai run...'.format(train_script))
print('##################################################')
print()

print("prepare run...")
prep = Run.prepare_compute_target(project_object=project, run_config=rc)
Example #20
def conda_deps():
    deps = CondaDependencies(f'{project_folder}/environment.yml')
    deps.add_channel("conda-forge")
    deps.add_conda_package('curl')
    return deps
Example #21
from azureml.core.model import InferenceConfig
from azureml.core.environment import Environment
from azureml.core.conda_dependencies import CondaDependencies

# Create the environment
myenv = Environment(name="myenv")
conda_dep = CondaDependencies()

# Define the packages needed by the model and scripts
conda_dep.add_conda_package("tensorflow")
conda_dep.add_conda_package("numpy")
conda_dep.add_conda_package("scikit-learn")
# You must list azureml-defaults as a pip dependency
conda_dep.add_pip_package("azureml-defaults")
conda_dep.add_pip_package("keras")
conda_dep.add_pip_package("gensim")

# Adds dependencies to PythonSection of myenv
myenv.python.conda_dependencies = conda_dep

inference_config = InferenceConfig(entry_script="score.py", environment=myenv)
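# For quick iteration, the same inference config can be deployed to a local Docker
# webservice before targeting ACI or AKS. A sketch (not from the original source),
# assuming a workspace `ws` and a registered model `model`:
from azureml.core.model import Model
from azureml.core.webservice import LocalWebservice

local_config = LocalWebservice.deploy_configuration(port=8890)  # arbitrary port
service = Model.deploy(ws, "local-test", [model], inference_config, local_config)
service.wait_for_deployment(show_output=True)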
Example #22
    ws = load_workspace()

    # Grab the model object from the list of available models
    model_list = Model.list(workspace=ws)
    model = None

    # Unpack the generator and look through the list to find your desired model
    model, = (m for m in model_list
              if m.version == model_version and m.name == model_name)
    print(
        'Model picked: {} \nModel Description: {} \nModel Version: {}'.format(
            model.name, model.description, model.version))

    dependencies = CondaDependencies()
    dependencies.add_conda_package("numpy")
    dependencies.add_conda_package("matplotlib")
    dependencies.add_conda_package("scikit-learn")
    dependencies.add_conda_package("tensorflow")
    dependencies.add_conda_package("keras")
    dependencies.add_conda_package("scikit-image")
    dependencies.add_pip_package("pynacl==1.2.1")

    os.makedirs("./score/", exist_ok=True)
    with open("./score/dependencies.yml", "w") as f:
        f.write(dependencies.serialize_to_string())

    original_dir = os.getcwd()
    # Change directory since the docker container is expecting things at the TLD
    os.chdir("./score")
    image_config = ContainerImage.image_configuration(
Example #23
    model_path = Model.get_model_path("classi_model")
    model = joblib.load(model_path)


def run(raw_data):
    data = np.array(json.loads(raw_data)['data'])
    pred = model.predict(data)
    return pred.tolist()


# Need to create a script for Environment

from azureml.core.conda_dependencies import CondaDependencies

my_env = CondaDependencies()
my_env.add_conda_package('scikit-learn')

env_file = 'service_files/env.yml'

with open(env_file, "w") as f:
    f.write(my_env.serialize_to_string())

print("Saved dependency info in", env_file)

#combining both of the script in inference config

from azureml.core.model import InferenceConfig

class_inference_config = InferenceConfig(runtime='python',
                                         source_directory='service_files',
                                         entry_script='score.py',
Example #24
from azureml.core.model import Model

from azureml.core import Workspace
ws = Workspace.from_config(path=".azureml/config.json")

# Inference Configuration
from azureml.core.model import InferenceConfig
from azureml.core.environment import Environment
from azureml.core.conda_dependencies import CondaDependencies

# Create the environment
myenv = Environment(name="myenv")
conda_dep = CondaDependencies()

# Define the packages needed by the model and scripts
conda_dep.add_conda_package("python=3.6.2")
conda_dep.add_conda_package("numpy=1.18.5")
conda_dep.add_conda_package("scikit-learn")
conda_dep.add_conda_package("pip")
# You must list azureml-defaults as a pip dependency
conda_dep.add_pip_package("azureml-defaults")
conda_dep.add_pip_package("tensorflow==2.3.0")
conda_dep.add_pip_package("keras==2.4.3")

# Adds dependencies to PythonSection of myenv
myenv.python.conda_dependencies = conda_dep

inference_config = InferenceConfig(entry_script="./source_dir/score.py",
                                   environment=myenv)

# Deployment config
Example #25
run.log('average', average)
run.log("Experiment end time", str(datetime.datetime.now()))
run.complete()


model = Model.register(model_path = filename,
                       model_name = "ta_model",
                       tags = {"key": "1"},
                       description = "TextBlob Prediction",
                       workspace = ws)



ta_env = CondaDependencies()
ta_env.add_conda_package("scikit-learn")
ta_env.add_conda_package("dill")
 
with open("ta_env.yml","w") as f:
    f.write(ta_env.serialize_to_string())
with open("ta_env.yml","r") as f:
    print(f.read())                       


%%time
 
image_config = ContainerImage.image_configuration(execution_script="score.py", 
                                                  runtime="python", 
                                                  conda_file="ta_env.yml")    

Example #26
    df = pd.DataFrame([data[1:]], columns=featurenames)
    
    # make prediction
    if data[0] == 'iq':
        result = iq_model.predict(df).astype(int)
    elif data[0] == 'sj':
        result = sj_model.predict(df).astype(int)

    # you can return any data type as long as it is JSON-serializable
    return result.tolist()

#%% create environment file for deployment
from azureml.core.conda_dependencies import CondaDependencies 

mymodelenv = CondaDependencies()
mymodelenv.add_conda_package("scikit-learn")
mymodelenv.add_conda_package("pandas")
mymodelenv.add_conda_package("statsmodels")
mymodelenv.add_conda_package("scipy=1.2")
mymodelenv.add_conda_package("numpy")

with open("mymodelenv.yml","w") as f:
    f.write(mymodelenv.serialize_to_string())
    
with open("mymodelenv.yml","r") as f:
    print(f.read())
    
print('Complete') 


#%% create container image
Example #27
run_config.container_instance.memory_gb = 2

# enable Docker 
run_config.environment.docker.enabled = True

# set Docker base image to the default CPU-based image
run_config.environment.docker.base_image = azureml.core.runconfig.DEFAULT_MMLSPARK_CPU_IMAGE
print('base image is', run_config.environment.docker.base_image)
#run_config.environment.docker.base_image = 'microsoft/mmlspark:plus-0.9.9'

# use conda_dependencies.yml to create a conda environment in the Docker image for execution
# please update this file if you need additional packages.
run_config.environment.python.user_managed_dependencies = False

cd = CondaDependencies()
cd.add_conda_package('numpy')
# overwrite the default conda_dependencies.yml file
cd.save_to_file(project_dir = project_folder, file_name='conda_dependencies.yml')

# auto-prepare the Docker image when used for execution (if it is not already prepared)
run_config.prepare_environment = True

print()
print('##################################################')
print('submitting {} for a Spark run on ACI...'.format(train_script))
print('##################################################')
print()

run = Run.submit(project_object = project, 
                 run_config = run_config, 
                 script_to_run = "train-spark.py")
Example #28
def generate_yaml(
    directory: str,
    ref_filename: str,
    needed_libraries: list,
    conda_filename: str,
):
    """
    Creates a deployment-specific yaml file as a subset of
    the image classification environment.yml

    Also adds extra libraries, if not present in environment.yml

    Args:
        directory (string): Directory name of reference yaml file
        ref_filename (string): Name of reference yaml file
        needed_libraries (list of strings): List of libraries needed
        in the Docker container
        conda_filename (string): Name of yaml file to be deployed
        in the Docker container

    Returns: Nothing

    """

    with open(os.path.join(directory, ref_filename), "r") as f:
        yaml_content = yaml.load(f, Loader=yaml.FullLoader)

    # Extract libraries to be installed using conda
    extracted_libraries = [
        depend for depend in yaml_content["dependencies"]
        if any(lib in depend for lib in needed_libraries)
    ]

    # Extract libraries to be installed using pip
    if any(isinstance(x, dict) for x in yaml_content["dependencies"]):
        # if the reference yaml file contains a "pip" section,
        # find where it is in the list of dependencies
        ind = [
            yaml_content["dependencies"].index(depend)
            for depend in yaml_content["dependencies"]
            if isinstance(depend, dict)
        ][0]
        extracted_libraries += [
            depend for depend in yaml_content["dependencies"][ind]["pip"]
            if any(lib in depend for lib in needed_libraries)
        ]

    # Check whether additional libraries are needed
    not_found = [
        lib for lib in needed_libraries
        if not any(lib in ext for ext in extracted_libraries)
    ]

    # Create the deployment-specific yaml file
    conda_env = CondaDependencies()
    for ch in yaml_content["channels"]:
        conda_env.add_channel(ch)
    for library in extracted_libraries + not_found:
        conda_env.add_conda_package(library)

    # Display the environment
    print(conda_env.serialize_to_string())

    # Save the file to disk
    conda_env.save_to_file(base_directory=os.getcwd(),
                           conda_file_path=conda_filename)
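# A hedged usage sketch of generate_yaml (not from the original source);
# every file name below is a placeholder.
generate_yaml(directory=".",
              ref_filename="environment.yml",
              needed_libraries=["numpy", "pillow"],
              conda_filename="deployment_env.yml")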
Example #29
                          data_reference_name="input_data",
                          path_on_datastore="churn")

processed_dir = PipelineData(name='processed_data', datastore=default_store)

#%% [markdown]
# ## Pipeline 1st step: Data Preprocessing
#
# We start by defining the run configuration with the needed dependencies by the preprocessing step.
#
# In the cell that follow, we compose the first step of the pipeline.
#

#%%
cd = CondaDependencies()
cd.add_conda_package('pandas')
cd.add_conda_package('matplotlib')
cd.add_conda_package('numpy')
cd.add_conda_package('scikit-learn')

run_config = RunConfiguration(framework="python", conda_dependencies=cd)
run_config.target = cluster
run_config.environment.docker.enabled = True
run_config.environment.docker.base_image = DEFAULT_GPU_IMAGE
run_config.environment.python.user_managed_dependencies = False

#%%
pre_processing = PythonScriptStep(
                            name='preprocess dataset',
                            script_name='preprocess.py',
                            arguments=['--input_path', input_dir,\
Example #30
import azureml.core
from azureml.core import Workspace
from azureml.core.authentication import InteractiveLoginAuthentication
from azureml.core.conda_dependencies import CondaDependencies 
from azureml.core.model import Model
from azureml.core.image import ContainerImage
from azureml.core.webservice import AciWebservice
from azureml.core.webservice import Webservice

auth_config = InteractiveLoginAuthentication(False, "72f988bf-86f1-41af-91ab-2d7cd011db47")
ws = Workspace.from_config('aml_config/config.json', auth_config)
ws.get_details()

myenv = CondaDependencies()
myenv.add_conda_package("keras")
myenv.add_conda_package("tensorflow")
myenv.add_conda_package("pillow")

with open("myenv.yml","w") as f:
    f.write(myenv.serialize_to_string())

# Register a trained model
print('Registering model...')
model = Model.register(model_path = "modelfiles",
                       model_name = "dogs-vs-cat",
                       description = "ready lab 314",
                       workspace = ws)

# Image configuration
print('Creating image configuration...')
image_config = ContainerImage.image_configuration(execution_script = "score.py",