# --- Dataset -----------------------------------------------------------------
print('Setting up dataset')
mnistFileDataset = Dataset.get_by_name(workspace=ws, name=datasetName)

# --- Environment -------------------------------------------------------------
# Build a conda/pip environment with the packages the training script needs.
print("Defining environment")
env = Environment('sklearn')
cd = CondaDependencies.create(
    pip_packages=[
        'azureml-sdk',
        'scikit-learn==0.22.1',
        'azureml-dataprep[pandas,fuse]>=1.1.14',
    ]
)
env.python.conda_dependencies = cd
# Register the environment so later runs can reuse it by name.
env.register(workspace=ws)

# --- Estimator ---------------------------------------------------------------
print("Creating estimator")
script_params = {
    # Mount the files referenced by the MNIST dataset on the compute target.
    '--data-folder': mnistFileDataset.as_named_input(datasetName).as_mount(),
    '--regularization': 0.5,
}
est = Estimator(
    source_directory=scriptFolder,
    script_params=script_params,
    compute_target=compute_target,
    environment_definition=env,
    entry_script='train_logistic.py',
)

print("Running experiment")
def create_env(ws):
    """Create and register an Azure ML environment for GPU birdsong training.

    Builds an ``Environment`` with the TensorFlow/audio pip dependencies and a
    custom CUDA 11 Dockerfile, registers it in the workspace, and returns the
    registered environment.

    Args:
        ws: The ``azureml.core.Workspace`` to register the environment in.

    Returns:
        The registered ``Environment`` object (previously this function
        registered the environment but returned ``None``).
    """
    # Environment object that will carry both the conda deps and the Docker image.
    env = Environment(name='birdsong-env-gpu')

    # Packages baked into the image.
    # NOTE(review): the 'sklearn' pip package is a deprecated alias for
    # 'scikit-learn' — confirm whether 'scikit-learn' should be used instead.
    cd = CondaDependencies.create(
        pip_packages=[
            'azureml-dataset-runtime[pandas,fuse]',
            'azureml-defaults',
            'tensorflow==2.4.0',
            'tensorflow-io==0.17.1',
            'tensorflow-addons==0.13.0',
            'Pillow',
            'sklearn',
            'kapre',
            'sndfile',
            'librosa',
            'psutil',
        ],
        conda_packages=['SciPy'],
    )
    env.python.conda_dependencies = cd

    # Custom Dockerfile: CUDA 11 / cuDNN 8 base plus the AzureML inferencing
    # assets, Miniconda, UCX/Open MPI, msodbcsql17, and cmake.
    dockerfile = r'''
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
FROM mcr.microsoft.com/azureml/o16n-base/python-assets:20210210.31228572 AS inferencing-assets

# Tag: cuda:11.0.3-devel-ubuntu18.04
# Env: CUDA_VERSION=11.0.3
# Env: NCCL_VERSION=2.8.3
# Env: CUDNN_VERSION=8.0.5.39

FROM nvidia/cuda:11.0.3-cudnn8-devel-ubuntu18.04

USER root:root

ENV com.nvidia.cuda.version $CUDA_VERSION
ENV com.nvidia.volumes.needed nvidia_driver
ENV LANG=C.UTF-8 LC_ALL=C.UTF-8
ENV DEBIAN_FRONTEND noninteractive
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/local/cuda/lib64:/usr/local/cuda/extras/CUPTI/lib64
ENV NCCL_DEBUG=INFO
ENV HOROVOD_GPU_ALLREDUCE=NCCL

# Install Common Dependencies
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    # SSH and RDMA
    libmlx4-1 \
    libmlx5-1 \
    librdmacm1 \
    libibverbs1 \
    libmthca1 \
    libdapl2 \
    dapl2-utils \
    openssh-client \
    openssh-server \
    iproute2 && \
    # Others
    apt-get install -y \
    build-essential \
    bzip2 \
    libbz2-1.0 \
    systemd \
    git \
    wget \
    cpio \
    pciutils \
    libnuma-dev \
    ibutils \
    ibverbs-utils \
    rdmacm-utils \
    infiniband-diags \
    perftest \
    librdmacm-dev \
    libibverbs-dev \
    libsm6 \
    libxext6 \
    libxrender-dev \
    libssl1.0.0 \
    linux-image-aws \
    linux-image-azure \
    linux-image-generic \
    linux-image-kvm \
    linux-image-lowlatency \
    linux-image-virtual \
    linux-image-gke \
    linux-image-oem \
    slapd \
    perl \
    ca-certificates \
    apt \
    p11-kit \
    libp11-kit0 \
    tar \
    libsndfile-dev \
    fuse && \
    apt-get clean -y && \
    rm -rf /var/lib/apt/lists/*

# Inference
# Copy logging utilities, nginx and rsyslog configuration files, IOT server binary, etc.
COPY --from=inferencing-assets /artifacts /var/
RUN /var/requirements/install_system_requirements.sh && \
    cp /var/configuration/rsyslog.conf /etc/rsyslog.conf && \
    cp /var/configuration/nginx.conf /etc/nginx/sites-available/app && \
    ln -s /etc/nginx/sites-available/app /etc/nginx/sites-enabled/app && \
    rm -f /etc/nginx/sites-enabled/default
ENV SVDIR=/var/runit
ENV WORKER_TIMEOUT=300
EXPOSE 5001 8883 8888

# Conda Environment
ENV MINICONDA_VERSION py37_4.9.2
ENV PATH /opt/miniconda/bin:$PATH
RUN wget -qO /tmp/miniconda.sh https://repo.continuum.io/miniconda/Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh && \
    bash /tmp/miniconda.sh -bf -p /opt/miniconda && \
    conda clean -ay && \
    rm -rf /opt/miniconda/pkgs && \
    rm /tmp/miniconda.sh && \
    find / -type d -name __pycache__ | xargs rm -rf

# Open-MPI-UCX installation
RUN mkdir /tmp/ucx && \
    cd /tmp/ucx && \
    wget -q https://github.com/openucx/ucx/releases/download/v1.6.1-rc2/ucx-1.6.1.tar.gz && \
    tar zxf ucx-1.6.1.tar.gz && \
    cd ucx-1.6.1 && \
    ./configure --prefix=/usr/local --enable-optimizations --disable-assertions --disable-params-check --enable-mt && \
    make -j $(nproc --all) && \
    make install && \
    rm -rf /tmp/ucx

# Open-MPI installation
ENV OPENMPI_VERSION 4.1.0
RUN mkdir /tmp/openmpi && \
    cd /tmp/openmpi && \
    wget https://download.open-mpi.org/release/open-mpi/v4.1/openmpi-${OPENMPI_VERSION}.tar.gz && \
    tar zxf openmpi-${OPENMPI_VERSION}.tar.gz && \
    cd openmpi-${OPENMPI_VERSION} && \
    ./configure --with-ucx=/usr/local/ --enable-mca-no-build=btl-uct --enable-orterun-prefix-by-default && \
    make -j $(nproc) all && \
    make install && \
    ldconfig && \
    rm -rf /tmp/openmpi

# Msodbcsql17 installation
RUN apt-get update && \
    apt-get install -y curl && \
    curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - && \
    curl https://packages.microsoft.com/config/ubuntu/18.04/prod.list > /etc/apt/sources.list.d/mssql-release.list && \
    apt-get update && \
    ACCEPT_EULA=Y apt-get install -y msodbcsql17

#Cmake Installation
RUN apt-get update && \
    apt-get install -y cmake
'''
    # Use the custom Dockerfile instead of a prebuilt base image.
    env.docker.base_image = None
    env.docker.base_dockerfile = dockerfile

    # Register the environment for reuse; register() returns the registered copy.
    env = env.register(workspace=ws)
    # BUGFIX: previously the registered environment was assigned but never
    # returned, so callers always received None.
    return env
def deploy(ws_name, model_name, path_to_model, environment_name,
           register_environment, pip_packages, conda_packages, cpu_cores,
           memory_gb, path_to_entry_script, service_name):
    """Register a model and deploy it as an ACI webservice.

    Args:
        ws_name: Workspace name (informational only; the workspace itself is
            loaded from the local config via ``Workspace.from_config()``).
        model_name: Name under which to register the model.
        path_to_model: Local path of the model file/folder to register.
        environment_name: Name of the AzureML environment to use.
        register_environment: If True, create and register the environment
            from ``pip_packages``/``conda_packages`` before deploying.
        pip_packages: Pip packages for the environment (used only when
            ``register_environment`` is True).
        conda_packages: Conda packages for the environment (ditto).
        cpu_cores: CPU cores for the ACI container.
        memory_gb: Memory (GB) for the ACI container.
        path_to_entry_script: Path to the scoring entry script.
        service_name: Name of the deployed webservice.

    Returns:
        The scoring URI of the deployed service.
    """
    # --- Workspace ---
    ws = Workspace.from_config()
    print("Got Workspace {}".format(ws_name))

    # --- Register model ---
    model = Model.register(workspace=ws,
                           model_path=path_to_model,
                           model_name=model_name)
    print("Registered Model {}".format(model_name))

    # --- Environment ---
    if register_environment:
        env = Environment(environment_name)
        cd = CondaDependencies.create(pip_packages=pip_packages,
                                      conda_packages=conda_packages)
        env.python.conda_dependencies = cd
        # Register environment to re-use later
        env.register(workspace=ws)
        print("Registered Environment")
    myenv = Environment.get(workspace=ws, name=environment_name)
    # Uncomment to save environment
    # myenv.save_to_directory('./environ', overwrite=True)

    # --- Deployment configuration ---
    aciconfig = AciWebservice.deploy_configuration(
        cpu_cores=cpu_cores,
        memory_gb=memory_gb,
    )
    inference_config = InferenceConfig(entry_script=path_to_entry_script,
                                       environment=myenv)

    # --- Deploy ---
    print("Deploying....... This may take a few mins, check the status in MLS after the function finishes executing")
    # BUGFIX: the service was previously deployed under name=ws_name (the
    # workspace name) and the service_name parameter was silently ignored.
    service = Model.deploy(workspace=ws,
                           name=service_name,
                           models=[model],
                           inference_config=inference_config,
                           deployment_config=aciconfig,
                           overwrite=True)
    service.wait_for_deployment(show_output=True)
    url = service.scoring_uri
    print(url)

    # Fetch the deployed service (by its service name) to surface its logs.
    service = Webservice(ws, service_name)
    print(service.get_logs())
    return url