# Example #1
#
# Azure ML Orchestration Script, Ben C 2019
# Fetch model and pickle files for building the model API app
# - Requires env vars: AZML_WORKSPACE, AZML_SUBID, AZML_RESGRP, AZML_MODEL
#

import os, argparse
from dotenv import load_dotenv
from amllib.utils import connectToAML, downloadPickles, checkVars

# When local testing, load .env files for convenience
load_dotenv()
# Fail fast if any required environment variable is missing
checkVars(['AZML_SUBID', 'AZML_RESGRP', 'AZML_WORKSPACE', 'AZML_MODEL'])

parser = argparse.ArgumentParser()
parser.add_argument('--model-ver', type=str, dest='ver', help='Model version')
parser.add_argument('--use-best', dest='best', help='Find best model based on accuracy', default=False, action='store_true')
parser.add_argument('--output-path', type=str, dest='output', help='Output path for pickles', default='../model-api/pickles')
# parse_known_args so unrelated flags (e.g. from a CI wrapper) don't abort the run
args, unknown = parser.parse_known_args()

outputPath = args.output

ws = connectToAML(os.environ['AZML_SUBID'], os.environ['AZML_RESGRP'], os.environ['AZML_WORKSPACE'])

# Pick exactly ONE download strategy: --use-best wins, then an explicit
# --model-ver, otherwise fall back to the latest registered version.
# (Bug fix: the original used two independent `if`s, so passing both
# --use-best and --model-ver downloaded the model twice.)
if args.best:
    downloadPickles(ws, os.environ['AZML_MODEL'], outputPath, "best")
elif args.ver:
    # int() will raise ValueError on a non-numeric --model-ver, which is
    # the desired fail-fast behaviour for a CLI orchestration script
    downloadPickles(ws, os.environ['AZML_MODEL'], outputPath, int(args.ver))
else:
    print("### No model version specified, latest will be used")
    downloadPickles(ws, os.environ['AZML_MODEL'], outputPath)
# Example #2
#
# Azure ML Orchestration Script, Ben C 2019
# Upload data from local machine to Azure ML workspace in default datastore
# - Requires env vars: AZML_WORKSPACE, AZML_SUBID, AZML_RESGRP, AZML_DATAPATH
#

import os, argparse
from dotenv import load_dotenv
from amllib.utils import connectToAML, checkVars

# When local testing, load .env files for convenience
load_dotenv()
# Fail fast if any required environment variable is missing
checkVars(['AZML_SUBID', 'AZML_RESGRP', 'AZML_WORKSPACE', 'AZML_DATAPATH'])

parser = argparse.ArgumentParser()
# --data-dir is mandatory: the local folder whose contents will be uploaded
parser.add_argument('--data-dir',
                    type=str,
                    dest='data_dir',
                    help='Directory holding local data to upload',
                    required=True)
# parse_known_args so unrelated flags (e.g. from a CI wrapper) don't abort the run
args, unknown = parser.parse_known_args()

ws = connectToAML(os.environ['AZML_SUBID'], os.environ['AZML_RESGRP'],
                  os.environ['AZML_WORKSPACE'])

# Resolve local path — a relative --data-dir is interpreted against the CWD
localFolder = os.path.abspath(os.path.join(os.getcwd(), args.data_dir))
print(f"### Local path resolves to {localFolder}")

# Upload to AML default datastore
ds = ws.get_default_datastore()
# NOTE(review): the script ends immediately after fetching the datastore —
# the actual upload call (e.g. ds.upload(src_dir=..., target_path=...))
# appears to be missing or truncated here; confirm against the original repo.
# Example #3
# Run a Python training script in remote Azure ML computer cluster
# - Requires env vars: AZML_WORKSPACE, AZML_SUBID, AZML_RESGRP, AZML_MODEL
#   AZML_EXPERIMENT, AZML_DATAPATH, AZML_SCRIPT, AZML_COMPUTE_NAME
# - Optional: AZML_RUN_LOCAL set to "true" in order to run training locally

import os, argparse
from dotenv import load_dotenv
from amllib.utils import connectToAML, getComputeAML, checkVars
from azureml.core import Experiment, ScriptRunConfig
from azureml.core.conda_dependencies import CondaDependencies
from azureml.core.runconfig import RunConfiguration, DataReferenceConfiguration

# When local testing, load .env files for convenience
load_dotenv()
# Fail fast if any required environment variable is missing
checkVars([
    'AZML_SUBID', 'AZML_RESGRP', 'AZML_WORKSPACE', 'AZML_MODEL',
    'AZML_EXPERIMENT', 'AZML_DATAPATH', 'AZML_SCRIPT', 'AZML_COMPUTE_NAME'
])

parser = argparse.ArgumentParser()
# Hyperparameter forwarded to the training script (default: 40)
parser.add_argument('--estimators',
                    type=int,
                    dest='estimators',
                    help='Number of estimators',
                    default=40)
# parse_known_args so unrelated flags (e.g. from a CI wrapper) don't abort the run
args, unknown = parser.parse_known_args()

# Some consts
# Local folder containing the training code that will be submitted
trainingScriptDir = "../training"
# presumably the data path inside the remote datastore — verify against the training script
dataPathRemote = os.environ['AZML_DATAPATH']
# Entry-point script filename for the remote run
trainingScript = os.environ['AZML_SCRIPT']
estimators = args.estimators