Example #1
def ComputeCompute():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']
    cluster_name = request.json['cluster_name']
    vm_size = request.json['vm_size']
    min_nodes = request.json['min_nodes']
    max_nodes = request.json['max_nodes']
    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')
    #aml_compute = AmlCompute(ws, cluster_name)
    #cluster_name = 'cpu-cluster'

    try:
        aml_compute = AmlCompute(ws, cluster_name)
        print('Found existing AML compute context.')
        return "Found existing AML compute context."
    except Exception:
        print('Creating new AML compute context.')
        aml_config = AmlCompute.provisioning_configuration(vm_size=vm_size,
                                                           min_nodes=min_nodes,
                                                           max_nodes=max_nodes)
        aml_compute = AmlCompute.create(ws,
                                        name=cluster_name,
                                        provisioning_configuration=aml_config)
        aml_compute.wait_for_completion(show_output=True)
        return "Compute successfully created"
Example #2
    def from_directory(path, auth=None):
        """(Deprecated) Load an experiment from the specified path.

        :param path: Directory containing the experiment configuration files.
        :type path: str
        :param auth: The auth object.
            If None the default Azure CLI credentials will be used or the API will prompt for credentials.
        :type auth: azureml.core.authentication.ServicePrincipalAuthentication or
            azureml.core.authentication.InteractiveLoginAuthentication
        :return: Returns the Experiment
        :rtype: azureml.core.Experiment
        """
        from azureml.core.workspace import Workspace

        info_dict = _commands.get_project_info(auth, path)

        # TODO: Fix this
        subscription = info_dict[_commands.SUBSCRIPTION_KEY]
        resource_group_name = info_dict[_commands.RESOURCE_GROUP_KEY]
        workspace_name = info_dict[_commands.WORKSPACE_KEY]
        experiment_name = info_dict[_commands.PROJECT_KEY]

        workspace = Workspace(subscription_id=subscription,
                              resource_group=resource_group_name,
                              workspace_name=workspace_name,
                              auth=auth)
        return Experiment(workspace=workspace, name=experiment_name)
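A minimal usage sketch for the deprecated helper above (the path is hypothetical; it assumes the directory was previously attached as a project):

# Hypothetical usage of Experiment.from_directory (deprecated, SDK v1).
from azureml.core.experiment import Experiment

experiment = Experiment.from_directory("./my-attached-project")
print(experiment.name, experiment.workspace.name)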
Example #3
def AKSCompute():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']
    cluster_name = request.json['cluster_name']
    vm_size = request.json['vm_size']
    agent_count = request.json['agent_count']
    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')
    #aml_compute = AmlCompute(ws, cluster_name)
    #cluster_name = 'cpu-cluster'
    try:
        aks_target = AksCompute(ws, cluster_name)
        print('Found existing AKS compute context.')
        return "Found existing AKS compute context."
    except Exception:
        print('Creating new AKS compute context.')
        prov_config = AksCompute.provisioning_configuration(
            vm_size=vm_size, agent_count=agent_count, location=location)
        aks_target = ComputeTarget.create(
            workspace=ws,
            name=cluster_name,
            provisioning_configuration=prov_config)

        # Wait for the create process to complete
        aks_target.wait_for_completion(show_output=True)
        return "Compute successfully created"
Example #4
def RegisterCSV():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']

    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')

    ds = ws.get_default_datastore()
    #print(ds.datastore_type, ds.account_name, ds.container_name)
    #file_path = request.json['file_path']
    #print(file_path)
    file_name = request.json['file_name']
    #ds.upload(src_dir=file_path, target_path= None, overwrite=True, show_progress=True)
    try:
        stock_ds = Dataset.Tabular.from_delimited_files(
            path=ds.path(file_name))
        stock_ds = stock_ds.register(workspace=ws,
                                     name=file_name,
                                     description='stock training data',
                                     create_new_version=True)
        print('Data Registered to the ML Workspace.')
        return "Data Registered to the ML Workspace."
    except Exception:
        print('Dataset is not registered, please check.')
        return "Dataset is not registered, please check."
Example #5
def UploadCSV():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    #location = request.json['location']

    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')

    ds = ws.get_default_datastore()
    print(ds.datastore_type, ds.account_name, ds.container_name)
    file_path = request.json['file_path']
    print(file_path)
    file_name = request.json['file_name']
    ds.upload(src_dir=file_path,
              target_path=None,
              overwrite=True,
              show_progress=True)
    try:
        stock_ds = Dataset.Tabular.from_delimited_files(
            path=ds.path(file_name))
        stock_ds = stock_ds.register(workspace=ws,
                                     name=file_name,
                                     description='stock training data')
        print('Found existing file name')
        #return "This file name exist. Please rename or upload new file"
    except Exception:
        print('Uploading new file, please wait')
    return "new file uploaded"
Example #6
def ComputeDelete():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']
    Cluster_type = request.json['Cluster_type']
    cluster_name = request.json['cluster_name']
    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)
    print("Found workspace {} at location {}".format(ws.name, ws.location))
    try:
        if Cluster_type == 'Training':
            aml_compute = AmlCompute(ws, cluster_name)
            print('Found existing AML compute context.')
            aml_compute.delete()
        else:
            aks_target = AksCompute(ws, cluster_name)
            print('Found existing AKS compute context.')
            aks_target.delete()
        print('compute deleted')
        return "compute deleted"
    except Exception as e:
        error_statement = str(e)
        print("Error statement: ", error_statement)
        return error_statement
Example #7
def WSCreate():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']

    ## Existing / new workspace
    try:
        ws = Workspace(subscription_id=subscription_id,
                       resource_group=resource_group,
                       workspace_name=workspace_name)

        print("Found workspace {} at location {}".format(ws.name, ws.location))
        print('Found existing Workspace.')
        return "existing Workspace"
    except Exception:
        print('Creating new Workspace.')
        ws = Workspace.create(
            name=workspace_name,
            subscription_id=subscription_id,
            resource_group=resource_group,
            #create_resource_group=True,
            location=location)
        return "ok"
Example #8
    def get_aml_ws(self, ws_name):

        creds = self.config.get("external-services",
                                ws_name,
                                suppress_warning=True)
        if not creds:
            errors.config_error(
                "Azure ML workspace '{}' is not defined in [external-services] section of the XT config file"
                .format(ws_name))

        subscription_id = self.config.get_required_service_property(
            creds, "subscription-id", ws_name)
        resource_group = self.config.get_required_service_property(
            creds, "resource-group", ws_name)

        #from azureml.core.authentication import ServicePrincipalAuthentication
        # ws_ex = ws_name + "-ex"
        # svc_pr = None
        # if self.config.name_exists(section, ws_ex):
        #     client_id = self.config.get(section, ws_ex, "client-id")
        #     tenant_id = self.config.get(section, ws_ex, "tenant-id")
        #     client_secret = self.config.get(section, ws_ex, "client-secret")
        #     svc_pr = ServicePrincipalAuthentication(tenant_id=tenant_id, service_principal_id=client_id, service_principal_password=client_secret)

        ws = Workspace(subscription_id, resource_group,
                       ws_name)  # , auth=svc_pr)
        return ws
Example #9
    def _get_workspace():
        from azureml.core.workspace import Workspace
        from azureml.core.authentication import AzureMLTokenAuthentication
        from azureml.exceptions import RunEnvironmentException

        try:
            # Load authentication scope environment variables
            subscription_id = os.environ["AZUREML_ARM_SUBSCRIPTION"]
            resource_group = os.environ["AZUREML_ARM_RESOURCEGROUP"]
            workspace_name = os.environ["AZUREML_ARM_WORKSPACE_NAME"]
            experiment_name = os.environ["AZUREML_ARM_PROJECT_NAME"]
            run_id = os.environ["AZUREML_RUN_ID"]

            # Initialize an AMLToken auth, authorized for the current run
            token, token_expiry_time = \
                AzureMLTokenAuthentication._get_initial_token_and_expiry()
            url = os.environ["AZUREML_SERVICE_ENDPOINT"]
            location = re.compile("//(.*?)\\.").search(url).group(1)
        except KeyError as key_error:
            raise_from(RunEnvironmentException(), key_error)
        else:
            auth = AzureMLTokenAuthentication.create(
                token,
                AzureMLTokenAuthentication._convert_to_datetime(
                    token_expiry_time), url, subscription_id, resource_group,
                workspace_name, experiment_name, run_id)
            # Disabling service check as this code executes in the remote context, without arm token.
            workspace_object = Workspace(subscription_id,
                                         resource_group,
                                         workspace_name,
                                         auth=auth,
                                         _location=location,
                                         _disable_service_check=True)
            return workspace_object
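The workspace location is parsed out of the service endpoint URL by the regex above; a quick illustration with a hypothetical endpoint value (the URL shape is an assumption about AZUREML_SERVICE_ENDPOINT):

# Illustration only; the endpoint URL below is hypothetical.
import re
url = "https://eastus2.experiments.azureml.net"
location = re.compile("//(.*?)\\.").search(url).group(1)
print(location)  # -> eastus2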
Example #10
def ComputeExist():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']
    Cluster_type = request.json['Cluster_type']
    cluster_name = request.json['cluster_name']
    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')
    #aml_compute = AmlCompute(ws, cluster_name)
    #cluster_name = 'cpu-cluster'
    try:
        if Cluster_type == 'Training':
            aml_compute = AmlCompute(ws, cluster_name)
        else:
            aks_target = AksCompute(ws, cluster_name)
        print('Found existing compute context.')
        return "compute exists"
    except Exception:
        print('Compute not found; a new one needs to be created.')
        return "compute does not exist"
Example #11
def run():
    print("entered run")
    variables_received = "sub_id: {}, rg: {}, work_name: {}, state: {}, author: {}, model_name: {}" \
                            .format(resolve_sub_id(),
                                    resolve_rg(),
                                    resolve_workspace_name(),
                                    resolve_state(),
                                    resolve_author(),
                                    resolve_model_name())
    print(variables_received)

    az_ws = Workspace(resolve_sub_id(), resolve_rg(), resolve_workspace_name())
    print("initialized workspace")
    #Get & Download model
    model = Model(az_ws,
                  name=resolve_model_name(),
                  tags={
                      "state": resolve_state(),
                      "created_by": resolve_author()
                  })
    print("initialized model")
    model.download(target_dir="./assets/")
    print("downloaded model assets")
    #TODO: remove workaround for ml sdk dropping assets into /assets/dacrook folder when files dropped to consistent location
    for dir_p, _, f_n in walk("./assets"):
        for f in f_n:
            abs_path = os.path.abspath(os.path.join(dir_p, f))
            shutil.move(abs_path, "./assets/" + f)

    #Configure Image
    my_env = CondaDependencies.create(conda_packages=["numpy", "scikit-learn"])
    with open("myenv.yml", "w") as f:
        f.write(my_env.serialize_to_string())
    image_config = ContainerImage.image_configuration(
        execution_script="score.py",
        runtime="python",
        conda_file="myenv.yml",
        dependencies=["assets", "inference_code"],
        tags={
            "state": resolve_state(),
            "created_by": resolve_author()
        })
    print("configured image")
    #TODO: use this once model is dropped to a consistent location
    #    image = Image.create(workspace = az_ws, name=resolve_image_name(), models=[model], image_config = image_config)
    image = Image.create(workspace=az_ws,
                         name=resolve_image_name(),
                         models=[model],
                         image_config=image_config)
    image.wait_for_creation()
    print("created image")
    if (image.creation_state != "Succeeded"):
        raise Exception("Failed to create image.")
    print("image location: {}".format(image.image_location))
    artifacts = {"image_location": image.image_location}
    if (not os.path.exists("/artifacts/")):
        os.makedirs("/artifacts/")
    with open("/artifacts/artifacts.json", "w") as outjson:
        json.dump(artifacts, outjson)
Example #12
def workspace_delete_private_endpoint(subscription_id,
                                      resource_group_name,
                                      workspace_name,
                                      pe_connection_name,
                                      logger=None):
    workspace = Workspace(subscription_id=subscription_id,
                          resource_group=resource_group_name,
                          workspace_name=workspace_name)
    workspace.delete_private_endpoint_connection(pe_connection_name)
Example #13
def RunAutoML():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    file_name = request.json['file_name']
    #location = request.json['location']

    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')

    dataset_name = file_name

    # Get a dataset by name
    df = Dataset.get_by_name(workspace=ws, name=dataset_name)
    stock_dataset_df = df.to_pandas_dataframe()
    print('file successfully received.')
    stock_dataset_df.head()
    #stock_dataset_json = stock_dataset_df.to_json(orient='split')
    #print(stock_dataset_json)
    y_df = stock_dataset_df['ActionTaken'].values
    x_df = stock_dataset_df.drop(['ActionTaken'], axis=1)

    ExperimentName = request.json['ExperimentName']
    tasks = request.json['tasks']
    iterations = request.json['iterations']
    iteration_timeout_minutes = request.json['iteration_timeout_minutes']
    primary_metric = request.json['primary_metric']

    #n_cross_validations = request.json['n_cross_validations']

    try:
        automl_config = AutoMLConfig(
            task=tasks,
            X=x_df,
            y=y_df,
            iterations=iterations,
            iteration_timeout_minutes=iteration_timeout_minutes,
            primary_metric=primary_metric,
            #n_cross_validations=n_cross_validations,
            preprocess=True,
        )
        experiment = Experiment(ws, ExperimentName)
        run = experiment.submit(config=automl_config, show_output=True)

        best_run, fitted_model = run.get_output()

        return 'ok'
    except Exception:
        return 'error'
Example #14
def train_model(data_file, random_seed):
    """Train the automl model."""
    target = "utilization"
    df = pd.read_parquet(data_file)

    x = df.loc[:, [c for c in df if c != target]].values
    y = df[target].values
    project_folder = "./automl"

    automl_config = AutoMLConfig(
        task="regression",
        iteration_timeout_minutes=5,
        iterations=10,
        primary_metric="spearman_correlation",
        n_cross_validations=5,
        debug_log="automl.log",
        verbosity=logging.INFO,
        X=x,
        y=y,
        path=project_folder,
    )

    load_dotenv(find_dotenv())
    ws = Workspace(
        workspace_name=getenv("AML_WORKSPACE_NAME"),
        subscription_id=getenv("AML_SUBSCRIPTION_ID"),
        resource_group=getenv("AML_RESOURCE_GROUP"),
    )
    experiment = Experiment(ws, getenv("AML_EXPERIMENT_NAME"))

    local_run = experiment.submit(automl_config, show_output=True)

    sub_runs = list(local_run.get_children())

    best_run = None
    best_score = 0

    for sub_run in sub_runs:
        props = sub_run.get_properties()
        if props["run_algorithm"] != "Ensemble":
            if float(props["score"]) > best_score:
                best_score = float(props["score"])
                best_run = sub_run

    model_name = "Automl{}".format(str(uuid.uuid4()).replace("-", ""))[:20]
    best_run.register_model(model_name=model_name,
                            model_path="outputs/model.pkl")

    # best_run, fitted_model = local_run.get_output()
    # local_run.register_model(
    #     description="automl meetup best model"
    # )
    print("Model name is {}".format(model_name))
Example #15
def WSDelete():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']
    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)
    print("Found workspace {} at location {}".format(ws.name, ws.location))
    try:
        ws.delete(delete_dependent_resources=True, no_wait=False)
        print('Workspace deleted')
        return "Workspace deleted"
    except Exception as e:
        error_statement = str(e)
        print("Error statement: ", error_statement)
        return error_statement
Example #16
    def __init__(self,
                 directory=".",
                 experiment=None,
                 auth=None,
                 _disable_service_check=False):
        """
        Creates the project object using the local project path.
        :param directory: Project path.
        :type directory: str
        :param experiment:
        :type experiment: azureml.core.Experiment
        :param auth: An authentication object of a subclass of azureml.core.authentication.AbstractAuthentication
        :type auth: azureml.core.authentication.AbstractAuthentication
        :return:
        """
        from azureml.core.experiment import Experiment
        if not directory:
            directory = "."
        if experiment:
            self._workspace = experiment.workspace
            self.directory = directory
            self._project_path = os.path.abspath(directory)
            self._experiment = experiment
            self._snapshots_client = SnapshotsClient(
                self._workspace.service_context)

        else:
            if not auth:
                auth = InteractiveLoginAuthentication()

            self._project_path = os.path.abspath(directory)

            info_dict = _commands.get_project_info(auth, self._project_path)

            from azureml.core.workspace import Workspace
            self._workspace = Workspace(
                info_dict[_commands.SUBSCRIPTION_KEY],
                info_dict[_commands.RESOURCE_GROUP_KEY],
                info_dict[_commands.WORKSPACE_KEY],
                auth,
                _disable_service_check=_disable_service_check)
            self._experiment = Experiment(self._workspace,
                                          info_dict[_commands.PROJECT_KEY])
            self._snapshots_client = SnapshotsClient(
                self._workspace.service_context)
Example #17
def connect_workspace(service_principal_password):
    with open('config/aml_config.json') as f:
        aml_config = json.load(f)

    svc_pr = ServicePrincipalAuthentication(
        tenant_id=aml_config["tenant_id"],
        service_principal_id=aml_config["service_principal_id"],
        service_principal_password=service_principal_password)

    ws = Workspace(subscription_id=aml_config["subscription_id"],
                   resource_group=aml_config["resource_group"],
                   workspace_name=aml_config["workspace_name"],
                   auth=svc_pr)

    return ws
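The config/aml_config.json consumed above would carry these keys (a sketch reconstructed from the code; values are placeholders):

# Sketch of the expected config/aml_config.json (placeholder values).
import json
aml_config = {
    "tenant_id": "<tenant-guid>",
    "service_principal_id": "<service-principal-app-id>",
    "subscription_id": "<subscription-guid>",
    "resource_group": "<resource-group-name>",
    "workspace_name": "<workspace-name>",
}
with open("config/aml_config.json", "w") as f:
    json.dump(aml_config, f, indent=2)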
Example #18
def WSExist():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']

    ## Existing / new workspace
    try:
        ws = Workspace(subscription_id=subscription_id,
                       resource_group=resource_group,
                       workspace_name=workspace_name)

        print("Found workspace {} at location {}".format(ws.name, ws.location))
        print('Found existing Workspace.')
        return "Workspace exist"
    except:
        print('need to create new Workspace.')
        return "Workspace not exist. Please create new workspace."
Example #19
    def mount(self, storage_name, storage_key, container):

        # subscription_id, resource_group, and ws_name are resolved from the
        # surrounding class/config in the original source.
        ws = Workspace(subscription_id, resource_group,
                       ws_name)  # , auth=svc_pr)

        from azureml.core import Datastore
        datastore = Datastore.register_azure_blob_container(
            workspace=ws,
            datastore_name=container,
            container_name=container,
            account_name=storage_name,
            account_key=storage_key,
            create_if_not_exists=True)

        console.print("datastore=", datastore)

        dataref = datastore.as_mount()
        dir_name = dataref.path_on_compute
        console.print("daatastore MOUNT dir_name=", dir_name)
        return dir_name
Example #20
File: az.py  Project: JC1005/IOT2
    def __init__(self):
        configFilePath = "credentials/azure.ini"
        self.azureConfig = configparser.ConfigParser()
        try:
            self.azureConfig.read(configFilePath)
            self.ws = Workspace(
                subscription_id=self.azureConfig["default"]["subscription_id"],
                resource_group=self.azureConfig["default"]["resource_group"],
                workspace_name=self.azureConfig["default"]["workspace_name"],
            )
        except Exception:
            print("Azure config file not found at " + configFilePath)
            print("A template will be provided.")
            self.azureConfig["default"] = {
                "subscription_id": "paste your subscription ID here",
                "resource_group": "paste your resource group here",
                "workspace_name": "paste your workspace name here",
                "scoring_uri": "paste your scoring uri here",
            }
            with open(configFilePath, "w") as configfile:
                self.azureConfig.write(configfile)
Example #21
def DataBlob():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    file_name = request.json['file_name']
    #location = request.json['location']

    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')
    ds = ws.get_default_datastore()
    print(ds.datastore_type, ds.account_name, ds.container_name)
    try:
        stock_ds = Dataset.Tabular.from_delimited_files(
            path=ds.path(file_name))
        stock_ds = stock_ds.register(workspace=ws,
                                     name=file_name,
                                     description='stock training data')
        print('Found existing file name')
        return "This file name exists. Please rename or upload a new file."
    except Exception:
        print('Uploading new file, please wait')

    stock_dataset = Dataset.Tabular.from_delimited_files(
        path=ds.path(file_name))
    stock_dataset = stock_dataset.register(workspace=ws,
                                           name=file_name,
                                           description='stock training data')
    #file_name = json.loads(file_name)
    print(type(file_name))
    new_data = Dataset.get_by_name(ws, file_name, version='latest')
    print(new_data.name)
    print(type(new_data.name))
    stock_dataset_df = new_data.to_pandas_dataframe()
    print('file successfully received.')
    stock_dataset_json = stock_dataset_df.to_json(orient='split')
    return stock_dataset_json
Example #22
def BlobData():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']

    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')

    ds = ws.get_default_datastore()
    print(ds.datastore_type, ds.account_name, ds.container_name)
    block_blob_service = BlockBlobService(account_name=ds.account_name,
                                          account_key=ds.account_key)
    try:
        blobs = []
        my_list = []
        marker = None
        while True:
            batch = block_blob_service.list_blobs(ds.container_name,
                                                  prefix='H',
                                                  marker=marker)
            blobs.extend(batch)
            if not batch.next_marker:
                break
            marker = batch.next_marker
        for blob in blobs:
            print(blob.name)
            my_list.append(blob.name)
        print(my_list)
        my_json_string = json.dumps(my_list)
        print('dataset is fetched from blob, please check')
        return my_json_string
    except Exception:
        print('dataset is not fetched from blob, please check')
    return "Dataset is not fetched from blob, please check"
Example #23
def UploadCSV():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    #location = request.json['location']

    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')

    ds = ws.get_default_datastore()
    print(ds.datastore_type, ds.account_name, ds.container_name)
    file_path = request.json['file_path']
    print(file_path)
    file_name = request.json['file_name']
    ds.upload(src_dir=file_path,
              target_path=None,
              overwrite=True,
              show_progress=True)
    return "new file uploaded"
Example #24
def get_ws(svc_pr):
    # obtain ws env variables
    try:
        # use indexing rather than .get so a missing variable actually raises
        workspace = os.environ['WORKSPACE']
        resource_group = os.environ['RESOURCE_GROUP']
        subscription_id = os.environ['SUBSCRIPTION_ID']
    except Exception as e:
        print('Error fetching ws variables')
        print('Exception: ', e)
        return None

    # connect to workspace
    try:
        ws = Workspace(subscription_id=subscription_id,
                       resource_group=resource_group,
                       workspace_name=workspace,
                       auth=svc_pr)
    except Exception as e:
        print('Error connecting to ws')
        print('Exception: ', e)
        return None
    else:
        return ws
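A usage sketch for get_ws (placeholder values; ServicePrincipalAuthentication comes from azureml.core.authentication):

# Hypothetical usage; assumes WORKSPACE, RESOURCE_GROUP and SUBSCRIPTION_ID are set.
from azureml.core.authentication import ServicePrincipalAuthentication

svc_pr = ServicePrincipalAuthentication(
    tenant_id="<tenant-guid>",
    service_principal_id="<service-principal-app-id>",
    service_principal_password="<secret>")
ws = get_ws(svc_pr)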
Example #25
def build_image():
    """Build the docker image to hold the model."""
    load_dotenv(find_dotenv())

    chdir("deploy")
    ws = Workspace(
        workspace_name=getenv("AML_WORKSPACE_NAME"),
        subscription_id=getenv("AML_SUBSCRIPTION_ID"),
        resource_group=getenv("AML_RESOURCE_GROUP"),
    )
    model = Model(ws, getenv("AML_MODEL_NAME"))

    image_config = ContainerImage.image_configuration(
        runtime="python",
        execution_script="score.py",
        conda_file="container_conda_env.yml")

    image = Image.create(name=getenv("AML_IMAGE_NAME"),
                         models=[model],
                         image_config=image_config,
                         workspace=ws)

    image.wait_for_creation(show_output=True)
Example #26
def BlobDataDownload():
    subscription_id = request.json['subscription_id']
    resource_group = request.json['resource_group']
    workspace_name = request.json['workspace_name']
    location = request.json['location']
    file_name = request.json['file_name']

    ws = Workspace(subscription_id=subscription_id,
                   resource_group=resource_group,
                   workspace_name=workspace_name)

    print("Found workspace {} at location {}".format(ws.name, ws.location))
    print('Found existing Workspace.')

    ds = ws.get_default_datastore()
    print(ds.datastore_type, ds.account_name, ds.container_name)
    block_blob_service = BlockBlobService(account_name=ds.account_name,
                                          account_key=ds.account_key)
    #file_name = 'RetailChurnTemplate_FeatureEngg_ProcessedData_20.csv'
    try:
        local_path = 'D:\\DCSAIAUTOML\\TempFolder'
        full_path_to_file = os.path.join(local_path, file_name)
        print(full_path_to_file)
        block_blob_service.get_blob_to_path(ds.container_name,
                                            file_name,
                                            full_path_to_file,
                                            start_range=0,
                                            end_range=1100)
        df = pd.read_csv(full_path_to_file)
        df.head(100)
        dfff_json = df.to_json(orient='records')
        #print(dfff_json)
        return dfff_json
    except Exception:
        print('dataset is not saved from blob, please check')
    return "Dataset is not saved from blob, please check"
"""Module for training the utilization prediction model."""
import logging
import uuid

import azureml.dataprep as dprep
from azureml.core.experiment import Experiment
from azureml.core.workspace import Workspace
from azureml.train.automl import AutoMLConfig

target = "utilization"
ws = Workspace(
    workspace_name=dbutils.secrets.get("azureml",
                                       "AML_WORKSPACE_NAME"),  # noqa
    subscription_id=dbutils.secrets.get("azureml",
                                        "AML_SUBSCRIPTION_ID"),  # noqa
    resource_group=dbutils.secrets.get("azureml",
                                       "AML_RESOURCE_GROUP"),  # noqa
)
ds = ws.get_default_datastore()

x = dprep.read_parquet_file(ds.path('model_data_x.parquet'))
y = dprep.read_parquet_file(ds.path('model_data_y.parquet')).to_long(
    dprep.ColumnSelector(term='.*', use_regex=True))

project_folder = './automl'
automl_config = AutoMLConfig(
    task="regression",
    iteration_timeout_minutes=10,
    iterations=10,
    primary_metric="r2_score",
    n_cross_validations=5,
Example #28
def get_workspace_or_default(subscription_id=None,
                             resource_group=None,
                             workspace_name=None,
                             auth=None,
                             project_path=None,
                             logger=None):
    """
    Order is
    1) Get workspace from the specified parameters,
    2) From project context,
    3) Using az configure defaults.
    :param workspace_name:
    :param resource_group:
    :param auth:
    :param project_path:
    :return:
    """

    if not logger:
        logger = module_logger

    if not auth:
        auth = get_cli_specific_auth()
        logger.debug("No auth specified, using authentication {}".format(
            type(auth).__name__))

    if resource_group and workspace_name:
        # Simple case where both are specified. The only way to get a workspace with no
        # az configure support for 'mlworkspace' is when the user explicitly specifies parameters.
        # Technically the resource group can also be set via az configure.
        if not subscription_id:
            subscription_id = get_default_subscription_id(auth)
        return Workspace(subscription_id,
                         resource_group,
                         workspace_name,
                         auth=auth)

    if project_path:
        logger.debug("Project path %s set", project_path)
        try:
            return Workspace.from_config(path=project_path,
                                         auth=auth,
                                         _logger=logger)
        except UserErrorException:
            if project_path != ".":
                logger.warning(
                    "The provided path %s did not contain a config.json, "
                    "falling back to CLI configuration.", project_path)

    if not subscription_id:
        subscription_id = get_default_subscription_id(auth)

    if not workspace_name:
        workspace_name = get_workspace_or_default_name(
            workspace_name,
            throw_error=True,
            subscription_id=subscription_id,
            auth=auth,
            project_path=project_path)
    if not resource_group:
        resource_group = get_resource_group_or_default_name(
            resource_group,
            throw_error=True,
            subscription_id=subscription_id,
            auth=auth,
            project_path=project_path)

    return Workspace(subscription_id,
                     resource_group,
                     workspace_name,
                     auth=auth)
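A usage sketch for the resolution helper above (hypothetical names; auth falls back to the CLI default described in the docstring):

# Hypothetical usage of get_workspace_or_default.
ws = get_workspace_or_default(subscription_id="<subscription-guid>",
                              resource_group="my-rg",
                              workspace_name="my-ws")
print(ws.name)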
Example #29
from azureml.core.model import Model as azModel
from azureml.core.workspace import Workspace
import os
import shutil
import argparse

try:
    sub_id = os.environ["SUBSCRIPTION_ID"]
    rg = os.environ["RESOURCE_GROUP"]
    ml_ws_name = os.environ["ML_WS_NAME"]
except Exception:
    parser = argparse.ArgumentParser()
    parser.add_argument("--sub_id")
    parser.add_argument("--rg")
    parser.add_argument("--ml_ws_name")
    args = parser.parse_args()
    sub_id = args.sub_id
    rg = args.rg
    ml_ws_name = args.ml_ws_name

az_ws = Workspace(sub_id, rg, ml_ws_name)

prefix = "./src/ai_acc_quality/ml/ml_assets/"

if not os.path.exists(prefix):
    os.makedirs(prefix)
else:
    shutil.rmtree(prefix)
    os.makedirs(prefix, exist_ok=True)

azml_model = azModel(az_ws, name="anomaly_enc_dec")
azml_model.download(target_dir=prefix)
Example #30
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 18 17:20:22 2019

@author: datacore
"""

from azureml.core.authentication import AzureCliAuthentication
import azure.cli.core
#cli_auth = AzureCliAuthentication()
from azureml.core.workspace import Workspace

ws = Workspace(subscription_id="24075937-2687-4457-bac6-ec16dec514c3",
               resource_group="VstsRG-784AbhijitC-8a31",
               workspace_name="automldc")

from azureml.core.experiment import Experiment
from azureml.core import Run
experiment = Experiment(ws, 'Myexp2_v1_test21')
best_run = Run(experiment=experiment,
               run_id='AutoML_74e9d9dc-f347-4392-b8bb-3edeb4a6afad_8')
fitted_model = Run(experiment=experiment,
                   run_id='AutoML_74e9d9dc-f347-4392-b8bb-3edeb4a6afad_8')
#print(best_run.register_model())
print(fitted_model)

# Get a dataset by name
from azureml.core.dataset import Dataset

file_name = '2018Q4PredictionTrainedSet101.csv'
stock_dataset = Dataset.get_by_name(ws, '2018Q4PredictionTrainedSet101.csv')
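To materialize the dataset retrieved above, one would typically continue along these lines (a sketch; assumes the dataset was registered as tabular, so to_pandas_dataframe is available):

# Continuation sketch: load the registered dataset into pandas.
stock_df = stock_dataset.to_pandas_dataframe()
print(stock_df.head())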