Example #1
def test_param_override1():
    conf = Config('confs/algos/darts.yaml;confs/datasets/cifar10.yaml')
    assert not conf['nas']['eval']['trainer']['apex']['enabled']
    assert not conf['nas']['eval']['loader']['apex']['enabled']

    conf = Config('confs/algos/darts.yaml;confs/datasets/cifar10.yaml',
                  param_args=["--nas.eval.trainer.apex.enabled", "True"])
    assert conf['nas']['eval']['trainer']['apex']['enabled']
    assert conf['nas']['eval']['loader']['apex']['enabled']
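
A minimal sketch of the override mechanism this test relies on, assuming overrides arrive as "--dotted.path value" pairs; the helper name apply_override is hypothetical, not archai's API:

def apply_override(d: dict, dotted_path: str, value) -> None:
    # Hypothetical helper: walk the nested dict along the dotted path
    # and set the leaf key to the new value.
    *parents, leaf = dotted_path.lstrip('-').split('.')
    node = d
    for key in parents:
        node = node[key]
    node[leaf] = value

conf = {'nas': {'eval': {'trainer': {'apex': {'enabled': False}}}}}
apply_override(conf, '--nas.eval.trainer.apex.enabled', True)
assert conf['nas']['eval']['trainer']['apex']['enabled']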
Example #2
def test_param_override2():
    conf = Config('confs/algos/darts.yaml;confs/datasets/imagenet.yaml')
    assert conf['nas']['eval']['trainer']['apex']['distributed_enabled']
    assert conf['nas']['eval']['loader']['apex']['distributed_enabled']

    conf = Config('confs/algos/darts.yaml;confs/datasets/imagenet.yaml',
                  param_args=[
                      "--nas.eval.trainer.apex.enabled", "True",
                      "--nas.eval.trainer.apex.distributed_enabled", "False"
                  ])
    assert not conf['nas']['eval']['trainer']['apex']['distributed_enabled']
    assert not conf['nas']['eval']['loader']['apex']['distributed_enabled']
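
The asserts suggest the loader's apex node mirrors the trainer's. The inline YAML below is an illustration of that nested shape only, not the actual contents of darts.yaml or imagenet.yaml:

import yaml

snippet = """
nas:
  eval:
    trainer:
      apex:
        enabled: false
        distributed_enabled: true
    loader:
      apex:
        enabled: false
        distributed_enabled: true
"""
conf = yaml.safe_load(snippet)
assert conf['nas']['eval']['trainer']['apex']['distributed_enabled']
assert conf['nas']['eval']['loader']['apex']['distributed_enabled']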
Example #3
def main():
    parser = argparse.ArgumentParser(description='Archai data install')
    parser.add_argument('--dataroot',
                        type=str,
                        default='~/dataroot',
                        help='path to dataroot on local drive')
    parser.add_argument('--dataset',
                        type=str,
                        default='cifar10',
                        help='name of the dataset; confs/datasets/{dataset}.yaml '
                             'must exist and name the folder or tar file the '
                             'dataset resides in')
    args, extra_args = parser.parse_known_args()

    pt_data_dir = os.environ.get('PT_DATA_DIR', '')
    if not pt_data_dir:
        raise RuntimeError(
            'This script needs PT_DATA_DIR environment variable with path to dataroot on cloud drive'
        )
    pt_data_dir = utils.full_path(pt_data_dir)
    print('pt_data_dir:', pt_data_dir)

    conf_data_filepath = f'confs/datasets/{args.dataset}.yaml'
    print('conf_data_filepath:', conf_data_filepath)

    conf = Config(config_filepath=conf_data_filepath)
    for dataset_key in ['dataset', 'dataset_search', 'dataset_eval']:
        if dataset_key in conf:
            conf_data = conf[dataset_key]
            untar_dataset(dataset_key, pt_data_dir, conf_data, args.dataroot)
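
untar_dataset itself is not shown in this snippet. A minimal sketch of what such a helper might do, assuming the config carries the tar file name; the function and parameter names below are assumptions:

import os
import tarfile

def untar_dataset_sketch(pt_data_dir: str, tar_name: str, dataroot: str) -> None:
    # Hypothetical stand-in for untar_dataset: extract the dataset tar
    # from the cloud drive into the local dataroot.
    tar_path = os.path.join(pt_data_dir, tar_name)
    dest = os.path.expanduser(dataroot)
    os.makedirs(dest, exist_ok=True)
    with tarfile.open(tar_path) as tar:
        tar.extractall(path=dest)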
Example #4
def imagenet_test():
    conf = Config('confs/algos/darts.yaml;confs/datasets/imagenet.yaml')
    conf_loader = conf['nas']['eval']['loader']
    dl_train, *_ = data.get_data(conf_loader)
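
Assuming get_data returns standard PyTorch DataLoaders, the loader can be smoke-tested by pulling a single batch; this check is a sketch and not part of the original test:

def smoke_test_loader(dl_train) -> None:
    # Pull one batch to verify shapes; assumes (inputs, targets) tuples.
    inputs, targets = next(iter(dl_train))
    print('batch inputs:', tuple(inputs.shape), 'targets:', tuple(targets.shape))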
Example #5

def cancel_experiment(ws, experiment_name):
    exp = get_experiment(ws, experiment_name)
    print(
        'Cancelling existing experiment with name: {}'.format(experiment_name))
    for run in tqdm(list(exp.get_runs())):
        run.cancel()


if __name__ == "__main__":
    print("SDK Version:", azureml.core.VERSION)
    set_diagnostics_collection(send_diagnostics=True)

    # Read in config
    conf = Config(config_filepath='~/aml_secrets/aml_secrets_rr2msrlabs.yaml')

    # Config region
    conf_aml = conf['aml_config']
    conf_cluster = conf['cluster_config']
    conf_docker = conf['azure_docker']
    conf_experiment = conf['experiment']
    # endregion

    # Initialize workspace
    # Make sure you have downloaded your workspace config
    ws = Workspace.from_config(path=conf_aml['aml_config_file'])
    print('Workspace name: ' + ws.name,
          'Azure region: ' + ws.location,
          'Subscription id: ' + ws.subscription_id,
          'Resource group: ' + ws.resource_group,
          sep='\n')
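
Calling cancel() on runs that have already finished is wasted work. A hedged variant that skips terminal runs, using azureml's Run.get_status() and the get_experiment helper from the snippet above:

def cancel_active_runs(ws, experiment_name):
    # Only cancel runs that are not already in a terminal state.
    exp = get_experiment(ws, experiment_name)
    for run in exp.get_runs():
        if run.get_status() not in ('Completed', 'Failed', 'Canceled'):
            run.cancel()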
Example #6
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

from archai.common.config import Config
from archai.datasets import data
from torchvision import transforms
from archai.common.ml_utils import channel_norm

if __name__ == '__main__':
    conf = Config(config_filepath='confs/datasets/flower102.yaml')

    conf_dataset = conf['dataset']

    ds_provider = data.create_dataset_provider(conf_dataset)

    train_ds, _ = ds_provider.get_datasets(
        True, False,
        transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor()
        ]), transforms.Compose([]))

    print(channel_norm(train_ds))

    exit(0)
Example #7
        # The head of this call was lost in extraction; 'est = Estimator(
        # source_directory=project_folder,' is an assumed reconstruction
        # based on azureml's Estimator signature and 'experiment.submit(est)'.
        est = Estimator(source_directory=project_folder,
                        compute_target=compute_target,
                        entry_script='scripts/random/cifar_eval.py',
                        custom_docker_image=conf_docker['image_name'],
                        image_registry_details=image_registry_details,
                        user_managed=user_managed_dependencies,
                        source_directory_data_store=input_ds)

        run = experiment.submit(est)


if __name__ == "__main__":
    print("SDK Version:", azureml.core.VERSION)
    set_diagnostics_collection(send_diagnostics=True)

    # Read in config
    conf = Config(config_filepath='~/aml_secrets/aml_config_dedey.yaml')

    # Config region
    conf_aml = conf['aml_config']
    conf_cluster = conf['cluster_config']
    conf_docker = conf['azure_docker']
    conf_experiment = conf['experiment']
    # endregion

    # Initialize workspace
    # Make sure you have downloaded your workspace config
    ws = Workspace.from_config(path=conf_aml['aml_config_file'])
    print('Workspace name: ' + ws.name,
          'Azure region: ' + ws.location,
          'Subscription id: ' + ws.subscription_id,
          'Resource group: ' + ws.resource_group,
          sep='\n')
Example #8
def get_dataroot() -> str:
    conf = Config(config_filepath='confs/algos/manual.yaml')
    return conf['dataset']['dataroot']
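
Config values like dataroot often hold '~'-prefixed paths; archai's utils.full_path (used in Example #3) presumably normalizes them. A standard-library equivalent, offered as an assumption about its behavior rather than archai's implementation:

import os

def full_path_sketch(path: str) -> str:
    # Expand '~', environment variables, and relative segments.
    return os.path.abspath(os.path.expanduser(os.path.expandvars(path)))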
Example #9
def test_serialize():
    conf = Config()
    conf['decay'] = 1
    s = yaml.dump(conf)
    conf2 = yaml.load(s, Loader=yaml.Loader)
    assert len(conf2) == 1
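
The same round trip can be checked with a plain dict, which is effectively what the test exercises if Config behaves like a mapping:

import yaml

d = {'decay': 1}
s = yaml.dump(d)
d2 = yaml.load(s, Loader=yaml.Loader)
assert d2 == {'decay': 1} and len(d2) == 1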
Example #10
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

from archai.common.config import Config
from archai.datasets import data
from torchvision import transforms
from archai.common.ml_utils import channel_norm

if __name__ == '__main__':
    conf = Config(config_filepath='confs/datasets/cifar10.yaml')

    conf_data = conf['dataset']

    ds_provider = data.create_dataset_provider(conf_data)

    train_ds, _ = ds_provider.get_datasets(
        True, False,
        transforms.Compose(
            [transforms.Resize((32, 32)),
             transforms.ToTensor()]), transforms.Compose([]))

    print(channel_norm(train_ds))

    exit(0)
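
channel_norm presumably returns per-channel mean and std over the dataset; a minimal sketch of that computation with torch, offered as an assumption about what ml_utils.channel_norm does:

import torch
from torch.utils.data import DataLoader

def channel_norm_sketch(ds, batch_size=256):
    # Accumulate per-channel sums over the whole dataset, then derive
    # mean and std; assumes each item is a (C, H, W) tensor plus a label.
    loader = DataLoader(ds, batch_size=batch_size)
    n, s, s2 = 0, 0.0, 0.0
    for x, _ in loader:
        x = x.transpose(0, 1).reshape(x.size(1), -1)  # (C, B*H*W)
        n += x.size(1)
        s = s + x.sum(dim=1)
        s2 = s2 + (x * x).sum(dim=1)
    mean = s / n
    std = (s2 / n - mean * mean).sqrt()
    return mean, std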
Example #11
    def __init__(self, config_filepath: str) -> None:

        # read in config
        self.conf = Config(config_filepath)

        # config region
        self.conf_aml = self.conf['aml_config']
        self.conf_storage = self.conf['storage']
        self.conf_cluster = self.conf['cluster_config']
        self.conf_docker = self.conf['azure_docker']
        self.conf_experiment = self.conf['experiment']
        # end region

        # initialize workspace
        self.ws = Workspace.from_config(path=self.conf_aml['aml_config_file'])
        print('Workspace name: ' + self.ws.name,
              'Azure region: ' + self.ws.location,
              'Subscription id: ' + self.ws.subscription_id,
              'Resource group: ' + self.ws.resource_group,
              sep='\n')

        # register blobs
        # TODO: make blob registration more flexible
        self.input_ds = Datastore.register_azure_blob_container(
            workspace=self.ws,
            datastore_name=self.conf_storage['input_datastore_name'],
            container_name=self.conf_storage['input_container_name'],
            account_name=self.conf_storage['input_azure_storage_account_name'],
            account_key=self.conf_storage['input_azure_storage_account_key'],
            create_if_not_exists=False)

        self.output_ds = Datastore.register_azure_blob_container(
            workspace=self.ws,
            datastore_name=self.conf_storage['output_datastore_name'],
            container_name=self.conf_storage['output_container_name'],
            account_name=self.conf_storage['output_azure_storage_account_name'],
            account_key=self.conf_storage['output_azure_storage_account_key'],
            create_if_not_exists=False)

        # create compute cluster
        try:
            self.compute_target = ComputeTarget(
                workspace=self.ws, name=self.conf_cluster['cluster_name'])
            print(self.compute_target.get_status().serialize())
        except Exception as e:
            print('Encountered error trying to get the compute target')
            print(f'Exception was {e}')
            sys.exit(1)

        self.project_folder = self.conf_experiment['project_folder']

        # setup custom docker usage
        self.image_registry_details = ContainerRegistry()
        self.image_registry_details.address = self.conf_docker[
            'image_registry_address']
        self.image_registry_details.username = self.conf_docker[
            'image_registry_username']
        self.image_registry_details.password = self.conf_docker[
            'image_registry_password']

        self.user_managed_dependencies = True
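
A sketch of how these attributes could feed a run submission, mirroring the fragment in Example #7; the method name submit_sketch and the entry script path are assumptions, and the azureml Estimator import is taken as given:

    def submit_sketch(self, experiment):
        # Mirrors Example #7: wire the attributes built in __init__
        # into an azureml Estimator and submit it.
        est = Estimator(source_directory=self.project_folder,
                        compute_target=self.compute_target,
                        entry_script='scripts/random/cifar_eval.py',
                        custom_docker_image=self.conf_docker['image_name'],
                        image_registry_details=self.image_registry_details,
                        user_managed=self.user_managed_dependencies,
                        source_directory_data_store=self.input_ds)
        return experiment.submit(est)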