def add_gist_tasks(batch_service_client, job_id, filedirs, output_container_name,
                   output_container_sas_token, input_container_name):
    """
    Adds a task for each input file in the collection to the specified job.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID of the job to which to add the tasks.
    :param list input_files: A collection of input files. One task will be
     created for each input file.
    :param output_container_name: The ID of an Azure Blob storage container to
    which the tasks will upload their results.
    :param output_container_sas_token: A SAS token granting write access to
    the specified Azure Blob storage container.
    """

    print('Adding {} tasks to job [{}]...'.format(len(filedirs), job_id))

    tasks = list()

    for idx, filedir in enumerate(filedirs):

        command = [
            'python $AZ_BATCH_NODE_SHARED_DIR/{} '
            '--filedir {} --storageaccount {} '
            '--storagecontainer {} --sastoken "{}" '
            '--inputcontainer {} --accountkey {}'.format(
                _TUTORIAL_TASK_FILE,
                '{:0>4}'.format(
                    filedir),  # zero-pad the directory name to nnnn (0000-9999)
                _STORAGE_ACCOUNT_NAME,
                output_container_name,
                output_container_sas_token,
                input_container_name,
                _STORAGE_ACCOUNT_KEY)
        ]

        #print('adding task {} for filedir {}'.format(idx,'{:0>4}'.format(filedir)))

        tasks.append(
            batch.models.TaskAddParameter(
                id='GISTtask{:0>2}'.format(idx),
                command_line=common_helpers.wrap_commands_in_shell('linux', command)))

    batch_service_client.task.add_collection(job_id, tasks)
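
# `common_helpers.wrap_commands_in_shell` is the helper shipped with the
# Azure Batch Python samples. A minimal sketch of its Linux branch, under the
# assumption that this repo vendors the standard samples helper (the name
# `_wrap_commands_in_shell_sketch` is illustrative only): it joins the
# commands into a single bash invocation that fails fast on the first error.
def _wrap_commands_in_shell_sketch(ostype, commands):
    if ostype.lower() == 'linux':
        # set -e aborts on the first failing command; pipefail propagates
        # failures from within pipelines.
        return '/bin/bash -c \'set -e; set -o pipefail; {}; wait\''.format(
            ';'.join(commands))
    raise ValueError('unsupported ostype: {}'.format(ostype))
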
def create_job(batch_service_client, job_id, pool_id, pattern):
    """
    Creates a job with the specified ID, associated with the specified pool.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID for the job.
    :param str pool_id: The ID for the pool.
    """
    print('Creating job [{}] for startqkey {}...'.format(job_id, pattern))

    prep_commands = [
        #'sudo docker run -it mlcrpacr31b520505a57.azurecr.io/ghanaimg:azbatch /bin/bash',
        'mkdir -p $AZ_BATCH_NODE_SHARED_DIR/tiles',
        'azcopy --source https://{0}.blob.core.windows.net/{1}/ ' \
        '--destination $AZ_BATCH_NODE_SHARED_DIR/tiles --source-key {2} ' \
        '--include "{3}" --recursive --exclude-older --exclude-newer --resume $AZ_BATCH_NODE_SHARED_DIR/journal'.format(
            _STORAGE_ACCOUNT_NAME,
            _STORAGE_INPUT_CONTAINER,
            _STORAGE_ACCOUNT_KEY,
            pattern)
        ]  # TODO: modify here and download the images
    task_containersettings = batchmodels.TaskContainerSettings(
        image_name=_ACR_IMG_NAME)
    job_prep = batch.models.JobPreparationTask(
        command_line=common_helpers.wrap_commands_in_shell(
            'linux', prep_commands),
        container_settings=task_containersettings)
    job = batch.models.JobAddParameter(
        id=job_id,
        pool_info=batch.models.PoolInformation(pool_id=pool_id),
        job_preparation_task=job_prep)

    try:
        batch_service_client.job.add(job)
    except batchmodels.batch_error.BatchErrorException as err:
        print_batch_exception(err)
        raise
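
# `print_batch_exception` is called in the except blocks of this module but
# defined elsewhere. A minimal sketch of what it likely does, modeled on the
# Azure Batch samples (an assumption; `_print_batch_exception_sketch` is an
# illustrative name): unpack and print the service error message and details.
def _print_batch_exception_sketch(batch_exception):
    print('-------------------------------------------')
    print('Exception encountered:')
    if (batch_exception.error and batch_exception.error.message
            and batch_exception.error.message.value):
        print(batch_exception.error.message.value)
        if batch_exception.error.values:
            for detail in batch_exception.error.values:
                print('{}:\t{}'.format(detail.key, detail.value))
    print('-------------------------------------------')
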
def add_tasks(batch_service_client, job_id, startqkey, output_container_name,
              output_container_sas_token):
    """
    Adds a task for each input file in the collection to the specified job.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID of the job to which to add the tasks.
    :param list input_files: A collection of input files. One task will be
     created for each input file.
    :param output_container_name: The ID of an Azure Blob storage container to
    which the tasks will upload their results.
    :param output_container_sas_token: A SAS token granting write access to
    the specified Azure Blob storage container.

    container_settings
    TaskContainerSettings 
    The settings for the container under which the task runs. If the pool that 
    will run this task has containerConfiguration set, this must be set as well. 
    If the pool that will run this task doesn't have containerConfiguration set, 
    this must not be set. When this is specified, all directories recursively 
    below the AZ_BATCH_NODE_ROOT_DIR (the root of Azure Batch directories on the node) 
    are mapped into the container, all task environment variables are mapped 
    into the container, and the task command line is executed in the container.

    """

    print('Adding {} tasks to job [{}]...'.format(tasks_per_job, job_id))

    task_containersettings = batchmodels.TaskContainerSettings(
        image_name=_ACR_IMG_NAME)

    tasks = list()

    for idx in range(tasks_per_job):
        taskqkey = startqkey + int2base(idx, 4, 1)

        command = [
                    'python $AZ_BATCH_NODE_SHARED_DIR/{} ' \
                    '--filedir $AZ_BATCH_NODE_SHARED_DIR/tiles ' \
                    '--model $AZ_BATCH_NODE_SHARED_DIR/ghanamines.h5 ' \
                    '--storageaccount {} ' \
                    '--storagecontainer {} --sastoken "{}" ' \
                    '--startqkey {}'.format(
                        _TUTORIAL_TASK_FILE,
                        _STORAGE_ACCOUNT_NAME,
                        output_container_name,
                        output_container_sas_token,
                        taskqkey)]

        print('adding task {}'.format(idx))

        tasks.append(
            batch.models.TaskAddParameter(
                id='TileScoretask{:0>2}'.format(idx),
                command_line=common_helpers.wrap_commands_in_shell('linux', command),
                container_settings=task_containersettings,
                # Run as an elevated auto-user (a commented-out alternative
                # here was user_name='root').
                user_identity=batchmodels.UserIdentity(
                    auto_user=batchmodels.AutoUserSpecification(
                        scope=batchmodels.AutoUserScope.task,
                        elevation_level=batchmodels.ElevationLevel.admin))))

    batch_service_client.task.add_collection(job_id, tasks)
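
# `int2base` (used above to derive per-task quadkeys) is defined elsewhere in
# this repo. A hedged sketch of its likely behavior (`_int2base_sketch` is an
# illustrative name): render n in the given base, zero-padded to `width`
# digits, so int2base(idx, 4, 1) yields the single base-4 quadkey digits
# '0'..'3' for idx in range(4).
def _int2base_sketch(n, base, width):
    digits = '0123456789abcdefghijklmnopqrstuvwxyz'
    out = ''
    while n > 0:
        out = digits[n % base] + out
        n //= base
    return out.rjust(width, '0')
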
def create_pool_with_containers(batch_service_client, pool_id, resource_files,
                                publisher, offer, sku):
    """
    Creates a pool of compute nodes with the specified OS settings.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str pool_id: An ID for the new pool.
    :param list resource_files: A collection of resource files for the pool's
    start task.
    :param str publisher: Marketplace image publisher
    :param str offer: Marketplace image offer
    :param str sku: Marketplace image sku
    """
    print('Creating pool [{}]...'.format(pool_id))

    # Create a new pool of Linux compute nodes using an Azure Virtual Machines
    # Marketplace image. For more information about creating pools of Linux
    # nodes, see:
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/

    # Specify the commands for the pool's start task. The start task is run
    # on each node as it joins the pool, and when it's rebooted or re-imaged.
    # We use the start task to prep the node for running our task script.
    task_commands = [
        # Copy the python_tutorial_task.py script to the "shared" directory
        # that all tasks that run on the node have access to. Note that
        # we are using the -p flag with cp to preserve the file uid/gid,
        # otherwise since this start task is run as an admin, it would not
        # be accessible by tasks run as a non-admin user.
        #'wget https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb',
        #'sudo dpkg -i packages-microsoft-prod.deb',
        #'wget -O azcopy.tar.gz https://aka.ms/downloadazcopylinux64',
        #'tar -xf azcopy.tar.gz',
        #'sudo ./install.sh',
        #'wget https://repo.anaconda.com/archive/Anaconda3-5.1.0-Linux-x86_64.sh -O ~/conda.sh',
        #'bash ~/conda.sh -b -p $AZ_BATCH_NODE_SHARED_DIR/conda',
        #'export PATH="$AZ_BATCH_NODE_SHARED_DIR/conda/bin:$PATH"',
        #'sudo apt-get -y update',
        #'sudo apt-get -y install azcopy',
        'cp -p {} $AZ_BATCH_NODE_SHARED_DIR'.format(_TUTORIAL_TASK_FILE),
        #'cp -p {} $AZ_BATCH_NODE_SHARED_DIR'.format(_ENV_YML_FILE),
        'azcopy --source https://{0}.blob.core.windows.net/model/ghanamines.h5 --destination $AZ_BATCH_NODE_SHARED_DIR/ghanamines.h5 --source-key {1}'
        .format(_STORAGE_ACCOUNT_NAME, _STORAGE_ACCOUNT_KEY),
        #'sudo $AZ_BATCH_NODE_SHARED_DIR/conda/bin/conda env create -f {}'.format(_ENV_YML_FILE)
    ]

    # Get the node agent SKU and image reference for the virtual machine
    # configuration.
    # For more information about the virtual machine configuration, see:
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/
    sku_to_use, image_ref_to_use = \
        common_helpers.select_latest_verified_vm_image_with_node_agent_sku(
            batch_service_client, publisher, offer, sku)

    user = batchmodels.AutoUserSpecification(
        scope=batchmodels.AutoUserScope.pool,
        elevation_level=batchmodels.ElevationLevel.admin)

    container_reg = batchmodels.ContainerRegistry(user_name=CLIENT_ID,
                                                  password=SECRET,
                                                  registry_server=_ACR_URL)
    container_cfg = batchmodels.ContainerConfiguration(
        container_image_names=[_ACR_IMG_NAME],
        container_registries=[container_reg])
    my_img_ref = batchmodels.ImageReference(
        virtual_machine_image_id=_CUSTOM_VM_IMG_ID)

    vm_cfg = batchmodels.VirtualMachineConfiguration(
        image_reference=my_img_ref,
        node_agent_sku_id=sku_to_use,  # e.g. 'batch.node.ubuntu 16.04'; verify that the ghanaimg image has GPU support
        container_configuration=container_cfg)
    task_containersettings = batchmodels.TaskContainerSettings(
        image_name=_ACR_IMG_NAME)

    new_pool = batchmodels.PoolAddParameter(
        id=pool_id,
        virtual_machine_configuration=vm_cfg,
        vm_size=_POOL_VM_SIZE,
        target_dedicated_nodes=_POOL_NODE_COUNT,
        target_low_priority_nodes=1,
        start_task=batch.models.StartTask(
            command_line=common_helpers.wrap_commands_in_shell(
                'linux', task_commands),
            user_identity=batchmodels.UserIdentity(auto_user=user),
            wait_for_success=True,
            resource_files=resource_files,
            container_settings=task_containersettings))

    try:
        batch_service_client.pool.add(new_pool)
    except batchmodels.batch_error.BatchErrorException as err:
        print_batch_exception(err)
        raise
def create_pool_with_custom_image(batch_service_client, pool_id,
                                  resource_files, publisher, offer, sku):
    """
    Creates a pool of compute nodes with the specified OS settings.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str pool_id: An ID for the new pool.
    :param list resource_files: A collection of resource files for the pool's
    start task.
    :param str publisher: Marketplace image publisher
    :param str offer: Marketplace image offer
    :param str sku: Marketplace image sku
    """
    print('Creating pool [{}]...'.format(pool_id))

    # Create a new pool of Linux compute nodes using an Azure Virtual Machines
    # Marketplace image. For more information about creating pools of Linux
    # nodes, see:
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/

    # Specify the commands for the pool's start task. The start task is run
    # on each node as it joins the pool, and when it's rebooted or re-imaged.
    # We use the start task to prep the node for running our task script.
    task_commands = [
        # Copy the python_tutorial_task.py script to the "shared" directory
        # that all tasks that run on the node have access to. Note that
        # we are using the -p flag with cp to preserve the file uid/gid,
        # otherwise since this start task is run as an admin, it would not
        # be accessible by tasks run as a non-admin user.
        'cp -p {} $AZ_BATCH_NODE_SHARED_DIR'.format(_TUTORIAL_TASK_FILE)
    ]

    # Get the node agent SKU and image reference for the virtual machine
    # configuration.
    # For more information about the virtual machine configuration, see:
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/
    sku_to_use, image_ref_to_use = \
        common_helpers.select_latest_verified_vm_image_with_node_agent_sku(
            batch_service_client, publisher, offer, sku)

    user = batchmodels.AutoUserSpecification(
        scope=batchmodels.AutoUserScope.pool,
        elevation_level=batchmodels.ElevationLevel.admin)

    my_img_ref = batchmodels.ImageReference(
        virtual_machine_image_id=VM_IMAGE_ID)
    new_pool = batchmodels.PoolAddParameter(
        id=pool_id,
        virtual_machine_configuration=batchmodels.VirtualMachineConfiguration(
            image_reference=my_img_ref, node_agent_sku_id=sku_to_use),
        vm_size=_POOL_VM_SIZE,
        target_dedicated_nodes=_POOL_NODE_COUNT,
        start_task=batch.models.StartTask(
            command_line=common_helpers.wrap_commands_in_shell(
                'linux', task_commands),
            user_identity=batchmodels.UserIdentity(auto_user=user),
            wait_for_success=True,
            resource_files=resource_files))

    try:
        batch_service_client.pool.add(new_pool)
    except batchmodels.batch_error.BatchErrorException as err:
        print_batch_exception(err)
        raise
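
# VM_IMAGE_ID (referenced above) must be the ARM resource ID of a managed VM
# image in the same subscription and region as the Batch account. A hedged
# example of the expected format, with every segment a placeholder:
#
#   VM_IMAGE_ID = ('/subscriptions/<subscription-id>'
#                  '/resourceGroups/<resource-group>'
#                  '/providers/Microsoft.Compute/images/<image-name>')
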
def create_pool(batch_service_client, pool_id, resource_files, publisher,
                offer, sku):
    """
    Creates a pool of compute nodes with the specified OS settings.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str pool_id: An ID for the new pool.
    :param list resource_files: A collection of resource files for the pool's
    start task.
    :param str publisher: Marketplace image publisher
    :param str offer: Marketplace image offer
    :param str sku: Marketplace image sku
    """
    print('Creating pool [{}]...'.format(pool_id))

    # Create a new pool of Linux compute nodes using an Azure Virtual Machines
    # Marketplace image. For more information about creating pools of Linux
    # nodes, see:
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/

    # Specify the commands for the pool's start task. The start task is run
    # on each node as it joins the pool, and when it's rebooted or re-imaged.
    # We use the start task to prep the node for running our task script.
    task_commands = [
        # Copy the python_tutorial_task.py script to the "shared" directory
        # that all tasks that run on the node have access to. Note that
        # we are using the -p flag with cp to preserve the file uid/gid,
        # otherwise since this start task is run as an admin, it would not
        # be accessible by tasks run as a non-admin user.
        'cp -p {} $AZ_BATCH_NODE_SHARED_DIR'.format(_TUTORIAL_TASK_FILE),
        'sudo apt-get -y update',
        'sudo apt-get -y install gcc',
        'sudo apt-get -y install fftw3 fftw3-dev python2.7-dev',
        'curl -fSsL https://bootstrap.pypa.io/get-pip.py | python',
        'pip install Cython',
        'pip install numpy',
        'pip install azure-storage==0.36.0',
        'pip install pillow',
        'curl -O https://pypi.python.org/packages/f7/4a/2eef58a73c48aec6aca09254ef0f39148fd39b8dc7ec96d6b39d513b03eb/pyleargist-2.0.5.tar.gz',
        'tar -xf pyleargist-2.0.5.tar.gz',
        'cd pyleargist-2.0.5/src/',
        'curl -O https://bitbucket.org/ogrisel/pyleargist/raw/8024021a0d229ed1e1459a5d6d1700da4aee28b1/src/leargist.pxd',
        'cd ..',
        'python setup.py build_ext',
        'python setup.py build',
        'sudo python setup.py install'
    ]

    # Get the node agent SKU and image reference for the virtual machine
    # configuration.
    # For more information about the virtual machine configuration, see:
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/
    sku_to_use, image_ref_to_use = \
        common_helpers.select_latest_verified_vm_image_with_node_agent_sku(
            batch_service_client, publisher, offer, sku)
    user = batchmodels.AutoUserSpecification(
        scope=batchmodels.AutoUserScope.pool,
        elevation_level=batchmodels.ElevationLevel.admin)
    new_pool = batch.models.PoolAddParameter(
        id=pool_id,
        virtual_machine_configuration=batchmodels.VirtualMachineConfiguration(
            image_reference=image_ref_to_use, node_agent_sku_id=sku_to_use),
        vm_size=_POOL_VM_SIZE,
        target_dedicated_nodes=_POOL_NODE_COUNT,
        start_task=batch.models.StartTask(
            command_line=common_helpers.wrap_commands_in_shell(
                'linux', task_commands),
            user_identity=batchmodels.UserIdentity(auto_user=user),
            wait_for_success=True,
            resource_files=resource_files),
    )

    try:
        batch_service_client.pool.add(new_pool)
    except batchmodels.batch_error.BatchErrorException as err:
        print_batch_exception(err)
        raise
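
# A hedged end-to-end sketch of how the helpers above compose into a driver.
# Assumptions: _BATCH_ACCOUNT_NAME, _BATCH_ACCOUNT_KEY and _BATCH_ACCOUNT_URL
# are defined alongside the storage constants used above, `batchauth` is
# imported as `import azure.batch.batch_auth as batchauth`, and the
# publisher/offer/sku values below are illustrative, not this repo's actual
# configuration.
def _run_pipeline_sketch(pool_id, job_id, startqkey, pattern,
                         output_container_name, output_container_sas_token):
    credentials = batchauth.SharedKeyCredentials(_BATCH_ACCOUNT_NAME,
                                                 _BATCH_ACCOUNT_KEY)
    batch_client = batch.BatchServiceClient(credentials,
                                            base_url=_BATCH_ACCOUNT_URL)
    # Bring up the container-enabled pool, create a job whose preparation
    # task stages the input tiles matching `pattern`, then add the
    # per-quadkey scoring tasks.
    create_pool_with_containers(batch_client, pool_id, [],
                                'Canonical', 'UbuntuServer', '16.04-LTS')
    create_job(batch_client, job_id, pool_id, pattern)
    add_tasks(batch_client, job_id, startqkey,
              output_container_name, output_container_sas_token)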