Example #1
    def add_tasks(self, part_file, job_id):
        """
        Adds one gradient task per partition (self.K in total) to the specified job.
        """
        print("Adding {} tasks to job [{}]...".format(self.K, job_id))
        tasks = list()
        for i in range(self.K):
            output_file = self.build_output_file(i)
            command_line = "/bin/bash -c 'echo $AZ_BATCH_TASK_WORKING_DIR && daemon status && scgrad {} {} {} {}'".format(
                _GRAD_COMMON_FILE, _GRAD_PART_FILE, _CONTAINER_OUTPUT_FILE, i)

            if self.config.REGISTRY_USERNAME:
                registry = models.ContainerRegistry(
                    user_name=self.config.REGISTRY_USERNAME,
                    password=self.config.REGISTRY_PASSWORD,
                    registry_server=self.config.REGISTRY_SERVER,
                )
                task_container_settings = models.TaskContainerSettings(
                    image_name=self.config.DOCKER_IMAGE, registry=registry)
            else:
                task_container_settings = models.TaskContainerSettings(
                    image_name=self.config.DOCKER_IMAGE)

            tasks.append(
                models.TaskAddParameter(
                    id="grad_part_{}".format(i),
                    command_line=command_line,
                    resource_files=[self.common_file, part_file],
                    output_files=[output_file],
                    container_settings=task_container_settings,
                ))

        self.batch_client.task.add_collection(job_id, tasks)
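Note that task.add_collection accepts at most 100 tasks per request, so for a large self.K the single call above would need chunked submission. A minimal sketch, continuing the method:

        # add_collection is capped at 100 tasks per request;
        # submit the list in chunks rather than all at once.
        for start in range(0, len(tasks), 100):
            self.batch_client.task.add_collection(job_id, tasks[start:start + 100])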
Example #2
def add_tasks(batch_service_client, job_id, task_id):
    """
    Adds a single containerized task to the specified job.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID of the job to which to add the task.
    :param str task_id: The ID to assign to the new task.
    """

    print('Adding tasks to job [{}]...'.format(job_id))

    # This is the user who runs the command inside the container:
    # an unprivileged one.
    user = batchmodels.AutoUserSpecification(
        scope=batchmodels.AutoUserScope.task,
        elevation_level=batchmodels.ElevationLevel.non_admin)

    # This is the docker image we want to run
    task_container_settings = batchmodels.TaskContainerSettings(
        image_name=config._DOCKER_IMAGE,
        container_run_options='--rm -e PYTHONUNBUFFERED=1')

    # The command line the container executes; the container is run roughly
    # like: docker ... imagename python main.py quantity ...
    task = batchmodels.TaskAddParameter(
        id=task_id,
        command_line=("python main.py quantity --job-id=2 --keyword='covid19' "
                      "--question='Is covid19 man made' --num-papers=10"),
        container_settings=task_container_settings,
        user_identity=batchmodels.UserIdentity(auto_user=user))

    batch_service_client.task.add(job_id, task)
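For context, a minimal sketch of how this function might be driven. The account name, key, URL, pool, and job IDs are placeholders, and the constructor shown assumes a recent azure-batch release (older ones take base_url instead of batch_url):

from azure.batch import BatchServiceClient
from azure.batch import models as batchmodels
from azure.batch.batch_auth import SharedKeyCredentials

credentials = SharedKeyCredentials('<account-name>', '<account-key>')
batch_client = BatchServiceClient(
    credentials, batch_url='https://<account-name>.<region>.batch.azure.com')

# The job must already exist, and its pool must have containerConfiguration
# set, before container tasks can be added to it.
batch_client.job.add(batchmodels.JobAddParameter(
    id='analysis-job',
    pool_info=batchmodels.PoolInformation(pool_id='container-pool')))
add_tasks(batch_client, 'analysis-job', 'task-0')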
Example #3
def add_tasks(batch_service_client, job_id, task_id, number_to_test):
    """
    Adds a single containerized primality-test task to the specified job.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID of the job to which to add the task.
    :param str task_id: The ID to assign to the new task.
    :param int number_to_test: The number to test for primality.
    """

    print('Adding tasks to job [{}]...'.format(job_id))

    # This is the user who runs the command inside the container:
    # an unprivileged one.
    user = batchmodels.AutoUserSpecification(
        scope=batchmodels.AutoUserScope.task,
        elevation_level=batchmodels.ElevationLevel.non_admin)

    # This is the docker image we want to run
    task_container_settings = batchmodels.TaskContainerSettings(
        image_name=config._DOCKER_IMAGE, container_run_options='--rm')

    # The container needs this argument; it runs: python /is_prime.py <number>
    task = batchmodels.TaskAddParameter(
        id=task_id,
        command_line='python /is_prime.py ' + str(number_to_test),
        container_settings=task_container_settings,
        user_identity=batchmodels.UserIdentity(auto_user=user))

    batch_service_client.task.add(job_id, task)
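After submission, the caller typically polls until the task finishes. A small sketch, assuming the same batchmodels import as above (the 5-second interval is arbitrary):

import time

def wait_for_task(batch_service_client, job_id, task_id):
    # Poll the task state until Batch reports it completed.
    while True:
        task = batch_service_client.task.get(job_id, task_id)
        if task.state == batchmodels.TaskState.completed:
            return task
        time.sleep(5)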
Example #4
    def add_task(
        self,
        resource_files: List[models.ResourceFile],
        output_files: List[models.OutputFile],
        command_line=None,
    ):
        """
        Adds a task for each input file in the collection to the specified job.

        :param list resource_files: A list of ResouceFile descriptions for the task
        :param list output_files: A list of OutputFile descriptions for the task
        :param str command_line: The command used to for the task.  Optional;
            if missing, defaults to the command_line parameter provided when
            instantiating this object
        """
        self.tasks.append(
            models.TaskAddParameter(
                id="Task_{}".format(len(self.tasks)),
                command_line=(self.config.COMMAND_LINE
                              if command_line is None else command_line),
                resource_files=resource_files,
                output_files=output_files,
                container_settings=models.TaskContainerSettings(
                    image_name=self.config.DOCKER_IMAGE),
            ))
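A hedged sketch of building the two lists this method expects; the URLs and SAS tokens are placeholders, and ResourceFile's http_url parameter assumes a recent azure-batch release (older ones use blob_source):

resource_files = [
    models.ResourceFile(
        # Downloaded into the task working directory before the command runs.
        http_url='https://<account>.blob.core.windows.net/input/data.csv?<sas>',
        file_path='data.csv')
]
output_files = [
    models.OutputFile(
        file_pattern='$AZ_BATCH_TASK_WORKING_DIR/results/*',
        destination=models.OutputFileDestination(
            container=models.OutputFileBlobContainerDestination(
                container_url='https://<account>.blob.core.windows.net/output?<sas>')),
        upload_options=models.OutputFileUploadOptions(
            upload_condition=models.OutputFileUploadCondition.task_success))
]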
Example #5
def add_tasks(
    config,
    _blob_client,
    batch_service_client,
    container_sas_url,
    job_id,
    _input_file,
    count,
):
    """
    Adds a task for each input file in the collection to the specified job.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID of the job to which to add the tasks.
    :param list input_files: The input files
    :param output_container_sas_token: A SAS token granting write access to
    the specified Azure Blob storage container.
    """

    print("Adding {} tasks to job [{}]...".format(count, job_id))

    tasks = list()

    for fold_number in range(count):
        output_file = build_output_file(container_sas_url, fold_number)
        # A simpler command line, handy for smoke-testing the container:
        # command_line = '/bin/bash -c \'echo "Hello World" && echo "hello: world" > output.yaml\''
        command_line = "/bin/bash -c 'stt {} {} {}'".format(
            _CONTAINER_INPUT_FILE, _CONTAINER_OUTPUT_FILE, fold_number
        )

        task_container_settings = models.TaskContainerSettings(
            image_name=config.DOCKER_CONTAINER
        )

        tasks.append(
            batch.models.TaskAddParameter(
                id="Task_{}".format(fold_number),
                command_line=command_line,
                resource_files=[_input_file],
                output_files=[output_file],
                container_settings=task_container_settings,
            )
        )

    batch_service_client.task.add_collection(job_id, tasks)
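build_output_file is referenced above but not shown. A plausible sketch: it maps the container's output file to a fold-specific blob in the SAS-protected container (the blob path here is an assumption):

def build_output_file(container_sas_url, fold_number):
    # Upload the task's output file to a fold-specific blob on success.
    return batch.models.OutputFile(
        file_pattern=_CONTAINER_OUTPUT_FILE,
        destination=batch.models.OutputFileDestination(
            container=batch.models.OutputFileBlobContainerDestination(
                container_url=container_sas_url,
                path='output_{}.yaml'.format(fold_number))),  # hypothetical path
        upload_options=batch.models.OutputFileUploadOptions(
            upload_condition=batch.models.OutputFileUploadCondition.task_success))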
Example #6
def create_job(batch_service_client, job_id, pool_id, pattern):
    """
    Creates a job with the specified ID, associated with the specified pool.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID for the job.
    :param str pool_id: The ID for the pool.
    :param str pattern: The startqkey pattern selecting which tiles the job
     preparation task downloads.
    """
    print('Creating job [{}] for startqkey {}...'.format(job_id, pattern))

    prep_commands = [
        'mkdir -p $AZ_BATCH_NODE_SHARED_DIR/tiles',
        'azcopy --source https://{0}.blob.core.windows.net/{1}/ ' \
        '--destination $AZ_BATCH_NODE_SHARED_DIR/tiles --source-key {2} ' \
        '--include "{3}" --recursive --exclude-older --exclude-newer --resume $AZ_BATCH_NODE_SHARED_DIR/journal'.format(
            _STORAGE_ACCOUNT_NAME,
            _STORAGE_INPUT_CONTAINER,
            _STORAGE_ACCOUNT_KEY,
            pattern)
        ]  # downloads the input images to the node's shared directory
    task_containersettings = batchmodels.TaskContainerSettings(
        image_name=_ACR_IMG_NAME)
    job_prep = batch.models.JobPreparationTask(
        command_line=common_helpers.wrap_commands_in_shell(
            'linux', prep_commands),
        container_settings=task_containersettings)
    job = batch.models.JobAddParameter(
        id=job_id,
        pool_info=batch.models.PoolInformation(pool_id=pool_id),
        job_preparation_task=job_prep)

    try:
        batch_service_client.job.add(job)
    except batchmodels.batch_error.BatchErrorException as err:
        print_batch_exception(err)
        raise
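print_batch_exception is called above but not defined here. A sketch along the lines of the helper in the Azure Batch Python samples:

def print_batch_exception(batch_exception):
    # Dump the code, message, and detail values of a BatchErrorException.
    print('-------------------------------------------')
    print('Exception encountered:')
    if (batch_exception.error and batch_exception.error.message
            and batch_exception.error.message.value):
        print(batch_exception.error.message.value)
        if batch_exception.error.values:
            for detail in batch_exception.error.values:
                print('{}:\t{}'.format(detail.key, detail.value))
    print('-------------------------------------------')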
Example #7
def add_tasks(batch_service_client, pool_id, task_id, docker_image,
              storage_account, storage_key, container_name, file_name,
              output_container):
    job_id = "batchjob"
    try:
        job = batchmodels.JobAddParameter(
            id=job_id, pool_info=batchmodels.PoolInformation(pool_id=pool_id))
        batch_service_client.job.add(job)
        logging.info('Adding job {} to pool...'.format(job_id))
    except batchmodels.BatchErrorException:
        logging.info(
            'Job ID: {} already exists and is associated with the pool...'.format(
                job_id))

    logging.info('Adding tasks to job [{}]...'.format(job_id))

    # This is the user who runs the command inside the container:
    # an elevated (admin) one.
    user = batchmodels.AutoUserSpecification(
        scope=batchmodels.AutoUserScope.task,
        elevation_level=batchmodels.ElevationLevel.admin)

    # This is the docker image we want to run
    task_container_settings = batchmodels.TaskContainerSettings(
        image_name=docker_image,
        container_run_options='--rm -v /scratch:/scratch')

    # The worker script inside the container receives these arguments
    task = batchmodels.TaskAddParameter(
        id=task_id,
        command_line='/opt/azureblobworker.sh %s %s %s %s %s %s' %
        (storage_account, storage_key, task_id, container_name, file_name,
         output_container),
        container_settings=task_container_settings,
        user_identity=batchmodels.UserIdentity(auto_user=user))
    batch_service_client.task.add(job_id, task)
Example #8
    def test_batch_tasks(self, batch_job, **kwargs):
        client = self.create_sharedkey_client(**kwargs)

        # Test Create Task with Auto Complete
        exit_conditions = models.ExitConditions(
            exit_codes=[models.ExitCodeMapping(1, models.ExitOptions(models.JobAction.terminate))],
            exit_code_ranges=[models.ExitCodeRangeMapping(2, 4, models.ExitOptions(models.JobAction.disable))],
            default=models.ExitOptions(models.JobAction.none))
        task_param = models.TaskAddParameter(
            id=self.get_resource_name('batch_task1_'),
            command_line='cmd /c "echo hello world"',
            exit_conditions=exit_conditions
        )
        try:
            client.task.add(batch_job.id, task_param)
        except models.BatchErrorException as e:
            message = "{}: ".format(e.error.code, e.error.message)
            for v in e.error.values:
                message += "\n{}: {}".format(v.key, v.value)
            raise Exception(message)
        task = client.task.get(batch_job.id, task_param.id)
        self.assertIsInstance(task, models.CloudTask)
        self.assertEqual(task.exit_conditions.default.job_action, models.JobAction.none)
        self.assertEqual(task.exit_conditions.exit_codes[0].code, 1)
        self.assertEqual(task.exit_conditions.exit_codes[0].exit_options.job_action, models.JobAction.terminate)

        # Test Create Task with Output Files
        container_url = "https://test.blob.core.windows.net:443/test-container"
        outputs = [
            models.OutputFile(
                file_pattern="../stdout.txt",
                destination=models.OutputFileDestination(
                    container=models.OutputFileBlobContainerDestination(
                        container_url=container_url, path="taskLogs/output.txt")),
                upload_options=models.OutputFileUploadOptions(
                    upload_condition=models.OutputFileUploadCondition.task_completion)),
            models.OutputFile(
                file_pattern="../stderr.txt",
                destination=models.OutputFileDestination(
                    container=models.OutputFileBlobContainerDestination(
                        container_url=container_url, path="taskLogs/error.txt")),
                upload_options=models.OutputFileUploadOptions(
                    upload_condition=models.OutputFileUploadCondition.task_failure)),
        ]
        task_param = models.TaskAddParameter(
            id=self.get_resource_name('batch_task2_'),
            command_line='cmd /c "echo hello world"',
            output_files=outputs
        )
        client.task.add(batch_job.id, task_param)
        task = client.task.get(batch_job.id, task_param.id)
        self.assertIsInstance(task, models.CloudTask)
        self.assertEqual(len(task.output_files), 2)

        # Test Create Task with Auto User
        auto_user = models.AutoUserSpecification(
            scope=models.AutoUserScope.task,
            elevation_level=models.ElevationLevel.admin)
        task_param = models.TaskAddParameter(
            id=self.get_resource_name('batch_task3_'),
            command_line='cmd /c "echo hello world"',
            user_identity=models.UserIdentity(auto_user=auto_user)
        )
        client.task.add(batch_job.id, task_param)
        task = client.task.get(batch_job.id, task_param.id)
        self.assertIsInstance(task, models.CloudTask)
        self.assertEqual(task.user_identity.auto_user.scope, models.AutoUserScope.task)
        self.assertEqual(task.user_identity.auto_user.elevation_level, models.ElevationLevel.admin)

        # Test Create Task with Token Settings
        task_param = models.TaskAddParameter(
            id=self.get_resource_name('batch_task4_'),
            command_line='cmd /c "echo hello world"',
            authentication_token_settings=models.AuthenticationTokenSettings(
                access=[models.AccessScope.job])
        )
        client.task.add(batch_job.id, task_param)
        task = client.task.get(batch_job.id, task_param.id)
        self.assertIsInstance(task, models.CloudTask)
        self.assertEqual(task.authentication_token_settings.access[0], models.AccessScope.job)

        # Test Create Task with Container Settings
        task_param = models.TaskAddParameter(
            id=self.get_resource_name('batch_task5_'),
            command_line='cmd /c "echo hello world"',
            container_settings=models.TaskContainerSettings(
                image_name='windows_container:latest',
                registry=models.ContainerRegistry('username', 'password'))
        )
        client.task.add(batch_job.id, task_param)
        task = client.task.get(batch_job.id, task_param.id)
        self.assertIsInstance(task, models.CloudTask)
        self.assertEqual(task.container_settings.image_name, 'windows_container:latest')
        self.assertEqual(task.container_settings.registry.user_name, 'username')

        # Test Create Task with Run-As-User
        task_param = models.TaskAddParameter(
            id=self.get_resource_name('batch_task6_'),
            command_line='cmd /c "echo hello world"',
            user_identity=models.UserIdentity(user_name='task-user')
        )
        client.task.add(batch_job.id, task_param)
        task = client.task.get(batch_job.id, task_param.id)
        self.assertIsInstance(task, models.CloudTask)
        self.assertEqual(task.user_identity.user_name, 'task-user')

        # Test Add Task Collection
        tasks = []
        for i in range(7, 10):
            tasks.append(models.TaskAddParameter(
                self.get_resource_name('batch_task{}_'.format(i)), 'cmd /c "echo hello world"'))
        result = client.task.add_collection(batch_job.id, tasks)
        self.assertIsInstance(result, models.TaskAddCollectionResult)
        self.assertEqual(len(result.value), 3)
        self.assertEqual(result.value[0].status, models.TaskAddStatus.success)

        # Test List Tasks
        tasks = list(client.task.list(batch_job.id))
        self.assertEqual(len(tasks), 9)

        # Test Count Tasks
        task_counts = client.job.get_task_counts(batch_job.id)
        self.assertIsInstance(task_counts, models.TaskCounts)
        self.assertEqual(task_counts.completed, 0)
        self.assertEqual(task_counts.succeeded, 0)
        self.assertEqual(task_counts.validation_status, models.TaskCountValidationStatus.validated)

        # Test Terminate Task
        response = client.task.terminate(batch_job.id, task_param.id)
        self.assertIsNone(response)
        task = client.task.get(batch_job.id, task_param.id)
        self.assertEqual(task.state, models.TaskState.completed)

        # Test Reactivate Task
        response = client.task.reactivate(batch_job.id, task_param.id)
        self.assertIsNone(response)
        task = client.task.get(batch_job.id, task_param.id)
        self.assertEqual(task.state, models.TaskState.active)

        # Test Update Task
        response = client.task.update(
            batch_job.id, task_param.id,
            constraints=models.TaskConstraints(max_task_retry_count=1))
        self.assertIsNone(response)

        # Test Get Subtasks 
        # TODO: Test with actual subtasks
        subtasks = client.task.list_subtasks(batch_job.id, task_param.id)
        self.assertIsInstance(subtasks, models.CloudTaskListSubtasksResult)
        self.assertEqual(subtasks.value, [])

        # Test Delete Task
        response = client.task.delete(batch_job.id, task_param.id)
        self.assertIsNone(response)
Example #9
def add_tasks(batch_service_client, job_id, startqkey, output_container_name,
              output_container_sas_token):
    """
    Adds one tile-scoring task per quadkey to the specified job.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str job_id: The ID of the job to which to add the tasks.
    :param str startqkey: The quadkey prefix; one task is created per child
     key derived from it.
    :param output_container_name: The ID of an Azure Blob storage container to
     which the tasks will upload their results.
    :param output_container_sas_token: A SAS token granting write access to
     the specified Azure Blob storage container.

    Note on container_settings (TaskContainerSettings): these are the settings
    for the container under which the task runs. If the pool that will run
    this task has containerConfiguration set, this must be set as well; if the
    pool doesn't have containerConfiguration set, this must not be set. When
    it is specified, all directories recursively below AZ_BATCH_NODE_ROOT_DIR
    (the root of Azure Batch directories on the node) are mapped into the
    container, all task environment variables are mapped into the container,
    and the task command line is executed in the container.
    """

    print('Adding {} tasks to job [{}]...'.format(tasks_per_job, job_id))

    task_containersettings = batchmodels.TaskContainerSettings(
        image_name=_ACR_IMG_NAME)

    tasks = list()

    for idx in range(tasks_per_job):
        taskqkey = startqkey + int2base(idx, 4, 1)

        command = [
                    'python $AZ_BATCH_NODE_SHARED_DIR/{} ' \
                    '--filedir $AZ_BATCH_NODE_SHARED_DIR/tiles ' \
                    '--model $AZ_BATCH_NODE_SHARED_DIR/ghanamines.h5 ' \
                    '--storageaccount {} ' \
                    '--storagecontainer {} --sastoken "{}" ' \
                    '--startqkey {}'.format(
                        _TUTORIAL_TASK_FILE,
                        _STORAGE_ACCOUNT_NAME,
                        output_container_name,
                        output_container_sas_token,
                        taskqkey)]

        print('adding task {}'.format(idx))

        tasks.append(
            batch.models.TaskAddParameter(
                id='TileScoretask{:0>2}'.format(idx),
                command_line=common_helpers.wrap_commands_in_shell(
                    'linux', command),
                container_settings=task_containersettings,
                user_identity=batchmodels.UserIdentity(
                    auto_user=batchmodels.AutoUserSpecification(
                        scope=batchmodels.AutoUserScope.task,
                        elevation_level=batchmodels.ElevationLevel.admin))))

    batch_service_client.task.add_collection(job_id, tasks)
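int2base is used above but not shown. A plausible sketch, assuming it renders a non-negative integer as a fixed-width digit string in the given base (base 4 here, matching quadkey digits):

def int2base(x, base, width):
    # Convert x to its base-`base` digit string, left-padded with
    # zeros to `width` characters, e.g. int2base(3, 4, 1) == '3'.
    digits = []
    while x:
        digits.append(str(x % base))
        x //= base
    return ''.join(reversed(digits)).rjust(width, '0')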
Example #10
def create_pool_with_containers(batch_service_client, pool_id, resource_files,
                                publisher, offer, sku):
    """
    Creates a pool of compute nodes with the specified OS settings.

    :param batch_service_client: A Batch service client.
    :type batch_service_client: `azure.batch.BatchServiceClient`
    :param str pool_id: An ID for the new pool.
    :param list resource_files: A collection of resource files for the pool's
    start task.
    :param str publisher: Marketplace image publisher
    :param str offer: Marketplace image offer
    :param str sku: Marketplace image sku
    """
    print('Creating pool [{}]...'.format(pool_id))

    # Create a new pool of Linux compute nodes using an Azure Virtual Machines
    # Marketplace image. For more information about creating pools of Linux
    # nodes, see:
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/

    # Specify the commands for the pool's start task. The start task is run
    # on each node as it joins the pool, and when it's rebooted or re-imaged.
    # We use the start task to prep the node for running our task script.
    task_commands = [
        # Copy the python_tutorial_task.py script to the "shared" directory
        # that all tasks that run on the node have access to. Note that
        # we are using the -p flag with cp to preserve the file uid/gid,
        # otherwise since this start task is run as an admin, it would not
        # be accessible by tasks run as a non-admin user.
        #'wget https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb',
        #'sudo dpkg -i packages-microsoft-prod.deb',
        #'wget -O azcopy.tar.gz https://aka.ms/downloadazcopylinux64',
        #'tar -xf azcopy.tar.gz',
        #'sudo ./install.sh',
        #'wget https://repo.anaconda.com/archive/Anaconda3-5.1.0-Linux-x86_64.sh -O ~/conda.sh',
        #'bash ~/conda.sh -b -p $AZ_BATCH_NODE_SHARED_DIR/conda',
        #'export PATH="$AZ_BATCH_NODE_SHARED_DIR/conda/bin:$PATH"',
        #'sudo apt-get -y update',
        #'sudo apt-get -y install azcopy',
        'cp -p {} $AZ_BATCH_NODE_SHARED_DIR'.format(_TUTORIAL_TASK_FILE),
        #'cp -p {} $AZ_BATCH_NODE_SHARED_DIR'.format(_ENV_YML_FILE),
        'azcopy --source https://{0}.blob.core.windows.net/model/ghanamines.h5 --destination $AZ_BATCH_NODE_SHARED_DIR/ghanamines.h5 --source-key {1}'
        .format(_STORAGE_ACCOUNT_NAME, _STORAGE_ACCOUNT_KEY),
        #'sudo $AZ_BATCH_NODE_SHARED_DIR/conda/bin/conda env create -f {}'.format(_ENV_YML_FILE)
    ]

    # Get the node agent SKU and image reference for the virtual machine
    # configuration.
    # For more information about the virtual machine configuration, see:
    # https://azure.microsoft.com/documentation/articles/batch-linux-nodes/
    sku_to_use, image_ref_to_use = \
        common_helpers.select_latest_verified_vm_image_with_node_agent_sku(
            batch_service_client, publisher, offer, sku)

    user = batchmodels.AutoUserSpecification(
        scope=batchmodels.AutoUserScope.pool,
        elevation_level=batchmodels.ElevationLevel.admin)

    container_reg = batchmodels.ContainerRegistry(user_name=CLIENT_ID,
                                                  password=SECRET,
                                                  registry_server=_ACR_URL)
    container_cfg = batchmodels.ContainerConfiguration(
        container_image_names=[_ACR_IMG_NAME],
        container_registries=[container_reg])
    my_img_ref = batchmodels.ImageReference(
        virtual_machine_image_id=_CUSTOM_VM_IMG_ID)

    vm_cfg = batchmodels.VirtualMachineConfiguration(
        image_reference=my_img_ref,
        node_agent_sku_id=sku_to_use,  # e.g. 'batch.node.ubuntu 16.04'; check that the ghanaimg image supports GPU
        container_configuration=container_cfg)
    task_containersettings = batchmodels.TaskContainerSettings(
        image_name=_ACR_IMG_NAME)

    new_pool = batchmodels.PoolAddParameter(
        id=pool_id,
        virtual_machine_configuration=vm_cfg,
        vm_size=_POOL_VM_SIZE,
        target_dedicated_nodes=_POOL_NODE_COUNT,
        target_low_priority_nodes=1,
        start_task=batch.models.StartTask(
            command_line=common_helpers.wrap_commands_in_shell(
                'linux', task_commands),
            user_identity=batchmodels.UserIdentity(auto_user=user),
            wait_for_success=True,
            resource_files=resource_files,
            container_settings=task_containersettings))

    try:
        batch_service_client.pool.add(new_pool)
    except batchmodels.batch_error.BatchErrorException as err:
        print_batch_exception(err)
        raise
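common_helpers.wrap_commands_in_shell comes from the Azure Batch samples' helper module; roughly, it joins several commands into a single shell invocation usable as one task command line:

def wrap_commands_in_shell(ostype, commands):
    # Combine several commands into one command line for a task or start task.
    if ostype.lower() == 'linux':
        return "/bin/bash -c 'set -e; set -o pipefail; {}; wait'".format(
            ';'.join(commands))
    if ostype.lower() == 'windows':
        return 'cmd.exe /c "{}"'.format('&'.join(commands))
    raise ValueError('unknown ostype: {}'.format(ostype))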
Example #11
    def add_task(self, job_id: str, default_max_tries=None):
        """
        Adds a single task, built from self.task_definition, to the specified job.

        :param str job_id: The ID of the job to which to add the task.
        :param int default_max_tries: Fallback max task retry count.
        :return task: The Azure Batch task that was added.
        """
        from azure.batch import models as batchmodels

        task_id = self.task_definition.get('id', job_id)
        display_name = self.task_definition.get('displayName', task_id)

        logging.info('Adding task [{}] to job [{}]...'.format(task_id, job_id))

        container_settings = batchmodels.TaskContainerSettings(
            image_name=self.image, container_run_options='--rm')

        platform = self.conf[utils.PLATFORM]
        environment_settings = [
            batchmodels.EnvironmentSetting(name='AZURE_SUBSCRIPTION_ID',
                                           value=platform['subscription']),
            batchmodels.EnvironmentSetting(name='AZURE_STORAGE_ACCOUNT',
                                           value=platform['storage_account']),
            batchmodels.EnvironmentSetting(
                name='AZURE_STORAGE_CONTAINER',
                value=platform['storage_container']),
            batchmodels.EnvironmentSetting(
                name='AZURE_STORAGE_CONNECTION_STRING',
                value=platform['storage_connection_string']),
        ]

        if 'environmentSettings' in self.task_definition and self.task_definition[
                'environmentSettings'] is not None:
            environment_settings.extend([
                batchmodels.EnvironmentSetting(**setting)
                for setting in self.task_definition['environmentSettings']
            ])

        constraints = None
        if 'constraints' in self.task_definition and self.task_definition[
                'constraints']:
            constraints = batchmodels.TaskConstraints(
                max_wall_clock_time=self.task_definition['constraints'].get(
                    'maxWallClockTime', "P1D"),
                max_task_retry_count=self.task_definition['constraints'].get(
                    'maxTaskRetryCount', default_max_tries),
                retention_time=self.task_definition['constraints'].get(
                    'retentionTime', "P1D"),
            )  # no trailing comma here: it would turn `constraints` into a 1-tuple

        user_identity = batchmodels.UserIdentity(
            auto_user=batchmodels.AutoUserSpecification(
                scope=batchmodels.AutoUserScope.pool,
                elevation_level=batchmodels.ElevationLevel.admin))

        task = batchmodels.TaskAddParameter(
            id=task_id,
            display_name=display_name,
            command_line=self.task_definition['commandLine'],
            constraints=constraints,
            container_settings=container_settings,
            environment_settings=environment_settings,
            user_identity=user_identity,
        )

        for validation in task.validate():
            logging.info(validation)

        self.batch_client.task.add(job_id=job_id, task=task)

        return task
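The method reads several keys from self.task_definition; a plausible definition, inferred from the .get() calls above rather than from any published schema:

task_definition = {
    'id': 'etl-task',
    'displayName': 'ETL task',
    'commandLine': 'python /app/run.py',
    'environmentSettings': [
        {'name': 'LOG_LEVEL', 'value': 'INFO'},
    ],
    'constraints': {
        'maxWallClockTime': 'PT2H',   # ISO 8601 durations
        'maxTaskRetryCount': 2,
        'retentionTime': 'P1D',
    },
}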
Example #12
def create_task(dataset, command, dependencies, max_wall_clock, production):

    if production:
        container = RESULTS_CONTAINER
    else:
        container = TEST_RESULTS_CONTAINER + "/" + \
                    generate_task_name(dataset.name)

    output_files = [
        # Upload results
        batch_models.OutputFile(
            file_pattern="$AZ_BATCH_TASK_WORKING_DIR/results/**/*",
            upload_options=batch_models.OutputFileUploadOptions(
                upload_condition=batch_models.OutputFileUploadCondition.task_success),
            destination=batch_models.OutputFileDestination(
                container=batch_models.OutputFileBlobContainerDestination(
                    path=dataset.data_dir,
                    container_url=container + SAS_TOKEN))),
        batch_models.OutputFile(
            file_pattern=f"$AZ_BATCH_NODE_ROOT_DIR/fsmounts/{FILE_SHARE_NAME}/*.csv",
            upload_options=batch_models.OutputFileUploadOptions(
                upload_condition=batch_models.OutputFileUploadCondition.task_success),
            destination=batch_models.OutputFileDestination(
                container=batch_models.OutputFileBlobContainerDestination(
                    container_url=container + SAS_TOKEN))),
        batch_models.OutputFile(
            file_pattern=f"$AZ_BATCH_NODE_ROOT_DIR/fsmounts/{FILE_SHARE_NAME}/last-update/*",
            upload_options=batch_models.OutputFileUploadOptions(
                upload_condition=batch_models.OutputFileUploadCondition.task_success),
            destination=batch_models.OutputFileDestination(
                container=batch_models.OutputFileBlobContainerDestination(
                    path="last-update",
                    container_url=container + SAS_TOKEN))),
        # Upload stderr and stdout
        batch_models.OutputFile(
            file_pattern="$AZ_BATCH_TASK_DIR/std*.txt",
            upload_options=batch_models.OutputFileUploadOptions(
                upload_condition=batch_models.OutputFileUploadCondition.task_completion),
            destination=batch_models.OutputFileDestination(
                container=batch_models.OutputFileBlobContainerDestination(
                    path=DATETIME_NOWISH + "/" + generate_task_name(dataset.name),
                    container_url=PROCESS_LOG_CONTAINER + "/" + SAS_TOKEN)))
    ]

    return batch_models.TaskAddParameter(
        id=generate_task_name(dataset.name),
        display_name=(dataset.name + "_python_script_job"),
        command_line=command,
        resource_files=[
            batch_models.ResourceFile(storage_container_url=CONFIG_CONTAINER +
                                      SAS_TOKEN,
                                      blob_prefix=dataset.name + CONFIG_FILE)
        ],
        depends_on=batch_models.TaskDependencies(task_ids=dependencies),
        user_identity=batch_models.UserIdentity(
            auto_user=batch_models.AutoUserSpecification(
                scope='pool', elevation_level='admin')),
        container_settings=batch_models.TaskContainerSettings(
            image_name=DOCKER_CONTAINER_URL,
            container_run_options='-w /home/rstudio/covid-rt-estimates'),
        constraints=batch_models.TaskConstraints(
            max_wall_clock_time=datetime.timedelta(minutes=max_wall_clock)),
        output_files=output_files)
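A hedged usage sketch; the client, job ID, and command are placeholders. Note that depends_on is honored only on jobs created with uses_task_dependencies=True:

task = create_task(
    dataset,
    command='bash run.sh',
    dependencies=[],      # IDs of tasks this one depends on
    max_wall_clock=120,   # minutes
    production=False)
batch_client.task.add_collection('estimates-job', [task])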