Code example #1
def submit_calculation(task, input_, cluster, image, run_parameters,
                       root_folder, container_description, input_folder,
                       output_folder, scratch_folder):
    job = _create_job(task, cluster, image, run_parameters,
                      container_description, input_folder, output_folder,
                      scratch_folder)

    girder_token = task.taskflow.girder_token
    task.taskflow.set_metadata('cluster', cluster)

    # Now download and submit job to the cluster
    task.taskflow.logger.info('Downloading input files to cluster.')
    download_job_input_folders(cluster,
                               job,
                               girder_token=girder_token,
                               submit=False)
    task.taskflow.logger.info('Downloading complete.')

    task.taskflow.logger.info('Submitting job %s to cluster.' % job['_id'])

    submit_job(cluster, job, girder_token=girder_token, monitor=False)

    monitor_job.apply_async(
        (cluster, job), {
            'girder_token': girder_token,
            'monitor_interval': 10
        },
        link=postprocess_job.s(input_, cluster, image, run_parameters,
                               root_folder, container_description,
                               input_folder, output_folder, scratch_folder,
                               job))
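The monitor_job.apply_async(...) call above follows the standard Celery calling convention: positional arguments in a tuple, keyword arguments in a dict, and a link= signature that Celery invokes with the monitoring task's return value prepended to its arguments. Below is a minimal sketch of that pattern using hypothetical task names (watch_job, after_job), not the actual cumulus tasks.

# Minimal sketch of the apply_async/link chaining pattern (hypothetical tasks,
# not the real cumulus monitor_job/postprocess_job implementations).
from celery import Celery

app = Celery('sketch', broker='memory://')

@app.task
def watch_job(cluster, job, girder_token=None, monitor_interval=10):
    # ...poll the queueing system here...
    return job

@app.task
def after_job(job, run_folder):
    # Celery prepends watch_job's return value to the arguments given in .s()
    print('job %s finished, results in %s' % (job, run_folder))

# Positional args go in a tuple, keyword args in a dict; countdown delays the
# first run and link= schedules after_job once watch_job succeeds.
watch_job.apply_async(
    ('cluster-id', 'job-id'),
    {'girder_token': 'token', 'monitor_interval': 10},
    countdown=5,
    link=after_job.s('run-folder-id'))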
Code example #2
def submit(task, input_, cluster, run_folder, input_file, input_folder):
    job = _create_job(task, cluster, input_file, input_folder)

    girder_token = task.taskflow.girder_token
    task.taskflow.set_metadata('cluster', cluster)

    # Now download and submit job to the cluster
    task.taskflow.logger.info('Downloading input files to cluster.')
    download_job_input_folders(cluster,
                               job,
                               girder_token=girder_token,
                               submit=False)
    task.taskflow.logger.info('Downloading complete.')

    task.taskflow.logger.info('Submitting job %s to cluster.' % job['_id'])

    try:
        submit_job(cluster, job, girder_token=girder_token, monitor=False)
    except Exception:
        # Log the full traceback if submission fails, but keep going so the
        # monitor task below is still scheduled.
        import traceback
        traceback.print_exc()

    monitor_job.apply_async(
        (cluster, job), {
            'girder_token': girder_token,
            'monitor_interval': 10
        },
        link=postprocess.s(run_folder, input_, cluster, job))
Code example #3
File: __init__.py Project: xinlaoda/HPCCloud
def submit(task, job, *args, **kwargs):
    task.taskflow.logger.info('Submitting job to cluster.')
    girder_token = task.taskflow.girder_token
    cluster = kwargs.pop('cluster')
    task.taskflow.set_metadata('cluster', cluster)

    # Now download and submit job to the cluster
    task.logger.info('Uploading input files to cluster.')
    download_job_input_folders(cluster, job, log_write_url=None,
                               girder_token=girder_token, submit=False)
    task.logger.info('Uploading complete.')

    submit_pyfr_job.delay(cluster, job, *args, **kwargs)
Code example #4
File: pyfr.py Project: sunhughees/HPCCloud
def submit(task, job, *args, **kwargs):
    task.taskflow.logger.info('Submitting job to cluster.')
    girder_token = task.taskflow.girder_token
    cluster = kwargs.pop('cluster')
    task.taskflow.set_metadata('cluster', cluster)

    # Now download and submit job to the cluster
    task.logger.info('Uploading input files to cluster.')
    download_job_input_folders(cluster, job, log_write_url=None,
                               girder_token=girder_token, submit=False)
    task.logger.info('Uploading complete.')

    submit_pyfr_job.delay(cluster, job, *args, **kwargs)
Code example #5
def start(task, input_, user, cluster, image, run_parameters):
    """
    The flow is the following:
    - Dry run the container with the -d flag to obtain a description of the input/output formats
    - Convert the cjson input geometry to conform to the container's expected format
    - Run the container
    - Convert the container output format into cjson
    - Ingest the output in the database
    """
    client = create_girder_client(task.taskflow.girder_api_url,
                                  task.taskflow.girder_token)

    if cluster.get('name') == 'cori':
        cluster = get_cori(client)

    if '_id' not in cluster:
        log_and_raise(task, 'Invalid cluster configurations: %s' % cluster)

    oc_folder = get_oc_folder(client)
    root_folder = client.createFolder(
        oc_folder['_id'],
        datetime.datetime.now().strftime("%Y_%m_%d-%H_%M_%f"))
    # temporary folder to save the container in/out description
    description_folder = client.createFolder(root_folder['_id'], 'description')

    job = _create_description_job(task, cluster, description_folder, image,
                                  run_parameters)

    task.taskflow.logger.info(
        'Preparing job to obtain the container description.')
    download_job_input_folders(cluster,
                               job,
                               girder_token=task.taskflow.girder_token,
                               submit=False)
    task.taskflow.logger.info(
        'Submitting job to obtain the container description.')

    submit_job(cluster,
               job,
               girder_token=task.taskflow.girder_token,
               monitor=False)

    monitor_job.apply_async(
        (cluster, job), {
            'girder_token': task.taskflow.girder_token,
            'monitor_interval': 10
        },
        countdown=countdown(cluster),
        link=postprocess_description.s(input_, user, cluster, image,
                                       run_parameters, root_folder, job,
                                       description_folder))
Code example #6
def start(task, user, cluster, image, container):
    client = create_girder_client(task.taskflow.girder_api_url,
                                  task.taskflow.girder_token)

    if cluster.get('name') == 'cori':
        cluster = get_cori(client)

    if '_id' not in cluster:
        log_and_raise(task, 'Invalid cluster configurations: %s' % cluster)

    oc_folder = get_oc_folder(client)
    root_folder = client.createFolder(
        oc_folder['_id'],
        datetime.datetime.now().strftime("%Y_%m_%d-%H_%M_%f"))
    # temporary folder to save the container in/out description
    folder = client.createFolder(root_folder['_id'], 'pull_folder')

    # save the pull.py script to the job directory
    with open(os.path.join(os.path.dirname(__file__), 'utils/pull.py'),
              'rb') as f:
        # Get the size of the file
        size = f.seek(0, 2)
        f.seek(0)
        name = 'pull.py'
        client.uploadFile(folder['_id'], f, name, size, parentType='folder')

    job = _create_job(task, cluster, folder, image, container)

    # Now download pull.py script to the cluster
    task.taskflow.logger.info('Preparing job to pull the container.')
    download_job_input_folders(cluster,
                               job,
                               girder_token=task.taskflow.girder_token,
                               submit=False)

    task.taskflow.logger.info('Submitting job to pull the container.')
    submit_job(cluster,
               job,
               girder_token=task.taskflow.girder_token,
               monitor=False)

    monitor_job.apply_async(
        (cluster, job), {
            'girder_token': task.taskflow.girder_token,
            'monitor_interval': 10
        },
        countdown=countdown(cluster),
        link=postprocess_job.s(user, cluster, image, job, folder, container))
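The size = f.seek(0, 2) idiom above relies on Python 3 behavior, where seek() returns the new absolute offset, so seeking to the end yields the file size in bytes before the handle is rewound for the upload (under Python 2, file.seek() returns None). A standalone illustration of the same idiom, using a hypothetical local file:

# Standalone illustration of the seek-based size check used above.
# 'pull.py' stands in for any local file; os.fstat() gives the same answer.
import os

with open('pull.py', 'rb') as f:
    size = f.seek(0, os.SEEK_END)  # new offset == file size (Python 3 only)
    f.seek(0)                      # rewind so the upload starts at byte 0
    assert size == os.fstat(f.fileno()).st_size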
Code example #7
File: __init__.py Project: Kitware/HPCCloud
def submit(task, upstream_result):
    task.taskflow.logger.info('Submitting job to cluster.')
    girder_token = task.taskflow.girder_token
    cluster = upstream_result['cluster']
    job = upstream_result['job']
    task.taskflow.set_metadata('cluster', cluster)

    # Now download and submit job to the cluster
    task.logger.info('Uploading input files to cluster.')
    download_job_input_folders(cluster, job, log_write_url=None,
                               girder_token=girder_token, submit=False)

    if 'geometryFilename' in upstream_result:
        create_geometry_symlink(task, job, cluster, upstream_result['geometryFilename'])

    task.logger.info('Uploading complete.')

    return upstream_result
Code example #8
File: windtunnel.py Project: webcae/HPCCloud
def submit_open_foam_job(task, cluster, job, *args, **kwargs):
    # Now download job inputs
    task.logger.info('Uploading input files to cluster.')
    download_job_input_folders(cluster,
                               job,
                               log_write_url=None,
                               girder_token=task.taskflow.girder_token,
                               submit=False)
    task.logger.info('Uploading complete.')

    # Setup job parameters
    task.taskflow.logger.info('Submitting job to cluster.')
    job['params'] = {}

    ## parallel_environment
    parallel_environment = parse('config.parallelEnvironment').find(cluster)
    if parallel_environment:
        parallel_environment = parallel_environment[0].value
        job['params']['parallelEnvironment'] = parallel_environment

    ## slots
    job['params']['numberOfSlots'] = 1

    ## output dir
    job_output_dir = get_cluster_job_output_dir(cluster)
    if job_output_dir:
        job['params']['jobOutputDir'] = job_output_dir

    # Submit job to the queue
    submit_job(cluster,
               job,
               log_write_url=None,
               girder_token=task.taskflow.girder_token,
               monitor=False)

    # Move to the next task
    monitor_open_foam_job.delay(cluster, job, *args, **kwargs)
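The parse('config.parallelEnvironment').find(cluster) lookup above uses a jsonpath-style query: find() returns a list of matches whose value attribute holds the matched data, and an empty list when the path is absent. A minimal sketch, assuming the jsonpath_rw package (the import is not shown in the example) and a hypothetical cluster dict:

# Minimal sketch of the jsonpath lookup pattern, assuming jsonpath_rw.
from jsonpath_rw import parse

cluster = {'config': {'parallelEnvironment': 'mpi'}}

matches = parse('config.parallelEnvironment').find(cluster)
if matches:
    # Each match is a DatumInContext; .value is the matched data.
    print(matches[0].value)  # -> 'mpi'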
Code example #9
def start(task, input_, cluster, image, run_parameters):
    """
    The flow is the following:
    - Dry run the container with the -d flag to obtain a description of the input/output formats
    - Convert the cjson input geometry to conform to the container's expected format
    - Run the container
    - Convert the container output format into cjson
    - Ingest the output in the database
    """
    client = create_girder_client(task.taskflow.girder_api_url,
                                  task.taskflow.girder_token)

    if cluster.get('name') == 'cori':
        cluster = _get_cori(client)

    if '_id' not in cluster:
        raise Exception('Invalid cluster configurations: %s' % cluster)

    oc_folder = _get_oc_folder(client)
    root_folder = client.createFolder(
        oc_folder['_id'],
        datetime.datetime.now().strftime("%Y_%m_%d-%H_%M_%f"))
    # temporary folder to save the container in/out description
    description_folder = client.createFolder(root_folder['_id'], 'description')

    # save the pull.py script to the job directory
    with open(os.path.join(os.path.dirname(__file__), 'utils/pull.py'),
              'rb') as f:
        # Get the size of the file
        size = f.seek(0, 2)
        f.seek(0)
        name = 'pull.py'
        client.uploadFile(description_folder['_id'], f, name, size,
                          parentType='folder')

    job = _create_description_job(task, cluster, description_folder, image,
                                  run_parameters)

    # Now download pull.py script to the cluster
    task.taskflow.logger.info(
        'Downloading description input files to cluster.')
    download_job_input_folders(cluster,
                               job,
                               girder_token=task.taskflow.girder_token,
                               submit=False)
    task.taskflow.logger.info('Downloading complete.')

    submit_job(cluster,
               job,
               girder_token=task.taskflow.girder_token,
               monitor=False)

    monitor_job.apply_async(
        (cluster, job), {
            'girder_token': task.taskflow.girder_token,
            'monitor_interval': 10
        },
        link=postprocess_description.s(input_, cluster, image, run_parameters,
                                       root_folder, job, description_folder))