def submit(task, input_, cluster, run_folder, input_file, input_folder):
    """Create a job from the input files and submit it to the cluster.

    Downloads the job's input folders to the cluster, submits the job,
    and schedules an async ``monitor_job`` task linked to ``postprocess``.

    :param task: The taskflow task providing the logger and girder token.
    :param input_: The calculation input, forwarded to ``postprocess``.
    :param cluster: The cluster configuration to run on.
    :param run_folder: Folder associated with this run (postprocess arg).
    :param input_file: The job input file.
    :param input_folder: The job input folder.
    """
    job = _create_job(task, cluster, input_file, input_folder)

    girder_token = task.taskflow.girder_token
    task.taskflow.set_metadata('cluster', cluster)

    # Now download and submit job to the cluster
    task.taskflow.logger.info('Downloading input files to cluster.')
    download_job_input_folders(cluster, job, girder_token=girder_token,
                               submit=False)
    task.taskflow.logger.info('Downloading complete.')

    task.taskflow.logger.info('Submitting job %s to cluster.' % job['_id'])

    try:
        submit_job(cluster, job, girder_token=girder_token, monitor=False)
    except Exception:
        # Best-effort, as before: record the failure and still schedule
        # monitoring. Narrowed from a bare ``except:`` so that
        # KeyboardInterrupt/SystemExit propagate, and routed through the
        # taskflow logger (with traceback) rather than stderr.
        task.taskflow.logger.exception(
            'Submission of job %s failed.' % job['_id'])

    monitor_job.apply_async(
        (cluster, job), {
            'girder_token': girder_token,
            'monitor_interval': 10
        }, link=postprocess.s(run_folder, input_, cluster, job))
def submit_calculation(task, input_, cluster, image, run_parameters,
                       root_folder, container_description, input_folder,
                       output_folder, scratch_folder):
    """Build the calculation job, push its inputs to the cluster, submit
    it, and chain an async monitor that triggers ``postprocess_job`` on
    completion.
    """
    token = task.taskflow.girder_token

    job = _create_job(task, cluster, image, run_parameters,
                      container_description, input_folder, output_folder,
                      scratch_folder)
    task.taskflow.set_metadata('cluster', cluster)

    # Stage the input folders on the cluster before submission.
    task.taskflow.logger.info('Downloading input files to cluster.')
    download_job_input_folders(cluster, job, girder_token=token,
                               submit=False)
    task.taskflow.logger.info('Downloading complete.')

    task.taskflow.logger.info('Submitting job %s to cluster.' % job['_id'])
    submit_job(cluster, job, girder_token=token, monitor=False)

    # Poll the job asynchronously; on completion run the postprocess step
    # with the full calculation context.
    monitor_job.apply_async(
        (cluster, job),
        {'girder_token': token, 'monitor_interval': 10},
        link=postprocess_job.s(input_, cluster, image, run_parameters,
                               root_folder, container_description,
                               input_folder, output_folder, scratch_folder,
                               job))
def monitor_pyfr_job(task, cluster, job, *args, **kwargs):
    """Start async monitoring of a PyFR job; chain ``upload_output`` to
    run when monitoring finishes. Extra args/kwargs are forwarded to the
    upload step.
    """
    task.logger.info('Monitoring job on cluster.')
    token = task.taskflow.girder_token
    monitor_job.apply_async(
        (cluster, job),
        {'girder_token': token, 'monitor_interval': 30},
        link=upload_output.s(cluster, job, *args, **kwargs))
def start(task, input_, user, cluster, image, run_parameters):
    """
    The flow is the following:
    - Dry run the container with the -d flag to obtain a description of
      the input/output formats
    - Convert the cjson input geometry to conform to the container's
      expected format
    - Run the container
    - Convert the container output format into cjson
    - Ingest the output in the database
    """
    girder_client = create_girder_client(task.taskflow.girder_api_url,
                                         task.taskflow.girder_token)

    # Resolve the special 'cori' cluster name to its real configuration.
    if cluster.get('name') == 'cori':
        cluster = get_cori(girder_client)

    if '_id' not in cluster:
        log_and_raise(task, 'Invalid cluster configurations: %s' % cluster)

    oc_folder = get_oc_folder(girder_client)
    timestamp = datetime.datetime.now().strftime("%Y_%m_%d-%H_%M_%f")
    root_folder = girder_client.createFolder(oc_folder['_id'], timestamp)

    # temporary folder to save the container in/out description
    description_folder = girder_client.createFolder(root_folder['_id'],
                                                    'description')

    job = _create_description_job(task, cluster, description_folder, image,
                                  run_parameters)

    task.taskflow.logger.info(
        'Preparing job to obtain the container description.')
    download_job_input_folders(cluster, job,
                               girder_token=task.taskflow.girder_token,
                               submit=False)

    task.taskflow.logger.info(
        'Submitting job to obtain the container description.')
    submit_job(cluster, job, girder_token=task.taskflow.girder_token,
               monitor=False)

    # Monitor asynchronously; postprocess the description once the job
    # completes. The countdown gives the scheduler time to register the job.
    monitor_job.apply_async(
        (cluster, job),
        {'girder_token': task.taskflow.girder_token, 'monitor_interval': 10},
        countdown=countdown(cluster),
        link=postprocess_description.s(input_, user, cluster, image,
                                       run_parameters, root_folder, job,
                                       description_folder))
def start(task, user, cluster, image, container):
    """Pull the requested container image on the cluster.

    Uploads the ``pull.py`` helper script into a scratch folder, stages it
    on the cluster, submits the pull job, and chains ``postprocess_job``
    to run once monitoring completes.
    """
    girder_client = create_girder_client(task.taskflow.girder_api_url,
                                         task.taskflow.girder_token)

    # Resolve the special 'cori' cluster name to its real configuration.
    if cluster.get('name') == 'cori':
        cluster = get_cori(girder_client)

    if '_id' not in cluster:
        log_and_raise(task, 'Invalid cluster configurations: %s' % cluster)

    oc_folder = get_oc_folder(girder_client)
    timestamp = datetime.datetime.now().strftime("%Y_%m_%d-%H_%M_%f")
    root_folder = girder_client.createFolder(oc_folder['_id'], timestamp)

    # temporary folder to save the container in/out description
    folder = girder_client.createFolder(root_folder['_id'], 'pull_folder')

    # save the pull.py script to the job directory
    script_path = os.path.join(os.path.dirname(__file__), 'utils/pull.py')
    with open(script_path, 'rb') as f:
        # Determine the file size by seeking to the end, then rewind.
        size = f.seek(0, os.SEEK_END)
        f.seek(0, os.SEEK_SET)
        girder_client.uploadFile(folder['_id'], f, 'pull.py', size,
                                 parentType='folder')

    job = _create_job(task, cluster, folder, image, container)

    # Now download pull.py script to the cluster
    task.taskflow.logger.info('Preparing job to pull the container.')
    download_job_input_folders(cluster, job,
                               girder_token=task.taskflow.girder_token,
                               submit=False)

    task.taskflow.logger.info('Submitting job to pull the container.')
    submit_job(cluster, job, girder_token=task.taskflow.girder_token,
               monitor=False)

    monitor_job.apply_async(
        (cluster, job),
        {'girder_token': task.taskflow.girder_token, 'monitor_interval': 10},
        countdown=countdown(cluster),
        link=postprocess_job.s(user, cluster, image, job, folder, container))
def start(task, input_, cluster, image, run_parameters):
    """
    The flow is the following:
    - Dry run the container with the -d flag to obtain a description of
      the input/output formats
    - Convert the cjson input geometry to conform to the container's
      expected format
    - Run the container
    - Convert the container output format into cjson
    - Ingest the output in the database
    """
    client = create_girder_client(task.taskflow.girder_api_url,
                                  task.taskflow.girder_token)

    # Resolve the special 'cori' cluster name to its real configuration.
    if cluster.get('name') == 'cori':
        cluster = _get_cori(client)

    if '_id' not in cluster:
        raise Exception('Invalid cluster configurations: %s' % cluster)

    oc_folder = _get_oc_folder(client)
    root_folder = client.createFolder(
        oc_folder['_id'],
        datetime.datetime.now().strftime("%Y_%m_%d-%H_%M_%f"))

    # temporary folder to save the container in/out description
    description_folder = client.createFolder(root_folder['_id'],
                                             'description')

    # save the pull.py script to the description folder
    with open(os.path.join(os.path.dirname(__file__), 'utils/pull.py'),
              'rb') as f:
        # Get the size of the file
        size = f.seek(0, 2)
        f.seek(0)
        name = 'pull.py'
        # Fixed: the return value was previously bound to an unused local
        # (``input_parameters_file``); the upload is performed for its side
        # effect only.
        client.uploadFile(description_folder['_id'], f, name, size,
                          parentType='folder')

    job = _create_description_job(task, cluster, description_folder, image,
                                  run_parameters)

    # Stage the description job's input files on the cluster.
    task.taskflow.logger.info(
        'Downloading description input files to cluster.')
    download_job_input_folders(cluster, job,
                               girder_token=task.taskflow.girder_token,
                               submit=False)
    task.taskflow.logger.info('Downloading complete.')

    submit_job(cluster, job, girder_token=task.taskflow.girder_token,
               monitor=False)

    # Monitor asynchronously; postprocess the description on completion.
    monitor_job.apply_async(
        (cluster, job),
        {'girder_token': task.taskflow.girder_token, 'monitor_interval': 10},
        link=postprocess_description.s(input_, cluster, image,
                                       run_parameters, root_folder, job,
                                       description_folder))