def upload_input(task, cluster, job, *args, **kwargs):
    """Stage the input file for visualization on the cluster.

    Looks up ``input.file.id`` in *kwargs*; when present, records the file
    name and data directory on the job's params, optionally swaps the run
    script for ``MAIN_SCRIPT``, and uploads the file into the job directory.

    :param task: taskflow task (provides logger, girder_api_url, girder_token)
    :param cluster: cluster the job will run on
    :param job: job document; ``job['params']`` is mutated in place
    """
    file_id = parse('input.file.id').find(kwargs)
    if file_id:
        # parse() returns a list of jsonpath matches; take the first value.
        file_id = file_id[0].value
        task.logger.info('Visualizing file ID: %s' % file_id)
        # Data is staged into the job's working directory on the cluster.
        job['params']['dataDir'] = '$PWD'

        # Fetch the file metadata from Girder.
        girder_client = create_girder_client(task.taskflow.girder_api_url,
                                             task.taskflow.girder_token)
        file = girder_client.getResource('file', file_id)

        # Set the file to load.
        filename = file['name']
        job['params']['fileName'] = filename
        task.logger.info('Filename is: %s' % filename)

        # Set main script if not visualizer.
        # NOTE(review): MAIN_SCRIPT is a module-level constant not visible
        # here — presumably empty/None when the default visualizer is used.
        if MAIN_SCRIPT:
            job['params']['scriptToRun'] = MAIN_SCRIPT
            task.logger.info('Swap run script to: %s' % MAIN_SCRIPT)

        task.logger.info('Uploading file to cluster.')
        job_dir = job_directory(cluster, job)
        upload_file(cluster, task.taskflow.girder_token, file, job_dir)
        task.logger.info('Upload complete.')
def create_paraview_job(task, *args, **kwargs):
    """Create a ParaView job in Girder and stage the visualizer on the cluster.

    Reads the ``pvw.sh`` submission script next to this module, registers a
    ``paraview`` job via the Girder API, uploads ``pvw-visualizer.py`` into
    the job directory over SSH, then hands off to ``submit_paraview_job``.

    :param task: taskflow task (provides logger, girder_api_url, girder_token)
    :raises Exception: if the ``pvw.sh`` submission script is missing
    """
    _update_cluster_config(task, kwargs['cluster'])
    task.logger.info('Validating args passed to flow.')
    validate_args(kwargs)
    cluster = kwargs.pop('cluster')

    # Save the cluster in the taskflow for termination.
    task.taskflow.set_metadata('cluster', cluster)

    client = create_girder_client(task.taskflow.girder_api_url,
                                  task.taskflow.girder_token)

    task.taskflow.logger.info('Creating ParaView job.')
    task.logger.info('Load ParaView submission script.')
    # The submission script lives alongside this module.
    base_path = os.path.dirname(__file__)
    script_path = os.path.join(base_path, 'pvw.sh')

    if not os.path.exists(script_path):
        msg = 'Script path %s does not exists.' % script_path
        task.logger.info(msg)
        raise Exception(msg)

    # One shell command per line of the script.
    with open(script_path, 'r') as fp:
        commands = fp.read().splitlines()

    body = {
        'name': 'paraview',
        'commands': commands,
        'input': [],
        'output': []
    }

    job = client.post('jobs', data=json.dumps(body))
    task.logger.info('ParaView job created: %s' % job['_id'])
    task.taskflow.logger.info('ParaView job created.')

    task.taskflow.set_metadata('jobs', [job])

    # Upload the visualizer code.
    task.logger.info('Uploading visualizer')
    # NOTE(review): path assumes node_modules sits five directories above
    # this module — verify against the deployment layout.
    viz_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../../../../../',
                     'node_modules/pvw-visualizer/server/pvw-visualizer.py'))
    if not os.path.exists(viz_path):
        task.logger.error(
            'Unable to locate pvw-visualizer.py for upload. (%s)' % viz_path)
        return

    target_dir = job_directory(cluster, job)
    target_path = os.path.join(target_dir, 'pvw-visualizer.py')
    with get_connection(task.taskflow.girder_token, cluster) as conn:
        conn.makedirs(target_dir)
        with open(viz_path, 'r') as fp:
            conn.put(fp, target_path)

    # Hand off to the next task in the flow.
    submit_paraview_job.delay(cluster, job, *args, **kwargs)
def create_paraview_job(task, *args, **kwargs):
    """Create a ParaView job in Girder and stage the visualizer on the cluster.

    Reads the ``pvw.sh`` submission script next to this module, registers a
    ``paraview`` job via the Girder API, uploads ``pvw-visualizer.py`` into
    the job directory over SSH, then hands off to ``submit_paraview_job``.

    :param task: taskflow task (provides logger, girder_api_url, girder_token)
    :raises Exception: if the ``pvw.sh`` submission script is missing
    """
    _update_cluster_config(task, kwargs['cluster'])
    task.logger.info('Validating args passed to flow.')
    validate_args(kwargs)
    cluster = kwargs.pop('cluster')

    # Save the cluster in the taskflow for termination.
    task.taskflow.set_metadata('cluster', cluster)

    client = create_girder_client(
        task.taskflow.girder_api_url, task.taskflow.girder_token)

    task.taskflow.logger.info('Creating ParaView job.')
    task.logger.info('Load ParaView submission script.')
    # The submission script lives alongside this module.
    base_path = os.path.dirname(__file__)
    script_path = os.path.join(base_path, 'pvw.sh')

    if not os.path.exists(script_path):
        msg = 'Script path %s does not exists.' % script_path
        task.logger.info(msg)
        raise Exception(msg)

    # One shell command per line of the script.
    with open(script_path, 'r') as fp:
        commands = fp.read().splitlines()

    body = {
        'name': 'paraview',
        'commands': commands,
        'input': [],
        'output': []
    }

    job = client.post('jobs', data=json.dumps(body))
    task.logger.info('ParaView job created: %s' % job['_id'])
    task.taskflow.logger.info('ParaView job created.')

    task.taskflow.set_metadata('jobs', [job])

    # Upload the visualizer code.
    task.logger.info('Uploading visualizer')
    # NOTE(review): path assumes node_modules sits four directories above
    # this module — verify against the deployment layout.
    viz_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../../../../',
                     'node_modules/pvw-visualizer/server/pvw-visualizer.py'))
    if not os.path.exists(viz_path):
        # Fixed: message previously read "Unable to local ..." and did not
        # say which path was probed; now consistent with the sibling variant.
        task.logger.error(
            'Unable to locate pvw-visualizer.py for upload. (%s)' % viz_path)
        return

    target_dir = job_directory(cluster, job)
    target_path = os.path.join(target_dir, 'pvw-visualizer.py')
    with get_connection(task.taskflow.girder_token, cluster) as conn:
        conn.makedirs(target_dir)
        with open(viz_path, 'r') as fp:
            conn.put(fp, target_path)

    # Hand off to the next task in the flow.
    submit_paraview_job.delay(cluster, job, *args, **kwargs)
def create_openfoam_job(task, *args, **kwargs):
    """Create an OpenFOAM job in Girder and stage the simput unpack script.

    Registers an ``openfoam_run`` job whose commands unpack the input deck
    and run the solver inside a Docker container, uploads
    ``simput-unpack.py`` into the job directory over SSH, then hands off to
    ``submit_open_foam_job``.

    :param task: taskflow task (provides logger, girder_api_url, girder_token)
    """
    # Girder client.
    client = create_girder_client(
        task.taskflow.girder_api_url, task.taskflow.girder_token)

    # Save the cluster in the taskflow for termination.
    cluster = kwargs.pop('cluster')
    task.taskflow.set_metadata('cluster', cluster)

    # Create job definition.
    task.taskflow.logger.info('Creating OpenFoam job.')
    body = {
        'name': 'openfoam_run',
        'commands': [
            # Unpack the simput input deck into the working directory.
            'python $PWD/simput-unpack.py $PWD/input-deck.json $PWD',
            # Run the solver inside the OpenFOAM v1612+ container.
            'docker start of_v1612_plus',
            'docker exec -t of_v1612_plus $PWD/DockerRun $PWD'
        ],
        'input': [
            {
                'folderId': kwargs['input']['folder']['id'],
                'path': '.'
            },
            {
                'folderId': kwargs['input']['project']['folder']['id'],
                'path': '.'
            }
        ],
        'output': [
        ]
    }

    # Register job in girder + attach to taskflow.
    job = client.post('jobs', data=json.dumps(body))
    task.logger.info('OpenFOAM job created: %s' % job['_id'])
    task.taskflow.logger.info('OpenFOAM job created.')
    task.taskflow.set_metadata('jobs', [job])

    # Capture job working directory.
    target_dir = job_directory(cluster, job)
    task.taskflow.set_metadata('dataDir', target_dir)

    # NOTE(review): path assumes node_modules sits five directories above
    # this module — verify against the deployment layout.
    source_path = os.path.abspath(
        os.path.join(os.path.dirname(__file__), '../../../../../',
                     'node_modules/simput/bin/unpack/simput-unpack.py'))
    target_path = os.path.join(target_dir, 'simput-unpack.py')

    # Upload unpack script.
    with get_connection(task.taskflow.girder_token, cluster) as conn:
        conn.makedirs(target_dir)
        with open(source_path, 'r') as fp:
            conn.put(fp, target_path)

    # Move to the next task.
    submit_open_foam_job.delay(cluster, job, *args, **kwargs)
def create_openfoam_job(task, *args, **kwargs):
    """Create an OpenFOAM job in Girder driven by an uploaded ``run.sh``.

    Registers an ``openfoam_run`` job that makes ``run.sh`` executable and
    runs it, declares the OpenFOAM log files as job output, records the job
    directory as ``dataDir`` metadata, then hands off to
    ``submit_open_foam_job``.

    :param task: taskflow task (provides logger, girder_api_url, girder_token)
    """
    # Girder client.
    client = create_girder_client(task.taskflow.girder_api_url,
                                  task.taskflow.girder_token)

    # Save the cluster in the taskflow for termination.
    cluster = kwargs.pop('cluster')
    task.taskflow.set_metadata('cluster', cluster)

    # Create job definition.
    task.taskflow.logger.info('Creating OpenFoam job.')
    body = {
        'name': 'openfoam_run',
        'commands': [
            # run.sh is uploaded with the input folder; make it executable.
            'chmod +x $PWD/run.sh',
            # NOTE(review): the commands below were disabled by a previous
            # author whose intent is unclear; they appear to grant X access
            # to, and start, the OpenFOAM container — confirm before
            # re-enabling.
            #'xhost +local:of_v1612_plus',
            #'docker start of_v1612_plus',
            '$PWD/run.sh $PWD'
        ],
        'input': [{
            'folderId': kwargs['input']['folder']['id'],
            'path': '.'
        }],
        # Solver/meshing logs harvested from the simulation directory.
        'output': [{
            'path': 'simulation/log.blockMesh'
        }, {
            'path': 'simulation/log.patchSummary'
        }, {
            'path': 'simulation/log.potentialFoam'
        }, {
            'path': 'simulation/log.reconstructParMesh'
        }, {
            'path': 'simulation/log.surfaceFeatureExtract'
        }, {
            'path': 'simulation/log.snappyHexMesh'
        }, {
            'path': 'simulation/log.simpleFoam'
        }]
    }

    # Register job in girder + attach to taskflow.
    job = client.post('jobs', data=json.dumps(body))
    task.logger.info('OpenFOAM job created: %s' % job['_id'])
    task.taskflow.logger.info('OpenFOAM job created.')
    task.taskflow.set_metadata('jobs', [job])

    # Capture job working directory.
    target_dir = job_directory(cluster, job)
    task.taskflow.set_metadata('dataDir', target_dir)

    # Move to the next task.
    submit_open_foam_job.delay(cluster, job, *args, **kwargs)
def upload_input(task, cluster, job, *args, **kwargs):
    """Stage the input file for visualization on the cluster.

    Looks up ``input.file.id`` in *kwargs*; when present, records the file
    name and a relative data directory on the job's params and uploads the
    file into the job directory.

    :param task: taskflow task (provides logger, girder_api_url, girder_token)
    :param cluster: cluster the job will run on
    :param job: job document; ``job['params']`` is mutated in place
    """
    matches = parse('input.file.id').find(kwargs)
    if not matches:
        return

    # parse() returns a list of jsonpath matches; take the first value.
    input_file_id = matches[0].value
    task.logger.info('Visualizing file ID: %s' % input_file_id)
    job['params']['dataDir'] = '.'

    # Fetch the file metadata from Girder.
    girder_client = create_girder_client(task.taskflow.girder_api_url,
                                         task.taskflow.girder_token)
    file = girder_client.getResource('file', input_file_id)

    # Record the file the visualizer should load.
    input_filename = file['name']
    job['params']['fileName'] = input_filename
    task.logger.info('Filename is: %s' % input_filename)

    task.logger.info('Uploading file to cluster.')
    destination_dir = job_directory(cluster, job)
    upload_file(cluster, task.taskflow.girder_token, file, destination_dir)
    task.logger.info('Upload complete.')
def create_geometry_symlink(task, job, cluster, fileName):
    """Symlink the geometry file into the top of the job directory.

    The file lives under the job's first input path; a link with the same
    name is created directly in the job directory so tools can find it.

    :param task: taskflow task (provides girder_token)
    :param job: job document whose first input path holds the file
    :param cluster: cluster to run the ``ln`` command on
    :param fileName: name of the geometry file to link
    """
    base_dir = job_directory(cluster, job)
    input_subdir = job['input'][0]['path']
    source = '%s/%s/%s' % (base_dir, input_subdir, fileName)
    link = '%s/%s' % (base_dir, fileName)
    with get_connection(task.taskflow.girder_token, cluster) as connection:
        connection.execute('ln -s %s %s' % (source, link))
def create_paraview_job(task, *args, **kwargs):
    """Create a ParaView job in Girder and stage the visualization app.

    Reads the ``pvw.sh`` submission script next to this module, registers a
    ``paraview`` job via the Girder API, copies every file listed in
    ``LOCAL_FILES`` (from ``LOCAL_DIRECTORY``) into the job directory over
    SSH, then hands off to ``submit_paraview_job``.

    :param task: taskflow task (provides logger, girder_api_url, girder_token)
    :raises Exception: if the ``pvw.sh`` submission script is missing
    """
    _update_cluster_config(task, kwargs['cluster'])
    task.logger.info('Validating args passed to flow.')
    validate_args(kwargs)
    cluster = kwargs.pop('cluster')

    # Remember the cluster so the taskflow can terminate it later.
    task.taskflow.set_metadata('cluster', cluster)

    client = create_girder_client(task.taskflow.girder_api_url,
                                  task.taskflow.girder_token)

    task.taskflow.logger.info('Creating ParaView job.')
    task.logger.info('Load ParaView submission script.')
    # The submission script lives alongside this module.
    script_path = os.path.join(os.path.dirname(__file__), 'pvw.sh')

    if not os.path.exists(script_path):
        msg = 'Script path %s does not exists.' % script_path
        task.logger.info(msg)
        raise Exception(msg)

    # One shell command per line of the script.
    with open(script_path, 'r') as script_file:
        submission_commands = script_file.read().splitlines()

    job = client.post('jobs', data=json.dumps({
        'name': 'paraview',
        'commands': submission_commands,
        'input': [],
        'output': []
    }))
    task.logger.info('ParaView job created: %s' % job['_id'])
    task.taskflow.logger.info('ParaView job created.')

    task.taskflow.set_metadata('jobs', [job])

    # Upload the visualizer code.
    task.logger.info('Uploading visualization application')
    target_dir = job_directory(cluster, job)

    # Resolve (source, destination) pairs, bailing out on the first miss.
    transfers = []
    for relative_name in LOCAL_FILES:
        source = os.path.abspath(os.path.join(LOCAL_DIRECTORY, relative_name))
        destination = os.path.join(target_dir, relative_name)
        if not os.path.exists(source):
            task.logger.error('Unable to locate file for upload. (%s)'
                              % source)
            return
        transfers.append((source, destination))

    # Push everything to the cluster.
    with get_connection(task.taskflow.girder_token, cluster) as conn:
        conn.makedirs(target_dir)
        for subdir in DESTINATION_DIRECTORIES:
            conn.makedirs(os.path.join(target_dir, subdir))
        for source, destination in transfers:
            with open(source, 'r') as payload:
                conn.put(payload, destination)

    # Hand off to the next task in the flow.
    submit_paraview_job.delay(cluster, job, *args, **kwargs)