# Merge SkyNet results: read the job parameters JSON, then submit a PBS
# array job (one task per parameter number) that runs MergeSkyNetResults.jar.
try:
    # Snapshot the parameters file, then read the (copied-from) original.
    utils.copy(PARAMETERS_FROM, PARAMETERS_TO)
    json_object = utils.read_json(PARAMETERS_FROM)
    dictionary = json_object[0]

    # Get the hostname
    HOSTNAME = dictionary['db-hostname']

    # Build the constants we need
    TABLE_STEM = dictionary['table-stem']
    PARAMETERS = TABLE_STEM + '_parameters'
    PORT_INT = int(dictionary['db-port'])
    PORT_STR = dictionary['db-port']

    max_parameter_number = get_parameter_numbers(PARAMETERS, HOSTNAME, PORT_INT)[0]
    # Lazy %-style logging args: the message is only built if INFO is enabled
    # (matches the 'Running the command in PBS' call below).
    LOG.info('Max parameter number = %s', max_parameter_number)

    # Command executed by each PBS array task; resulting string is unchanged.
    cmd = ('java -Xms15g -Xmx30g -jar /home/kevin/yabi/MergeSkyNetResults.jar'
           ' -database ' + TABLE_STEM +
           ' -db_hostname ' + HOSTNAME +
           ' -db_port ' + PORT_STR)
    LOG.info('Running the command in PBS: %s', cmd)
    pbs_array_queue.run_pbs_job(
        '/home/kevin/yabi/merge_skynet_results.pbs',
        'merge_skynet_results',
        '1-' + str(max_parameter_number),
        '/home/kevin/yabi/pbs_output',
        cmd,
        'nodes=1:ppn=4:compute,pmem=30gb,walltime=06:00:00',
        sleep_time=30,
        queue_name='usmall')
except Exception as e:
    # Top-level boundary: log the full traceback instead of crashing the job.
    LOG.exception(e)
# Produce CSV data: read the job parameters JSON and, unless bypassed,
# build and submit a PBS array job (one task per input file).
LOG.info('Copying the parameters file.')
# NOTE(review): the 'except' below had no matching 'try:' in the mangled
# source; restored here around the work that can raise.
try:
    utils.copy(PARAMETERS_FROM, PARAMETERS_TO)
    json_object = utils.read_json(PARAMETERS_FROM)
    dictionary = json_object[0]

    # Should we bypass the production of CSV data
    bypass = dictionary['bypass-produce-csv']
    if utils.isTrue(bypass):
        LOG.info('Bypassing the production of CSV files')
    else:
        LOG.info('Building the command line')
        cmd = build_command_produce_csv(dictionary)
        LOG.info('Building the file list')
        file_count = build_file_list(dictionary)

        LOG.info('Running the command in PBS: %s', cmd)
        pbs_array_queue.run_pbs_job(
            '/home/kevin/yabi/produce_csv.pbs',
            'produce_csv',
            '1-' + str(file_count),
            '/home/kevin/yabi/pbs_output',
            cmd,
            'nodes=1:ppn=4:compute,pmem=30gb,walltime=04:00:00',
            sleep_time=300,
            queue_name='usmall')
        # NOTE(review): original indentation was lost; 'Done' immediately
        # followed run_pbs_job, so it is kept in the non-bypass branch —
        # confirm whether it should also fire when bypassing.
        LOG.info('Done')
except Exception as e:
    # Top-level boundary: log the full traceback instead of crashing the job.
    LOG.exception(e)