def post(CURRENT_DIR, precal_uvfits, lta):
    """Run SPAM's final processing on a pre-calibrated UVFITS file.

    Changes into the local ``fits/`` directory, invokes
    ``spam.process_target`` and appends the outcome to
    ``process_log.txt`` and to the module-level ``succeeded_files`` /
    ``failed_files`` logs.

    :param CURRENT_DIR: working-directory path recorded in the logs
    :param precal_uvfits: pre-calibrated UVFITS file to process
    :param lta: unused; kept for backward compatibility with callers
    """
    # NOTE(review): the working directory is changed and never restored;
    # callers appear to rely on ending up inside fits/ -- confirm before
    # changing this.
    os.chdir('fits/')
    process_target_log = open('process_log.txt', 'a+')
    try:
        try:
            spam.process_target(precal_uvfits)
            # `hostname` and `succeeded_files` are assumed to be
            # module-level globals -- TODO confirm.
            process_target_log.write(
                hostname + "\n" + "Successful final processing of " +
                str(precal_uvfits) + '\n')
            process_target_log.flush()
            succeeded_files.write(CURRENT_DIR + '/' + '\n')
            succeeded_files.flush()
        except Exception as r:  # fix: Py2-only "except Exception, r" syntax
            # Log and swallow: a single bad file must not abort the batch.
            process_target_log.write(
                hostname + "\n" + "Error in final processing of " +
                str(precal_uvfits) + '\n' + str(r) +
                '\n*******************************************************\n')
            process_target_log.flush()
            failed_files.write(CURRENT_DIR + '/' + " : Final Processing error\n")
            failed_files.flush()
    finally:
        # fix: the log handle was previously leaked (never closed).
        process_target_log.close()
def process_targets(self):
    """Run SPAM's ``process_target`` on one randomly chosen imaginginput row.

    Selects a record whose status matches the configured cycle_id, marks it
    "processing" in the database, copies its calibrated FITS file into the
    local ``fits/`` directory, runs the SPAM pipeline on it, and finally
    updates the record to "checking" on success or "failed" on error.

    :return: None
    """
    print("Started Stage5: ")
    cycle_id = self.pipeline_configuration()["cycle_id"]
    fileutils = FileUtils()
    aips_id = int(random.random() * 100)
    spam.set_aips_userid(11)
    # Setting the Process Start Date Time
    start_time = str(datetime.datetime.now())
    # Taking system's in/out to backup variable.
    # NOTE(review): stdout/stderr are redirected to the log below and never
    # restored from these backups -- confirm this is intentional.
    original_stdout = sys.stdout
    original_stderr = sys.stderr
    thread_dir = os.getcwd()
    # Changing directory to fits/
    os.chdir("fits/")
    datfil_dir = thread_dir + "/datfil/"
    fits_dir = thread_dir + "/fits/"
    curr_dir = thread_dir + "/fits/"
    process_status = False
    db_model = DBUtils()
    # Get random imaging_id & project_id
    column_keys = [
        tableSchema.imaginginputId, tableSchema.projectobsnoId,
        "calibrated_fits_file"
    ]
    where_con = {"status": str(cycle_id)}
    to_be_processed = db_model.select_from_table("imaginginput", column_keys,
                                                 where_con, None)
    imaginginput_details = random.choice(to_be_processed)
    print(imaginginput_details)
    imaging_id = imaginginput_details["imaging_id"]
    # Update status for imaginginput for selected imaging_id
    current_time_in_sec = time.time()
    current_date_timestamp = datetime.datetime.fromtimestamp(
        current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
    update_data = {
        "set": {
            "status": "processing",
            "start_time": current_date_timestamp,
            "comments": "",
            "end_time": current_date_timestamp
        },
        "where": {
            "imaging_id": imaging_id,
        }
    }
    db_model.update_table(update_data, "imaginginput")
    project_id = imaginginput_details["project_id"]
    calibrated_fits_file = imaginginput_details["calibrated_fits_file"]
    # Using the above project_id, fetch base_path
    column_keys = ["base_path"]
    where_con = {"project_id": project_id}
    process_target_log = open('process_target.log', 'a+')
    process_target_log.write(
        '\n\n\n******PROCESS TARGET STARTED******\n\n\n')
    process_target_log.write("---> Start Time " + start_time)
    # Logging all Standard In/Output
    sys.stdout = process_target_log
    sys.stderr = process_target_log
    base_path = db_model.select_from_table("projectobsno", column_keys,
                                           where_con, 0)
    base_path = base_path[0]
    uvfits_full_path = base_path + "/PRECALIB/" + calibrated_fits_file
    print("Copying " + uvfits_full_path + " to " + fits_dir)
    # NOTE(review): shell command built from DB-supplied paths and the exit
    # status is never checked -- consider shutil.copy for both reasons.
    copying_fits = os.system("cp " + uvfits_full_path + " " + fits_dir)
    uvfits_file = calibrated_fits_file
    # Starting spam.process_target(SPLIT_FITS_FILE)
    try:
        spam.process_target(uvfits_file,
                            allow_selfcal_skip=True,
                            add_freq_to_name=True)
        # If this process_target is success call
        # GADPU API setSuccessStatus for the current fits_id
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
        success_update_data = {
            "set": {
                "status": "checking",
                "end_time": current_date_timestamp,
                "comments": "processing done, checking"
            },
            "where": {
                "imaging_id": imaging_id
            }
        }
        db_model.update_table(success_update_data, "imaginginput")
    except Exception as e:  # fix: Py2-only "except Exception, e" syntax
        process_target_log.write("Error: " + str(e))
        # If this process_target is a failure call
        # GADPU API setFailedStatus for the current fits_id
        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        success_update_data = {
            "set": {
                "status": "failed",
                "end_time": current_date_timestamp,
            },
            "where": {
                "imaging_id": imaging_id
            }
        }
        db_model.update_table(success_update_data, "imaginginput")
        # fix: typo "process_tagret" in the logged message
        print("Error: spam.process_target Failed " + uvfits_file)
def __main__():
    """Entry point: process one random unprocessed imaginginput record.

    Picks a cycle-19 record marked "unprocessed", registers this compute
    node/thread in the database, copies the record's calibrated FITS file
    into the local ``fits/`` directory and runs ``spam.process_target`` on
    it, updating the imaginginput row to "checking" on success or "failed"
    (and the computethread row) on error.

    NOTE(review): ``__main__`` is not a special name -- this function must
    be called explicitly; confirm how the script is launched.

    :return: None
    """
    spam.set_aips_userid(11)
    hostname = socket.gethostname()
    # Setting the Process Start Date Time
    start_time = str(datetime.datetime.now())
    # Taking system's in/out to backup variable.
    # NOTE(review): stdout/stderr are redirected to the log below and never
    # restored from these backups -- confirm this is intentional.
    original_stdout = sys.stdout
    original_stderr = sys.stderr
    thread_dir = os.getcwd()
    # Changing directory to fits/
    os.chdir("fits/")
    process_target_log = open('process_target.log', 'a+')
    process_target_log.write('\n\n\n******PROCESS TARGET STARTED******\n\n\n')
    process_target_log.write("---> Start Time " + start_time)
    # Logging Compute Node Hostname
    process_target_log.write("\n Node name" + hostname + "\n")
    # Logging all Standard In/Output
    sys.stdout = process_target_log
    sys.stderr = process_target_log
    datfil_dir = thread_dir + "/datfil/"
    fits_dir = thread_dir + "/fits/"
    curr_dir = thread_dir + "/fits/"
    process_status = False
    db_model = project_model.ProjectModel()
    # Get random imaging_id & project_id
    column_keys = [
        tableSchema.imaginginputId, tableSchema.projectobsnoId,
        "calibrated_fits_file"
    ]
    where_con = {"status": "unprocessed", "comments": "cycle 19"}
    to_be_processed = db_model.select_from_table("imaginginput", column_keys,
                                                 where_con, None)
    imaginginput_details = random.choice(to_be_processed)
    imaging_id = imaginginput_details["imaging_id"]
    # Update status for imaginginput for selected imaging_id
    current_time_in_sec = time.time()
    current_date_timestamp = datetime.datetime.fromtimestamp(
        current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
    update_data = {
        "set": {
            "status": "processing",
            "start_time": current_date_timestamp,
            "comments": "",
            "end_time": current_date_timestamp
        },
        "where": {
            "imaging_id": imaging_id,
        }
    }
    db_model.update_table(update_data, "imaginginput")
    project_id = imaginginput_details["project_id"]
    calibrated_fits_file = imaginginput_details["calibrated_fits_file"]
    # Using the above project_id, fetch base_path
    column_keys = ["file_path"]
    where_con = {"project_id": project_id}
    base_path = db_model.select_from_table("projectobsno", column_keys,
                                           where_con, 0)
    base_path = base_path[0]
    # Insert new thread to computethread
    column_keys = [tableSchema.computenodeId, "threads_count"]
    where_condition = {"node_name": hostname}
    node_details = db_model.select_from_table("computenode", column_keys,
                                              where_condition, 1)
    node_id = node_details[0]
    threads_count = node_details[1]
    computethread_data = {
        'pipeline_id': 1,
        'node_id': node_id,
        'thread_dir': thread_dir,
        'status': 'processing',
        'file_name': calibrated_fits_file,
        'comments': "{'imaging_id': " + str(imaging_id) +
                    ", 'project_id': " + str(project_id) + "}"
    }
    thread_id = db_model.insert_into_table("computethread",
                                           computethread_data,
                                           tableSchema.computethreadId)
    # Update computenode with the above generated node_id & increment threads_count
    node_update_data = {
        "set": {
            "threads_count": threads_count + 1,
            "status": "processing"
        },
        "where": {
            "node_id": node_id
        }
    }
    db_model.update_table(node_update_data, "computenode")
    uvfits_full_path = base_path + "/PRECALIB/" + calibrated_fits_file
    print("Copying " + uvfits_full_path + " to " + fits_dir)
    # NOTE(review): shell command built from DB-supplied paths and the exit
    # status is never checked -- consider shutil.copy for both reasons.
    copying_fits = os.system("cp " + uvfits_full_path + " " + fits_dir)
    uvfits_file = calibrated_fits_file
    # Starting spam.process_target(SPLIT_FITS_FILE)
    try:
        spam.process_target(uvfits_file,
                            allow_selfcal_skip=True,
                            add_freq_to_name=True)
        # If this process_target is success call
        # GADPU API setSuccessStatus for the current fits_id
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
        success_update_data = {
            "set": {
                "status": "checking",
                "end_time": current_date_timestamp,
                "comments": "processing done, checking"
            },
            "where": {
                "imaging_id": imaging_id
            }
        }
        db_model.update_table(success_update_data, "imaginginput")
    except Exception as e:  # fix: Py2-only "except Exception, e" syntax
        process_target_log.write("Error: " + str(e))
        # If this process_target is a failure call
        # GADPU API setFailedStatus for the current fits_id
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
        success_update_data = {
            "set": {
                "status": "failed",
                "end_time": current_date_timestamp,
            },
            "where": {
                "imaging_id": imaging_id
            }
        }
        db_model.update_table(success_update_data, "imaginginput")
        db_model.update_table(
            {
                "set": {
                    "status": "failed",
                    # fix: "comments" was previously inside the WHERE clause,
                    # where filtering on the error text would match no row;
                    # the intent is to record the error on the thread row.
                    "comments": str(e)
                },
                "where": {
                    "thread_id": thread_id
                }
            }, "computethread")
        # fix: typo "process_tagret" in the logged message
        print("Error: spam.process_target Failed " + uvfits_file)