def insert_details(self, data, project_path, isghb, cycle_id, status, obs_no):
    """Insert one projectobsno/ltadetails DB record per LTA file in *data*.

    For each path in *data*, scan-group metadata is fetched via
    ``gS.get_naps_scangroup_details`` and augmented with bookkeeping fields,
    then split into the ``projectobsno`` and ``ltadetails`` tables according
    to the key sets in ``tableSchema``. An existing project row (matched on
    proposal_dir + cycle_id) is reused; otherwise one is inserted.

    :param data: iterable of LTA file paths to register
    :param project_path: base directory of the project; its last path
        component becomes ``proposal_dir``
    :param isghb: GHB flag stored verbatim on the ltadetails row
    :param cycle_id: observing cycle id used for project lookup/insert
    :param status: free-text stored in the ``comments`` column
    :param obs_no: observation number; stored as int when truthy
    """
    print("------------------==insert_details==----------------------")
    print(data)
    print(project_path)
    dbutils = DBUtils()
    for each_rec in data:
        print("==each_rec==", each_rec)
        lta_file = os.path.basename(each_rec)
        # NOTE(review): basename() returns a str, so for a 1-char name this
        # assignment is a no-op; kept from an older list-returning version.
        if len(lta_file) == 1:
            lta_file = lta_file[0]
        try:
            # NOTE(review): currentTimeInSec is a module-level value defined
            # elsewhere in this file — confirm it is refreshed per run.
            current_date_timestamp = datetime.datetime.fromtimestamp(
                currentTimeInSec).strftime('%Y-%m-%d %H:%M:%S')
            lta_details = gS.get_naps_scangroup_details(lta_file)
            # Method name retains the original (misspelled) spelling so the
            # call target elsewhere in the class is unchanged.
            lta_details["ltacomb_size"] = int(
                self.calculalate_file_sizse_in_MB(each_rec))
            lta_details["status"] = "unprocessed"
            lta_details["base_path"] = project_path
            lta_details["start_time"] = current_date_timestamp
            lta_details["proposal_dir"] = project_path.split('/')[-1]
            lta_details["pipeline_id"] = 1
            lta_details["comments"] = status
            lta_details["counter"] = 0
            lta_details["ltacomb_file"] = lta_file
            lta_details["isghb"] = isghb
            lta_details["cycle_id"] = cycle_id
            if obs_no:
                lta_details["observation_no"] = int(obs_no)

            # Project the collected fields onto each table's schema.
            # (dict membership test replaces the original O(n)
            # `key in d.iterkeys()` scan and is Python-3 compatible.)
            projectobsno_data = {
                key: lta_details[key]
                for key in tableSchema.projectobsnoData
                if key in lta_details
            }
            ltadetails_data = {
                key: lta_details[key]
                for key in tableSchema.ltadetailsData
                if key in lta_details
            }

            # Reuse the project row when one already exists for this
            # proposal_dir/cycle_id pair; otherwise create it.
            columnKeys = {"project_id"}
            whereKeys = {"proposal_dir": lta_details["proposal_dir"],
                         "cycle_id": cycle_id}
            project_id = dbutils.select_test_table(
                "projectobsno", columnKeys, whereKeys, 0)
            if project_id:
                project_id = project_id[0]
            else:
                project_id = dbutils.insert_into_table(
                    "projectobsno", projectobsno_data,
                    tableSchema.projectobsnoId)
            ltadetails_data["project_id"] = project_id
            lta_id = dbutils.insert_into_table(
                "ltadetails", ltadetails_data, tableSchema.ltadetailsId)
            print(lta_id)
            print("projectobsno")
            print(projectobsno_data)
        except Exception as e:
            # Best-effort: keep processing the remaining records, but
            # surface the failure for this one.
            print(e)
def seed_projectobsno(cycle_id):
    """Seed projectobsno/ltadetails rows from on-disk cycle directories.

    Scans ``/GARUDATA/IMAGING<cycle_id>/CYCLE<cycle_id>/*/`` for project
    directories, takes the first ``*.lta`` file found in each, enriches its
    scan-group metadata with bookkeeping fields, and inserts one row into
    ``projectobsno`` and one into ``ltadetails`` via the project model.

    :param cycle_id: observing cycle identifier embedded in the scan path;
        coerced to str so an int argument also works
    """
    cycle_id = str(cycle_id)  # harden: allow int or str cycle ids
    cycle_location = ("/GARUDATA/IMAGING" + cycle_id +
                      "/CYCLE" + cycle_id + "/*/")
    project_dir_list = glob.glob(cycle_location + "*")
    db_model = project_model.ProjectModel()
    for eachDir in project_dir_list:
        lta_file_list = glob.glob(eachDir + "/*.lta")
        if len(lta_file_list) >= 1:
            # Only the first LTA file per directory is registered.
            lta_file = lta_file_list[0].split('/')[-1]
            try:
                # NOTE(review): currentTimeInSec is defined elsewhere in
                # this module — confirm it is refreshed per run.
                current_date_timestamp = datetime.datetime.fromtimestamp(
                    currentTimeInSec).strftime('%Y-%m-%d %H:%M:%S')
                lta_details = gS.get_naps_scangroup_details(lta_file)
                utils = FileUtilities()
                lta_details["ltacomb_size"] = int(
                    utils.calculate_file_size(lta_file_list[0]))
                lta_details["status"] = "unprocessed"
                lta_details["file_path"] = eachDir
                lta_details["start_time"] = current_date_timestamp
                lta_details["proposal_dir"] = eachDir.split('/')[-1]
                lta_details["pipeline_id"] = 1
                # NOTE(review): hard-coded "cycle18" even though cycle_id is
                # a parameter — looks intentional for this seeding run, but
                # verify before reusing for other cycles.
                lta_details["comments"] = "cycle18"
                lta_details["counter"] = 0
                lta_details["ltacomb_file"] = lta_file

                # Project collected fields onto each table's schema.
                # (plain `in` replaces the Py2-only, O(n)
                # `key in d.iterkeys()` scan.)
                projectobsno_data = {
                    key: lta_details[key]
                    for key in tableSchema.projectobsnoData
                    if key in lta_details
                }
                ltadetails_data = {
                    key: lta_details[key]
                    for key in tableSchema.ltadetailsData
                    if key in lta_details
                }

                print("ltadetails_data")
                print(ltadetails_data)
                project_id = db_model.insert_into_table(
                    "projectobsno", projectobsno_data,
                    tableSchema.projectobsnoId)
                ltadetails_data["project_id"] = project_id
                lta_id = db_model.insert_into_table(
                    "ltadetails", ltadetails_data, tableSchema.ltadetailsId)
                print(lta_id)
                print("projectobsno")
                print(projectobsno_data)
            except Exception as e:
                # Best-effort: continue with the next directory.
                print(e)
def cycle21():
    """Reorganize cycle-21 data products into per-observation directories.

    For every image directory under ``/GARUDATA/IMAGING21/IMAGES/`` the
    project code is resolved (via symlinks for numeric directory names),
    scan-group metadata is fetched to get the observation number, and the
    pre-calibrated UVFITS plus FITS images are copied into
    ``/GARUDATA/IMAGING21/CYCLE21/<obs_no>/<DIR_NAME>/{PRECALIB,FITS_IMAGE}/``.
    """
    cycle_area = '/GARUDATA/IMAGING21/IMAGES/'
    precalib_uvfits_path = '/GARUDATA/FITS/CYCLE21/SPLIT_CYCLE_21_2/'
    process_target_fits_path = glob.glob('/GARUDATA/IMAGING21/IMAGES/*')

    # BUG FIX: the original iterated over the *characters* of the path
    # string instead of globbing its entries. Also dedupe before sorting —
    # the original sorted first and then let set() scramble the order.
    # NOTE(review): uvfits_list is never used below; kept for parity.
    uvfits_list = sorted(set(
        p.split('/')[-1] for p in glob.glob(precalib_uvfits_path + '*')))

    fits_list = sorted(set(
        p.split('/')[-1] for p in process_target_fits_path))

    sorted_list = []
    cycle_20_path = '/GARUDATA/IMAGING21/CYCLE21/'
    for each_dir in fits_list:
        dir_path = cycle_20_path + each_dir
        proj_code = each_dir
        fname = proj_code
        if not proj_code.isdigit():
            # Directory name is already the project code.
            proj_code = dir_path.split('/')[-1]
            fname = proj_code
        else:
            # Numeric directory: recover the project code by following the
            # symlinks left by the split step back to the LTA location
            # (path component 6 of the link target holds the code).
            print("--- " + each_dir)
            link_loc = glob.glob(
                "/GARUDATA/FITS/CYCLE21/SPLIT_LINK_CYCLE_21/"
                + each_dir + '/*')
            for each_file in link_loc:
                if os.path.islink(each_file):
                    lta_file = os.readlink(each_file)
                    print("-------->>> " + lta_file)
                    proj_code = lta_file.split('/')[6]
                    fname = proj_code

        print(dir_path)
        # Normalize the code into an lta file name, e.g. X_lta_Y -> X.lta.Y
        proj = proj_code.replace('_lta', '.lta')
        sorted_list.append(dir_path)
        nproj = proj.replace('lta_', 'lta.')
        proj = proj_code
        print(nproj)
        lta_data = gs.get_naps_scangroup_details(nproj)
        dir_name = nproj.upper().split('.')[0]
        observation_no = lta_data['observation_no']
        print(lta_data, dir_name, dir_path)

        # PRECALIB products
        new_precalib_path = (cycle_20_path + str(observation_no) + '/'
                             + dir_name + '/PRECALIB/')
        if not os.path.exists(new_precalib_path):
            os.makedirs(new_precalib_path)
        else:
            print('PRECALIB -- ' + new_precalib_path)
        copying(precalib_uvfits_path + fname, new_precalib_path)

        # FITS images
        new_fits_path = (cycle_20_path + str(observation_no) + '/'
                         + dir_name + '/FITS_IMAGE/')
        if not os.path.exists(new_fits_path):
            os.makedirs(new_fits_path)
        else:
            print('FITS_IMAGE -- ' + new_fits_path)
        copying(cycle_area + each_dir, new_fits_path)
        print('----------------' + each_dir + '---------------')