예제 #1
0
def __main__():
    """Pre-calibration driver for Cycle-18 UVFITS files.

    Scans GARUDATA for UVFITS files, visits them in random order and, for
    each directory not yet flagged as processed, copies the file into the
    SPAM working area, runs the SPAM pre-calibration stage, then removes
    the per-thread directory.
    """
    UVFITS_FILE_LIST = glob.glob("/GARUDATA/IMAGING18/CYCLE18/*/*/*.UVFITS")
    # Bug fix: the original printed the undefined name UVFITS_DATA.
    print(UVFITS_FILE_LIST)
    # Randomize order so concurrent workers are unlikely to collide on
    # the same file.
    shuffle(UVFITS_FILE_LIST)
    if not UVFITS_FILE_LIST:
        print("UVFITS_FILE_LIST is empty")
        print(UVFITS_FILE_LIST)
    for EACH_UVFITS_FILE in UVFITS_FILE_LIST:
        UVFITS_BASE_DIR = os.path.dirname(EACH_UVFITS_FILE) + "/"
        # Skip directories another worker already claimed.
        if not check_pipeline_flag(UVFITS_BASE_DIR):
            set_flag(UVFITS_BASE_DIR, PRECAL_PROCESSING)
            # Thread dir = first three components of the cwd; working dir
            # falls back to ./fits when the cwd itself is not a fits dir.
            is_fits_dir = os.getcwd().split('/')
            SPAM_WORKING_DIR = os.getcwd()
            SPAM_THREAD_DIR = ""
            for num in range(1, 4):
                SPAM_THREAD_DIR += "/" + is_fits_dir[num]
            if 'fits' not in is_fits_dir:
                SPAM_THREAD_DIR = os.getcwd()
                SPAM_WORKING_DIR = os.getcwd() + "/fits/"
            copy_files(EACH_UVFITS_FILE, SPAM_WORKING_DIR)
            print("Copying done ==> Moving to pre_cal_target")
            run_spam_precalibration_stage(UVFITS_BASE_DIR, SPAM_WORKING_DIR,
                                          EACH_UVFITS_FILE)
            delete_dir(SPAM_THREAD_DIR)
            # spam.exit() terminates the process, so each invocation
            # handles at most one unprocessed file.
            spam.exit()
예제 #2
0
    def combining_lsb_usb(self):
        """Stage 4: combine matched USB/LSB pre-calibrated UVFITS pairs.

        Picks one GHB project in 'now' status from projectobsno, pairs the
        USB and LSB UVFITS files under <base_path>/PRECALIB, runs the SPAM
        combine step per pair, and records progress and results in the
        projectobsno, calibrationinput and imaginginput tables.
        """
        print("Started Stage4: ")
        cycle_id = self.pipeline_configuration()["cycle_id"]

        spam.set_aips_userid(11)
        dbutils = DBUtils()
        fileutils = FileUtils()
        # Pessimistic defaults, overwritten once a combine attempt runs.
        status = "failed"
        comments = "combine usb lsb failed"
        # fileutils = FileUtils()
        # query conditions for projectobsno
        columnKeys = {"project_id", "base_path", "observation_no"}
        # NOTE(review): cycle_id is hard-coded to 16 here although the
        # configured cycle_id was fetched above — confirm this is intended.
        whereKeys = {"isghb": True, "cycle_id": 16, "status": "now"}

        project_data = dbutils.select_from_table("projectobsno", columnKeys,
                                                 whereKeys, 0)
        print(project_data)

        # Positional unpacking — assumes the row comes back ordered as
        # (observation_no, project_id, base_path); columnKeys is an
        # unordered set, so verify against DBUtils.
        project_id = project_data[1]
        base_path = project_data[2]
        obsno = project_data[0]

        start_time = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        # print(project_id, base_path, obsno)

        # query conditions for calibrationinput
        columnKeys = {"calibration_id", "uvfits_file"}
        whereKeys = {"project_id": project_id, "status": "now"}
        calibration_data = dbutils.select_from_table("calibrationinput",
                                                     columnKeys, whereKeys,
                                                     None)
        print(calibration_data)

        if not calibration_data:
            print(
                "All the data is processed ... OR \n ==> please check the DB for combinelsbusb"
            )
            spam.exit()

        print(len(calibration_data))
        if len(calibration_data) < 2:
            # A single sideband: nothing to combine, just mark it done.
            status = "success"
            comments = "single file combinelsbusb not required"
            # NOTE(review): usb_lsb_file is computed but never used.
            usb_lsb_file = glob.glob(base_path + "/PRECALIB/*GMRT*.UVFITS")
            # NOTE(review): calibration_data is known truthy here (the
            # empty case exits above), so the else branch below is
            # unreachable.
            if calibration_data:
                projectobsno_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                print("Updating the projectobsno ... ")
                dbutils.update_table(projectobsno_update_data, "projectobsno")
                if calibration_data:
                    calibration_update_data = {
                        "set": {
                            "status": status,
                            "comments": comments
                        },
                        "where": {
                            "calibration_id":
                            calibration_data[0]["calibration_id"]
                        }
                    }
                    dbutils.update_table(calibration_update_data,
                                         "calibrationinput")
            else:
                projectobsno_update_data = {
                    "set": {
                        "status": "failed",
                        "comments": "Failed Error: Something went wrong"
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                print("Updating the projectobsno ... ")
                dbutils.update_table(projectobsno_update_data, "projectobsno")
        else:
            # NOTE(review): this branch is reached for 2 or more rows, so
            # the message below is misleading for exactly 2.
            print("Values > 2")
            print("*************" + str(os.getcwd()))
            for each_uvfits in calibration_data:
                # NOTE(review): precalib_files is unused, and these globs
                # are re-evaluated each iteration although base_path does
                # not change inside the loop.
                precalib_files = glob.glob(base_path + "/PRECALIB/*")
                lsb_list = glob.glob(base_path + "/PRECALIB/*_LL_*.UVFITS")
                usb_list = glob.glob(base_path + "/PRECALIB/*_RR_*.UVFITS")

                # Fall back to LSB/USB naming when the polarisation-style
                # names match nothing.
                if len(lsb_list) == 0 or len(usb_list) == 0:
                    print(len(lsb_list), len(usb_list))
                    lsb_list = glob.glob(base_path + "/PRECALIB/*LSB*.UVFITS")
                    usb_list = glob.glob(base_path + "/PRECALIB/*USB*.UVFITS")

                projectobsno_update_data = {
                    "set": {
                        "status": "processing",
                        "comments": "combining_lsb_usb"
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                dbutils.update_table(projectobsno_update_data, "projectobsno")
                calibration_id = each_uvfits["calibration_id"]
                uvfits_file = each_uvfits["uvfits_file"]
                calibration_update_data = {
                    "set": {
                        "status": "processing",
                        "comments": "combining_lsb_usb",
                        "start_time": start_time
                    },
                    "where": {
                        "calibration_id": calibration_id
                    }
                }
                dbutils.update_table(calibration_update_data,
                                     "calibrationinput")
                print("lsb_list : " + str(len(lsb_list)))
                print("usb_list : " + str(len(usb_list)))
                status = "failed"
                comments = "combining lsb usb"
                # Only combine when the sidebands pair up one-to-one.
                if len(lsb_list) == len(usb_list):
                    print(">>>>>>COMBINE_LSB_USB<<<<<<<")
                    usb_list.sort()
                    lsb_list.sort()
                    print(usb_list)
                    print(lsb_list)
                    # Sorted lists are zipped so the nth USB file pairs
                    # with the nth LSB file.
                    to_spam = list(zip(usb_list, lsb_list))
                    file_size = 0
                    print(to_spam)
                    for each_pair in to_spam:
                        print("-------------------------")
                        # Output name is derived from the USB file name.
                        comb = each_pair[0].replace('USB', 'COMB')
                        data = each_pair, comb
                        print("++++++++++++++++" + comb)
                        currentTimeInSec = time.time()
                        fits_comb = comb.split('/')[-1]
                        # Skip pairs whose combined file already exists in
                        # the local fits/ working directory.
                        check_comb_file = glob.glob("fits/" + fits_comb)
                        if not check_comb_file:
                            status, comments = fileutils.run_spam_combine_usb_lsb(
                                data)
                            if status == 'success':
                                status = str(cycle_id)
                            print("__________________________________________")
                            print(glob.glob("fits/*"))
                            print("__________________________________________")
                            end_time = datetime.datetime.fromtimestamp(
                                time.time()).strftime('%Y-%m-%d %H:%M:%S')
                            if not comments:
                                comments = "done combining usb lsb"
                            if glob.glob(comb):
                                file_size = fileutils.calculalate_file_sizse_in_MB(
                                    comb)
                            imagininput_data = {
                                "project_id": project_id,
                                "calibration_id": calibration_id,
                                "calibrated_fits_file": os.path.basename(comb),
                                "file_size": file_size,
                                "start_time": start_time,
                                "end_time": end_time,
                                "comments": "c16 " + comments,
                            }
                            dbutils.insert_into_table("imaginginput",
                                                      imagininput_data,
                                                      "imaging_id")
                            print("-------------------------")
                # Final per-row bookkeeping with the last status/comments.
                end_time = datetime.datetime.fromtimestamp(
                    time.time()).strftime('%Y-%m-%d %H:%M:%S')
                calibration_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments,
                        "start_time": start_time,
                        "end_time": end_time
                    },
                    "where": {
                        "calibration_id": calibration_id
                    }
                }
                dbutils.update_table(calibration_update_data,
                                     "calibrationinput")

                projectobsno_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                dbutils.update_table(projectobsno_update_data, "projectobsno")
예제 #3
0
    def pre_calibration_targets(self):
        """Stage 3: run SPAM pre-calibration for one pending UVFITS file.

        Selects one calibrationinput row whose status equals the configured
        cycle_id, copies its UVFITS file into the local ./fits working
        directory, runs the SPAM pre-calibration stage, then writes the
        resulting success/failure status back to the projectobsno and
        calibrationinput tables.
        """
        print("Started Stage3: ")
        spam.set_aips_userid(33)
        dbutils = DBUtils()
        fileutils = FileUtils()

        cycle_id = self.pipeline_configuration()["cycle_id"]

        # while True:
        #     columnKeys = {"calibration_id"}
        #     whereData = {"comments": "c15", "status": "copying"}
        #     uncalibrated_uvfits = dbutils.select_from_table("calibrationinput", columnKeys, whereData, 0)
        #     if not uncalibrated_uvfits:
        #         break
        #     print("Waiting for bandwidth ... ")
        #     time.sleep(50)

        columnKeys = {"calibration_id", "project_id", "uvfits_file"}
        whereData = {"status": str(cycle_id)}
        uncalibrated_uvfits = dbutils.select_from_table(
            "calibrationinput", columnKeys, whereData, 0)

        if not uncalibrated_uvfits:
            print(
                "All for the data is processed ... please check the DB for pre_calib"
            )
            spam.exit()

        # Positional unpacking — assumes the row is ordered as
        # (calibration_id, project_id, uvfits_file); columnKeys is an
        # unordered set, so verify against DBUtils.
        calibration_id = uncalibrated_uvfits[0]
        project_id = uncalibrated_uvfits[1]
        uvfits_file = uncalibrated_uvfits[2]

        columnKeys = {"base_path", "observation_no"}
        whereData = {"project_id": project_id, "cycle_id": int(cycle_id)}
        project_details = dbutils.select_from_table("projectobsno", columnKeys,
                                                    whereData, 0)

        base_path = project_details[1]
        observation_no = project_details[0]

        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')

        # Mark the project as busy and the calibration row as copying.
        projectobsno_update_data = {
            "set": {
                "status":
                "processing",
                "comments":
                "running precalibrate_target, calibration_id = " +
                str(calibration_id),
            },
            "where": {
                "project_id": project_id
            }
        }

        calibration_update_data = {
            "set": {
                "status": "copying",
                "start_time": current_date_timestamp
            },
            "where": {
                "calibration_id": calibration_id
            }
        }

        dbutils.update_table(projectobsno_update_data, "projectobsno")
        dbutils.update_table(calibration_update_data, "calibrationinput")

        UVFITS_FILE_NAME = uvfits_file
        UVFITS_BASE_DIR = base_path
        is_fits_dir = os.getcwd().split('/')
        print(is_fits_dir)
        SPAM_WORKING_DIR = os.getcwd()
        print(SPAM_WORKING_DIR)
        # for num in range(1, 3):
        #     SPAM_THREAD_DIR += "/" + is_fits_dir[num]
        # if 'fits' not in is_fits_dir:
        #     SPAM_THREAD_DIR = os.getcwd()
        # Work is always staged in ./fits under the current directory.
        SPAM_WORKING_DIR = os.getcwd() + "/fits/"
        print(SPAM_WORKING_DIR, UVFITS_BASE_DIR, UVFITS_FILE_NAME)
        UVFITS_FILE_PATH = UVFITS_BASE_DIR + "/" + UVFITS_FILE_NAME
        print(UVFITS_FILE_PATH)
        print(SPAM_WORKING_DIR)
        fileutils.copy_files(UVFITS_FILE_PATH, SPAM_WORKING_DIR)
        print("Copying done ==> Moving to pre_cal_target")
        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        calibration_update_data = {
            "set": {
                "status": "processing",
                "start_time": current_date_timestamp
            },
            "where": {
                "calibration_id": calibration_id
            }
        }
        dbutils.update_table(calibration_update_data, "calibrationinput")

        fileutils.run_spam_precalibration_stage(UVFITS_BASE_DIR,
                                                SPAM_WORKING_DIR,
                                                UVFITS_FILE_NAME,
                                                observation_no)
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')

        # SPAM leaves failed_log.txt in PRECALIB on failure; its contents
        # become the comments recorded in the DB.
        check_status_file = glob.glob(base_path + "/PRECALIB/failed_log.txt")
        comments = "failed"
        if check_status_file:
            status = "failed"
            # NOTE(review): the file handle is never explicitly closed.
            comments = str(open(check_status_file[0], 'r').read())
        else:
            status = "success"
            comments = "precalibrate_target done, calibration_id = " + str(
                calibration_id)

        projectobsno_update_data = {
            "set": {
                "status": status,
                "comments": comments
            },
            "where": {
                "project_id": project_id
            }
        }

        calibration_update_data = {
            "set": {
                "status": status,
                "end_time": current_date_timestamp,
                "comments": comments
            },
            "where": {
                "calibration_id": calibration_id
            }
        }

        dbutils.update_table(projectobsno_update_data, "projectobsno")
        dbutils.update_table(calibration_update_data, "calibrationinput")
예제 #4
0
class Pipeline:
    def pipeline_configuration(
            self,
            filename='/home/gadpu/gadpu_pipeline/database.ini',
            section='pipeline'):
        print("Fetching Pipeline Configuration ... ")
        parser = ConfigParser.ConfigParser()
        parser.read(filename)
        pconf = {}
        if parser.has_section(section):
            params = parser.items(section)
            for param in params:
                pconf[param[0]] = param[1]
        else:
            raise Exception('Section {0} not found in the {1} file'.format(
                section, filename))
        return pconf

    def copying_and_ltacomb(self, gdata):
        """Stage 1: stage raw LTA/GSB files into the project tree.

        gdata[0] is a dict keyed by observation, each value holding
        proposal_id, file_path, backend_type and cycle_id; gdata[1] is the
        destination root the per-project directory is built under. For
        each observation the .lt* files are located, multi-part files are
        combined via ltacomb, and the results are copied and registered
        through FileUtils.
        """
        print("Started Stage1: ")
        spamutils = SpamUtils()
        fileutils = FileUtils()

        data = gdata[0]
        path = gdata[1]

        for each_obs in data:
            print("=========================================")
            print(each_obs)
            print("=========================================")
            proposal_id = data[each_obs]['proposal_id']
            file_path = data[each_obs]['file_path']
            backend_type = data[each_obs]['backend_type']
            cycle_id = data[each_obs]['cycle_id']
            print("********" + file_path)
            # Project dir = path + last three components of file_path.
            project_path = path + "/".join(file_path.split('/')[-3:])
            print("---------" + project_path)
            lta, ltb, gsb = None, None, None
            isghb = False
            status = str(cycle_id)

            files_list = glob.glob(file_path + '*.lt*')
            files_list.sort()
            print("Processing .... " + str(project_path))

            if len(files_list) == 1:
                # Single file: plain LTA observation, nothing to combine.
                lta = files_list[0]
            elif any("gsb" in lt_list for lt_list in files_list):
                gsb = [x for x in files_list if "gsb" in x][0]
                print(os.path.basename(gsb))
                # check_for_multiples presumably detects split multi-part
                # files needing ltacomb — confirm against FileUtils.
                cgsb = fileutils.check_for_multiples(gsb)
                if cgsb:
                    print("LTACOMB: " + str(cgsb) + " " + project_path + " " +
                          os.path.basename(gsb))
                    status = spamutils.run_ltacomb(cgsb, project_path)
            elif any("ltb" in lt_list for lt_list in files_list):
                # print(files_list)
                # Presence of an .ltb sidecar marks a GHB observation.
                isghb = True
                lta = [x for x in files_list if "lta" in x][0]
                ltb = [x for x in files_list if "ltb" in x][0]
                clta = fileutils.check_for_multiples(lta)
                cltb = fileutils.check_for_multiples(ltb)
                if clta:
                    print("LTACOMB: " + str(clta) + " " + project_path + "" +
                          os.path.basename(lta))
                    status = spamutils.run_ltacomb(clta, project_path)
                if cltb:
                    print("LTACOMB: " + str(cltb) + " " + project_path + "" +
                          os.path.basename(ltb))
                    status = spamutils.run_ltacomb(cltb, project_path)

            print(isghb, lta, ltb, gsb)

            if isghb:
                # GHB: both sidebands are copied and registered together.
                fileutils.copy_files(lta, project_path)
                fileutils.copy_files(ltb, project_path)
                fileutils.insert_details([lta, ltb], project_path, isghb,
                                         cycle_id, status)
            else:
                if gsb:
                    fileutils.copy_files(gsb, project_path)
                    fileutils.insert_details(gsb, project_path, isghb,
                                             cycle_id, status)
                if lta:
                    fileutils.copy_files(lta, project_path)
                    fileutils.insert_details(lta, project_path, isghb,
                                             cycle_id, status)

    def running_gvfits(self):
        """Stage 2: convert combined LTA files to UVFITS via gvfits.

        For every ltadetails row whose comments match the configured
        cycle_id, runs gvfits on <base_path>/<ltacomb_file> and inserts a
        calibrationinput row with the outcome; when the LTA file is
        missing, marks both projectobsno and ltadetails as failed.
        """
        print("Started Stage2: ")

        cycle_id = self.pipeline_configuration()["cycle_id"]

        dbutils = DBUtils()
        spamutils = SpamUtils()
        fileutils = FileUtils()

        currentTimeInSec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            currentTimeInSec).strftime('%Y-%m-%d %H:%M:%S')

        columnKeys = {"project_id", "ltacomb_file", "lta_id"}
        whereKeys = {"comments": str(cycle_id)}

        lta_details = dbutils.select_from_table("ltadetails", columnKeys,
                                                whereKeys, None)

        print(lta_details)

        for each_lta in lta_details:
            print(each_lta)
            project_id = each_lta["project_id"]
            # project_id = each_lta[0]
            lta_file = each_lta["ltacomb_file"]
            # lta_file = each_lta[1]
            # lta_id = each_lta[2]
            lta_id = each_lta["lta_id"]
            columnKeys = {"base_path"}
            whereKeys = {"project_id": project_id}
            # NOTE(review): uses select_test_table here, unlike the
            # select_from_table calls elsewhere — confirm this is intended.
            lta_path_details = dbutils.select_test_table(
                "projectobsno", columnKeys, whereKeys, 0)
            print(lta_path_details)
            base_path = lta_path_details[0]
            print(base_path)
            uvfits_file = lta_file + '.UVFITS'
            base_lta = base_path + '/' + lta_file
            if os.path.exists(base_lta):
                base_uvfits = base_path + '/' + uvfits_file
                gvfits_status = spamutils.run_gvfits(base_lta, base_uvfits)
                # Success is judged solely by the UVFITS file existing.
                if os.path.exists(base_uvfits):
                    status = str(cycle_id)
                else:
                    status = "failed"

                # NOTE(review): the size below is computed even when the
                # UVFITS file was not produced — verify the helper
                # tolerates a missing path.
                calibration_data = {
                    "project_id":
                    project_id,
                    "lta_id":
                    lta_id,
                    "uvfits_file":
                    uvfits_file,
                    "status":
                    status,
                    "comments":
                    gvfits_status,
                    "uvfits_size":
                    fileutils.calculalate_file_sizse_in_MB(base_uvfits),
                    "start_time":
                    current_date_timestamp
                }

                dbutils.insert_into_table("calibrationinput", calibration_data,
                                          "calibration_id")

            else:
                # LTA missing: mark both tables failed for this row.
                project_update_data = {
                    "set": {
                        "status": "failed",
                        "comments": "ltacomb failed"
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                lta_details_update_data = {
                    "set": {
                        "status": "failed",
                        "comments": "ltacomb failed"
                    },
                    "where": {
                        "lta_id": lta_id
                    }
                }
                dbutils.update_table(project_update_data, "projectobsno")
                dbutils.update_table(lta_details_update_data, "ltadetails")

    def pre_calibration_targets(self):
        """Stage 3: run SPAM pre-calibration for one pending UVFITS file.

        (Duplicate of the earlier pre_calibration_targets definition in
        this file — consider deduplicating.) Selects one calibrationinput
        row whose status equals the configured cycle_id, copies its UVFITS
        file into the local ./fits working directory, runs the SPAM
        pre-calibration stage, then writes the resulting success/failure
        status back to the projectobsno and calibrationinput tables.
        """
        print("Started Stage3: ")
        spam.set_aips_userid(33)
        dbutils = DBUtils()
        fileutils = FileUtils()

        cycle_id = self.pipeline_configuration()["cycle_id"]

        # while True:
        #     columnKeys = {"calibration_id"}
        #     whereData = {"comments": "c15", "status": "copying"}
        #     uncalibrated_uvfits = dbutils.select_from_table("calibrationinput", columnKeys, whereData, 0)
        #     if not uncalibrated_uvfits:
        #         break
        #     print("Waiting for bandwidth ... ")
        #     time.sleep(50)

        columnKeys = {"calibration_id", "project_id", "uvfits_file"}
        whereData = {"status": str(cycle_id)}
        uncalibrated_uvfits = dbutils.select_from_table(
            "calibrationinput", columnKeys, whereData, 0)

        if not uncalibrated_uvfits:
            print(
                "All for the data is processed ... please check the DB for pre_calib"
            )
            spam.exit()

        # Positional unpacking — assumes the row is ordered as
        # (calibration_id, project_id, uvfits_file); columnKeys is an
        # unordered set, so verify against DBUtils.
        calibration_id = uncalibrated_uvfits[0]
        project_id = uncalibrated_uvfits[1]
        uvfits_file = uncalibrated_uvfits[2]

        columnKeys = {"base_path", "observation_no"}
        whereData = {"project_id": project_id, "cycle_id": int(cycle_id)}
        project_details = dbutils.select_from_table("projectobsno", columnKeys,
                                                    whereData, 0)

        base_path = project_details[1]
        observation_no = project_details[0]

        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')

        # Mark the project as busy and the calibration row as copying.
        projectobsno_update_data = {
            "set": {
                "status":
                "processing",
                "comments":
                "running precalibrate_target, calibration_id = " +
                str(calibration_id),
            },
            "where": {
                "project_id": project_id
            }
        }

        calibration_update_data = {
            "set": {
                "status": "copying",
                "start_time": current_date_timestamp
            },
            "where": {
                "calibration_id": calibration_id
            }
        }

        dbutils.update_table(projectobsno_update_data, "projectobsno")
        dbutils.update_table(calibration_update_data, "calibrationinput")

        UVFITS_FILE_NAME = uvfits_file
        UVFITS_BASE_DIR = base_path
        is_fits_dir = os.getcwd().split('/')
        print(is_fits_dir)
        SPAM_WORKING_DIR = os.getcwd()
        print(SPAM_WORKING_DIR)
        # for num in range(1, 3):
        #     SPAM_THREAD_DIR += "/" + is_fits_dir[num]
        # if 'fits' not in is_fits_dir:
        #     SPAM_THREAD_DIR = os.getcwd()
        # Work is always staged in ./fits under the current directory.
        SPAM_WORKING_DIR = os.getcwd() + "/fits/"
        print(SPAM_WORKING_DIR, UVFITS_BASE_DIR, UVFITS_FILE_NAME)
        UVFITS_FILE_PATH = UVFITS_BASE_DIR + "/" + UVFITS_FILE_NAME
        print(UVFITS_FILE_PATH)
        print(SPAM_WORKING_DIR)
        fileutils.copy_files(UVFITS_FILE_PATH, SPAM_WORKING_DIR)
        print("Copying done ==> Moving to pre_cal_target")
        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        calibration_update_data = {
            "set": {
                "status": "processing",
                "start_time": current_date_timestamp
            },
            "where": {
                "calibration_id": calibration_id
            }
        }
        dbutils.update_table(calibration_update_data, "calibrationinput")

        fileutils.run_spam_precalibration_stage(UVFITS_BASE_DIR,
                                                SPAM_WORKING_DIR,
                                                UVFITS_FILE_NAME,
                                                observation_no)
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')

        # SPAM leaves failed_log.txt in PRECALIB on failure; its contents
        # become the comments recorded in the DB.
        check_status_file = glob.glob(base_path + "/PRECALIB/failed_log.txt")
        comments = "failed"
        if check_status_file:
            status = "failed"
            # NOTE(review): the file handle is never explicitly closed.
            comments = str(open(check_status_file[0], 'r').read())
        else:
            status = "success"
            comments = "precalibrate_target done, calibration_id = " + str(
                calibration_id)

        projectobsno_update_data = {
            "set": {
                "status": status,
                "comments": comments
            },
            "where": {
                "project_id": project_id
            }
        }

        calibration_update_data = {
            "set": {
                "status": status,
                "end_time": current_date_timestamp,
                "comments": comments
            },
            "where": {
                "calibration_id": calibration_id
            }
        }

        dbutils.update_table(projectobsno_update_data, "projectobsno")
        dbutils.update_table(calibration_update_data, "calibrationinput")

    def combining_lsb_usb(self):
        print("Started Stage4: ")
        cycle_id = self.pipeline_configuration()["cycle_id"]

        spam.set_aips_userid(11)
        dbutils = DBUtils()
        fileutils = FileUtils()
        status = "failed"
        comments = "combine usb lsb failed"
        # fileutils = FileUtils()
        # query conditions for projectobsno
        columnKeys = {"project_id", "base_path", "observation_no"}
        whereKeys = {"isghb": True, "cycle_id": 16, "status": "now"}

        project_data = dbutils.select_from_table("projectobsno", columnKeys,
                                                 whereKeys, 0)
        print(project_data)

        project_id = project_data[1]
        base_path = project_data[2]
        obsno = project_data[0]

        start_time = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        # print(project_id, base_path, obsno)

        # query conditions for calibrationinput
        columnKeys = {"calibration_id", "uvfits_file"}
        whereKeys = {"project_id": project_id, "status": "now"}
        calibration_data = dbutils.select_from_table("calibrationinput",
                                                     columnKeys, whereKeys,
                                                     None)
        print(calibration_data)

        if not calibration_data:
            print(
                "All the data is processed ... OR \n ==> please check the DB for combinelsbusb"
            )
            spam.exit()

        print(len(calibration_data))
        if len(calibration_data) < 2:
            status = "success"
            comments = "single file combinelsbusb not required"
            usb_lsb_file = glob.glob(base_path + "/PRECALIB/*GMRT*.UVFITS")
            if calibration_data:
                projectobsno_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                print("Updating the projectobsno ... ")
                dbutils.update_table(projectobsno_update_data, "projectobsno")
                if calibration_data:
                    calibration_update_data = {
                        "set": {
                            "status": status,
                            "comments": comments
                        },
                        "where": {
                            "calibration_id":
                            calibration_data[0]["calibration_id"]
                        }
                    }
                    dbutils.update_table(calibration_update_data,
                                         "calibrationinput")
            else:
                projectobsno_update_data = {
                    "set": {
                        "status": "failed",
                        "comments": "Failed Error: Something went wrong"
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                print("Updating the projectobsno ... ")
                dbutils.update_table(projectobsno_update_data, "projectobsno")
        else:
            print("Values > 2")
            print("*************" + str(os.getcwd()))
            for each_uvfits in calibration_data:
                precalib_files = glob.glob(base_path + "/PRECALIB/*")
                lsb_list = glob.glob(base_path + "/PRECALIB/*_LL_*.UVFITS")
                usb_list = glob.glob(base_path + "/PRECALIB/*_RR_*.UVFITS")

                if len(lsb_list) == 0 or len(usb_list) == 0:
                    print(len(lsb_list), len(usb_list))
                    lsb_list = glob.glob(base_path + "/PRECALIB/*LSB*.UVFITS")
                    usb_list = glob.glob(base_path + "/PRECALIB/*USB*.UVFITS")

                projectobsno_update_data = {
                    "set": {
                        "status": "processing",
                        "comments": "combining_lsb_usb"
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                dbutils.update_table(projectobsno_update_data, "projectobsno")
                calibration_id = each_uvfits["calibration_id"]
                uvfits_file = each_uvfits["uvfits_file"]
                calibration_update_data = {
                    "set": {
                        "status": "processing",
                        "comments": "combining_lsb_usb",
                        "start_time": start_time
                    },
                    "where": {
                        "calibration_id": calibration_id
                    }
                }
                dbutils.update_table(calibration_update_data,
                                     "calibrationinput")
                print("lsb_list : " + str(len(lsb_list)))
                print("usb_list : " + str(len(usb_list)))
                status = "failed"
                comments = "combining lsb usb"
                if len(lsb_list) == len(usb_list):
                    print(">>>>>>COMBINE_LSB_USB<<<<<<<")
                    usb_list.sort()
                    lsb_list.sort()
                    print(usb_list)
                    print(lsb_list)
                    to_spam = list(zip(usb_list, lsb_list))
                    file_size = 0
                    print(to_spam)
                    for each_pair in to_spam:
                        print("-------------------------")
                        comb = each_pair[0].replace('USB', 'COMB')
                        data = each_pair, comb
                        print("++++++++++++++++" + comb)
                        currentTimeInSec = time.time()
                        fits_comb = comb.split('/')[-1]
                        check_comb_file = glob.glob("fits/" + fits_comb)
                        if not check_comb_file:
                            status, comments = fileutils.run_spam_combine_usb_lsb(
                                data)
                            if status == 'success':
                                status = str(cycle_id)
                            print("__________________________________________")
                            print(glob.glob("fits/*"))
                            print("__________________________________________")
                            end_time = datetime.datetime.fromtimestamp(
                                time.time()).strftime('%Y-%m-%d %H:%M:%S')
                            if not comments:
                                comments = "done combining usb lsb"
                            if glob.glob(comb):
                                file_size = fileutils.calculalate_file_sizse_in_MB(
                                    comb)
                            imagininput_data = {
                                "project_id": project_id,
                                "calibration_id": calibration_id,
                                "calibrated_fits_file": os.path.basename(comb),
                                "file_size": file_size,
                                "start_time": start_time,
                                "end_time": end_time,
                                "comments": "c16 " + comments,
                            }
                            dbutils.insert_into_table("imaginginput",
                                                      imagininput_data,
                                                      "imaging_id")
                            print("-------------------------")
                end_time = datetime.datetime.fromtimestamp(
                    time.time()).strftime('%Y-%m-%d %H:%M:%S')
                calibration_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments,
                        "start_time": start_time,
                        "end_time": end_time
                    },
                    "where": {
                        "calibration_id": calibration_id
                    }
                }
                dbutils.update_table(calibration_update_data,
                                     "calibrationinput")

                projectobsno_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                dbutils.update_table(projectobsno_update_data, "projectobsno")

    def process_targets(self):
        print("Started Stage5: ")
        """
        Stage5: pick one calibrated UVFITS file at random from the
        imaginginput queue, run SPAM's process_target on it, summarize
        the SPAM logs, move the products to FITS_IMAGE under the
        project's base_path, and record success/failure back into
        imaginginput.

        NOTE(review): the print above precedes this string, so it is a
        bare no-op expression rather than a real docstring.
        :return: None
        """
        # cycle_id doubles as the imaginginput.status marker used below
        # to select rows that are queued for imaging.
        cycle_id = self.pipeline_configuration()["cycle_id"]

        fileutils = FileUtils()
        # aips_id is computed but never used in this method.
        aips_id = int(random.random() * 100)
        spam.set_aips_userid(11)
        # Setting the Process Start Date Time
        start_time = str(datetime.datetime.now())
        # Taking system's in/out to backup variables so they can be
        # restored after the log redirection below.
        original_stdout = sys.stdout
        original_stderr = sys.stderr
        # assumes cwd is a per-thread working directory that contains
        # fits/ and datfil/ subdirectories -- TODO confirm caller setup.
        thread_dir = os.getcwd()
        # Changing directory to fits/
        os.chdir("fits/")
        datfil_dir = thread_dir + "/datfil/"
        fits_dir = thread_dir + "/fits/"
        # curr_dir duplicates fits_dir; kept for update_datproducts().
        curr_dir = thread_dir + "/fits/"
        process_status = False
        db_model = DBUtils()
        # Get random imaging_id & project_id
        column_keys = [
            tableSchema.imaginginputId, tableSchema.projectobsnoId,
            "calibrated_fits_file"
        ]
        where_con = {"status": str(cycle_id)}
        to_be_processed = db_model.select_from_table("imaginginput",
                                                     column_keys, where_con,
                                                     None)
        # NOTE(review): random.choice raises IndexError when the queue
        # is empty -- there is no guard here.
        imaginginput_details = random.choice(to_be_processed)
        print(imaginginput_details)
        imaging_id = imaginginput_details["imaging_id"]

        # Update status for imaginginput for selected imaging_id so a
        # concurrent worker does not pick the same row.
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
        update_data = {
            "set": {
                "status": "processing",
                "start_time": current_date_timestamp,
                "comments": "",
                "end_time": current_date_timestamp
            },
            "where": {
                "imaging_id": imaging_id,
            }
        }
        db_model.update_table(update_data, "imaginginput")

        project_id = imaginginput_details["project_id"]
        calibrated_fits_file = imaginginput_details["calibrated_fits_file"]

        # Using the above project_id, fetch base_path
        column_keys = ["base_path"]
        where_con = {"project_id": project_id}
        process_target_log = open('process_target.log', 'a+')
        process_target_log.write(
            '\n\n\n******PROCESS TARGET STARTED******\n\n\n')
        process_target_log.write("--->  Start Time " + start_time)
        # Redirect all stdout/stderr (including SPAM output) into the
        # process_target.log file until restored below.
        sys.stdout = process_target_log
        sys.stderr = process_target_log
        base_path = db_model.select_from_table("projectobsno", column_keys,
                                               where_con, 0)
        base_path = base_path[0]
        uvfits_full_path = base_path + "/PRECALIB/" + calibrated_fits_file
        # uvfits_full_path = base_path+"/PRECALIB/"+calibrated_fits_file
        print "Copying " + uvfits_full_path + " to " + fits_dir
        copying_fits = os.system("cp " + uvfits_full_path + " " + fits_dir)
        uvfits_file = calibrated_fits_file
        # Starting spam.process_target(SPLIT_FITS_FILE)
        try:
            spam.process_target(uvfits_file,
                                allow_selfcal_skip=True,
                                add_freq_to_name=True)
            # If this process_target is success call
            # GADPU API setSuccessStatus for the current fits_id
            current_time_in_sec = time.time()
            current_date_timestamp = datetime.datetime.fromtimestamp(
                current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
            # "checking" is an intermediate state; the SPAM-log scan
            # further down decides the final success/failed status.
            success_update_data = {
                "set": {
                    "status": "checking",
                    "end_time": current_date_timestamp,
                    "comments": "processing done, checking"
                },
                "where": {
                    "imaging_id": imaging_id
                }
            }
            db_model.update_table(success_update_data, "imaginginput")
        except Exception, e:  # Python 2 except syntax
            process_target_log.write("Error: " + str(e))
            # If this process_target is a failure call
            # GADPU API setFailedStatus for the current fits_id
            current_date_timestamp = datetime.datetime.fromtimestamp(
                time.time()).strftime('%Y-%m-%d %H:%M:%S')
            success_update_data = {
                "set": {
                    "status": "failed",
                    "end_time": current_date_timestamp,
                },
                "where": {
                    "imaging_id": imaging_id
                }
            }
            db_model.update_table(success_update_data, "imaginginput")
            # NOTE(review): 'tagret' typo below is a runtime string and
            # is left untouched here.
            print("Error: spam.process_tagret Failed " + uvfits_file)
            # Even the process is Success/Failed we remove
            # the Initially copied SPLIT_FITS_file, to save
            # disk space
        os.system('rm ' + uvfits_file)
        # recording the process end time to Log
        end_time = str(datetime.datetime.now())
        image_base_dir = base_path + "/FITS_IMAGE/"
        # Creating a new dir at BASE_DIR - FITS_IMAGES
        print "Make dir at " + image_base_dir
        os.system("mkdir -p " + image_base_dir)
        # STDIN/OUT controls are reverted.
        process_target_log.write("End Time " + end_time)
        # Flushing the all processed out log to the log_file
        process_target_log.flush()
        # reverting stdin/out controls
        sys.stdout = original_stdout
        sys.stderr = original_stderr
        # Getting the list of datfil/spam_logs for summarize the process
        spam_log = glob.glob(datfil_dir + "spam*.log")
        # if spam_log is non-empty list, proceed
        print spam_log
        failed_msg = "something went wrong"
        try:
            if spam_log:
                # for every spam*.log file in the datfil directory
                for each_spam_log in spam_log:
                    original_stdout = sys.stdout
                    original_stderr = sys.stderr
                    # Count "processing of field ... failed" lines to
                    # decide whether SPAM failed on any field.
                    failed = os.popen('grep "processing of field" ' +
                                      each_spam_log +
                                      ' | grep "failed" | wc -l').read()
                    if int(failed.strip()) > 0:
                        failed_msg = os.popen('fgrep "Error:" ' +
                                              each_spam_log + '').read()
                        current_date_timestamp = datetime.datetime.fromtimestamp(
                            time.time()).strftime('%Y-%m-%d %H:%M:%S')
                        failed_update_data = {
                            "set": {
                                "status": "failed",
                                "end_time": current_date_timestamp,
                                "comments": failed_msg
                            },
                            "where": {
                                "imaging_id": imaging_id
                            }
                        }
                        db_model.update_table(failed_update_data,
                                              "imaginginput")
                    else:
                        process_status = True
                    print each_spam_log
                    # getting summary of the log file
                    summ_file = each_spam_log.replace(".log", ".summary")
                    print summ_file
                    summary_filename = open(summ_file, 'a+')
                    # making the spam*.summary file and write the
                    # summarize_spam_log output (which prints to stdout,
                    # hence the temporary redirection)
                    summary_filename.write('\n\n******SUMMARY LOG******\n\n')
                    sys.stdout = summary_filename
                    sys.stderr = summary_filename
                    spam.summarize_spam_log(each_spam_log)
                    sys.stdout = original_stdout
                    sys.stderr = original_stderr
                    summary_filename.flush()
        except Exception as ex:
            print(ex)
        # Once the summary file is created inside the fits/
        # Moving all the files from datfil/ to fits/
        # Moving all the processed files from fits/ to FITS_IMAGE@BASE_DIR
        print "moving back the processed files from " + fits_dir + " to " + image_base_dir
        # The below print statement is only for recording purpose,
        # actual removing the THREAD directory is done after the
        # Move all the fits/ to FITS_IMAGE@BASE_DIR
        print "Moving datfil/ to fits/"
        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')

        moving_update_data = {
            "set": {
                "status": "moving",
                "end_time": current_date_timestamp,
                "comments": "Moving to NAS"
            },
            "where": {
                "imaging_id": imaging_id
            }
        }

        db_model.update_table(moving_update_data, "imaginginput")
        movedata = os.system('mv ' + datfil_dir + '* ' + fits_dir)
        if process_status:
            # Register every product file in the dataproducts table
            # before the files are moved off to FITS_IMAGE.
            self.update_datproducts(curr_dir, project_id, imaging_id, db_model)
            sleep(5)
        movefits = os.system("mv " + fits_dir + "* " + image_base_dir)
        sleep(5)
        # current THREAD dir
        # Changing the directory to /home/gadpu, inorder to delete the
        os.chdir('../../')
        print "Changed to " + os.getcwd()

        # Removing the current THREAD directory
        removethread = os.system('rm -rf ' + thread_dir)
        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        if process_status:
            status = "success"
        else:
            status = "failed"
        # NOTE(review): failed_msg keeps its default "something went
        # wrong" when no SPAM log reported a failure, so that text can
        # accompany a "success" status -- confirm intended.
        done_update_data = {
            "set": {
                "status": status,
                "end_time": current_date_timestamp,
                "comments": failed_msg
            },
            "where": {
                "imaging_id": imaging_id
            }
        }
        db_model.update_table(done_update_data, "imaginginput")
        # exiting the SPAM process and cleaning the cache memory
        spam.exit()
예제 #5
0
    print "Changed to " + os.getcwd()
    # Removing the current THREAD directory

    removethread = os.system('rm -rf ' + thread_dir)
    done_update_data = {
        "set": {
            "status": "success",
            "end_time": current_date_timestamp,
            "comments": "processed - files generated"
        },
        "where": {
            "imaging_id": imaging_id
        }
    }
    # exiting the SPAM process and cleaning the cache memory
    spam.exit()


def update_datproducts(curr_dir, project_id, imaging_id, db_model):
    products_list = glob.glob(curr_dir + '/*')
    for each_product in products_list:
        current_time_in_sec = time.time()
        product_data = {
            'project_id':
            project_id,
            'imaging_id':
            imaging_id,
            "file_size":
            calculate_file_size(each_product),
            "file_type":
            each_product.split('.')[-1],
예제 #6
0
class Pipeline:
    """
    Pipeline stage runner.  Only stage5 is defined in this variant; it
    mirrors process_targets elsewhere in this file but with a hardcoded
    "cycle226" queue marker and a "file_path" column lookup.
    """

    def stage5(self):
        """
        Stage5: run SPAM's process_target on one randomly selected
        calibrated UVFITS file, summarize the SPAM logs, move products
        to FITS_IMAGE under the project's path, and record the outcome
        in imaginginput.
        """
        fileutils = FileUtils()
        # aips_id = int(random.random()*100)
        spam.set_aips_userid(11)
        # Setting the Process Start Date Time
        start_time = str(datetime.datetime.now())
        # Taking system's in/out to backup variables so they can be
        # restored after the log redirection below.
        original_stdout = sys.stdout
        original_stderr = sys.stderr
        # assumes cwd is a per-thread working directory that contains
        # fits/ and datfil/ subdirectories -- TODO confirm caller setup.
        thread_dir = os.getcwd()
        # Changing directory to fits/
        os.chdir("fits/")
        datfil_dir = thread_dir + "/datfil/"
        fits_dir = thread_dir + "/fits/"
        # curr_dir duplicates fits_dir; kept for update_datproducts().
        curr_dir = thread_dir + "/fits/"
        process_status = False
        db_model = DBUtils()
        # Get random imaging_id & project_id
        column_keys = [
            tableSchema.imaginginputId, tableSchema.projectobsnoId,
            "calibrated_fits_file"
        ]
        # NOTE(review): queue marker "cycle226" is hardcoded here,
        # unlike the cycle_id-driven variant of this method elsewhere
        # in this file.
        where_con = {"status": "cycle226"}
        to_be_processed = db_model.select_from_table("imaginginput",
                                                     column_keys, where_con,
                                                     None)
        print(len(to_be_processed))
        print(to_be_processed)
        # NOTE(review): random.choice raises IndexError when the queue
        # is empty -- there is no guard here.
        imaginginput_details = random.choice(to_be_processed)
        print(imaginginput_details)
        imaging_id = imaginginput_details["imaging_id"]

        # Update status for imaginginput for selected imaging_id so a
        # concurrent worker does not pick the same row.
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
        update_data = {
            "set": {
                "status": "processing",
                "start_time": current_date_timestamp,
                "comments": "processing_cycle20_process_target ",
                "end_time": current_date_timestamp
            },
            "where": {
                "imaging_id": imaging_id,
            }
        }
        db_model.update_table(update_data, "imaginginput")

        project_id = imaginginput_details["project_id"]
        calibrated_fits_file = imaginginput_details["calibrated_fits_file"]

        # Using the above project_id, fetch the project path.
        # NOTE(review): this variant selects "file_path" (not
        # "base_path") but stores it in a variable named base_path --
        # verify the projectobsno schema actually has this column.
        column_keys = ["file_path"]
        where_con = {"project_id": project_id}
        process_target_log = open('process_target.log', 'a+')
        process_target_log.write(
            '\n\n\n******PROCESS TARGET STARTED******\n\n\n')
        process_target_log.write("--->  Start Time " + start_time)
        # Redirect all stdout/stderr (including SPAM output) into the
        # process_target.log file until restored below.
        sys.stdout = process_target_log
        sys.stderr = process_target_log
        base_path = db_model.select_from_table("projectobsno", column_keys,
                                               where_con, 0)
        base_path = base_path[0]
        uvfits_full_path = base_path + "/PRECALIB/" + calibrated_fits_file
        # uvfits_full_path = base_path+"/PRECALIB/"+calibrated_fits_file
        print "Copying " + uvfits_full_path + " to " + fits_dir
        copying_fits = os.system("cp " + uvfits_full_path + " " + fits_dir)
        uvfits_file = calibrated_fits_file
        # Starting spam.process_target(SPLIT_FITS_FILE)
        try:
            spam.process_target(uvfits_file,
                                allow_selfcal_skip=True,
                                add_freq_to_name=True)
            # If this process_target is success call
            # GADPU API setSuccessStatus for the current fits_id
            current_time_in_sec = time.time()
            current_date_timestamp = datetime.datetime.fromtimestamp(
                current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
            # "checking" is an intermediate state; the SPAM-log scan
            # further down decides the final success/failed status.
            success_update_data = {
                "set": {
                    "status": "checking",
                    "end_time": current_date_timestamp,
                    "comments": "processing done, checking"
                },
                "where": {
                    "imaging_id": imaging_id
                }
            }
            db_model.update_table(success_update_data, "imaginginput")
        except Exception, e:  # Python 2 except syntax
            process_target_log.write("Error: " + str(e))
            # If this process_target is a failure call
            # GADPU API setFailedStatus for the current fits_id
            current_date_timestamp = datetime.datetime.fromtimestamp(
                time.time()).strftime('%Y-%m-%d %H:%M:%S')
            success_update_data = {
                "set": {
                    "status": "failed",
                    "end_time": current_date_timestamp,
                },
                "where": {
                    "imaging_id": imaging_id
                }
            }
            db_model.update_table(success_update_data, "imaginginput")
            # NOTE(review): 'tagret' typo below is a runtime string and
            # is left untouched here.
            print("Error: spam.process_tagret Failed " + uvfits_file)
            # Even the process is Success/Failed we remove
            # the Initially copied SPLIT_FITS_file, to save
            # disk space
        os.system('rm ' + uvfits_file)
        # recording the process end time to Log
        end_time = str(datetime.datetime.now())
        image_base_dir = base_path + "/FITS_IMAGE/"
        # Creating a new dir at BASE_DIR - FITS_IMAGES
        print "Make dir at " + image_base_dir
        os.system("mkdir -p " + image_base_dir)
        # STDIN/OUT controls are reverted.
        process_target_log.write("End Time " + end_time)
        # Flushing the all processed out log to the log_file
        process_target_log.flush()
        # reverting stdin/out controls
        sys.stdout = original_stdout
        sys.stderr = original_stderr
        # Getting the list of datfil/spam_logs for summarize the process
        spam_log = glob.glob(datfil_dir + "spam*.log")
        # if spam_log is non-empty list, proceed
        print spam_log
        failed_msg = "something went wrong"
        try:
            if spam_log:
                # for every spam*.log file in the datfil directory
                for each_spam_log in spam_log:
                    original_stdout = sys.stdout
                    original_stderr = sys.stderr
                    # Count "processing of field ... failed" lines to
                    # decide whether SPAM failed on any field.
                    failed = os.popen('grep "processing of field" ' +
                                      each_spam_log +
                                      ' | grep "failed" | wc -l').read()
                    if int(failed.strip()) > 0:
                        failed_msg = os.popen('fgrep "Error:" ' +
                                              each_spam_log + '').read()
                        current_date_timestamp = datetime.datetime.fromtimestamp(
                            time.time()).strftime('%Y-%m-%d %H:%M:%S')
                        failed_update_data = {
                            "set": {
                                "status": "failed",
                                "end_time": current_date_timestamp,
                                "comments": failed_msg
                            },
                            "where": {
                                "imaging_id": imaging_id
                            }
                        }
                        db_model.update_table(failed_update_data,
                                              "imaginginput")
                    else:
                        process_status = True
                    print each_spam_log
                    # getting summary of the log file
                    summ_file = each_spam_log.replace(".log", ".summary")
                    print summ_file
                    summary_filename = open(summ_file, 'a+')
                    # making the spam*.summary file and write the
                    # summarize_spam_log output (which prints to stdout,
                    # hence the temporary redirection)
                    summary_filename.write('\n\n******SUMMARY LOG******\n\n')
                    sys.stdout = summary_filename
                    sys.stderr = summary_filename
                    spam.summarize_spam_log(each_spam_log)
                    sys.stdout = original_stdout
                    sys.stderr = original_stderr
                    summary_filename.flush()
        except Exception as ex:
            print(ex)
        # Once the summary file is created inside the fits/
        # Moving all the files from datfil/ to fits/
        # Moving all the processed files from fits/ to FITS_IMAGE@BASE_DIR
        print "moving back the processed files from " + fits_dir + " to " + image_base_dir
        # The below print statement is only for recording purpose,
        # actual removing the THREAD directory is done after the
        # Move all the fits/ to FITS_IMAGE@BASE_DIR
        print "Moving datfil/ to fits/"
        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')

        moving_update_data = {
            "set": {
                "status": "moving",
                "end_time": current_date_timestamp,
                "comments": "Moving to NAS"
            },
            "where": {
                "imaging_id": imaging_id
            }
        }

        db_model.update_table(moving_update_data, "imaginginput")
        movedata = os.system('mv ' + datfil_dir + '* ' + fits_dir)
        if process_status:
            # NOTE(review): update_datproducts is not defined on this
            # class in the visible source (a module-level function of
            # the same name exists elsewhere in this file) -- verify
            # this attribute exists at runtime.
            self.update_datproducts(curr_dir, project_id, imaging_id, db_model)
            sleep(5)
        movefits = os.system("mv " + fits_dir + "* " + image_base_dir)
        sleep(5)
        # current THREAD dir
        # Changing the directory to /home/gadpu, inorder to delete the
        os.chdir('../../')
        print "Changed to " + os.getcwd()

        # Removing the current THREAD directory
        removethread = os.system('rm -rf ' + thread_dir)
        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        if process_status:
            status = "success"
        else:
            status = "failed"
        # NOTE(review): failed_msg keeps its default "something went
        # wrong" when no SPAM log reported a failure, so that text can
        # accompany a "success" status -- confirm intended.
        done_update_data = {
            "set": {
                "status": status,
                "end_time": current_date_timestamp,
                "comments": failed_msg
            },
            "where": {
                "imaging_id": imaging_id
            }
        }
        db_model.update_table(done_update_data, "imaginginput")
        # exiting the SPAM process and cleaning the cache memory
        spam.exit()
예제 #7
0
def __main__():
    """
    Run SPAM pre-calibration for one project whose calibrationinput
    row is marked comments='c17' / status='success', then register the
    resulting calibrated UVFITS files in imaginginput for the imaging
    stage.
    """
    dbutils = DBUtils()
    fileutils = FileUtils()

    columnKeys = {"calibration_id", "project_id", "uvfits_file"}
    whereData = {"comments": "c17", "status": "success"}
    uncalibrated_uvfits = dbutils.select_from_table("calibrationinput",
                                                    columnKeys, whereData, 0)

    # NOTE(review): columnKeys is a Python set, so its iteration order
    # is not guaranteed; the fixed indices below assume DBUtils returns
    # columns in a deterministic order -- verify against DBUtils.
    calibration_id = uncalibrated_uvfits[0]
    project_id = uncalibrated_uvfits[1]
    uvfits_file = uncalibrated_uvfits[2]

    columnKeys = {"file_path", "observation_no"}
    whereData = {"project_id": project_id, "cycle_id": 17}
    project_details = dbutils.select_from_table("projectobsno", columnKeys,
                                                whereData, 0)

    # Same set-ordering caveat as above applies to these indices.
    base_path = project_details[1]
    # observation_no is fetched but never used below.
    observation_no = project_details[0]

    current_time_in_sec = time.time()
    current_date_timestamp = datetime.datetime.fromtimestamp(
        current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')

    # Mark the project as processing (only if it is still
    # 'unprocessed', per the where clause).
    projectobsno_update_data = {
        "set": {
            "status":
            "processing",
            "comments":
            "running precalibrate_target, calibration_id = " +
            str(calibration_id),
        },
        "where": {
            "project_id": project_id,
            "status": "unprocessed"
        }
    }

    calibration_update_data = {
        "set": {
            "status": "processing",
            "start_time": current_date_timestamp
        },
        "where": {
            "calibration_id": calibration_id,
            "status": "success"
        }
    }

    dbutils.update_table(projectobsno_update_data, "projectobsno")
    dbutils.update_table(calibration_update_data, "calibrationinput")

    EACH_UVFITS_FILE = base_path + '/' + uvfits_file

    UVFITS_BASE_DIR = base_path + "/"
    # Skip projects another worker has already flagged.
    if not check_pipeline_flag(UVFITS_BASE_DIR):
        set_flag(UVFITS_BASE_DIR, PRECAL_PROCESSING)
        # Decide the SPAM working dir: when cwd is not already inside a
        # fits/ tree, work in cwd/fits/ and treat cwd as the thread dir.
        is_fits_dir = os.getcwd().split('/')
        SPAM_WORKING_DIR = os.getcwd()
        SPAM_THREAD_DIR = ""
        # Rebuild the first three path components as the thread dir;
        # assumes cwd is at least 4 components deep -- TODO confirm.
        for num in range(1, 4):
            SPAM_THREAD_DIR += "/" + is_fits_dir[num]
        if 'fits' not in is_fits_dir:
            SPAM_THREAD_DIR = os.getcwd()
            SPAM_WORKING_DIR = os.getcwd() + "/fits/"
        copy_files(EACH_UVFITS_FILE, SPAM_WORKING_DIR)
        print "Copying done ==> Moving to pre_cal_target"

        run_spam_precalibration_stage(UVFITS_BASE_DIR, SPAM_WORKING_DIR,
                                      uvfits_file)

        # Presence of failed_log.txt under PRECALIB marks a failed run.
        check_status_file = glob.glob(base_path + "/PRECALIB/failed_log.txt")

        if check_status_file:
            status = "failed"
        else:
            status = "success"

        projectobsno_update_data = {
            "set": {
                "status":
                status,
                "comments":
                "precalibrate_target " + status + ", calibration_id = " +
                str(calibration_id),
            },
            "where": {
                "project_id": project_id
            }
        }

        # NOTE(review): end_time reuses the timestamp captured before
        # pre-calibration started, so it records the start rather than
        # the finish -- confirm whether that is intended.
        calibration_update_data = {
            "set": {
                "status": status,
                "end_time": current_date_timestamp
            },
            "where": {
                "calibration_id": calibration_id
            }
        }

        dbutils.update_table(projectobsno_update_data, "projectobsno")
        dbutils.update_table(calibration_update_data, "calibrationinput")

        if status == 'success':
            # Queue every calibrated UVFITS file produced under
            # PRECALIB for the imaging stage.
            calibrated_uvfits_list = glob.glob(base_path +
                                               '/PRECALIB/*.UVFITS')
            if calibrated_uvfits_list:
                for each_uvfits in calibrated_uvfits_list:
                    imaging_data = {
                        "project_id":
                        project_id,
                        "calibration_id":
                        calibration_id,
                        "calibrated_fits_file":
                        os.path.basename(each_uvfits),
                        "status":
                        "unprocessed",
                        "comments":
                        "c17",
                        "file_size":
                        fileutils.calculalate_file_sizse_in_MB(each_uvfits)
                    }
                    dbutils.insert_into_table("imaginginput", imaging_data,
                                              "imaging_id")

        delete_dir(SPAM_THREAD_DIR)
        spam.exit()