Example #1
    def process_targets(self):
        """
        Stage 5: run SPAM's process_target on a randomly selected calibrated
        FITS file and record the outcome in the imaginginput table.
        :return:
        """
        print("Started Stage5: ")
        cycle_id = self.pipeline_configuration()["cycle_id"]

        fileutils = FileUtils()
        aips_id = int(random.random() * 100)
        spam.set_aips_userid(11)
        # Setting the Process Start Date Time
        start_time = str(datetime.datetime.now())
        # Back up the original stdout/stderr before redirecting them to the log
        original_stdout = sys.stdout
        original_stderr = sys.stderr
        thread_dir = os.getcwd()
        # Changing directory to fits/
        os.chdir("fits/")
        datfil_dir = thread_dir + "/datfil/"
        fits_dir = thread_dir + "/fits/"
        curr_dir = thread_dir + "/fits/"
        process_status = False
        db_model = DBUtils()
        # Get random imaging_id & project_id
        column_keys = [
            tableSchema.imaginginputId, tableSchema.projectobsnoId,
            "calibrated_fits_file"
        ]
        where_con = {"status": str(cycle_id)}
        to_be_processed = db_model.select_from_table("imaginginput",
                                                     column_keys, where_con,
                                                     None)
        imaginginput_details = random.choice(to_be_processed)
        print(imaginginput_details)
        imaging_id = imaginginput_details["imaging_id"]

        # Update status for imaginginput for selected imaging_id
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
        update_data = {
            "set": {
                "status": "processing",
                "start_time": current_date_timestamp,
                "comments": "",
                "end_time": current_date_timestamp
            },
            "where": {
                "imaging_id": imaging_id,
            }
        }
        db_model.update_table(update_data, "imaginginput")

        project_id = imaginginput_details["project_id"]
        calibrated_fits_file = imaginginput_details["calibrated_fits_file"]

        # Using the above project_id, fetch base_path
        column_keys = ["base_path"]
        where_con = {"project_id": project_id}
        process_target_log = open('process_target.log', 'a+')
        process_target_log.write(
            '\n\n\n******PROCESS TARGET STARTED******\n\n\n')
        process_target_log.write("--->  Start Time " + start_time)
        # Logging all Standard In/Output
        sys.stdout = process_target_log
        sys.stderr = process_target_log
        base_path = db_model.select_from_table("projectobsno", column_keys,
                                               where_con, 0)
        base_path = base_path[0]
        uvfits_full_path = base_path + "/PRECALIB/" + calibrated_fits_file
        # uvfits_full_path = base_path+"/PRECALIB/"+calibrated_fits_file
        print "Copying " + uvfits_full_path + " to " + fits_dir
        copying_fits = os.system("cp " + uvfits_full_path + " " + fits_dir)
        uvfits_file = calibrated_fits_file
        # Starting spam.process_target(SPLIT_FITS_FILE)
        try:
            spam.process_target(uvfits_file,
                                allow_selfcal_skip=True,
                                add_freq_to_name=True)
            # If this process_target is success call
            # GADPU API setSuccessStatus for the current fits_id
            current_time_in_sec = time.time()
            current_date_timestamp = datetime.datetime.fromtimestamp(
                current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')
            success_update_data = {
                "set": {
                    "status": "checking",
                    "end_time": current_date_timestamp,
                    "comments": "processing done, checking"
                },
                "where": {
                    "imaging_id": imaging_id
                }
            }
            db_model.update_table(success_update_data, "imaginginput")
        except Exception as e:
            process_target_log.write("Error: " + str(e))
            # If this process_target is a failure call
            # GADPU API setFailedStatus for the current fits_id
            current_date_timestamp = datetime.datetime.fromtimestamp(
                time.time()).strftime('%Y-%m-%d %H:%M:%S')
            success_update_data = {
                "set": {
                    "status": "failed",
                    "end_time": current_date_timestamp,
                },
                "where": {
                    "imaging_id": imaging_id
                }
            }
            db_model.update_table(success_update_data, "imaginginput")
            print("Error: spam.process_tagret Failed " + uvfits_file)
Example #2
    def combining_lsb_usb(self):
        print("Started Stage4: ")
        cycle_id = self.pipeline_configuration()["cycle_id"]

        spam.set_aips_userid(11)
        dbutils = DBUtils()
        fileutils = FileUtils()
        status = "failed"
        comments = "combine usb lsb failed"
        # fileutils = FileUtils()
        # query conditions for projectobsno
        columnKeys = {"project_id", "base_path", "observation_no"}
        whereKeys = {"isghb": True, "cycle_id": 16, "status": "now"}

        project_data = dbutils.select_from_table("projectobsno", columnKeys,
                                                 whereKeys, 0)
        print(project_data)

        project_id = project_data[1]
        base_path = project_data[2]
        obsno = project_data[0]

        start_time = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        # print(project_id, base_path, obsno)

        # query conditions for calibrationinput
        columnKeys = {"calibration_id", "uvfits_file"}
        whereKeys = {"project_id": project_id, "status": "now"}
        calibration_data = dbutils.select_from_table("calibrationinput",
                                                     columnKeys, whereKeys,
                                                     None)
        print(calibration_data)

        if not calibration_data:
            print(
                "All the data is processed ... OR \n ==> please check the DB for combinelsbusb"
            )
            spam.exit()

        print(len(calibration_data))
        if len(calibration_data) < 2:
            status = "success"
            comments = "single file combinelsbusb not required"
            usb_lsb_file = glob.glob(base_path + "/PRECALIB/*GMRT*.UVFITS")
            if calibration_data:
                projectobsno_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                print("Updating the projectobsno ... ")
                dbutils.update_table(projectobsno_update_data, "projectobsno")
                if calibration_data:
                    calibration_update_data = {
                        "set": {
                            "status": status,
                            "comments": comments
                        },
                        "where": {
                            "calibration_id":
                            calibration_data[0]["calibration_id"]
                        }
                    }
                    dbutils.update_table(calibration_update_data,
                                         "calibrationinput")
            else:
                projectobsno_update_data = {
                    "set": {
                        "status": "failed",
                        "comments": "Failed Error: Something went wrong"
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                print("Updating the projectobsno ... ")
                dbutils.update_table(projectobsno_update_data, "projectobsno")
        else:
            print("Values > 2")
            print("*************" + str(os.getcwd()))
            for each_uvfits in calibration_data:
                precalib_files = glob.glob(base_path + "/PRECALIB/*")
                lsb_list = glob.glob(base_path + "/PRECALIB/*_LL_*.UVFITS")
                usb_list = glob.glob(base_path + "/PRECALIB/*_RR_*.UVFITS")

                if len(lsb_list) == 0 or len(usb_list) == 0:
                    print(len(lsb_list), len(usb_list))
                    lsb_list = glob.glob(base_path + "/PRECALIB/*LSB*.UVFITS")
                    usb_list = glob.glob(base_path + "/PRECALIB/*USB*.UVFITS")

                projectobsno_update_data = {
                    "set": {
                        "status": "processing",
                        "comments": "combining_lsb_usb"
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                dbutils.update_table(projectobsno_update_data, "projectobsno")
                calibration_id = each_uvfits["calibration_id"]
                uvfits_file = each_uvfits["uvfits_file"]
                calibration_update_data = {
                    "set": {
                        "status": "processing",
                        "comments": "combining_lsb_usb",
                        "start_time": start_time
                    },
                    "where": {
                        "calibration_id": calibration_id
                    }
                }
                dbutils.update_table(calibration_update_data,
                                     "calibrationinput")
                print("lsb_list : " + str(len(lsb_list)))
                print("usb_list : " + str(len(usb_list)))
                status = "failed"
                comments = "combining lsb usb"
                if len(lsb_list) == len(usb_list):
                    print(">>>>>>COMBINE_LSB_USB<<<<<<<")
                    usb_list.sort()
                    lsb_list.sort()
                    print(usb_list)
                    print(lsb_list)
                    to_spam = list(zip(usb_list, lsb_list))
                    file_size = 0
                    print(to_spam)
                    for each_pair in to_spam:
                        print("-------------------------")
                        comb = each_pair[0].replace('USB', 'COMB')
                        data = each_pair, comb
                        print("++++++++++++++++" + comb)
                        currentTimeInSec = time.time()
                        fits_comb = comb.split('/')[-1]
                        check_comb_file = glob.glob("fits/" + fits_comb)
                        if not check_comb_file:
                            status, comments = fileutils.run_spam_combine_usb_lsb(
                                data)
                            if status == 'success':
                                status = str(cycle_id)
                            print("__________________________________________")
                            print(glob.glob("fits/*"))
                            print("__________________________________________")
                            end_time = datetime.datetime.fromtimestamp(
                                time.time()).strftime('%Y-%m-%d %H:%M:%S')
                            if not comments:
                                comments = "done combining usb lsb"
                            if glob.glob(comb):
                                file_size = fileutils.calculalate_file_sizse_in_MB(
                                    comb)
                            imaginginput_data = {
                                "project_id": project_id,
                                "calibration_id": calibration_id,
                                "calibrated_fits_file": os.path.basename(comb),
                                "file_size": file_size,
                                "start_time": start_time,
                                "end_time": end_time,
                                "comments": "c16 " + comments,
                            }
                            dbutils.insert_into_table("imaginginput",
                                                      imagininput_data,
                                                      "imaging_id")
                            print("-------------------------")
                end_time = datetime.datetime.fromtimestamp(
                    time.time()).strftime('%Y-%m-%d %H:%M:%S')
                calibration_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments,
                        "start_time": start_time,
                        "end_time": end_time
                    },
                    "where": {
                        "calibration_id": calibration_id
                    }
                }
                dbutils.update_table(calibration_update_data,
                                     "calibrationinput")

                projectobsno_update_data = {
                    "set": {
                        "status": status,
                        "comments": comments
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                dbutils.update_table(projectobsno_update_data, "projectobsno")
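
The pairing step in Example #2 depends on glob returning USB and LSB file lists that sort into matching order, with the combined output name derived from the USB file name. A standalone sketch of that pairing logic under the same assumptions follows; pair_usb_lsb and the directory layout are illustrative only:

import glob

def pair_usb_lsb(precalib_dir):
    # Illustrative helper: pair upper- and lower-sideband UVFITS files by
    # sorted order and derive the combined (COMB) output name from each
    # USB file name, mirroring the loop in Example #2.
    usb_list = sorted(glob.glob(precalib_dir + "/*USB*.UVFITS"))
    lsb_list = sorted(glob.glob(precalib_dir + "/*LSB*.UVFITS"))
    pairs = []
    for usb, lsb in zip(usb_list, lsb_list):
        comb = usb.replace("USB", "COMB")
        pairs.append(((usb, lsb), comb))
    return pairs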
Example #3
    def pre_calibration_targets(self):
        print("Started Stage3: ")
        spam.set_aips_userid(33)
        dbutils = DBUtils()
        fileutils = FileUtils()

        cycle_id = self.pipeline_configuration()["cycle_id"]

        # while True:
        #     columnKeys = {"calibration_id"}
        #     whereData = {"comments": "c15", "status": "copying"}
        #     uncalibrated_uvfits = dbutils.select_from_table("calibrationinput", columnKeys, whereData, 0)
        #     if not uncalibrated_uvfits:
        #         break
        #     print("Waiting for bandwidth ... ")
        #     time.sleep(50)

        columnKeys = {"calibration_id", "project_id", "uvfits_file"}
        whereData = {"status": str(cycle_id)}
        uncalibrated_uvfits = dbutils.select_from_table(
            "calibrationinput", columnKeys, whereData, 0)

        if not uncalibrated_uvfits:
            print(
                "All for the data is processed ... please check the DB for pre_calib"
            )
            spam.exit()

        calibration_id = uncalibrated_uvfits[0]
        project_id = uncalibrated_uvfits[1]
        uvfits_file = uncalibrated_uvfits[2]

        columnKeys = {"base_path", "observation_no"}
        whereData = {"project_id": project_id, "cycle_id": int(cycle_id)}
        project_details = dbutils.select_from_table("projectobsno", columnKeys,
                                                    whereData, 0)

        base_path = project_details[1]
        observation_no = project_details[0]

        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')

        projectobsno_update_data = {
            "set": {
                "status": "processing",
                "comments": ("running precalibrate_target, calibration_id = " +
                             str(calibration_id)),
            },
            "where": {
                "project_id": project_id
            }
        }

        calibration_update_data = {
            "set": {
                "status": "copying",
                "start_time": current_date_timestamp
            },
            "where": {
                "calibration_id": calibration_id
            }
        }

        dbutils.update_table(projectobsno_update_data, "projectobsno")
        dbutils.update_table(calibration_update_data, "calibrationinput")

        UVFITS_FILE_NAME = uvfits_file
        UVFITS_BASE_DIR = base_path
        is_fits_dir = os.getcwd().split('/')
        print(is_fits_dir)
        SPAM_WORKING_DIR = os.getcwd()
        print(SPAM_WORKING_DIR)
        # for num in range(1, 3):
        #     SPAM_THREAD_DIR += "/" + is_fits_dir[num]
        # if 'fits' not in is_fits_dir:
        #     SPAM_THREAD_DIR = os.getcwd()
        SPAM_WORKING_DIR = os.getcwd() + "/fits/"
        print(SPAM_WORKING_DIR, UVFITS_BASE_DIR, UVFITS_FILE_NAME)
        UVFITS_FILE_PATH = UVFITS_BASE_DIR + "/" + UVFITS_FILE_NAME
        print(UVFITS_FILE_PATH)
        print(SPAM_WORKING_DIR)
        fileutils.copy_files(UVFITS_FILE_PATH, SPAM_WORKING_DIR)
        print("Copying done ==> Moving to pre_cal_target")
        current_date_timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        calibration_update_data = {
            "set": {
                "status": "processing",
                "start_time": current_date_timestamp
            },
            "where": {
                "calibration_id": calibration_id
            }
        }
        dbutils.update_table(calibration_update_data, "calibrationinput")

        fileutils.run_spam_precalibration_stage(UVFITS_BASE_DIR,
                                                SPAM_WORKING_DIR,
                                                UVFITS_FILE_NAME,
                                                observation_no)
        current_time_in_sec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')

        check_status_file = glob.glob(base_path + "/PRECALIB/failed_log.txt")
        comments = "failed"
        if check_status_file:
            status = "failed"
            comments = str(open(check_status_file[0], 'r').read())
        else:
            status = "success"
            comments = "precalibrate_target done, calibration_id = " + str(
                calibration_id)

        projectobsno_update_data = {
            "set": {
                "status": status,
                "comments": comments
            },
            "where": {
                "project_id": project_id
            }
        }

        calibration_update_data = {
            "set": {
                "status": status,
                "end_time": current_date_timestamp,
                "comments": comments
            },
            "where": {
                "calibration_id": calibration_id
            }
        }

        dbutils.update_table(projectobsno_update_data, "projectobsno")
        dbutils.update_table(calibration_update_data, "calibrationinput")
Example #4
    def running_gvfits(self):
        print("Started Stage2: ")

        cycle_id = self.pipeline_configuration()["cycle_id"]

        dbutils = DBUtils()
        spamutils = SpamUtils()
        fileutils = FileUtils()

        currentTimeInSec = time.time()
        current_date_timestamp = datetime.datetime.fromtimestamp(
            currentTimeInSec).strftime('%Y-%m-%d %H:%M:%S')

        columnKeys = {"project_id", "ltacomb_file", "lta_id"}
        whereKeys = {"comments": str(cycle_id)}

        lta_details = dbutils.select_from_table("ltadetails", columnKeys,
                                                whereKeys, None)

        print(lta_details)

        for each_lta in lta_details:
            print(each_lta)
            project_id = each_lta["project_id"]
            # project_id = each_lta[0]
            lta_file = each_lta["ltacomb_file"]
            # lta_file = each_lta[1]
            # lta_id = each_lta[2]
            lta_id = each_lta["lta_id"]
            columnKeys = {"base_path"}
            whereKeys = {"project_id": project_id}
            lta_path_details = dbutils.select_test_table(
                "projectobsno", columnKeys, whereKeys, 0)
            print(lta_path_details)
            base_path = lta_path_details[0]
            print(base_path)
            uvfits_file = lta_file + '.UVFITS'
            base_lta = base_path + '/' + lta_file
            if os.path.exists(base_lta):
                base_uvfits = base_path + '/' + uvfits_file
                gvfits_status = spamutils.run_gvfits(base_lta, base_uvfits)
                if os.path.exists(base_uvfits):
                    status = str(cycle_id)
                else:
                    status = "failed"

                calibration_data = {
                    "project_id": project_id,
                    "lta_id": lta_id,
                    "uvfits_file": uvfits_file,
                    "status": status,
                    "comments": gvfits_status,
                    "uvfits_size": fileutils.calculalate_file_sizse_in_MB(base_uvfits),
                    "start_time": current_date_timestamp
                }

                dbutils.insert_into_table("calibrationinput", calibration_data,
                                          "calibration_id")

            else:
                project_update_data = {
                    "set": {
                        "status": "failed",
                        "comments": "ltacomb failed"
                    },
                    "where": {
                        "project_id": project_id
                    }
                }
                lta_details_update_data = {
                    "set": {
                        "status": "failed",
                        "comments": "ltacomb failed"
                    },
                    "where": {
                        "lta_id": lta_id
                    }
                }
                dbutils.update_table(project_update_data, "projectobsno")
                dbutils.update_table(lta_details_update_data, "ltadetails")
Example #5
def __main__():
    dbutils = DBUtils()
    fileutils = FileUtils()

    columnKeys = {"calibration_id", "project_id", "uvfits_file"}
    whereData = {"comments": "c17", "status": "success"}
    uncalibrated_uvfits = dbutils.select_from_table("calibrationinput",
                                                    columnKeys, whereData, 0)

    calibration_id = uncalibrated_uvfits[0]
    project_id = uncalibrated_uvfits[1]
    uvfits_file = uncalibrated_uvfits[2]

    columnKeys = {"file_path", "observation_no"}
    whereData = {"project_id": project_id, "cycle_id": 17}
    project_details = dbutils.select_from_table("projectobsno", columnKeys,
                                                whereData, 0)

    base_path = project_details[1]
    observation_no = project_details[0]

    current_time_in_sec = time.time()
    current_date_timestamp = datetime.datetime.fromtimestamp(
        current_time_in_sec).strftime('%Y-%m-%d %H:%M:%S')

    projectobsno_update_data = {
        "set": {
            "status": "processing",
            "comments": ("running precalibrate_target, calibration_id = " +
                         str(calibration_id)),
        },
        "where": {
            "project_id": project_id,
            "status": "unprocessed"
        }
    }

    calibration_update_data = {
        "set": {
            "status": "processing",
            "start_time": current_date_timestamp
        },
        "where": {
            "calibration_id": calibration_id,
            "status": "success"
        }
    }

    dbutils.update_table(projectobsno_update_data, "projectobsno")
    dbutils.update_table(calibration_update_data, "calibrationinput")

    EACH_UVFITS_FILE = base_path + '/' + uvfits_file

    UVFITS_BASE_DIR = base_path + "/"
    if not check_pipeline_flag(UVFITS_BASE_DIR):
        set_flag(UVFITS_BASE_DIR, PRECAL_PROCESSING)
        is_fits_dir = os.getcwd().split('/')
        SPAM_WORKING_DIR = os.getcwd()
        SPAM_THREAD_DIR = ""
        for num in range(1, 4):
            SPAM_THREAD_DIR += "/" + is_fits_dir[num]
        if 'fits' not in is_fits_dir:
            SPAM_THREAD_DIR = os.getcwd()
            SPAM_WORKING_DIR = os.getcwd() + "/fits/"
        copy_files(EACH_UVFITS_FILE, SPAM_WORKING_DIR)
        print "Copying done ==> Moving to pre_cal_target"

        run_spam_precalibration_stage(UVFITS_BASE_DIR, SPAM_WORKING_DIR,
                                      uvfits_file)

        check_status_file = glob.glob(base_path + "/PRECALIB/failed_log.txt")

        if check_status_file:
            status = "failed"
        else:
            status = "success"

        projectobsno_update_data = {
            "set": {
                "status": status,
                "comments": ("precalibrate_target " + status +
                             ", calibration_id = " + str(calibration_id)),
            },
            "where": {
                "project_id": project_id
            }
        }

        calibration_update_data = {
            "set": {
                "status": status,
                "end_time": current_date_timestamp
            },
            "where": {
                "calibration_id": calibration_id
            }
        }

        dbutils.update_table(projectobsno_update_data, "projectobsno")
        dbutils.update_table(calibration_update_data, "calibrationinput")

        if status == 'success':
            calibrated_uvfits_list = glob.glob(base_path +
                                               '/PRECALIB/*.UVFITS')
            if calibrated_uvfits_list:
                for each_uvfits in calibrated_uvfits_list:
                    imaging_data = {
                        "project_id": project_id,
                        "calibration_id": calibration_id,
                        "calibrated_fits_file": os.path.basename(each_uvfits),
                        "status": "unprocessed",
                        "comments": "c17",
                        "file_size": fileutils.calculalate_file_sizse_in_MB(each_uvfits)
                    }
                    dbutils.insert_into_table("imaginginput", imaging_data,
                                              "imaging_id")

        delete_dir(SPAM_THREAD_DIR)
        spam.exit()
Example #6
    def stage6(self):
        """
        Post processing, extract RMS from the summary file for corresponding PBCOR_IFTS file
        extract BMIN, BMAJ, BPA from the HISTORY keyword from the PBCOR FITS header and put
        KEY Value pairs in the same PBCOR FITS header using astropy.io
        :return:
        """
        dbutils = DBUtils()
        fits_images_list = glob.glob('/GARUDATA/IMAGING24/CYCLE24/*/FITS_IMAGE/*PBCOR*.FITS')
        # fits_images_list = ['/GARUDATA/IMAGING17/CYCLE17/4575/17_024_04NOV09/FITS_IMAGE/A3376-W.GMRT325.SP2B.PBCOR.FITS']
        # fits_images_list = ['/GARUDATA/IMAGING17/CYCLE17/4572/17_024_03NOV09/FITS_IMAGE/A3376-E.GMRT325.SP2B.PBCOR.FITS']
        counter = 1
        for fits_file in fits_images_list:
            counter += 1
            # fits_file = '/GARUDATA/IMAGING19/CYCLE19/5164/19_085_27DEC10/FITS_IMAGE/1445+099.GMRT325.SP2B.PBCOR.FITS'

            fits_dir = os.path.dirname(fits_file)

            fits_table = fits.open(fits_file)
            fits_header = fits_table[0].header

            data_keys = {}

            object = os.path.basename(fits_file).split('.')[0]
            # object = "A3376_E"

            # summary_file = glob.glob(fits_dir + '/spam_A3376-E*.summary')
            summary_file = glob.glob(fits_dir + '/spam_' + object + '*.summary')
            rms = "NA"
            for each_summary in summary_file:
                if 'DONE' in open(each_summary).read():
                    # print each_summary
                    lines = open(each_summary).readlines()
                    rms = lines[-1].split(' ')[-5]
                    # print rms
                else:
                    # print "Needs to be deleted"
                    if rms == "NA":
                        log_file = each_summary.replace('summary', 'log')
                        lines = open(log_file).readlines()
                        rms = lines[-2].split(' ')[0]
            if rms == "NA":
                rms = 2.11

            print(fits_file)

            if "CYCLE24" in fits_file:
                dir_path = os.path.dirname(os.path.dirname(fits_file))
                observation_no = glob.glob(dir_path+"/*.obslog")[0].split('/')[-1].split('.')[0]
                print(observation_no)
            else:
                observation_no = fits_file.split('/')[4]

            columnKeys = {
                "project_id"
            }

            if observation_no == 'MIXCYCLE':
                mix_path = fits_file.split('/')[4]+'/'+fits_file.split('/')[5]
                mix_sql = "select observation_no from projectobsno where file_path like '%"+mix_path+"%'"
                mix_cycle_data = dbutils.select_gadpu_query(mix_sql)
                observation_no = mix_cycle_data[0][0]

            whereKeys = {
                "observation_no": observation_no
            }
            print(columnKeys, whereKeys)
            project_id = dbutils.select_from_table("projectobsno", columnKeys, whereKeys, 0)
            print(project_id)
            if project_id:
                columnKeys = {
                    "das_scangroup_id",
                    "ltacomb_file"
                }
                whereKeys = {
                    "project_id": project_id,
                }
                result = dbutils.select_from_table("ltadetails", columnKeys, whereKeys, 0)

                print(result)
                print(result[1])

                sql = "select ant_mask, band_mask, calcode, chan_width, corr_version, g.observation_no, " \
                      "date_obs, ddec, dec_2000, dec_date, dra, lsr_vel1, lsr_vel2, lta_time, " \
                      "net_sign1, net_sign2, net_sign3, net_sign4, num_chans, num_pols, onsrc_time, " \
                      "proj_code, qual, ra_2000, ra_date, rest_freq1, rest_freq2, sky_freq1, " \
                      "sky_freq2, source, sta_time from das.scangroup g inner join " \
                      "das.scans s on s.scangroup_id = g.scangroup_id " \
                      "where s.scangroup_id = " + str(result[1]) + " AND source like '" + object + "'"
                scangroup_data = dbutils.select_scangroup_query(sql)

                # print(scangroup_data)

                if scangroup_data:
                    data_keys = {
                        "ANTMASK": scangroup_data[0],
                        "BANDMASK": scangroup_data[1],
                        "CALCODE": scangroup_data[2],
                        "CHANWIDT": scangroup_data[3],
                        "CORRVERS": scangroup_data[4],
                        "OBSNUM": scangroup_data[5],
                        "DATEOBS": str(scangroup_data[6]),
                        "DDEC": scangroup_data[7],
                        "DEC2000": scangroup_data[8],
                        "DECDATE": scangroup_data[9],
                        "DRA": scangroup_data[10],
                        "LSRVEL1": scangroup_data[11],
                        "LSRVEL2": scangroup_data[12],
                        "LTATIME": scangroup_data[13],
                        "NETSIGN1": scangroup_data[14],
                        "NETSIGN2": scangroup_data[15],
                        "NETSIGN3": scangroup_data[16],
                        "NETSIGN4": scangroup_data[17],
                        "NUMCHANS": scangroup_data[18],
                        "NUMPOLS": scangroup_data[19],
                        "ONSRCTIM": scangroup_data[20],
                        "PROJCODE": scangroup_data[21],
                        "QUAL": scangroup_data[22],
                        "RA2000": scangroup_data[23],
                        "RADATE": scangroup_data[24],
                        "RESTFRE1": scangroup_data[25],
                        "RESTFRE2": scangroup_data[26],
                        "SKYFREQ1": scangroup_data[27],
                        "SKYFREQ2": scangroup_data[28],
                        "STATIME": scangroup_data[30],
                        "RMS": float(rms)
                    }

                    # print(data_keys)
                    filename = fits_file
                    hdulist = fits.open(filename, mode='update')
                    header = hdulist[0].header

                    try:
                        history = str(fits_header["HISTORY"]).strip().split(' ')
                        nh = [x for x in history if x]
                        data_keys["BMAJ"] = float(nh[3])
                        data_keys["BMIN"] = float(nh[5])
                        data_keys["BPA"] = float(nh[7])
                        print(history)
                        try:
                            del header['HISTORY']
                        except Exception as exh:
                            print(exh)
                    except Exception as ex:
                        print(ex)
                    try:
                        if fits_header["BMAJ"]:
                            data_keys["BMAJ"] = float(fits_header["BMAJ"])
                            data_keys["BMIN"] = float(fits_header["BMIN "])
                            data_keys["BPA"] = float(fits_header["BPA"])
                    except Exception as ex:
                        print(ex)

                    pbcor_file = os.path.basename(fits_file).split('.')[0]
                    spam_log = glob.glob(os.path.dirname(fits_file) + "/spam_" + pbcor_file + "*.log")
                    spam_log.sort()
                    spam_log = spam_log[0]
                    reading_spam_log = open(spam_log).readlines()
                    bmaj_bmin = []
                    if len(reading_spam_log) > 0:
                        for each_line in reading_spam_log:
                            if "BMAJ" in each_line:
                                bmaj_bmin.append(each_line)
                        bmaj_bmin_data = (bmaj_bmin[0].replace('   ', ' ')
                                          .replace('  ', ' ')
                                          .replace('= ', '=')
                                          .split(' '))
                        print(bmaj_bmin_data)
                        for each_key in bmaj_bmin_data:
                            if "BMAJ" in each_key:
                                data_keys["BMAJ"] = float(each_key.split('=')[1])
                            if "BMIN" in each_key:
                                data_keys["BMIN"] = float(each_key.split('=')[1])
                            if "BPA" in each_key:
                                data_keys["BPA"] = float(each_key.split('/')[0].split('=')[1])
                        print( data_keys["BMAJ"], data_keys["BMIN"], data_keys["BPA"])
                        try:
                            for key, value in data_keys.items():
                                print(key, value)
                                header.set(key, value)
                            hdulist.flush()
                        except Exception as ex:
                            print(ex)
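
Example #6 writes the collected key/value pairs into the PBCOR FITS header with astropy.io.fits opened in update mode. Below is a minimal self-contained sketch of that header-update pattern; the file path, keyword values and the write_header_keys helper are placeholders for illustration:

from astropy.io import fits

def write_header_keys(fits_path, data_keys):
    # Open the FITS file in update mode, set each key/value pair on the
    # primary header, then flush the changes back to disk.
    hdulist = fits.open(fits_path, mode='update')
    header = hdulist[0].header
    for key, value in data_keys.items():
        header.set(key, value)
    hdulist.flush()
    hdulist.close()

# Placeholder usage with made-up values:
write_header_keys("example.PBCOR.FITS", {"RMS": 2.11, "BMAJ": 0.0025})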