Example #1
    def makedirs(self, filelist, endpoint):
        """ get list of dirs to make

            Parameters
            ----------
            filelist : list
                The list of files

            endpoint : str
                The endpoint to use
        """
        print "makedirs: filelist=", filelist
        dirlist = miscutils.get_list_directories(filelist)
        print "makedirs: dirlist=", dirlist
        for path in sorted(
                dirlist):  # should already be sorted, but just in case
            miscutils.fwdebug(0, 'GLOBUS_ONLINE_DEBUG',
                              'endpoint=%s, path=%s' % (endpoint, path))
            try:
                _ = self.goclient.endpoint_mkdir(endpoint, path)
            except Exception as e:
                if 'already exists' not in str(e):
                    raise
                else:
                    miscutils.fwdebug(
                        2, 'GLOBUS_ONLINE_DEBUG',
                        'already exists endpoint=%s, path=%s' %
                        (endpoint, path))
Example #2
    def insert_dictionary_2Db(self, dbh, query, dictionary, debug=None):
        """Execute a query and return a cursor to a query
        :param query: string with query statement
        :param dictionary: dictionary to use in query
        :param debug: verbosity
    
        """

        try:
            cur = dbh.cursor()
            cur.execute(query, dictionary)
            miscutils.fwdebug(6, 'DTSSNMANIFEST_DEBUG',
                              "dictionary into database " % (dictionary))
            success = 1
        #except cx_Oracle.IntegrityError as e:
        except cx_Oracle.DatabaseError as e:
            error, = e.args
            if error.code == 955:
                print('Table already exists')
            elif error.code == 1031:
                print("Insufficient privileges")
            print(error.code)
            print(error.message)
            print(error.context)
            success = 0
            raise
        return success
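
A minimal usage sketch for the insert helper above; the connection string and the `manifest` instance are placeholders, not taken from the original source (the MANIFEST_EXPOSURE insert itself appears in a later example):

import cx_Oracle  # same driver the snippet above relies on

dbh = cx_Oracle.connect('user/password@dbhost/service')  # hypothetical connection
sql = ("insert into MANIFEST_EXPOSURE (CAMSYM, EXPNUM, NITE, BAND, EXPTIME) "
       "values (:CAMSYM, :EXPNUM, :NITE, :BAND, :EXPTIME)")
row = {'CAMSYM': 'D', 'EXPNUM': 123456, 'NITE': '20131129', 'BAND': 'g', 'EXPTIME': 175.0}

# manifest: instance of the class above that defines insert_dictionary_2Db
if manifest.insert_dictionary_2Db(dbh, sql, row):
    dbh.commit()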
def run_post_steps(filelist, config, fmobj):
    """ Performs steps necessary for each file """

    #print config.keys()
    firstname = miscutils.parse_fullname(filelist[0], miscutils.CU_PARSE_FILENAME)
    filetype = dtsutils.determine_filetype(firstname)
    miscutils.fwdebug(3, "DTSFILEHANDLER_DEBUG", "filetype = %s" % filetype)

    # dynamically load class specific to filetype
    classkey = 'dts_filetype_class_' + filetype
    filetype_class = miscutils.dynamically_load_class(config[classkey])
    valdict = fmutils.get_config_vals({}, config, filetype_class.requested_config_vals())
    ftobj = filetype_class(dbh=fmobj, config=valdict)

    for fullname in filelist:
        filename = miscutils.parse_fullname(fullname, miscutils.CU_PARSE_FILENAME)
        miscutils.fwdebug(3, "DTSFILEHANDLER_DEBUG", "filename = %s" % filename)

        if dtsutils.check_already_registered(filename, fmobj):
            ftobj.post_steps(fullname)  # e.g., Rasicam

            # if success
            fmobj.commit()
        else:
            print "File must already be registered in order to run post_steps"
Example #6
def get_config_vals(archive_info, config, keylist):
    """ Search given dicts for specific values

        Parameters
        ----------
        archive_info : dict
            Dictionary of the archive data

        config : dict
            Dictionary of the config data

        keylist : dict
            Dictionary of the keys to be searched for and whether they are required or optional

        Returns
        -------
        dict
            The search results
    """
    info = {}
    for k, st in keylist.items():
        if archive_info is not None and k in archive_info:
            info[k] = archive_info[k]
        elif config is not None and k in config:
            info[k] = config[k]
        elif st.lower() == 'req':
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', '******************************')
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'keylist = %s' % keylist)
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'archive_info = %s' % archive_info)
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'config = %s' % config)
            miscutils.fwdie('Error: Could not find required key (%s)' % k, 1, 2)
    return info
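
A small usage sketch of the lookup order implemented above; the dictionaries and values are made up, archive_info takes precedence over config, and only missing 'req' keys are fatal:

archive_info = {'root': '/archive/prod'}
config = {'root': '/ignored/when/archive_info/has/it', 'endpoint': 'desar2'}

vals = get_config_vals(archive_info, config,
                       {'root': 'req', 'endpoint': 'req', 'campaign': 'opt'})
# vals == {'root': '/archive/prod', 'endpoint': 'desar2'}; the optional 'campaign' is simply omitted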
Example #7
    def start_transfer(self, filelist):
        """ activate src endpoint

            Parameters
            ----------
            filelist : list
                List of files to transfer

            Returns
            -------
            str
                The Globus task id
        """
        src_endpoint = self.srcinfo['endpoint']
        result = self.endpoint_activate(src_endpoint)

        # activate dst endpoint
        dst_endpoint = self.dstinfo['endpoint']
        result = self.endpoint_activate(dst_endpoint)

        # create dst directories
        self.makedirs([finfo['dst'] for finfo in filelist.values()],
                      dst_endpoint)

        ##    Get a submission id:
        _, _, result = self.goclient.transfer_submission_id()
        self.submission_id = result["value"]
        miscutils.fwdebug(1, 'GLOBUS_ONLINE_DEBUG',
                          "\tsubmission id = %s" % self.submission_id)

        ##    Create a transfer object:
        #t = Transfer(submission_id, src_endpoint, dst_endpoint, notify_on_succeeded = False,
        #  notify_on_failed = False, notify_on_inactive= False, deadline='2m')
        deadline = datetime.utcnow() + timedelta(minutes=30)
        t = Transfer(self.submission_id,
                     src_endpoint,
                     dst_endpoint,
                     notify_on_succeeded=False,
                     notify_on_failed=False,
                     notify_on_inactive=False,
                     deadline=deadline)
        #print t.as_data()

        # add files to transfer
        for _, finfo in filelist.items():
            sfile = finfo['src']
            dfile = finfo['dst']
            miscutils.fwdebug(2, 'GLOBUS_ONLINE_DEBUG',
                              "\tadding to transfer %s = %s" % (sfile, dfile))
            if sfile.endswith('/'):
                t.add_item(sfile, dfile,
                           recursive=True)  # error if true for file
            else:
                t.add_item(sfile, dfile)

        # start transfer
        _, _, result = self.goclient.transfer(t)
        task_id = result["task_id"]
        miscutils.fwdebug(1, 'GLOBUS_ONLINE_DEBUG', "\ttask id = %s" % task_id)

        return task_id
Example #8
def get_config_vals(archive_info, config, keylist):
    """ Search given dicts for specific values """
    info = {}
    for k, st in keylist.items():
        if archive_info is not None and k in archive_info:
            info[k] = archive_info[k]
        elif config is not None and k in config:
            info[k] = config[k]
        elif st.lower() == 'req':
            miscutils.fwdebug(0, 'FMUTILS_DEBUG',
                              '******************************')
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', f'keylist = {keylist}')
            miscutils.fwdebug(0, 'FMUTILS_DEBUG',
                              f'archive_info = {archive_info}')
            miscutils.fwdebug(0, 'FMUTILS_DEBUG', f'config = {config}')
            miscutils.fwdie(f'Error: Could not find required key ({k})', 1, 2)
    return info
Example #9
    def get_archive_transfer_info(self):
        """ Return contents of ops_archive_transfer and ops_archive_transfer_val tables as a dictionary

            Returns
            -------
            dict
        """

        archive_transfer = collections.OrderedDict()
        sql = "select src,dst,transfer from ops_archive_transfer"
        curs = self.cursor()
        curs.execute(sql)
        for row in curs:
            if row[0] not in archive_transfer:
                archive_transfer[row[0]] = collections.OrderedDict()
            archive_transfer[row[0]][row[1]] = collections.OrderedDict(
                {'transfer': row[2]})

        sql = "select src,dst,key,val from ops_archive_transfer_val"
        curs = self.cursor()
        curs.execute(sql)
        for row in curs:
            if row[0] not in archive_transfer:
                miscutils.fwdebug(
                    0, 'DESDBI_DEBUG',
                    f"WARNING: found info in ops_archive_transfer_val for src archive {row[0]} which is not in ops_archive_transfer"
                )
                archive_transfer[row[0]] = collections.OrderedDict()
            if row[1] not in archive_transfer[row[0]]:
                miscutils.fwdebug(
                    0, 'DESDBI_DEBUG',
                    f"WARNING: found info in ops_archive_transfer_val for dst archive {row[1]} which is not in ops_archive_transfer"
                )
                archive_transfer[row[0]][row[1]] = collections.OrderedDict()
            archive_transfer[row[0]][row[1]][row[2]] = row[3]
        return archive_transfer
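
The method above returns a nested OrderedDict keyed first by source archive, then by destination archive; a hedged sketch of its shape and use (archive names and extra keys are illustrative only):

info = dbh.get_archive_transfer_info()  # dbh: a connected handle exposing the method above
# info might look like:
# {'decarchive': {'desar2home': {'transfer': 'some.transfer.Class',
#                                'chunk_size': '100'}}}
transfer_impl = info['decarchive']['desar2home']['transfer']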
Example #10
    def __del__(self):
        """
        Do the semaphore signal and close DB connection
        """
        if self.slot is not None and str(self.slot) != 'None':
            try:
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - BEG - signal")
                curs = self.dbh.cursor()
                curs.callproc("SEM_SIGNAL", [self.semname, self.slot])
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - END - signal")
                self.dbh.basic_update_row(
                    'SEMINFO',
                    {'release_time': self.dbh.get_current_timestamp_str()},
                    {'id': self.id})
                self.dbh.commit()
            except Exception as e:
                miscutils.fwdebug(0, "SEMAPHORE_DEBUG",
                                  "SEM - ERROR - " + str(e))

        self.slot = None
        self.dbh.close()
Example #11
    def read_json_single(self, json_file, allMandatoryExposureKeys, debug):
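        """ Parse a single SN manifest json file and build a dictionary of per-exposure lists """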

        miscutils.fwdebug(3, 'DTSSNMANIFEST_DEBUG',
                          "reading file %s" % json_file)

        allExposures = []

        my_header = {}
        numseq = {}
        all_exposures = dict()
        with open(json_file) as my_json:
            for line in my_json:
                all_data = json.loads(line)

                for key, value in all_data.items():
                    errorFlag = 0
                    if key == 'header':
                        #read the values for the header (date and set_type are here)
                        my_head = value

                        allExposures.append(str(my_head['set_type']))
                        allExposures.append(str(my_head['createdAt']))

                    if key == 'exposures':
                        #read all the exposures that were taken for the set_type in header
                        my_header = value

                        #Total Number of exposures in manifest file
                        tot_exposures = len(my_header)

                        if tot_exposures is None or tot_exposures == 0:
                            raise Exception(
                                "0 SN exposures parsed from json file")

                        for i in range(tot_exposures):
                            numseq = my_header[i]['sequence']
                            mytime = my_header[i]['acttime']
                            if mytime > 10 and numseq['seqnum'] == 2:
                                first_expnum = my_header[i]['expid']

                            #Validate that acttime has a meaningful value. If acttime = 0.0, then it's a bad exposure. Skip it from the manifest.
                            if mytime == 0.0:
                                continue

                            try:
                                for mandatoryExposureKey in (
                                        allMandatoryExposureKeys):
                                    miscutils.fwdebug(
                                        3, 'DTSSNMANIFEST_DEBUG',
                                        "mandatory key %s" %
                                        mandatoryExposureKey)
                                    key = str(mandatoryExposureKey)

                                    if my_header[i][mandatoryExposureKey]:
                                        miscutils.fwdebug(
                                            3, 'DTSSNMANIFEST_DEBUG',
                                            "mandatory key '%s' found %s" %
                                            (mandatoryExposureKey, my_header[i]
                                             [mandatoryExposureKey]))
                                        miscutils.fwdebug(
                                            6, 'DTSSNMANIFEST_DEBUG',
                                            "allExposures in for: %s" %
                                            allExposures)

                                        try:
                                            if key == 'acttime':
                                                key = 'EXPTIME'
                                                all_exposures[key].append(
                                                    my_header[i]
                                                    [mandatoryExposureKey])
                                            elif key == 'filter':
                                                key = 'BAND'
                                                all_exposures[key].append(
                                                    str(my_header[i][
                                                        mandatoryExposureKey]))
                                            elif key == 'expid':
                                                key = 'EXPNUM'
                                                all_exposures[key].append(
                                                    my_header[i]
                                                    [mandatoryExposureKey])
                                            else:
                                                all_exposures[key].append(
                                                    my_header[i]
                                                    [mandatoryExposureKey])
                                        except KeyError:
                                            all_exposures[key] = [
                                                my_header[i]
                                                [mandatoryExposureKey]
                                            ]

                            except KeyError:
                                miscutils.fwdebug(
                                    0, 'DTSSNMANIFEST_DEBUG',
                                    "keyError: missing key %s in json entity: %s "
                                    % (mandatoryExposureKey, line))
                                errorFlag = 1
                                raise

                        timestamp = all_exposures['date'][0]
                        nite = dtsutils.convert_UTCstr_to_nite(timestamp)

                        # get field by parsing set_type
                        #print 'xxxx', my_head['set_type']
                        myfield = my_head['set_type']
                        if len(myfield) > 5:
                            newfield = myfield[:5]
                        else:
                            newfield = myfield

                        camsym = 'D'  # no way to currently tell CAMSYM/INSTRUME from manifest file

                        if not newfield.startswith('SN-'):
                            raise ValueError(
                                "Invalid field (%s).  set_type = '%s'" %
                                (newfield, my_head['set_type']))

                        #if json_file contains a path or compression extension, then cut it to only the filename
                        jsonFile = miscutils.parse_fullname(
                            json_file, miscutils.CU_PARSE_FILENAME)

                        if tot_exposures is None or tot_exposures == 0:
                            raise Exception(
                                "0 SN exposures parsed from json file")

                        for i in range(tot_exposures):
                            if my_header[i]['acttime'] == 0.0:
                                continue
                            if i == 0:
                                #all_exposures['FIELD'] = [str(my_head['set_type'])]
                                all_exposures['FIELD'] = [newfield]
                                all_exposures['CREATEDAT'] = [
                                    str(my_head['createdAt'])
                                ]
                                all_exposures['MANIFEST_FILENAME'] = [jsonFile]
                                all_exposures['NITE'] = [nite]
                                all_exposures['SEQNUM'] = [1]
                                all_exposures['CAMSYM'] = [camsym]
                            else:
                                #all_exposures['FIELD'].append(str(my_head['set_type']))
                                all_exposures['FIELD'].append(newfield)
                                all_exposures['CREATEDAT'].append(
                                    str(my_head['createdAt']))
                                all_exposures['MANIFEST_FILENAME'].append(
                                    jsonFile)
                                all_exposures['NITE'].append(nite)
                                all_exposures['SEQNUM'].append(1)
                                all_exposures['CAMSYM'].append(camsym)

        # Add the manifest filename value in the dictionary
        #all_exposures['MANIFEST_FILENAME'] = json_file
        miscutils.fwdebug(6, 'DTSSNMANIFEST_DEBUG',
                          "allExposures " % (all_exposures))

        return all_exposures
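
For reference, a sketch of the single-line json this parser expects; each line carries a 'header' block and an 'exposures' list, and the field values below are illustrative, not real data:

import json

line = json.dumps({
    "header": {"set_type": "SN-X3 hex", "createdAt": "2013-11-30T01:23:45"},
    "exposures": [
        {"expid": 123456, "object": "SN-X3", "date": "2013-11-30T01:24:00",
         "acttime": 10.0, "filter": "g",
         "sequence": {"seqnum": 1, "seqtot": 5}},
    ],
})
all_data = json.loads(line)  # the 'header' and 'exposures' keys drive the branches above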
Example #12
    def blocking_transfer(self, filelist):
        """ Do a blocking transfer

            Parameters
            ----------
            filelist : list
                The files to transfer

            Returns
            -------
            dict
                The transfer results
        """
        task_id = self.start_transfer(filelist)
        miscutils.fwdebug(0, 'GLOBUS_ONLINE_DEBUG', "\ttask_id = %s" % task_id)

        # wait for transfer to complete
        ##    Check the progress of the new transfer:
        MAX_NUM_CHKS = 600
        MAX_NUM_RETRY = 5
        CHK_INTERVAL_SECS = 30

        status = "ACTIVE"
        chk_cnt = 0
        retry_cnt = 0
        errstrs = {}
        while status == "ACTIVE" and chk_cnt < MAX_NUM_CHKS and retry_cnt < MAX_NUM_RETRY:
            miscutils.fwdebug(1, 'GLOBUS_ONLINE_DEBUG',
                              "Checking transfer task status")
            status, reason, result = self.goclient.task(task_id)
            status = result["status"]
            miscutils.fwdebug(1, 'GLOBUS_ONLINE_DEBUG',
                              "\tstatus = %s" % result["status"])
            miscutils.fwdebug(1, 'GLOBUS_ONLINE_DEBUG',
                              "\tfiles = %s" % result["files"])
            miscutils.fwdebug(
                1, 'GLOBUS_ONLINE_DEBUG',
                "\tsubtasks_total = %s" % result["subtasks_total"])
            miscutils.fwdebug(
                1, 'GLOBUS_ONLINE_DEBUG',
                "\tsubtasks_failed = %s" % result["subtasks_failed"])
            miscutils.fwdebug(
                1, 'GLOBUS_ONLINE_DEBUG',
                "\tsubtasks_retrying = %s" % result["subtasks_retrying"])
            miscutils.fwdebug(
                1, 'GLOBUS_ONLINE_DEBUG',
                "\tnice_status_details = %s" % result["nice_status_details"])

            if status == "ACTIVE":
                chk_cnt += 1

                # cannot call task_successful_transfers on task that is still active
                if result["nice_status_details"] is not None and result[
                        "nice_status_details"].startswith("Error"):
                    # only print error message once
                    if result["nice_status_details"] not in errstrs:
                        print result["nice_status_details"]
                        errstrs[result["nice_status_details"]] = True

                    if result['subtasks_retrying'] != 0:
                        retry_cnt += 1
                    else:
                        miscutils.fwdebug(0, 'GLOBUS_ONLINE_DEBUG',
                                          "\tstatus = %s" % result["status"])
                        miscutils.fwdebug(0, 'GLOBUS_ONLINE_DEBUG',
                                          "\tfiles = %s" % result["files"])
                        miscutils.fwdebug(
                            0, 'GLOBUS_ONLINE_DEBUG',
                            "\tsubtasks_total = %s" % result["subtasks_total"])
                        miscutils.fwdebug(
                            0, 'GLOBUS_ONLINE_DEBUG',
                            "\tsubtasks_failed = %s" %
                            result["subtasks_failed"])
                        miscutils.fwdebug(
                            0, 'GLOBUS_ONLINE_DEBUG',
                            "\tsubtasks_retrying = %s" %
                            result["subtasks_retrying"])
                        miscutils.fwdebug(
                            0, 'GLOBUS_ONLINE_DEBUG',
                            "\tnice_status_details = %s" %
                            result["nice_status_details"])
                        miscutils.fwdie("Error while transfering files",
                                        fmdefs.FM_EXIT_FAILURE)

                if chk_cnt < MAX_NUM_CHKS and retry_cnt < MAX_NUM_RETRY:
                    time.sleep(CHK_INTERVAL_SECS)

        self.goclient.task_cancel(task_id)

        status, reason, successes = self.goclient.task_successful_transfers(
            task_id)
        print status
        print reason
        print "----------\n\n\n"
        print "subtask_list=", result
        print "\n\n\n"

        transresults = copy.deepcopy(filelist)
        if len(successes['DATA']) != len(filelist):
            for fname, _ in transresults.items():
                transresults[fname]['err'] = 'problems transferring file'
        return transresults
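
A hedged usage sketch for the transfer methods above; filelist maps a key (here the filename) to src/dst paths, and the paths and the `mover` instance are placeholders:

filelist = {
    'DECam_00123456.fits.fz': {
        'src': '/delivery/DECam_00123456.fits.fz',
        'dst': '/archive/raw/DECam_00123456.fits.fz',
    },
}
results = mover.blocking_transfer(filelist)  # mover: instance of the Globus transfer class above
for fname, finfo in results.items():
    if 'err' in finfo:
        print("problem transferring %s: %s" % (fname, finfo['err']))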
Example #13
def handle_bad_file(config, notify_file, delivery_fullname, dbh,
                    filetype, metadata, disk_info, prov, msg):
    """ Perform steps required by any bad file """

    dbh.rollback()  # undo any db changes for this file

    miscutils.fwdebug(0, "DTSFILEHANDLER_DEBUG", "delivery_fullname = %s" % delivery_fullname)
    miscutils.fwdebug(0, "DTSFILEHANDLER_DEBUG", "filetype = %s" % filetype)
    miscutils.fwdebug(0, "DTSFILEHANDLER_DEBUG", "msg = %s" % msg)
    miscutils.fwdebug(0, "DTSFILEHANDLER_DEBUG", "metadata = %s" % metadata)
    miscutils.fwdebug(0, "DTSFILEHANDLER_DEBUG", "disk_info = %s" % disk_info)
    miscutils.fwdebug(0, "DTSFILEHANDLER_DEBUG", "prov = %s" % prov)

    today = datetime.now()
    datepath = "%04d/%02d" % (today.year, today.month)

    # where is file now
    if disk_info is None:
        orig_fullname = delivery_fullname
    else:
        orig_fullname = disk_info['fullname']

    # create a unique name for the file in the "bad file" area
    # contains relative path for storing in DB
    uniq_fullname = "%s/%s.%s" % (datepath, os.path.basename(orig_fullname), 
                                  today.strftime("%Y%m%d%H%M%S%f")[:-3])

    # absolute path
    destbad = "%s/%s" % (config['bad_file_dir'], uniq_fullname)

    if os.path.exists(destbad):
        miscutils.fwdebug(0, "DTSFILEHANDLER_DEBUG", "WARNING: bad file already exists (%s)" % destbad)
        os.remove(destbad)
    
    # make directory in "bad file" area and move file there
    miscutils.coremakedirs(os.path.dirname(destbad))
    shutil.move(orig_fullname, destbad) 

    # save information in db about bad file
    row = {}

    # save extra metadata if it exists
    if metadata is not None:
        badcols = dbh.get_column_names('DTS_BAD_FILE')

        for c in badcols:
            if c in metadata:
                row[c] = metadata[c]

    row['task_id'] = config['dts_task_id']
    t = os.path.getmtime(notify_file)
    row['delivery_date'] = datetime.fromtimestamp(t)
    row['orig_filename'] = os.path.basename(orig_fullname)
    row['uniq_fullname'] = uniq_fullname
    row['rejected_date'] = today
    row['rejected_msg'] = msg
    row['filesize'] = os.path.getsize(destbad)
    if filetype is not None:
        row['filetype'] = filetype


    dbh.basic_insert_row('DTS_BAD_FILE', row)
    dbh.commit()
    os.unlink(notify_file)
Example #14
    def __init__(self,
                 semname,
                 task_id,
                 desfile=None,
                 section=None,
                 connection=None,
                 threaded=False):
        """
        Create the DB connection and do the semaphore wait.
        """
        self.desfile = desfile
        self.section = section
        self.semname = semname
        self.task_id = task_id
        self.slot = None

        miscutils.fwdebug(3, f"SEMAPHORE_DEBUG",
                          "SEM - INFO - semname {self.semname}")
        miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                          "SEM - BEG - db-specific imports")
        import despydmdb.desdmdbi as desdmdbi
        import cx_Oracle
        miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                          "SEM - END - db-specific imports")

        miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - BEG - db connection")
        self.dbh = desdmdbi.DesDmDbi(desfile, section, threaded=threaded)
        miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - END - db connection")

        curs = self.dbh.cursor()

        sql = f"select count(*) from semlock where name={self.dbh.get_named_bind_string('name')}"
        curs.execute(sql, {'name': semname})
        num_slots = curs.fetchone()[0]
        if num_slots == 0:
            miscutils.fwdebug(0, "SEMAPHORE_DEBUG",
                              f"SEM - ERROR - no locks with name {semname}")
            raise ValueError(f'No locks with name {semname}')

        self.id = self.dbh.get_seq_next_value('seminfo_seq')
        self.dbh.basic_insert_row(
            'seminfo', {
                'id': self.id,
                'name': self.semname,
                'request_time': self.dbh.get_current_timestamp_str(),
                'task_id': task_id,
                'num_slots': num_slots
            })
        self.dbh.commit()

        self.slot = curs.var(cx_Oracle.NUMBER)
        done = False
        trycnt = 1
        while not done and trycnt <= MAXTRIES:
            try:
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - BEG - wait")
                curs.callproc("SEM_WAIT", [self.semname, self.slot])
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - END - wait")
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                                  f"SEM - INFO - slot {self.slot}")
                done = True
                if not self.dbh.is_oracle():
                    self.dbh.commit()  # test database must commit
            except Exception as e:
                miscutils.fwdebug(0, "SEMAPHORE_DEBUG",
                                  f"SEM - ERROR - {str(e)}")

                time.sleep(TRYINTERVAL)

                miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                                  "SEM - BEG - remake db connection")
                self.dbh = desdmdbi.DesDmDbi(desfile, section)
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG",
                                  "SEM - END - remake db connection")

                curs = self.dbh.cursor()
                self.slot = curs.var(cx_Oracle.NUMBER)

                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - BEG - dequeue")
                curs.callproc("SEM_DEQUEUE", [self.semname, self.slot])
                miscutils.fwdebug(3, "SEMAPHORE_DEBUG", "SEM - END - dequeue")

                trycnt += 1

        if done:
            # need different connection to do the commit of the grant info as commit will release lock
            dbh2 = desdmdbi.DesDmDbi(desfile, section, connection)
            dbh2.basic_update_row(
                'SEMINFO', {
                    'grant_time': dbh2.get_current_timestamp_str(),
                    'num_requests': trycnt,
                    'slot': self.slot
                }, {'id': self.id})
            dbh2.commit()
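
A hedged sketch of how the semaphore class above might be used; the class name, argument values, and the work function are assumptions, not confirmed by the source:

sem = DBSemaphore(semname='TRANSFER_SEM', task_id=42,
                  desfile='~/.desservices.ini', section='db-desoper')
try:
    run_transfer_job()  # placeholder for work that must hold a semaphore slot
finally:
    del sem  # __del__ signals the semaphore and closes the DB connection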
Example #16
    def load_artifact_gtt(self, filelist):
        """ insert file artifact information into global temp table

            Parameters
            ----------
            filelist : list
                List of dictionaries, one for each file, giving the file
                metadata to store.

            Returns
            -------
            str
                The name of the temp table
        """
        # filelist is list of file dictionaries
        # returns artifact GTT table name

        parsemask = miscutils.CU_PARSE_FILENAME | miscutils.CU_PARSE_EXTENSION

        # make sure table is empty before loading it
        self.empty_gtt(dmdbdefs.DB_GTT_ARTIFACT)

        colmap = [
            dmdbdefs.DB_COL_FILENAME, dmdbdefs.DB_COL_COMPRESSION,
            dmdbdefs.DB_COL_MD5SUM, dmdbdefs.DB_COL_FILESIZE
        ]
        rows = []
        for _file in filelist:
            miscutils.fwdebug(3, 'DESDBI_DEBUG', f"file = {_file}")
            fname = None
            comp = None
            md5sum = None
            filesize = None
            if dmdbdefs.DB_COL_FILENAME in _file or dmdbdefs.DB_COL_FILENAME.lower(
            ) in _file:
                if dmdbdefs.DB_COL_COMPRESSION in _file:
                    fname = _file[dmdbdefs.DB_COL_FILENAME]
                    comp = _file[dmdbdefs.DB_COL_COMPRESSION]
                elif dmdbdefs.DB_COL_COMPRESSION.lower() in _file:
                    fname = _file[dmdbdefs.DB_COL_FILENAME.lower()]
                    comp = _file[dmdbdefs.DB_COL_COMPRESSION.lower()]
                elif dmdbdefs.DB_COL_FILENAME in _file:
                    (fname, comp) = miscutils.parse_fullname(
                        _file[dmdbdefs.DB_COL_FILENAME], parsemask)
                else:
                    (fname, comp) = miscutils.parse_fullname(
                        _file[dmdbdefs.DB_COL_FILENAME.lower()], parsemask)
                miscutils.fwdebug(3, 'DESDBI_DEBUG',
                                  f"fname={fname}, comp={comp}")
            elif 'fullname' in _file:
                (fname,
                 comp) = miscutils.parse_fullname(_file['fullname'], parsemask)
                miscutils.fwdebug(
                    3, 'DESDBI_DEBUG',
                    f"parse_fullname: fname={fname}, comp={comp}")
            else:
                miscutils.fwdebug(3, 'DESDBI_DEBUG', f"file={_file}")
                raise ValueError(f"Invalid entry filelist({_file})")

            if dmdbdefs.DB_COL_FILESIZE in _file:
                filesize = _file[dmdbdefs.DB_COL_FILESIZE]
            elif dmdbdefs.DB_COL_FILESIZE.lower() in _file:
                filesize = _file[dmdbdefs.DB_COL_FILESIZE.lower()]

            if dmdbdefs.DB_COL_MD5SUM in _file:
                md5sum = _file[dmdbdefs.DB_COL_MD5SUM]
            elif dmdbdefs.DB_COL_MD5SUM.lower() in _file:
                md5sum = _file[dmdbdefs.DB_COL_MD5SUM.lower()]

            miscutils.fwdebug(
                3, 'DESDBI_DEBUG',
                f"row: fname={fname}, comp={comp}, filesize={filesize}, md5sum={md5sum}"
            )
            rows.append({
                dmdbdefs.DB_COL_FILENAME: fname,
                dmdbdefs.DB_COL_COMPRESSION: comp,
                dmdbdefs.DB_COL_FILESIZE: filesize,
                dmdbdefs.DB_COL_MD5SUM: md5sum
            })

        self.insert_many(dmdbdefs.DB_GTT_ARTIFACT, colmap, rows)
        return dmdbdefs.DB_GTT_ARTIFACT
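
A sketch of the filelist entries the method above accepts: each file gives either explicit column values or a single 'fullname'; the column-name constants are assumed to resolve to the usual FILENAME/COMPRESSION/FILESIZE/MD5SUM strings, and all values are made up:

filelist = [
    {'FILENAME': 'DECam_00123456.fits', 'COMPRESSION': '.fz',
     'FILESIZE': 123456789, 'MD5SUM': '0123456789abcdef0123456789abcdef'},
    {'fullname': '/archive/raw/DECam_00123457.fits.fz'},  # parsed into filename + compression
]
gtt_table = dbh.load_artifact_gtt(filelist)  # dbh: handle providing the method above
# gtt_table now names the loaded global temp table (dmdbdefs.DB_GTT_ARTIFACT)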
Example #17
def handle_file(notify_file, delivery_fullname, config, filemgmt, task_id):
    """ Performs steps necessary for each file """

    filetype = None
    metadata = None
    disk_info = None
    prov = None

    # read values from notify file
    notifydict = read_notify_file(notify_file)

    # use dts_md5sum from notify_file
    dts_md5sum = None
    if 'md5sum' in notifydict:
        dts_md5sum = notifydict['md5sum']

    print "%s: dts md5sum = %s" % (delivery_fullname, dts_md5sum)

    #print config.keys()
    try: 
        filename = miscutils.parse_fullname(delivery_fullname, miscutils.CU_PARSE_FILENAME)
        miscutils.fwdebug(0, "DTSFILEHANDLER_DEBUG", "filename = %s" % filename)

        if not os.path.exists(delivery_fullname):
            print "Warning:  delivered file does not exist:"
            print "\tnotification file: %s" % notify_file
            print "\tdelivered file: %s" % delivery_fullname
            print "\tRemoving notification file and continuing"
            os.unlink(notify_file)
            return
            
        if dts_md5sum is not None:
            starttime = datetime.now()
            fileinfo_before_move = diskutils.get_single_file_disk_info(delivery_fullname, True, None)
            endtime = datetime.now()
            print "%s: md5sum before move %s (%0.2f secs)" % (delivery_fullname, 
                                                              fileinfo_before_move['md5sum'], 
                                                              endtime-starttime)
            if fileinfo_before_move['md5sum'] != dts_md5sum:
                print "%s: dts md5sum = %s" % (delivery_fullname, dts_md5sum)
                print "%s: py  md5sum = %s" % (delivery_fullname, fileinfo_before_move['md5sum'])
                raise Exception("Error: md5sum in delivery dir not the same as DTS-provided md5sum")

        if not dtsutils.check_already_registered(filename, filemgmt):
            filetype = dtsutils.determine_filetype(filename)
            miscutils.fwdebug(3, "DTSFILEHANDLER_DEBUG", "filetype = %s" % filetype)

            # dynamically load class specific to filetype
            classkey = 'dts_filetype_class_' + filetype
            filetype_class = miscutils.dynamically_load_class(config[classkey]) 
            valDict = fmutils.get_config_vals({}, config, filetype_class.requested_config_vals())
            filetypeObj = filetype_class(dbh=filemgmt, config=valDict)

            metadata = filetypeObj.get_metadata(delivery_fullname)
            metadata['filename'] = filename 
            metadata['filetype'] = filetype
            miscutils.fwdebug(3, "DTSFILEHANDLER_DEBUG", 'len(metadata) = %s' % len(metadata))
            miscutils.fwdebug(6, "DTSFILEHANDLER_DEBUG", 'metadata = %s' % metadata)

            filetypeObj.check_valid(delivery_fullname)  # should raise exception if not valid
            prov = generate_provenance(delivery_fullname)

            miscutils.fwdebug(3, "DTSFILEHANDLER_DEBUG", 'archive_rel_path = %s' % archive_rel_path)
            miscutils.fwdebug(3, "DTSFILEHANDLER_DEBUG", 'prov = %s' % prov)

            archive_rel_path = filetypeObj.get_archive_path(delivery_fullname)
            disk_info = move_file_to_archive(config, delivery_fullname, archive_rel_path, dts_md5sum)

            save_data_db(filemgmt, task_id, {'file_1': metadata}, disk_info, prov)

            filetypeObj.post_steps(disk_info['fullname'])  # e.g., Rasicam

            # if success
            filemgmt.commit()
            os.unlink(notify_file)
        else:
            handle_bad_file(config, notify_file, delivery_fullname, filemgmt, 
                            filetype, metadata, disk_info, prov, 
                            "already registered")
    except Exception as err:
        (type, value, trback) = sys.exc_info()
        print "******************************"
        print "Error: %s" % delivery_fullname
        traceback.print_exception(type, value, trback, file=sys.stdout)
        print "******************************"

        handle_bad_file(config, notify_file, delivery_fullname, filemgmt, 
                        filetype, metadata, disk_info, prov, 
                        "Exception: %s" % err)
    except SystemExit:   # Wrappers code calls exit if cannot find header value
        handle_bad_file(config, notify_file, delivery_fullname, filemgmt, 
                        filetype, metadata, disk_info, prov, 
                        "SystemExit: Probably missing header value.  Check log for error msg.")
        
    filemgmt.commit()
Example #18
    def ingestAllExposures(self, allExposures, dbh, debug=None):
        """
        Ingest all the exposures in EXPOSURES_IN_MANIFEST and SN_SUBMIT_REQUEST
    
        #If SEQNUM is > 1, then it means the same field was taken again during the same night.
        #This will only happens in rare occasion when the sequence had to be aborted before it finished.
    
        :param allExposures: Dictionary with the following keys:
        [set_type,createdAt,expid,object,date,acttime,filter]
    
        """

        newdictionary = {}
        for key in [
                'CAMSYM', 'EXPNUM', 'MANIFEST_FILENAME', 'FIELD', 'BAND',
                'EXPTIME', 'NITE'
        ]:
            newdictionary[key] = allExposures[key]

        #print "xx",allExposures
        dict2Ingest = {}
        for i in range(len(allExposures['EXPTIME'])):
            for key in newdictionary.keys():
                keytoingest = key
                valuetoingest = newdictionary[key][i]
                dict2Ingest[keytoingest] = valuetoingest
            miscutils.fwdebug(6, 'DTSSNMANIFEST_DEBUG',
                              "dict2Ingest %s " % (dict2Ingest))
            try:
                sqlInsertExposuresInManifest = """insert into MANIFEST_EXPOSURE (CAMSYM,EXPNUM,MANIFEST_FILENAME,NITE,FIELD,BAND,EXPTIME) VALUES 
                                    (:CAMSYM, :EXPNUM, :MANIFEST_FILENAME, :NITE, :FIELD, :BAND, :EXPTIME)"""

                miscutils.fwdebug(
                    3, 'DTSSNMANIFEST_DEBUG',
                    "sqlInsertExposuresInManifest %s " %
                    (sqlInsertExposuresInManifest))
                success = self.insert_dictionary_2Db(
                    dbh,
                    sqlInsertExposuresInManifest,
                    dict2Ingest,
                    debug=debug)

                if success:
                    miscutils.fwdebug(
                        1, 'DTSSNMANIFEST_DEBUG',
                        "Insert into EXPOSURES_IN_MANIFEST was successful..")

            except cx_Oracle.IntegrityError as e:
                print "error while inserting into EXPOSURES_IN_MANIFEST: ", e
                raise

        ########################################################################################
        #
        #Fix first expnum. First expnum is the first exposure for each filter set. For a field
        #with a single exposure in a filter, first_expnum = expnum.
        #For more than one exposure per band/field, first_expnum = first exposure of the set.
        #

        #Determine the list index where exptime == 10 (the pointing exposure)
        allexps = allExposures['EXPTIME']
        miscutils.fwdebug(6, 'DTSSNMANIFEST_DEBUG',
                          "all exptimes %s" % (allexps))
        for i, val in enumerate(allexps):
            if val == 10.0:
                pointingIndex = i

        miscutils.fwdebug(3, 'DTSSNMANIFEST_DEBUG',
                          "pointing Exposure index is %s" % pointingIndex)

        #find where there are repeated bands, but exclude the band where the exptime = 10
        ListofBands = allExposures['BAND']
        miscutils.fwdebug(3, 'DTSSNMANIFEST_DEBUG',
                          "listOfaBands...%s" % ListofBands)

        bandsDicIndexes = defaultdict(list)

        for i, item in enumerate(allExposures['BAND']):
            bandsDicIndexes[item].append(i)

        #Loop through the dictionary for all the bands, count how many bands there are, and get the values
        #from this dictionary (indexes into the lists) to select the elements from all the other dictionaries.
        #The following elements are needed: 'FIELD','NITE','BAND','MANIFEST_FILENAME','FIRST_EXPNUM','SEQNUM'
        ind2use = []
        flag_first = 0
        for ind, b in enumerate(ListofBands):
            miscutils.fwdebug(6, 'DTSSNMANIFEST_DEBUG',
                              "indexes %s %s" % (bandsDicIndexes[b], ind))
            if ind == pointingIndex:
                miscutils.fwdebug(
                    3, 'DTSSNMANIFEST_DEBUG',
                    "found pointing index %s %s " % (ind, pointingIndex))
                continue
            else:
                #for two exposures where one of them is the pointing
                if len(bandsDicIndexes[b]) <= 2 and ind == pointingIndex + 1:
                    ind2use.append((max(bandsDicIndexes[b])))
                    #print "the index", ind2use
                #if there are more than 2 exposures (generally for deep fields)
                elif len(bandsDicIndexes[b]) > 2 and ind == pointingIndex + 1:
                    ind2use.append(bandsDicIndexes[b][ind])
                    flag_first = 1
                elif len(bandsDicIndexes[b]) == 1:
                    ind2use.append(bandsDicIndexes[b][0])
                elif len(bandsDicIndexes[b]
                         ) == 2 and ind != pointingIndex and flag_first == 0:
                    ind2use.append(min(bandsDicIndexes[b]))
                    flag_first = 1
            if flag_first:
                break

        #construct the dictionary with only the elements that need to go into the DB.
        #To do this use the ind2use indexes extracted from the list above.
        newDic = {}
        for index in ind2use:
            #print index
            newDic['FIELD'] = allExposures['FIELD'][index]
            newDic['NITE'] = allExposures['NITE'][index]
            newDic['BAND'] = allExposures['BAND'][index]
            newDic['MANIFEST_FILENAME'] = allExposures['MANIFEST_FILENAME'][
                index]
            newDic['FIRST_EXPNUM'] = allExposures['EXPNUM'][index]
            newDic['SEQNUM'] = allExposures['SEQNUM'][index]
            miscutils.fwdebug(6, 'DTSSNMANIFEST_DEBUG',
                              "index=%s, newDic=%s" % (index, newDic))

            #Ingest into the database each of them
            try:
                sqlInsertSNSubmitRequest = """insert into SN_SUBMIT_REQUEST (FIELD,NITE,BAND,MANIFEST_FILENAME,FIRST_EXPNUM,SEQNUM) VALUES 
                                            (:FIELD, :NITE, :BAND, :MANIFEST_FILENAME, :FIRST_EXPNUM, :SEQNUM)"""

                miscutils.fwdebug(
                    3, 'DTSSNMANIFEST_DEBUG', "sqlInsertSNSubmitRequest = %s" %
                    sqlInsertSNSubmitRequest)

                success = self.insert_dictionary_2Db(dbh,
                                                     sqlInsertSNSubmitRequest,
                                                     newDic,
                                                     debug=debug)
                if success:
                    miscutils.fwdebug(
                        1, 'DTSSNMANIFEST_DEBUG',
                        "Insert into SN_SUBMIT_REQUEST was successful..")

            except cx_Oracle.IntegrityError as e:
                print "error while inserting into SN_SUBMIT_REQUEST: ", e
                raise
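
For reference, a sketch of the per-exposure dictionary of lists that read_json_single builds and ingestAllExposures consumes; the values are illustrative only, and `handler`/`dbh` stand in for the manifest class instance and database handle:

all_exposures = {
    'CAMSYM': ['D', 'D'],
    'EXPNUM': [123456, 123457],
    'MANIFEST_FILENAME': ['set_20131129.json', 'set_20131129.json'],
    'FIELD': ['SN-X3', 'SN-X3'],
    'BAND': ['g', 'g'],
    'EXPTIME': [10.0, 175.0],   # the 10.0 entry is the pointing exposure
    'NITE': ['20131129', '20131129'],
    'SEQNUM': [1, 1],
}
handler.ingestAllExposures(all_exposures, dbh)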