Example No. 1
def extract_retrieval(output_directory, ignore=None, *filenames):
    comment = """Data extraction from %s\nPressure levels are in Pa\n""" % (
        repr(filenames))
    for (key, (stem, _, _, sf, units, _)) in FIELD_LIST.items():
        comment += '%s (%s): from %s\n' % (stem, units, key)
    write = iasi_retrieval_fbf_writer(output_directory,
                                      comment,
                                      ignore=ignore or [])
    for filename in filenames:
        LOG.info('processing %s...' % filename)
        prod = iasi.open_product(filename)
        ancil = iasi.retrieval_read_ancillary(prod)
        write.write_levels(ancil)  # only writes for first file
        data = retrieval_record()
        for (key, (stem, _, _, sf, units, onelevel)) in FIELD_LIST.items():
            LOG.info("reading %s as %s..." % (key, stem))
            # read that field in for the whole file, noting that integrated quantities are single-level
            # note that INEGRATED_N2O is for real (dammit guys)
            # apply scaling factor
            field = iasi.retrieval_read(prod, key, single_level=onelevel)
            setattr(data, key, field.squeeze())
        data.lat, data.lon = iasi.retrieval_read_location(prod)
        data.line_number, data.field_number, data.detector_number = iasi.retrieval_sfd(
            prod)
        data.solzen, data.satzen, data.solaz, data.sataz = iasi.retrieval_read_orientation(
            prod)
        data.refTimeUsec, data.refTimeSec, data.refTimeDay, data.refTimeMonth, data.refTimeYear = datetime_shatter(
            list(iasi.retrieval_read_fov_times(prod)))
        LOG.debug("writing record...")
        write(data)
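A minimal call sketch for the routine above; the output directory and product paths are placeholders, not real filenames. Note that ignore sits before *filenames in the signature, so it must be filled explicitly (here with None) before product files can be passed positionally.

out_dir = 'iasi_retrieval_fbf'  # placeholder output directory
products = ['IASI_L2_product_A.nat', 'IASI_L2_product_B.nat']  # placeholder retrieval product paths
extract_retrieval(out_dir, None, *products)  # None fills 'ignore'; the products land in *filenames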
Example No. 2
def extract_sounding(output_directory, detector_number=None, as_scan_lines=True, use_cloud=False, cloud_mask_pathname=None, lines=None, iis_images=False, ignore=None, *filenames):
    """ iterate through all the records for a given detector for a series of files, writing them to flat binary
    """
    LOG.info("creating %s..." % output_directory)
    rec_num = 0
    if detector_number is None:
        detector_info = "all detectors"
    else:
        detector_info = "detector %d" % detector_number
    comment = """Data extraction from %s for %s""" % (repr(filenames), detector_info)
    write = None # delay creation until we have first file open

    for filename in filenames:

        filename = filename if '*' not in filename else glob.glob(filename)[0]  # expand a wildcard pattern to its first match
        LOG.info("processing %s..." % filename)

        cloud_dict = None
        has_clusters = False
        if use_cloud or cloud_mask_pathname:
            if not cloud_mask_pathname:
                dn,fn = os.path.split(filename)
                cn = os.path.join(dn,'mask_%s.pickle' % fn)
            else:
                cn = cloud_mask_pathname
                use_cloud = True
            LOG.debug("reading cloud data from %s" % cn)
            assert( os.path.exists(cn) )
            cloud_dict = cloud_mask_load(cn)
            has_clusters = _cloud_mask_has_clusters(cloud_dict) # check for extended information from par_maia

        if write is None:
            write = iasi_record_fbf_writer(output_directory, detector_number, comment, ignore=ignore or [], use_cloud=use_cloud, use_clusters=has_clusters)

        prod = iasi.open_product(filename)
        if not as_scan_lines:
            datagen = iasi.sounder_records(prod, detector_number, lines, cloud_dict = cloud_dict)
        else:
            datagen = iasi.sounder_scanlines(prod, detector_number, lines, cloud_dict = cloud_dict)
        if iis_images:
            tiles = iasi.imager_tiles(prod)
            LOG.debug("writing IIS tiles %s as one record" % str(tiles.GIrcImage.shape))
            write.write_iis(tiles)
        for record in datagen:
            LOG.debug(str(record))
            write( record )
            write.write_wavenumbers(record.wavenumbers) # only does it once
            rec_num += 1
            print('wrote %5d records..   \r' % rec_num, end='')
            sys.stdout.flush()
    print("\ndone!")
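A matching call sketch for extract_sounding; the paths are placeholders and the detector index is illustrative. Because every optional-looking parameter sits before *filenames, a direct call spells them out positionally before the product files (a command-line wrapper would normally fill these slots).

extract_sounding('iasi_sounder_fbf',           # output_directory (placeholder)
                 0,                            # detector_number: one detector index, or None for all
                 True,                         # as_scan_lines: whole scan lines rather than single records
                 False,                        # use_cloud
                 None,                         # cloud_mask_pathname
                 None,                         # lines
                 False,                        # iis_images
                 [],                           # ignore (passed through to the FBF writer)
                 'IASI_sounder_product_A.nat') # placeholder sounder product path(s)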
Example No. 3
def extract_retrieval(output_directory, ignore=None, *filenames):
    comment = """Data extraction from %s\nPressure levels are in Pa\n""" % repr(filenames)
    for (key, (stem, _, _, sf, units, _)) in FIELD_LIST.items():
        comment += '%s (%s): from %s\n' % (stem, units, key)
    write = iasi_retrieval_fbf_writer(output_directory, comment, ignore=ignore or [])
    for filename in filenames:
        LOG.info('processing %s...' % filename)
        prod = iasi.open_product(filename)
        ancil = iasi.retrieval_read_ancillary(prod)
        write.write_levels(ancil) # only writes for first file
        data = retrieval_record()
        for (key, (stem, _, _, sf, units, onelevel)) in FIELD_LIST.items():
            LOG.info("reading %s as %s..." % (key, stem))
            # read that field in for the whole file, noting that integrated quantities are single-level
            # note that INEGRATED_N2O is for real (dammit guys)
            # apply scaling factor
            field = iasi.retrieval_read(prod, key, single_level=onelevel)
            setattr(data, key, field.squeeze())
        data.lat, data.lon = iasi.retrieval_read_location(prod)
        data.line_number, data.field_number, data.detector_number = iasi.retrieval_sfd(prod)
        data.solzen, data.satzen, data.solaz, data.sataz = iasi.retrieval_read_orientation(prod)
        data.refTimeUsec, data.refTimeSec, data.refTimeDay, data.refTimeMonth, data.refTimeYear = datetime_shatter(list(iasi.retrieval_read_fov_times(prod)))
        LOG.debug("writing record...")
        write(data)
Example No. 4
def extract_sounding(output_directory,
                     detector_number=None,
                     as_scan_lines=True,
                     use_cloud=False,
                     cloud_mask_pathname=None,
                     lines=None,
                     iis_images=False,
                     ignore=None,
                     *filenames):
    """ iterate through all the records for a given detector for a series of files, writing them to flat binary
    """
    LOG.info("creating %s..." % output_directory)
    rec_num = 0
    if detector_number is None:
        detector_info = "all detectors"
    else:
        detector_info = "detector %d" % detector_number
    comment = """Data extraction from %s for %s""" % (repr(filenames),
                                                      detector_info)
    write = None  # delay creation until we have first file open

    for filename in filenames:

        filename = filename if '*' not in filename else glob.glob(filename)[0]  # expand a wildcard pattern to its first match
        LOG.info("processing %s..." % filename)

        cloud_dict = None
        has_clusters = False
        if use_cloud or cloud_mask_pathname:
            if not cloud_mask_pathname:
                dn, fn = os.path.split(filename)
                cn = os.path.join(dn, 'mask_%s.pickle' % fn)
            else:
                cn = cloud_mask_pathname
                use_cloud = True
            LOG.debug("reading cloud data from %s" % cn)
            assert (os.path.exists(cn))
            cloud_dict = cloud_mask_load(cn)
            has_clusters = _cloud_mask_has_clusters(
                cloud_dict)  # check for extended information from par_maia

        if write is None:
            write = iasi_record_fbf_writer(output_directory,
                                           detector_number,
                                           comment,
                                           ignore=ignore or [],
                                           use_cloud=use_cloud,
                                           use_clusters=has_clusters)

        prod = iasi.open_product(filename)
        if not as_scan_lines:
            datagen = iasi.sounder_records(prod,
                                           detector_number,
                                           lines,
                                           cloud_dict=cloud_dict)
        else:
            datagen = iasi.sounder_scanlines(prod,
                                             detector_number,
                                             lines,
                                             cloud_dict=cloud_dict)
        if iis_images:
            tiles = iasi.imager_tiles(prod)
            LOG.debug("writing IIS tiles %s as one record" %
                      str(tiles.GIrcImage.shape))
            write.write_iis(tiles)
        for record in datagen:
            LOG.debug(str(record))
            write(record)
            write.write_wavenumbers(record.wavenumbers)  # only does it once
            rec_num += 1
            print('wrote %5d records..   \r' % rec_num, end='')
            sys.stdout.flush()
    print("\ndone!")