Example 1
    def test_read(self):
        """
        Test reading of HDF5 files.
        """
        fn = tempfile.mktemp()
        d = Dataset(fn, 'w')
        tb = TargetBuffer(tags=['WI001'], name='White Island main vent',
                          position=(177.2, -37.5, 50),
                          position_error=(0.2, 0.2, 20),
                          description='Main vent in January 2017')
        d.register_tags(['WI001', 'MD01', 'measurement'])
        t = d.new(tb)
        ib = InstrumentBuffer(tags=['MD01'], sensor_id='F00975',
                              location='West rim',
                              no_bits=16, type='DOAS',
                              description='GeoNet permanent instrument')
        i = d.new(ib)
        rdtb = RawDataTypeBuffer(tags=['measurement'],
                                 name='1st round measurements',
                                 acquisition='stationary')
        rdt = d.new(rdtb)
        rb = RawDataBuffer(target=t, instrument=i, type=rdt,
                           d_var=np.zeros((1, 2048)), ind_var=np.arange(2048),
                           datetime=['2017-01-10T15:23:00'])
        d.new(rb)
        d.close()

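        # Re-open the file and check that the stored metadata round-trips.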
        d1 = Dataset.open(fn)
        r1 = d1.elements['RawData'][0]
        self.assertEqual(r1.target.name, 'White Island main vent')
        self.assertEqual(list(r1.instrument.tags)[0], 'MD01')
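The test above is the library's basic write/read round trip: create a Dataset in write mode, turn buffers into dataset elements with d.new, close the file, and re-open it with Dataset.open. A pared-down sketch of the same pattern (placeholder names; assuming the buffer classes accept this subset of keywords) looks like this:

import tempfile
import numpy as np

fn = tempfile.mktemp()
d = Dataset(fn, 'w')
t = d.new(TargetBuffer(name='Example target', position=(177.2, -37.5, 50)))
i = d.new(InstrumentBuffer(sensor_id='F00975', type='DOAS'))
rdt = d.new(RawDataTypeBuffer(name='example', acquisition='stationary'))
d.new(RawDataBuffer(target=t, instrument=i, type=rdt,
                    d_var=np.zeros((1, 2048)), ind_var=np.arange(2048),
                    datetime=['2017-01-10T15:23:00']))
d.close()

# Read it back and follow the reference from raw data to its target
d1 = Dataset.open(fn)
print(d1.elements['RawData'][0].target.name)  # 'Example target'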
Example 2
def main(datapath, outputpath, start, end, pg=True, deletefiles=False):
    msg = "Data path is: {}\n".format(datapath)
    msg += "Output path is: {}\n".format(outputpath)
    msg += "Start date: {}\n".format(start)
    msg += "End date: {}\n".format(end)
    logging.info(msg)
    dates = pd.date_range(start=start, end=end, freq='D')
    if pg:
        ndays = len(dates)
        bar = Bar('Processing', max=ndays)

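    # Process the range one day at a time, writing one HDF5 file per day.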
    for date in dates:
        if pg:
            bar.next()
        else:
            print(date)
        outputfile = 'MiniDOAS_{:d}{:02d}{:02d}.h5'.format(
            date.year, date.month, date.day)
        h5file = os.path.join(outputpath, outputfile)
        if True:  # the day's HDF5 file is always (re)built
            d = Dataset(h5file, 'w')

            # ToDo: get correct plume coordinates
            tb = TargetBuffer(name='White Island main plume',
                              target_id='WI001',
                              position=[177.18375770, -37.52170799, 321.0])
            t = d.new(tb)

            wpoptions = "{'Pixel316nm':479, 'TrimLower':30, 'LPFilterCount':3,"
            wpoptions += "'MinWindSpeed':3,'BrightEnough':500, 'BlueStep':5, "
            wpoptions += "'MinR2:0.8, 'MaxFitCoeffError':50.0, "
            wpoptions += "'InPlumeThresh':0.05, 'MinPlumeAngle':0.1, "
            wpoptions += "'MaxPlumeAngle':3.0, 'MinPlumeSect':0.4, "
            wpoptions += "'MaxPlumeSect':2.0, 'MeanPlumeCtrHeight':310, "
            wpoptions += "'SEMeanPlumeCtrHeight':0.442, "
            wpoptions += " 'MaxRangeSeperation':5000, 'MaxRangeToPlume':5000, "
            wpoptions += " 'MaxPlumeWidth':2600'MaxPlumeCentreAltitude':2000, "
            wpoptions += "'MaxAltSeperation':1000, 'MaxTimeDiff':30,"
            wpoptions += "'MinTriLensAngle':0.1745, 'MaxTriLensAngle':2.9671,"
            wpoptions += "'SEWindSpeed':0.20, 'WindMultiplier':1.24, "
            wpoptions += "'SEWindDir':0.174}"
            mb1 = MethodBuffer(name='WidPro v1.2',
                               description='Jscript wrapper for DOASIS',
                               settings=wpoptions)
            m1 = d.new(mb1)

            station_info = {}
            location_name = 'White Island North-East Point'
            station_info['WI301'] = {
                'files': {},
                'stationID': 'WI301',
                'stationLoc': location_name,
                'target': t,
                'bearing': 6.0214,
                'lon': 177.192979384,
                'lat': -37.5166903535,
                'elev': 49.0,
                'widpro_method': m1,
                'wp_station_id': 'NE'
            }

            station_info['WI302'] = {
                'files': {},
                'stationID': 'WI302',
                'stationLoc': 'White Island South Rim',
                'target': t,
                'bearing': 3.8223,
                'lon': 177.189013316,
                'lat': -37.5265334424,
                'elev': 96.0,
                'widpro_method': m1,
                'wp_station_id': 'SR'
            }

            for station in ['WI301', 'WI302']:

                # Find the raw data
                raw_data_filename = "{:s}_{:d}{:02d}{:02d}.zip"
                station_id = station_info[station]['wp_station_id']
                raw_data_filename = raw_data_filename.format(
                    station_id, date.year, date.month, date.day)
                raw_data_filepath = os.path.join(datapath, 'spectra',
                                                 station_id, raw_data_filename)
                if os.path.isfile(raw_data_filepath):
                    try:
                        with ZipFile(raw_data_filepath) as myzip:
                            myzip.extractall('/tmp')
                    except Exception:
                        msg = "ERROR 05: Can't unzip file {}"
                        logging.error(msg.format(raw_data_filepath))
                        raw_data_filepath = None
                    else:
                        raw_data_filename = raw_data_filename.replace(
                            '.zip', '.csv')
                        raw_data_filepath = os.path.join(
                            '/tmp', raw_data_filename)
                else:
                    logging.error(
                        "file {} does not exist".format(raw_data_filepath))
                    continue
                try:
                    if not is_file_OK(raw_data_filepath):
                        raw_data_filepath = None
                except Exception:
                    print(raw_data_filepath)
                    raise
                station_info[station]['files']['raw'] = raw_data_filepath

                # Find the concentration data
                monthdir = '{:d}-{:02d}'.format(date.year, date.month)
                spectra_filename = "{:s}_{:d}_{:02d}_{:02d}_Spectra.csv"
                spectra_filename = spectra_filename.format(
                    station_id, date.year, date.month, date.day)
                spectra_filepath = os.path.join(datapath, 'results', monthdir,
                                                spectra_filename)
                if not is_file_OK(spectra_filepath):
                    spectra_filepath = None

                station_info[station]['files']['spectra'] = spectra_filepath

                # Find the flux data
                flux_ah_filename = spectra_filename.replace(
                    'Spectra.csv', 'Scans.csv')
                flux_ah_filepath = os.path.join(datapath, 'results', monthdir,
                                                flux_ah_filename)
                if not is_file_OK(flux_ah_filepath):
                    flux_ah_filepath = None

                station_info[station]['files']['flux_ah'] = flux_ah_filepath

                flux_ch_filename = "XX_{:d}_{:02d}_{:02d}_Combined.csv"
                flux_ch_filename = flux_ch_filename.format(
                    date.year, date.month, date.day)
                flux_ch_filepath = os.path.join(datapath, 'results', monthdir,
                                                flux_ch_filename)
                if not is_file_OK(flux_ch_filepath):
                    flux_ch_filepath = None

                station_info[station]['files']['flux_ch'] = flux_ch_filepath

                fits_flux_ah, fits_flux_ch = FITS_download(date, station)
                station_info[station]['files']['fits_flux_ah'] = fits_flux_ah
                station_info[station]['files']['fits_flux_ch'] = fits_flux_ch

                try:
                    read_single_station(d, station_info[station], date)
                except MiniDoasException as e:
                    logging.error(str(e))
                # Remove the temporary CSV extracted from the zip archive
                fn = os.path.join('/tmp',
                                  raw_data_filename.replace('.zip', '.csv'))
                if os.path.isfile(fn):
                    os.remove(fn)

            # Wind data
            windd_dir = os.path.join(datapath, 'wind', 'direction')
            winds_dir = os.path.join(datapath, 'wind', 'speed')
            sub_dir = '{:02d}-{:02d}'.format(date.year - 2000, date.month)
            winds_filename = '{:d}{:02d}{:02d}_WS_00.txt'.format(
                date.year, date.month, date.day)
            windd_filename = winds_filename.replace('WS', 'WD')
            winds_filepath = os.path.join(winds_dir, sub_dir, winds_filename)
            windd_filepath = os.path.join(windd_dir, sub_dir, windd_filename)

            if is_file_OK(winds_filepath) and is_file_OK(windd_filepath):
                # Read in the raw wind data; this is currently
                # not needed to reproduce flux estimates so it's
                # just stored for reference
                e = d.read(
                    {
                        'direction': windd_filepath,
                        'speed': winds_filepath
                    },
                    ftype='minidoas-wind',
                    timeshift=13)
                gfb = e['GasFlowBuffer']
                d.new(gfb)

            d.close()
        try:
            verify_flux(h5file, 1.)
        except MDOASException as e:
            msg = str(e)
            logging.error(msg)

        if deletefiles:
            if h5file is not None and os.path.isfile(h5file):
                os.remove(h5file)
            for station in ['WI301', 'WI302']:
                files = [
                    station_info[station]['files']['raw'],
                    station_info[station]['files']['fits_flux_ah'],
                    station_info[station]['files']['fits_flux_ch']
                ]
                for _f in files:
                    if _f is not None and os.path.isfile(_f):
                        os.remove(_f)
    if pg:
        bar.finish()
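For reference, main only needs the data and output directories plus a date range that pandas can parse; a hypothetical invocation (the paths are placeholders) might look like:

main('/data/minidoas', '/data/output',
     start='2016-11-01', end='2016-11-03',
     pg=False, deletefiles=True)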
Example 3
    def test_readall(self):
        """
        Produce a complete HDF5 file for 1 day of
        MiniDOAS analysis at one station.
        """
        d = Dataset(tempfile.mktemp(), 'w')

        # ToDo: get correct plume coordinates
        tb = TargetBuffer(name='White Island main plume',
                          target_id='WI001',
                          position=[177.18375770, -37.52170799, 321.0])
        t = d.new(tb)

        wpoptions = "{'Pixel316nm':479, 'TrimLower':30, 'LPFilterCount':3,"
        wpoptions += "'MinWindSpeed':3, 'BrightEnough':500, 'BlueStep':5,"
        wpoptions += "'MinR2:0.8, 'MaxFitCoeffError':50.0,"
        wpoptions += "'InPlumeThresh':0.05, 'MinPlumeAngle':0.1,"
        wpoptions += "'MaxPlumeAngle':3.0, 'MinPlumeSect':0.4,"
        wpoptions += "'MaxPlumeSect':2.0, 'MeanPlumeCtrHeight':310,"
        wpoptions += "'SEMeanPlumeCtrHeight':0.442, 'MaxRangeToPlume':5000,"
        wpoptions += "'MaxPlumeWidth':2600, 'MaxPlumeCentreAltitude':2000,"
        wpoptions += "'MaxRangeSeperation':5000, 'MaxAltSeperation':1000,"
        wpoptions += "'MaxTimeDiff':30, 'MinTriLensAngle':0.1745,"
        wpoptions += "'MaxTriLensAngle':2.9671, 'SEWindSpeed':0.20,"
        wpoptions += "'WindMultiplier':1.24, 'SEWindDir':0.174}"
        mb1 = MethodBuffer(name='WidPro v1.2',
                           description='Jscript wrapper for DOASIS',
                           settings=wpoptions)
        m1 = d.new(mb1)

        # Read in the raw wind data; this is currently not needed to reproduce
        # flux estimates so it's just stored for reference
        fn_wd = os.path.join(self.data_dir, 'minidoas', 'wind',
                             '20161101_WD_00.txt')
        fn_ws = os.path.join(self.data_dir, 'minidoas', 'wind',
                             '20161101_WS_00.txt')
        e2 = d.read({'direction': fn_wd, 'speed': fn_ws},
                    ftype='minidoas-wind', timeshift=13)
        gfb = e2['GasFlowBuffer']
        d.new(gfb)

        station_info = {}
        files = {'raw': os.path.join(self.data_dir, 'minidoas',
                                     'NE_20161101.csv'),
                 'spectra': os.path.join(self.data_dir, 'minidoas',
                                         'NE_2016_11_01_Spectra.csv'),
                 'flux_ah': os.path.join(self.data_dir, 'minidoas',
                                         'NE_2016_11_01_Scans.csv'),
                 'flux_ch': os.path.join(self.data_dir, 'minidoas',
                                         'XX_2016_11_01_Combined.csv'),
                 'fits_flux_ah': os.path.join(self.data_dir, 'minidoas',
                                              'FITS_NE_20161101_ah.csv'),
                 'fits_flux_ch': os.path.join(self.data_dir, 'minidoas',
                                              'FITS_NE_20161101_ch.csv')}
        station_info['WI301'] = {'files': files,
                                 'stationID': 'WI301',
                                 'stationLoc': 'White Island North-East Point',
                                 'target': t,
                                 'bearing': 6.0214,
                                 'lon': 177.192979384, 'lat': -37.5166903535,
                                 'elev': 49.0,
                                 'widpro_method': m1,
                                 'wp_station_id': 'NE'}
        files = {'raw': os.path.join(self.data_dir, 'minidoas',
                                     'SR_20161101.csv'),
                 'spectra': os.path.join(self.data_dir, 'minidoas',
                                         'SR_2016_11_01_Spectra.csv'),
                 'flux_ah': os.path.join(self.data_dir, 'minidoas',
                                         'SR_2016_11_01_Scans.csv'),
                 'flux_ch': os.path.join(self.data_dir, 'minidoas',
                                         'XX_2016_11_01_Combined.csv'),
                 'fits_flux_ah': os.path.join(self.data_dir, 'minidoas',
                                              'FITS_SR_20161101_ah.csv'),
                 'fits_flux_ch': os.path.join(self.data_dir, 'minidoas',
                                              'FITS_SR_20161101_ch.csv')}
        station_info['WI302'] = {'files': files,
                                 'stationID': 'WI302',
                                 'stationLoc': 'White Island South Rim',
                                 'target': t,
                                 'bearing': 3.8223,
                                 'lon': 177.189013316, 'lat': -37.5265334424,
                                 'elev': 96.0,
                                 'widpro_method': m1,
                                 'wp_station_id': 'SR'}

        self.read_single_station(d, station_info['WI301'])
        self.read_single_station(d, station_info['WI302'])
        d.close()