def test_wera_mask():
    """Masking over land should drop the expected number of WERA radial points."""
    radial_file = data_path / 'radials' / 'WERA' / 'RDL_csw_2019_10_24_162300.ruv'
    radial = Radial(radial_file, mask_over_land=False, replace_invalid=False)

    # Every solution should still be present before masking is applied
    assert len(radial.data) == 6327

    radial.mask_over_land()

    # The land-based points should have been subset out
    assert len(radial.data) == 5745
def main(radial_file, save_path):
    """
    Main function to parse and qc radial files
    :param radial_file: Path to radial file
    :param save_path: Path to save quality controlled radial file
    """
    # Best-effort: an unreadable radial file is simply skipped
    try:
        radial = Radial(radial_file)
    except Exception:
        return

    if not radial.is_valid():
        return

    out_file = os.path.join(save_path, radial.file_name.replace('.ruv', '.nc'))
    # Export failures (bad values) are tolerated as well
    try:
        radial.export(out_file, 'netcdf')
    except ValueError:
        pass
def setUp(self):
    """Collect SEAB radial fixtures as paths, strings, and Radial objects."""
    self.file_paths = list((data_path / 'radials' / 'SEAB').glob('*.ruv'))
    self.radial_files = [str(path) for path in self.file_paths]
    self.radial_objects = [Radial(str(path)) for path in self.radial_files]
    # Interleave even-indexed file paths with odd-indexed Radial objects
    # into one list of mixed content types for concating
    self.radial_mixed = self.radial_files[::2] + self.radial_objects[1:][::2]
def main(file):
    """Parse one radial file and upload its header and diagnostics to the database.

    :param file: Path to the radial file to ingest
    """
    radial = Radial(file)
    radial.clean_header()
    print(radial.file_name)
    radial.metadata['filename'] = radial.file_name

    # Upload site information to database; if the site record already
    # exists, fetch it instead of inserting a duplicate.
    try:
        site = Site(name=radial.metadata['Site'],
                    center_frequency=radial.metadata['TransmitCenterFreqMHz'])
        site.save()
    except NotUniqueError:
        site = Site.objects(name=radial.metadata['Site'])[0]

    radial.metadata['site_code'] = site.id

    hardware_diagnostics = HardwareDiagnostics(
        **radial.diagnostics_hardware.to_dict(orient='list'))
    radial_diagnostics = RadialDiagnostics(
        **radial.diagnostics_radial.to_dict(orient='list'))
    radial_metadata = RadialMetadata(**radial.metadata)

    object_info = {
        'filename': radial.file_name,
        'site_code': site.id,
        'radial_metadata': radial_metadata,
        'hardware_diagnostics': hardware_diagnostics,
        'radial_diagnostics': radial_diagnostics,
    }

    # Upload the radial file header information
    RadialFile(**object_info).save()
def test_wera_radial_to_netcdf():
    """Exported netCDF should match the enhanced (not the plain) xarray form."""
    radial_file = data_path / 'radials' / 'WERA' / 'RDL_csw_2019_10_24_162300.ruv'
    nc_file = output_path / 'radials_nc' / 'WERA' / 'RDL_csw_2019_10_24_162300.nc'

    # Exporting converts the underlying .data (natively a pandas
    # DataFrame) to an xarray object, automatically 'enhancing' the
    # netCDF file with better variable names and attributes.
    radial = Radial(radial_file)
    radial.export(str(nc_file), file_type='netcdf')

    # Plain conversion: no variable or attribute enhancements
    plain = radial.to_xarray(enhance=False)

    # Enhanced conversion: renamed variables, added attributes, and CF
    # decoding (e.g. scale_factor)
    enhanced = radial.to_xarray(enhance=True)

    with xr.open_dataset(nc_file) as on_disk:
        # The two enhanced datasets should be identical
        assert on_disk.identical(enhanced)
        # Enhanced and non-enhanced datasets should differ
        assert not on_disk.identical(plain)
class TestCombineRadials:
    """Concatenating radials should work for objects, paths, and a mix of both."""

    file_paths = list((data_path / 'radials' / 'SEAB').glob('*.ruv'))
    radial_files = [str(path) for path in file_paths]
    radial_objects = [Radial(str(path)) for path in radial_files]
    # Interleave even-indexed file paths with odd-indexed Radial objects
    # into one list of mixed content types for concating
    radial_mixed = radial_files[::2] + radial_objects[1:][::2]

    def _check_combined(self, combined):
        # One time step per input file, sorted chronologically
        assert combined.time.size == len(self.file_paths)
        assert np.array_equal(combined.time.values,
                              np.sort(combined.time.values))

    def test_concat_radial_objects(self):
        self._check_combined(concatenate_radials(self.radial_objects))

    def test_concat_radial_files(self):
        self._check_combined(concatenate_radials(self.radial_files))

    def test_concat_mixed_radials(self):
        self._check_combined(concatenate_radials(self.radial_mixed))

    def test_concat_mixed_radials_enhance(self):
        self._check_combined(
            concatenate_radials(self.radial_mixed, enhance=True))
def main(file_list):
    """
    Parse a list of radial files and bulk-insert their metadata into the
    `codar.radials` MongoDB collection.

    :param file_list: Iterable of paths to radial files
    """
    client = MongoClient()
    db = client.codar
    db.radials.create_indexes([index1, index2])

    bulk_info = []
    for radial in file_list:
        # TODO Add multiprocessing here.
        r = Radial(radial)
        if not r.is_valid():
            continue

        # FIX: removed the no-op self-assignment of r.metadata['Site'].
        # Normalize the pattern type if present; some files omit it.
        try:
            r.metadata['PatternType'] = r.metadata['PatternType'].lower()
        except KeyError:
            pass

        r.clean_header(split_origin=True)
        r.metadata['filename'] = r.file_name

        # Assign a system type so we can sort on this
        r.metadata['SystemType'] = frequency_check(
            r.metadata['TransmitCenterFreqMHz'])
        # FIX: len(r.data) instead of calling __len__ directly.
        r.metadata['RadialSolutions'] = len(r.data)

        # Try statements in case a radial file doesn't contain a
        # diagnostic table: log the error and keep going.
        # FIX: orient='records' replaces the deprecated pandas orient
        # abbreviation 'r' (removed in pandas 2.0); output is identical.
        try:
            r.metadata['diagnostics_hardware'] = \
                r.diagnostics_hardware.to_dict(orient='records')
        except AttributeError as ae:
            logging.error(ae)
        try:
            r.metadata['diagnostics_radial'] = \
                r.diagnostics_radial.to_dict(orient='records')
        except AttributeError as ae:
            logging.error(ae)

        bulk_info.append(InsertOne(r.metadata))

    try:
        db.radials.bulk_write(bulk_info, ordered=False)
        logging.info(
            'Bulk radial insert successful. {} radials inserted.'.format(
                len(bulk_info)))
    except BulkWriteError as bwe:
        logging.error(bwe.details)
def parse_radial_file(radial_file):
    """
    Parse CODAR radial files utilizing the Radial class and upload to MySQL database.
    :param radial_file: Path to CODAR Radial File
    """
    basename = os.path.basename(radial_file).split('.')[0]
    logging.debug(
        '{} - Checking if file is uploaded to MySQL database.'.format(
            basename))
    # Check if the file has been uploaded already. If it hasn't, upload
    # it completely.
    uploaded = db.check_file_upload(session, basename, RadialMetadata)
    if uploaded:
        return

    logging.debug('{} - Loading'.format(radial_file))
    try:
        r = Radial(radial_file)
        if not r.is_valid():
            return
        # Clean up header information for entry into mysql database
        r.clean_header()
        r.metadata['filename'] = os.path.splitext(
            os.path.basename(radial_file))[0]
        r.metadata['fileModTime'] = dt.datetime.fromtimestamp(
            os.stat(radial_file).st_mtime)

        # Fill certain table columns with relational ids.
        # Check to see if the site has been uploaded to the HfrSites
        # table of the MySQL database; upload it if not.
        try:
            site_info = sites[sites.site == r.metadata['Site']]
            site_id = int(site_info.id.iloc[0])
        except IndexError:
            logging.info(
                '{} not found. Uploading site to hfrSites table'.format(
                    r.metadata['Site']))
            site_info = db.update_site_table(
                session, r.metadata['Site'],
                r.metadata['TransmitCenterFreqMHz'], r.metadata['Origin'])
            site_id = int(site_info)
        r.metadata['Site'] = site_id

        try:
            patt_type = pattern_types[
                pattern_types.type == r.metadata['PatternType']]
            pattern_id = int(patt_type.id.iloc[0])
        except IndexError:
            # FIX: this log message was split by a stray newline in the
            # source (a syntax error); reconstructed as one literal.
            logging.error('{} not found. Pattern type invalid'.format(
                r.metadata['PatternType']))
            return
        r.metadata['PatternType'] = pattern_id

        # Add extra information to header
        r.metadata['TableType'] = r._tables['1']['TableType']
        r.metadata['TableColumns'] = r._tables['1']['TableColumns']
        r.metadata['TableColumnTypes'] = r._tables['1']['TableColumnTypes']
        r.metadata['TableRows'] = r._tables['1']['TableRows']

        # Upload radial header information and update latest radials table
        r.metadata = dbr.upload_radial_header(session, r.metadata)
        dbr.update_latest_radials(session, r.metadata)

        # Timestamp columns are redundant with the radial record itself
        drop_cols = ['TIME', 'TYRS', 'TMON', 'TDAY', 'THRS', 'TMIN', 'TSEC']

        try:
            # Upload radial diagnostic data (best-effort: files without
            # this diagnostics table are simply skipped).
            r.diagnostics_radial = r.diagnostics_radial.drop(
                drop_cols, axis=1)
            r.diagnostics_radial['id_site'] = r.metadata['Site']
            r.diagnostics_radial['id_radial'] = r.metadata['radial_id']
            dbr.upload_diagnostics(session, RadialDiagnostics,
                                   r.diagnostics_radial, r.metadata['Site'])
            logging.debug(
                '{} - Table `{}` - Diagnostic data uploaded '.format(
                    r.metadata['filename'], 'hfrRadialDiagnostics'))
        except Exception:
            # FIX: narrowed from a bare `except:` so KeyboardInterrupt /
            # SystemExit are no longer swallowed; still best-effort.
            pass

        try:
            # Upload hardware diagnostic data (best-effort, as above).
            r.diagnostics_hardware = r.diagnostics_hardware.drop(
                drop_cols, axis=1)
            r.diagnostics_hardware['id_site'] = r.metadata['Site']
            r.diagnostics_hardware['id_radial'] = r.metadata['radial_id']
            dbr.upload_diagnostics(session, HardwareDiagnostics,
                                   r.diagnostics_hardware,
                                   r.metadata['Site'])
            logging.debug(
                '{} - Table `{}` - Diagnostic data uploaded '.format(
                    r.metadata['filename'], 'hfrHardwareDiagnostics'))
        except Exception:
            # FIX: narrowed from a bare `except:` (see above).
            pass

        logging.info('{} - File uploaded successfully'.format(radial_file))
    except Exception:
        # FIX: narrowed from a bare `except:`; any parse/upload failure
        # is logged and the file is skipped.
        logging.error('{} - File failed to upload'.format(radial_file))
import numpy as np
import pandas as pd

sites = ['SEAB', 'BRAD', 'SPRK', 'BRNT', 'BRMR', 'RATH']
time = '2018_01_01_0000'
pattern_type = 'RDLi'

# Build the full path to one radial file per site for this timestamp
radials = []
for site in sites:
    radials.append(
        f'/Volumes/home/codaradm/data/radials/{site}/2018_01/{pattern_type}_{site}_{time}.ruv'
    )

loaded = {}
for radial in radials:
    loaded[radial] = Radial(radial, mask_over_land=False)

grid_file = '../totals/grid_files/maracoos_grid_2km.txt'

# Load csv file containing the grid.
# FIX: the original passed both sep=',' and delim_whitespace=True, which
# makes pandas raise ValueError (the two options are mutually exclusive).
# Keep the comma separator and strip any padding whitespace with
# skipinitialspace instead.
# NOTE(review): assumes the grid file is comma-separated — confirm
# against an actual maracoos_grid_2km.txt file.
grid = pd.read_csv(grid_file,
                   sep=',',
                   header=None,
                   names=['lon', 'lat'],
                   skipinitialspace=True)

# Unique sorted axes, then a flattened 2-D mesh of grid coordinates
lon = np.unique(grid['lon'].values.astype(np.float32))
lat = np.unique(grid['lat'].values.astype(np.float32))
[x, y] = np.meshgrid(lon, lat)
x = x.ravel()
y = y.ravel()
def test_wera_raw_to_quality_nc():
    """QC'd WERA radial should round-trip through netCDF with 3 QC tests recorded."""
    radial_file = data_path / 'radials' / 'WERA' / 'RDL_csw_2019_10_24_162300.ruv'
    nc_file = output_path / 'radials_qc_nc' / 'WERA' / 'RDL_csw_2019_10_24_162300.nc'

    radial = Radial(radial_file, mask_over_land=False, replace_invalid=False)
    radial.mask_over_land()

    # Run the QARTOD suite before exporting
    radial.qc_qartod_radial_count()
    radial.qc_qartod_valid_location()
    radial.qc_qartod_maximum_velocity()
    radial.qc_qartod_spatial_median()

    radial.export(str(nc_file), file_type='netcdf')
    enhanced = radial.to_xarray(enhance=True)

    with xr.open_dataset(nc_file) as on_disk:
        # no VFLG column so one test not run
        assert len(on_disk.QCTest) == 3
        # The two enhanced datasets should be identical
        assert on_disk.identical(enhanced)
def test_wera_qc():
    """QARTOD tests on a WERA radial should add the expected QC columns."""
    radial_file = data_path / 'radials' / 'WERA' / 'RDL_csw_2019_10_24_162300.ruv'
    radial = Radial(radial_file, mask_over_land=False, replace_invalid=False)

    # All solutions present before masking
    assert len(radial.data) == 6327

    radial.mask_over_land()
    radial.qc_qartod_radial_count()
    radial.qc_qartod_valid_location()
    radial.qc_qartod_maximum_velocity()
    radial.qc_qartod_spatial_median()

    # Land points removed by the mask
    assert len(radial.data) == 5745

    # Each QC test adds a column; QC08 needs a VFLG column, which this
    # file lacks, so we can't run it
    assert 'QC07' in radial.data
    assert 'QC08' not in radial.data
    assert 'QC09' in radial.data
    assert 'QC10' in radial.data
def main(radial_file, save_path, qc_values):
    """
    Main function to parse and qc radial files
    :param radial_file: Path to radial file
    :param save_path: Path to save quality controlled radial file
    :param qc_values: Dictionary containing thresholds for each QC test
    """
    try:
        r = Radial(radial_file)
    except Exception as err:
        logging.error('{} - {}'.format(radial_file, err))
        return

    if not r.is_valid():
        return

    # Run high frequency radar qartod tests on open radial file
    r.initialize_qc()
    r.qc_qartod_syntax()
    r.qc_qartod_maximum_velocity(**qc_values['qc_qartod_maximum_velocity'])
    r.qc_qartod_valid_location()
    r.qc_qartod_radial_count(**qc_values['qc_qartod_radial_count'])
    r.qc_qartod_spatial_median(**qc_values['qc_qartod_spatial_median'])
    # r.qc_qartod_avg_radial_bearing(qc_values['average_bearing_threshold'])

    # Export radial file to either a radial or netcdf
    try:
        r.export(os.path.join(save_path, r.file_name), 'radial')
    except ValueError as err:
        # FIX: removed the dead `pass` statement that followed this call.
        logging.error('{} - {}'.format(radial_file, err))