def doWindfieldPlotting(configFile):
    """
    Plot the wind field on a map.

    :param str configFile: Path to the configuration file.

    :Note: if the input track file is not a netCDF file, the gust file
           name is assumed to be 'gust.001-00001.nc'

    """
    from netCDF4 import Dataset
    import numpy as np
    config = ConfigParser()
    config.read(configFile)
    outputPath = config.get('Output', 'Path')
    windfieldPath = pjoin(outputPath, 'windfield')

    inputFile = config.get('DataProcess', 'InputFile')
    if inputFile.endswith(".nc"):
        # We have a netcdf track file. Work under the assumption it is
        # drawn directly from TCRM.
        trackFile = os.path.basename(inputFile)
        trackId = trackFile.split('.')[1]
        gustFile = 'gust.{0}.nc'.format(trackId)
        outputWindFile = pjoin(windfieldPath, gustFile)
    else:
        # Note the assumption about the file name!
        outputWindFile = pjoin(windfieldPath, 'gust.001-00001.nc')
    plotPath = pjoin(outputPath, 'plots', 'maxwind.png')

    f = Dataset(outputWindFile, 'r')
    xdata = f.variables['lon'][:]
    ydata = f.variables['lat'][:]
    vdata = f.variables['vmax'][:]

    gridLimit = None
    if config.has_option('Region', 'gridLimit'):
        gridLimit = config.geteval('Region', 'gridLimit')
        ii = np.where((xdata >= gridLimit['xMin']) &
                      (xdata <= gridLimit['xMax']))
        jj = np.where((ydata >= gridLimit['yMin']) &
                      (ydata <= gridLimit['yMax']))
        [xgrid, ygrid] = np.meshgrid(xdata[ii], ydata[jj])
        ig, jg = np.meshgrid(ii, jj)
        vdata = vdata[jg, ig]
    else:
        [xgrid, ygrid] = np.meshgrid(xdata, ydata)

    map_kwargs = dict(llcrnrlon=xgrid.min(),
                      llcrnrlat=ygrid.min(),
                      urcrnrlon=xgrid.max(),
                      urcrnrlat=ygrid.max(),
                      projection='merc',
                      resolution='i')
    title = "Maximum wind speed"
    cbarlabel = "Wind speed ({0})".format(f.variables['vmax'].units)
    levels = np.arange(30, 101., 5.)
    saveWindfieldMap(vdata, xgrid, ygrid, title, levels,
                     cbarlabel, map_kwargs, plotPath)
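# A minimal, self-contained sketch of the gridLimit subsetting step used above,
# assuming synthetic 1-D lon/lat arrays and a hypothetical gridLimit dict. This
# is not part of TCRM; it only illustrates the np.where/meshgrid indexing that
# clips the gust grid to the configured domain.
import numpy as np

def subset_grid(xdata, ydata, vdata, gridLimit):
    """Return the gridded values clipped to the gridLimit bounds."""
    ii = np.where((xdata >= gridLimit['xMin']) & (xdata <= gridLimit['xMax']))
    jj = np.where((ydata >= gridLimit['yMin']) & (ydata <= gridLimit['yMax']))
    xgrid, ygrid = np.meshgrid(xdata[ii], ydata[jj])
    ig, jg = np.meshgrid(ii, jj)
    return xgrid, ygrid, vdata[jg, ig]

if __name__ == '__main__':
    lon = np.arange(90., 180., 0.5)
    lat = np.arange(-30., 0., 0.5)
    vmax = np.random.rand(lat.size, lon.size)
    limit = {'xMin': 110., 'xMax': 130., 'yMin': -25., 'yMax': -10.}
    xg, yg, vsub = subset_grid(lon, lat, vmax, limit)
    print(vsub.shape)  # rows = lat points inside the limit, cols = lon points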
def __init__(self, configFile):
    config = ConfigParser()
    config.read(configFile)

    # Check for wind multiplier file path in config file
    if config.has_option('Input', 'RawMultipliers'):
        self.WMPath = config.get('Input', 'RawMultipliers')
        log.info('Using multiplier files from {0}'.format(self.WMPath))
    else:
        log.info('Using default multiplier files from '
                 '/g/data/fj6/multipliers/')
        self.WMPath = '/g/data/fj6/multipliers/'
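# Hedged illustration of the [Input] RawMultipliers lookup above, using the
# standard-library configparser rather than TCRM's ConfigParser wrapper (an
# assumption made so the example is self-contained). The section and option
# names match the code above; the path value is made up.
import io
import configparser

SAMPLE_CONFIG = """
[Input]
RawMultipliers = /short/multiplier_data/
"""

config = configparser.ConfigParser()
config.read_file(io.StringIO(SAMPLE_CONFIG))

if config.has_option('Input', 'RawMultipliers'):
    WMPath = config.get('Input', 'RawMultipliers')
else:
    WMPath = '/g/data/fj6/multipliers/'   # default used by the code above
print(WMPath)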
def doWindfieldPlotting(configFile):
    """
    Plot the wind field on a map.

    :param str configFile: Path to the configuration file.

    :Note: the file name is assumed to be 'gust.interp.nc'

    """
    from netCDF4 import Dataset
    import numpy as np
    config = ConfigParser()
    config.read(configFile)
    outputPath = config.get('Output', 'Path')
    windfieldPath = pjoin(outputPath, 'windfield')

    # Note the assumption about the file name!
    outputWindFile = pjoin(windfieldPath, 'gust.interp.nc')
    plotPath = pjoin(outputPath, 'plots', 'maxwind.png')

    f = Dataset(outputWindFile, 'r')
    xdata = f.variables['lon'][:]
    ydata = f.variables['lat'][:]
    vdata = f.variables['vmax'][:]

    gridLimit = None
    if config.has_option('Region', 'gridLimit'):
        gridLimit = config.geteval('Region', 'gridLimit')
        ii = np.where((xdata >= gridLimit['xMin']) &
                      (xdata <= gridLimit['xMax']))
        jj = np.where((ydata >= gridLimit['yMin']) &
                      (ydata <= gridLimit['yMax']))
        [xgrid, ygrid] = np.meshgrid(xdata[ii], ydata[jj])
        ig, jg = np.meshgrid(ii, jj)
        vdata = vdata[jg, ig]
    else:
        [xgrid, ygrid] = np.meshgrid(xdata, ydata)

    map_kwargs = dict(llcrnrlon=xgrid.min(),
                      llcrnrlat=ygrid.min(),
                      urcrnrlon=xgrid.max(),
                      urcrnrlat=ygrid.max(),
                      projection='merc',
                      resolution='i')
    title = "Maximum wind speed"
    cbarlabel = "Wind speed ({0})".format(f.variables['vmax'].units)
    levels = np.arange(30, 101., 5.)
    saveWindfieldMap(vdata, xgrid, ygrid, title, levels,
                     cbarlabel, map_kwargs, plotPath)
def loadDatasets():
    config = ConfigParser()
    datasets = config.get('Input', 'Datasets').split(',')
    global DATASETS
    for dataset in datasets:
        url = config.get(dataset, 'URL')
        path = config.get(dataset, 'path')
        if config.has_option(dataset, 'filename'):
            filename = config.get(dataset, 'filename')
        else:
            filename = None
        data = DataSet(dataset, url, path, filename)
        DATASETS.append(data)
def copyTranslateMultipliers(configFile, type_mapping, output_path):
    '''
    Copy wind multipliers from the directory specified in the configuration
    file to an output directory. Once the files have been copied, they are
    translated into GeoTIFFs.

    :param str configFile: Path to the configuration file
    :param dict type_mapping: dict containing the three wind multiplier inputs
    :param str output_path: Path to the output directory
    '''
    config = ConfigParser()
    config.read(configFile)

    # Check for wind multiplier file path in config file
    if config.has_option('Input', 'Multipliers'):
        WMPath = config.get('Input', 'Multipliers')
        log.info('Using multiplier files from %s', WMPath)
    else:
        log.info('Using default multiplier files from /g/data/fj6/multipliers/')
        WMPath = '/g/data/fj6/multipliers/'
    tiles = config.get('Input', 'Tiles')
    tiles = [item.strip() for item in tiles.split(',')]

    log.info('Multipliers will be written out to %s', output_path)
    checkOutputFolders(output_path)

    for tile in tiles:
        for wm in type_mapping:
            var = type_mapping[wm]
            pathn = WMPath + wm + '/'
            log.debug('Copying files from %s', pathn)
            log.debug('Beginning to translate tiles into GeoTIFF')
            for file in glob.glob(pathn + tile + '*'):
                file_break = file.split('/')
                output = file_break[-1]
                output_name = wm + '/' + output
                copyfile(file, output_path + output_name)
                os.system('gdal_translate -of GTiff '
                          'NETCDF:{0}{1}/{2}:{3} {4}{5}.tif'
                          .format(output_path, wm, output, var,
                                  output_path, output_name[:-3]))  # -3 to drop '.nc'
                log.debug('%s translated to GeoTIFF', output_name[:-3])
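# A small sketch of how the gdal_translate command above is assembled, shown
# here with subprocess.run instead of os.system (a deliberate substitution,
# not what the code above does). The file names and the variable name 'Ms'
# are hypothetical examples; the NETCDF:<file>:<variable> source string and
# '-of GTiff' output format follow the call in the function above.
import subprocess

def translate_to_gtiff(output_path, wm, nc_name, var):
    """Build and run the NETCDF -> GeoTIFF translation for one tile file."""
    src = 'NETCDF:{0}{1}/{2}:{3}'.format(output_path, wm, nc_name, var)
    dst = '{0}{1}/{2}.tif'.format(output_path, wm, nc_name[:-3])  # drop '.nc'
    cmd = ['gdal_translate', '-of', 'GTiff', src, dst]
    subprocess.run(cmd, check=True)

# Example call (hypothetical names):
# translate_to_gtiff('/scratch/multipliers/', 'shielding', 'e113s28_ms.nc', 'Ms')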
def loadDatasets():
    """
    Load the details of the datasets to be downloaded from the
    configuration settings. This updates the :data:`DATASETS` list.

    """
    config = ConfigParser()
    datasets = config.get('Input', 'Datasets').split(',')
    global DATASETS
    for dataset in datasets:
        url = config.get(dataset, 'URL')
        path = config.get(dataset, 'path')
        if config.has_option(dataset, 'filename'):
            filename = config.get(dataset, 'filename')
        else:
            filename = None
        data = DataSet(dataset, url, path, filename)
        DATASETS.append(data)
def loadDatasets(configFile):
    """
    Load the details of the datasets to be downloaded from the
    configuration settings. This updates the :data:`DATASETS` list.

    :param str configFile: Path to the configuration file.

    """
    config = ConfigParser()
    config.read(configFile)
    datasets = config.get('Input', 'Datasets').split(',')
    global DATASETS
    for dataset in datasets:
        url = config.get(dataset, 'URL')
        path = config.get(dataset, 'path')
        if config.has_option(dataset, 'filename'):
            filename = config.get(dataset, 'filename')
        else:
            filename = None
        data = DataSet(dataset, url, path, filename)
        DATASETS.append(data)
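# Hedged sketch of the configuration layout this function expects: a
# 'Datasets' option under [Input] listing section names, and one section per
# dataset with URL and path options (filename is optional). All values below
# are illustrative placeholders, not real dataset locations.
#
# [Input]
# Datasets = IBTRACS, LTMSLP
#
# [IBTRACS]
# URL = https://example.com/ibtracs.csv
# path = input
# filename = ibtracs.csv
#
# [LTMSLP]
# URL = https://example.com/slp.day.ltm.nc
# path = MSLP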
def __init__(self, configFile, auto_calc_grid_limit):
    """
    :type  configFile: string
    :param configFile: Configuration file name

    :type  auto_calc_grid_limit: :class:`dict`
    :param auto_calc_grid_limit: the domain where the frequency will be
                                 calculated. The :class:`dict` should
                                 contain the keys :attr:`xMin`,
                                 :attr:`xMax`, :attr:`yMin` and
                                 :attr:`yMax`. The *x* variable bounds
                                 the longitude and the *y* variable
                                 bounds the latitude.

    """
    config = ConfigParser()
    config.read(configFile)

    if config.has_option('TrackGenerator', 'gridLimit'):
        self.tg_domain = config.geteval('TrackGenerator', 'gridLimit')
    else:
        self.tg_domain = auto_calc_grid_limit

    self.outputPath = config.get('Output', 'Path')
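# Minimal illustration of the gridLimit fallback above. The domain dict uses
# the xMin/xMax/yMin/yMax keys described in the docstring (x bounds longitude,
# y bounds latitude); the values are examples only. config.geteval appears to
# be TCRM's helper that evaluates the option string into a Python object.
auto_calc_grid_limit = {'xMin': 90., 'xMax': 180., 'yMin': -40., 'yMax': 0.}

# The auto-calculated domain is only used when no explicit domain is set in
# the configuration file, e.g. (illustrative values):
#
# [TrackGenerator]
# gridLimit = {'xMin': 110., 'xMax': 160., 'yMin': -35., 'yMax': -5.}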
def processData(self, restrictToWindfieldDomain=False):
    """
    Process raw data into ASCII files that can be read by the main
    components of the system.

    :param bool restrictToWindfieldDomain: if True, only process data
        within the wind field domain, otherwise, process data from
        across the track generation domain.

    """
    config = ConfigParser()
    config.read(self.configFile)
    self.logger.info("Running {0}".format(flModuleName()))

    if config.has_option('DataProcess', 'InputFile'):
        inputFile = config.get('DataProcess', 'InputFile')

    if config.has_option('DataProcess', 'Source'):
        source = config.get('DataProcess', 'Source')
        self.logger.info('Loading %s dataset', source)
        fn = config.get(source, 'filename')
        path = config.get(source, 'path')
        inputFile = pjoin(path, fn)

    # If input file has no path information, default to tcrm input folder
    if len(os.path.dirname(inputFile)) == 0:
        inputFile = pjoin(self.tcrm_input_dir, inputFile)

    self.logger.info("Processing {0}".format(inputFile))

    self.source = config.get('DataProcess', 'Source')

    inputData = colReadCSV(self.configFile, inputFile, self.source)

    inputSpeedUnits = config.get(self.source, 'SpeedUnits')
    inputPressureUnits = config.get(self.source, 'PressureUnits')
    inputLengthUnits = config.get(self.source, 'LengthUnits')
    startSeason = config.getint('DataProcess', 'StartSeason')

    indicator = loadData.getInitialPositions(inputData)

    lat = np.array(inputData['lat'], 'd')
    lon = np.mod(np.array(inputData['lon'], 'd'), 360)

    if restrictToWindfieldDomain:
        # Filter the input arrays to only retain the tracks that
        # pass through the windfield domain.
        CD = CalcTrackDomain(self.configFile)
        self.domain = CD.calcDomainFromTracks(indicator, lon, lat)
        domainIndex = self.extractTracks(indicator, lon, lat)
        inputData = inputData[domainIndex]
        indicator = indicator[domainIndex]
        lon = lon[domainIndex]
        lat = lat[domainIndex]

    if self.progressbar is not None:
        self.progressbar.update(0.125)

    # Sort date/time information
    try:
        dt = np.empty(indicator.size, 'f')
        dt[1:] = np.diff(inputData['age'])
    except (ValueError, KeyError):
        try:
            self.logger.info("Filtering input data by season: "
                             "season > {0}".format(startSeason))
            # Find indices that satisfy minimum season filter
            idx = np.where(inputData['season'] >= startSeason)[0]

            # Filter records:
            inputData = inputData[idx]
            indicator = indicator[idx]
            lon = lon[idx]
            lat = lat[idx]
        except (ValueError, KeyError):
            pass

        year, month, day, hour, minute, datetimes \
            = loadData.parseDates(inputData, indicator)

        # Time between observations:
        dt = loadData.getTimeDelta(year, month, day, hour, minute)

        # Calculate julian days:
        jdays = loadData.julianDays(year, month, day, hour, minute)

    delta_lon = np.diff(lon)
    delta_lat = np.diff(lat)

    # Split into separate tracks if large jump occurs (delta_lon >
    # 15 degrees or delta_lat > 5 degrees) This avoids two tracks
    # being accidentally combined when seasons and track numbers
    # match but basins are different as occurs in the IBTrACS
    # dataset. This problem can also be prevented if the
    # 'tcserialno' column is specified.
    indicator[np.where(delta_lon > 15)[0] + 1] = 1
    indicator[np.where(delta_lat > 5)[0] + 1] = 1

    # Save information required for frequency auto-calculation
    try:
        origin_seasonOrYear = np.array(
            inputData['season'], 'i').compress(indicator)
        header = 'Season'
    except (ValueError, KeyError):
        origin_seasonOrYear = year.compress(indicator)
        header = 'Year'

    flSaveFile(self.origin_year, np.transpose(origin_seasonOrYear),
               header, ',', fmt='%d')

    pressure = np.array(inputData['pressure'], 'd')
    novalue_index = np.where(pressure == sys.maxint)
    pressure = metutils.convert(pressure, inputPressureUnits, "hPa")
    pressure[novalue_index] = sys.maxint

    # Convert any non-physical central pressure values to maximum integer
    # This is required because IBTrACS has a mix of missing value codes
    # (i.e. -999, 0, 9999) in the same global dataset.
    pressure = np.where((pressure < 600) | (pressure > 1100),
                        sys.maxint, pressure)

    if self.progressbar is not None:
        self.progressbar.update(0.25)

    try:
        vmax = np.array(inputData['vmax'], 'd')
    except (ValueError, KeyError):
        self.logger.warning("No max wind speed data")
        vmax = np.empty(indicator.size, 'f')
    else:
        novalue_index = np.where(vmax == sys.maxint)
        vmax = metutils.convert(vmax, inputSpeedUnits, "mps")
        vmax[novalue_index] = sys.maxint

    assert lat.size == indicator.size
    assert lon.size == indicator.size
    assert pressure.size == indicator.size
    #assert vmax.size == indicator.size

    try:
        rmax = np.array(inputData['rmax'])
        novalue_index = np.where(rmax == sys.maxint)
        rmax = metutils.convert(rmax, inputLengthUnits, "km")
        rmax[novalue_index] = sys.maxint

        self._rmax(rmax, indicator)
        self._rmaxRate(rmax, dt, indicator)
    except (ValueError, KeyError):
        self.logger.warning("No rmax data available")

    if self.ncflag:
        self.data['index'] = indicator

    # ieast : parameter used in latLon2Azi
    # FIXME: should be a config setting describing the input data.
    ieast = 1

    # Determine the index of initial cyclone observations, excluding
    # those cyclones that have only one observation. This is used
    # for calculating initial bearing and speed
    indicator2 = np.where(indicator > 0, 1, 0)
    initIndex = np.concatenate(
        [np.where(np.diff(indicator2) == -1, 1, 0), [0]])

    # Calculate the bearing and distance (km) of every two
    # consecutive records using ll2azi
    bear_, dist_ = maputils.latLon2Azi(lat, lon, ieast, azimuth=0)
    assert bear_.size == indicator.size - 1
    assert dist_.size == indicator.size - 1
    bear = np.empty(indicator.size, 'f')
    bear[1:] = bear_
    dist = np.empty(indicator.size, 'f')
    dist[1:] = dist_

    self._lonLat(lon, lat, indicator, initIndex)
    self._bearing(bear, indicator, initIndex)
    self._bearingRate(bear, dt, indicator)
    if self.progressbar is not None:
        self.progressbar.update(0.375)
    self._speed(dist, dt, indicator, initIndex)
    self._speedRate(dist, dt, indicator)
    self._pressure(pressure, indicator)
    self._pressureRate(pressure, dt, indicator)
    self._windSpeed(vmax)

    try:
        self._frequency(year, indicator)
        self._juliandays(jdays, indicator, year)
    except (ValueError, KeyError):
        pass

    self.logger.info("Completed {0}".format(flModuleName()))
    if self.progressbar is not None:
        self.progressbar.update(0.5)
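# Self-contained sketch of the track-splitting rule used above: mark a new
# track start (indicator = 1) wherever consecutive positions jump by more
# than 15 degrees of longitude or 5 degrees of latitude. The arrays below
# are synthetic and exist only to show the indexing.
import numpy as np

lon = np.array([150., 151., 152., 290., 291.])   # large jump at index 3
lat = np.array([-15., -15.5, -16., 12., 12.5])
indicator = np.array([1, 0, 0, 0, 0])            # 1 marks the start of a track

delta_lon = np.diff(lon)
delta_lat = np.diff(lat)
indicator[np.where(delta_lon > 15)[0] + 1] = 1
indicator[np.where(delta_lat > 5)[0] + 1] = 1
print(indicator)   # [1 0 0 1 0] -> the record at index 3 starts a new track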
def processData(self, restrictToWindfieldDomain=False):
    """
    Process raw data into ASCII files that can be read by the main
    components of the system.

    :param bool restrictToWindfieldDomain: if True, only process data
        within the wind field domain, otherwise, process data from
        across the track generation domain.

    """
    config = ConfigParser()
    config.read(self.configFile)
    self.logger.info("Running %s" % flModuleName())

    if config.has_option('DataProcess', 'InputFile'):
        inputFile = config.get('DataProcess', 'InputFile')

    if config.has_option('DataProcess', 'Source'):
        source = config.get('DataProcess', 'Source')
        self.logger.info('Loading %s dataset', source)
        fn = config.get(source, 'filename')
        path = config.get(source, 'path')
        inputFile = pjoin(path, fn)

    # If input file has no path information, default to tcrm input folder
    if len(os.path.dirname(inputFile)) == 0:
        inputFile = pjoin(self.tcrm_input_dir, inputFile)

    self.logger.info("Processing %s" % inputFile)

    self.source = config.get('DataProcess', 'Source')

    inputData = colReadCSV(self.configFile, inputFile, self.source)

    inputSpeedUnits = config.get(self.source, 'SpeedUnits')
    inputPressureUnits = config.get(self.source, 'PressureUnits')
    inputLengthUnits = config.get(self.source, 'LengthUnits')
    startSeason = config.getint('DataProcess', 'StartSeason')

    indicator = loadData.getInitialPositions(inputData)

    lat = np.array(inputData['lat'], 'd')
    lon = np.mod(np.array(inputData['lon'], 'd'), 360)

    if restrictToWindfieldDomain:
        # Filter the input arrays to only retain the tracks that
        # pass through the windfield domain.
        CD = CalcTrackDomain(self.configFile)
        self.domain = CD.calcDomainFromTracks(indicator, lon, lat)
        domainIndex = self.extractTracks(indicator, lon, lat)
        inputData = inputData[domainIndex]
        indicator = indicator[domainIndex]
        lon = lon[domainIndex]
        lat = lat[domainIndex]

    if self.progressbar is not None:
        self.progressbar.update(0.125)

    # Sort date/time information
    try:
        dt = np.empty(indicator.size, 'f')
        dt[1:] = np.diff(inputData['age'])
    except (ValueError, KeyError):
        try:
            self.logger.info("Filtering input data by season: "
                             "season > %d" % startSeason)
            # Find indices that satisfy minimum season filter
            idx = np.where(inputData['season'] >= startSeason)[0]

            # Filter records:
            inputData = inputData[idx]
            indicator = indicator[idx]
            lon = lon[idx]
            lat = lat[idx]
        except (ValueError, KeyError):
            pass

        year, month, day, hour, minute, datetimes \
            = loadData.parseDates(inputData, indicator)

        # Time between observations:
        dt = loadData.getTimeDelta(year, month, day, hour, minute)

        # Calculate julian days:
        jdays = loadData.julianDays(year, month, day, hour, minute)

    delta_lon = np.diff(lon)
    delta_lat = np.diff(lat)

    # Split into separate tracks if large jump occurs (delta_lon >
    # 15 degrees or delta_lat > 5 degrees) This avoids two tracks
    # being accidentally combined when seasons and track numbers
    # match but basins are different as occurs in the IBTrACS
    # dataset. This problem can also be prevented if the
    # 'tcserialno' column is specified.
    indicator[np.where(delta_lon > 15)[0] + 1] = 1
    indicator[np.where(delta_lat > 5)[0] + 1] = 1

    # Save information required for frequency auto-calculation
    try:
        origin_seasonOrYear = np.array(
            inputData['season'], 'i').compress(indicator)
        header = 'Season'
    except (ValueError, KeyError):
        origin_seasonOrYear = year.compress(indicator)
        header = 'Year'

    flSaveFile(self.origin_year, np.transpose(origin_seasonOrYear),
               header, ',', fmt='%d')

    pressure = np.array(inputData['pressure'], 'd')
    novalue_index = np.where(pressure == sys.maxint)
    pressure = metutils.convert(pressure, inputPressureUnits, "hPa")
    pressure[novalue_index] = sys.maxint

    # Convert any non-physical central pressure values to maximum integer
    # This is required because IBTrACS has a mix of missing value codes
    # (i.e. -999, 0, 9999) in the same global dataset.
    pressure = np.where((pressure < 600) | (pressure > 1100),
                        sys.maxint, pressure)

    if self.progressbar is not None:
        self.progressbar.update(0.25)

    try:
        vmax = np.array(inputData['vmax'], 'd')
    except (ValueError, KeyError):
        self.logger.warning("No max wind speed data")
        vmax = np.empty(indicator.size, 'f')
    else:
        novalue_index = np.where(vmax == sys.maxint)
        vmax = metutils.convert(vmax, inputSpeedUnits, "mps")
        vmax[novalue_index] = sys.maxint

    assert lat.size == indicator.size
    assert lon.size == indicator.size
    assert pressure.size == indicator.size
    #assert vmax.size == indicator.size

    try:
        rmax = np.array(inputData['rmax'])
        novalue_index = np.where(rmax == sys.maxint)
        rmax = metutils.convert(rmax, inputLengthUnits, "km")
        rmax[novalue_index] = sys.maxint

        self._rmax(rmax, indicator)
        self._rmaxRate(rmax, dt, indicator)
    except (ValueError, KeyError):
        self.logger.warning("No rmax data available")

    if self.ncflag:
        self.data['index'] = indicator

    # ieast : parameter used in latLon2Azi
    # FIXME: should be a config setting describing the input data.
    ieast = 1

    # Determine the index of initial cyclone observations, excluding
    # those cyclones that have only one observation. This is used
    # for calculating initial bearing and speed
    indicator2 = np.where(indicator > 0, 1, 0)
    initIndex = np.concatenate([np.where(np.diff(indicator2) == -1, 1, 0),
                                [0]])

    # Calculate the bearing and distance (km) of every two
    # consecutive records using ll2azi
    bear_, dist_ = maputils.latLon2Azi(lat, lon, ieast, azimuth=0)
    assert bear_.size == indicator.size - 1
    assert dist_.size == indicator.size - 1
    bear = np.empty(indicator.size, 'f')
    bear[1:] = bear_
    dist = np.empty(indicator.size, 'f')
    dist[1:] = dist_

    self._lonLat(lon, lat, indicator, initIndex)
    self._bearing(bear, indicator, initIndex)
    self._bearingRate(bear, dt, indicator)
    if self.progressbar is not None:
        self.progressbar.update(0.375)
    self._speed(dist, dt, indicator, initIndex)
    self._speedRate(dist, dt, indicator)
    self._pressure(pressure, indicator)
    self._pressureRate(pressure, dt, indicator)
    self._windSpeed(vmax)

    try:
        self._frequency(year, indicator)
        self._juliandays(jdays, indicator, year)
    except (ValueError, KeyError):
        pass

    self.logger.info("Completed %s" % flModuleName())
    if self.progressbar is not None:
        self.progressbar.update(0.5)
def run(configFile, callback=None):
    """
    Run the wind field calculations.

    :param str configFile: path to a configuration file.
    :param func callback: optional callback function to track progress.

    """

    log.info('Loading wind field calculation settings')

    # Get configuration
    config = ConfigParser()
    config.read(configFile)

    profileType = config.get('WindfieldInterface', 'profileType')
    windFieldType = config.get('WindfieldInterface', 'windFieldType')
    beta = config.getfloat('WindfieldInterface', 'beta')
    beta1 = config.getfloat('WindfieldInterface', 'beta1')
    beta2 = config.getfloat('WindfieldInterface', 'beta2')
    thetaMax = config.getfloat('WindfieldInterface', 'thetaMax')
    margin = config.getfloat('WindfieldInterface', 'Margin')
    resolution = config.getfloat('WindfieldInterface', 'Resolution')
    domain = config.get('WindfieldInterface', 'Domain')

    outputPath = config.get('Output', 'Path')
    windfieldPath = pjoin(outputPath, 'windfield')
    trackPath = pjoin(outputPath, 'tracks')

    gridLimit = None
    if config.has_option('Region', 'gridLimit'):
        gridLimit = config.geteval('Region', 'gridLimit')

    if config.has_option('WindfieldInterface', 'gridLimit'):
        gridLimit = config.geteval('WindfieldInterface', 'gridLimit')

    if config.getboolean('Timeseries', 'Extract', fallback=False):
        from Utilities.timeseries import Timeseries
        ts = Timeseries(configFile)
        timestepCallback = ts.extract
    else:
        timestepCallback = None

    multipliers = None
    if config.has_option('Input', 'Multipliers'):
        multipliers = config.get('Input', 'Multipliers')

    thetaMax = math.radians(thetaMax)

    # Attempt to start the track generator in parallel
    global MPI
    MPI = attemptParallel()
    comm = MPI.COMM_WORLD

    log.info('Running windfield generator')

    wfg = WindfieldGenerator(config=config,
                             margin=margin,
                             resolution=resolution,
                             profileType=profileType,
                             windFieldType=windFieldType,
                             beta=beta,
                             beta1=beta1,
                             beta2=beta2,
                             thetaMax=thetaMax,
                             gridLimit=gridLimit,
                             domain=domain,
                             multipliers=multipliers,
                             windfieldPath=windfieldPath)

    log.info(f'Dumping gusts to {windfieldPath}')

    # Get the trackfile names and count
    files = os.listdir(trackPath)
    trackfiles = [pjoin(trackPath, f) for f in files if f.startswith('tracks')]
    nfiles = len(trackfiles)

    log.info('Processing {0} track files in {1}'.format(nfiles, trackPath))

    # Do the work
    comm.barrier()

    wfg.dumpGustsFromTrackfiles(trackfiles, windfieldPath, timestepCallback)
    try:
        ts.shutdown()
    except NameError:
        pass

    comm.barrier()

    log.info('Completed windfield generator')
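# Hedged usage sketch: run() is normally driven from TCRM's top-level scripts,
# but calling it directly only needs a configuration file path. The file name
# below is hypothetical, and attemptParallel() appears to fall back to a
# serial stub when mpi4py/MPI is unavailable, so this should also work on a
# single process.
if __name__ == '__main__':
    run('example.ini')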
def run(configFile, callback=None):
    """
    Run the wind field calculations.

    :param str configFile: path to a configuration file.
    :param func callback: optional callback function to track progress.

    """

    log.info('Loading wind field calculation settings')

    # Get configuration
    config = ConfigParser()
    config.read(configFile)

    profileType = config.get('WindfieldInterface', 'profileType')
    windFieldType = config.get('WindfieldInterface', 'windFieldType')
    beta = config.getfloat('WindfieldInterface', 'beta')
    beta1 = config.getfloat('WindfieldInterface', 'beta1')
    beta2 = config.getfloat('WindfieldInterface', 'beta2')
    thetaMax = config.getfloat('WindfieldInterface', 'thetaMax')
    margin = config.getfloat('WindfieldInterface', 'Margin')
    resolution = config.getfloat('WindfieldInterface', 'Resolution')
    domain = config.get('WindfieldInterface', 'Domain')

    outputPath = config.get('Output', 'Path')
    windfieldPath = pjoin(outputPath, 'windfield')
    trackPath = pjoin(outputPath, 'tracks')

    gridLimit = None
    if config.has_option('Region', 'gridLimit'):
        gridLimit = config.geteval('Region', 'gridLimit')

    if config.has_option('WindfieldInterface', 'gridLimit'):
        gridLimit = config.geteval('WindfieldInterface', 'gridLimit')

    if config.has_section('Timeseries'):
        if config.has_option('Timeseries', 'Extract'):
            if config.getboolean('Timeseries', 'Extract'):
                from Utilities.timeseries import Timeseries
                log.debug("Timeseries data will be extracted")
                ts = Timeseries(configFile)
                timestepCallback = ts.extract
            else:
                def timestepCallback(*args):
                    """Dummy timestepCallback function"""
                    pass
    else:
        def timestepCallback(*args):
            """Dummy timestepCallback function"""
            pass

    thetaMax = math.radians(thetaMax)

    # Attempt to start the track generator in parallel
    global pp
    pp = attemptParallel()

    log.info('Running windfield generator')

    wfg = WindfieldGenerator(config=config,
                             margin=margin,
                             resolution=resolution,
                             profileType=profileType,
                             windFieldType=windFieldType,
                             beta=beta,
                             beta1=beta1,
                             beta2=beta2,
                             thetaMax=thetaMax,
                             gridLimit=gridLimit,
                             domain=domain)

    msg = 'Dumping gusts to %s' % windfieldPath
    log.info(msg)

    # Get the trackfile names and count
    files = os.listdir(trackPath)
    trackfiles = [pjoin(trackPath, f) for f in files if f.startswith('tracks')]
    nfiles = len(trackfiles)

    def progressCallback(i):
        """Define the callback function"""
        callback(i, nfiles)

    msg = 'Processing %d track files in %s' % (nfiles, trackPath)
    log.info(msg)

    # Do the work
    pp.barrier()

    wfg.dumpGustsFromTrackfiles(trackfiles, windfieldPath, timestepCallback)

    try:
        ts.shutdown()
    except NameError:
        pass

    pp.barrier()

    log.info('Completed windfield generator')
def run(configFile, callback=None):
    """
    Run the wind field calculations.

    :param str configFile: path to a configuration file.
    :param func callback: optional callback function to track progress.

    """

    log.info('Loading wind field calculation settings')

    # Get configuration
    config = ConfigParser()
    config.read(configFile)

    outputPath = config.get('Output', 'Path')
    profileType = config.get('WindfieldInterface', 'profileType')
    windFieldType = config.get('WindfieldInterface', 'windFieldType')
    beta = config.getfloat('WindfieldInterface', 'beta')
    beta1 = config.getfloat('WindfieldInterface', 'beta1')
    beta2 = config.getfloat('WindfieldInterface', 'beta2')
    thetaMax = config.getfloat('WindfieldInterface', 'thetaMax')
    margin = config.getfloat('WindfieldInterface', 'Margin')
    resolution = config.getfloat('WindfieldInterface', 'Resolution')
    domain = config.get('WindfieldInterface', 'Domain')

    windfieldPath = pjoin(outputPath, 'windfield')
    trackPath = pjoin(outputPath, 'tracks')
    windfieldFormat = 'gust-%i-%04d.nc'

    gridLimit = None
    if config.has_option('Region', 'gridLimit'):
        gridLimit = config.geteval('Region', 'gridLimit')

    if config.has_option('WindfieldInterface', 'gridLimit'):
        gridLimit = config.geteval('WindfieldInterface', 'gridLimit')

    if config.has_section('Timeseries'):
        if config.has_option('Timeseries', 'Extract'):
            if config.getboolean('Timeseries', 'Extract'):
                from Utilities.timeseries import Timeseries
                log.debug("Timeseries data will be extracted")
                ts = Timeseries(configFile)
                timestepCallback = ts.extract
            else:
                def timestepCallback(*args):
                    """Dummy timestepCallback function"""
                    pass

    thetaMax = math.radians(thetaMax)

    # Attempt to start the track generator in parallel
    global pp
    pp = attemptParallel()

    log.info('Running windfield generator')

    wfg = WindfieldGenerator(config=config,
                             margin=margin,
                             resolution=resolution,
                             profileType=profileType,
                             windFieldType=windFieldType,
                             beta=beta,
                             beta1=beta1,
                             beta2=beta2,
                             thetaMax=thetaMax,
                             gridLimit=gridLimit,
                             domain=domain)

    msg = 'Dumping gusts to %s' % windfieldPath
    log.info(msg)

    # Get the trackfile names and count
    files = os.listdir(trackPath)
    trackfiles = [pjoin(trackPath, f) for f in files if f.startswith('tracks')]
    nfiles = len(trackfiles)

    def progressCallback(i):
        """Define the callback function"""
        callback(i, nfiles)

    msg = 'Processing %d track files in %s' % (nfiles, trackPath)
    log.info(msg)

    # Do the work
    pp.barrier()

    wfg.dumpGustsFromTrackfiles(trackfiles, windfieldPath, windfieldFormat,
                                progressCallback, timestepCallback)

    try:
        ts.shutdown()
    except NameError:
        pass

    pp.barrier()

    log.info('Completed windfield generator')
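# Illustrative [WindfieldInterface] settings read by the run() variants above.
# The option names match the config.get/getfloat calls; the values are
# examples only and should be taken from the project's default configuration
# rather than from this sketch.
#
# [WindfieldInterface]
# profileType = powell
# windFieldType = kepert
# beta = 1.5
# beta1 = 1.5
# beta2 = 1.4
# thetaMax = 70.0
# Margin = 2.0
# Resolution = 0.05
# Domain = bounded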