def historic(self): """Load historic data and calculate histogram""" log.info("Processing historic track records") config = ConfigParser() config.read(self.configFile) inputFile = config.get('DataProcess', 'InputFile') if len(os.path.dirname(inputFile)) == 0: inputFile = pjoin(self.inputPath, inputFile) source = config.get('DataProcess', 'Source') try: tracks = loadTrackFile(self.configFile, inputFile, source) except (TypeError, IOError, ValueError): log.critical("Cannot load historical track file: {0}".\ format(inputFile)) raise else: startYr = 9999 endYr = 0 for t in tracks: startYr = min(startYr, min(t.Year)) endYr = max(endYr, max(t.Year)) numYears = endYr - startYr log.info("Range of years: %d - %d" % (startYr, endYr)) try: self.hist = self._calculate(tracks) #self.hist = self._calculate(tracks) / numYears except (ValueError): log.critical( "KDE error: The number of observations must be larger than the number of variables" ) raise
def historic(self): """ Load historic data and calculate histogram. Note that the input historical data is filtered by year when it's loaded in `interpolateTracks.parseTracks()`. The timestep to interpolate to is set to match that of the synthetic event set (normally set to 1 hour). """ config = ConfigParser() config.read(self.configFile) inputFile = config.get('DataProcess', 'InputFile') if len(os.path.dirname(inputFile)) == 0: inputFile = pjoin(self.inputPath, inputFile) source = config.get('DataProcess', 'Source') try: tracks = loadTrackFile(self.configFile, inputFile, source) except (TypeError, IOError, ValueError): log.critical("Cannot load historical track file: {0}".\ format(inputFile)) raise else: startYr = 9999 endYr = 0 for t in tracks: startYr = min(startYr, min(t.Year)) endYr = max(endYr, max(t.Year)) numYears = endYr - startYr self.hist = self.calculate(tracks) / numYears
def historic(self): """Load historic data and calculate histogram""" config = ConfigParser() config.read(self.configFile) inputFile = config.get('DataProcess', 'InputFile') if len(os.path.dirname(inputFile)) == 0: inputFile = pjoin(self.inputPath, inputFile) source = config.get('DataProcess', 'Source') try: tracks = loadTrackFile(self.configFile, inputFile,source) except (TypeError, IOError, ValueError): log.critical("Cannot load historical track file: {0}".format(inputFile)) raise else: startYr = 9999 endYr = 0 for t in tracks: startYr = min(startYr, min(t.Year)) endYr = max(endYr, max(t.Year)) numYears = endYr - startYr log.info("Range of years: %d - %d" % (startYr, endYr)) self.hist = self._calculate(tracks) / numYears
def parseTracks(configFile, trackFile, source, delta, outputFile=None,
                interpolation_type=None):
    """
    Load a track dataset, then interpolate to some time delta (given in
    hours). Events with only a single record are not altered.

    :type  configFile: string
    :param configFile: Configuration file containing settings that
                       describe the data source.

    :type  trackFile: string
    :param trackFile: Path to the input data source.

    :type  source: string
    :param source: Name of the data source. `configFile` must have a
                   corresponding section which contains options that
                   describe the data format.

    :type  delta: float
    :param delta: Time difference to interpolate the dataset to. Must be
                  positive.

    :type  outputFile: string
    :param outputFile: Path to the destination of output, if it is to be
                       saved.

    :returns: A :class:`list` of :class:`Track` objects containing the
              interpolated track data.

    """
    LOG.info("Interpolating tracks from {0}".format(trackFile))
    if delta <= 0.0:
        raise ValueError("Time step for interpolation must be positive")

    if trackFile.endswith("nc"):
        from Utilities.track import ncReadTrackData
        tracks = ncReadTrackData(trackFile)
    else:
        tracks = loadTrackFile(configFile, trackFile, source)

    results = []

    for track in tracks:
        if len(track.data) == 1:
            results.append(track)
        else:
            newtrack = interpolate(track, delta, interpolation_type)
            results.append(newtrack)

    if outputFile:
        # Save data to file:
        ncSaveTracks(outputFile, results)

    return results

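# Hedged usage sketch for parseTracks above; the config, track file and
# source names are placeholder assumptions. A 1-hour delta matches the
# timestep normally used for the synthetic event set.
tracks = parseTracks('tcrm.ini', 'Allstorms.ibtracs.csv', 'IBTRACS',
                     delta=1.0, outputFile='tracks.interp.nc')
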
def parseTracks(configFile, trackFile, source, delta, outputFile=None,
                interpolation_type=None):
    """
    Load a track dataset, then interpolate to some time delta (given in
    hours). Events with only a single record are not altered.

    :type  configFile: string
    :param configFile: Configuration file containing settings that
                       describe the data source.

    :type  trackFile: string
    :param trackFile: Path to the input data source.

    :type  source: string
    :param source: Name of the data source. `configFile` must have a
                   corresponding section which contains options that
                   describe the data format.

    :type  delta: float
    :param delta: Time difference to interpolate the dataset to. Must be
                  positive.

    :type  outputFile: string
    :param outputFile: Path to the destination of output, if it is to be
                       saved.

    :returns: A :class:`list` of :class:`Track` objects containing the
              interpolated track data.

    """
    if delta <= 0.0:
        raise ValueError("Time step for interpolation must be positive")

    tracks = loadTrackFile(configFile, trackFile, source)

    results = []

    for track in tracks:
        if len(track.data) == 1:
            results.append(track)
        else:
            newtrack = interpolate(track, delta, interpolation_type)
            results.append(newtrack)

    if outputFile:
        # Save data to file:
        saveTracks(results, outputFile)

    return results

def main(configFile):
    from Utilities.loadData import loadTrackFile
    from Utilities.config import ConfigParser
    from os.path import join as pjoin, normpath, dirname
    baseDir = normpath(pjoin(dirname(__file__), '..'))
    inputPath = pjoin(baseDir, 'input')
    config = ConfigParser()
    config.read(configFile)

    inputFile = config.get('DataProcess', 'InputFile')
    source = config.get('DataProcess', 'Source')

    gridLimit = config.geteval('Region', 'gridLimit')

    xx = np.arange(gridLimit['xMin'], gridLimit['xMax'] + .1, 0.1)
    yy = np.arange(gridLimit['yMin'], gridLimit['yMax'] + .1, 0.1)

    xgrid, ygrid = np.meshgrid(xx, yy)

    if len(dirname(inputFile)) == 0:
        inputFile = pjoin(inputPath, inputFile)

    try:
        tracks = loadTrackFile(configFile, inputFile, source)
    except (TypeError, IOError, ValueError):
        log.critical("Cannot load historical track file: {0}".
                     format(inputFile))
        raise

    title = source
    outputPath = config.get('Output', 'Path')
    outputPath = pjoin(outputPath, 'plots', 'stats')
    outputFile = pjoin(outputPath, 'tctracks.png')

    map_kwargs = dict(llcrnrlon=xgrid.min(),
                      llcrnrlat=ygrid.min(),
                      urcrnrlon=xgrid.max(),
                      urcrnrlat=ygrid.max(),
                      projection='merc',
                      resolution='i')

    figure = TrackMapFigure()
    figure.add(tracks, xgrid, ygrid, title, map_kwargs)
    figure.plot()
    saveFigure(figure, outputFile)

def historic(self): """Load historic data and calculate histogram""" log.info("Processing historical pressure distributions") config = ConfigParser() config.read(self.configFile) inputFile = config.get('DataProcess', 'InputFile') source = config.get('DataProcess', 'Source') if len(os.path.dirname(inputFile)) == 0: inputFile = pjoin(self.inputPath, inputFile) try: tracks = loadTrackFile(self.configFile, inputFile, source) except (TypeError, IOError, ValueError): log.critical("Cannot load historical track file: {0}".format(inputFile)) raise else: self.histMean, self.histMin, \ self.histMax, self.histMed = self.calculate(tracks) self.histMinCPDist, self.histMinCP = self.calcMinPressure(tracks)
def historic(self): """Load historic data and calculate histogram""" log.info("Processing historical pressure distributions") config = ConfigParser() config.read(self.configFile) inputFile = config.get('DataProcess', 'InputFile') source = config.get('DataProcess', 'Source') if len(os.path.dirname(inputFile)) == 0: inputFile = pjoin(self.inputPath, inputFile) try: tracks = loadTrackFile(self.configFile, inputFile, source) except (TypeError, IOError, ValueError): log.critical( "Cannot load historical track file: {0}".format(inputFile)) raise else: self.histMean, self.histMin, \ self.histMax, self.histMed = self.calculate(tracks) self.histMinCPDist, self.histMinCP = self.calcMinPressure(tracks)
def historic(self): """Calculate historical rates of landfall""" log.info("Processing landfall rates of historical tracks") config = ConfigParser() config.read(self.configFile) inputFile = config.get('DataProcess', 'InputFile') source = config.get('DataProcess', 'Source') timestep = config.getfloat('TrackGenerator', 'Timestep') if len(os.path.dirname(inputFile)) == 0: inputFile = pjoin(self.inputPath, inputFile) try: tracks = loadTrackFile(self.configFile, inputFile, source) except (TypeError, IOError, ValueError): log.critical("Cannot load historical track file: {0}".format(inputFile)) raise else: self.historicLandfall, self.historicOffshore = self.processTracks(tracks) return
def historic(self): """Calculate historical rates of landfall""" LOG.info("Processing landfall rates of historical tracks") config = ConfigParser() config.read(self.configFile) inputFile = config.get('DataProcess', 'InputFile') source = config.get('DataProcess', 'Source') if len(os.path.dirname(inputFile)) == 0: inputFile = pjoin(self.inputPath, inputFile) try: tracks = loadTrackFile(self.configFile, inputFile, source) except (TypeError, IOError, ValueError): LOG.critical("Cannot load historical track file: {0}".\ format(inputFile)) raise else: self.historicLandfall, self.historicOffshore = \ self.processTracks(tracks) return
        'lon': lambda s: fmtlon(s),
        'vmax': lambda s: float(s.strip()),
        'pcentre': lambda s: float(s.strip()),
        'poci': lambda s: float(s.strip()),
        'rmax': lambda s: float(s.strip()) * 1.852
        },
    "autostrip": True
}

source = "BDECK"
config_file = "B:/CHARS/B_Wind/data/derived/tc/events/bsh132016/TCDebbie.ini"

for f in os.listdir(inputPath):
    inputFile = pjoin(inputPath, f)
    data = np.genfromtxt(inputFile, **bdeck)
    print(inputFile)
    header = 'basin,num,date,lat,lon,vmax,pcentre,poci,rmax,name'
    fmt = '%s,%i,%s,%8.2f,%8.2f,%6.1f,%7.1f,%7.1f,%6.2f,%s'
    outputFile = pjoin(outputPath, f)
    np.savetxt(outputFile, data, fmt=fmt, delimiter=",", header=header)

    fname, ext = splitext(outputFile)
    pt_output_file = fname + '_pt.shp'
    line_output_file = fname + '_line.shp'
    dissolve_output_file = fname + '_dissolve.shp'

    tracks = loadTrackFile(config_file, outputFile, source,
                           calculateWindSpeed=False)

    tracks2point(tracks, pt_output_file)
    tracks2line(tracks, line_output_file)
    tracks2line(tracks, dissolve_output_file, dissolve=True)

def interpolateTrack(configFile, trackFile, source, delta=0.1,
                     interpolation_type=None):
    """
    Interpolate the data in a track file to the time interval delta hours.

    :param str configFile: Configuration file that contains information on
        the source format of the track file.
    :param str trackFile: Path to csv format track file.
    :param str source: Name of the data source. There must be a
        corresponding section in the configuration file that contains the
        description of the data.
    :param float delta: Time interval in hours to interpolate to. Default
        is 0.1 hours.
    :param str interpolation_type: Optionally use Akima or linear
        interpolation for the track positions. Default is linear
        1-dimensional spline interpolation.

    :returns: 10 arrays (id, time, date, lon, lat, bearing, forward speed,
        central pressure, environmental pressure and radius to maximum
        wind) that describe the track at ``delta`` hours intervals.

    """
    logger = logging.getLogger()
    indicator, year, month, day, hour, minute, lon, lat, \
        pressure, speed, bearing, windspeed, rmax, penv = \
        loadTrackFile(configFile, trackFile, source)

    # Time between observations:
    day_ = [datetime.datetime(year[i], month[i], day[i], hour[i], minute[i])
            for i in range(year.size)]
    time_ = date2num(day_)
    dt_ = 24.0 * numpy.diff(time_)
    dt = numpy.empty(hour.size, 'f')
    dt[1:] = dt_

    # At this stage, convert all times to a time after initial observation:
    timestep = 24.0 * (time_ - time_[0])

    newtime = numpy.arange(timestep[0], timestep[-1] + .01, delta)
    newtime[-1] = timestep[-1]
    _newtime = (newtime / 24.) + time_[0]
    newdates = num2date(_newtime)

    nid = numpy.ones(newtime.size)

    logger.info("Interpolating data...")
    if len(indicator) <= 2:
        # Use linear interpolation only (only a start and end point given):
        nLon = scint.interp1d(timestep, lon, kind='linear')(newtime)
        nLat = scint.interp1d(timestep, lat, kind='linear')(newtime)
        npCentre = scint.interp1d(timestep, pressure, kind='linear')(newtime)
        npEnv = scint.interp1d(timestep, penv, kind='linear')(newtime)
        nrMax = scint.interp1d(timestep, rmax, kind='linear')(newtime)
    else:
        if interpolation_type == 'akima':
            # Use the Akima interpolation method:
            try:
                import _akima
            except ImportError:
                logger.exception(("Akima interpolation module unavailable - "
                                  "default to scipy.interpolate"))
                nLon = scint.splev(newtime, scint.splrep(timestep, lon, s=0),
                                   der=0)
                nLat = scint.splev(newtime, scint.splrep(timestep, lat, s=0),
                                   der=0)
            else:
                nLon = _akima.interpolate(timestep, lon, newtime)
                nLat = _akima.interpolate(timestep, lat, newtime)
        elif interpolation_type == 'linear':
            nLon = scint.interp1d(timestep, lon, kind='linear')(newtime)
            nLat = scint.interp1d(timestep, lat, kind='linear')(newtime)
        else:
            nLon = scint.splev(newtime, scint.splrep(timestep, lon, s=0),
                               der=0)
            nLat = scint.splev(newtime, scint.splrep(timestep, lat, s=0),
                               der=0)

        npCentre = scint.interp1d(timestep, pressure, kind='linear')(newtime)
        npEnv = scint.interp1d(timestep, penv, kind='linear')(newtime)
        nrMax = scint.interp1d(timestep, rmax, kind='linear')(newtime)

    bear_, dist_ = maputils.latLon2Azi(nLat, nLon, 1, azimuth=0)
    nthetaFm = numpy.zeros(newtime.size, 'f')
    nthetaFm[:-1] = bear_
    nthetaFm[-1] = bear_[-1]
    dist = numpy.zeros(newtime.size, 'f')
    dist[:-1] = dist_
    dist[-1] = dist_[-1]
    nvFm = dist / delta

    return nid, newtime, newdates, nLon, nLat, nthetaFm, \
        nvFm, npCentre, npEnv, nrMax

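# Hedged usage sketch for interpolateTrack above; the config, track file
# and source names are placeholders. The unpacking follows the documented
# return order of the 10 arrays.
(nid, newtime, newdates, nLon, nLat, nthetaFm,
 nvFm, npCentre, npEnv, nrMax) = interpolateTrack('tcrm.ini', 'tracks.csv',
                                                  'IBTRACS', delta=0.1,
                                                  interpolation_type='akima')
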
output_path = dirname(realpath(track_file))
filename, ext = splitext(track_file)
pt_output_file = filename + '_pt.shp'
line_output_file = filename + '_line.shp'
dissolve_output_file = filename + '_dissolve.shp'

if track_file.endswith(".nc"):
    from Utilities.track import ncReadTrackData
    tracks = ncReadTrackData(track_file)
    netcdf_format = True
elif track_file.endswith(".csv"):
    tracks = loadTrackFile(config_file, track_file, source,
                           calculateWindSpeed=True)
    netcdf_format = False
else:
    raise ValueError("format of {} is not recognizable".format(track_file))

tracks2point(tracks, pt_output_file, netcdf_format=netcdf_format)
tracks2line(tracks, line_output_file, netcdf_format=netcdf_format)
tracks2line(tracks, dissolve_output_file, dissolve=True,
            netcdf_format=netcdf_format)
LOG.info("Completed tracks2shp")

verbose = config.getboolean('Logging', 'Verbose')
datestamp = config.getboolean('Logging', 'Datestamp')

if args.verbose:
    verbose = True

flStartLog(logfile, logLevel, verbose, datestamp)

if args.file:
    track_file = args.file
else:
    track_file = config.get('DataProcess', 'InputFile')

if args.source:
    source = args.source
else:
    source = config.get('DataProcess', 'Source')

output_path = dirname(realpath(track_file))
filename, ext = splitext(track_file)
pt_output_file = filename + '_pt.shp'
line_output_file = filename + '_line.shp'
dissolve_output_file = filename + '_dissolve.shp'

tracks = loadTrackFile(config_file, track_file, source)

tracks2point(tracks, pt_output_file)
tracks2line(tracks, line_output_file)
tracks2line(tracks, dissolve_output_file, dissolve=True)
LOG.info("Completed tracks2shp")