def paramPerPage(config, dssFilePath):
    """
    Plot timeseries, 1 location per plot, 1 parameter per page.

    Also adds specified thresholds.
    """
    plotted = 0  # Number of plots exported
    messages = []
    outputFolder = tbu.relativeFolder(config['output_folder'], config['config_file'])
    minDate = HecTime(config['period']['start'])
    maxDate = HecTime(config['period']['end'])
    dssFile = HecDss.open(dssFilePath, str(minDate), str(maxDate))

    for param, paramConfig in config['params'].iteritems():
        plots = []
        dataPaths = [
            '/{}/{}/{}//{}/{}/'.format(config['site'].upper(),
                                       loc.upper(),
                                       param.upper(),
                                       config['interval'].upper(),
                                       config['version'].upper())
            for loc in config['locations']
        ]
        datasets = [dssFile.get(dp) for dp in dataPaths]
        datasets = [d for d in datasets if d.numberValues > 0]
        if not datasets:
            messages.append("No data for parameter '{}'.".format(param))
            continue

        for dataset in datasets:
            plot = Plot.newPlot(param)
            layout = Plot.newPlotLayout()
            layout.setHasLegend(0)
            vp = layout.addViewport()
            vp.addCurve('Y1', dataset)
            plot.configurePlotLayout(layout)
            plots.append(plot)

        # Format normal data curves
        ymin, ymax = float('+inf'), float('-inf')
        for dataset, plot in zip(datasets, plots):
            plot.setPlotTitleText("{0.parameter} at {0.location}".format(dataset))
            plot.setPlotTitleVisible(1)
            plot.setLocation(-10000, -10000)
            plot.setSize(config['width'], config['height'])
            plot.setLegendLabelText(dataset, dataset.location)
            panelProp = plot.getPlotpanel().getProperties()
            panelProp.setViewportSpaceSize(0)

            curve = plot.getCurve(dataset)
            curve.setLineColor('{}, {}, {}'.format(*config['line']['colour']))
            curve.setLineWidth(config['line']['width'])
            if config['line']['markers']:
                curve.setSymbolsVisible(1)
                curve.setSymbolType('Circle')
                curve.setSymbolLineColor('{}, {}, {}'.format(*config['line']['colour']))
                curve.setSymbolFillColor('{}, {}, {}'.format(*config['line']['colour']))

            vp = plot.getViewport(dataset.fullName)
            vp.setMinorGridXVisible(1)
            vp.getAxis('Y1').setLabel(dataset.units)
            if _paramScale(param, config) == 'log':
                # This throws a warning message if y-values <= 0.
                # We can't catch this as an exception.
                vp.setLogarithmic('Y1')
            # Horizontal lines
            thresholds = _get_thresholds(dataset, dssFilePath, config)
            for marker in _thresholdMarkers(thresholds):
                vp.addAxisMarker(marker)
            # Vertical lines
            if _baselinePeriod(dataset.location, config):
                vp.addAxisMarker(_baselineMarker(dataset.location, config))
            ymin = min(ymin, vp.getAxis('Y1').getScaleMin())
            ymax = max(ymax, vp.getAxis('Y1').getScaleMax())

        for dataset, plot in zip(datasets, plots):
            plot.showPlot()
            plot.setSize(config['width'], config['height'])
            # Set all y-axes same limits
            vp = plot.getViewports()[0]
            vp.getAxis('Y1').setScaleLimits(ymin, ymax)
            vp.getAxis('X1').setScaleLimits(minDate.value(), maxDate.value())
            plot.saveToJpeg(os.path.join(outputFolder,
                                         "TH plot-{0.parameter}-{0.version}-{0.location}"
                                         .format(dataset)),
                            95)
            plot.close()
            plotted += 1

    dssFile.done()
    return plotted, messages
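# A minimal sketch of the configuration dictionary the plotting functions above and
# below read. Every key shown is referenced by paramPerPage()/onePerParam(); the site,
# locations and values themselves are purely illustrative assumptions.
_EXAMPLE_PLOT_CONFIG = {
    'config_file': 'plots.yml',          # passed to tbu.relativeFolder() to resolve paths
    'output_folder': 'output',
    'site': 'SITE',                      # DSS A-part
    'locations': ['LOC1', 'LOC2'],       # DSS B-parts
    'interval': '15MIN',                 # DSS E-part
    'version': 'RAW',                    # DSS F-part
    'period': {'start': '01Jan2020 0000', 'end': '31Dec2020 2400'},
    'width': 800,                        # plot size in pixels
    'height': 600,
    'line': {'colour': [0, 0, 255], 'width': 1, 'markers': False},
    'params': {                          # one entry per parameter (DSS C-part)
        'FLOW': {'unit': 'm3/s', 'scale': 'log'},
        'PH': {'unit': 'pH', 'scale': 'linear'},
    },
}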
def onePerParam(config, dssFilePath):
    plotted = 0  # Number of plots exported
    messages = []
    outputFolder = tbu.relativeFolder(config['output_folder'], config['config_file'])
    minDate = HecTime(config['period']['start'])
    maxDate = HecTime(config['period']['end'])
    dssFile = HecDss.open(dssFilePath, str(minDate), str(maxDate))
    colours = _coloursByLocation(config)

    for param, paramConfig in config['params'].iteritems():
        plot = Plot.newPlot()
        dataPaths = [
            "/%s/%s/%s//%s/%s/" % (config['site'].upper(),
                                   location.upper(),
                                   param.upper(),
                                   config['interval'].upper(),
                                   config['version'].upper())
            for location in config['locations']
        ]
        datasets = [dssFile.get(p) for p in dataPaths]
        datasets = [d for d in datasets if d.numberValues > 0]
        if not datasets:
            messages.append("No data for parameter '%s'." % param)
            continue

        map(plot.addData, datasets)
        plot.showPlot()
        plot.setPlotTitleText(param)
        plot.setPlotTitleVisible(1)
        plot.setSize(int(config['width']), int(config['height']))

        # We can only access labels and curves at this point
        map(lambda d: plot.getLegendLabel(d).setText(d.location), datasets)

        # Style curves
        for dataset in datasets:
            curve = plot.getCurve(dataset)
            curve.setLineColor('{}, {}, {}'.format(*colours[dataset.location]))
            curve.setLineWidth(config['line']['width'])
            if config['line']['markers']:
                curve.setSymbolsVisible(1)
                curve.setSymbolType('Circle')
                curve.setSymbolLineColor('{}, {}, {}'.format(*colours[dataset.location]))
                curve.setSymbolFillColor('{}, {}, {}'.format(*colours[dataset.location]))

        # Axes scales
        units = set(ds.units for ds in datasets)
        for vp_index, unit in enumerate(units):  # 1 viewport per distinct unit
            viewport = plot.getViewport(vp_index)
            viewport.getAxis("X1").setScaleLimits(minDate.value(), maxDate.value())
            viewport.getAxis("Y1").setLabel(unit)
            viewport.setMinorGridXVisible(1)
            viewport.setMinorGridYVisible(1)
            if paramConfig:
                if paramConfig['scale'].lower() == 'log':
                    # This throws a warning message if y-values <= 0.
                    # We can't catch this as an exception.
                    viewport.setLogarithmic('Y1')
            # Horizontal threshold lines
            thresholds = _get_thresholds(datasets[0], dssFilePath, config)
            for marker in _thresholdMarkers(thresholds):
                viewport.addAxisMarker(marker)

        # Export plot
        plot.saveToJpeg(os.path.join(outputFolder, param + "-" + config['version']), 95)
        plot.close()
        plotted += 1

    dssFile.done()
    return plotted, messages
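# A minimal usage sketch, assuming the module runs inside HEC-DSSVue's Jython 2.x
# environment, which provides the hec.script and hec.heclib classes used above
# (Plot, Constants, HecDss, HecTime). The DSS file name and _EXAMPLE_PLOT_CONFIG are
# illustrative only, and the project-local helpers (tbu, mon, _coloursByLocation,
# _get_thresholds, _thresholdMarkers) must be importable for the call to succeed.
if __name__ == '__main__':
    plotted, messages = onePerParam(_EXAMPLE_PLOT_CONFIG, 'example.dss')
    print "Exported %d plot(s)" % plotted  # Jython 2.x print statement
    for message in messages:
        print message                      # e.g. parameters with no data in the DSS file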
def locationsAcross(config):
    records = []
    for fileName in config['files']:
        importFile = os.path.join(
            tbu.relativeFolder(config['folder'], config['config_file'], createFolder='never'),
            fileName)
        with open(importFile) as f:
            csvReader = csv.reader(f)
            for row in csvReader:
                # Find the row with locations
                try:
                    startCol = tbu.index_ign_case(row, config['rows']['location']['title']) + 1
                    # Dict of {'locationId': columnNo}
                    locationCols = {}
                    for col, cell in enumerate(row[startCol:]):
                        if cell.strip():
                            locationCols[cell.upper()] = col + startCol
                    firstDataCol = min(locationCols.values())
                    break
                except ValueError:
                    continue

            # Date row (for now, just use the first value)
            for row in csvReader:
                try:
                    tbu.index_ign_case(row, config['rows']['date']['title'])
                    sampleDate = tbu.parseDateTime(row[firstDataCol], "12:00:00",
                                                   config['rows']['date']['format'])
                    break
                except ValueError:
                    continue

            # Find data header row
            for row in csvReader:
                # If header row, we must have parameter and unit header
                try:
                    paramCol = tbu.index_ign_case(row, config['columns']['parameter']['title'])
                    unitCol = tbu.index_ign_case(row, config['columns']['unit']['title'])
                    break
                except ValueError:
                    continue

            # Then actual data
            for row in csvReader:
                try:
                    param = config['mapping'][row[paramCol]]
                    if param in config['params']:
                        for location, col in locationCols.iteritems():
                            value, quality = tbu.parseMeasurement(row[col])
                            if value is not None:
                                record = mon.Record(site=config['site'],
                                                    location=location,
                                                    parameter=param,
                                                    version=config['version'],
                                                    units=config['params'][param]['unit'],
                                                    startTime=sampleDate.value(),
                                                    values=value,
                                                    qualities=quality)
                                records.append(record)
                except KeyError:
                    # Skip if param not in import file
                    pass
    return records
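# Sketch of the spreadsheet layout locationsAcross() expects, reconstructed from the
# parsing logic above. The row/column titles ('Location', 'Date', 'Parameter', 'Unit')
# come from config['rows'] / config['columns']; the IDs and values are illustrative.
#
#   Location  ,       , LOC1 , LOC2 , LOC3    <- locations start one column after the title
#   Date      ,       , 01/02/2020            <- sample date read from the first location column
#   Parameter , Unit                          <- header row giving parameter and unit columns
#   pH        , pH    , 7.1  , 7.3  , 7.0     <- one parameter per row (mapped via
#   EC        , uS/cm , 520  , 540  , 515        config['mapping']), one value per location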
def timeseries(config):
    records = []
    for fileName, loc in config['files'].iteritems():
        importFile = os.path.join(
            tbu.relativeFolder(config['folder'], config['config_file'], createFolder='never'),
            fileName)
        with open(importFile) as f:
            csvReader = csv.reader(f)
            for row in csvReader:
                if len([cell for cell in row if cell in config['mapping']]) < 2:
                    # Not a header row if fewer than 2 parameter headings found
                    continue
                # Parameter columns
                paramCols = {}
                for col, cell in enumerate(row):
                    try:
                        # Map cell onto param. Ignore non-ascii characters.
                        param = config['mapping'][cell.encode('ascii', 'ignore')]
                        # Only use param if in `config['params']`
                        if param in config['params']:
                            paramCols[param] = col
                    except KeyError:
                        # Cell doesn't map onto param
                        pass
                break

            dateCol = timeCol = interval = startTime = None
            # Dict of {'param': [value1, value2, ...]}
            values = defaultdict(list)
            for row in csvReader:
                # Find date and time columns
                if dateCol is None or timeCol is None:
                    for col, cell in enumerate(row):
                        if dateCol is None:
                            try:
                                datetime.strptime(cell, config['date_format'])
                                dateCol = col
                            except ValueError:
                                pass
                        if timeCol is None:
                            try:
                                datetime.strptime(cell, "%H:%M:%S")
                                timeCol = col
                            except ValueError:
                                pass
                # If date and time columns found, we're on a data row
                if dateCol is not None and timeCol is not None:
                    # First row gives start time
                    if startTime is None:
                        startTime = tbu.parseDateTime(row[dateCol], row[timeCol],
                                                      config['date_format']).value()
                    # Second row gives interval
                    elif interval is None:
                        interval = tbu.parseDateTime(row[dateCol], row[timeCol],
                                                     config['date_format']).value() - startTime
                    # In all rows we read all params
                    for param, col in paramCols.iteritems():
                        try:
                            values[param].append(float(row[col]))
                        except ValueError:
                            values[param].append(Constants.UNDEFINED)

            # Check that the interval matches the number of rows and the end date/time
            endTime = tbu.parseDateTime(row[dateCol], row[timeCol],
                                        config['date_format']).value()
            if endTime != startTime + interval * (len(values[param]) - 1):
                raise ValueError("Import file {} does not appear to have a regular interval"
                                 .format(importFile))

            # Shift the times to match proper interval times
            if config['interval_snap']:
                startTime = int(round(startTime / float(interval))) * interval

            for param in paramCols:
                record = mon.Record(site=config['site'],
                                    location=loc,
                                    parameter=param,
                                    version=config['version'],
                                    units=config['params'][param]['unit'],
                                    startTime=startTime,
                                    interval=interval,
                                    values=values[param])
                records.append(record)
    return records
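# Sketch of the logger export layout timeseries() expects, reconstructed from the
# parsing above. Column headings must be keys of config['mapping'] (at least two of
# them identify the header row); dates must match config['date_format']. All values
# below are illustrative.
#
#   Timestamp  , Time     , Temp , Cond    <- header row: >= 2 cells map to parameters
#   01/02/2020 , 09:00:00 , 10.2 , 520     <- first data row sets startTime
#   01/02/2020 , 09:15:00 , 10.3 , 522     <- second data row sets interval (15 min here)
#   01/02/2020 , 09:30:00 , 10.1 , 519     <- later rows must keep that spacing, so that
#   ...                                       lastTime == startTime + interval * (nRows - 1)
#
# With config['interval_snap'] enabled, startTime is rounded to the nearest whole
# multiple of the interval, e.g. (in HecTime minutes, purely for illustration):
#   int(round(547 / 15.0)) * 15  ->  540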
def locationsDown(config):
    records = []
    for fileName in config['files']:
        importFile = os.path.join(
            tbu.relativeFolder(config['folder'], config['config_file'], createFolder='never'),
            fileName)
        with open(importFile) as f:
            csvReader = csv.reader(f)
            for row in csvReader:
                # Find the header row first
                try:
                    # If header row, we must have date and location
                    dateCol = tbu.index_ign_case(row, config['columns']['date']['title'])
                    locationCol = tbu.index_ign_case(row, config['columns']['location']['title'])
                except ValueError:
                    # We're not in a header row, move to next line
                    continue
                # Optional time col
                try:
                    if config['columns']['time']:
                        timeCol = tbu.index_ign_case(row, config['columns']['time']['title'])
                    else:
                        timeCol = None
                except KeyError:
                    timeCol = None
                # Parameter columns
                paramCols = {}
                for col, cell in enumerate(row):
                    try:
                        # Map cell onto param. Ignore non-ascii characters.
                        param = config['mapping'][cell.encode('ascii', 'ignore')]
                        # Only use param if in `config['params']`
                        if param in config['params']:
                            paramCols[param] = col
                    except KeyError:
                        # Cell doesn't map onto param
                        pass
                break

            # Then actual data
            for row in csvReader:
                if len(row[locationCol]) > 0:
                    dateStr = row[dateCol]
                    if timeCol is not None:
                        timeStr = row[timeCol]
                    else:
                        timeStr = "12:00:00"
                    sampleDate = tbu.parseDateTime(dateStr, timeStr,
                                                   config['columns']['date']['format'])
                    for param, col in paramCols.iteritems():
                        value, quality = tbu.parseMeasurement(row[col])
                        if value is not None:
                            record = mon.Record(site=config['site'],
                                                location=row[locationCol],
                                                parameter=param,
                                                version=config['version'],
                                                units=config['params'][param]['unit'],
                                                startTime=sampleDate.value(),
                                                values=value,
                                                qualities=quality)
                            records.append(record)
    return records
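# Sketch of the layout locationsDown() expects, reconstructed from the parsing above.
# The 'Date', 'Time' and 'Location' titles come from config['columns']; parameter
# headings must be keys of config['mapping']; the values are illustrative.
#
#   Date       , Time     , Location , Temp , Cond    <- header row
#   01/02/2020 , 09:30:00 , LOC1     , 10.2 , 520     <- one sample per row; rows with a
#   01/02/2020 , 10:15:00 , LOC2     , 10.4 , 518        blank location cell are skipped
#
# Each measurement cell is split into a (value, quality) pair by tbu.parseMeasurement();
# when no time column is configured, a sample time of 12:00:00 is assumed.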
def onePerParam(config, dssFilePath):
    plotted = 0  # Number of plots exported
    messages = []
    outputFolder = tbu.relativeFolder(config['output_folder'], dssFilePath)
    dssFile = HecDss.open(dssFilePath)
    minDate = HecTime(config['period']['start'])
    maxDate = HecTime(config['period']['end'])
    colours = _coloursByLocation(config)

    for param, paramConfig in config['params'].iteritems():
        thePlot = Plot.newPlot()
        dataPaths = [
            "/%s/%s/%s//%s/%s/" % (config['site'].upper(),
                                   location.upper(),
                                   param.upper(),
                                   config['interval'].upper(),
                                   config['version'].upper())
            for location in config['locations']
        ]
        datasets = [dssFile.get(p, 1) for p in dataPaths]
        datasets = [d for d in datasets if d.numberValues > 0]
        if not datasets:
            messages.append("No data for parameter '%s'." % param)
            continue

        map(thePlot.addData, datasets)
        thePlot.showPlot()
        thePlot.setPlotTitleText(param)
        thePlot.setPlotTitleVisible(1)
        thePlot.setSize(int(config['width']), int(config['height']))

        # We can only access labels and curves at this point
        map(lambda d: thePlot.getLegendLabel(d).setText(d.location), datasets)
        for dataset in datasets:
            curve = thePlot.getCurve(dataset)
            curve.setLineColor("%s, %s, %s" % tuple(colours[dataset.location]))
            curve.setLineWidth(config['line']['width'])

        units = set(ds.units for ds in datasets)
        for vp_index, unit in enumerate(units):  # 1 viewport per distinct unit
            viewport = thePlot.getViewport(vp_index)
            viewport.getAxis("X1").setScaleLimits(minDate.value(), maxDate.value())
            viewport.getAxis("Y1").setLabel(unit)
            viewport.setMinorGridXVisible(1)
            viewport.setMinorGridYVisible(1)
            if paramConfig:
                if paramConfig['scale'].lower() == 'log':
                    # This throws a warning message if y-values <= 0.
                    # We can't catch this as an exception.
                    viewport.setLogarithmic('Y1')

        thePlot.saveToJpeg(os.path.join(outputFolder, config['version'] + "_" + param), 95)
        thePlot.close()
        plotted += 1

    dssFile.done()
    return plotted, messages
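# _coloursByLocation() is project-local and not shown here. From the way its result is
# used above (colours[dataset.location] unpacked into an 'R, G, B' string), it returns
# a dict mapping each configured location to an RGB triple. A hypothetical minimal
# version, cycling a fixed palette rather than reading colours from the config, might
# look like the sketch below (illustrative only, not the project's implementation).
def _coloursByLocationSketch(config):
    """Illustrative only: map each location ID to an (R, G, B) sequence."""
    palette = [(31, 119, 180), (255, 127, 14), (44, 160, 44), (214, 39, 40)]
    colours = {}
    for index, location in enumerate(config['locations']):
        colours[location.upper()] = palette[index % len(palette)]  # cycle the palette
    return colours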