def plot(config, graph_type, group_by, sample_width, weekends_only,
         business_days_only, no_outliers, out_file=None,
         included_channels=None, limit_value=None):
    """Plot the gathered data.

    Loads all stored CSV rows, optionally filters them to weekends or
    business days, builds a plotly figure of the requested *graph_type*
    ('scatter', 'line' or 'box'), and writes it to *out_file* (a default
    HTML filename is derived from graph_type/group_by when omitted).

    Raises:
        RuntimeError: if filtering leaves no data to plot.
        ValueError: if graph_type is not one of the supported types.
    """
    # We import plotly local to the function not to slow down the rest of
    # the program, for example printing the help text. Importing plotly
    # adds 1.4s to execution time.
    import plotly

    rows = csvio.loadAll(config)
    device_name = rows[0][1]
    channel_labels = data.get_labels(rows)
    # Drop the header rows; keep only actual data points.
    rows = rows[data.get_first_data_point_index(rows):]
    if weekends_only:
        rows = data.filter_weekends(rows, True)
    if business_days_only:
        rows = data.filter_weekends(rows, False)
    if not rows:
        raise RuntimeError("After filtering the data for weekends or weekdays "
                           "there was no data left to plot.")
    if graph_type == 'scatter':
        columns = data.get_columns(rows)
        figure = construct_line_or_scatter(channel_labels, columns,
                                           included_channels, device_name,
                                           'markers', limit_value)
    elif graph_type == 'line':
        columns = data.get_columns(rows)
        figure = construct_line_or_scatter(channel_labels, columns,
                                           included_channels, device_name,
                                           'line', limit_value)
    elif graph_type == 'box':
        groups = data.group(rows, group_by, sample_width)
        groups = data.rotate_group_with_time_to_start(groups,
                                                      datetime.time(3, 0))
        figure = construct_box(channel_labels, groups, group_by,
                               included_channels, device_name, no_outliers,
                               limit_value)
    else:
        # Previously an unknown graph_type fell through all branches and
        # crashed later with UnboundLocalError on `figure`; fail early
        # with a clear message instead.
        raise ValueError('Unknown graph type: ' + graph_type)
    if not out_file:
        out_file = graph_type + '-plot_grouped_by_' + group_by + '.html'
    plotly.offline.plot(figure, filename=out_file)
def fetch(config, url):
    """Fetches and stores metrics from Sensor at the URL given.

    Downloads /export.csv from the sensor into a temporary file, validates
    that it looks like a Comet Web Sensor CSV, strips out data points that
    overlap with the newest previously stored file, and writes the result
    to a timestamped CSV in config.data_folder. The temporary file is
    always removed. Exits the process with a non-zero status on
    connection, HTTP, or format errors, and with 0 when there is no new
    data.
    """
    new_path = os.path.join(
        config.data_folder,
        datetime.datetime.now().strftime('%Y-%m-%d_%H:%M:%S.csv'))
    new_temp_path = new_path + 'temp'
    if not url.startswith('http://'):
        url = 'http://' + url
    url += '/export.csv'
    if config.verbose:
        click.echo('Fetching data from ' + url + ' and saving it in '
                   + new_temp_path)
    # The newest existing CSV, if any, is used to deduplicate rows below.
    try:
        previous_path = sorted(glob.glob(config.data_folder + '/*.csv'))[-1]
    except IndexError:
        previous_path = None
    try:
        urllib.request.urlretrieve(url, new_temp_path)
    # HTTPError is a subclass of URLError, so it must be caught first;
    # with the original ordering this branch was unreachable.
    except urllib.error.HTTPError as e:
        # e.code is an int; concatenating it directly to a str raised
        # TypeError instead of printing the error message.
        click.echo('Managed to connect but failed with HTTP Error code: '
                   + str(e.code))
        click.echo(e.reason)
        sys.exit(2)
    except urllib.error.URLError as e:
        click.echo('Failed to establish an HTTP connection.')
        click.echo(e.reason)
        sys.exit(1)
    try:
        new_rows = csvio.loadOne(new_temp_path)
        if not new_rows[0][0] == "Device:":
            click.echo('Managed to connect and fetch data from something, '
                       'but it was not a CSV from a Comet Web Sensor.')
            click.echo((new_rows[0][0]))
            sys.exit(3)
        # Here we'll try to remove overlapping data points with the last
        # file. It gets nasty due to time adjustments done by the sensor.
        if previous_path is not None:
            previous_rows = csvio.loadOne(previous_path)
            data_start = data.get_first_data_point_index(previous_rows)
            time_of_newest_data_in_previous = time.datetime_from_row(
                previous_rows[data_start], 'second')
            filtered_rows = []
            for row in new_rows:
                if data.not_data_point(row):
                    continue
                time_of_row = time.datetime_from_row(row)
                # Keep only rows strictly newer than anything stored.
                if time_of_newest_data_in_previous < time_of_row:
                    filtered_rows.append(row)
            if not filtered_rows:
                if config.verbose:
                    click.echo('No new rows found in fetched data.')
                sys.exit(0)
        else:
            filtered_rows = new_rows
        if config.verbose:
            click.echo('Rewriting treated CSV to: ' + new_path)
        csvio.writeRows(filtered_rows, new_path)
    finally:
        # Always clean up the raw download, even on sys.exit above.
        os.remove(new_temp_path)
# NOTE(review): this is a byte-for-byte duplicate of the fetch() defined
# earlier in this file; the second definition silently shadows the first.
# One of the two copies should be deleted.
def fetch(config, url):
    """Fetches and stores metrics from Sensor at the URL given.

    Downloads /export.csv from the sensor into a temporary file, validates
    that it looks like a Comet Web Sensor CSV, strips out data points that
    overlap with the newest previously stored file, and writes the result
    to a timestamped CSV in config.data_folder. The temporary file is
    always removed. Exits the process with a non-zero status on
    connection, HTTP, or format errors, and with 0 when there is no new
    data.
    """
    new_path = os.path.join(
        config.data_folder,
        datetime.datetime.now().strftime('%Y-%m-%d_%H:%M:%S.csv'))
    new_temp_path = new_path + 'temp'
    if not url.startswith('http://'):
        url = 'http://' + url
    url += '/export.csv'
    if config.verbose:
        click.echo('Fetching data from ' + url + ' and saving it in '
                   + new_temp_path)
    # The newest existing CSV, if any, is used to deduplicate rows below.
    try:
        previous_path = sorted(glob.glob(config.data_folder + '/*.csv'))[-1]
    except IndexError:
        previous_path = None
    try:
        urllib.request.urlretrieve(url, new_temp_path)
    # HTTPError is a subclass of URLError, so it must be caught first;
    # with the original ordering this branch was unreachable.
    except urllib.error.HTTPError as e:
        # e.code is an int; concatenating it directly to a str raised
        # TypeError instead of printing the error message.
        click.echo('Managed to connect but failed with HTTP Error code: '
                   + str(e.code))
        click.echo(e.reason)
        sys.exit(2)
    except urllib.error.URLError as e:
        click.echo('Failed to establish an HTTP connection.')
        click.echo(e.reason)
        sys.exit(1)
    try:
        new_rows = csvio.loadOne(new_temp_path)
        if not new_rows[0][0] == "Device:":
            click.echo('Managed to connect and fetch data from something, '
                       'but it was not a CSV from a Comet Web Sensor.')
            click.echo((new_rows[0][0]))
            sys.exit(3)
        # Here we'll try to remove overlapping data points with the last
        # file. It gets nasty due to time adjustments done by the sensor.
        if previous_path is not None:
            previous_rows = csvio.loadOne(previous_path)
            data_start = data.get_first_data_point_index(previous_rows)
            time_of_newest_data_in_previous = time.datetime_from_row(
                previous_rows[data_start], 'second')
            filtered_rows = []
            for row in new_rows:
                if data.not_data_point(row):
                    continue
                time_of_row = time.datetime_from_row(row)
                # Keep only rows strictly newer than anything stored.
                if time_of_newest_data_in_previous < time_of_row:
                    filtered_rows.append(row)
            if not filtered_rows:
                if config.verbose:
                    click.echo('No new rows found in fetched data.')
                sys.exit(0)
        else:
            filtered_rows = new_rows
        if config.verbose:
            click.echo('Rewriting treated CSV to: ' + new_path)
        csvio.writeRows(filtered_rows, new_path)
    finally:
        # Always clean up the raw download, even on sys.exit above.
        os.remove(new_temp_path)