Example #1
def loadAll(config):
    """Load every CSV in the data folder into a single, deduplicated,
    time-sorted list of rows."""
    rows = []
    reader = None
    for path in sorted(glob.iglob(config.data_folder + '/*.csv')):
        with codecs.open(path, 'r', encoding='latin1') as in_file:
            reader = csv.reader(in_file)
            for row in reader:
                if row:
                    if not data.not_data_point(row):
                        # Measurement row: drop the trailing column and turn
                        # columns 1-4 into floats, mapping 'n/a' to 0.0.
                        row.pop()
                        for i in range(1, 5):
                            if row[i] != 'n/a':
                                row[i] = float(row[i])
                            else:
                                row[i] = 0.0
                    rows.append(row)
    if reader is None:
        click.echo('No csv files found in ' + config.data_folder +
                   ', nothing to do.')
        sys.exit(4)
    # Deduplication: keep the first row seen for each timestamp, in order.
    seen = set()
    rows = [x for x in rows if not (x[0] in seen or seen.add(x[0]))]
    return sorted(rows, key=time.datetime_from_row)
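The deduplication above leans on a compact idiom: set.add() returns None, so the expression (x[0] in seen or seen.add(x[0])) is falsy exactly when the timestamp has not been seen yet, and the comprehension keeps the first occurrence of each key while preserving order. A standalone sketch of the same idiom, with invented rows for illustration:

# Order-preserving dedup on the first column; the rows are made up.
rows = [
    ['2021-01-01 10:00:00', 21.5],
    ['2021-01-01 10:05:00', 21.7],
    ['2021-01-01 10:00:00', 21.5],  # duplicate timestamp
]
seen = set()
# set.add() returns None (falsy), so the right-hand side of `or` only
# runs for keys not yet in `seen`, recording them as it goes.
deduped = [row for row in rows if not (row[0] in seen or seen.add(row[0]))]
print(deduped)  # the repeated 10:00:00 row is dropped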
Example #2
def datetime_from_row(row, cutoff=None):
    """Get the full date and time from the data point row."""
    # Imported inside the function, presumably to avoid a circular import
    # between the time and data modules.
    from comet.data import not_data_point
    if not_data_point(row):
        # Non-data rows (headers etc.) get the Unix epoch as a sentinel.
        return datetime.datetime(1970, 1, 1)
    return datetime_from_field(row[0], cutoff)
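Returning the Unix epoch for non-data rows is what lets loadAll sort header and measurement rows with a single key: every header row maps to 1970-01-01 and therefore sorts ahead of any real data point. A rough sketch of that effect, using plain datetime calls and hypothetical stand-ins instead of the project's helpers:

import datetime

EPOCH = datetime.datetime(1970, 1, 1)

def is_header(row):
    # Hypothetical stand-in for data.not_data_point, for illustration only.
    return not row[0][:1].isdigit()

def sort_key(row):
    if is_header(row):
        return EPOCH  # headers sort before every measurement
    return datetime.datetime.strptime(row[0], '%Y-%m-%d %H:%M:%S')

rows = [
    ['2021-01-01 10:05:00', '21.7'],
    ['Device:', 'Web Sensor'],
    ['2021-01-01 10:00:00', '21.5'],
]
print(sorted(rows, key=sort_key))  # header row first, then time order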
Example #3
def fetch(config, url):
    """Fetches and stores metrics from Sensor at the URL given."""
    new_path = os.path.join(
        config.data_folder,
        datetime.datetime.now().strftime('%Y-%m-%d_%H:%M:%S.csv'))
    new_temp_path = new_path + 'temp'
    if not url.startswith('http://'):
        url = 'http://' + url
    url += '/export.csv'

    if config.verbose:
        click.echo('Fetching data from ' + url + ' and saving it in ' +
                   new_temp_path)

    try:
        previous_path = sorted(glob.glob(config.data_folder + '/*.csv'))[-1]
    except IndexError:
        previous_path = None

    try:
        urllib.request.urlretrieve(url, new_temp_path)
    except urllib.error.HTTPError as e:
        # HTTPError is a subclass of URLError, so it must be caught first
        # or this branch could never run.
        click.echo('Managed to connect but failed with HTTP Error code: ' +
                   str(e.code))
        click.echo(e.reason)
        sys.exit(2)
    except urllib.error.URLError as e:
        click.echo('Failed to establish an HTTP connection.')
        click.echo(e.reason)
        sys.exit(1)

    try:
        new_rows = csvio.loadOne(new_temp_path)
        if new_rows[0][0] != "Device:":
            click.echo('Managed to connect and fetch data from something, '
                       'but it was not a CSV from a Comet Web Sensor.')
            click.echo(new_rows[0][0])
            sys.exit(3)

        # Here we'll try to remove data points that overlap with the last
        # file.  It gets nasty due to time adjustments done by the sensor.
        if previous_path is not None:
            previous_rows = csvio.loadOne(previous_path)
            data_start = data.get_first_data_point_index(previous_rows)
            # The first data row in the previous file is its newest one.
            time_of_newest_data_in_previous = time.datetime_from_row(
                previous_rows[data_start], 'second')
            filtered_rows = []
            for row in new_rows:
                if data.not_data_point(row):
                    continue
                time_of_row = time.datetime_from_row(row)
                if time_of_newest_data_in_previous < time_of_row:
                    filtered_rows.append(row)

            if not filtered_rows:
                if config.verbose:
                    click.echo('No new rows found in fetched data.')
                sys.exit(0)
        else:
            filtered_rows = new_rows

        if config.verbose:
            click.echo('Rewriting treated CSV to: ' + new_path)
        csvio.writeRows(filtered_rows, new_path)
    finally:
        # Always clean up the temporary download, even on early exits.
        os.remove(new_temp_path)
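The overlap handling in fetch boils down to one comparison: take the newest timestamp already stored on disk and keep only fetched rows that are strictly newer. A simplified sketch of just that step, with invented rows and a plain strptime parser standing in for the project's time helpers:

import datetime

def parse_ts(field):
    # Stand-in for the project's timestamp parsing; the format is assumed.
    return datetime.datetime.strptime(field, '%Y-%m-%d %H:%M:%S')

# Newest timestamp already present in the previous CSV (invented value).
newest_previous = parse_ts('2021-01-01 10:05:00')

# Freshly fetched rows; the first two overlap with the previous file.
new_rows = [
    ['2021-01-01 10:00:00', '21.5'],
    ['2021-01-01 10:05:00', '21.7'],
    ['2021-01-01 10:10:00', '21.9'],
]

# Keep only rows strictly newer than anything already stored.
filtered_rows = [row for row in new_rows if parse_ts(row[0]) > newest_previous]
print(filtered_rows)  # only the 10:10:00 row survives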