def create_json(root):
    """
    Create the json file containing reading data.

    Params:
        root string
    """
    data_dir = defaults.downloads(root)
    output_dir = defaults.json_dir(root)
    archive = defaults.data_archive(root)

    catalog = []
    data = []
    json_file = {}

    data_files = utils.get_files_in_dir(data_dir)
    if not data_files:
        utils.warn('No csv files to process. Terminating')
        exit()

    utils.print_time('PROCESSOR START')
    print('Begin JSON file generation')
    for data_file in data_files:
        with open(data_file, 'r', newline='') as f:
            reader = csv.reader(f)
            meterId, meterName = next(reader)  # first row: meter id and name

            print('Processing meterId %s ...' % meterId, end='')

            info = {'meterId': meterId, 'meterName': meterName}
            catalog.append(info)

            for row in reader:  # remaining rows: timestamp, value
                ts = row[0]
                val = float(row[1])
                reading = {'timestamp': ts,
                           'value': val,
                           'meterId': meterId}
                data.append(reading)

            print('done')
        utils.move(data_file, archive)

    json_file['datasource'] = defaults.URI
    json_file['meterCatalog'] = catalog
    json_file['readings'] = data

    print('End JSON file generation')

    curr_dt = datetime.now()
    json_fname = 'dump_%s.json' % (utils.format_dt(curr_dt))
    save_path = os.path.join(output_dir, json_fname)

    print('Writing JSON to file %s ...' % save_path, end='')
    with open(save_path, 'w') as out:
        json.dump(json_file, out)
        print('done')

    utils.print_time('PROCESSOR END')
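For reference, here is a minimal, self-contained sketch of the CSV-to-JSON transformation that create_json() performs, using an in-memory CSV in place of the project's defaults/utils helpers. The sample meter, timestamps, and the stand-in datasource URI are illustrative assumptions, not values from the real project.

import csv
import io
import json

# Illustrative stand-in for one downloaded CSV file: the first row holds the
# meter id and name, every following row holds a timestamp and a reading.
sample_csv = (
    "42,Main Building\n"
    "2023-01-01T00:00:00,12.5\n"
    "2023-01-01T00:15:00,13.1\n"
)

catalog, data = [], []
reader = csv.reader(io.StringIO(sample_csv))
meterId, meterName = next(reader)
catalog.append({'meterId': meterId, 'meterName': meterName})
for ts, val in reader:
    data.append({'timestamp': ts, 'value': float(val), 'meterId': meterId})

json_file = {
    'datasource': 'https://example.invalid/api',  # stands in for defaults.URI
    'meterCatalog': catalog,
    'readings': data,
}
print(json.dumps(json_file, indent=2))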
Example #2
def run_batch(root, start, end, idx=None):
    """
    Run this script in batch mode. Download reading data whose timestamps
    lie within start and end dates.

    The date must follow the following format (Note the T between date and time):
        YYYY-MM-DDTHH:MM:SS

    where 24 hour time is used.
    
    If idx is a non-negative integer, instead download the meter at that index.
    idx is zero-indexed. If idx is greater than the number of meters, nothing
    happens; no files are downloaded. Default behavior is to download data for
    all meters.

    Params:
        root string
        start string
        end string
        idx integer
    """
    s_date = get_date(start)
    e_date = get_date(end)

    if not s_date or not e_date:
        raise ValueError('Invalid/missing dates')
    elif s_date > e_date:
        raise ValueError('Start date must come before end date')
    elif not utils.exists_dir(root):
        raise ValueError('Root directory not found')
    elif idx is not None and not is_valid_index(idx):
        raise ValueError('Index must be non-negative integer')

    creds_file = defaults.creds(root)
    cnxn_str = utils.get_cnxn_str(creds_file)
    output_dir = defaults.downloads(root)
    meter_file = defaults.meter_file(root)

    utils.print_time('GETTER START')

    with Cursor.Cursor(cnxn_str) as cursor:
        dq = get_reading_from_name_query_str()
        meters = utils.read_meter_file(meter_file)
        for i, m in enumerate(meters):
            if idx is not None and idx != i:
                continue  # a single meter index was requested; skip the others
            ion_name = utils.get_ion_name(m)
            qid = utils.get_ion_qid(m)
            try:
                cursor.execute(dq, ion_name, qid, str(s_date), str(e_date))
            except pyodbc.Error:
                utils.error(
                    'Problem with query to get data for meter %s qid %d' %
                    (ion_name, qid))
                continue
            if cursor.rowcount == 0:
                utils.warn('No data found for meter %s qid %d' %
                           (ion_name, qid))
                continue

            meterId, meterName = utils.get_lucid_id_and_name(m)
            s_date_str = utils.make_lucid_ts(str(s_date))
            e_date_str = utils.make_lucid_ts(str(e_date))
            dl_fname = "%sT%sT%s.csv" % (meterId, s_date_str, e_date_str)
            path = os.path.join(output_dir, dl_fname)

            print('Writing data for meter %s qid %d to file: %s ...' %
                  (ion_name, qid, path), end='')
            with open(path, 'w', newline='') as data_file:
                writer = csv.writer(data_file)
                writer.writerow([meterId, meterName])  # header row: meter id and name

                for row in cursor:
                    ts = row.TimestampUTC
                    val = row.Value
                    data_row = [utils.make_lucid_ts(ts), val]
                    writer.writerow(data_row)
                print('done')
    utils.print_time('GETTER END')
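A hypothetical invocation of run_batch(), assuming it is importable from this module; the root path and the date strings below are placeholders that simply follow the YYYY-MM-DDTHH:MM:SS format required by the docstring.

# Download readings for every meter listed in the meter file.
run_batch('/srv/lucid-data', '2023-01-01T00:00:00', '2023-01-31T23:59:59')

# Download only the meter at index 3 in the meter file (zero-indexed).
run_batch('/srv/lucid-data', '2023-01-01T00:00:00', '2023-01-31T23:59:59', idx=3)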