Example #1
def post_json_files(root):
    """
    Post the JSON files in a designated directory to BuildingOS.

    Params:
        root string
    """
    json_dir = defaults.json_dir(root)
    archive = defaults.json_archive(root)
    post_url = defaults.BOS_URL

    json_files = utils.get_files_in_dir(json_dir)
    if not json_files:
        utils.warn('No JSON files to process. Terminating')
        exit()

    utils.print_time('LOADER START')
    for json_file in json_files:
        print('Posting file: %s ...' % json_file, end='')
        with open(json_file, 'rb') as jf:
            payload = {'data': jf}
            response = requests.post(post_url, files=payload)
            print('done')

            print('Server response: %s' % (response.text))

        utils.move(json_file, archive)

    utils.print_time('LOADER END')
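
post_json_files also assumes import requests at module level and leans on two project-local modules, defaults and utils, that the example does not show. The sketch below is a hypothetical reconstruction of only the pieces this function calls; every directory name and the endpoint URL are assumptions inferred from the call sites, not the project's real values.

# Hypothetical stand-ins for the unshown defaults and utils modules.
# All paths and the BOS_URL endpoint below are guesses, not project values.
import os
import shutil
import sys
from datetime import datetime


class defaults:
    BOS_URL = 'https://api.buildingos.example/upload'  # placeholder endpoint

    @staticmethod
    def json_dir(root):
        return os.path.join(root, 'json')           # generated JSON waiting to be posted

    @staticmethod
    def json_archive(root):
        return os.path.join(root, 'json_archive')   # JSON files that were already posted


class utils:
    @staticmethod
    def get_files_in_dir(path):
        # Absolute paths of the regular files in path (empty list if none).
        return [os.path.join(path, name) for name in sorted(os.listdir(path))
                if os.path.isfile(os.path.join(path, name))]

    @staticmethod
    def warn(msg):
        print('WARNING: %s' % msg, file=sys.stderr)

    @staticmethod
    def print_time(label):
        print('%s %s' % (label, datetime.now().isoformat()))

    @staticmethod
    def move(src, dest_dir):
        shutil.move(src, os.path.join(dest_dir, os.path.basename(src)))
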
Example #2
def create_json(root):
    """
    Create the JSON file containing reading data.

    Params:
        root string
    """
    data_dir = defaults.downloads(root)
    output_dir = defaults.json_dir(root)
    archive = defaults.data_archive(root)

    catalog = []
    data = []
    json_file = {}

    data_files = utils.get_files_in_dir(data_dir)
    if not data_files:
        utils.warn('No CSV files to process. Terminating')
        exit()

    utils.print_time('PROCESSOR START')
    print('Begin JSON file generation')
    for data_file in data_files:
        # csv.reader expects a text-mode file in Python 3; newline='' per the csv docs.
        with open(data_file, 'r', newline='') as f:
            reader = csv.reader(f)
            # The first row holds the meter metadata: meterId,meterName
            meterId, meterName = next(reader)

            print('Processing meterId %s ...' % meterId, end='')

            info = {'meterId': meterId, 'meterName': meterName}
            catalog.append(info)

            for row in reader:
                ts = row[0]
                val = float(row[1])
                reading = {'timestamp': ts,
                           'value': val,
                           'meterId': meterId}
                data.append(reading)

            print('done')
        utils.move(data_file, archive)

    json_file['datasource'] = defaults.URI
    json_file['meterCatalog'] = catalog
    json_file['readings'] = data

    print('End JSON file generation')

    curr_dt = datetime.now()
    json_fname = 'dump_%s.json' % (utils.format_dt(curr_dt))
    save_path = os.path.join(output_dir, json_fname)

    print('Writing JSON to file %s ...' % save_path, end='')
    # json.dump writes text, so open the output file in text mode.
    with open(save_path, 'w') as out:
        json.dump(json_file, out)
        print('done')

    utils.print_time('PROCESSOR END')
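
Taken together, the two functions form a small CSV-to-BuildingOS pipeline: create_json rolls every CSV in the downloads directory (a meterId,meterName header row followed by timestamp,value rows, as the reader above expects) into a single dump_<timestamp>.json, and post_json_files then uploads whatever sits in the JSON directory. A minimal driver is sketched below; the root path is an assumption.

# Hypothetical driver wiring the two steps together; the root path is made up.
if __name__ == '__main__':
    root = '/opt/bos-loader'   # assumed project root holding downloads/, json/ and the archives
    create_json(root)          # CSVs -> one dump_<timestamp>.json in the JSON directory
    post_json_files(root)      # POST each JSON file to defaults.BOS_URL, then archive it

The document that create_json writes has three top-level keys, taken straight from the assignments above: datasource (defaults.URI), meterCatalog (one {'meterId', 'meterName'} entry per CSV) and readings (a flat list of {'timestamp', 'value', 'meterId'} objects across all meters).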