# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

def writeStationsToFile(stations, filepath, fmt='dump', mode='w'):
    """Write a sequence of station dicts to a file as tab separated values
    ('tsv'), JSON ('json') or, by default, one python dict literal per line.
    """
    out_file = open(filepath, mode)

    # write each station as a line in a tab separated values file
    if fmt == 'tsv':
        # sort the columns, then force the order sid, name, ucanid up front
        keys = sorted(stations[0].keys())
        for key in ('ucanid', 'name', 'sid'):
            if key in keys:
                keys.remove(key)
                keys.insert(0, key)
        out_file.write('\t'.join(keys))
        
        for station in stations:
            # values may be numeric (e.g. numpy scalars), so coerce to str
            values = [str(station[key]) for key in keys]
            out_file.write('\n%s' % '\t'.join(values))

    # write to a json file
    elif fmt == 'json':
        import json
        # default=str guards against values (e.g. numpy scalars) that the
        # stock encoder cannot serialize
        out_file.write(json.dumps({'stations': list(stations)}, default=str))

    # write each station as a python dictionary, one per line
    else:
        for indx, station in enumerate(stations):
            if indx > 0: out_file.write('\n')
            template = getDictTemplate(station)
            out_file.write(template % station)

    out_file.close()
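
# a minimal usage sketch for writeStationsToFile; the station keys and
# values here are hypothetical, not taken from a real index file
#   stations = [ {'sid':'KALB', 'name':'Albany', 'ucanid':12345} ]
#   writeStationsToFile(stations, 'stations.tsv', fmt='tsv')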
                    # don't erase and don't change if not different
                    if meta_value and meta_value != current_value:
                        before[name] = current_value
                        #TODO: make sure to deserialize new value
                        manager.hdf5_file[name][stn_index] = meta_value
                        after[name] = meta_value
                else: # OK to set to NULL
                    # change only when different
                    if meta_value != current_value:
                        before[name] = current_value
                        #TODO: make sure to deserialize new value
                        manager.hdf5_file[name][stn_index] = meta_value
                        after[name] = meta_value

            if after:
                tmpl = getDictTemplate(after.keys())

                if change_log is None:
                    change_log = open(change_log_path, 'a')
                if change_log_empty:
                    change_log.write('[')
                    change_log_empty = False

                if change_counter == 0:
                    if state != first_state: change_log.write(',\n')
                    change_log.write('{"state":"%s","stations":[' % state)
                change_counter += 1

                if change_counter > 1: change_log.write(',')
                change_log.write('\n{"sid":"%s",' % sid)
                change_log.write('\n       "before":%s,' % (tmpl % before))
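
# rough shape of one change-log record as written above (state and sid
# values are hypothetical; the matching "after" entry and the closing
# brackets are presumably written next):
#   [{"state":"NY","stations":[
#    {"sid":"KALB",
#           "before":{...},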
# NOTE: -d and -t are assumed flags; their dests are required by the
# options.dataset_names and options.test_run lookups below
parser.add_option('-d', action='store', type='string', dest='dataset_names',
                  default=None)
parser.add_option('-t', action='store_true', dest='test_run', default=False)
parser.add_option('-z', action='store_true', dest='debug', default=False)
options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

factory = ObsnetDataFactory(options)
manager = factory.getFileManager('index', 'r')

dataset_name = args[0]
value = args[1]
if dataset_name == 'ucanid':
    value = N.int64(value)

debug = options.debug
test_run = options.test_run
verbose = debug or test_run

if options.dataset_names is not None:
    dataset_names = options.dataset_names.split(',')
else:
    dataset_names = manager.listDatasets()

# locate the station whose value in the search dataset matches
data = manager.getData(dataset_name)
matches = N.where(data == value)[0]
if matches.size == 0:
    raise SystemExit('no station found where %s == %s' % (dataset_name, value))
indx = matches[0]

station = {}
for name in dataset_names:
    station[name] = manager.getDataset(name)[indx]
manager.closeFile()

print getDictTemplate(station) % station
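
# example invocation (script name and values are hypothetical):
#   find_station.py ucanid 12345
# prints the matching station record as a python dict literal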
#! /Volumes/projects/venvs/newa/bin/python

import os, sys

import numpy as N

from newa.factory import ObsnetDataFactory
from newa.ucan import updateWithUcanMetadata

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

from newa.database.index import INDEX, getDictTemplate
INDEX_KEYS = INDEX.keys()
# map each index column name to its missing-value sentinel
MISSING = dict(zip(INDEX_KEYS, [INDEX[key].missing for key in INDEX_KEYS]))
STATION_TEMPLATE = getDictTemplate()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

from optparse import OptionParser
parser = OptionParser()

parser.add_option('-o',
                  action='store',
                  type='string',
                  dest='output_filepath',
                  default='merge_dumps_with_index_output.py')
parser.add_option('-w',
                  action='store',
                  type='string',
                  dest='working_dir',
                  default=None)
    # attributes attached to each new dataset in the index file
    attrs = {
        'created': column_created[name],
        'updated': update_time,
        'description': INDEX[name].description,
        'missing': MISSING[name],
    }
    units = INDEX[name].units
    if units: attrs['units'] = units

    dtype = INDEX[name].data_type
    if verbose: print 'adding "%s" dataset to the index file' % name
    manager.createDataset(name, N.array(data, dtype=dtype), attrs)
manager.closeFile()

# get log directory path
log_dirpath = os.path.join(factory.getDirectoryPath('working'), 'updates')
if not os.path.exists(log_dirpath):
    os.makedirs(log_dirpath)
# get file path for change log
change_log_name = '%s_added.log' % time_str
change_log_path = os.path.join(log_dirpath, change_log_name)

change_log = open(change_log_path, 'a')
station = new_stations[0]
change_log.write(getDictTemplate(station) % station)
# the slice is empty when there is only one new station
for station in new_stations[1:]:
    change_log.write('\n')
    change_log.write(getDictTemplate(station) % station)
change_log.close()
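
# each line of the "<time_str>_added.log" file is one new station rendered
# through getDictTemplate, roughly (keys and values hypothetical):
#   {'sid': 'KALB', 'name': 'Albany', 'ucanid': 12345, ...}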