# validate the search criteria, then resolve the command-line args to the
# list of station ids to process.
# FIX: the second line was indented although no enclosing block is open
# here, which is an IndentationError at module level.
criteria = factory._validCriteria(options, SEARCH_KEYS)
ucanids = factory.getStationIdsFromArgs(args, criteria)

# honor the "replace existing files" rule: either delete stale stats
# files up front, or drop stations whose stats file already exists
if replace_existing:
    # delete any previously generated 'hour-stats' file for each station
    for station_id in ucanids:
        stats_path = factory.getFilepathForUcanid(station_id, 'hour-stats')
        if os.path.exists(stats_path):
            os.remove(stats_path)
else:
    # keep only the stations that do not already have an 'hour-stats' file
    remaining = []
    for station_id in ucanids:
        stats_path = factory.getFilepathForUcanid(station_id, 'hour-stats')
        if not os.path.exists(stats_path):
            remaining.append(station_id)
    ucanids = remaining

# process each station in the list
for ucanid in ucanids:

    # open the station's raw hourly data file read-only
    hours_manager = factory.getStationFileManager((ucanid,'hours'), 'r')
    # no explicit element list means "process every dataset in the file";
    # otherwise parse the user-supplied, comma-separated element string
    if options.elements is None:
        elements = hours_manager.hdf5_file.keys()
    else:
        elements = stringToTuple(options.elements)

    # create a fresh 'hour-stats' file, copy the file-level attributes
    # from the source hours file, and stamp the creation time
    stats_manager = factory.getStationFileManager((ucanid,'hour-stats'), 'w')
    stats_manager.setFileAttributes(**hours_manager.getFileAttributes())
    stats_manager.setFileAttribute('created', stats_manager._timestamp())

    # sentinels for tracking the observed time span: any real hour value
    # will be smaller than earliest_hour and larger than latest_hour
    earliest_hour = 9999999999
    latest_hour = -32768

    for element in elements:
        # NOTE(review): this loop body appears truncated in this view —
        # only the debug spacer print is visible; confirm the rest of the
        # per-element processing upstream
        if debug:
            print ' '
if not file_extension.startswith('.'): file_extension.insert(0, '.')

# get time span from input arguments
start_time, end_time = getTimeSpanFromArgs(args[4:])
print 'time span', start_time, end_time

# set local variables from options
debug = options.debug

# create a factory, then use it to get the list of stations
factory = ObsnetDataFactory(options)

# get a data manager for the first file (an .h5 file in the working dir).
# FIX: removed a dead assignment (`filepath = '%s.h5' % first_file`) that
# was immediately overwritten by the os.path.join() below.
filepath = os.path.join(factory.config.working_dir, '%s.h5' % first_file)
manager = factory.getStationFileManager(filepath, 'r')
# get the first dataset along with its attributes
attrs, data = manager.getData(first_dataset, True)
# convert to a time series dataset and locate the requested interval
time_series = TimeSeries(first_dataset, data, None, False, '-', **attrs)
start_index, end_index = time_series.getIndexes(start_time, end_time)
# extract the data subset and the matching dates for the time span
first_data = time_series.getIntervalData(start_index, end_index)
first_dates = time_series.getDates(start_time, end_time)
manager.closeFile()

# get a data manager for second file
filepath = os.path.join(factory.config.working_dir, '%s.h5' % second_file)
manager = factory.getStationFileManager(filepath, 'r')
# get the second dataset along with its attributes
attrs, data = manager.getData(second_dataset, True)
# NOTE(review): `attrs`/`data` are never used below and the factory is
# abruptly rebuilt here — this section looks truncated; `manager` is also
# never closed. Verify against the original script.
factory = ObsnetDataFactory(options)
# resolve the command-line args to the list of station records to process
stations = factory.argsToStationData(args, options)
total_stations = len(stations)

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

# running counter used only for the progress messages below
station_num = 0

for station in stations:
    station_num += 1
    ucanid = station['ucanid']

    # only stations that already have an 'hours' data file can be
    # processed: open it for append, otherwise report the skip and move on
    filepath = factory.getFilepathForUcanid(station['ucanid'], 'hours')
    if os.path.exists(filepath):
        manager = factory.getStationFileManager((ucanid,'hours'),'a')
    else:
        # skipmsg is a format string defined elsewhere in this file
        print skipmsg % (station_num, total_stations, ucanid, station['sid'],
                         station['name'])
        continue

    # we're going to process this station
    # (procmsg is a format string defined elsewhere in this file)
    print procmsg % (station_num, total_stations,ucanid, station['sid'],
                     station['name'])

    # check to see if dewpt already exists and deal with it:
    # replace_existing forces regeneration by deleting the old group,
    # otherwise leave it alone and flag that no new dewpt is needed
    # (gen_dewpt is presumably consumed past the end of this view)
    gen_dewpt = True
    if 'dewpt' in manager.listGroups():
        if replace_existing: manager.deleteGroup('dewpt')
        else: gen_dewpt = False