Пример #1
0
                print msg % ('new', elem_string)
                index_datasets[station_index] = elem_string
                num_changed += 1

    ucanid_list = tuple(do_over)
    do_over = [ ]
    attempt += 1

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

# Summarize the element-list discovery pass and, when requested, write the
# accumulated changes back to the station index file.
# NOTE(review): depends on ucanid_list, attempt, num_changed, num_stations,
# update, factory, index_datasets and datasets_attrs being set by earlier
# (unseen) code -- confirm against the full script.
num_left = len(ucanid_list)
attempt -= 1
if num_left == 0:
    print '\nCompleted discovery after %d attempts.' % attempt
    if num_changed > 0:
        # always back up the index file before any modification
        factory.backupIndexFile()
        msg = 'Differences were discovered in %d of %d stations.'
        print msg % (num_changed, num_stations)
        if update:
            # reopen the index file in append mode and overwrite the
            # 'datasets' dataset with the updated per-station element lists
            index_manager = factory.getFileManager('index', mode='a')
            index_manager.replaceDataset('datasets', index_datasets,
                                         datasets_attrs)
            index_manager.closeFile()
            print 'The database has been updated.' 
        else:
            print 'The database was not updated during this test run.' 
    else:
        print 'No changes were discovered in the element list of any station.'
        print 'The database was not updated.' 
else:
    # some stations were not resolved within the allowed number of attempts
    print '\n******************************************************************'
Пример #2
0
        'ucanid': ucanids[station_index],
    }

    # a negative entry in last_reports marks a station with no report on file
    is_missing = last_reports[station_index] < 0
    if missing_only and not is_missing: continue

    print 'updating last report date for :\n', station
    # look up the newest report date for this station
    # NOTE(review): latestReportDate is defined elsewhere -- presumably it
    # returns the same encoding used in last_reports; confirm in its module.
    last_report = latestReportDate(station, newa_base_time, icao_base_time,
                                   debug)

    # set last_reports to latest date found
    if last_report != last_reports[station_index]:
        last_reports[station_index] = last_report
        num_changed += 1
        station['last'] = last_report
        print message % station
        sys.stdout.flush()

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

# Persist the updated last-report dates, but only when something actually
# changed and this is not a dry run.
if num_changed > 0 and not test_run:
    # back up the index file before rewriting any dataset in it
    backup_filepath = factory.backupIndexFile()
    print 'Index file backed up to', backup_filepath

    print 'Last report date will be updated for %d stations.' % num_changed
    manager = factory.getFileManager('index', mode='a')
    manager.replaceDataset('last_report', last_reports, last_report_attrs)
    manager.closeFile()
    print 'Station index file has been updated.'
    print factory.getFilepath('index')
# build one empty list per index key
arrays = dict([(key, []) for key in INDEX_KEYS])

# fill each index array with the corresponding station value, falling
# back to the declared missing value when a station lacks the key
for station in stations:
    for key in INDEX_KEYS:
        if key in station:
            value = station[key]
        else:
            value = MISSING[key]
        arrays[key].append(value)

# back up any existing index file before it is overwritten below
factory = ObsnetDataFactory(options)
if factory.fileExists('index'):
    factory.backupIndexFile()

# create a new index file and prepare one dataset per index key
index_manager = factory.getFileManager('index', mode='w')
for key, dataset in INDEX.items():
    print 'creating array for', key
    data = N.array(arrays[key], dtype=dataset.data_type)
    attrs = { 'missing' : dataset.missing,
              'description' : dataset.description,
            }
    # for coordinate/elevation arrays, record min/max over finite values
    # only, so missing-value sentinels (NaN/inf) do not skew the range
    if key in ('lon','lat','elev'):
        valid = data[N.where(N.isfinite(data))]
        attrs['min'] = N.min(valid)
        attrs['max'] = N.max(valid)

    # NOTE(review): the call that actually writes the dataset is not visible
    # in this chunk -- the loop body appears truncated here.
    if dataset.units is not None: attrs['units'] = dataset.units
factory = ObsnetDataFactory(options)

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

# Timestamps used in file names: one from the download date (YYYYMMDD) and
# one from the current wall clock (yymmdd.HHMM).
date_str = download_date.strftime('%Y%m%d')
time_str = datetime.now().strftime('%y%m%d.%H%M')

debug = options.debug

# get path to station index file
index_filepath = factory.getFilepath('index')
if not os.path.exists(index_filepath):
    # parenthesized raise form is valid in both Python 2 and 3 (the legacy
    # "raise E, msg" form is Python 2 only); also fixes the spelling of
    # "accessible" in the error message
    raise IOError('Station index file not accessible : %s' % index_filepath)

# make a backup copy of index file
if create_backup: factory.backupIndexFile()

# get log directory path, creating it on first use
log_dirpath = os.path.join(factory.getDirectoryPath('working'), 'updates')
if not os.path.exists(log_dirpath):
    os.makedirs(log_dirpath)

# get file path for change log
change_log_name = '%s_changes.log' % time_str
change_log_path = os.path.join(log_dirpath, change_log_name)

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

# get 'sid' dataset from index file
manager = factory.getFileManager('index', mode='r')
sids = manager.getData('sid')
print 'new index will contain %d stations' % num_stations

# fill the column list with data from each station
columns = dict([(name, []) for name in column_names])
for station in stations:
    for name in column_names:
        columns[name].append(station[name])

# make sure that all of the new columns are the correct length
for name, data in columns.items():
    if len(data) != num_stations:
        errmsg = '%s column has %d entries, but there are %d stations'
        raise RuntimeError, errmsg % (name, len(data), num_stations)

# backup the existing index file
index_filepath, backup_filepath = factory.backupIndexFile(keep_original=False)

time_str = datetime.now().strftime('%y%m%d.%H%M')
# create a new version of the index file that contains valid stations
# from the log file
manager = factory.getFileManager('index', 'w')
update_time = manager._timestamp()
for name, data in columns.items():
    # attributes attached to each rebuilt column dataset
    attrs = {
        'created': column_created[name],
        'updated': update_time,
        'description': INDEX[name].description,
        'missing': MISSING[name],
    }
    units = INDEX[name].units
    if units: attrs['units'] = units
    # NOTE(review): the statement that actually writes the dataset is not
    # visible in this chunk -- the loop body appears truncated here.