day = int(args[2]) if len(args) > 3: first_hour_in_day = int(args[3]) base_time = datetime(year, month, day, 1) else: date = datetime.now() - relativedelta(days=1) base_time = datetime(date.year, date.month, date.day, 23) # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - bad_station = '\n%(network)s station %(ucanid)d : %(sid)s : %(name)s' missing_data = 'No data reported on %s : last report date is %s' # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - factory = ObsnetDataFactory(options) stations = factory.getIndexedStations('all') num_stations = len(stations) for station in stations: ucanid = station['ucanid'] if debug: print '%(ucanid)d : %(sid)s : %(name)s' % station raw_elements = [name for name in station['elements'].split(',') if name] raw_elements.sort() num_elements = len(raw_elements) if debug: print num_elements, raw_elements # make connection to UCAN server connection = HourlyDataConnection(2, first_hour_in_day=1) end_time = base_time
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - criteria = ('bbox', 'county', 'name', 'network', 'sid', 'state') factory = ObsnetDataFactory(options) criteria = factory._validCriteria(options, criteria) metadata = list(factory._parseMetadata(options.metadata)) if 'datasets' not in metadata: metadata.append('datasets') if 'first_hour' not in metadata: metadata.append('first_hour') if 'last_report' not in metadata: metadata.append('last_report') if 'name' not in metadata: metadata.append('name') if 'sid' not in metadata: metadata.append('sid') if 'state' not in metadata: metadata.append('state') if 'ucanid' not in metadata: metadata.append('ucanid') for station in factory.getIndexedStations(metadata, criteria, options.sort_by): if test_index: prev_last_report = station['last_report'] prev_last_day = prev_last_report / 100 if prev_last_day != test_last_day: print test_msg % station else: last_report = latestReportDate(station, newa_base_time, icao_base_time, debug) station['last'] = last_report if not debug: station['last'] = last_report last_day = last_report / 100 prev_last_report = station['last_report'] prev_last_day = prev_last_report / 100 if last_day != prev_last_day: print message % station
# Rebuild the station index: read every column of every indexed station into
# per-column lists (filling gaps with MISSING[name] defaults), remember each
# dataset's 'created' attribute, then back up the current index file via
# os.rename before rewriting it.
# NOTE(review): this span ends on a dangling `else:` — the failure branch of
# the backup check lives beyond this chunk, so the code is left byte-identical.
# NOTE(review): columns are gathered BEFORE the rename, so a crash between
# os.rename and the (unseen) rewrite leaves only the backup on disk — confirm
# the downstream code recreates the index from `columns`.
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # factory = ObsnetDataFactory(options) sort_by = args[0] # get all stations currently in the index file manager = factory.getFileManager('index', 'r') column_names = manager.listDatasets() column_created = {} columns = {} for name in column_names: column_created[name] = manager.getDatasetAttribute(name, 'created') columns[name] = [] stations = list(factory.getIndexedStations(column_names, None, sort_by)) manager.closeFile() del manager for station in stations: for name in column_names: columns[name].append(station.get(name, MISSING[name])) del stations # backup the index file index_filepath = factory.getFilepath('index') backup_filepath = factory._backupFilePath(index_filepath) os.rename(index_filepath, backup_filepath) if os.path.isfile(backup_filepath): print 'Index backed up to', backup_filepath else:
validation_manager =\ ValidationManager(decisionTree, start_time, end_time, networks, STATE_COORDINATORS, EMAIL_HOST, RAW_DATASETS, active_status_column, last_report_column, sids_column, debug, test_run, verbose) # build search criteria if options.search_criteria is None: search_criteria = VALIDATION.search_criteria else: search_criteria = eval(options.search_criteria) # loop through all stations that match search criteria for station in factory.getIndexedStations('all', search_criteria, sort_by='name'): try: result = validation_manager.validate(station) except Exception as e: xtype, formatted, details = captureLastException() print "\n\nCaught", xtype, "excetpion for station on", date print station print ''.join(formatted) print ''.join(details) if result is None: continue station, msg = result if msg is not None and verbose: print '\nMessage to %s' % (STATION_INFO % station) print msg
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # if options.sid is None and options.name is None and options.ucanid is None: print 'You must se one of the following options : --sid, --name or --ucanid' os.exit(99) dataset_name = args[0] year = int(args[1]) month = int(args[2]) day = int(args[3]) date = datetime(year, month, day) print date.strftime('Retrieving data for %B %d, %Y') factory = ObsnetDataFactory(options) station = factory.argsToStationData((), options)[0] criteria = factory._validCriteria(options, ('sid', 'name', 'ucanid')) metadata = ('lat', 'lon', 'name', 'network', 'sid', 'state', 'ucanid') station = factory.getIndexedStations(metadata, criteria)[0] print STATION_INFO % station start, end, stn_data = getStationData(factory, station, date, dataset_name) print STN_DATA_MSG % (start.strftime('%Y-%m-%d:%H'), end.strftime('%Y-%m-%d:%H'), arrayToString(stn_data)) grid_data = getGridData(factory, station, date, dataset_name) print GRID_DATA_MSG % (date.strftime('%Y-%m-%d'), arrayToString(grid_data)) print COMPARE_MSG % (N.nansum(stn_data), N.nanmin(grid_data), N.nanmax(grid_data))
# Parse the remaining command-line options, then load each dump file named on
# the command line and begin partitioning its stations into new vs. already
# indexed (the loop body continues beyond this chunk).
# NOTE(review): this span BEGINS mid-call — the opening of the
# parser.add_option('-w', ...) statement is outside the visible chunk — so the
# code is left byte-identical.
# SECURITY NOTE: eval(input_file.read()) executes the dump file as Python;
# only safe if dump files are produced exclusively by this tool chain —
# ast.literal_eval would be the safer replacement if the dumps are literals.
action='store', type='string', dest='working_dir', default=None) parser.add_option('-z', action='store_true', dest='debug', default=False) options, args = parser.parse_args() # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # input_filepaths = [] for arg in args: input_filepaths.append(os.path.normpath(arg)) factory = ObsnetDataFactory(options) stations = list(factory.getIndexedStations('all')) index_dict = dict([(station['sid'], indx) for (indx, station) in enumerate(stations)]) station_ids = list(index_dict.keys()) for input_filepath in input_filepaths: input_file = open(input_filepath, 'r') dump_stations = eval(input_file.read()) input_file.close() new_stations = [] new_station_ids = [] for station in dump_stations: sid = station['sid']