parser.add_option('-w', action='store', type='string', dest='working_dir',
                  default=None)

options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

if len(args) > 0:
    by_ucanid = tuple([int(arg) for arg in args])
else:
    by_ucanid = None

factory = ObsnetDataFactory(options)
index_manager = factory.getFileManager('index', mode='r')

max_buddies = options.max_buddies

# split the index datasets into ICAO and non-ICAO subsets
networks = index_manager.getData('network')
icao = N.where(networks == 'icao')
not_icao = N.where(networks != 'icao')
del networks

lats = index_manager.getData('lat')
icao_lats = lats[icao]
lats = lats[not_icao]
lons = index_manager.getData('lon')
icao_lons = lons[icao]
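# Illustrative sketch, not part of the original script: N.where() returns
# index arrays that select the matching rows from any dataset aligned with
# the 'network' array, which is how icao_lats and icao_lons are built above
# (demo values assumed).
import numpy as N

_demo_networks = N.array(['icao', 'cu_log', 'icao', 'njwx'])
_demo_lats = N.array([42.1, 42.7, 43.0, 40.6])
_demo_icao = N.where(_demo_networks == 'icao')   # (array([0, 2]),)
print _demo_lats[_demo_icao]                     # [ 42.1  43. ]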
# filter annoying numpy warnings
warnings.filterwarnings('ignore', "All-NaN axis encountered")
warnings.filterwarnings('ignore', "All-NaN slice encountered")
warnings.filterwarnings('ignore', "invalid value encountered in greater")
# MUST ALSO TURN OFF WARNING FILTERS AT END OF SCRIPT !!!!!

# create the detector
detector = PrecipErrorDetector(precipValidationTree, start_hour, end_hour,
                               padding, missing_threshold, grid_offset,
                               networks, notify_stations, verbose, test_run,
                               debug)

# get the stations for this run and run the detector for each day
if run_detector:
    factory = ObsnetDataFactory(options)
    stations = factory.argsToStationData((), options, tuple(metadata))
    for station in stations:
        date = start_date
        while date <= end_date:
            detected = detector(date, station)
            date += ONE_DAY

# create the preface for the summary email
if end_date is None or end_date == start_date:
    date_str = 'on %s' % start_date.strftime('%B %d, %Y')
else:
    date_str = 'from %s to %s' % (start_date.strftime('%B %d, %Y'),
                                  end_date.strftime('%B %d, %Y'))
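# Illustrative sketch, not part of the original script: the note above
# requires the filters to be removed again at the end of the run;
# warnings.resetwarnings() clears every filter installed by filterwarnings().
# Scoping the filters with the stdlib context manager avoids the problem
# entirely, since they cannot leak past the block that needs them:
import warnings
import numpy as N

with warnings.catch_warnings():
    warnings.filterwarnings('ignore', "All-NaN slice encountered")
    _demo_max = N.nanmax(N.array([N.nan, N.nan]))  # yields nan, no warning
# default warning behavior is restored automatically outside the block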
                  default=None)
parser.add_option('-x', action='store_true', dest='replace_existing',
                  default=False)
parser.add_option('-z', action='store_true', dest='debug', default=False)

options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

procmsg = '\nprocessing station %d of %d : %d : %s (%s)'
skipmsg = '\nNo hourly data file for station %d of %d : %d : %s (%s)'

debug = options.debug
replace_existing = options.replace_existing

factory = ObsnetDataFactory(options)
stations = factory.argsToStationData(args, options)
total_stations = len(stations)

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

station_num = 0
for station in stations:
    station_num += 1
    ucanid = station['ucanid']

    filepath = factory.getFilepathForUcanid(station['ucanid'], 'hours')
    if os.path.exists(filepath):
        manager = factory.getStationFileManager((ucanid, 'hours'), 'a')
    else:
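# Illustrative sketch, not part of the original script: the two message
# templates above take five values -- counter, total, ucanid, station name,
# network (the 'name' and 'network' field names are assumed) -- so a progress
# line would be emitted like this:
#
#   print procmsg % (station_num, total_stations, ucanid,
#                    station['name'], station['network'])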
if len(args) > 0 and args[0].isdigit():
    end_date = (int(args[0]), int(args[1]), int(args[2]))
    # the first three arguments form the date; keep whatever follows them
    if len(args) > 3:
        args = args[3:]
    else:
        args = ()
else:
    end_date = None

search_keys = ('bbox', 'county', 'network', 'sid', 'state')

if options.elements == 'all':
    all_elements = list(ELEMENTS.keys())
else:
    all_elements = list(stringToTuple(options.elements))
all_elements.sort()

factory = ObsnetDataFactory(options)
stations = factory.argsToStationData(args, options, search_keys=search_keys)
stations.sort(key=lambda x: x['ucanid'])
total_stations = len(stations)

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

badmsg = 'encountered known bad station %s : %s (%s)'
procmsg = '\nprocessing station %d of %d : %d : %s (%s)'
skipmsg = 'skipping station %d of %d : %d : %s (%s)'

days_per_request = options.days_per_request
replace_existing = options.replace_existing
test_run = options.test
if test_run:
    debug = True
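# Illustrative usage sketch, not part of the original script (the script name
# and the key=value argument form are hypothetical): because the leading date
# is optional, the script accepts either invocation --
#
#   python update_script.py 2011 6 30 state=NY   # explicit end date
#   python update_script.py state=NY             # end_date defaults to None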
parser.add_option('-w', action='store', type='string', dest='working_dir',
                  default=None)
parser.add_option('-z', action='store_true', dest='debug', default=False)

options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

input_filepaths = []
for arg in args:
    input_filepaths.append(os.path.normpath(arg))

factory = ObsnetDataFactory(options)
stations = list(factory.getIndexedStations('all'))
# map each station id to its position in the index
index_dict = dict([(station['sid'], indx)
                   for (indx, station) in enumerate(stations)])
station_ids = list(index_dict.keys())

for input_filepath in input_filepaths:
    input_file = open(input_filepath, 'r')
    dump_stations = eval(input_file.read())
    input_file.close()

    new_stations = []
    new_station_ids = []

    for station in dump_stations:
        sid = station['sid']
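# Illustrative sketch, not part of the original script: eval() on file
# contents will execute arbitrary code.  When the dump files hold only plain
# Python literals (lists/dicts of station records), the standard library's
# ast.literal_eval parses them without that risk:
import ast

def _demo_read_dump(filepath):
    # parse a dump file that contains a single Python literal
    dump_file = open(filepath, 'r')
    try:
        return ast.literal_eval(dump_file.read())
    finally:
        dump_file.close()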
options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

year = int(args[0])
month = int(args[1])
day = int(args[2])
if len(args) > 3:
    hour = int(args[3])
else:
    hour = 23

max_report_date = dateAsInt((year, month, day))
max_report_time = (max_report_date * 100) + hour

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

factory = ObsnetDataFactory(options)
index_manager = factory.getFileManager('index', mode='r')
last_reports = index_manager.getData('last_report')
index_manager.closeFile()

# clamp every station's last report to the cutoff date/time
for indx in range(len(last_reports)):
    last_report = last_reports[indx]
    if last_report == max_report_date:
        last_reports[indx] = max_report_time
    elif last_report > max_report_time:
        last_reports[indx] = max_report_time

factory.backupIndexFile()
index_manager = factory.getFileManager('index', mode='a')
index_manager.updateDataset('last_report', last_reports)
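# Illustrative worked example, not from the original script (the return value
# of dateAsInt is assumed to be YYYYMMDD): for July 4, 2011 at the default
# hour of 23,
#
#   max_report_date = 20110704
#   max_report_time = (20110704 * 100) + 23 = 2011070423
#
# so a date-only entry (YYYYMMDD) equal to the cutoff date is expanded to the
# YYYYMMDDHH cutoff value, and any entry later than the cutoff time is
# clamped back down to it.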
                  action='store', type='string', dest='dataset_names',
                  default=None)
parser.add_option('-w', action='store', type='string', dest='working_dir',
                  default=None)
parser.add_option('-y', action='store_true', dest='test_run', default=False)
parser.add_option('-z', action='store_true', dest='debug', default=False)

options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

factory = ObsnetDataFactory(options)
manager = factory.getFileManager('index', 'r')

dataset_name = args[0]
value = args[1]
if dataset_name == 'ucanid':
    value = N.int64(value)

debug = options.debug
test_run = options.test_run
verbose = debug or test_run

if options.dataset_names is not None:
    dataset_names = options.dataset_names.split(',')
else:
    dataset_names = manager.listDatasets()
                  type='string', dest='sort_by', default='name')
parser.add_option('-w', action='store', type='string', dest='working_dir',
                  default=None)
parser.add_option('-v', action='store_true', dest='verbose', default=False)
parser.add_option('-z', action='store_true', dest='debug', default=False)

options, args = parser.parse_args()

# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

factory = ObsnetDataFactory(options)

# locate the update log, falling back to the 'updates' directory
update_log_filepath = os.path.normpath(args[0])
if not os.path.isfile(update_log_filepath):
    dirpath = factory.getDirectoryPath('updates')
    update_log_filepath = os.path.join(dirpath, args[0])
    if not os.path.isfile(update_log_filepath):
        raise IOError, "Log file not found : %s" % update_log_filepath

debug = options.debug
must_be_active = options.must_be_active
sort_by = options.sort_by
verbose = debug or options.verbose

# get all stations currently in the index file
manager = factory.getFileManager('index', 'r')
first_dataset = args[1]
second_file = args[2]
second_dataset = args[3]

file_extension = options.extension
if not file_extension.startswith('.'):
    file_extension = '.' + file_extension

# get time span from input arguments
start_time, end_time = getTimeSpanFromArgs(args[4:])
print 'time span', start_time, end_time

# set local variables from options
debug = options.debug

# create a factory for access to the station data files
factory = ObsnetDataFactory(options)

# get a data manager for the first file
filepath = os.path.join(factory.config.working_dir, '%s.h5' % first_file)
manager = factory.getStationFileManager(filepath, 'r')

# get the first dataset
attrs, data = manager.getData(first_dataset, True)

# convert to a time series dataset and get its indexes for the time span
time_series = TimeSeries(first_dataset, data, None, False, '-', **attrs)
start_index, end_index = time_series.getIndexes(start_time, end_time)

# get the subset
first_data = time_series.getIntervalData(start_index, end_index)
first_dates = time_series.getDates(start_time, end_time)
manager.closeFile()
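# Illustrative sketch, not part of the original script: the subsetting
# pattern above in miniature -- find the indexes that bound a time span,
# then slice the aligned data with them (toy values, stdlib only):
_demo_dates = [2011070100 + h for h in range(6)]   # hourly YYYYMMDDHH keys
_demo_data = [0.0, 0.1, 0.0, 0.4, 0.2, 0.0]
_start, _end = 2011070102, 2011070104
_i, _j = _demo_dates.index(_start), _demo_dates.index(_end)
print _demo_data[_i:_j + 1]    # [0.0, 0.4, 0.2]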