def length_stats_for_intervals(t_start, t_end, dt, D, b_s, chi2=5.0, stations=5):
    """Build a flash-length statistics pipeline over fixed time intervals.

    t_start, t_end: datetime.datetime bounds of the analysis window.
    dt: datetime.timedelta frame width (only total_seconds() is used).
    D, b_s: forwarded unchanged to length_for_these_flashes.
    chi2, stations: quality thresholds forwarded to length_for_these_flashes.

    Returns (ev_fl_rcvr, all_frame_targets, results_aggregator): the pipeline
    entry point, the per-frame time-gated targets, and the aggregator that
    accumulates results binned by altitude.
    """
    t_edges, duration = time_edges(t_start, t_end, dt.total_seconds())
    t_ref, t_edges_seconds = seconds_since_start_of_day(t_start, t_edges)
    n_frames = len(t_edges) - 1

    # Fixed altitude histogram: 0 to 20 km in 0.5 km bins.
    max_alt, d_alt = 20.0, 0.5
    alt_bins = np.arange(0.0, max_alt + d_alt, d_alt)
    results_aggregator = StatResults(alt_bins, basedate=t_ref)

    # One time-gated receiver per frame; all of them fed by one branch point.
    all_frame_targets = [
        in_time_range(t_edges_seconds[i], t_edges_seconds[i + 1],
                      results_aggregator.timeframe_results_rcvr())
        for i in range(n_frames)
    ]
    brancher = Branchpoint(all_frame_targets)
    ev_fl_rcvr = length_for_these_flashes(D, b_s, alt_bins, chi2=chi2,
                                          stations=stations,
                                          target=brancher.broadcast())
    return ev_fl_rcvr, all_frame_targets, results_aggregator
def flash_size_stats_for_intervals(t_start, t_end, dt, lat_bounds=None, lon_bounds=None, outdir=None):
    """Build a pipeline that accumulates flash-size statistics in dt-wide frames.

    Flashes are accumulated in dt-length intervals between t_start and t_end.

    t_start, t_end: datetime.datetime instances.
    dt: datetime.timedelta frame width.
    lat_bounds, lon_bounds: optional bounds applied to flash ctr_lat / ctr_lon.
    outdir: optional output directory passed to StatResults.

    Returns (printer_of_stats, all_frame_targets), where all_frame_targets is a
    list of targets receiving (events, flashes) from the flash file reader; it
    can be passed as other_analysis_targets to
    lmatools.flash_stats.plot_spectra_for_files. Once all data have been read
    (plot_spectra_for_files has returned), retrieve the accumulated statistics
    by closing the targets returned here:

        for target in timeframe_targets:
            target.close()
        printer_of_stats.close()

    plot_spectra_for_files itself could be refactored to use these time windows
    natively; that, and the dependency on stormdrain (lmatools might reasonably
    depend on it), are left for later.
    """
    t_edges, duration = time_edges(t_start, t_end, dt.total_seconds())
    t_ref, t_edges_seconds = seconds_since_start_of_day(t_start, t_edges)
    n_frames = len(t_edges) - 1

    results = StatResults(basedate=t_ref, outdir=outdir)
    printer_of_stats = results.stats_printer()

    all_frame_targets = []
    for frame in range(n_frames):
        t0 = t_edges_seconds[frame]
        t1 = t_edges_seconds[frame + 1]

        # Spatial/temporal bounds used to filter this frame's flashes.
        frame_bounds = Bounds()
        if lat_bounds is not None:
            frame_bounds.ctr_lat = lat_bounds
        if lon_bounds is not None:
            frame_bounds.ctr_lon = lon_bounds
        frame_bounds.start = t0, t1

        # Pipeline, downstream first: per-frame stats -> printer; sqrt(area)
        # raw moments -> stats; bounds filter -> moments; receiver -> filter.
        statframer = results.stats_for_frame(t_edges[frame], t_edges[frame + 1],
                                             target=printer_of_stats)
        momentizer = raw_moments_for_parameter('area', preprocess=np.sqrt,
                                               output_target=statframer)
        bound_filt = BoundsFilter(bounds=frame_bounds, target=momentizer)
        all_frame_targets.append(events_flashes_receiver(target=bound_filt.filter()))

    return printer_of_stats, all_frame_targets
def __init__(self, t_start, t_end, dt, base_date=None):
    """Set up per-interval time framing for [t_start, t_end] in dt-wide steps.

    t_start, t_end: datetime.datetime bounds of the analysis window.
    dt: datetime.timedelta frame width (only total_seconds() is used).
    base_date: optional reference date; defaults to midnight at the start
        of t_start's day.
    """
    # Placeholder for an LMA data object; presumably assigned later by the
    # caller -- not set anywhere in this constructor.
    self.lma = None
    if base_date is None:
        self.base_date = datetime(t_start.year, t_start.month, t_start.day)
    else:
        self.base_date = base_date
    t_edges, duration = time_edges(t_start, t_end, dt.total_seconds())
    self.t_edges = t_edges
    # NOTE(review): self.basedate (no underscore) is a *different* attribute
    # from self.base_date above, and is taken from seconds_since_start_of_day
    # rather than from the base_date argument -- confirm both are intentional.
    self.basedate, t_edges_seconds = seconds_since_start_of_day(t_start, t_edges)
    self.t_edges_seconds = np.asarray(t_edges_seconds)
    # n_frames = number of intervals, one fewer than the number of edges.
    self.n_frames = len(t_edges)-1
    # ArrayChopper receives the raw (un-asarray'd) edge seconds.
    self.t_chopper = ArrayChopper(t_edges_seconds)
def flash_size_stats_for_intervals(t_start, t_end, dt, lat_bounds=None, lon_bounds=None, outdir=None):
    """Create a flash-size statistics pipeline over dt-length intervals.

    Processes an arbitrary number of flashes accumulated in dt-length
    intervals between t_start and t_end.

    t_start, t_end: datetime.datetime instances.
    dt: datetime.timedelta.
    lat_bounds, lon_bounds: optional bounds assigned to ctr_lat / ctr_lon
        on each frame's Bounds object.
    outdir: optional output directory forwarded to StatResults.

    Returns:
        printer_of_stats, all_frame_targets -- the latter is a list of
        targets that receive (events, flashes) from the flash file reader
        and may be passed as other_analysis_targets to
        lmatools.flash_stats.plot_spectra_for_files. After all data have
        been read (i.e., plot_spectra_for_files has returned), close each
        frame target and then printer_of_stats to retrieve the accumulated
        statistics.

    plot_spectra_for_files could be refactored to use these same time
    windows natively; the stormdrain dependency is a related open question
    (lmatools may reasonably come to depend on stormdrain).
    """
    t_edges, duration = time_edges(t_start, t_end, dt.total_seconds())
    t_ref, t_edges_seconds = seconds_since_start_of_day(t_start, t_edges)
    n_frames = len(t_edges) - 1

    results = StatResults(basedate=t_ref, outdir=outdir)
    printer_of_stats = results.stats_printer()

    all_frame_targets = []
    for idx in range(n_frames):
        start_sec, end_sec = t_edges_seconds[idx], t_edges_seconds[idx + 1]

        # Per-frame bounds filter: optional lat/lon limits plus this
        # frame's time window.
        bounds = Bounds()
        if lat_bounds is not None:
            bounds.ctr_lat = lat_bounds
        if lon_bounds is not None:
            bounds.ctr_lon = lon_bounds
        bounds.start = start_sec, end_sec

        # Assemble the chain from the sink backwards: printer <- frame
        # stats <- sqrt(area) moments <- bounds filter <- receiver.
        frame_stats = results.stats_for_frame(t_edges[idx], t_edges[idx + 1],
                                              target=printer_of_stats)
        moments = raw_moments_for_parameter('area', preprocess=np.sqrt,
                                            output_target=frame_stats)
        filt = BoundsFilter(bounds=bounds, target=moments)
        receiver = events_flashes_receiver(target=filt.filter())
        all_frame_targets.append(receiver)

    return printer_of_stats, all_frame_targets
def length_stats_for_intervals(t_start, t_end, dt, D, b_s, chi2=5.0, stations=5):
    """Wire up an altitude-binned flash-length statistics pipeline.

    t_start, t_end: datetime.datetime bounds of the analysis window.
    dt: datetime.timedelta frame width (only total_seconds() is used).
    D, b_s: passed through to length_for_these_flashes.
    chi2, stations: quality cuts passed through to length_for_these_flashes.

    Returns a three-tuple (ev_fl_rcvr, all_frame_targets, results_aggregator):
    the upstream receiver to feed, the per-frame targets, and the StatResults
    aggregator configured with the altitude bins.
    """
    t_edges, duration = time_edges(t_start, t_end, dt.total_seconds())
    t_ref, t_edges_seconds = seconds_since_start_of_day(t_start, t_edges)
    n_frames = len(t_edges) - 1

    # Altitude bins: 0-20 km, 0.5 km spacing.
    max_alt = 20.0
    d_alt = 0.5
    alt_bins = np.arange(0.0, max_alt + d_alt, d_alt)
    results_aggregator = StatResults(alt_bins, basedate=t_ref)

    # Each frame gets its own results receiver gated to its time window.
    all_frame_targets = []
    for frame in range(n_frames):
        lo = t_edges_seconds[frame]
        hi = t_edges_seconds[frame + 1]
        frame_rcvr = results_aggregator.timeframe_results_rcvr()
        all_frame_targets.append(in_time_range(lo, hi, frame_rcvr))

    # A single branch point broadcasts each result to every frame target.
    fanout = Branchpoint(all_frame_targets)
    ev_fl_rcvr = length_for_these_flashes(D, b_s, alt_bins, chi2=chi2,
                                          stations=stations,
                                          target=fanout.broadcast())
    return ev_fl_rcvr, all_frame_targets, results_aggregator