def analysis(**kwargs):
    """Performs event analysis/picking over a set of seismic signals.

    Performs event detection if parameter 'threshold' is not None,
    otherwise performs event picking.
    """
    # Get file list
    file_list = kwargs.pop('FILEIN', [])
    # Get debug level
    debug = kwargs.get('verbosity', 1)
    if debug:
        print_settings(**kwargs)
    if kwargs.get('no_multiprocessing', False):
        analysis_chunk_task((file_list, kwargs))
    else:
        processes = kwargs.get('processes', multiprocessing.cpu_count())
        p = multiprocessing.Pool(processes=processes)
        p.map(analysis_chunk_task,
              itertools.izip(collections.chunkify(file_list,
                                                  len(file_list) / processes),
                             itertools.repeat(kwargs)))
        p.close()
        p.join()
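# --- Illustrative sketch (not part of the original module) -------------------
# A minimal, self-contained example of the chunked Pool.map pattern used by
# analysis() above: the file list is split into chunks, each chunk is paired
# with the shared settings dict, and the pairs are handed to a module-level
# worker. 'chunkify' and 'worker' are hypothetical stand-ins for the project's
# collections.chunkify and analysis_chunk_task; the sketch is written for
# Python 3, so it uses zip instead of itertools.izip.
import itertools
import multiprocessing


def chunkify(items, chunk_size):
    """Split a list into consecutive chunks of at most chunk_size items."""
    chunk_size = max(1, chunk_size)
    return [items[i:i + chunk_size] for i in range(0, len(items), chunk_size)]


def worker(args):
    """Unpack the (chunk, settings) tuple that Pool.map passes as one argument."""
    chunk, settings = args
    # Placeholder processing: pair each file name with the configured threshold.
    return [(name, settings.get('threshold')) for name in chunk]


if __name__ == '__main__':
    files = ['a.sac', 'b.sac', 'c.sac', 'd.sac', 'e.sac']
    settings = {'threshold': 1.5}
    processes = min(multiprocessing.cpu_count(), len(files))
    with multiprocessing.Pool(processes=processes) as pool:
        results = pool.map(
            worker,
            zip(chunkify(files, len(files) // processes),
                itertools.repeat(settings)))
    print(results)
# -----------------------------------------------------------------------------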
def detect(self, alg, trace_list=None, allow_multiprocessing=True, **kwargs):
    """Performs event detection over a list of traces using the given algorithm.

    If multiprocessing is allowed and there is more than one trace, the traces
    are split into chunks and dispatched to a pool of worker processes.
    """
    trace_list = self.traces if trace_list is None else trace_list[:]
    n_traces = len(trace_list)
    if allow_multiprocessing and n_traces > 1:
        processes = min(mp.cpu_count(), n_traces)
        p = mp.Pool(processes=processes)
        processed_traces = p.map(_detect,
                                 itertools.izip(itertools.repeat(alg),
                                                collections.chunkify(trace_list,
                                                                     n_traces / processes),
                                                itertools.repeat(kwargs)))
        processed_traces = collections.flatten_list(processed_traces)
        # Update existing traces w. new events and cf from the processed traces
        for trace, processed_trace in zip(trace_list, processed_traces):
            new_events = [event for event in processed_trace.events
                          if event not in trace.events]
            for event in new_events:
                trace.add_event_from_copy(event)
            trace.cf = processed_trace.cf[:]
        # Cleanup
        del processed_traces
        del trace_list
        p.close()
        p.join()
        gc.collect(2)
    else:
        _detect((alg, trace_list, kwargs))
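# --- Illustrative sketch (not part of the original module) -------------------
# Pool.map workers receive pickled copies of the traces, so any events they
# detect must be merged back into the parent's originals, which is what the
# zip(trace_list, processed_traces) loop in detect() does. This is a toy,
# self-contained version of that merge-back pattern; 'Trace' and '_detect_copy'
# are hypothetical stand-ins for the project's trace objects and _detect worker.
import multiprocessing


class Trace:
    def __init__(self, name, events=None):
        self.name = name
        self.events = list(events or [])


def _detect_copy(trace):
    """Worker: return a processed copy carrying any newly detected events."""
    processed = Trace(trace.name, trace.events)
    processed.events.append(f'event@{trace.name}')  # placeholder detection
    return processed


if __name__ == '__main__':
    traces = [Trace('t1'), Trace('t2', events=['old'])]
    with multiprocessing.Pool(processes=2) as pool:
        processed_traces = pool.map(_detect_copy, traces)
    # The parent process owns the originals, so new events are copied back in.
    for trace, processed in zip(traces, processed_traces):
        for event in processed.events:
            if event not in trace.events:
                trace.events.append(event)
    print([(t.name, t.events) for t in traces])
# -----------------------------------------------------------------------------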