# imports assumed from the InfraPy package layout
from multiprocessing import Pool, cpu_count

from infrapy.association import hjl
from infrapy.propagation import likelihoods as lklhds

# load detections from file
det_list = lklhds.json_to_detection_list('data/detection_set1.json')

# define joint-likelihood calculation parameters
width = 10.0
rng_max = 3000.0

# define clustering parameters
dist_max = 10.0
clustering_threshold = 5.0
trimming_thresh = 3.0

pl = Pool(cpu_count() - 1)

######################
#### Run analysis ####
######################
labels, dists = hjl.run(det_list, clustering_threshold, dist_max=dist_max, bm_width=width,
                        rng_max=rng_max, trimming_thresh=trimming_thresh, pool=pl, show_result=True)

# Summarize clusters
clusters, qualities = hjl.summarize_clusters(labels, dists)
for n in range(len(clusters)):
    print("Cluster:", clusters[n], '\t', "Cluster Quality:", 10.0**(-qualities[n]))

pl.close()
pl.terminate()
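# A minimal sketch of working with the summarized clusters: each entry of
# `clusters` holds integer indices into `det_list` (the same convention the
# database methods below rely on), so the detections belonging to each event
# can be gathered directly. Variable names here are illustrative, not part of
# the InfraPy API.
event_detections = [[det_list[i] for i in cluster] for cluster in clusters]
for dets, q in zip(event_detections, qualities):
    print(len(dets), "detections, cluster quality:", 10.0**(-q))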
def data_processingASSOC(self):
    '''
    Run association on the accumulated detections and write the resulting
    clusters to the ASSOC_results table, one event id per cluster.
    '''
    print('data processing', short_time(UTCDateTime(self.time_initial)), short_time(UTCDateTime(self.time_end)))
    det_list = lklhds.db2dets(self.det_tot)
    EVIDs = []
    # embed()  # debug hook (IPython) disabled; it would halt every run

    if len(det_list) > 1:
        try:
            #EVIDs, DAQ, CAQ = assoc(det_list, self.lims, float(self.assocthresh), show_result=False, parallel=True, num_cores=self.numcores)
            #labels, dists = hjl.run(det_list, self.clusterthresh, dist_max=self.distmax, bm_width=self.beamwidth, rng_max=self.rangemax, trimming_thresh=self.trimthresh, pool=self.pl)
            labels, dists = hjl.run(det_list, self.clusterthresh, dist_max=self.distmax,
                                    bm_width=self.beamwidth, rng_max=self.rangemax, pool=self.pl)
            clusters, qualities = hjl.summarize_clusters(labels, dists)
            for n in range(len(clusters)):
                print("Cluster:", clusters[n], '\t', "Cluster Quality:", 10.0**(-qualities[n]))

                # assign the next free event id to this cluster
                lastEVENTIDQ = self.session.query(func.max(self.ASSOC_results.eventid)).all()
                lastEVENTID = lastEVENTIDQ[0][0]
                if lastEVENTID is None:
                    lastEVENTID = int(0)
                lastEVENTID = lastEVENTID + 1

                for nn in range(len(clusters[n])):
                    det_id = clusters[n][nn]
                    # skip detections already written for this run and quality
                    id_res = self.session.query(self.ASSOC_results).filter(self.ASSOC_results.net == self.net)\
                                .filter(self.ASSOC_results.fdid == self.det_tot[det_id][6])\
                                .filter(self.ASSOC_results.passocid == self.passocid)\
                                .filter(self.ASSOC_results.timeini == self.time_initial)\
                                .filter(self.ASSOC_results.timeend == self.time_end)\
                                .filter(self.ASSOC_results.qdetcluster == 10.0**(-qualities[n]))\
                                .filter(self.ASSOC_results.fdtable == self.det_tot[det_id][8])\
                                .filter(self.ASSOC_results.sta == self.det_tot[det_id][7]).all()
                    id_resC = self.session.query(self.ASSOC_results).count() + 1
                    if not id_res:
                        res = self.ASSOC_results(associd=id_resC,
                                                 fdid=self.det_tot[det_id][6],
                                                 eventid=int(lastEVENTID),
                                                 passocid=self.passocid,
                                                 net=self.net,
                                                 timeini=self.time_initial,
                                                 timeend=self.time_end,
                                                 qdetcluster=10.0**(-qualities[n]),
                                                 fdtable=self.det_tot[det_id][8],
                                                 sta=self.det_tot[det_id][7])
                        self.session.add(res)
                        self.session.commit()
            print('associations written', len(clusters))
        except Exception as ex1:
            print('error running assoc:', ex1)
            embed()  # requires `from IPython import embed`; drops into a shell on failure
            exit()
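# A hedged sketch (not part of the original class) of reading the written
# associations back out, grouping detection ids by event id. It assumes the
# same SQLAlchemy `session` and `ASSOC_results` mapping used above; the
# function name and signature are illustrative.
def read_events(session, ASSOC_results, net, passocid):
    events = {}
    for row in session.query(ASSOC_results).filter(ASSOC_results.net == net)\
                      .filter(ASSOC_results.passocid == passocid).all():
        events.setdefault(row.eventid, []).append(row.fdid)
    return events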
def data_processingASSOC(self, t_start, t_end, src_win, max_prop_tm):
    '''
    Run association over sliding source windows between t_start and t_end,
    merge events with overlapping detection sets, and write the surviving
    events to the ASSOC_results table.
    '''
    print('data processing', short_time(UTCDateTime(self.time_initial)), short_time(UTCDateTime(self.time_end)))
    det_list = lklhds.db2dets(self.det_tot)
    min_array_pop = self.minarraypop
    EVIDs = []

    if len(det_list) > 1:
        try:
            events = []
            event_qls = []
            window_start = []
            window_end = []
            duration_dd = int((t_end - t_start).astype('m8[s]').astype(float) / 60.0)
            #duration_dd = int((t_end - t_start) / 60.0)
            for dt in range(0, duration_dd, int(src_win)):
                win_start = t_start + np.timedelta64(dt, 'm')
                win_end = t_start + np.timedelta64(dt + int(src_win + max_prop_tm), 'm')
                print('\n' + "Computing associations for:", win_start, " - ", win_end)

                # keep only detections whose peak F-stat time falls in the window,
                # remembering their indices in the full detection list
                temp = [(j, det) for j, det in enumerate(det_list)
                        if np.logical_and(win_start <= det.peakF_UTCtime, det.peakF_UTCtime <= win_end)]
                key = [pair[0] for pair in temp]
                new_list = [pair[1] for pair in temp]

                # run analysis
                if len(new_list) > 1:
                    if self.trimthresh == 'None':
                        self.trimthresh = None
                    labels, dists = hjl.run(new_list, self.clusterthresh, dist_max=self.distmax,
                                            bm_width=self.beamwidth, rng_max=self.rangemax,
                                            pool=self.pl, trimming_thresh=self.trimthresh)
                    clusters, qualities = hjl.summarize_clusters(labels, dists, population_min=int(self.mindetpop))
                    for n in range(len(clusters)):
                        # map window-local indices back to the full detection list
                        events += [[key[j] for j in clusters[n]]]
                        event_qls += [10.0**(-qualities[n])]
                        window_start.append(UTCDateTime(win_start.astype(datetime)).timestamp)  # requires `from datetime import datetime`
                        window_end.append(UTCDateTime(win_end.astype(datetime)).timestamp)

            # merge event pairs whose detection sets overlap by more than 50%
            # (relative to the smaller set); the losing event is emptied and
            # flagged with quality -1.0
            event_cnt = len(events)
            for n1 in range(event_cnt):
                for n2 in range(n1 + 1, event_cnt):
                    if len(events[n1]) > 0 and len(events[n2]) > 0:
                        set1, set2 = set(events[n1]), set(events[n2])
                        rel_overlap = len(set1.intersection(set2)) / min(len(set1), len(set2))
                        if rel_overlap > 0.5:
                            events[n1], events[n2] = list(set1.union(set2)), []
                            event_qls[n1], event_qls[n2] = max(event_qls[n1], event_qls[n2]), -1.0

            # discard events observed at fewer than min_array_pop distinct arrays,
            # using unique detection latitudes/longitudes as a proxy
            for n, ev_ids in enumerate(events):
                if len(ev_ids) > 0:
                    locs = np.array([[det_list[j].latitude, det_list[j].longitude] for j in ev_ids])
                    unique_cnt = max(len(np.unique(locs[:, 0])), len(np.unique(locs[:, 1])))
                    if unique_cnt < int(min_array_pop):
                        events[n] = []
                        event_qls[n] = -1.0

            # compact the window lists with the same mask so window_start[n] and
            # window_end[n] stay aligned with the surviving events (the original
            # code left them unfiltered, mis-indexing the windows below)
            window_start = [wsi for wsi, eqi in zip(window_start, event_qls) if eqi > 0]
            window_end = [wei for wei, eqi in zip(window_end, event_qls) if eqi > 0]
            events = [ei for ei in events if len(ei) > 0]
            event_qls = [eqi for eqi in event_qls if eqi > 0]

            print("Identified events and qualities:")
            for n in range(len(events)):
                print('\t', events[n], '\t', event_qls[n])

            lastEVENTIDQ = self.session.query(func.max(self.ASSOC_results.eventid)).all()
            lastEVENTID = lastEVENTIDQ[0][0]
            if lastEVENTID is None:
                lastEVENTID = int(0)

            for n in range(len(events)):
                for nn in range(len(events[n])):
                    det_id = events[n][nn]
                    # skip detections already written for this run and quality
                    id_res = self.session.query(self.ASSOC_results).filter(self.ASSOC_results.net == self.net)\
                                .filter(self.ASSOC_results.fdid == self.det_tot[det_id][6])\
                                .filter(self.ASSOC_results.passocid == self.passocid)\
                                .filter(self.ASSOC_results.timeini == self.time_initial)\
                                .filter(self.ASSOC_results.timeend == self.time_end)\
                                .filter(self.ASSOC_results.qdetcluster == event_qls[n])\
                                .filter(self.ASSOC_results.fdtable == self.det_tot[det_id][8])\
                                .filter(self.ASSOC_results.sta == self.det_tot[det_id][7]).all()
                    id_resC = self.session.query(self.ASSOC_results).count() + 1
                    if not id_res:
                        res = self.ASSOC_results(associd=id_resC,
                                                 fdid=self.det_tot[det_id][6],
                                                 eventid=int(lastEVENTID + 1 + n),
                                                 passocid=self.passocid,
                                                 net=self.net,
                                                 timeini=window_start[n],
                                                 timeend=window_end[n],
                                                 qdetcluster=event_qls[n],
                                                 fdtable=self.det_tot[det_id][8],
                                                 sta=self.det_tot[det_id][7])
                        self.session.add(res)
                        self.session.commit()
            print('associations written', len(events))
        except Exception as ex1:
            print('error running assoc:', ex1)
            embed()  # requires `from IPython import embed`
            exit()
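# A standalone sketch of the merge rule used above, assuming events are lists
# of detection indices: two events merge when their intersection exceeds 50%
# of the smaller set, and the merged event keeps the better quality. The
# function name and toy data are illustrative only.
def merge_overlapping(events, qualities, threshold=0.5):
    events, qualities = list(events), list(qualities)
    for n1 in range(len(events)):
        for n2 in range(n1 + 1, len(events)):
            if events[n1] and events[n2]:
                set1, set2 = set(events[n1]), set(events[n2])
                if len(set1 & set2) / min(len(set1), len(set2)) > threshold:
                    events[n1], events[n2] = sorted(set1 | set2), []
                    qualities[n1], qualities[n2] = max(qualities[n1], qualities[n2]), -1.0
    keep = [n for n, ev in enumerate(events) if ev]
    return [events[n] for n in keep], [qualities[n] for n in keep]

# example: [0, 1, 2] and [1, 2, 3] share 2 of 3 detections (overlap 2/3 > 0.5),
# so they merge; [7, 8] is disjoint and survives unchanged
print(merge_overlapping([[0, 1, 2], [1, 2, 3], [7, 8]], [0.9, 0.8, 0.5]))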