def plot_mags(result, fname=None, title=None, xlim=None, ylim=None,
              figsize=None):
    """Plot Qopen moment magnitudes versus catalogue magnitudes.

    :param result: Qopen results dictionary; ``result['events']`` maps event
        ids to dicts which may carry ``'Mcat'`` (catalogue magnitude) and
        ``'Mw'`` (moment magnitude from the inversion)
    :param fname: file name forwarded to ``_savefig``
    :param title: figure title forwarded to ``_savefig``
    :param xlim: x-axis limits; also defines the range of the fit lines
    :param ylim: y-axis limits
    :param figsize: matplotlib figure size

    Events missing either magnitude are ignored; if no event has both,
    nothing is plotted.
    """
    fig = plt.figure(figsize=figsize)
    ax = fig.add_subplot(111)
    temp = [(r['Mcat'], r['Mw']) for r in result['events'].values()
            if r.get('Mcat') is not None and r.get('Mw') is not None]
    if len(temp) == 0:
        return
    Mcat, Mw = zip(*temp)
    ax.plot(Mcat, Mw, 'ok', ms=MS)
    if xlim is not None:
        mmin, mmax = xlim
    else:
        mmin, mmax = np.min(Mcat), np.max(Mcat)
    m = np.linspace(mmin, mmax, 100)
    # two-parameter fit (slope and intercept) needs at least 3 points
    if len(Mw) > 2:
        a, b = linear_fit(Mw, Mcat)
        ax.plot(m, a * m + b, '-m', label='%.2fM %+.2f' % (a, b))
    # fit with slope fixed to 1 (offset only)
    if len(Mw) > 3:
        _, b2 = linear_fit(Mw, Mcat, m=1)
        ax.plot(m, m + b2, '--m', label='M %+.2f' % (b2, ))
    if len(Mw) > 2:
        ax.legend(loc='lower right')
    if xlim:
        ax.set_xlim(xlim)
    if ylim:
        # fix: ylim was accepted but silently ignored before
        ax.set_ylim(ylim)
    ax.set_xlabel('M from catalog')
    ax.set_ylabel('Mw from inversion')
    _savefig(fig, fname=fname, title=title)
def plot_mags(result, xlim=None, ylim=None, plot_only_ids=None, **kwargs):
    """Plot Qopen moment magnitudes versus catalogue magnitudes.

    :param result: Qopen results dictionary; ``result['events']`` maps event
        ids to dicts which may carry ``'Mcat'`` and ``'Mw'``
    :param xlim: x-axis limits; also defines the range of the fit line
    :param ylim: y-axis limits
    :param plot_only_ids: optional collection of event ids to restrict the
        plot to (None -> use all events)
    :param kwargs: forwarded to ``_savefig`` (e.g. ``fname``, ``title``)
    """
    fig = plt.figure()
    ax = fig.add_subplot(111)
    temp = [(r['Mcat'], r['Mw']) for id_, r in result['events'].items()
            if r.get('Mcat') is not None and r.get('Mw') is not None and
            (plot_only_ids is None or id_ in plot_only_ids)]
    if len(temp) == 0:
        return
    Mcat, Mw = zip(*temp)
    ax.plot(Mcat, Mw, 'ok', ms=MS)
    if xlim is not None:
        mmin, mmax = xlim
    else:
        mmin, mmax = np.min(Mcat), np.max(Mcat)
    m = np.linspace(mmin, mmax, 100)
    # linear fit and its legend need at least 4 points
    if len(Mw) > 3:
        a, b = linear_fit(Mw, Mcat)
        ax.plot(m, a * m + b, '-m', label='%.2fM %+.2f' % (a, b))
        ax.legend(loc='lower right')
    if xlim:
        ax.set_xlim(xlim)
    if ylim:
        # fix: ylim was accepted but silently ignored before
        ax.set_ylim(ylim)
    ax.set_xlabel('M from catalog')
    ax.set_ylabel('Mw from inversion')
    _savefig(fig, **kwargs)
def plot_mean():
    """Plot mean attenuation Q^-1 versus frequency.

    Shows total, intrinsic and scattering attenuation averaged over all
    stations, plus the same three quantities split into a western
    (longitude < -103) and an eastern subset.  Saves the figure to
    ``FIGOUT2 + 'Qmean.pdf'``.
    """
    import seaborn as sns
    sns.set_style('ticks')
    # 9 curves: [Qi, Qsc, Qtot] for all data, then Qi/Qsc/Qtot split
    # into west/east pairs (indices 3-8)
    d = [[] for _ in range(9)]
    for i in range(len(FREQ)):
        lons1, lats1, data1 = collect_data('Qi', i)
        lons2, lats2, data2 = collect_data('Qsc', i)
        data3 = Qtot(np.array(data1), np.array(data2))
        d[0].append(np.mean(data1))
        d[1].append(np.mean(data2))
        d[2].append(np.mean(data3))
        ind = np.array(lons1) < -103  # west of -103 deg longitude
        d[3].append(np.mean(np.array(data1)[ind]))
        d[4].append(np.mean(np.array(data1)[~ind]))
        d[5].append(np.mean(np.array(data2)[ind]))
        d[6].append(np.mean(np.array(data2)[~ind]))
        d[7].append(np.mean(np.array(data3)[ind]))
        d[8].append(np.mean(np.array(data3)[~ind]))
    fs = 100 / 25.4  # figure width: 100 mm in inches
    fig = plt.figure(figsize=(fs, 0.9 * fs))
    ax = fig.add_subplot(111)
    # means were taken in log10 space -> convert back to linear Q^-1
    d = [10**np.array(dx) for dx in d]
    kw = {'lw': 2, 'dash_capstyle': 'round'}
    ax.loglog(FREQ, d[2], color='#4E4E4E', label='total', **kw)
    kw['dashes'] = (3, 2)
    ax.loglog(FREQ, d[0], '--', color='#4E4E4E', label='intrinsic', **kw)
    ax.loglog(FREQ, d[3], '--', color='#CF0000', alpha=0.5, **kw)
    ax.loglog(FREQ, d[4], '--', color='#006AFF', alpha=0.5, **kw)
    kw['dashes'] = (2, 2, 0.1, 2)
    ax.loglog(FREQ, d[1], '-.', color='#4E4E4E', label='scattering', **kw)
    ax.loglog(FREQ, d[5], '-.', color='#CF0000', alpha=0.5, **kw)
    ax.loglog(FREQ, d[6], '-.', color='#006AFF', alpha=0.5, **kw)
    kw.pop('dashes')
    ax.loglog(FREQ, d[7], color='#CF0000', label='west', alpha=0.5, **kw)
    ax.loglog(FREQ, d[8], color='#006AFF', label='east', alpha=0.5, **kw)
    ax.legend(fontsize='small', labelspacing=0.3, handlelength=1.8,
              bbox_to_anchor=(1.1, 0.9))
    ax.set_xlim(1.5 / 1.2, 17 * 1.2)
    ax.set_ylim(4e-4, 2e-2)
    ax.set_xticks([1.5, 3, 6, 12])
    ax.set_xticklabels([1.5, 3., 6., 12.])
    ax.set_xticks([2.1, 4.2, 8.5, 17.], minor=True)
    ax.set_xlabel('frequency (Hz)')
    ax.set_ylabel('attenuation Q$^{-1}$')
    # annotate a power-law fit of the total attenuation curve
    slope, intercept = linear_fit(np.log10(d[2]), np.log10(FREQ))
    annotation = (r'$Q_{\mathrm{tot}}=%.0f\times f^{%.1f}$'
                  % (10**(-intercept), -slope))
    ax.annotate(annotation, (1.7, 1.3e-2))
    sns.despine()
    fig.tight_layout()
    fig.savefig(FIGOUT2 + 'Qmean.pdf', bbox_inches='tight')
    plt.close(fig)
def collect_results(correct=True, events=False):
    """Collect Qopen inversion results from the shove and save them as JSON.

    Reads the configuration from ``CONF_FNAME`` and per-event results from
    the ``JsonShove`` at ``SHOVE_FNAME``, optionally aligns site responses,
    aggregates per-station means/errors/slopes of v0, R, Qsc and Qi over
    frequency bands, and writes the sorted results to ``RESULTSJSON``.

    :param correct: align site sensitivities before aggregating (the
        uncorrected results are kept for selecting valid observations)
    :param events: additionally collect event magnitudes via
        ``eventresults2json``
    """
    with open(CONF_FNAME) as f:
        conf = json.load(f, cls=ConfigJSONDecoder)
    freq = get_freqs(**conf['freqs'])
    fbands = list(freq.values())
    freq = list(freq.keys())
    fa = np.asarray(freq)
    print('copy results from shove')
    r = {'freq': freq, 'events': {}, 'config': conf}
    if GENERATE_TEST_FILE:
        # count how often each station was used, to cap it at MAX_NOBS
        nobs_station = defaultdict(int)
    with JsonShove(SHOVE_FNAME, compress=True) as sh:
        msg = '%d in shove / %d events processed'
        print(msg % (len(sh), len(glob(EVENT_GLOB))))
        for evid in sh:
            if sh[evid] is None:
                continue
            if GENERATE_TEST_FILE:
                stations = sh[evid]['R'].keys()
                if min(nobs_station[s] for s in stations) >= MAX_NOBS:
                    continue
                for s in stations:
                    nobs_station[s] = nobs_station[s] + 1
            r['events'][evid] = sh[evid]
    if GENERATE_TEST_FILE:
        # keep only a single frequency band to shrink the test file
        i = 6
        for evid in r['events']:
            for sta in r['events'][evid]['R']:
                r['events'][evid]['R'][sta] = \
                    r['events'][evid]['R'][sta][i:i + 1]
            r['events'][evid] = {
                'W': r['events'][evid]['W'][i:i + 1],
                'R': r['events'][evid]['R']
            }
        r['freq'] = r['freq'][i:i + 1]
        del r['config']
        # fix: json.dump writes str in Python 3 -> open in text mode ('w'),
        # not binary ('wb')
        with open(TMP + 'usarray_dataset.json', 'w') as f:
            json.dump(r, f)
        return
    if IGNORE_STATIONS is not None:
        print('remove stations %s' % IGNORE_STATIONS)
        sh = r['events']
        # fix: iterate over list copies -- dict_keys is not sliceable in
        # Python 3 and popping while iterating a live view raises
        # RuntimeError
        for evid in list(sh.keys()):
            for station in list(sh[evid]['R'].keys()):
                if station in IGNORE_STATIONS:
                    sh[evid]['R'].pop(station)
            if len(sh[evid]['R']) == 0:
                sh.pop(evid)
    Nres = len(r['events'])
    print("number of events with results: %d" % Nres)
    rorig = r
    if correct:
        # keep an uncorrected copy: it decides which observations are valid
        rorig = deepcopy(r)
        print('correct sensitivities')
        # r = align_site_responses(r, station=BOREHOLE_STATIONS,
        #                          response=0.25)
        r = align_site_responses(r)
    if events:
        print('collect magnitudes')
        eventresults2json(r)
    print('collect results')
    # data[what][station][band_index] -> list of observations
    data = defaultdict(lambda: defaultdict(lambda: [[] for _ in freq]))
    data['v0'] = defaultdict(list)
    results = defaultdict(lambda: defaultdict(dict))
    sh = r['events']
    sh2 = rorig['events']
    for evid in sh:
        for station in sh2[evid]['R']:
            v0 = sh2[evid]['v0']
            data['v0'][station].append(v0)
            for i in range(len(sh2[evid]['R'][station])):
                R1 = sh[evid]['R'][station][i]
                R2 = sh2[evid]['R'][station][i]
                b = sh[evid]['b'][i]
                g0 = sh[evid]['g0'][i]
                if R2 is not None:
                    if R1 is not None:
                        data['R'][station][i].append(R1)
                    Qi = calc_dependent('Qi', b, freq[i], v0)
                    data['Qi'][station][i].append(Qi)
                    Qsc = calc_dependent('Qsc', g0, freq[i], v0)
                    data['Qsc'][station][i].append(Qsc)
    print('calc mean, err and slope')
    for what in ('v0', 'R', 'Qsc', 'Qi'):
        for station in data[what]:
            d = data[what][station]
            if what == 'v0':
                results[what][station] = np.mean(d)
                continue
            mean, err, nobs = [], [], []
            for i in range(len(freq)):
                # fix: np.float was removed in NumPy 1.24 -> builtin float
                da = np.asarray(d[i], dtype=float)
                m, e1, e2 = gerr(da)
                e = np.log10(m) - np.log10(e1)
                m = np.log10(m)
                # JSON cannot represent NaN -> store None instead
                mean.append(None if not np.isscalar(m) or np.isnan(m)
                            else m)
                err.append(None if not np.isscalar(e) or np.isnan(e)
                           else e)
                nobs.append(np.count_nonzero(np.logical_not(np.isnan(da))))
            results[what][station]['mean'] = mean
            results[what][station]['error'] = err
            if what == 'Qsc':
                results['nobs'][station] = nobs
            mean = np.asarray(mean, dtype=float)
            ind = np.logical_not(np.isnan(mean))
            if what == 'R':
                # no frequency power-law fit for site responses
                continue
            elif np.count_nonzero(ind) < 3:
                slope, intercept = None, None
            else:
                slope, intercept = linear_fit(mean[ind], np.log10(fa[ind]))
            results[what][station]['slope'] = slope
            results[what][station]['intercept'] = intercept
    print('convert to OrderedDict and save json')
    for key in results:
        if key not in ('v0', 'nobs'):
            for sta in results[key]:
                results[key][sta] = sort_dict(results[key][sta], order=ORDER)
        results[key] = OrderedDict(sorted(results[key].items()))
    results['freqs'] = freq
    results['freq_bands'] = fbands
    results['stations'] = collect_stations(False)  # get coordinates
    results['title'] = TITLE
    results['description'] = DESC
    results['author'] = 'Tom Eulenfeld'
    # results['copyright'] = COPYR
    results['license'] = LICENSE
    results = sort_dict(results, order=ORDER)
    with open(RESULTSJSON, 'w') as f:
        f.write(to_json(results, nl_after_str=True))