def main():
    """Train the HW2 baseline ConvNet and write predictions for each split."""
    # Build the experiment configuration from CLI arguments and log it.
    args = parseConfig("Zach's HW2 ConvNet Model")
    config = Config(args)
    config.log(config)

    # Training data, split into disjoint train/validation index sets.
    train_dataset = NaiveDataset(TRAIN_DATA_PATH, TRAIN_LABELS_PATH,
                                 num_examples=NUM_TRAIN)
    train_idx, val_idx = splitIndices(train_dataset, config, shuffle=True)

    # Both loaders read the same dataset; the subset samplers keep the
    # train and validation examples separate.
    train_loader = DataLoader(train_dataset,
                              batch_size=config.batch_size,
                              num_workers=4,
                              sampler=SubsetRandomSampler(train_idx))
    val_loader = DataLoader(train_dataset,
                            batch_size=config.batch_size,
                            num_workers=1,
                            sampler=SubsetRandomSampler(val_idx))
    config.train_loader = train_loader
    config.val_loader = val_loader

    # Test data: its own dataset, iterated in order (no shuffling).
    test_dataset = NaiveDataset(TEST_DATA_PATH, TEST_LABELS_PATH)
    test_loader = DataLoader(test_dataset,
                             batch_size=config.batch_size,
                             shuffle=False,
                             num_workers=2)

    # Model construction and weight initialization.
    model = BaselineCNN(config)
    model.apply(initialize_weights)

    # Train, plot learning curves, then dump predictions per split.
    results = train(model, config)
    visualize.plot_results(results, config)

    make_predictions = True
    if make_predictions:
        predict(model, config, train_loader, dataset="train")
        predict(model, config, val_loader, dataset="val")
        predict(model, config, test_loader, dataset="test")
def main():
    """Run the W-phase quick pipeline: prepare data, invert, plot traces.

    The CMT file comes from the first command-line argument, or else from
    the CMTFILE entry of ./i_master.  Exits with status 1 if neither is
    available.
    """
    BIN = environ['WPHASE_HOME']+'/bin'
    EXTRACT = BIN + '/extract.csh'
    PREPARE = BIN + '/prepare_wp.csh'
    WPINVER = BIN + '/wpinversion'
    TRACES = BIN + '/traces.py'
    # CMT file: explicit argument wins, otherwise read it from i_master.
    if len(argv) > 1:
        CMTFILE = argv[1]
    elif not exists('i_master'):
        stderr.write('Error: file i_master not available\n')
        exit(1)
    else:
        CMTFILE = utils.parseConfig('i_master')['CMTFILE']
    stdout.write('Using cmtfile = %s\n'%CMTFILE)
    mkdir_new('SYNTH')
    # Extraction step deliberately skipped in this variant of the pipeline.
    #call(EXTRACT, shell=True, stdout=stdout)
    cmd = PREPARE + ' -a'
    call(cmd, shell=True, stdout=stdout)
    cmd = WPINVER + ' -osyndir SYNTH -nops -ocmtf /dev/null'
    # BUGFIX: /dev/null was opened read-only ('r'); a child process cannot
    # write its stdout through a read-only fd.  Open for writing to discard
    # the inversion's output, and let `with` close the handle on any path.
    with open('/dev/null', 'w') as devnull:
        call(cmd, shell=True, stdout=devnull)
    pdf = 'wp_pages.pdf'
    if exists(pdf):
        RM(pdf)
    cmd = TRACES + ' --icmtf ' + CMTFILE + ' --osydir SYNTH'
    call(cmd, shell=True, stdout=stdout)
    stdout.write('---> ' + pdf + '\n')
def main():
    """Run the full W-phase pipeline: extract, prepare, invert, plot traces.

    The CMT file comes from the first command-line argument, or else from
    the CMTFILE entry of ./i_master.  Exits with status 1 if neither is
    available.
    """
    BIN = environ['WPHASE_HOME'] + '/bin'
    EXTRACT = BIN + '/extract.csh'
    PREPARE = BIN + '/prepare_wp.csh'
    WPINVER = BIN + '/wpinversion'
    TRACES = BIN + '/traces.py'
    # CMT file: explicit argument wins, otherwise read it from i_master.
    if len(argv) > 1:
        CMTFILE = argv[1]
    elif not exists('i_master'):
        stderr.write('Error: file i_master not available\n')
        exit(1)
    else:
        CMTFILE = utils.parseConfig('i_master')['CMTFILE']
    stdout.write('Using cmtfile = %s\n' % CMTFILE)
    mkdir_new('SYNTH')
    call(EXTRACT, shell=True, stdout=stdout)
    cmd = PREPARE + ' -a'
    call(cmd, shell=True, stdout=stdout)
    cmd = WPINVER + ' -osyndir SYNTH -nops -ocmtf /dev/null'
    # BUGFIX: /dev/null was opened read-only ('r'); a child process cannot
    # write its stdout through a read-only fd.  Open for writing to discard
    # the inversion's output, and let `with` close the handle on any path.
    with open('/dev/null', 'w') as devnull:
        call(cmd, shell=True, stdout=devnull)
    pdf = 'wp_pages.pdf'
    if exists(pdf):
        RM(pdf)
    cmd = TRACES + ' --icmtf ' + CMTFILE + ' --osydir SYNTH'
    call(cmd, shell=True, stdout=stdout)
    stdout.write('---> ' + pdf + '\n')
#!/usr/bin/python
# -*- coding: utf-8 -*-
## Forward messenger messages to standard output and email.
## Simple filter.
## TODO: digest message
from messenger import Skype, FacebookMessenger
import keyring
import utils
import trollius
from trollius import From
import functools
import sys
from datetime import datetime

cfg = utils.parseConfig()
# BUGFIX: compare with `is` — parseConfig signals failure with the False
# singleton, and `== False` would also match 0/empty values that could be
# valid configurations.
if cfg is False:
    print("Please check your config file, normally it is config.yml")
    # NOTE(review): exiting with status 0 on a configuration error looks
    # wrong, but callers may rely on it — left unchanged.
    sys.exit(0)

# An empty string disables the email-forwarding feature.
emailAddress = cfg['general']['emailaddress']
# Only send email if the message contains a word from this list;
# an empty list disables the filter.
strFilter = cfg['general']['filterList']

## Credentials/tokens cached in the system keyring, re-used across runs.
username = keyring.get_password('messagesReceiver', 'skypeUsername')
password = keyring.get_password('messagesReceiver', 'skypePassword')
prefix = "%s_" % username
token = keyring.get_password('messagesReceiver', prefix + 'skypeToken')
registrationToken = keyring.get_password('messagesReceiver', prefix + 'skypeRegistrationToken')
def main(argv):
    """Parse options and run the W-phase time-shift / centroid grid-searches."""
    # BUGFIX: WPINV_XY lives at module level (Arguments.py).  Without this
    # declaration, the `+=` under --old makes the name function-local and
    # raises UnboundLocalError whenever --old is passed.
    global WPINV_XY
    # Extract command line options
    try:
        opts, args = getopt.gnu_getopt(argv[1:], 'stpSdi:nhz', [
            "hdsafe", "onlyts", "onlyxy", "npar", "imas=", "strike=", "dc",
            "nont", "dip=", "rake=", "mom=", "noref", "xyz", "old", "help"
        ])
    except getopt.GetoptError as err:
        usage()
        raise
    # Parse command line options (defaults: fast combined TS + XY search)
    i_master = IMASTER
    fastflag = True
    flagts = True
    flagxy = True
    flagxyz = False
    flagref = True
    sdrM0 = {}
    for o, a in opts:
        if o == '-h' or o == '--help':
            disphelp()
            sys.exit(0)
        if o == '-s' or o == '--hdsafe':
            fastflag = False
        if o == '-t' or o == '--onlyts':
            if not flagts:
                usage()
                raise getopt.GetoptError(
                    'options -t and -p cannot be used simultaneously')
            flagxy = False
            flagts = True
        if o == '-p' or o == '--onlyxy':
            if not flagxy:
                usage()
                raise getopt.GetoptError(
                    'options -t and -p cannot be used simultaneously')
            flagts = False
            fastflag = False
            flagxy = True
        # Source-mechanism constraints forwarded to the inversion.
        if o == '--dc':
            sdrM0['-dc'] = ''
        if o == '--nont':
            sdrM0['-nont'] = ''
        if o == '--strike':
            sdrM0['-strike'] = a
        if o == '--dip':
            sdrM0['-dip'] = a
        if o == '--rake':
            sdrM0['-rake'] = a
        if o == '--mom':
            sdrM0['-mom'] = a
        if o == '-i' or o == '--imas':
            i_master = a
        if o == '-n' or o == '--noref':
            flagref = False
        if o == '-z' or o == '--xyz':
            flagxyz = True
        if o == '--old':
            WPINV_XY += ' -old'
    # Read i_master
    iconfig = utils.parseConfig(i_master)
    cmtref = iconfig['CMTFILE']
    evname = iconfig['EVNAME'].replace(' ', '_').replace(',', '')
    # Set comments in output ps file
    Median = '-med '
    if 'P2P_SCREENING' in iconfig:
        p2p_items = iconfig['P2P_SCREENING'].split()
        if p2p_items[0] != 'YES':
            Median = ' '
        elif len(p2p_items) > 1:
            for p in p2p_items[1:]:
                Median += p + ' '
    ths = '-th 5.0 3.0 0.9'
    if 'RMS_SCREENING' in iconfig:
        ths = '-th ' + iconfig['RMS_SCREENING']
    comments = [VERSION, 'GF_PATH: ' + GF_PATH, 'Screening: ' + Median + ths]
    # Read reference CMTFILE
    eq = EarthQuake()
    eq.rcmtfile(cmtref)
    eq.title = evname.strip().replace(' ', '_').replace(',', '')
    # FIX: context manager guarantees the file is closed even on error.
    with open(cmtref, 'r') as cmtf:
        L = cmtf.readlines()
    if len(L) < 13:
        print('*** WARNING : no reference solution in %s' % (cmtref))
        flagref = False
    # TS and/or LAT/LON Grid-search
    if (flagts or flagxy) and not flagxyz:
        grid_search(eq, cmtref, TS_NIT, TS_DT, TSBOUNDS, XY_NIT, XY_DX, XY_NX,
                    XY_NOPT, fastflag, flagts, flagxy, sdrM0,
                    ts_ofile=TS_OFILE, xy_ofile=XY_OFILE, comments=comments)
    # TS and LAT/LON/DEP Grid-search
    if flagxyz:
        grid_search(eq, cmtref, TS_NIT, TS_DT, TSBOUNDS, XYZ_NIT, XYZ_DX,
                    XYZ_NX, XYZ_NOPT, fastflag, flagts, flagxyz, sdrM0,
                    dz=DDEP, minz=MINDEP, ts_ofile=TS_OFILE,
                    xy_ofile=XYZ_OFILE, comments=comments)
        if flagxy:
            # Refine LAT/LON at the best depth found by the XYZ search.
            eq.wcmtfile('_tmp_CMTSOLUTION.xyz')
            if flagref:
                addrefsol(cmtref, '_tmp_CMTSOLUTION.xyz')
            grid_search(eq, '_tmp_CMTSOLUTION.xyz', TS_NIT, TS_DT, TSBOUNDS,
                        XY_NIT, XY_DX, XY_NX, XY_NOPT, 0, 0, 1, sdrM0,
                        ts_ofile=TS_OFILE, xy_ofile=XY_OFILE,
                        comments=comments)
            utils.rm('_tmp_CMTSOLUTION.xyz')
    # Cleaning up
    if os.path.exists('_tmp_ts_table'):
        utils.rm('_tmp_ts_table')
    if os.path.exists('_tmp_xy_table'):
        utils.rm('_tmp_xy_table')
def main(argv):
    """Parse options and run the W-phase time-shift / centroid grid-searches."""
    # BUGFIX: WPINV_XY lives at module level (Arguments.py).  Without this
    # declaration, the `+=` under --old makes the name function-local and
    # raises UnboundLocalError whenever --old is passed.
    global WPINV_XY
    # Extract command line options
    try:
        opts, args = getopt.gnu_getopt(argv[1:], 'stpSdi:nhz',
                                       ["hdsafe", "onlyts", "onlyxy", "npar",
                                        "imas=", "strike=", "dc", "nont",
                                        "dip=", "rake=", "mom=", "noref",
                                        "xyz", "old", "help"])
    except getopt.GetoptError as err:
        usage()
        raise
    # Parse command line options (defaults: fast combined TS + XY search)
    i_master = IMASTER
    fastflag = True
    flagts = True
    flagxy = True
    flagxyz = False
    flagref = True
    sdrM0 = {}
    for o, a in opts:
        if o == '-h' or o == '--help':
            disphelp()
            sys.exit(0)
        if o == '-s' or o == '--hdsafe':
            fastflag = False
        if o == '-t' or o == '--onlyts':
            if not flagts:
                usage()
                raise getopt.GetoptError('options -t and -p cannot be used simultaneously')
            flagxy = False
            flagts = True
        if o == '-p' or o == '--onlyxy':
            if not flagxy:
                usage()
                raise getopt.GetoptError('options -t and -p cannot be used simultaneously')
            flagts = False
            fastflag = False
            flagxy = True
        # Source-mechanism constraints forwarded to the inversion.
        if o == '--dc':
            sdrM0['-dc'] = ''
        if o == '--nont':
            sdrM0['-nont'] = ''
        if o == '--strike':
            sdrM0['-strike'] = a
        if o == '--dip':
            sdrM0['-dip'] = a
        if o == '--rake':
            sdrM0['-rake'] = a
        if o == '--mom':
            sdrM0['-mom'] = a
        if o == '-i' or o == '--imas':
            i_master = a
        if o == '-n' or o == '--noref':
            flagref = False
        if o == '-z' or o == '--xyz':
            flagxyz = True
        if o == '--old':
            WPINV_XY += ' -old'
    # Read i_master
    iconfig = utils.parseConfig(i_master)
    cmtref = iconfig['CMTFILE']
    evname = iconfig['EVNAME'].replace(' ', '_').replace(',', '')
    # Set comments in output ps file
    Median = '-med '
    if 'P2P_SCREENING' in iconfig:
        if iconfig['P2P_SCREENING'] != 'YES':
            Median = ' '
    ths = '5.0 3.0 0.9'
    if 'RMS_SCREENING' in iconfig:
        ths = iconfig['RMS_SCREENING']
    comments = [VERSION, 'GF_PATH: ' + GF_PATH, 'Screening: ' + Median + ths]
    # Read reference CMTFILE
    eq = EarthQuake()
    eq.rcmtfile(cmtref)
    eq.title = evname.strip().replace(' ', '_').replace(',', '')
    # FIX: context manager guarantees the file is closed even on error.
    with open(cmtref, 'r') as cmtf:
        L = cmtf.readlines()
    if len(L) < 13:
        print('*** WARNING : no reference solution in %s' % (cmtref))
        flagref = False
    # TS and/or LAT/LON Grid-search
    if (flagts or flagxy) and not flagxyz:
        grid_search(eq, cmtref, TS_NIT, TS_DT, TSBOUNDS, XY_NIT, XY_DX, XY_NX,
                    XY_NOPT, fastflag, flagts, flagxy, sdrM0,
                    ts_ofile=TS_OFILE, xy_ofile=XY_OFILE, comments=comments)
    # TS and LAT/LON/DEP Grid-search
    if flagxyz:
        grid_search(eq, cmtref, TS_NIT, TS_DT, TSBOUNDS, XYZ_NIT, XYZ_DX,
                    XYZ_NX, XYZ_NOPT, fastflag, flagts, flagxyz, sdrM0,
                    dz=DDEP, minz=MINDEP, ts_ofile=TS_OFILE,
                    xy_ofile=XYZ_OFILE, comments=comments)
        if flagxy:
            # Refine LAT/LON at the best depth found by the XYZ search.
            eq.wcmtfile('_tmp_CMTSOLUTION.xyz')
            if flagref:
                addrefsol(cmtref, '_tmp_CMTSOLUTION.xyz')
            grid_search(eq, '_tmp_CMTSOLUTION.xyz', TS_NIT, TS_DT, TSBOUNDS,
                        XY_NIT, XY_DX, XY_NX, XY_NOPT, 0, 0, 1, sdrM0,
                        ts_ofile=TS_OFILE, xy_ofile=XY_OFILE,
                        comments=comments)
            utils.rm('_tmp_CMTSOLUTION.xyz')
    # Cleaning up
    if os.path.exists('_tmp_ts_table'):
        utils.rm('_tmp_ts_table')
    if os.path.exists('_tmp_xy_table'):
        utils.rm('_tmp_xy_table')
def main():
    """Train the U-Net segmentation model and write out predictions."""
    # Get Config
    args = parseConfig("U-Net Model ( https://arxiv.org/pdf/1505.04597.pdf )")
    config = Config(args)
    config.log(config)

    # Transformations: pad-scale, random crop/flip/transpose augmentation,
    # drop any alpha channel, then convert to tensor.
    size = 112
    transformations = transforms.Compose([
        transforms.Scale(size + 5),
        transforms.RandomCrop(size),
        transforms.RandomHorizontalFlip(),
        transforms.Lambda(lambda x: randomTranspose(np.array(x))),
        transforms.Lambda(lambda x: np.array(x)[:, :, :3]),
        transforms.ToTensor(),
    ])

    # Datasets
    train_dataset = NaiveDataset(TRAIN_DATA_PATH, TRAIN_LABELS_PATH,
                                 num_examples=NUM_TRAIN,
                                 transforms=transformations)
    train_idx, val_idx = splitIndices(train_dataset, config, shuffle=True)
    #weights = UpsamplingWeights(train_dataset)
    #train_sampler = WeightedRandomSampler(weights = weights[train_idx], replacement = True, num_samples = config.num_train)
    train_sampler = SubsetRandomSampler(train_idx)
    val_sampler = SubsetRandomSampler(val_idx)

    # Loaders (both read train_dataset; samplers keep the splits disjoint)
    train_loader = DataLoader(train_dataset, batch_size=config.batch_size,
                              num_workers=4, sampler=train_sampler)
    val_loader = DataLoader(train_dataset, batch_size=config.batch_size,
                            num_workers=1, sampler=val_sampler)
    config.train_loader = train_loader
    config.val_loader = val_loader

    # Create Model
    model = UNet()
    if config.use_gpu:
        model = model.cuda()
    model.apply(initialize_weights)

    # Train and Eval Model
    results = train(model, config, weight_decay=0.0005)
    visualize.plot_results(results, config)

    # Evaluate Results.
    # BUGFIX: the test dataset/loader used to be built twice; the first pair
    # was dead code, immediately overwritten by this pair before any use.
    test_dataset = NaiveDataset(TEST_DATA_PATH, TEST_LABELS_PATH,
                                num_examples=20)
    test_loader = DataLoader(test_dataset, batch_size=10, shuffle=False,
                             num_workers=3)
    make_predictions = True
    if make_predictions:
        predict(model, config, test_loader, dataset="test")
        predict(model, config, train_loader, dataset="train")
        predict(model, config, val_loader, dataset="val")
def main(argv):
    """Plot observed vs. synthetic W-phase traces, one page of panels at a
    time, into a single multi-page PDF (OPDFFILE).

    Options: -i/--icmtf CMT file, -d/--osyndir synthetic dir,
    -r/--regional regional trace length, -h/--help.
    """
    # Input parameters (from Arguments.py)
    imaster = IMASTER
    length = LENGTH_GLOBAL
    syndir = 'SYNTH_traces'
    o_wpinversion = O_WPINVERSION
    nc = NC
    nl = NL
    solfile = None
    flagreg = False
    # Parse options
    try:
        opts, args = go.gnu_getopt(argv[1:], 'i:d:rh',
                                   ["icmtf=", "osydir=", "regional", "help"])
    except go.GetoptError as err:
        sys.stderr.write('usage: %s [option] (for help see %s -h)\n'
                         % (sys.argv[0], sys.argv[0]))
        raise
    for o, a in opts:
        if o == '-h' or o == '--help':
            disphelp(sys.argv[0], solfile, syndir)
            sys.exit(0)
        if o == '-r' or o == '--regional':
            length = LENGTH_REGIONAL
            flagreg = True
        if o == '-i' or o == '--icmtf':
            solfile = a
            if not os.path.exists(solfile):
                raise IOError('No wcmtfile named %s' % (solfile))
        if o == '-d' or o == '--osyndir':
            syndir = a
    # No explicit solution file: take the first standard candidate present.
    if not solfile:
        for f in ['xy_WCMTSOLUTION', 'ts_WCMTSOLUTION', 'WCMTSOLUTION']:
            if os.path.exists(f):
                solfile = f
                break
        if not solfile:
            raise IOError(
                'No wcmtfile available, can be specified with --icmtf')
    eq = EarthQuake()
    eq.rcmtfile(solfile)
    cmtla, cmtlo = eq.lat, eq.lon
    # Title
    conf = utils.parseConfig(imaster)
    title = '_'.join(conf['EVNAME'].split())
    title += ', filter = (%s, %s, %s, %s)' % (
        conf['filt_cf1'], conf['filt_cf2'],
        conf['filt_order'], conf['filt_pass'])
    # Cleanup run dir (never remove/recreate the current directory itself)
    if os.path.exists(syndir) and syndir != '.' and syndir != './':
        utils.rm(syndir)
    if syndir != '.' and syndir != './':
        os.mkdir(syndir)
    if not os.path.exists(LOGDIR):
        os.mkdir(LOGDIR)
    for l in os.listdir('.'):
        if l[:4] == 'page' and l[-4:] == '.pdf':
            utils.rm(l)
    # Compute synthetics
    cmd = SYNTHS + ' ' + imaster + ' ' + solfile + ' ' + o_wpinversion + ' ' + syndir
    print(cmd)
    #status = call(cmd, shell=True, stdin=sys.stdin, stdout=sys.stdout);
    status = os.system(SYNTHS + ' ' + imaster + ' ' + solfile + ' '
                       + o_wpinversion + ' ' + syndir + ' > '
                       + os.path.join(LOGDIR, '_tmp_synths'))
    if status:
        print('Error while running ' + SYNTHS)
        sys.exit(1)
    # Create Sac Objects
    sacdata = sacpy.sac()
    sacsynt = sacpy.sac()
    coords = []
    L = open(o_wpinversion).readlines()
    for l in L:
        sacf = l.strip().split()[0]
        sacdata.read(sacf, datflag=0)
        coords.append([sacdata.stla, sacdata.stlo, sacdata.az, sacdata.dist])
    coords = np.array(coords)
    # Main loop
    print('Input (W)CMTSOLUTION file is: %s' % (solfile))
    print('Output synthetic directory is: %s' % (syndir))
    perpage = nl * nc
    ntot = len(L)
    npages = np.ceil(float(ntot) / float(perpage))
    nchan = 1
    count = 1
    pages = 1
    fig = plt.figure()
    fig.subplots_adjust(bottom=0.06, top=0.87, left=0.06, right=0.95,
                        wspace=0.25, hspace=0.4)
    print('All pages will be saved in %s' % (OPDFFILE))
    pp = mpl.backends.backend_pdf.PdfPages(OPDFFILE)
    basem = None
    for l in L:
        # Parse line: sac file name + W-phase window sample bounds
        items = l.strip().split()
        fic1 = items[0]
        sacdata.read(fic1)
        chan = sacdata.kcmpnm[0:3]
        loc = sacdata.khole
        fic2 = syndir+'/%s.%s.%s.%s.complete_synth.bp.sac'\
               %(sacdata.kstnm,sacdata.knetwk,chan,loc)
        sacsynt.read(fic2)
        # Flush the full page and start a new one
        if count > perpage:
            plt.suptitle(title + ', p %d/%d' % (pages, npages),
                         fontsize=16, y=0.95)
            print('page %d/%d' % (pages, npages))
            #fig.set_rasterized(True)
            pp.savefig(orientation='landscape')
            plt.close()
            pages += 1
            count = 1
            fig = plt.figure()
            fig.subplots_adjust(bottom=0.06, top=0.87, left=0.06, right=0.95,
                                wspace=0.25, hspace=0.4)
        # Time - W phase window (seconds relative to origin time)
        t1 = np.arange(sacdata.npts, dtype='double') * sacdata.delta \
             + sacdata.b - sacdata.o
        t2 = np.arange(sacsynt.npts, dtype='double') * sacsynt.delta \
             + sacsynt.b - sacsynt.o
        wnb = float(items[5])
        wne = float(items[6])
        wtb = sacdata.b - sacdata.o + wnb * sacdata.delta
        wte = sacdata.b - sacdata.o + wne * sacdata.delta
        # Plot trace (black = data, red = synthetic, dots = window bounds)
        ax = plt.subplot(nl, nc, count)
        plt.plot(t1, sacdata.depvar * 1000., 'k')
        plt.plot(t2, sacsynt.depvar * 1000., 'r-')
        plt.plot([wtb, wte], [0., 0.], 'ro')
        # Axes limits
        B = wtb - 150.0
        if B < 0:
            B = 0.0
        plt.xlim([B, B + length * sacsynt.delta])
        if YLIM_AUTO:
            a = np.absolute(sacsynt.depvar[:length]).max() * 1000.
            ymin = -1.1 * a
            ymax = 1.1 * a
            ylims = [ymin, ymax]
        else:
            ylims = YLIMFIXED
        plt.ylim(ylims)
        # Annotations
        plt.rcParams['font.family'] = 'sans-serif'
        plt.rcParams['font.sans-serif'] = 'Arial'
        plt.rcParams['mathtext.fontset'] = 'custom'
        plt.rcParams['mathtext.rm'] = 'sans'
        plt.rcParams['mathtext.it'] = 'sans:italic'
        plt.rcParams['mathtext.default'] = 'it'
        if sacdata.kcmpnm[2] == 'Z':
            if sys.version_info >= (2, 7):
                label = r'%s %s %s %s $(\phi,\Delta) = %6.1f\degree, %6.1f\degree$' % (
                    sacdata.knetwk, sacdata.kstnm, sacdata.kcmpnm,
                    sacdata.khole, sacdata.az, sacdata.gcarc)
            else:
                label = r'%s %s %s %s Az=%3.0f, delta=%3.0f' % (
                    sacdata.knetwk, sacdata.kstnm, sacdata.kcmpnm,
                    sacdata.khole, sacdata.az, sacdata.gcarc)
        else:
            if sys.version_info >= (2, 7):
                label = r'%s %s %s %s $(\phi,\Delta,\alpha) = %6.1f\degree,'
                # FIX: raw string — '\d' in a non-raw literal is an invalid
                # escape sequence (same runtime value, but warns).
                label += r'%6.1f\degree, %6.1f\degree$'
                label = label % (sacdata.knetwk, sacdata.kstnm,
                                 sacdata.kcmpnm, sacdata.khole,
                                 sacdata.az, sacdata.gcarc, sacdata.cmpaz)
            else:
                label = r'%s %s %s %s Az=%3.0f, delta=%3.0f'
                label = label % (sacdata.knetwk, sacdata.kstnm,
                                 sacdata.kcmpnm, sacdata.khole,
                                 sacdata.az, sacdata.gcarc)
        plt.title(label, fontsize=10.0, va='center', ha='center')
        if not (count - 1) % nc:
            plt.ylabel('mm', fontsize=10)
        # BUGFIX: floor division.  Under Python 3 the old `/` yields a float,
        # so the bottom-row test silently failed unless count-1 was an exact
        # multiple of nc (x-labels went missing on some panels).
        if (count - 1) // nc == nl - 1 or nchan + nc > ntot:
            plt.xlabel('time, sec', fontsize=10)
        plt.grid()
        try:
            basem = showBasemap(ax, cmtla, cmtlo, sacdata.stla, sacdata.stlo,
                                coords, flagreg, basem)
        # FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        except Exception:
            showPolarmap(ax, sacdata.az, sacdata.dist, coords)
            print('Cannot use basemap')
        count += 1
        nchan += 1
    # Flush the final (possibly partial) page.
    print('page %d/%d' % (pages, npages))
    #fig.set_rasterized(True)
    plt.suptitle(title + ', p %d/%d' % (pages, npages), fontsize=16, y=0.95)
    pp.savefig(orientation='landscape')
    plt.close()
    pp.close()
def main(argv):
    """Plot observed vs. synthetic W-phase traces into a multi-page PDF.

    Options: -i/--icmtf CMT file, -d/--osyndir synthetic dir,
    -r/--regional regional trace length, -h/--help.
    """
    # Input parameters (from Arguments.py)
    imaster = IMASTER
    length = LENGTH_GLOBAL
    syndir = 'SYNTH_traces'
    o_wpinversion = O_WPINVERSION
    nc = NC
    nl = NL
    solfile = None
    flagreg = False
    # Parse options
    try:
        opts, args = go.gnu_getopt(argv[1:],'i:d:rh',["icmtf=","osydir=","regional","help"])
    except go.GetoptError as err:
        sys.stderr.write('usage: %s [option] (for help see %s -h)\n'%(sys.argv[0],sys.argv[0]))
        raise
    for o, a in opts:
        if o == '-h' or o == '--help':
            disphelp(sys.argv[0],solfile,syndir)
            sys.exit(0)
        if o == '-r' or o == '--regional':
            length = LENGTH_REGIONAL
            flagreg = True
        if o == '-i' or o=='--icmtf':
            solfile = a
            if not os.path.exists(solfile):
                raise IOError('No wcmtfile named %s'%(solfile))
        if o == '-d' or o == '--osyndir':
            syndir = a
    # No explicit solution file: take the first standard candidate present.
    if not solfile:
        for f in ['xy_WCMTSOLUTION','ts_WCMTSOLUTION','WCMTSOLUTION']:
            if os.path.exists(f):
                solfile = f
                break
        if not solfile:
            raise IOError('No wcmtfile available, can be specified with --icmtf')
    eq = EarthQuake()
    eq.rcmtfile(solfile)
    cmtla,cmtlo = eq.lat, eq.lon
    # Title
    conf = utils.parseConfig(imaster)
    title = '_'.join(conf['EVNAME'].split())
    title += ', filter = (%s, %s, %s, %s)'%(conf['filt_cf1'],conf['filt_cf2'],conf['filt_order'],conf['filt_pass'])
    # Cleanup run dir (never remove/recreate the current directory itself)
    if os.path.exists(syndir) and syndir != '.' and syndir != './':
        utils.rm(syndir)
    if syndir != '.' and syndir != './':
        os.mkdir(syndir)
    if not os.path.exists(LOGDIR):
        os.mkdir(LOGDIR)
    for l in os.listdir('.'):
        if l[:4]=='page' and l[-4:]=='.pdf':
            utils.rm(l)
    # Compute synthetics
    cmd = SYNTHS+' '+imaster+' '+solfile+' '+o_wpinversion+' '+syndir
    print(cmd)
    #status = call(cmd, shell=True, stdin=sys.stdin, stdout=sys.stdout);
    status = os.system(SYNTHS+' '+imaster+' '+solfile+' '+o_wpinversion+' '+syndir+' > '+os.path.join(LOGDIR,'_tmp_synths'))
    if status:
        print('Error while running '+SYNTHS)
        sys.exit(1)
    # Create Sac Objects
    sacdata = sacpy.sac()
    sacsynt = sacpy.sac()
    coords = []
    L = open(o_wpinversion).readlines()
    for l in L:
        sacf = l.strip().split()[0]
        sacdata.read(sacf,datflag=0)
        coords.append([sacdata.stla,sacdata.stlo,sacdata.az,sacdata.dist])
    coords = np.array(coords)
    # Main loop
    print('Input (W)CMTSOLUTION file is: %s'%(solfile))
    print('Output synthetic directory is: %s'%(syndir))
    perpage = nl*nc
    ntot = len(L)
    npages = np.ceil(float(ntot)/float(perpage))
    nchan = 1
    count = 1
    pages = 1
    fig = plt.figure()
    fig.subplots_adjust(bottom=0.06,top=0.87,left=0.06,right=0.95,wspace=0.25,hspace=0.35)
    print('All pages will be saved in %s'%(OPDFFILE))
    pp = mpl.backends.backend_pdf.PdfPages(OPDFFILE)
    basem = None
    for l in L:
        # Parse line: sac file name + W-phase window sample bounds
        items = l.strip().split()
        fic1 = items[0]
        sacdata.read(fic1)
        chan = sacdata.kcmpnm[0:3]
        loc = sacdata.khole
        fic2 = syndir+'/%s.%s.%s.%s.complete_synth.bp.sac'\
               %(sacdata.kstnm,sacdata.knetwk,chan,loc)
        sacsynt.read(fic2)
        # Flush the full page and start a new one
        if count > perpage:
            plt.suptitle(title+ ', p %d/%d'%(pages,npages), fontsize=16, y=0.95)
            print('page %d/%d'%(pages,npages))
            #fig.set_rasterized(True)
            pp.savefig(orientation='landscape')
            plt.close()
            pages += 1
            count = 1
            fig = plt.figure()
            fig.subplots_adjust(bottom=0.06,top=0.87,left=0.06,right=0.95,wspace=0.25,hspace=0.35)
        # Time - W phase window (seconds relative to origin time)
        t1 = np.arange(sacdata.npts,dtype='double')*sacdata.delta + sacdata.b - sacdata.o
        t2 = np.arange(sacsynt.npts,dtype='double')*sacsynt.delta + sacsynt.b - sacsynt.o
        wnb = float(items[5])
        wne = float(items[6])
        wtb = sacdata.b - sacdata.o + wnb * sacdata.delta
        wte = sacdata.b - sacdata.o + wne * sacdata.delta
        # Plot trace (black = data, red = synthetic, dots = window bounds)
        ax = plt.subplot(nl,nc,count)
        plt.plot(t1,sacdata.depvar*1000.,'k')
        plt.plot(t2,sacsynt.depvar*1000.,'r-')
        plt.plot([wtb,wte],[0.,0.],'ro')
        # Axes limits
        B=wtb-150.0
        if B<0:
            B = 0.0
        plt.xlim([B,B+length*sacsynt.delta])
        if YLIM_AUTO:
            a = np.absolute(sacsynt.depvar[:length]).max()*1000.
            ymin = -1.1*a
            ymax = 1.1*a
            ylims = [ymin,ymax]
        else:
            ylims = YLIMFIXED
        plt.ylim(ylims)
        # Annotations (unicode phi/Delta/alpha + degree signs in the title)
        if sacdata.kcmpnm[2] == 'Z':
            label = u'%s %s %s %s (\u03C6,\u0394) = %6.1f\u00B0, %5.1f\u00B0'
            label = label%(sacdata.knetwk,sacdata.kstnm, sacdata.kcmpnm, sacdata.khole, sacdata.az, sacdata.gcarc)
        else:
            label = u'%s %s %s %s (\u03C6,\u0394,\u03B1) = %6.1f\u00B0, %5.1f\u00B0, %6.1f\u00B0'
            label = label%(sacdata.knetwk,sacdata.kstnm, sacdata.kcmpnm, sacdata.khole, sacdata.az, sacdata.gcarc, sacdata.cmpaz)
        plt.title(label,fontsize=10.0,va='center',ha='center')
        if not (count-1)%nc:
            plt.ylabel('mm',fontsize=10)
        # NOTE(review): under Python 3 `/` is true division, so this equality
        # only holds when count-1 is an exact multiple of nc — some bottom-row
        # panels may miss their x-label; `//` looks intended.  Confirm target
        # interpreter before changing.
        if (count-1)/nc == nl-1 or nchan+nc > ntot:
            plt.xlabel('time, sec',fontsize=10)
        plt.grid()
        try:
            basem = showBasemap(ax,cmtla,cmtlo,sacdata.stla,sacdata.stlo,coords,flagreg,basem)
        # NOTE(review): bare except also catches SystemExit/KeyboardInterrupt;
        # `except Exception:` would be safer here.
        except:
            showPolarmap(ax,sacdata.az,sacdata.dist,coords)
            print('No basemap module')
        count += 1
        nchan += 1
    # Flush the final (possibly partial) page.
    print('page %d/%d'%(pages,npages))
    #fig.set_rasterized(True)
    plt.suptitle(title + ', p %d/%d'%(pages,npages), fontsize=16, y=0.95)
    pp.savefig(orientation='landscape')
    plt.close()
    pp.close()
from flask import Flask, render_template, request, redirect, url_for from werkzeug.utils import secure_filename import json, os, subprocess from utils import parseConfig, persistFilename # defining constant UPLOAD_FOLDER = "temp" ALLOWED_EXTENSIONS = set(["csv", "txt", "mat"]) HDFS_PATH = "/user/ubuntu/data" ##### bootstraping app #### app = Flask(__name__) app.config["UPLOAD_FOLDER"] = UPLOAD_FOLDER config = parseConfig() filenames = persistFilename() #### define functions #### def allowed_file(filename): # filename is a string return "." in filename and filename.split( ".")[1].lower() in ALLOWED_EXTENSIONS #### handle requests #### @app.route("/") @app.route("/index") def index(): return render_template("main.html") @app.route("/upload", methods=["POST", "GET"]) def upload():
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
warnings.simplefilter(action='ignore', category=UserWarning)

import os, sys
from crossasr import CrossASR
import json
import utils

if __name__ == "__main__":
    # The first CLI argument names the JSON configuration file.
    cfg = utils.readJson(sys.argv[1])

    # Build the pipeline components declared in the configuration.
    tts = utils.getTTS(cfg["tts"])
    asrs = utils.getASRS(cfg["asrs"])
    estimator = utils.getEstimator(cfg["estimator"])
    crossasr = CrossASR(tts=tts, asrs=asrs, estimator=estimator,
                        **utils.parseConfig(cfg))

    # Feed the whole corpus through CrossASR, then report statistics.
    corpus_path = os.path.join(cfg["output_dir"], cfg["corpus_fpath"])
    corpus_texts = utils.readCorpus(corpus_fpath=corpus_path)
    crossasr.processCorpus(texts=corpus_texts)
    crossasr.printStatistic()