import numpy as np

# parse_binary_llh, convert_to_date, date_relative_days and outlierdet are
# project helpers defined elsewhere.


def load_clean_set(path):
    data_set = parse_binary_llh(path)
    data_set = [convert_to_date(x) for x in data_set]
    data_set = sorted(data_set, key=lambda x: x.time)
    baseline = data_set[0].time
    # make data relative to baseline
    data_set = [date_relative_days(x, baseline) for x in data_set]
    # manufacture time series as an (N, 7) array:
    # [day, east, north, up, east_err, north_err, up_err]
    size = len(data_set)
    data = np.zeros([size, 7])
    days = [x.time for x in data_set]
    ew = np.array([x.pos[0] for x in data_set])   # east-west position
    ns = np.array([x.pos[1] for x in data_set])   # north-south position
    ud = np.array([x.pos[2] for x in data_set])   # up-down position
    ewe = np.array([x.err[0] for x in data_set])  # east-west error
    nse = np.array([x.err[1] for x in data_set])  # north-south error
    ude = np.array([x.err[2] for x in data_set])  # up-down error
    data[:, 0] = days
    data[:, 1] = ew
    data[:, 2] = ns
    data[:, 3] = ud
    data[:, 4] = ewe
    data[:, 5] = nse
    data[:, 6] = ude
    return outlierdet(data, 50, 20), baseline
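# `outlierdet` itself is not shown in this excerpt. Below is a minimal sketch
# of one plausible reading, ASSUMING the second argument is a rolling-window
# half-width in samples and the third a deviation threshold in the series'
# own units; the real implementation may well differ. The helper is named
# differently so it cannot clash with the real one.
def outlierdet_sketch(data, window, threshold):
    keep = np.ones(len(data), dtype=bool)
    for col in (1, 2, 3):  # east, north, up columns
        series = data[:, col]
        # rolling median around each sample
        med = np.array([np.median(series[max(0, i - window):i + window + 1])
                        for i in range(len(series))])
        # drop rows that stray too far from the local median
        keep &= np.abs(series - med) <= threshold
    return data[keep]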
def load_set(file_name):
    with open(file_name) as f:
        station_names = f.readlines()
    stations = dict()
    for name in station_names:
        # the station code is the first four characters of the listed name
        series = sorted(parse_binary_llh(r'conv/' + name[0:4] + '.tseries.neu'),
                        key=lambda x: x.time)
        if (get_date(series[-1]) - get_date(series[0])).days >= 1000:
            print(name[0:4])
            print(get_date(series[-1]))
            print(get_date(series[0]))
        stations[name[0:4]] = series
    # overlap window shared by all stations
    min_date = max([get_date(stations[key][0]) for key in stations])
    max_date = min([get_date(stations[key][-1]) for key in stations])
    print(min_date)
    print(max_date)
    return stations, min_date, max_date
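# `get_date` is another project helper that is not shown. Judging from the
# timestamp arithmetic in the __main__ block below (time = year * 1000 +
# day-of-year), its decoding presumably looks something like this sketch
# (hypothetical name, kept separate from the real helper):
import datetime

def get_date_sketch(entry):
    year = entry.time // 1000        # e.g. 2004123 -> 2004
    day_of_year = entry.time % 1000  # e.g. 2004123 -> day 123
    return datetime.date.fromordinal(
        datetime.date(year, 1, 1).toordinal() + day_of_year - 1)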
    # tail of a convolution helper whose definition starts above this excerpt
    return conv


def seismic_act(data):
    # hand the series to MATLAB: quakedet reads data.mat and writes its
    # detections to earthquakes.mat
    scipy.io.savemat("data.mat", {'data': data})
    eng = matlab.engine.start_matlab()
    eng.quakedet()
    quakes = scipy.io.loadmat("earthquakes.mat")['earthquakes']
    return quakes


if __name__ == "__main__":
    path = "C:\\Users\\Wim Jodehl\\Desktop\\TAaS\\Project2-Code\\conv"
    os.chdir(path)
    collection = parse_binary_llh(path + "\\KUAL.tseries.neu")
    series = sorted(collection, key=lambda x: x.time)
    locationsx, locationsy, locationsz, times = [], [], [], []
    # the time field encodes year * 1000 + day-of-year
    init_time = series[0].time
    init_year = init_time // 1000
    init_days = init_time - init_year * 1000
    init_date = datetime.date.fromordinal(
        datetime.date(init_year, 1, 1).toordinal() + init_days - 1)
    for i in range(len(series)):
        ts = series[i].time
        year = ts // 1000
        days = ts - year * 1000
        date = datetime.date.fromordinal(
            datetime.date(year, 1, 1).toordinal() + days - 1)
        times.append(date)
        locationsx.append(series[i].pos[0])
        locationsy.append(series[i].pos[1])
        locationsz.append(series[i].pos[2])
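# Hypothetical end-to-end use, assuming load_clean_set (above) and
# seismic_act are importable from the same module and a local MATLAB
# installation has quakedet.m on the engine's path:
#
#     data, baseline = load_clean_set("conv/KUAL.tseries.neu")
#     quakes = seismic_act(data)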
import geoplotlib
from graphing import parse_binary_llh
from geoplotlib.layers import DelaunayLayer
import sys, os
from math import degrees
from geoplotlib.colors import colorbrewer
from geoplotlib.utils import epoch_to_str, BoundingBox, read_csv

if __name__ == "__main__":
    path = sys.argv[1]
    files = os.listdir(path)
    stations = []
    # one record per station file is enough to get its location
    for file in files:
        stations.append(parse_binary_llh(path + '/' + file)[0])
        print(stations[-1].name)
    # dump the station locations to a temporary CSV for geoplotlib
    with open('tmp.csv', 'w') as csv_file:
        csv_file.write('name,lat,lon\n')
        for stat in stations:
            # pos[0] is latitude and pos[1] longitude, both in radians
            name, lat, lon = stat.name, degrees(stat.pos[0]), degrees(stat.pos[1])
            print(name, lat, lon)
            csv_file.write(','.join([name, str(lat), str(lon)]) + '\n')
    data = read_csv('tmp.csv')
    print(type(data))
    geoplotlib.delaunay(data, cmap="hot_r")
    geoplotlib.labels(data, 'name', color=[0, 0, 255, 255])
    geoplotlib.show()
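# BoundingBox is imported above but never used; to frame the initial view on
# the plotted stations rather than geoplotlib's default, something along
# these lines should work (a sketch against geoplotlib's public API, placed
# before geoplotlib.show()):
#
#     geoplotlib.set_bbox(BoundingBox.from_points(lons=data['lon'],
#                                                 lats=data['lat']))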
import os
import fnmatch
import numpy as np
from math import degrees as deg  # assumed alias for the deg() calls below
from graphing import parse_binary_llh


def conv_data(data, n):
    # n-sample moving average of the position column
    conv_mat = np.ones(n) / n
    d = data[:, 1]
    d_ave = np.convolve(d, conv_mat, mode="same")
    return d_ave


# path to converted files
path = os.path.dirname(os.path.realpath(__file__)) + '/conv'
os.chdir(path)

# get locations and scaled velocity line of all stations from files
Locations = []
for file in os.listdir():
    if fnmatch.fnmatch(file, '*.neu'):
        Sname, T, N = file.split('.')
        Collection = parse_binary_llh(path + '/' + file)
        series = sorted(Collection, key=lambda x: x.time)
        # apply outlier detection and paste data into array
        Lat_locations, Lon_locations, times = [], [], []
        for i in range(len(series)):
            # the time field encodes year * 1000 + day-of-year
            ComT = series[i].time
            Year = (ComT - (ComT % 1000)) / 1000
            Day = ComT % 1000
            # rough epoch conversion: year length taken as 365.25 days
            Year_len = 365 * 24 * 3600 + 6 * 3600
            Day_len = 24 * 3600
            Timestamp = int(((Year - 1970) * Year_len) + (Day * Day_len))
            times.append(Timestamp)
            Lat_locations.append(deg(series[i].pos[0]))
            Lon_locations.append(deg(series[i].pos[1]))
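# Example use of conv_data on the (N, 7) array produced by load_clean_set:
# a 30-sample moving average of the east-west component (the window length
# here is illustrative only):
#
#     smoothed_ew = conv_data(data, 30)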