def processing(self):

    _event_id = self.getEvent()

    if _event_id:
        if not self.db_scisola.EventExist(_event_id):
            print(_event_id)

            orig = origin.Origin()
            orig = self.getOriginInfo(_event_id, orig)

            # if event's info exists
            if orig.datetime and orig.magnitude and orig.depth:
                # two threshold requirements in order to run a new event
                if orig.magnitude >= self.settings.magnitude_threshold and \
                   inrange(self.settings.center_latitude,
                           self.settings.center_longitude,
                           orig.latitude, orig.longitude,
                           self.settings.distance_range):

                    _p = process.Process(
                        origin=orig,
                        settings=self.settings,
                        station_list=[],
                        db_scisola=self.db_scisola,
                        save2DB=True,
                        timeout=self.settings.process_timeout,
                        delay=self.settings.process_delay,
                        parent=self.parent)
                    _p.start()
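
# NOTE: the inrange() helper called by processing() is not defined in this
# snippet. A minimal sketch, assuming it performs a haversine great-circle
# distance check with distance_range in kilometers -- this implementation
# is an illustration, not scisola's actual code:
import math

def inrange(center_lat, center_lon, lat, lon, distance_range):
    """Return True if (lat, lon) lies within distance_range km of
    (center_lat, center_lon), using the haversine formula on a
    spherical Earth (R = 6371 km)."""
    phi1, phi2 = math.radians(center_lat), math.radians(lat)
    dphi = math.radians(lat - center_lat)
    dlam = math.radians(lon - center_lon)
    a = (math.sin(dphi / 2.0) ** 2 +
         math.cos(phi1) * math.cos(phi2) * math.sin(dlam / 2.0) ** 2)
    return 2.0 * 6371.0 * math.asin(math.sqrt(a)) <= distance_range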
def __init__(self, neurons, dimensions, count=1, max_rate=(200, 300),
             intercept=(-1.0, 1.0), t_ref=0.002, t_rc=0.02, seed=None,
             type='lif', dt=0.001, encoders=None, name=None,
             address="localhost"):
    self.seed = seed
    self.neurons = neurons
    self.dimensions = dimensions
    self.count = count
    self.name = name
    self.address = address
    self.ticker_conn = None

    # create the neurons
    # TODO: handle different neuron types, which may have different
    # parameters to pass in
    self.neuron = neuron.names[type]((count, self.neurons),
                                     t_rc=t_rc, t_ref=t_ref, dt=dt)

    # compute alpha and bias
    srng = RandomStreams(seed=seed)
    max_rates = srng.uniform([neurons], low=max_rate[0], high=max_rate[1])
    threshold = srng.uniform([neurons], low=intercept[0], high=intercept[1])
    alpha, self.bias = theano.function(
        [], self.neuron.make_alpha_bias(max_rates, threshold))()
    self.bias = self.bias.astype('float32')

    # compute encoders
    self.encoders = make_encoders(neurons, dimensions, srng,
                                  encoders=encoders)
    self.encoders = (self.encoders.T * alpha).T

    # make default origin
    self.origin = dict(X=origin.Origin(self))
    self.accumulator = {}
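
# Usage sketch (illustrative): assuming this constructor belongs to an
# Ensemble class, a 100-neuron, 1-dimensional population could be built
# like this; the class name and parameter values are assumptions, not
# taken from the snippet above:
ens = Ensemble(neurons=100, dimensions=1, seed=42, type='lif')
decoded = ens.origin['X']  # default origin decoding the represented value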
def add_origin(self, name, func, eval_points=None):
    """Create a new origin to perform a given function
    over the represented signal

    :param string name: name of origin
    :param function func: desired transformation to perform
        over represented signal
    :param list eval_points: specific set of points to optimize
        decoders over for this origin
    """
    if eval_points is None:
        eval_points = self.eval_points
    self.origin[name] = origin.Origin(self, func, eval_points=eval_points)
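
# Usage sketch (illustrative): add an origin that decodes the square of
# the represented signal, optimized over the ensemble's default
# evaluation points:
ens.add_origin('square', func=lambda x: x ** 2)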
def los_angeles(_class, demand=4, parameters=None, path=None):
    """Generate small map of L.A. with 122 links and 44 nodes
    """
    if not path:
        path = 'data/los_angeles_data_2.mat'
    data = scipy.io.loadmat(path)
    nodes = data['nodes']
    links = data['links']
    if demand in [1, 2, 3, 4]:
        ODs = data["ODs%d" % demand]
    else:
        raise Exception("No such demand")

    # TODO(syadlowsky): probably should be MultiDiGraph, but for now, this
    # is good enough. No networks we work with have two links between same
    # intersections.
    network = nx.DiGraph()
    od_demand_dict = {}

    for index_, (posx, posy) in enumerate(nodes):
        index = index_ + 1
        network.add_node(index, pos=(posx, posy))

    for startnode, endnode, route, ffdelay, slope in links:
        network.add_edge(startnode, endnode,
                         free_flow_delay=ffdelay, delay_slope=slope)

    for (r, s, flow) in ODs:
        origin_ = origin.Origin(r, [r])
        destination = origin.Origin(s, [s])
        if origin_ not in od_demand_dict:
            od_demand_dict[origin_] = {}
        od_demand_dict[origin_][destination] = (origin_, destination, flow)

    return RoadNetwork(network, od_demand.ODDemand(od_demand_dict))
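
# Usage sketch (illustrative): the leading _class argument suggests this
# function is bound as a classmethod, so a call might look like the
# following; the demand value is arbitrary:
net = RoadNetwork.los_angeles(demand=2)  # RoadNetwork over a 44-node, 122-link DiGraph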
def __init__(self, neurons, dimensions, count=1, max_rate=(200, 300),
             intercept=(-1.0, 1.0), t_ref=0.002, t_rc=0.02, seed=None,
             type='lif', dt=0.001, encoders=None, is_subensemble=False,
             name=None, decoders=None, bias=None):
    self.name = name
    self.seed = seed
    self.neurons = neurons
    self.dimensions = dimensions
    self.count = count
    self.accumulator = {}
    self.is_subensemble = is_subensemble

    # create the neurons
    # TODO: handle different neuron types, which may have different
    # parameters to pass in
    # The structure of the data contained in self.neuron consists of
    # several variables that are arrays of the form
    # Array([
    #     [x_0_0, x_0_1, x_0_2, ..., x_0_(neurons - 1)],
    #     [x_1_0, x_1_1, x_1_2, ..., x_1_(neurons - 1)],
    #     [...],
    #     [x_(count-1)_0, x_(count-1)_1, ..., x_(count-1)_(neurons - 1)]
    # ])
    self.neuron = neuron.names[type]((count, self.neurons),
                                     t_rc=t_rc, t_ref=t_ref, dt=dt)

    if is_subensemble:
        # reuse precomputed arrays instead of sampling new ones
        self.bias = bias
        self.encoders = encoders
    else:
        # compute alpha and bias
        srng = RandomStreams(seed=seed)
        max_rates = srng.uniform([neurons], low=max_rate[0], high=max_rate[1])
        threshold = srng.uniform([neurons], low=intercept[0], high=intercept[1])
        alpha, self.bias = theano.function(
            [], self.neuron.make_alpha_bias(max_rates, threshold))()
        self.bias = self.bias.astype('float32')

        # compute encoders
        self.encoders = make_encoders(neurons, dimensions, srng,
                                      encoders=encoders)
        self.encoders = (self.encoders.T * alpha).T

    # make default origin
    self.origin = dict(X=origin.Origin(self, decoder=decoders))
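
# Usage sketch (illustrative and unverified): a subensemble skips the
# alpha/bias/encoder computation and reuses arrays already computed by a
# parent ensemble; the slicing below is only an assumption about how a
# caller might split a parent population:
sub = Ensemble(neurons=50, dimensions=1, is_subensemble=True,
               encoders=parent.encoders[:50], bias=parent.bias[:50],
               decoders=parent_decoders)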
    _depth_folder = os.path.join(self.dir.inversion, str(depth))
    _inpinv_file = os.path.join(_depth_folder, 'inpinv.dat')
    _allstat_file = os.path.join(_depth_folder, 'allstat.dat')

    shutil.copyfile(self.dir.inpinv, _inpinv_file)
    shutil.copyfile(self.dir.allstat, _allstat_file)

    _isola12c_file = os.path.join(self.settings.isola_path, _isola12c)
    _norm12c_file = os.path.join(self.settings.isola_path, _norm12c)

    # calculate inversion
    isola.calculateInversion(_isola12c_file, _norm12c_file, _depth_folder)


if __name__ == "__main__":

    orig = origin.Origin()
#    orig.datetime = "2014/06/25 09:21:41.00"
#    orig.magnitude = 3.9
#    orig.longitude = 21.747
#    orig.latitude = 38.3568
#    orig.depth = 7
    orig.datetime = str(sys.argv[1]) + " " + str(sys.argv[2])
    orig.magnitude = float(sys.argv[6])
    orig.longitude = float(sys.argv[4])
    orig.latitude = float(sys.argv[3])
    orig.depth = int(float(sys.argv[5]))
    orig.event_id = "dataset"
#    orig.datetime = "2014/02/26 01:42:50.00"
#    orig.magnitude = 4
#    orig.longitude = 21.6427
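
# Invocation sketch matching the sys.argv parsing above
# (argv[1..6] = date, time, latitude, longitude, depth, magnitude);
# the script filename is hypothetical:
#
#     python inversion.py 2014/06/25 09:21:41.00 38.3568 21.747 7 3.9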
def add_origin(self, name, func):
    self.origin[name] = origin.Origin(self, func)
# Add Display section
displayFrame = mydisplay.Display(root, mainFrame)
displayFrame.grid(column=0, row=0, sticky='w')

# Add horizontal line
ttk.Separator(mainFrame, orient=HORIZONTAL).grid(column=0, row=1, sticky='ew')

# Add Aircraft section
aircraftFrame = aircraft.Aircraft(root, mainFrame)
aircraftFrame.grid(column=0, row=2, sticky='w')

# Add horizontal line
ttk.Separator(mainFrame, orient=HORIZONTAL).grid(column=0, row=3, sticky='ew')

# Add Origin section
originFrame = origin.Origin(root, mainFrame)
originFrame.grid(column=0, row=4, sticky='w')

# Add horizontal line
ttk.Separator(mainFrame, orient=HORIZONTAL).grid(column=0, row=5, sticky='ew')

# Create Volume section
volumeFrame = volume.Volume(root, mainFrame, originFrame)
volumeFrame.grid(column=0, row=6, sticky='w')
originFrame.volFrame = volumeFrame

# Add horizontal line
ttk.Separator(mainFrame, orient=HORIZONTAL).grid(column=0, row=7, sticky='ew')

# Create Generate section
generateFrame = tools.Generate(root, mainFrame, displayFrame, aircraftFrame,
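
# Context sketch (illustrative): the section-gridding code above assumes a
# Tk root and a main frame created beforehand, roughly like this; the
# import style and padding value are assumptions, not from the snippet:
from tkinter import Tk, N, S, E, W, HORIZONTAL
from tkinter import ttk

root = Tk()
mainFrame = ttk.Frame(root, padding="3 3 12 12")
mainFrame.grid(column=0, row=0, sticky=(N, W, E, S))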
def add_origin(self, name, func, decoder=None):
    self.origin[name] = origin.Origin(self, func, decoder=decoder)
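
# Usage sketch (illustrative): passing precomputed decoders skips solving
# for them; saved_decoders is a hypothetical array loaded elsewhere:
ens.add_origin('square', func=lambda x: x ** 2, decoder=saved_decoders)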
def loadOrigin(self, origin_id, orig, station_list):
    """
    Fetches Origin (and MT) from the scisola database and returns
    the populated origin object and station list (the passed-in
    orig is rebuilt, not modified in place).
    """
    _query = "SELECT `Origin`.`id`, `Origin`.`timestamp`, " + \
             "`Origin`.`datetime`, " + \
             "`Origin`.`magnitude`, `Origin`.`latitude`, " + \
             "`Origin`.`longitude`, `Origin`.`depth`, " + \
             "`Origin`.`automatic`, `Origin`.`results_dir`, " + \
             "`Event`.`id`, " + \
             "`Moment_Tensor`.`cent_shift`, " + \
             "`Moment_Tensor`.`cent_time`, `Moment_Tensor`." + \
             "`cent_latitude`, `Moment_Tensor`.`cent_longitude`, " + \
             "`Moment_Tensor`.`cent_depth`, `Moment_Tensor`." + \
             "`correlation`, `Moment_Tensor`.`var_reduction`, " + \
             "`Moment_Tensor`.`mw`, `Moment_Tensor`.`mrr`, " + \
             "`Moment_Tensor`.`mtt`, `Moment_Tensor`.`mpp`, " + \
             "`Moment_Tensor`.`mrt`, `Moment_Tensor`.`mrp`, " + \
             "`Moment_Tensor`.`mtp`, `Moment_Tensor`.`vol`, " + \
             "`Moment_Tensor`.`dc`, `Moment_Tensor`.`clvd`, " + \
             "`Moment_Tensor`.`mo`, `Moment_Tensor`.`strike`, " + \
             "`Moment_Tensor`.`dip`, `Moment_Tensor`.`rake`, " + \
             "`Moment_Tensor`.`strike_2`, `Moment_Tensor`.`dip_2`, " + \
             "`Moment_Tensor`.`rake_2`, `Moment_Tensor`.`p_azm`, " + \
             "`Moment_Tensor`.`p_plunge`, `Moment_Tensor`.`t_azm`, " + \
             "`Moment_Tensor`.`t_plunge`, `Moment_Tensor`.`b_azm`, " + \
             "`Moment_Tensor`.`b_plunge`, `Moment_Tensor`.`minSV`, " + \
             "`Moment_Tensor`.`maxSV`, `Moment_Tensor`.`CN`, " + \
             "`Moment_Tensor`.`stVar`, `Moment_Tensor`.`fmVar`, " + \
             "`Moment_Tensor`.`frequency_1`, `Moment_Tensor`." + \
             "`frequency_2`, `Moment_Tensor`.`frequency_3`, " + \
             "`Moment_Tensor`.`frequency_4` FROM `Origin` INNER JOIN " + \
             "`Event` ON `Origin`.`id` = `Event`.`Origin_id` INNER JOIN " + \
             "`Moment_Tensor` ON " + \
             "`Origin`.`id` = `Moment_Tensor`.`Origin_id` " + \
             "WHERE `Origin`.`id` = " + str(origin_id) + ";"

    _row = self.read([_query])[0][0]

    # converts string to datetime object
    _orig_tp = date.datetime.strptime(_row[1], "%Y/%m/%d %H:%M:%S.%f")

    orig = origin.Origin()
    orig.id = int(_row[0])
    orig.timestamp = _orig_tp
    orig.datetime = _row[2]
    orig.magnitude = float(_row[3])
    orig.latitude = float(_row[4])
    orig.longitude = float(_row[5])
    orig.depth = float(_row[6])
    orig.automatic = bool(_row[7])
    orig.results_dir = _row[8]
    orig.event_id = _row[9]

    orig.mt = origin.MomentTensor()
    orig.mt.cent_shift = int(_row[10])
    orig.mt.cent_time = float(_row[11])
    orig.mt.cent_latitude = float(_row[12])
    orig.mt.cent_longitude = float(_row[13])
    orig.mt.cent_depth = float(_row[14])
    orig.mt.correlation = float(_row[15])
    orig.mt.var_reduction = float(_row[16])
    orig.mt.mw = float(_row[17])
    orig.mt.mrr = float(_row[18])
    orig.mt.mtt = float(_row[19])
    orig.mt.mpp = float(_row[20])
    orig.mt.mrt = float(_row[21])
    orig.mt.mrp = float(_row[22])
    orig.mt.mtp = float(_row[23])
    orig.mt.vol = float(_row[24])
    orig.mt.dc = float(_row[25])
    orig.mt.clvd = float(_row[26])
    orig.mt.mo = float(_row[27])
    orig.mt.strike = float(_row[28])
    orig.mt.dip = float(_row[29])
    orig.mt.rake = float(_row[30])
    orig.mt.strike2 = float(_row[31])
    orig.mt.dip2 = float(_row[32])
    orig.mt.rake2 = float(_row[33])
    orig.mt.p_azm = float(_row[34])
    orig.mt.p_plunge = float(_row[35])
    orig.mt.t_azm = float(_row[36])
    orig.mt.t_plunge = float(_row[37])
    orig.mt.b_azm = float(_row[38])
    orig.mt.b_plunge = float(_row[39])
    orig.mt.minSV = float(_row[40])
    orig.mt.maxSV = float(_row[41])
    orig.mt.CN = float(_row[42])
    orig.mt.stVar = float(_row[43])
    orig.mt.fmVar = float(_row[44])
    orig.mt.frequency_1 = float(_row[45])
    orig.mt.frequency_2 = float(_row[46])
    orig.mt.frequency_3 = float(_row[47])
    orig.mt.frequency_4 = float(_row[48])

    _query = "SELECT DISTINCT `streamNetworkCode`, " + \
             "`streamStationCode` FROM `Stream_Contribution` " + \
             "WHERE `Origin_id` = " + str(orig.id) + ";"

    _rows = self.read([_query])[0]

    # get stations
    for _row in _rows:
        _station = stream.Station()
        _station.network = _row[0]
        _station.code = _row[1]

        _query = "SELECT `streamCode`, `var_reduction`, " + \
                 "`mseed_path` FROM `Stream_Contribution` " + \
                 "WHERE `Origin_id` = " + str(orig.id) + \
                 " AND streamStationCode = '" + str(_station.code) + \
                 "' ORDER BY `streamCode`;"

        _stream_rows = self.read([_query])[0]

        # get station's streams
        for _stream_row in _stream_rows:
            _stream = stream.Stream()
            _stream.code = _stream_row[0]
            _stream.reduction = float(_stream_row[1])
            _stream.mseed_path = _stream_row[2]
            _station.stream_list.append(_stream)

        station_list.append(_station)

    return orig, station_list
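
# Usage sketch (illustrative; db is a hypothetical instance of the class
# defining loadOrigin, and the origin id is arbitrary -- note the orig
# argument is rebuilt inside, so any passed-in object is replaced):
orig, stations = db.loadOrigin(origin_id=42, orig=None, station_list=[])
print(orig.mt.mw, len(stations))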