def _create_particles_and_constraints(self) -> None:
    for name, segment in TURKEY_SEGMENTS.items():
        # Scale the points up.
        scaled_points = np.asarray(segment, float) * self.scale
        self.segments[name] = []
        for p in scaled_points:
            # Find the particle if it already exists, otherwise create a new one.
            particle: Particle = next(
                (part for part in self.particles if np.allclose(part.curr_pos, p)),
                Particle(position=p, mass=Turkey.mass))
            if particle not in self.particles:
                self.particles.append(particle)
                # print("adding a new particle at ", particle.curr_pos)
                # print(len(self.particles))
            # Append the particle to this segment.
            self.segments[name].append(particle)

        if name == "eye":
            continue

        # Create a stick constraint for each pair within this segment.
        for p1, p2 in pairs(self.segments[name]):
            constraint = StickConstraint(p1, p2, relatice_tolerance=0.1)
            self.stick_constraints.append(constraint)
        # Add more flexible stick constraints between every other node on this segment.
        for p1, p2 in pairs(self.segments[name][::2]):
            constraint = StickConstraint(p1, p2, relatice_tolerance=0.4)
            self.stick_constraints.append(constraint)
def num_inflection_points(ys):
    ysp = [y2 - y1 for y1, y2 in pairs(ys)]
    yspp = [y2 - y1 for y1, y2 in pairs(ysp)]
    sign_changes = 0
    for y1, y2 in pairs(yspp):
        if y1 * y2 < 0:
            sign_changes += 1
    return sign_changes
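# The snippets in this file all lean on a `pairs` helper from their own
# codebases, which is not shown here. The following is a minimal sketch of the
# most common, sliding-window variant (an assumption, not the original
# implementation; see the notes on the SAT and Haar snippets for call sites
# that need different semantics):
def pairs(xs):
    """Yield consecutive overlapping pairs: (x0, x1), (x1, x2), ..."""
    xs = list(xs)
    return zip(xs, xs[1:])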
def hamiltonian(config):
    """Given a configuration describing the left endpoints of TFs, compute the associated energy."""
    poses = positions(config)
    total_site_energy = sum(score_seq(energy_matrix, genome[pos:pos + w])
                            for pos in poses)
    total_interaction_energy = interaction_energy * len(
        [(i, j) for (i, j) in pairs(poses) if j - i == w])
    total_exclusion_energy = exclusion_energy * len(
        [(i, j) for (i, j) in pairs(poses) if j - i < w])
    return total_site_energy + total_interaction_energy + total_exclusion_energy
def estimate_drift_diffusion(xs):
    """Given a sample path X_t, estimate mu, sigma for: dX = mu dt + sigma dW."""
    dxs = [x1 - x0 for (x0, x1) in pairs(xs)]
    mu = mean(dxs)
    sigma = sd(dxs)
    return mu, sigma
def encode_exactly_one(self, variables):
    # At least one: a single clause over all the variables.
    self._add_clause(*variables)
    # At most one: for every pair of variables, at least one is false.
    for v1, v2 in pairs(variables):
        self._add_clause(-v1, -v2)
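# Note: in the SAT-encoding snippets (encode_exactly_one above, and the
# encode/encode_tw/get_moralized methods further down), `pairs` must yield
# *every* unordered pair, not just consecutive ones -- the at-most-one
# constraint needs a clause for each pair of variables. A hedged sketch of
# that variant, assuming combinations semantics:
from itertools import combinations

def pairs(xs):
    """Yield all unordered pairs (xi, xj) with i < j."""
    return combinations(xs, 2)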
def ordered_fast_1d_haar_transform(signal):
    """
    Calculate the (ordered) 1D Haar transform of a signal.

    Notes
    -----
    - Copies the signal before operating on it.
    - Be sure to invert it using the ordered algorithm.
    - If the signal is not a power-of-two length, it will be zero-padded.
    """
    # Set up overhead variables.
    s = zero_pad(signal)
    num_sweeps = int(log(len(s), 2))
    a = s.copy()
    new_a = s.copy()
    for _ in range(num_sweeps):
        # Each sweep turns pairs into (average, detail) coefficients.
        calculations = [((first + second) / 2, (first - second) / 2)
                        for first, second in pairs(new_a)]
        new_a, c = zip(*calculations)
        new_a = np.array(new_a)
        c = np.array(c)
        # New signal is [a, c].
        a[:len(new_a)] = new_a[:]
        a[len(new_a):len(new_a) + len(c)] = c[:]
    return a
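# In the Haar transform above, `pairs` must yield *disjoint* consecutive pairs
# (s0, s1), (s2, s3), ... so that each sweep halves the signal length. A
# minimal sketch of that semantics (the helper name is illustrative, not from
# the original codebase), assuming even-length input, which zero_pad guarantees:
def disjoint_pairs(xs):
    """Yield non-overlapping consecutive pairs: (x0, x1), (x2, x3), ..."""
    it = iter(xs)
    return zip(it, it)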
def make_ringer_viterbi2(code, L=L):
    """Make ringer using the Viterbi algorithm."""
    def aa_mu(aa):
        return mean([code[aa, b1, b2] for b1, b2 in nuc_pairs])

    def aa_sigma(aa):
        return sqrt(variance([code[aa, b1, b2] for b1, b2 in nuc_pairs]))

    etas = []
    # etas.append({x3: min(code[aa,x1,x3] for x1 in nucs for aa in aas) for x3 in nucs})
    etas.append({
        x3: min(code[aa, x1, x3] - aa_mu(aa) + (aa_sigma(aa)**2) / 2.0
                for x1 in nucs for aa in aas)
        for x3 in nucs
    })
    for i in range(1, L):
        d = {
            xnp1: min(code[aa, xn, xnp1] + etas[i - 1][xn]
                      for xn in nucs for aa in aas)
            for xnp1 in nucs
        }
        etas.append(d)
    binding_site = "".join([min(nucs, key=lambda n: eta[n]) for eta in etas])
    sites = [binding_site for i in range(n)]
    bd = [
        min(aas, key=lambda aa: code[aa, n1, n2] - aa_mu(aa) + (aa_sigma(aa)**2) / 2.0)
        for n1, n2 in pairs(binding_site)
    ]
    return bd, sites
def valid(xs, sigma):
    """Determine whether the configuration is valid."""
    ys = sorted(xs)
    return (all(xp - x >= 2 * sigma for (x, xp) in pairs(ys))
            and ys[0] > sigma
            and ys[-1] < 1 - sigma)
def acceptance_ratio(sample):
    ars = [0.0]
    acceptances = 0.0
    for n, (x, y) in enumerate(pairs(sample)):
        if x != y:
            acceptances += 1
        ars.append(acceptances / (n + 1))
    return ars
def new_from_image():
    """Create a new deck from an uploaded image."""
    log_request(request)
    data = request.json
    if not valid_params(['username', 'deck_name', 'description',
                         'session_id', 'name', 'divs'], data):
        logging.debug("Missing parameters")
        return jsonify({'error': 500})
    username = data['username']
    deckname = data['deck_name']
    desc = data['description']
    sId = data['session_id']
    filename = data['name']
    uId = user.get_uId(username)
    if not user.verify(username, sId):
        return jsonify({'error': 101})
    if not filename or not os.path.exists("/var/www/resources/tmp/{0}".format(filename)):
        return jsonify({'error': 201})

    # Create the new deck in the database.
    dId, deck_id = deck.new(deckname, uId, desc)

    # Split the temp image.
    i = Image.open("/var/www/resources/tmp/{0}".format(filename))
    imgs = splitImage(i, data['divs'])
    for row in imgs:
        # Pairwise collect the cards.
        img_pairs = pairs(row)
        for p in img_pairs:
            cId = card.new(dId, "", "")
            # StringIO for an in-memory file.
            atmp = StringIO()
            p[0].convert("RGB").save(atmp, format="JPEG")
            atmp.seek(0)
            a_id = resource.new(atmp, id_generator() + ".jpg", cId)[1]
            sideA = '<img src="[FLASHYRESOURCE:{0}]" />'.format(a_id)
            if p[1] is not None:
                btmp = StringIO()
                p[1].convert("RGB").save(btmp, format="JPEG")
                btmp.seek(0)
                b_id = resource.new(btmp, id_generator() + ".jpg", cId)[1]
                sideB = '<img src="[FLASHYRESOURCE:{0}]" />'.format(b_id)
            else:
                sideB = '[FLASHYRESOURCE:00000000]'
            card.modify(cId, sideA, sideB)

    # Delete the temp file.
    os.unlink("/var/www/resources/tmp/{0}".format(filename))

    d = deck.get_deck(dId)
    return jsonify({'deck': d, 'error': 0})
def __draw_data(self, painter):
    painter.setPen(self.__look.data_pen)
    points_adjuster = utils.QtPointsAdjuster(self.__drawing_area_rect,
                                             self.__model.bounding_rect)
    qpoints = (QPoint(*points_adjuster.adjust(point))
               for point in self.__model.points)
    for qpoint1, qpoint2 in utils.pairs(qpoints):
        painter.drawLine(qpoint1, qpoint2)
def estimate_a01(hidden_states):
    # Estimate a01 = P(next state is 1 | current state is 0):
    # d counts transitions out of state 0, n counts 0 -> 1 transitions.
    n = d = 0
    for hs1, hs2 in pairs(hidden_states):
        if hs1 == 0:
            d += 1
            if hs2 == 1:
                n += 1
    a01 = n / float(d) if d > 0 else 0
    return a01
def chip_ref(G, config, mean_frag_length):
    """Given a genome length G, a configuration (vector giving locations of
    left endpoints of TFs), and a mean fragment length, return a collection of
    fragments (in the form [left endpoint, right endpoint)) for one cell."""
    lamb = 1.0 / mean_frag_length
    splits = make_splits(G, lamb)
    all_endpoints = pairs(splits)
    bound_fragments = [(lep, rep) for (lep, rep) in all_endpoints
                       if any(lep <= pos < rep for pos in config)]
    return bound_fragments
def _get_order(self, vars):
    num_nodes = self.encoder.num_nodes
    position = {node: 0 for node in range(num_nodes)}
    for u, v in pairs(range(num_nodes)):
        var = vars[u][v]
        if var in self.model:
            position[v] += 1
        else:
            position[u] += 1
    ordering = posdict_to_ordering(position)
    return [self.encoder.node_reverse_lookup[i] for i in ordering]
def encode(self):
    # Allow at most one arc (redundant, not strictly necessary, but speeds up encoding).
    for i, j in pairs(range(self.num_nodes)):
        _i, _j = self.node_reverse_lookup[i], self.node_reverse_lookup[j]
        self._add_comment(f"at most one arc {_i}->{_j} or {_j}->{_i}")
        self._add_clause(-self.arc[i][j], -self.arc[j][i])

    self.encode_bn()

    # Set up graph variables for treewidth computation.
    self.encode_tw()
    super().encode()
def get_triangulated(self):
    graph = nx.DiGraph()
    elim_order = self.get_elim_order()
    # Add nodes.
    graph.add_nodes_from(elim_order)
    # Add edges.
    for _u, _v in pairs(elim_order):
        u = self.encoder.node_lookup[_u]
        v = self.encoder.node_lookup[_v]
        # Only consider the arc if it obeys the elimination order.
        if self.encoder.arc[u][v] in self.model:
            graph.add_edge(_u, _v)
    return graph
def __init__(self):
    super().__init__()
    for p1, p2 in pairs(mountain_points):
        bottom_left = (p1[0], GROUND_Y)
        bottom_right = (p2[0], GROUND_Y)
        points_list = [
            bottom_left,
            p1,
            p2,
            bottom_right,
        ]
        # Each "segment" of the mountain is a convex polygon.
        self.append(
            arcade.create_polygon(points_list, arcade.color.BROWN_NOSE))
def encode_tw(self):
    data = self.data
    for _v in data:
        v = self.node_lookup[_v]
        for p, score in data[_v].items():
            for _u in p:
                u = self.node_lookup.get(_u)
                if u is None:
                    continue  # external vertex, ignore
                # Clause: if par and ord then arc.
                self._add_comment(f"par({_v}, {set(p)}) and ord*({_u},{_v}) => arc({_u},{_v})")
                self._add_clause(-self.par[v][p], -self._ord(u, v), self.arc[u][v])
                self._add_comment(f"par({_v}, {set(p)}) and ord*({_v},{_u}) => arc({_v},{_u})")
                self._add_clause(-self.par[v][p], -self._ord(v, u), self.arc[v][u])

            self._add_comment(f"begin moralization of parent {set(p)} of {v}")
            for _u, _w in pairs(p):
                u, w = self.node_lookup.get(_u), self.node_lookup.get(_w)
                if u is None or w is None:
                    continue  # external vertices, ignore
                # Clause: moralization (arc between common parents).
                self._add_comment(f"par({_v}, {set(p)}) and ord*({_u},{_w}) => arc({_u},{_w})")
                self._add_clause(-self.par[v][p], -self._ord(u, w), self.arc[u][w])
                self._add_comment(f"par({_v}, {set(p)}) and ord*({_w},{_u}) => arc({_w},{_u})")
                self._add_clause(-self.par[v][p], -self._ord(w, u), self.arc[w][u])
            self._add_comment(f"end moralization of parent {set(p)} of {v}")

    # slim only
    for _, bag in self.forced_cliques.items():
        if len(bag) <= 1:
            continue  # nothing to encode
        self._add_comment(f"begin [slim] forced clique {set(bag)}")
        for _u, _v in pairs(bag):
            u, v = self.node_lookup[_u], self.node_lookup[_v]
            # slim-clause: ord* => arc for nodes in the same boundary bag
            self._add_comment(f"\t {_u} before {_v} implies {_u}->{_v}")
            self._add_clause(-self._ord(u, v), self.arc[u][v])
            self._add_comment(f"\t {_v} before {_u} implies {_v}->{_u}")
            self._add_clause(-self._ord(v, u), self.arc[v][u])
        self._add_comment(f"end [slim] forced clique {set(bag)}")
def handle_acyclicity(bn: BayesianNetwork, seen: set, leaf_nodes: set, debug=False):
    dag = bn.dag
    subdag = nx.subgraph_view(dag, lambda x: True,
                              lambda x, y: not ((x in seen) and (y in seen)))
    forced_arcs = []
    for src, dest in pairs(leaf_nodes):
        if nx.has_path(subdag, src, dest):
            forced_arcs.append((src, dest))
            if debug:
                print(f"added forced {src}->{dest}")
        else:
            # Only check the reverse direction if no forward path was found.
            if nx.has_path(subdag, dest, src):
                forced_arcs.append((dest, src))
                if debug:
                    print(f"added forced {dest}->{src}")
    return forced_arcs
def explore_dft():
    """Illustrate an example of the detailed fluctuation theorem."""
    K = 3
    fs = np.random.random(K)
    mus = random_stochastic_matrix(K)
    A = np.diag(fs).dot(mus)
    M = infinitesimal_generator_from_rate_matrix(A)
    v = largest_eigenvector(A)
    # path = [0] + [random.randrange(K) for i in xrange(100000)]
    N = 10000
    acc = 0
    for trial in xrange(N):
        path = random_walk(A)
        sigma = sum(log(M[j, i] / M[i, j]) for i, j in pairs(path))
        i_final = -log(v[path[-1]])
        acc += exp(sigma - i_final)
        print trial, -sigma, i_final, acc / float(trial + 1)
    return acc / float(N)
def make_ringer_viterbi(code, L=L):
    """Make ringer using the Viterbi algorithm."""
    etas = []
    etas.append({
        x3: min(code[aa, x1, x3] for x1 in nucs for aa in aas)
        for x3 in nucs
    })
    for i in range(1, L):
        d = {
            xnp1: min(code[aa, xn, xnp1] + etas[i - 1][xn]
                      for xn in nucs for aa in aas)
            for xnp1 in nucs
        }
        etas.append(d)
    binding_site = "".join([min(nucs, key=lambda n: eta[n]) for eta in etas])
    sites = [binding_site for i in range(n)]
    bd = [
        min(aas, key=lambda aa: code[aa, n1, n2])
        for n1, n2 in pairs(binding_site)
    ]
    return bd, sites
def parse_containers(containers):
    # Parse AirAsia price containers and yield a list of flights for each.
    for container in containers:
        rows = container.findAll(
            'tr', {'class': ['fare-light-row', 'fare-dark-row']})
        list_of_flights = []
        for row in rows:
            rowOfFare = row.findAll(
                'tr', {'class': ['fare-light-row', 'fare-dark-row']})
            trip = {}
            if len(rowOfFare) > 0:
                flights = row.findAll('td', {'class': 'avail-table-detail'})
                for depart, arrive in pairs(flights):
                    d, a = trim(depart.getText()), trim(arrive.getText())
                    flight = {'origin': f'{d}', 'destination': f'{a}'}
                    trip.setdefault('flights', []).append(flight)
                price = row.findAll(
                    'div', {'class': 'avail-fare-price'})[0].getText()
                trip['price'] = trim(price)
                list_of_flights.append(trip)
        yield list_of_flights
def bd_variance(code, bd):
    mean_fs = [mean(code[aa, x, y] for (x, y) in nuc_pairs) for aa in bd]
    mean_sq_fs = [mean(code[aa, x, y]**2 for (x, y) in nuc_pairs) for aa in bd]
    mean_di_fs = [
        2 * mean(code[aa1, x, y] * code[aa2, y, z] for (x, y, z) in nuc_trips)
        for (aa1, aa2) in pairs(bd)
    ]
    higher_terms = [
        f1 * f2 for i, f1 in enumerate(mean_fs)
        for j, f2 in enumerate(mean_fs) if abs(i - j) > 1
    ]
    # print len(mean_sq_fs + mean_di_fs + higher_terms), len(bd)**2
    # print len(mean_sq_fs), len(mean_di_fs), len(higher_terms)
    # print [(i,j) for i,aa1 in enumerate(bd)
    #        for j,aa2 in enumerate(bd) if abs(j-i) == 1]
    # print [(i,j) for i,f1 in enumerate(mean_fs)
    #        for j,f2 in enumerate(mean_fs) if abs(i-j) > 1]
    print len(mean_sq_fs), 2 * len(mean_di_fs), len(higher_terms)
    eps_sq = sum(mean_sq_fs) + sum(mean_di_fs) + sum(higher_terms)
    print "eps_sq:", bd_eps_sq_ref(code, bd), eps_sq
    bd_mean_sq = bd_mean(code, bd)**2
    print "bd_mean_sq:", bd_mean_sq
    return eps_sq - bd_mean_sq
def mean_field_hs(Vs, K):
    """
    Pj(xj) = 1/Z0 * exp(-beta*hj(xj)), where

        hj(xj) = sum_{<j,jp>} sum_{xjp in jp} V(xj, xjp) * Pjp(xjp).

    We assume a Potts model of m variables x0...xj...xm-1 where each variable
    can take on K states 0...i...K-1. Mean field functions h are represented
    as a matrix hss where each row gives the values hj(i). [Note that i, j are
    reversed from the usual row-column convention.] Input is a matrix Vs of
    pairwise contributions to the Hamiltonian where Vs[j][jp] is a function
    V(xj, xjp).
    """
    M = len(Vs)
    jpairs = pairs(range(M))
    hs = [[1 for i in range(K)] for j in range(M)]

    def Pj(xj, j):
        # print xj, j
        return exp(-beta * hs[j][xj]) / sum(exp(-beta * hs[j][xjp])
                                            for xjp in range(K))

    old_hs = matcopy(hs)
    while True:
        for j in range(M):
            for i in range(K):
                # Sum over all jp > j, then all jp < j. (Note: range(0, j),
                # since range(0, j - 1) would skip the neighbor jp = j - 1.)
                hs[j][i] = sum(sum(Vs[j][jp](i, ip) * Pj(ip, jp)
                                   for ip in range(K))
                               for jp in range(j + 1, M)) + \
                           sum(sum(Vs[jp][j](ip, i) * Pj(ip, jp)
                                   for ip in range(K))
                               for jp in range(0, j))
        print l2(concat(hs), concat(old_hs))
        if old_hs == hs:
            break
        else:
            old_hs = matcopy(hs)
    print hs
    return hs
def main_example():
    sequence = """CGAAAAAACGCGAAAAAACGCGAAAAAACGCGAAAAAACGCGAAAAAACGCG
AAAAAACGCGAAAAAACGCGAAAAAACGCGAAAAAACGCGAAAAAACGCGAAAAAACGCGAAAAAA
CGCGAAAAAACGCGAAAAAACG""".replace("\n", "")
    lookup = functionify_model(aawedge)
    rise = 3.38  # Angstroms, from Gohlke
    b0 = transpose([[0, 0, 0]])
    v0 = transpose([[0, 0, rise]])
    bs = [b0]
    vs = [v0]
    for pair in pairs(sequence):
        dinuc = "".join(pair)
        params = lookup(dinuc)
        ro, ti, tw = params["roll"], params["tilt"], params["twist"]
        last_b = bs[-1]
        last_v = vs[-1]
        new_v = reduce(matrix_mult, [roll_matrix(ro),
                                     tilt_matrix(ti),
                                     twist_matrix(tw),
                                     last_v])
        new_b = matrix_add(last_b, new_v)
        bs.append(new_b)
        vs.append(new_v)
    return bs
def retrieve_cycles_info(GI):
    # Convert each cycle to a tuple so it can be used as a key.
    cycles = [tuple(c) for c in nx.simple_cycles(GI)]
    cycles_info = []
    # Cycles are found as sequences of nodes; all possible edge combinations
    # must be found for each cycle. The sign of each cycle does not depend on
    # the edges, however.
    for cycle in cycles:
        paths = [[]]
        sign = 1
        for p in pairs(cycle):
            for k, path in enumerate(paths):
                # Replace each path/sign by a list of possible paths/signs.
                paths[k] = [path + [R] for R in GI.edges[p]["reactions"]]
            sign *= GI.edges[p]["sign"]
        # Flatten the lists.
        paths = list(chain.from_iterable(paths))
        cycles_info.append(dict(cycle=cycle, paths=paths, sign=sign))
    return cycles_info
def addgeometry(self):
    # Initialization.
    s = QSettings()
    if self.useactivelayer:
        vectorlayer = self.iface.activeLayer()
    else:
        oldValidation = s.value("/Projections/defaultBehaviour", "useProject")
        s.setValue("/Projections/defaultBehaviour", "useProject")
        vectorlayer = QgsVectorLayer("LineString", "tmp_plot", "memory")
        s.setValue("/Projections/defaultBehaviour", oldValidation)

    # If magnetic heading is chosen, ensure we have a declination angle.
    if (self.pluginGui.radioButton_magNorth.isChecked()) and \
            (str(self.pluginGui.lineEdit_magNorth.text()) == ''):
        self.say("No magnetic declination value entered.")
        return 0

    # Get the starting point coordinates.
    X0 = float(str(self.pluginGui.lineEdit_vertexX0.text()))
    Y0 = float(str(self.pluginGui.lineEdit_vertexY0.text()))
    Z0 = float(str(self.pluginGui.lineEdit_vertexZ0.text()))

    # Check that the starting point is specified.
    if (X0 == 0 and Y0 == 0 and Z0 == 90):
        self.say("You must supply a starting point.")
        return 0

    # Check that there is at least one segment.
    if (self.pluginGui.table_segmentList.rowCount() < 1):
        self.say("You must enter at least one segment.")
        return 0

    if (self.pluginGui.radioButton_radialSurvey.isChecked()):
        surveytype = 'radial'
    elif (self.pluginGui.radioButton_boundarySurvey.isChecked()):
        surveytype = 'polygonal'

    arcpoint_count = self.pluginGui.spin_arclines.value()

    def get_points(surveytype):
        """
        Return a list of calculated points for the full run.

        :param surveytype:
        :return:
        """
        vlist = []
        vlist.append(utils.Point(X0, Y0, Z0))
        # Convert the segment list to a set of vertices.
        for i in range(self.pluginGui.table_segmentList.rowCount()):
            az = str(self.pluginGui.table_segmentList.item(i, 0).text())
            dis = float(str(self.pluginGui.table_segmentList.item(i, 1).text()))
            zen = str(self.pluginGui.table_segmentList.item(i, 2).text())
            direction = str(self.pluginGui.table_segmentList.item(i, 4).text())
            direction = utils.Direction.resolve(direction)
            try:
                radius = float(self.pluginGui.table_segmentList.item(i, 3).text())
            except ValueError:
                radius = None

            if (self.pluginGui.radioButton_englishUnits.isChecked()):
                # Adjust for input in feet, not meters.
                dis = float(dis) / 3.281

            # Checking degree input.
            if (self.pluginGui.radioButton_azimuthAngle.isChecked()):
                az = float(self.dmsToDd(az))
                zen = float(self.dmsToDd(zen))
            elif (self.pluginGui.radioButton_bearingAngle.isChecked()):
                az = float(self.bearingToDd(az))
                zen = float(self.bearingToDd(zen))

            # Correct for magnetic compass headings if necessary.
            if (self.pluginGui.radioButton_defaultNorth.isChecked()):
                self.magDev = 0.0
            elif (self.pluginGui.radioButton_magNorth.isChecked()):
                self.magDev = float(self.dmsToDd(str(self.pluginGui.lineEdit_magNorth.text())))
            az = float(az) + float(self.magDev)

            # Correct for angles outside of 0.0-360.0.
            while (az > 360.0):
                az = az - 360.0
            while (az < 0.0):
                az = az + 360.0

            # Checking survey type.
            if surveytype == 'radial':
                reference_point = vlist[0]   # reference the first vertex
            if surveytype == 'polygonal':
                reference_point = vlist[-1]  # reference the previous vertex

            nextpoint = utils.nextvertex(reference_point, dis, az, zen)
            log(nextpoint)
            log(reference_point)
            if radius:
                # If there is a radius then we are drawing an arc.
                # Calculate the arc points.
                points = list(utils.arc_points(reference_point, nextpoint,
                                               dis, radius,
                                               point_count=arcpoint_count,
                                               direction=direction))
                if direction == utils.Direction.ANTICLOCKWISE:
                    points = reversed(points)
                # Append them to the final points list.
                vlist.extend(points)
            vlist.append(nextpoint)
        return vlist

    # Reproject to the project's SRS.
    points = get_points(surveytype)
    vlist = self.reproject(points, vectorlayer)

    as_segments = self.pluginGui.checkBox_asSegments.isChecked()

    def createpoints(points):
        for point in points:
            geom = QgsGeometry.fromPoint(point)
            feature = QgsFeature()
            feature.setGeometry(geom)
            yield feature

    def createline(points):
        """
        Create a line feature from a list of points.

        :param points: List of QgsPoints
        """
        geom = QgsGeometry.fromPolyline(points)
        feature = QgsFeature()
        feature.setGeometry(geom)
        return feature

    def createpolygon(polygon):
        """
        Create a polygon from a list of points.

        :param polygon: List of QgsPoints
        """
        geom = QgsGeometry.fromPolygon(polygon)
        QgsMessageLog.logMessage(str(geom.isGeosValid()))
        feature = QgsFeature()
        feature.setGeometry(geom)
        return feature

    featurelist = []
    geometrytype = vectorlayer.geometryType()
    if geometrytype == QGis.Point:
        points = utils.to_qgspoints(vlist)
        features = createpoints(points)
        featurelist.extend(features)
    elif geometrytype == QGis.Line:
        pointlist = utils.to_qgspoints(vlist,
                                       repeatfirst=surveytype == 'radial')
        if as_segments:
            # If the line is to be drawn as segments, loop over the pairs and
            # create a line for each one.
            for pair in utils.pairs(pointlist,
                                    matchtail=surveytype == 'polygonal'):
                feature = createline(pair)
                featurelist.append(feature)
        else:
            feature = createline(pointlist)
            featurelist.append(feature)
    elif geometrytype == QGis.Polygon:
        polygon = utils.to_qgspoints(vlist)
        feature = createpolygon([polygon])
        featurelist.append(feature)

    # Commit.
    provider = vectorlayer.dataProvider()
    provider.addFeatures(featurelist)
    if not self.useactivelayer:
        QgsMapLayerRegistry.instance().addMapLayer(vectorlayer)
    self.iface.mapCanvas().refresh()
def test_pairs_returns_non_matching_tail_head(self):
    points = [1, 2, 3, 4]
    pairs = utils.pairs(points, matchtail=False)
    one = pairs.next()
    two = pairs.next()
    self.assertNotEqual(one[1], two[0])
def hamil(xs, J):
    return dot(xs, hs) + J * sum(xi * xj for (xi, xj) in pairs(xs))
                             columns=columns, data=frames_data)
    for keyframe_idx in df_keys.index:
        df_frames.ix[keyframe_idx] = df_keys.ix[keyframe_idx]
    return df_frames

interpolated_basemanualcurve = interpolate_curve(basemanualcurve_keys)
interpolated_manualcurve = interpolate_curve(manualcurve_keys)

def element_anchors(curve_element, anchors):
    curve_element.clear()
    for x, y in anchors:
        anchor_element = etree.Element('AnchorXY')
        anchor_element.text = "{0} {1}".format(x, y)
        curve_element.append(anchor_element)

for frame_idx, filename in _frames.iteritems():
    with open(filename.replace('cr2', 'ufraw'), 'w') as file_:
        exposure = interpolated_exposure.ix[frame_idx]
        basemanualcurve_anchors = pairs(interpolated_basemanualcurve.ix[frame_idx])
        manualcurve_anchors = pairs(interpolated_manualcurve.ix[frame_idx])

        basemanualcurve_element = tree.find('BaseManualCurve')
        element_anchors(basemanualcurve_element, basemanualcurve_anchors)
        manualcurve_element = tree.find('ManualCurve')
        element_anchors(manualcurve_element, manualcurve_anchors)

        tree.find('Exposure').text = str(exposure)
        tree.find('InputFilename').text = os.path.abspath(filename)
        tree.find('OutputFilename').text = os.path.abspath(filename).replace('cr2', 'jpg')

        out_string = etree.tostring(tree, pretty_print=True)
        file_.write(out_string)
def brownian_test(xs):
    """Given a time series, test for Gaussian increments by the Shapiro-Wilk test."""
    dxs = [x1 - x0 for (x0, x1) in pairs(xs)]
    return scipy.stats.shapiro(dxs)
def random_integer_partition(N, A=4):
    cuts = [0] + sorted([random.randrange(N + 1) for i in range(A - 1)]) + [N]
    ns = [y - x for (x, y) in pairs(cuts)]
    if not sum(ns) == N:
        raise Exception(ns)
    return ns
def get_moralized(self):
    moral = self.dag.to_undirected()
    for node in self.parents.keys():
        # Marry all pairs of parents of the node.
        for p1, p2 in pairs(self.parents[node]):
            moral.add_edge(p1, p2)
    return moral
def frags_from_splits_ref(config, splits):
    all_endpoints = pairs(splits)
    bound_fragments = [(lep, rep) for (lep, rep) in all_endpoints
                       if any(lep <= pos < rep for pos in config)]
    return bound_fragments
def make_frags(lamb):
    return pairs(breaks(lamb))
def highlight_hooping(self, cycles, reactions):
    for cycle, path in zip(cycles, reactions):
        for e, R in zip(pairs(cycle), path):
            mutable = self.edges[e][R]
            mutable.add_arrow(highlight=True)
def fragments_from_breaks(breaks, G):
    endpoints = [0] + [i for (i, b) in enumerate(breaks) if b] + [G]
    return pairs(endpoints)
def distances(bs):
    return map(uncurry(distance), pairs(bs))
def ordered(ys):
    return all(y1 <= y2 for y1, y2 in pairs(ys))
    motif = random_motif(L, n)
    pwm = sample_matrix(L, linear_sigma)
    pairwise_weights = [[[random.gauss(0, pairwise_sigma) for i in range(4)]
                         for j in range(4)]
                        for k in range(L - 1)]
    return motif, copies, (pwm, pairwise_weights)

def btoi(b):
    return "ACGT".index(b)

def energy_score((pwm, pairwise_weights), seq):
    linear_score = score_seq(pwm, seq)
    pairwise_score = sum(weight[btoi(b1)][btoi(b2)]
                         for weight, (b1, b2) in zip(pairwise_weights, pairs(seq)))
    return linear_score + pairwise_score

def compute_Zb(G, (linear_weights, pairwise_weights)):
    # Fold the linear weights into the pairwise weights.
    pure_pairwise_weights = [[[pw[i][j] + lwi[i] + lwj[j] for j in range(4)]
                              for i in range(4)]
                             for pw, (lwi, lwj) in zip(pairwise_weights,
                                                       pairs(linear_weights))]
    Ws = [
        np.matrix([[exp(w[btoi(b1)][btoi(b2)]) for b2 in "ACGT"]
                   for b1 in "ACGT"]) for w in pure_pairwise_weights
    ]
    return np.array([1, 1, 1, 1]).dot(
        reduce(lambda x, y: x.dot(y), Ws)).dot(np.array([1, 1, 1, 1]))[0, 0]
def satisfy_constraints(self) -> None:
    for i in range(ParticleSystem.NUM_ITERATIONS):
        for ball in filter(lambda ball: ball.can_move, self.balls):
            # If the ball hits the ground, it is dead.
            if (ball.curr_pos[1] - Ball.radius) <= GROUND_Y:
                print("Ball collided with the ground!")
                ball.can_move = False
                continue

            if ball.might_collide_with_wall():
                p1 = np.asarray((WALL_X, GROUND_Y))
                p2 = np.asarray((WALL_X, SCREEN_HEIGHT - 1))
                collision, wall_p, penetration_distance = circle_line_intersection(
                    ball.curr_pos, Ball.radius, p1, p2)
                if collision:
                    print("Ball collided with the wall!")
                    print(collision, wall_p, penetration_distance)
                    velocity = ball.curr_pos - ball.prev_pos
                    restitution_coefficient: float = 0.95
                    # Only the x-component of the velocity is changed (since the wall is vertical).
                    velocity[0] *= -1 * restitution_coefficient
                    # Get the ball out of the wall by sliding it to the right.
                    ball.curr_pos[0] += penetration_distance
                    # Set the ball's velocity.
                    ball.prev_pos = ball.curr_pos - velocity
                    # Can't possibly collide with anything else at the same time.
                    continue
            elif ball.might_collide_with_mountain():
                # Check if the ball collides with a line segment of the mountain.
                for p0, p1 in pairs(self.mountain_points):
                    result = circle_line_intersection(
                        ball.curr_pos, Ball.radius, p0, p1)
                    collision, mountain_p, penetration_dist = result
                    if not collision:
                        continue
                    print("Ball collided with the mountain!")
                    print(mountain_p, penetration_dist)
                    restitution_coefficient: float = 0.50
                    # The mountain segment is the tangential vector to the collision.
                    tangential = p1 - p0
                    tangential /= np.linalg.norm(tangential)
                    # The normal is perpendicular to the mountain segment.
                    # TODO: we want the upward-pointing normal, does this matter?
                    normal = np.asarray((-tangential[1], tangential[0]), float)
                    # Move the ball out of the mountain.
                    ball.curr_pos += normal * penetration_dist
                    velocity = ball.curr_pos - ball.prev_pos
                    v_tangent = np.dot(velocity, tangential)
                    v_normal = np.dot(velocity, normal)
                    # Inelastic collision: the normal component is reversed.
                    v_normal *= -1 * restitution_coefficient
                    new_velocity = v_tangent * tangential + v_normal * normal
                    # Set the ball's velocity.
                    ball.prev_pos = ball.curr_pos - new_velocity
                    break

            # Clamp the ball inside the screen bounds.
            min_constraint = (0, GROUND_Y)
            max_constraint = (SCREEN_WIDTH, SCREEN_HEIGHT)
            ball.curr_pos = np.max([ball.curr_pos, min_constraint], axis=0)
            ball.curr_pos = np.min([ball.curr_pos, max_constraint], axis=0)

        for turkey in self.turkeys:
            min_constraint = (0.0, GROUND_Y)
            max_constraint = (MOUNTAIN_START_X, SCREEN_HEIGHT)
            for particle in turkey.particles:
                particle.curr_pos = np.max([particle.curr_pos, min_constraint], axis=0)
                particle.curr_pos = np.min([particle.curr_pos, max_constraint], axis=0)
            for constraint in turkey.stick_constraints:
                constraint.apply()
def main(args):
    import model_utils as mutils

    # Set the parameters from the specified file BEFORE any model.* import.
    import model
    mutils.set_model_ps(args.psfile)

    import numpy as np
    import analysis
    import plotting
    from utils import print_dict, pairs
    from scipy.signal import resample

    from model.glomerule import Glomerule
    from model.mitral_cells import MitralCells
    from model.synapse import Synapse
    from model.granule_cells import GranuleCells

    # Reset old stuff from Brian memory.
    clear(erase=True, all=True)
    defaultclock.reinit()

    # Initialize the random generator (necessary mainly for parallel simulations).
    np.random.seed()

    """
    Parameters
    ----------
    Get the parameter values from the `ps` module, which in turn gets the
    values from the file specified in parameters.py.

    Set some aliases for the different cell population sizes. Also check that
    there is an even number of cells for each column.

    Finally set some simulation parameters.
    """
    psmt = model.PARAMETERS['Mitral']
    psgr = model.PARAMETERS['Granule']
    pscommon = model.PARAMETERS['Common']

    n_mitral = pscommon['N_mitral']
    n_glomeruli = n_granule = n_subpop = pscommon['N_subpop']

    # Check that there is an even number of mitral cells in each sub-population.
    assert n_mitral % n_subpop == 0, \
        "N_mitral is not a multiple of the number of sub-populations N_subpop."
    n_mitral_per_subpop = n_mitral/n_subpop

    defaultclock.dt = pscommon['simu_dt']
    simu_length = pscommon['simu_length']

    """
    Population Initialization
    -------------------------
    1. glomeruli
    2. synapses between granule and mitral cells
    3. mitral cells
    4. granule cells
    """
    # Glomeruli
    glom = Glomerule()
    glom.add_eqs()
    glom.make_pop(n_glomeruli*n_mitral_per_subpop)

    # Synapses (granule -- mitral)
    synexc = Synapse(synapse_type='exc')  # excitatory synapse
    synexc.set_eqs_model()

    syninhib = Synapse(synapse_type='inhib')  # inhibitory synapse
    syninhib.set_eqs_model()

    # Mitral cells
    mt = MitralCells()
    mt_supp_eqs = {'var': ['- I_syn', '- g_input*V'],
                   'eqs': [synexc.get_eqs_model(),
                           Equations("g_input : siemens*meter**-2")]}
    mt.add_eqs(supp_eqs=mt_supp_eqs)
    mt.make_pop(n_mitral)
    mt.pop.V = (psmt['V_t'] - psmt['V_r'])*np.random.random_sample(np.shape(mt.pop.V)) \
               + psmt['V_r']

    # Granule cells
    gr = GranuleCells()
    gr_supp_eqs = {'var': ['-I_syn'],
                   'eqs': [syninhib.get_eqs_model()]}
    gr.add_eqs(supp_eqs=gr_supp_eqs)
    gr.make_pop(n_granule)
    gr.pop.V_D = psgr['E_L']
    gr.pop.V_S = psgr['E_L']

    """
    Connecting Populations
    ----------------------
    1. Glomeruli and mitral cells
    2. Mitral cells and granule cells
    """
    # Connecting mitral cells to glomeruli.
    glmt_connections = diag(ones(n_mitral))

    # Glomeruli--Mitral interactions
    @network_operation(when='start')
    def mt_input():
        mt.pop.g_input = dot(glom.pop.g, glmt_connections)

    # Connecting sub-populations of mitral cells to granule cells.
    mtgr_connections = mutils.intrapop_connections(n_mitral, n_granule,
                                                   n_subpop, n_mitral_per_subpop)

    # Inter sub-population connectivities.
    inter_conn_rate = pscommon['inter_conn_rate']
    inter_conn_strength = pscommon['inter_conn_strength']
    homeostasy = pscommon['homeostasy']
    mtgr_connections, grmt_connections = mutils.interpop_connections(
        mtgr_connections, n_mitral, n_subpop, n_mitral_per_subpop,
        inter_conn_rate, inter_conn_strength, homeostasy)

    # Mitral--Granule interactions
    @network_operation(when='start')
    def graded_synapse():
        """Computes granule and mitral s_syn"""
        mt.pop.state('T')[:] = 0.
        mt.pop.state('T')[mt.pop.get_refractory_indices()] = 1.
        gr.pop.s_syn = dot(mt.pop.s, mtgr_connections)
        mt.pop.s_syn = dot(gr.pop.s, grmt_connections)

    @network_operation(when='start')
    def sum_s():
        """Computes granule self s_syn (for its glomerular column only)"""
        for subpop in xrange(n_subpop):
            start = subpop*n_mitral_per_subpop
            stop = start + n_mitral_per_subpop
            gr.pop.s_syn_self[subpop] = sum(mt.pop.state('s')[start:stop])

    @network_operation(when='after_groups')
    def keep_reset():
        mt.pop.state('V')[mt.pop.get_refractory_indices()] = psmt['V_r']

    """
    Simulation Monitoring
    ---------------------
    Monitor state variables for the different populations.
    """
    glom_ps = ('g')
    mt_ps = ('s', 's_syn', 'V')
    gr_ps = ('V_D', 's_syn', 's', 's_syn_self')

    # Simulation monitors
    rec_neurons = True  # Must be set to True if we want accurate MPS and STS
    timestep = int(pscommon['resample_dt']/pscommon['simu_dt'])
    monit_glom = mutils.monit(glom.pop, glom_ps, timestep, reclist=rec_neurons)
    monit_mt = mutils.monit(mt.pop, mt_ps, timestep, reclist=rec_neurons,
                            spikes=True)
    monit_gr = mutils.monit(gr.pop, gr_ps, timestep)

    """
    Running Simulation
    ------------------
    Create a Network object and put everything simulation-related in it.
    Then run this network.
    """
    # Gathering simulation objects.
    netw = Network(glom.pop, mt.pop, gr.pop,
                   mt_input, graded_synapse, keep_reset, sum_s,
                   [m for m in monit_glom.values()],
                   [m for m in monit_mt.values()],
                   [m for m in monit_gr.values()])

    # Simulation run.
    if args.no_brian_output:
        report_output = None
    else:
        report_output = "text"
    netw.run(simu_length, report=report_output)

    """
    Information Output
    ------------------
    """
    if args.full_ps:
        print 'Full set of parameters:'
        print_dict(model.PARAMETERS)

    burnin = pscommon['burnin']
    times = monit_gr['s'].times
    sig_start = where(times > burnin)[0][0]

    sts_indexes = {}
    mps_indexes = {}
    fftmax = {}

    mps_indexes['whole'] = analysis.mps(monit_mt['V'], 0, n_mitral, sig_start)
    gr_s_syn_self_whole = np.zeros(monit_gr['s_syn_self'][0].shape)

    # MPS and STS computation for each subpopulation.
    for subpop in xrange(n_subpop):
        start = subpop*n_mitral_per_subpop
        stop = start + n_mitral_per_subpop
        sts = analysis.sts(monit_gr['s_syn_self'][subpop], monit_mt['spikes'],
                           start, stop, sig_start, burnin)
        sts_indexes[subpop] = sts
        gr_s_syn_self_whole += monit_gr['s_syn_self'][subpop]
        mps = analysis.mps(monit_mt['V'], start, stop, sig_start)
        mps_indexes[subpop] = mps

    # STS for the whole population.
    sts_indexes['whole'] = analysis.sts(gr_s_syn_self_whole, monit_mt['spikes'],
                                        0, n_mitral, sig_start, burnin)

    # FFT max index.
    fftmax = analysis.fftmax(monit_gr['s_syn_self'], n_subpop,
                             pscommon['resample_dt'], sig_start)

    # Peak distances index.
    peak_distances = {}
    if n_subpop > 1:
        for sub_i, sub_j in pairs(n_subpop):
            sig1 = monit_gr['s_syn_self'][sub_i]
            sig2 = monit_gr['s_syn_self'][sub_j]
            if not peak_distances.has_key(sub_i):
                peak_distances[sub_i] = {}
            pd_index = analysis.peak_dist_circ_index(sig1, sig2)
            peak_distances[sub_i][sub_j] = {}
            peak_distances[sub_i][sub_j]['mean'] = pd_index[0]
            peak_distances[sub_i][sub_j]['disp'] = pd_index[1]

    if not args.no_summary:
        print '\nParameters: using', args.psfile
        print 'Populations:', n_subpop, 'glomerular columns;',
        print n_mitral, 'mitral cells;', n_granule, 'granule cells.'
        print 'Times:', simu_length, 'of simulation; dt =', defaultclock.dt, '.'
        print 'Indexes: STS =', sts_indexes, '\nMPS =', mps_indexes
        print 'FFT peaks (Hz):', fftmax
        print 'Peak distances index:', peak_distances

    """
    Plotting
    --------
    Plot monitored variables and a scatter plot.
    """
    if not args.no_plot:
        # Raster plot
        spikes_it = monit_mt['spikes'].it
        plotting.raster_plot(spikes_it[0], spikes_it[1], mtgr_connections)
        # Membrane potentials
        if not rec_neurons:  # if we only have a couple of recorded neurons
            plotting.memb_plot_figure(monit_mt, monit_gr, rec_neurons, n_granule)
        # Granule synapses
        plotting.granule_figure(monit_gr, pscommon)
        show()

    """
    Simulation records
    ------------------
    Put numpy arrays in var `results` to save them into the simulation record.
    Note: the variable must be monitored by Brian.
    """
    # Add parameters.
    ps_arrays = {'mtgr_connections': (mtgr_connections,
                 "Connection matrix from mitral (rows) to granules (columns)")}

    # Add results.
    array_spikes_it = np.array((monit_mt['spikes'].it[0],
                                monit_mt['spikes'].it[1]))
    results = {}

    # Mean inputs.
    mean_inputs = np.ndarray((n_glomeruli, monit_glom['g'].values.shape[1]))
    for glom in xrange(n_glomeruli):
        start_subpop = glom*n_mitral_per_subpop
        stop_subpop = start_subpop + n_mitral_per_subpop
        mean_inputs[glom] = np.mean(monit_glom['g'].values[start_subpop:stop_subpop],
                                    axis=0)

    # Mean membrane potentials.
    mean_memb_pot = np.ndarray((n_glomeruli*2, monit_mt['V'].values.shape[1]))
    bin_interco_matrix = (mtgr_connections > 0.)
    interco_neurons = (bin_interco_matrix.sum(axis=1) > 1)
    for glom in xrange(n_glomeruli):
        start_subpop = glom*n_mitral_per_subpop
        stop_subpop = start_subpop + n_mitral_per_subpop
        # Get subpopulation membrane potentials and interconnected neurons.
        subpop_memb_pot = monit_mt['V'].values[start_subpop:stop_subpop]
        subpop_interco_neurons = interco_neurons[start_subpop:stop_subpop]
        # Compute one mean for interconnected neurons and another for the other neurons.
        mean_pop = np.mean(subpop_memb_pot[~subpop_interco_neurons], axis=0)
        mean_pop_interco = np.mean(subpop_memb_pot[subpop_interco_neurons], axis=0)
        mean_memb_pot[glom*2] = mean_pop
        mean_memb_pot[glom*2 + 1] = mean_pop_interco

    results['data'] = {
        'spikes_it': [array_spikes_it,
                      "Spikes: one array for the neuron number, another one for the spike times."],
        'input': [mean_inputs,
                  "Mean network input conductance value for each glomerule."],
        's_granule': [monit_gr['s'].values, "Variable 's' of the granules."],
        's_syn_self': [monit_gr['s_syn_self'].values,
                       "Variable 's_syn' for the granule, without integrating the mitral 's' from other subpopulations."],
        'mean_memb_pot': [mean_memb_pot,
                          "Mean membrane potential. For each subpop: one mean for the interconnected neurons and one mean for the non-interconnected neurons."]}
    results['indexes'] = {'MPS': mps_indexes, 'STS': sts_indexes,
                          'FFTMAX': fftmax, 'peak_distances': peak_distances}

    return {'set': model.PARAMETERS, 'arrays': ps_arrays}, results
def score_site(code, bd, site):
    return sum(code[aa, n1, n2] for aa, (n1, n2) in zip(bd, pairs(site)))