def partition_extension_blocks(blocks, num_blocks=None, modify_pa=True):
    # Default to one partition class per block (the last block is the freeby).
    if num_blocks is None:
        num_parts = len(blocks) - 1
    else:
        num_parts = num_blocks
    P, Q = pe_blocks(blocks[:-1], num_parts)
    freeby = blocks[-1]
    # New symbol. Note the difference: we are no longer assuming square blocks,
    # so this may not be AGL.
    new_symbol = len(blocks[0][0])
    if modify_pa:
        # Extend the permutation array:
        pa_ext = extend(blocks[:-1], P, Q, new_symbol)
        # Extend the freeby:
        pa_ext += extend([freeby], [[new_symbol]], [xrange(new_symbol)], new_symbol)
    else:
        # Extend the permutation array:
        pa_ext = extend(blocks[:-1], P, Q)
        # Extend the freeby:
        pa_ext += extend([freeby], [[new_symbol]], [xrange(new_symbol)])
    # Record the P and Q partitions as strings:
    p_q_str = 'P:' + str(P) + '\n'
    p_q_str += 'Q:' + str(Q) + '\n'
    # Convert the extended array to a string:
    pa_str = pa2str(pa_ext)
    return pa_str, p_q_str, len(pa_ext)
def partition_extension(pa_file, sought_hd, max_iter=5, num_blocks=None, ceil=False):
    # Retrieve the blocks and freeby:
    blocks, freeby, Q, P, new_symbol = prepare(pa_file, sought_hd, num_blocks, ceil=ceil)
    # Solve with ILP:
    P, coverage = make_ilp(blocks, Q, P)
    # Something went wrong with the ILP:
    if P is None:
        return None, None, None
    # Extend the permutation array:
    pa_ext = extend(blocks, P, Q, new_symbol)
    # Extend the freeby:
    pa_ext += extend([freeby], [[new_symbol]], [xrange(new_symbol)], new_symbol)
    # Record the P and Q partitions as strings:
    p_q_str = 'P:' + str(P) + '\n'
    p_q_str += 'Q:' + str(Q) + '\n'
    # Convert the extended array to a string:
    pa_str = pa2str(pa_ext)
    return pa_str, p_q_str, len(pa_ext)
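# Both partition-extension routines above depend on helpers (prepare,
# pe_blocks, make_ilp, extend, pa2str) defined elsewhere. As one example,
# pa2str serializes the extended permutation array; a minimal hypothetical
# sketch, assuming a permutation array is a list of integer sequences:
def pa2str(pa):
    # One permutation per line, symbols separated by spaces.
    return '\n'.join(' '.join(str(sym) for sym in perm) for perm in pa)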
def tt_check_all(kb, alpha, symbols, model):
    """Auxiliary routine to implement tt_entails."""
    if not symbols:
        if pl_true(kb, model):
            result = pl_true(alpha, model)
            assert result in (True, False)
            return result
        else:
            return True
    else:
        P, rest = symbols[0], symbols[1:]
        return (tt_check_all(kb, alpha, rest, extend(model, P, True)) and
                tt_check_all(kb, alpha, rest, extend(model, P, False)))
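# tt_check_all, and much of the probability, CSP, and SAT code below, uses a
# small shared helper: extend(s, var, val) returns a copy of the assignment s
# with var bound to val, leaving s unchanged. A minimal sketch in the style
# of the AIMA utilities, assuming plain dicts:
def extend(s, var, val):
    # Copy s and bind var to val in the copy; the original is not mutated.
    s2 = dict(s)
    s2[var] = val
    return s2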
def sum_out(self, var, bn):
    """Make a factor eliminating var by summing over its values."""
    variables = [X for X in self.variables if X != var]
    cpt = {event_values(e, variables): sum(self.p(extend(e, var, val))
                                           for val in bn.variable_values(var))
           for e in all_events(variables, bn, {})}
    return Factor(variables, cpt)
def enumerate_joint(variables, e, P):
    """Return the sum of those entries in P consistent with e,
    provided variables is P's remaining variables (the ones not in e)."""
    if not variables:
        return P[e]
    Y, rest = variables[0], variables[1:]
    return sum([enumerate_joint(rest, extend(e, Y, y), P)
                for y in P.values(Y)])
def all_events(variables, bn, e):
    """Yield every way of extending e with values for all variables."""
    if not variables:
        yield e
    else:
        X, rest = variables[0], variables[1:]
        for e1 in all_events(rest, bn, e):
            for x in bn.variable_values(X):
                yield extend(e1, X, x)
def create_path(a, b, contour, distancia, dir=0):
    conf_1 = -90
    conf_2 = -1
    conf_3 = "left"
    conf_4 = 0
    if dir == 1:
        conf_3 = "right"
    path = []
    contorno_points = []
    for c in contour:
        c = utils.to_utm(c)
        contorno_points.append(Point(c.x, c.y))
    a2 = utils.to_utm(a)
    a_utm = Point(a2.x, a2.y)
    b2 = utils.to_utm(b)
    b_utm = Point(b2.x, b2.y)
    ab_course = utils.bearing(a_utm, b_utm)
    # Shift the AB line sideways so the sweep starts outside the field:
    a_utm = utils.offset(a2, ab_course - 90, distancia * 80)
    b_utm = utils.offset(b2, ab_course - 90, distancia * 80)
    a2, b2 = utils.extend(a_utm, b_utm)
    AB = LineString([a2, b2])
    contorno = Polygon(contorno_points)
    # Shrink the field polygon by one working width:
    eroded = contorno.buffer(-distancia, resolution=16, join_style=1)
    for i in range(1, 200):
        # Clip each parallel pass against the eroded field boundary:
        ab_1 = AB.parallel_offset(i * distancia, conf_3)
        line = ab_1.intersection(eroded)
        if line.geom_type == "LineString" and len(line.coords) > 1:
            p1, p2 = (1, conf_2) if i % 2 != 0 else (0, -conf_2)
            centro = utils.offset(utils.toCoord(line.coords[p1]),
                                  ab_course + conf_1, distancia / 2)
            radius = distancia / 2
            # Headland turn arc between adjacent passes, in degrees:
            start_angle, end_angle = 90 - ab_course - 90, 90 - ab_course + 90
            if i % 2 == 0:
                start_angle, end_angle = 90 - ab_course + 90, 90 - ab_course - 90
            numsegments = 200
            theta = np.radians(np.linspace(start_angle, end_angle, numsegments))
            arc_x = centro.x + (radius * np.cos(theta)) * p2
            arc_y = centro.y + (radius * np.sin(theta)) * p2
            arc = LineString(np.column_stack([arc_x, arc_y]))
            for c in arc.coords:
                path.append(Point(c))
    final = LineString(path)
    # Resample the full path at 2 m steps (UTM coordinates are in meters):
    path2 = []
    for i in range(0, int(final.length / 2)):
        path2.append(final.interpolate(i * 2))
    return path2
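# create_path also calls a geometric utils.extend(a, b) that is not shown; it
# seems to stretch segment AB well past both endpoints so that every parallel
# offset still crosses the whole field. A hypothetical sketch, assuming
# Shapely Points in UTM meters and an arbitrary stretch factor:
def extend_segment(a, b, factor=10.0):
    # Push a and b apart along the AB direction by factor * |AB|.
    dx, dy = b.x - a.x, b.y - a.y
    return (Point(a.x - dx * factor, a.y - dy * factor),
            Point(b.x + dx * factor, b.y + dy * factor))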
def p(self, value):
    defaults = {
        'type': 'local',
        'localroot': './local',
        'remoteroot': './remote'
    }
    p = utils.extend(defaults, value)
    p['localroot'] = self.fixPath(p['localroot'])
    p['remoteroot'] = self.fixPath(p['remoteroot'])
    self._p = p
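# Here utils.extend plays a different role from the single-binding extend
# above: it merges a user-supplied settings dict over a dict of defaults. A
# minimal hypothetical sketch, assuming later arguments win (jQuery-style):
def extend(*dicts):
    # Merge left to right; keys in later dicts override earlier ones.
    merged = {}
    for d in dicts:
        merged.update(d)
    return merged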
def domain_splitting(self, domains=None, to_do=None, arc_heuristic=sat_up):
    """Return a solution to the current CSP, or False if there are no solutions.
    to_do is the list of arcs to check.
    """
    if domains is None:
        domains = self.csp.domains
    consistency, new_domains = self.GAC(domains, to_do, arc_heuristic)
    if not consistency:
        return False
    elif all(len(new_domains[var]) == 1 for var in domains):
        return {var: first(new_domains[var]) for var in domains}
    else:
        var = first(x for x in self.csp.variables if len(new_domains[x]) > 1)
        if var:
            dom1, dom2 = partition_domain(new_domains[var])
            new_doms1 = extend(new_domains, var, dom1)
            new_doms2 = extend(new_domains, var, dom2)
            to_do = self.new_to_do(var, None)
            return self.domain_splitting(new_doms1, to_do, arc_heuristic) or \
                   self.domain_splitting(new_doms2, to_do, arc_heuristic)
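# domain_splitting (and actions below) halves a domain with partition_domain.
# A minimal sketch in the style of the AIPython version, assuming domains are
# sets:
def partition_domain(dom):
    # Split dom into two subsets of (nearly) equal size.
    split = len(dom) // 2
    dom1 = set(list(dom)[:split])
    dom2 = dom - dom1
    return dom1, dom2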
def enumeration_ask(X, e, bn):
    """Return the conditional probability distribution of variable X
    given evidence e, from BayesNet bn. [Figure 14.9]
    >>> enumeration_ask('Burglary', dict(JohnCalls=T, MaryCalls=T), burglary
    ...  ).show_approx()
    'False: 0.716, True: 0.284'"""
    assert X not in e, "Query variable must be distinct from evidence"
    Q = ProbDist(X)
    for xi in bn.variable_values(X):
        Q[xi] = enumerate_all(bn.variables, extend(e, X, xi), bn)
    return Q.normalize()
def actions(self, state):
    var = first(x for x in state if len(state[x]) > 1)
    neighs = []
    if var:
        dom1, dom2 = partition_domain(state[var])
        to_do = self.cons.new_to_do(var, None)
        for dom in [dom1, dom2]:
            new_domains = extend(state, var, dom)
            consistency, cons_doms = self.cons.GAC(new_domains, to_do, self.heuristic)
            if consistency:
                neighs.append(cons_doms)
    return neighs
def markov_blanket_sample(X, e, bn):
    """Return a sample from P(X | mb) where mb denotes that the
    variables in the Markov blanket of X take their values from event
    e (which must assign a value to each). The Markov blanket of X is
    X's parents, children, and children's parents."""
    Xnode = bn.variable_node(X)
    Q = ProbDist(X)
    for xi in bn.variable_values(X):
        ei = extend(e, X, xi)
        # [Equation 14.12]
        Q[xi] = Xnode.p(xi, e) * product(Yj.p(ei[Yj.variable], ei)
                                         for Yj in Xnode.children)
    # (assuming a Boolean variable here)
    return probability(Q.normalize()[True])
def enumerate_all(variables, e, bn):
    """Return the sum of those entries in P(variables | e{others})
    consistent with e, where P is the joint distribution represented
    by bn, and e{others} means e restricted to bn's other variables
    (the ones other than variables). Parents must precede children in
    variables."""
    if not variables:
        return 1.0
    Y, rest = variables[0], variables[1:]
    Ynode = bn.variable_node(Y)
    if Y in e:
        return Ynode.p(e[Y], e) * enumerate_all(rest, e, bn)
    else:
        return sum(Ynode.p(y, e) * enumerate_all(rest, extend(e, Y, y), bn)
                   for y in bn.variable_values(Y))
def like(self, content, id):
    if not id:
        return
    response = client.make_request(
        "http://api.twitter.com/1/favorites/create/%s.json" % id,
        self.user.token,
        self.user.secret,
        protected=True,
        method="POST",
    )
    if response.status_code != 200:
        self.twitter_error(response)
    else:
        self.reply_template("timeline", feed=extend([json.loads(response.content)]))
def enumerate_joint_ask(X, e, P):
    """Return a probability distribution over the values of the variable X,
    given the {var:val} observations e, in the JointProbDist P. [Section 12.3]
    >>> P = JointProbDist(['X', 'Y'])
    >>> P[0,0] = 0.25; P[0,1] = 0.5; P[1,1] = P[2,1] = 0.125
    >>> enumerate_joint_ask('X', dict(Y=1), P).show_approx()
    '0: 0.667, 1: 0.167, 2: 0.167'
    """
    assert X not in e, "Query variable must be distinct from evidence"
    Q = ProbDist(X)  # probability distribution for X, initially empty
    Y = [v for v in P.variables if v != X and v not in e]  # hidden variables
    for xi in P.values(X):
        Q[xi] = enumerate_joint(Y, extend(e, X, xi), P)
    return Q.normalize()
def analyse(dataFrame, col, extra=False, chart=False, findanomalies=False):
    x = dataFrame[col]
    utils.setIndex(col)
    df = utils.concat(ct.mean(x), ct.median(x), ct.mode(x))
    df = utils.concat(df, ct.rng(x))
    df = utils.concat(df, ct.unique(x))
    df = utils.concat(df, ct.missing(x))
    df.columns = ['Mean', 'Median', 'Mode', 'Range', '%Unique', '%Missing']
    before = utils.getColumns(df)
    if extra:
        if not utils.isCategory(x, pct, maxcat):
            # Numeric column: add spread and shape statistics.
            df = utils.concat(df, ct.skew(x))
            df = utils.concat(df, ct.var(x))
            df = utils.concat(df, ct.std(x))
            df = utils.concat(df, ct.min(x))
            df = utils.concat(df, ct.max(x))
            df = utils.concat(df, ct.iqr(x))
            df = utils.concat(df, utils.isCategory(x, pct, maxcat))
            cols = ['Skewness', 'Variance', 'SD', 'Min', 'Max',
                    'InterQuartileRng', 'IsCategorical']
            df.columns = utils.extend(before, cols)
        else:
            # Categorical column: add the dominant category and its count.
            df = utils.concat(df, cf.top_catg(dataFrame, col))
            df = utils.concat(df, cf.top_value(dataFrame, col))
            df = utils.concat(df, utils.isCategory(x, pct, maxcat))
            cols = ['Top Category', 'Top Frequency', 'IsCategorical']
            df.columns = utils.extend(before, cols)
    if chart:
        if utils.isContinous(x, pct, maxcat):
            charts.continous_graph(dataFrame, col, pct, maxcat)
        else:
            charts.catg_graph(dataFrame, col)
    return df
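# In analyse, utils.extend is yet another variant: it joins two lists of
# column names. A one-line hypothetical sketch, assuming plain Python lists:
def extend(a, b):
    # Return a new list with the items of b appended after those of a.
    return list(a) + list(b)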
def dpll(clauses, symbols, model, branching_heuristic=no_branching_heuristic):
    # See if the clauses are true in a partial model
    unknown_clauses = []  # clauses with an unknown truth value
    for c in clauses:
        val = pl_true(c, model)
        if val is False:
            return False
        if val is None:
            unknown_clauses.append(c)
    if not unknown_clauses:
        return model
    P, value = find_pure_symbol(symbols, unknown_clauses)
    if P:
        return dpll(clauses, remove_all(P, symbols), extend(model, P, value), branching_heuristic)
    P, value = find_unit_clause(clauses, model)
    if P:
        return dpll(clauses, remove_all(P, symbols), extend(model, P, value), branching_heuristic)
    P, value = branching_heuristic(symbols, unknown_clauses)
    return (dpll(clauses, remove_all(P, symbols), extend(model, P, value), branching_heuristic) or
            dpll(clauses, remove_all(P, symbols), extend(model, P, not value), branching_heuristic))
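# The default branching heuristic makes no informed choice: it simply tries
# the first remaining symbol with value True. A sketch matching the
# aima-python default (first() comes from the same utilities):
def no_branching_heuristic(symbols, clauses):
    # Branch on the first unassigned symbol, trying True before False.
    return first(symbols), True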
def ga_to_df(self, v4_response):
    """Take a JSON response from the Google Analytics API and
    transform it into a pandas DataFrame."""
    try:
        rows = v4_response['reports'][0]['data']['rows']
        header = v4_response['reports'][0]['columnHeader']
        # Column names: dimensions first, then metric headers,
        # with the "ga:" prefix stripped.
        index_col = header['dimensions']
        index_col.extend([v.get('name')
                          for v in header['metricHeader']['metricHeaderEntries']])
        index_col = [re.sub(r'ga:(.*)', r'\1', v) for v in index_col]
        # One row per result: dimension values followed by metric values.
        _dims = [v.get('dimensions') for v in rows]
        _mets = [v.get('metrics')[0].get('values') for v in rows]
        for u, v in zip(_dims, _mets):
            u.extend(v)
        return pd.DataFrame(_dims, columns=index_col)
    except KeyError:
        return pd.DataFrame({'error': pd.Series(['no data for this query'])})
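# For reference, a minimal example of the response shape ga_to_df expects,
# abridged from the Analytics Reporting API v4 batchGet format (the values
# here are made up):
sample_response = {
    'reports': [{
        'columnHeader': {
            'dimensions': ['ga:date'],
            'metricHeader': {'metricHeaderEntries': [{'name': 'ga:sessions'}]},
        },
        'data': {'rows': [{'dimensions': ['20240101'],
                           'metrics': [{'values': ['42']}]}]},
    }]
}
# ga_to_df(sample_response) would yield one row with columns ['date', 'sessions'].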
def fetch(self):
    params = {"count": 100}
    if self.mapping.last_id:
        params["since_id"] = self.mapping.last_id
    response = client.make_request(
        "http://api.twitter.com/1/statuses/home_timeline.json",
        self.user.token,
        self.user.secret,
        protected=True,
        additional_params=params,
    )
    if response.status_code != 200:
        self.twitter_error(response)
    else:
        feed = extend(json.loads(response.content))
        self.reply_template("timeline", feed=feed)
        if len(feed) > 0:
            self.mapping.last_id = feed[0]["id"]
            self.mapping.last_fetch = datetime.datetime.utcnow()
            self.mapping.put()
def _balance(self, balanced_num=None):
    """Balance the dataset.

    Increase occlusion objs by (balanced_num + 1) times.
    :param balanced_num: required balanced_num to increase the number of occlusion objs
    """
    count = 0
    for index in range(self.data_size):
        if np.sum(self.occlusions[index]) > 0:
            count += 1
    ratio = float(count) / self.data_size
    balanced_num = int(float(1) / ratio) if balanced_num is None else balanced_num
    occlusions_add = []
    heatmaps_add = []
    faces_add = []
    names_add = []
    landmarks_add = []
    for index in range(len(self.occlusions)):
        if np.sum(self.occlusions[index]) > 0:
            # Augment each occluded sample with Gaussian-noise copies.
            for num in range(balanced_num):
                heatmap = gaussian_noise(self.heat_maps[index], color=self.color)
                heatmaps_add.append(heatmap)
                face = gaussian_noise(self.faces[index], color=self.color)
                faces_add.append(face)
                occlusions_add.append(self.occlusions[index])
                landmarks_add.append(self.aug_landmarks[index])
                names_add.append(add_postfix(self.names[index], "_gaussian_{}".format(num)))
        if self.print_debug and (index + 1) % 500 == 0:
            logger("data aug phase 2 processed {} images".format(index + 1))
    self.faces = extend(self.faces, faces_add)
    self.occlusions.extend(occlusions_add)
    self.heat_maps.extend(heatmaps_add)
    self.aug_landmarks.extend(landmarks_add)
    self.names.extend(names_add)
    self.data_size = len(self.occlusions)
    logger("length of imgs and occlusions is {}".format(self.data_size))
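# add_postfix is not shown; it appears to splice a suffix into a file name
# just before its extension. A hypothetical sketch:
import os

def add_postfix(name, postfix):
    # e.g. "img001.png" + "_gaussian_0" -> "img001_gaussian_0.png"
    root, ext = os.path.splitext(name)
    return root + postfix + ext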
def search(self, content):
    # Make an authorised search request where possible
    if self.mapping and self.mapping.username:
        logging.info("Secure search request")
        response = client.make_request(
            "http://search.twitter.com/search.json",
            self.user.token,
            self.user.secret,
            protected=True,
            additional_params={"q": content, "rpp": 50},
            custom_headers={"User-Agent": config.google_api["domain"]},
        )
    else:
        response = client.make_request(
            "http://search.twitter.com/search.json",
            additional_params={"q": content, "rpp": 50},
            custom_headers={"User-Agent": config.google_api["domain"]},
        )
    if response.status_code != 200:
        logging.debug(repr(response.headers) + "\n" + response.content)
    self.reply_template("timeline", feed=extend(json.loads(response.content)["results"]))
def update(self, content, id=None):
    if not content:
        return
    # Upload images where applicable
    if hasattr(self.message, "attachments"):
        for name, body in self.message.attachments:
            mime = mimetypes.guess_type(name)[0]
            if mime and mime.startswith("image/"):
                img = images.Image(body.decode())
                img.im_feeling_lucky()
                out = img.execute_transforms(output_encoding=images.JPEG)
                response = urlfetch.fetch(
                    "http://api.imgur.com/2/upload.json",
                    method="POST",
                    payload=urllib.urlencode({
                        "key": config.imgur_api["key"],
                        "image": base64.b64encode(out),
                        "caption": content,
                    }),
                )
                if response.status_code != 200:
                    logging.warn(response.content)
                else:
                    out = json.loads(response.content)
                    content += " " + out["upload"]["links"]["imgur_page"]
    params = {"status": shrink(content, 140)}
    if id:
        params["in_reply_to_status_id"] = id
    response = client.make_request(
        "http://api.twitter.com/1/statuses/update.json",
        self.user.token,
        self.user.secret,
        protected=True,
        method="POST",
        additional_params=params,
    )
    if response.status_code != 200:
        self.twitter_error(response)
    else:
        self.reply_template("timeline", feed=extend([json.loads(response.content)]))
"cpp": { "lang": "cpp", "ext": "cpp", "rules": { "default": { "encoder": "to_string" }, "int": { "repr": "int", "decoder": "stoi" }, }, "templates": { "Solution": templates["cpp"]["Solution"], "Main": templates["cpp"]["Main"] } }, }, "types": [ "int" ] } # Update all rules with defaults w.r.t. their language for lang in config["languages"]: for rule in config["languages"][lang]["rules"]: config["languages"][lang]["rules"][rule] = \ extend(config["languages"][lang]["rules"][rule], config["languages"][lang]["rules"]["default"])