def export(self):
    """Serialize this crawled page into a flat dict (e.g. for DB insertion).

    URL components (domain, subdomain, extension, filetype) are derived
    by re-parsing self.url through a Link helper.
    """
    l = Link(self.url,self.source_url)
    # parse_url returns a parsed Link-like object exposing domain/subdomain/etc.
    l = l.parse_url(self.url)
    return {
        "url": self.url,
        "domain": l.domain,
        "subdomain": l.subdomain,
        "extension": l.extension,
        "filetype": l.filetype,
        "date": [dt.now()],          # history list: one timestamp per crawl
        "source": self.source_url,
        "title": self.title,
        "cited_links": self.links,
        "cited_links_ids": self.domain_ids,
        "cited_domains": self.domains,
        "html": self.html,
        "text": self.text,
        "depth": self.depth,
        #"keywords": self.keywords,
        #"description": self.description,
        "meta": self.meta,
        "crawl_nb": 0,               # first export: no re-crawls yet
        "status": [True],            # history lists, parallel to "date"
        "msg": ["Ok"]
        #"lang": self.metalang,
    }
class TestLinkMethods(unittest.TestCase):
    """Integration tests for Link's backend connectors (MySQL, Redis, Sphinx).

    NOTE(review): these require live MySQL/Redis/Sphinx services reachable
    with the configured credentials — they are not isolated unit tests.
    """

    def setUp(self):
        # Fresh Link per test; connections are opened lazily by each helper.
        self.c = Link()
        pass

    def tearDown(self):
        pass

    def test_connectMysql(self):
        # Requires a 'maimai' database; fetchAllRows is expected to return a tuple.
        db = self.c.connectMysql(db = 'maimai')
        db.query('show tables')
        rows = db.fetchAllRows()
        self.assertEqual(type(rows), tuple)

    def test_connectRedis(self):
        # redis-py returns keys() as a list.
        redis = self.c.connectRedis()
        keys = redis.keys()
        self.assertEqual(type(keys), list)

    def test_connectSphinx(self):
        # A Sphinx query result dict always carries 'status' and 'matches'.
        sphinx = self.c.connectSphinx()
        res = sphinx.query('a')
        self.assertIn('status', res)
        self.assertIn('matches', res)
def nav_dd_icon(icon_class="fa fa-envelope-o", label_class="label label-success", label_text=""):
    """Build a dropdown-toggle nav Link wrapping an Icon that carries a label Span."""
    badge = Span(label_class, label_text)
    glyph = Icon(icon_class)
    glyph.append_widget(badge)
    toggle = Link(href="#", css_classes="dropdown-toggle", data_toggle="dropdown")
    toggle.append_widget(glyph)
    return toggle
def post_link():
    ''' Performed when a link is POSTed. Inserts the (sanitized) data into the
    database. The description is parsed as Markdown and saved in HTML, while
    allowing a few attributes
    ------------------
    NOTE: `request` is global and is implicitly passed as a parameter. It would
    probably be better to pass the data as a param for testing purposes.
    ------------------
    NOTE: Maybe save the raw data as Markdown and only render it once needed ?
    Makes editing posts easier, but puts more load on the server.
    '''
    # Whitelists for the bleach sanitizer applied to the description field.
    allowed_tags = ("p", "h1", "h2", "h3", "h4", "h5", "h6", "b", "em",
                    "small", "code", "i", "pre", "strong", "table", 'thead',
                    'tbody', 'th', 'tr', 'td', 'ul', 'ol', 'li', 'input')
    allowed_attrs = ('type', 'disabled', 'checked')

    # Sanitize every user-supplied field before persisting.
    title = bleach.clean(request.form['title'])
    url = bleach.clean(request.form["url"])
    desc = bleach.clean(request.form['desc'],
                        tags=allowed_tags,
                        attributes=allowed_attrs)

    timestamp = time.time()
    tags = []

    post = Link(title, url, desc, timestamp)
    post.set_tags(bleach.clean(request.form['tags']))
    post.write()
    return redirect("/")
def fetch_domains_id(self):
    """Collect the netloc of every valid link on the page into self.domain_ids."""
    collected = []
    for raw in self.links:
        candidate = Link(raw, self.url)
        if candidate.is_valid():
            collected.append(candidate.netloc)
    self.domain_ids = collected
    return self.domain_ids
def fetch_domains(self):
    """Collect the domain of every valid link on the page into self.domains."""
    collected = []
    for raw in self.links:
        candidate = Link(raw, self.url)
        if candidate.is_valid():
            collected.append(candidate.domain)
    self.domains = collected
    return self.domains
def delete_link(id):
    """Confirm (GET) or perform (POST) deletion of the link with the given id."""
    form = forms.DeleteLinkForm()
    link = Link.from_id(id)
    # Unknown id: send the visitor to the 404 page.
    if link is None:
        return redirect('/404')
    # Non-GET (i.e. the confirmation was submitted): delete and go home.
    if request.method != "GET":
        Link.delete_from_id(id)
        return redirect("/")
    # GET: show the confirmation page.
    return render_template(Config.theme + "delete.jinja",
                           app=Config, link=link, form=form)
def add_link(self, node1, node2, bandwidth):
    """Create a Link between node1 and node2 with symmetric bandwidth and
    register it on both endpoints.

    Raises Exception if a link between the two endpoints already exists.
    """
    link = Link(node1.get_id(), node2.get_id(), bandwidth, bandwidth)
    end_point = link.get_end_points()
    # BUG fix: self.links is a dict keyed by end_point, so `link in self.links`
    # tested Link objects against keys and never detected duplicates.
    # Test the key instead.
    if end_point in self.links:
        raise Exception("Link already exist...")
    self.links[end_point] = link  # (original assigned this twice; once suffices)
    node1.add_link(link)
    node2.add_link(link)
def is_valid(self):
    """Validate self.url via a Link; on failure, record msg/code/step/status."""
    #logging.info("Valid url?")
    candidate = Link(self.url, self.source_url, self.debug)
    if candidate.is_valid():
        #logging.info("Yes")
        return True
    # Propagate the failure details from the Link onto this page object.
    self.msg = candidate.msg
    self.code = candidate.code
    self.step = "Validating page"
    self.status = False
    return False
def process(self,filexml,mgrEvent):
    """Parse an XML config file: read manager credentials, then build and
    start one Link per <link> element, indexed by its id attribute.

    :param filexml: path to the XML configuration file
    :param mgrEvent: manager-event object handed to each Link
    """
    xmldoc = minidom.parse(filexml)
    # <manager> holds the AMI connection credentials.
    itemlist2 = xmldoc.getElementsByTagName('manager')
    for ast in itemlist2:
        self.asterisk_server = ast.getElementsByTagName('server')[0].firstChild.nodeValue
        self.asterisk_login = ast.getElementsByTagName('login')[0].firstChild.nodeValue
        self.asterisk_secret = ast.getElementsByTagName('secret')[0].firstChild.nodeValue
        # BUG fix: the original line was syntactically invalid
        # ("' login: '******'" — a redacted credential left broken code).
        # Restore a valid concatenation printing the login value.
        print('server: ' + self.asterisk_server + ' login: ' +
              self.asterisk_login + ' secret: ' + self.asterisk_secret)
    itemlist = xmldoc.getElementsByTagName('link')
    # BUG fix: `print len(itemlist)` was a Python 2 print statement in a file
    # otherwise using print() calls.
    print(len(itemlist))
    for s in itemlist:
        link = Link(mgrEvent)
        link.linkid = s.attributes['id'].value
        link.csp = s.getElementsByTagName('csp')[0].firstChild.nodeValue
        link.channels = s.getElementsByTagName('channels')[0].firstChild.nodeValue
        link.freeChannels = link.channels  # all channels free at startup
        aux = s.getElementsByTagName('routes')[0].firstChild.nodeValue
        link.routes = aux.split(',')
        print(s.attributes['id'].value)
        print(s.getElementsByTagName('csp')[0].firstChild.nodeValue)
        print(s.getElementsByTagName('routes')[0].firstChild.nodeValue)
        self.links[link.linkid] = link
        link.startLink()
def service(self):
    """Listen on iport, accept incomming connections. For a new connection,
    the first 4 bytes indicate neighbor id."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind( ('', self.iport) )
    s.listen(1)
    # Accept-loop runs forever; each accepted peer announces its id first.
    while True:
        conn, addr = s.accept()
        # First 4 bytes: neighbour id as a network-byte-order unsigned int.
        neighbour = struct.unpack('!I', get_data(4, conn)[0])[0]
        # Look up the properties of the link between that neighbour and us.
        link_property = self.topology[(neighbour, self.vrid)]
        tlink = Link(link_property, self.iqueue)
        # Map neighbour id -> its address and Link object.
        self.l2p[neighbour] = {'addr':addr, 'link':tlink}
        tlink.setup(conn)
        pass
def _createLink(self):
    """Instantiate this module's Link in the world at its initial pose and register it."""
    pos, rot = self._getInitialValues(LINK)
    link_name = '%s.link' % self.name
    self.link = Link(self.world,
                     name=link_name,
                     pos=pos,
                     rot=rot,
                     cfg=self.cfg['link'],
                     module=self,
                     data=self.data)
    self.world.add(self.link)
    logger.info("using link pos: %s" % pos)
def merge_paths(self, used_paths):
    """Reduce available bandwidth on every link along each used path."""
    # TODO: Need to rethink if apps have min. bandwidth requirement
    for entry in used_paths:
        hops = entry[1]
        # Walk consecutive node pairs along the path.
        for a, b in zip(hops, hops[1:]):
            edge = self.links[Link.get_id(a, b)]
            edge.set_bandwidth(edge.get_bandwidth() - self.bandwidth / 10)
def get_link(self, node1, node2):
    """Return the stored Link between node1 and node2, or None if absent."""
    key = Link.get_id(node1.get_id(), node2.get_id())
    # dict.get yields None for a missing key, matching the original contract.
    return self.links.get(key)
def create_link(self, connection1, connection2, buffer_size, capcity, static_cost, link_id):
    """Create a Link between two connections, register it under link_id, and return it."""
    new_link = Link(connection1, connection2, buffer_size, capcity,
                    static_cost, link_id, self)
    self.links[link_id] = new_link
    if DEBUG:
        print("Link successfully created, id:", link_id)
    return new_link
def add_link(self, source, target, weight=1.0):
    """Return the existing source->target link if one exists, else create one."""
    # if we've already got a link between these two neurons, return it
    # otherwise add a new one
    existing = source.is_connected_outgoing(target)
    if existing is not None:
        return existing
    return Link(source, target, weight)
def __init__(self, physical_host, physical_port, neighbours_info, lock):
    """Initialise a routing node: state tables, the physical Link layer,
    and trace-route bookkeeping.

    :param physical_host: host address this node binds to
    :param physical_port: port this node binds to
    :param neighbours_info: neighbour descriptors (one socket opened per entry)
    :param lock: shared lock guarding concurrent table access
    """
    self.physical_host = physical_host
    self.physical_port = physical_port
    self.neighbours_info = neighbours_info
    self.destination = {}       # known destinations
    self.passing_node = {}      # next-hop per destination
    self.distance_table = []
    self.last_updates = []
    self.registered_handlers = {}
    # Link layer dispatches incoming frames to run_handler.
    self.link = Link(self.run_handler, self.physical_host, self.physical_port)
    self.link.create_neighbour_sockets(len(neighbours_info))
    # Trace-route state: results, current TTL, completion flag, target.
    self.trace_route_result = []
    self.trace_route_ttl = 1
    self.lock = lock
    self.trace_route_done = True
    self.trace_dest = None
    self.initialize_table()
def get_all_links(self):
    """Fetch every link from the controller; return [] on failure or empty result."""
    (status, result) = self._send_template_request('getAllLinks')
    if not (status and result):
        return []
    return [Link(self, link_info) for link_info in result]
def nodepair(request):
    """Django view: compute reliability/availability for a node pair (or all
    pairs) of a graph described in the JSON request body.

    POST: builds nodes/links from the body, stores them in the session, and
    returns the computed metrics as JSON. Non-POST: rebuilds the graph from
    the session and returns it as JSON. Returns 401 if authentication fails.

    NOTE(review): the response JSON is assembled by string concatenation and
    re-parsed; json.dumps on a dict would be safer — left as-is here.
    """
    username = json.loads(request.body)["username"]
    password = json.loads(request.body)["password"]
    if authenticate(username=username, password=password) is not None:
        if request.method == 'POST':
            nodes = []
            links = []
            linksLabels = []
            for i in json.loads(request.body)["nodes"]:
                nodes.append(node_from_dict(i))
            for i in json.loads(request.body)["links"]:
                linksLabels.append(link_from_dict(i))
            # Optional start/end labels select a single pair.
            startLabel = json.loads(request.body).get("start")
            endLabel = json.loads(request.body).get("end")
            d = {}
            for i in nodes:
                if i.label == startLabel:
                    start = i
                elif i.label == endLabel:
                    end = i
                d[i.label] = i  # label -> node lookup for link endpoints
            for i in linksLabels:
                links.append(
                    Link(i.length, i.failureRate, i.repairRate, d[i.src],
                         d[i.dest], i.label))
            request.session["nodes"] = nodes
            request.session["links"] = links
            g = Graph(nodes, links)
            t = json.loads(request.body)["t"]
            # 'start' is only bound if startLabel matched a node above.
            if 'start' in locals():
                rel = g.calculate_reliability_all_paths(start, end, t)
                ava = g.calculate_availability_all_paths(start, end)
                s = "{\"result\":{\"pair\":\"" + rel[0][0].label + "," + rel[0][1].label + "\",\"reliability\":" + str(rel[1]) + ",\"availability\":" + str(ava[1]) + "}}"
            else:
                # All-pairs mode: list per-pair metrics plus the s,t summary.
                rel = g.calculate_reliability_all_paths(None, None, t)
                ava = g.calculate_availability_all_paths(None, None)
                s = "{\"result\":["
                for i in range(len(rel[0])):
                    s += "{\"pair\":\"" + rel[0][i][0][0].label + "," + rel[0][i][0][1].label + "\",\"reliability\":" + str(rel[0][i][1]) + " ,\"availability\":" + str(ava[0][i][1]) + "},"
                s = s[:-1]  # drop trailing comma
                s += "],\"reliability\":{\"s,t\":" + str(rel[1][0]) + ",\"av\":" + str(rel[1][1]) + "},"
                s += "\"availability\":{\"s,t\":" + str(ava[1][0]) + ",\"av\":" + str(ava[1][1]) + "}}"
            response = json.loads(s)
            return JsonResponse(response)
        else:
            # Non-POST: replay the graph previously stored in the session.
            nodes = request.session["nodes"]
            links = request.session["links"]
            g = Graph(nodes, links)
            return JsonResponse(g.to_json())
    else:
        return HttpResponse(status=401)
def simple_human_arm(seg1_len, seg2_len, q0, base=None):
    """Creates a simple human-like robotic arm with 7 links and 2
    segments with the desired lengths and starting joint configuration

    :param seg1_len: The length of the first segment of the arm
    :type seg1_len: int

    :param seg2_len: The length of the second segment of the arm
    :type seg2_len: int

    :param q0: 1xN vector of the starting joint configuration
    :type q0: numpy.ndarray

    :param base: (Optional) (x, y, z) location of arm base
    :type base: numpy.ndarray or None

    :returns: 7 link, 2 segment "human" arm.
    :rtype: maddux.robot.Arm
    """
    # DH-style parameters for the 7 links: (theta, d, a, alpha).
    dh_rows = [
        (0, 0, 0, 1.571),
        (0, 0, 0, -1.571),
        (0, seg1_len, 0, -1.571),
        (0, 0, seg2_len, -1.571),
        (0, 0, 0, 1.571),
        (0, 0, 0, 1.571),
        (0, 0, 0, 0),
    ]
    links = np.array([Link(*row) for row in dh_rows])
    return Arm(links, q0, 'simple_human_arm', 4, base)
def __init__(self, timestep, freeFlowSpeed, backwardWaveSpeed, jamDensity, length, downstreamCapacity, upstreamCapacity=DEFAULT, ID=None):
    """Spatial-queue link: a generic Link whose upstream capacity may differ
    from its downstream capacity (DEFAULT means: same as downstream)."""
    # Create a generic link...
    Link.__init__(self, timestep, freeFlowSpeed, backwardWaveSpeed,
                  jamDensity, length, downstreamCapacity, ID)
    # ... then allow spatial queues to have different upstream and
    # downstream capacities. upstreamCapacity is given per hour and is
    # converted to per-timestep units.
    self.downstreamCapacity = self.capacity
    self.upstreamCapacity = (self.downstreamCapacity
                             if upstreamCapacity == DEFAULT
                             else upstreamCapacity / HOURS * timestep)
def check_fruit_collisions(snake, fruit, settings, screen, score):
    """If the snake's head is on the fruit: grow the snake by one tail link,
    respawn the fruit, and bump the score."""
    tail = snake.links[-1]
    head_on_fruit = (snake.head.rect.top == fruit.rect.top
                     and snake.head.x == fruit.x)
    if head_on_fruit:
        # New link trails the current tail by one step.
        snake.links.append(
            Link(settings, screen, tail.x, tail.y - settings.speed))
        fruit.update_coordinates()
        score.add_score()
        settings.fruit_on_screen = False
def AddIncomingLink(self, parent, argument=None, setting=None, linkType=LinkTypes.required, shouldProliferate=True):
    """Record a Link from `parent` into this node; always returns True."""
    incoming = Link(parent, self, argument, setting, linkType)
    self._incomingLinks.append(incoming)
    return True
def findSlope():
    """Compute a slope for each matched probe point relative to the previous
    probe, write the annotated probes to slopeData, and attach probes to the
    matching links. Returns the list of Link records read from linkData.
    """
    link_data = []
    file_line_count = 0
    # Reading link file
    with open(linkData) as street_data:
        for line in street_data:
            link_data.append(Link(line))
    # Reading matched probe data
    with open(matchedData) as matched_data:
        result_data = open(slopeData, 'w')
        prev_probe = None
        print("Calculating slope for each mapped probe")
        for line in matched_data:
            probe = MatchedProbe(line)
            # Check for link
            # First probe of a link has no predecessor to compute slope from.
            if not prev_probe or probe.linkID != prev_probe.linkID:
                probe.slope = ''
            else:
                try:
                    # Calculating slope
                    start, end = list(
                        map(float, [probe.longitude, probe.latitude])), list(
                            map(float, [prev_probe.longitude, prev_probe.latitude]))
                    # calculating hypotenuse
                    # Ground distance between the two probes, converted to km.
                    hypotenuse = calculate_distance(start[0], start[1],
                                                    end[0], end[1]) / 1000
                    opposite = float(probe.altitude) - float(
                        prev_probe.altitude)
                    # NOTE(review): (2*pi*atan(x))/360 equals atan(x)*pi/180,
                    # i.e. it treats the atan result (radians) as if it were
                    # degrees — TODO confirm the intended unit conversion.
                    probe.slope = (2 * math.pi *
                                   math.atan(opposite / hypotenuse)) / 360
                except ZeroDivisionError:
                    # Coincident probes: zero horizontal distance.
                    probe.slope = 0.0
            # check for same linkID with current probe data matched link
            for link in link_data:
                if probe.linkID == link.linkID and link.slopeInfo != '':
                    link.ProbePoints.append(probe)
                    break
            # Increment line number
            file_line_count += 1
            result_data.write(probe.toString())
            prev_probe = probe
        result_data.close()
    return link_data
def add_link(self, to_box, from_side=st.right, to_side=st.left):
    """Create a Link from this box to to_box, wire its sides, refresh the
    scene, and return it."""
    #print "link"
    name = "%s_%s" % (self.box_name, to_box.box_name)
    connection = Link(self, to_box, self.scene())
    connection.from_box_side(from_side)
    connection.to_box_side(to_side)
    self.links[name] = connection
    self.update_links()
    self.scene().invalidate(self.scene().sceneRect())
    return connection
def __init__(self, full_hyperlink, links=None):
    """Wrap a hyperlink; discover child links if none are given and derive
    the root domain."""
    self.full_hyperlink = full_hyperlink
    self.links = links
    if not self.links:
        self.create_links()
    # This doesn't feel great, maybe pull root_url creation method out of Link?
    self.domain = Link("#null", self.full_hyperlink).root_url
def parseLinks(self, linkParams):
    """Parse links from linkParams, dict"""
    parsed = {}
    #print "{}:{} --cost:{}--> {}:{} --cost:{}--> {}:{}".format(
    #addr1, p1, c12, addr2, p2, c21, addr1, p1)
    for addr1, addr2, p1, p2, c12, c21 in linkParams:
        # Each entry: (local port, remote port, cost there, cost back, Link).
        wire = Link(addr1, addr2, c12, c21, self.latencyMultiplier)
        parsed[(addr1,addr2)] = (p1, p2, c12, c21, wire)
    return parsed
def test_cost(self):
    """A node linked to another reports the link's cost for that neighbour."""
    print("test_cost")
    expected_cost = 6
    node_a, node_b = Node("A"), Node("B")
    node_a.add_link(Link(node_a, node_b, expected_cost))
    self.assertEqual(node_a.cost(node_b), expected_cost)
def insertAfter(self, after, key, data):
    """Insert a new (key, data) node right after the first node whose key
    equals `after`; raise ValueError if no such node exists."""
    cursor = self.first
    while cursor is not None:
        if cursor.key == after:
            cursor.next = Link(key, data, cursor.next)
            return
        cursor = cursor.next
    raise ValueError("No key found")
def link_ptp(self, other, method=""): """ Create p-t-p link with other interface Raise ValueError if either of interface already connected. :type other: Interface :returns: Link instance """ # Try to check existing LAG el = Link.objects.filter(interfaces=self.id).first() if el and other not in el.interfaces: el = None if (self.is_linked or other.is_linked) and not el: raise ValueError("Already linked") if self.id == other.id: raise ValueError("Cannot link with self") if self.type in ("physical", "management"): if other.type in ("physical", "management"): # Refine LAG if el: left_ifaces = [i for i in el.interfaces if i not in (self, other)] if left_ifaces: el.interfaces = left_ifaces el.save() else: el.delete() # link = Link(interfaces=[self, other], discovery_method=method) link.save() return link else: raise ValueError("Cannot connect %s interface to %s" % ( self.type, other.type)) elif self.type == "aggregated": # LAG if other.type == "aggregated": # Check LAG size match # Skip already linked members l_members = [i for i in self.lag_members if not i.is_linked] r_members = [i for i in other.lag_members if not i.is_linked] if len(l_members) != len(r_members): raise ValueError("LAG size mismatch") # Create link if l_members: link = Link(interfaces=l_members + r_members, discovery_method=method) link.save() return link else: return else: raise ValueError("Cannot connect %s interface to %s" % ( self.type, other.type)) raise ValueError("Cannot link")
def __init__(self, filename, duration): self.filename = filename # the duration of the simulation, in number of timesteps self.duration = int(duration / globals.dt) # Import the network object parameters with open(self.filename) as f: network_objects = json.load(f) # Create links for l in network_objects['links']: # Clear the variable link = None # Add to idmapping link = Link(l['id'], l['connection1'], l['connection2'], \ l['rate'], l['delay'], l['buffersize'], l['track1'] == 1, \ l['track2'] == 1) globals.idmapping['links'][l['id']] = link # Create hosts for h in network_objects['hosts']: # Clear the variable host = None # Add to idmapping host = Host(h['id'], h['linkid']) globals.idmapping['hosts'][h['id']] = host # Create routers if network_objects['routers'] != [{}]: for r in network_objects['routers']: # Clear the variable router = None # Get the list of links connected to each router link_list = [] for lin_id in r['links']: link_list.append(globals.idmapping['links'][lin_id]) # Initialize router and add to idmapping router = Router(r['id'], link_list) globals.idmapping['routers'][r['id']] = router # Create flows for f in network_objects['flows']: # Clear the variable flow = None # add to idmapping if f['congestion_control'] == 'reno': flow = Flow(f['id'], f['source'], f['destination'], f['amount'], \ f['start'], f['track'] == 1) else: flow = Flow_FAST(f['id'], f['source'], f['destination'], f['amount'], \ f['start'], f['track'] == 1) globals.idmapping['flows'][f['id']] = flow
def _compose_user_panel(self):
    """Build an AdminLTE-style user panel: an image div with the avatar and
    an info div with the username and a status link.

    :returns: the assembled "user-panel" ZenWidget
    """
    userpanel = ZenWidget("div", {"class": "user-panel"})
    userimagediv = ZenWidget("div", {"class": "pull-left image"})
    userpanel.append_widget(userimagediv)
    userimg = Image(self.user_info["src"], self.user_info["css_classes"],
                    self.user_info["alt"])
    userimagediv.append_widget(userimg)
    usernamediv = ZenWidget("div", {"class": "pull-left info"})
    # BUG fix: usernamediv was created but never attached to the panel.
    userpanel.append_widget(usernamediv)
    p = ZenWidget("p")
    p.append_text(self.user_info["username"])
    # BUG fix: the username paragraph was appended to the image div,
    # leaving the info div empty.
    usernamediv.append_widget(p)
    userstatus = Link("#")
    userstatusicon = Icon("fa fa-circle text-success")
    userstatusicon.append_text(self.user_info["userstatus"])
    userstatus.append_widget(userstatusicon)
    # BUG fix: the status link was appended twice (image div AND panel);
    # attach it once, inside the info div.
    usernamediv.append_widget(userstatus)
    return userpanel
def getData(self):
    """Read the dialog's fields and return the Link they describe."""
    name = str(self.linkName.text())
    protocol = str(self.protocol.currentText())
    nodeName1 = str(self.node1.currentText())
    nodeName2 = str(self.node2.currentText())
    risk = str(5)  # fixed default risk
    # Resolve the selected node names to their node objects.
    endpoint_a = self.findNode(nodeName1)[1]['obj']
    endpoint_b = self.findNode(nodeName2)[1]['obj']
    return Link(endpoint_a, endpoint_b, name, protocol, risk)
def __init__(self, timestep, freeFlowSpeed, backwardWaveSpeed, jamDensity, length, capacity, ID=None): Link.__init__(self, timestep, freeFlowSpeed, backwardWaveSpeed, jamDensity, length, capacity, ID) # Create a cell for each timestep needed to traverse the link self.cells = list() # set proportion of cars p = 0 # set vehicle lengths (in feet) and driver reaction time (in seconds) busLength = 40 carLength = 15.75 reactionTime = 1.25 # override backward wave speed, jam density, and capacity given derived equations self.backwardWaveSpeed = ((p * carLength + (1 - p) * busLength) / reactionTime) self.jamDensity = 1 / ( (self.freeFlowSpeed * reactionTime + p * carLength + (1 - p) * busLength)) self.capacity = self.freeFlowSpeed / ( self.freeFlowSpeed * reactionTime + p * carLength + (1 - p) * busLength) # show link properties that were changed print("Link: ", self.ID) print("capacity (veh/s): ", self.capacity) print("jamDensity (veh/ft): ", self.jamDensity) print("backwardWaveSpeed (ft/s): ", self.backwardWaveSpeed) print("----------------") for c in range(self.freeFlowTime): newCell = Cell(self.capacity, self.jamDensity * self.length / self.freeFlowTime, self.backwardWaveSpeed / self.freeFlowSpeed) self.cells.append(newCell)
def __init__(self, settings, screen):
    """Create a 3-link snake at a random grid-aligned position, heading right."""
    self.screen = screen
    self.screen_rect = screen.get_rect()
    self.settings = settings
    self.speed = settings.speed
    self.colour = settings.snake_colour
    # Random start snapped to the grid defined by settings.width.
    self.start_x, self.start_y = (randrange(0, settings.screen_size[0],
                                            settings.width)
                                  for _ in range(2))
    self.head = Link(settings, screen, self.start_x, self.start_y)
    body = [Link(settings, screen, self.head.x,
                 self.head.y - self.speed * i)
            for i in range(1, 3)]
    self.links = [self.head] + body
    self.direction = 'right'
    self.change_to = ''
def __main__():
    """Entry point: resolve url/date from CLI args (falling back to Link()),
    then parse, clean, and publish the county data."""
    nj_municipals = json.load(open('./json/nj_municipals.json'))
    counties = list(nj_municipals.keys())
    argc = len(sys.argv)
    if argc == 1:
        # No args: Link() supplies both url and date.
        url, date = Link()
    elif argc == 2:
        # Only the url given; Link() still supplies the date.
        _, date = Link()
        url = sys.argv[1]
    else:
        url, date = sys.argv[1], sys.argv[2]
    print(url)
    print(date)
    data = Parse(url, counties)
    total_df = Clean(csv_file, data, date, nj_municipals)
    Update(total_df, csv_file)
    Today(total_df, date, counties, json_file)
def links(self):
    """Extract anchor hrefs from the page HTML, resolve site-relative paths
    against the current URL, and return them wrapped as Link objects."""
    found = []
    for href in re.findall('<a href="(.*?)"', self.__html):
        if len(href) > 2 and href[0] == '/' and href[1] != '/':
            # Site-relative path (single leading slash): prefix current URL.
            href = f'{self.__current_url}/{href[1:]}'
        elif href == '/':
            # Bare root link points at the current URL itself.
            href = self.__current_url
        found.append(Link(url=href))
    return found
def test_add_link(self):
    """Adding one link to a node makes its link count 1."""
    print("test_add_link")
    node_a, node_b = Node("A"), Node("B")
    node_a.add_link(Link(node_a, node_b, 5))
    self.assertEqual(len(node_a), 1)
def create_links(self):
    """Create the two directed links between core1 and core2 and, when they
    share one link id, register that id on both cores."""
    forward = Link(self.core1, self.core2)
    self.links.append(forward)
    backward = Link(self.core2, self.core1)
    self.links.append(backward)
    # BUG fix: `is` compared object identity, which only works by accident
    # for small interned values; equal-but-distinct id objects were silently
    # skipped. Use value equality.
    if forward.get_link_id() == backward.get_link_id():
        self.link_id = forward.get_link_id()
        self.core1.add_link_id(self.link_id)
        self.core2.add_link_id(self.link_id)
def parseLinks(self, linkParams):
    """Parse links from linkParams, dict"""
    #print "{}:{} --cost:{}--> {}:{} --cost:{}--> {}:{}".format(
    #addr1, p1, c12, addr2, p2, c21, addr1, p1)
    result = {}
    for addr1, addr2, p1, p2, c12, c21 in linkParams:
        # use Link class constructor to build a link, then put it into links dictionary
        # caution!: link and links are different!
        result[(addr1,addr2)] = (p1, p2, c12, c21,
                                 Link(addr1, addr2, c12, c21,
                                      self.latencyMultiplier))
    return result
def createNet(self, squareSize, width, height, posX, posY):
    """Build a width x height grid of particles joined by links of rest
    length squareSize, with the two top corners pinned in place."""
    points = [Particle([0.0, 0.0]) for x in xrange(width*height)]
    total = len(points)
    for idx in xrange(total):
        # Link to point to the right (rows) — skip the last column.
        right = idx + 1
        if right <= total and right % width != 0:
            points[idx].links.append(Link(points[idx], points[right], squareSize))
        # Link to point above (columns) — skip the last row.
        below = idx + width
        if below <= total - 1:
            points[idx].links.append(Link(points[idx], points[below], squareSize))
    # Pin the top two points
    points[0].pinned = True
    points[0].position = [250, 150]
    points[width-1].pinned = True
    points[width-1].position = [250, 250]
    return points
def link(self, src, dst):
    """
    Create a veth Link between two nodes

    :param src: The source node of the link
    :param dst: The destination node of the link
    """
    log.debug('Linking %s to %s', src.id, dst.id)
    new_link = Link(src, dst)
    # Register the new link
    self.links.append(new_link)
    return new_link
def add_link(self, to_box, from_side = "right", to_side="left"):
    """Create a Link from this box to to_box, translating the string side
    names, and refresh the scene."""
    #print "link"
    src_side = string_to_side(from_side)
    dst_side = string_to_side(to_side)
    name = "%s_%s" % (self.box_name, to_box.box_name)
    connection = Link(self, to_box)
    connection.from_box_side(src_side)
    connection.to_box_side(dst_side)
    self.links[name] = connection
    self.update_links()
    self.s.invalidate(self.s.sceneRect())
    return connection
def add_link(self, curr_capacity=0.0, max_capacity=25.0, price=None, traffic=None):
    """Append a new Link (id = running counter) to the pool and start its trace."""
    fresh = Link(self.counter, curr_capacity, max_capacity, price, traffic)
    self.link_pool.append(fresh)
    self.trace[fresh] = [[], []]  # price, curr_capacity
    self.counter += 1
def index(start=None):
    ''' ROUTE : localhost/
    Displays an unfiltered list of links, ranging from `start` (0 if not set)
    to `Config.max_links_per_page`.
    --------------------
    Parameters:
    - `start`: Amount of links to skip (i.e. ignore the `start`ieth first posts)
    '''
    start = 0 if start is None else start
    # Fix: the page size was hard-coded to 20 although the docstring promises
    # Config.max_links_per_page; fall back to 20 if the setting is absent.
    links = Link.get_posts(start, getattr(Config, "max_links_per_page", 20))
    return render_template(Config.theme + "links.jinja", app=Config, links=links)
def fetch_links(self):
    ''' extract raw_links and domains '''
    self.domains = []
    self.links = []
    self.domain_ids = []
    hrefs = [n.get('href') for n in self.doc.find_all("a")]
    # Drop empty, bare-slash and fragment-only hrefs.
    hrefs = [n for n in hrefs
             if n is not None and n != "" and n != "/" and n[0] != "#"]
    for url in hrefs:
        # BUG fix: the original tested 'mailto' in its own `if` with a bare
        # `pass`, so mailto links fell through into the javascript check's
        # `else` branch and were processed anyway. Skip both schemes.
        if url.startswith('mailto') or url.startswith('javascript'):
            continue
        l = Link(url)
        if l.is_valid():
            url, domain, domain_id = l.clean_url(url, self.url)
            self.domains.append(domain)
            self.links.append(url)
            self.domain_ids.append(domain_id)
    return (self.links, self.domains, self.domain_ids)
def getLinks(self):
    """Scrape the GlobeNewswire RSS feed at self.url and return Link records
    for items mentioning a NASDAQ symbol from reader.requestArray."""
    s = requests.Session()
    r = s.get(self.url)
    soup = BeautifulSoup(r.content)
    linksArray = []
    items = soup.findAll('item')
    index = 0;
    for item in items:
        # Parallel lists: the index-th title/description/link belong to item.
        title = soup.select('item title')
        description = soup.select('item description')
        link = soup.select('item link')
        # Look for "(NASDAQ: XXXX)"-style ticker mentions (3-4 letters).
        match = re.search("[(]\s?nasdaq(:| :|: | :|)\s?(?P<symbol>[a-z][a-z][a-z][a-z]?)\s?[)]",item.getText().lower())
        if match:
            if match.group("symbol"):
                # Only keep symbols we are actually tracking.
                for symbol in reader.requestArray:
                    if symbol[0].lower() == match.group("symbol"):
                        newLink = Link()
                        newLink.symbol = symbol[0]
                        newLink.url = link[index].text
                        newLink.text = description[index].text
                        newLink.linkText = title[index].text
                        #newLink.date = dates[index].text.strip()
                        newLink.source = "GlobeNewswire"
                        linksArray.append(newLink)
        index= index+1
    return linksArray
def __init__(self):
    """Set up service helpers: lazily initialise the shared MySQL, Redis and
    Sphinx handles and read the image host from the config file.

    NOTE(review): db/r/sp are read before assignment here, so they are
    presumably class attributes shared across instances — TODO confirm.
    """
    self.tool = Tool()
    self.link = Link()
    if not self.db:
        self.db = self.link.connectMysql(db = 'maimai')
    if not self.r:
        self.r = self.link.connectRedis()
    if not self.sp:
        self.sp = self.link.connectSphinx()
    cf = ConfigParser()
    cf.read('conf/config.ini')
    self.imageHost = cf.get('aliyun_oss', 'host')
def setClause(self):
    """Create a Clause from the dialog fields, attach it to the selected
    document, optionally link it under a parent clause, and emit the
    open-element signal. (Python 2 code: uses unicode().)"""
    title = unicode(self.ui.titleEdit.text())
    parent = unicode(self.ui.parentBox.currentText())
    self.clause = Clause()
    self.clause.setTitle(title)
    self.clause.setType(self.project.getTIM().getType(unicode(self.ui.typeBox.currentText())))
    self.document = self.project.getDocument(unicode(self.ui.documentBox.currentText()))
    self.document.addClause(self.clause)
    # Sentinel combo-box entry meaning "no parent clause".
    if (parent != "Sem Clausula Pai") :
        parentClause = self.clausesList[unicode(self.ui.parentBox.currentText())]
        link = Link()
        link.addChild(self.clause.getID())
        link.addParent(parentClause)
        link.consolidateLink(self.project)
    self.openElementSignal.emit("clause:" + self.clause.getID())
def test(name):
    """Manual smoke test: open the serial Link and run a few AT commands,
    printing each result. (Python 2 code: uses print statements.)"""
    logger.info("hello world %s" % __name__)
    d = Link('/dev/ttyUSB1')
    logger.debug("%s" % d)
    d = Link('/dev/ttyUSB1')
    logger.debug("%r" % d)
    if not d.isOpen():
        d.open()
    # Basic attention command.
    command = commands.ATCommand()
    result = d.process(command)
    print "%r" % command
    # Query then set the error-reporting mode (CMEE).
    command = commands.CMEE.query()
    result = d.process(command)
    print "%r" % command
    command = commands.CMEE.assign(2)
    result = d.process(command)
    print "%r" % command
    # Query the PDP context definition.
    command = commands.CGDCONT.query()
    result = d.process(command)
    print "%r" % command
def show_arbiter_masters(self):
    """Create one ArbiterMaster box per arbiter of this module, stacked
    vertically to the right of this box, each wired back with an
    arbiter-master Link. No-op if boxes already exist.
    (Python 2 code: uses print statements.)"""
    if len(self.arbiter_boxes) > 0:
        return
    print "show_arbiter_masters()"
    count = self.s.module_arbiter_count(self.box_name)
    print "%d arbiters" % count
    #print "Add %d arbiter boxes" % num_m
    #setup the start position for the case where there is only one master
    position = QPointF(self.pos())
    rect = QRectF(self.rect)
    # Start to the right of this box, vertically centred on it, then shift
    # up so the column of `count` boxes is centred as a whole.
    arb_x = position.x() + rect.width() + ARB_MASTER_HORIZONTAL_SPACING
    arb_y = position.y() + rect.height() / 2
    arb_y -= (count - 1) * ARB_MASTER_VERTICAL_SPACING
    arb_y -= ((count - 1) * ARB_MASTER_RECT.height()) / 2
    arb_pos = QPointF(arb_x, arb_y)
    #for i in range(0, len(self.arbiter_masters)):
    for name in self.s.arbiter_master_names(self.box_name):
        #print "Add Arbiter %s" % self.arbiter_masters[i]
        arb_rect = QRectF(ARB_MASTER_RECT)
        #am = ArbiterMaster(name = self.arbiter_masters[i],
        am = ArbiterMaster(name = name,
                           position = arb_pos,
                           y_pos = position.y(),
                           scene = self.scene(),
                           slave = self)
        #am.movable(False)
        self.arbiter_boxes.append(am)
        # Connect this box's right side to the arbiter box's left side
        # with a bezier arbiter-master link.
        al = Link(self, am, self.scene(), lt.arbiter_master)
        al.from_box_side(st.right)
        al.to_box_side(st.left)
        al.en_bezier_connections(True)
        self.links[am] = al
        #Progress the position
        arb_pos = QPointF(arb_pos.x(),
                          arb_pos.y() + arb_rect.height() + ARB_MASTER_VERTICAL_SPACING)
    self.update_links()
def _simulate_link(self, link, payload, error_tables):
    """Build a new Link whose tx carries `payload` and whose valid rx frames
    carry the payload corrupted per `error_tables`; timeout and invalid rx
    frames are copied over unchanged.

    NOTE(review): offset 10 + 12 is presumably the header length before the
    payload bytes — TODO confirm against the frame format.
    """
    # Write the payload into the tx frame after the 22-byte prefix.
    for ii in range(len(payload)):
        link.tx['data'][10 + 12 + ii] = payload[ii]
    new_link = Link(link.tx)
    corrupted_payload = self._corrupt_payload(payload, error_tables)
    for rx in link.valid_rx:
        # print "origi:", "".join(map((lambda d: bin(d)[2:].zfill(8)), rx['xor'])), rx['bit_errors']
        # Valid receptions get the corrupted payload spliced in.
        for ii in range(len(corrupted_payload)):
            rx['data'][10 + 12 + ii] = corrupted_payload[ii]
        new_link.add_rx(rx)
    for rx in link.timeout_rx:
        new_link.add_rx(rx)
    for rx in link.invalid_rx:
        new_link.add_rx(rx)
    return new_link
def edit_link(id):
    """Render the edit page for the link with the given id; flashes any form
    validation errors. Redirects to /404 for an unknown id."""
    form = forms.SubmitLinkForm()
    link = Link.from_id(id, format=False)
    if link is None:
        return redirect('/404')
    if form.validate_on_submit():
        # NOTE(review): a valid submission is not persisted here — TODO confirm
        # whether saving is handled elsewhere or is still unimplemented.
        pass
    # Portability fix: dict.iteritems() does not exist on Python 3;
    # .items() behaves identically here on both versions.
    for field, error in form.errors.items():
        for err in error:
            flash(err)
    # Expose the tags as a comma-separated string for the edit form.
    taglist = []
    for tag in link.tags:
        taglist.append(tag.text)
    link.tags_as_string = ','.join(taglist)
    return render_template(Config.theme + "edit.jinja", app=Config, link=link, form=form)
def getLinks(self):
    """Scrape paginated BusinessWire search results and return Link records
    for today's items mentioning a tracked NASDAQ symbol (excluding
    'to present at' announcements)."""
    linksArray = []
    for pageNumber in self.pageArray:
        urlQuery = self.query.format(pageNumber)
        #print URL+urlQuery
        resp = requests.get(self.URL+urlQuery)
        soup = BeautifulSoup(resp.content)
        index = 0;
        ul = soup.findAll("ul",{"class":"bw-news-list"})
        links = soup.select("ul.bw-news-list li")
        # Parallel list: dates[index] corresponds to links[index].
        dates = soup.findAll("time")
        headlyne = soup.select("ul.bw-news-list h3")
        summaries = soup.select("ul.bw-news-list p")
        for link in links:
            title = link.findAll('a',href=True)
            text = link.findAll('p')
            #match1 = re.search("") to match the company name with symbol[0] from reader
            # Look for "(NASDAQ: XXXX)"-style ticker mentions (3-4 letters).
            match = re.search("[(]\s?nasdaq(:| :|: | :|)\s?(?P<symbol>[a-z][a-z][a-z][a-z]?)\s?[)]",link.getText().lower())
            if match:
                if match.group("symbol"):
                    # Skip conference-presentation announcements.
                    match2 = re.search("to present at",link.getText().lower())
                    if not match2:
                        # Only keep items published today.
                        if self.today in str(dates[index]):
                            for symbol in reader.requestArray:
                                if symbol[0].lower() == match.group("symbol"):
                                    newLink = Link()
                                    newLink.symbol = symbol[0]
                                    newLink.url = title[0]['href']
                                    newLink.text = text[0].text
                                    newLink.linkText = title[0].text
                                    newLink.date = dates[index].text.strip()
                                    newLink.source = "BusinessWire"
                                    linksArray.append(newLink)
            index= index+1
    return linksArray
def set_flow(self, chosen_paths):
    """Create Flow objects for the communication pattern, assign each flow
    its chosen path's links, then rebalance bandwidth on every link."""
    self.flows = {}
    # print "chosen_paths: ", chosen_paths
    # Create Flow Objects
    for src, dst, amount in self.comm_pattern:
        fl = Flow(src, dst, amount)
        self.flows[fl.get_end_points()] = fl
    for entry in chosen_paths:
        hops = entry[1]
        # Collect the Link objects along consecutive node pairs of the path.
        link_list = [self.links[Link.get_id(a, b)]
                     for a, b in zip(hops, hops[1:])]
        flow = self.flows[Flow.get_id(hops[0], hops[-1])]
        flow.set_path(link_list)
    for link in self.get_links().values():
        link.adjust_flow_bandwidths()
def run(self):
    """Worker loop: poll shared flags every 0.1 s; on _add_tx build a new
    Link from the tx message (archiving any previous one), on _add_rx
    attach the rx message to the current Link.

    NOTE(review): reconstructed control-flow nesting from collapsed source —
    the `else: self.link = None` is taken to pair with `if tx:` (i.e. an
    unparsable tx message clears the current link) — TODO confirm.
    """
    while True:
        if self._add_tx.value:
            tx = StringMessage(str(self._tx_message.value))
            if tx:
                # Archive the previous link before starting a new one.
                if self.link:
                    self.add_link(self.link)
                self.link = Link(tx)
            else:
                self.link = None
            self._add_tx.value = False
        if self._add_rx.value:
            if self.link:
                rx = StringMessage(str(self._rx_message.value))
                if rx:
                    self.link.add_rx(rx)
            self._add_rx.value = False
        time.sleep(0.1)
def getLinks(self):
    """Scrape paginated MarketWired search results and return Link records
    for today's items mentioning a tracked NASDAQ symbol (excluding
    'to present at' announcements)."""
    s = requests.Session()
    # Warm-up request to establish session cookies before searching.
    dummyResp = s.get("http://www.marketwired.com/")
    linksArray = []
    for pageNumber in self.pageArray:
        urlQuery = self.query.format(pageNumber)
        resp = s.get(self.URL1)
        queryResp = s.get(resp.url+urlQuery)
        soup = BeautifulSoup(queryResp.content)
        links = soup.findAll("div",{"style":"margin-bottom: 30px;"})
        # Parallel list: dates[index] corresponds to links[index].
        dates = soup.findAll("span",{"style":"color: #888888; font-size: 9pt"})
        index = 0;
        for link in links:
            title = link.findAll('a',href=True)
            text = link.findAll('div',{"class":"search-results-width"})
            # Look for "(NASDAQ: XXXX)"-style ticker mentions (3-4 letters).
            match = re.search("[(]\s?nasdaq(:| :|: | :|)\s?(?P<symbol>[a-z][a-z][a-z][a-z]?)\s?[)]",link.getText().lower())
            if match:
                if match.group("symbol"):
                    # Skip conference-presentation announcements.
                    match2 = re.search("to present at",link.getText().lower())
                    if not match2:
                        for symbol in reader.requestArray:
                            if symbol[0].lower() == match.group("symbol"):
                                # Only keep items published today.
                                if self.today in str(dates[index]):
                                    newLink = Link()
                                    newLink.symbol = symbol[0]
                                    newLink.url = "http://www.marketwired.com"+title[0]['href']
                                    newLink.text = text[1].text
                                    newLink.linkText = title[0].text
                                    newLink.date = dates[index].text.strip()
                                    newLink.source = "MarketWired"
                                    linksArray.append(newLink)
            index= index+1
    return linksArray
def arbiter_master_selected(self, arbiter_master):
    """Show a single activated ArbiterMaster box for the selected arbiter,
    replacing any boxes currently shown, and wire it to this box with a
    bezier arbiter-master Link. Re-entry is guarded by ignore_selection.
    (Python 2 code: uses print statements.)"""
    self.dbg = True
    if self.dbg: print "PS: arbiter_master_selected()"
    # Guard against recursive selection events triggered by clearSelection.
    if self.ignore_selection:
        return
    self.ignore_selection = True
    self.s.clearSelection()
    #Maybe this is to remove other arbiter masters!
    self.remove_arbiter_masters()
    position = QPointF(self.pos())
    rect = QRectF(self.rect)
    # Place the box to the right of this one, vertically centred.
    arb_x = position.x() + rect.width() + ARB_MASTER_HORIZONTAL_SPACING
    arb_y = position.y() + (rect.height() / 2) - (ARB_MASTER_ACT_RECT.height() / 2)
    arb_pos = QPointF(arb_x, arb_y)
    #print "Adding arbiter master"
    am = ArbiterMaster(name = arbiter_master,
                       position = arb_pos,
                       y_pos = arb_y,
                       scene = self.scene(),
                       slave = self)
    am.set_activate(True)
    am.update_view()
    self.arbiter_boxes.append(am)
    # Right side of this box -> left side of the arbiter box.
    al = Link(self, am, self.scene(), lt.arbiter_master)
    al.from_box_side(st.right)
    al.to_box_side(st.left)
    al.en_bezier_connections(True)
    self.links[am] = al
    #print "update links"
    self.update_links()
    self.ignore_selection = False
    self.s.invalidate(self.s.sceneRect())
    self.dbg = True