def __init__(self, root, month, year):
    """Build the month table: day numbers, weekday names, and the table widget."""
    self.root = root
    self.month = month
    self.year = year
    # weekday abbreviations (Greek, Monday..Sunday) — runtime strings, kept as-is
    self.daysList = ['Δε','Τρ','Τε','Πε','Πα','Σα','Κυ']
    # build a linked list with the day numbers of the month
    self.numOfDays = self.getNumOfDays(self.month)
    self.numbers = Link.Link(int)
    for i in range(self.numOfDays):
        self.numbers.add(i+1)
    # position the list on today's day of the month
    self.numbers.setDefault(time.localtime()[2])
    # build a linked list with the weekday names
    self.days = Link.Link(str)
    for i in range(7):
        self.days.add(self.daysList[i])
    self.days.setDefault(self.daysList[time.localtime()[6]])
    # initialize the weekday: step back to the weekday of day 1 of the month
    for i in range(time.localtime()[2]):
        self.days.setPrevious()
    self.firstDay = self.days.getData()
    # build the table, starting from day 1 on the computed first weekday
    self.days.setDefault(self.firstDay)
    self.numbers.setDefault(1)
    self.table = self.makeTable()
def build_web(self, start_url):
    """Start at *start_url* and build a web ``self.levels`` deep using BFS.

    Relies on the ``crawl`` helper to visit sites; records connections in
    ``self.web`` and persists both the link list and the web to JSON.
    """
    self.links = [Link(url=start_url)]
    frontier = [0]
    for _ in range(self.levels):
        next_frontier = []
        for src in frontier:
            for found in self.crawl(self.links[src]):
                # reuse the id of an already-seen link; otherwise register it
                if found in self.links:
                    dst = self.links.index(found)
                else:
                    dst = len(self.links)
                    self.links.append(found)
                    next_frontier.append(dst)
                self.web[src] = self.web.get(src, []) + [dst]
        frontier = next_frontier
    self.save_links('links.json')
    self.save_web('web.json')
    return
def merge_sorted_link(link1, link2):
    """Merge two sorted linked lists (ascending by default).

    :param link1: first sorted list
    :param link2: second sorted list
    :return: Link, the merged sorted list
    """
    merged = Link()
    n1 = link1.get_first_node()
    n2 = link2.get_first_node()
    while n1 or n2:
        # one side exhausted: splice the remainder of the other and stop
        if not n1:
            merged.cat_with_node(n2)
            break
        if not n2:
            merged.cat_with_node(n1)
            break
        # take the strictly smaller head from list 1, otherwise from list 2
        if n1.data < n2.data:
            merged.append(n1.data)
            n1 = link1.get_next_node(n1)
        else:
            merged.append(n2.data)
            n2 = link2.get_next_node(n2)
    return merged
def addLink(self): tA = self.linkA tB = self.linkB if tA == -1 or tB == -1: print "ERROR: a link end not assigned" if tA != tB and not self.hasLink(tA, tB): self.links.append( Link(self, self.getThought(tA), self.getThought(tB), importance=self.linkImportance)) #print "Creating a link!" # set circle colour back to white TA = self.getThought(tA) TA.canvas.itemconfig(TA.smallCircleIndex, fill=g.toHex(self.cs.smallCircle)) # reset link assignments self.resetLinkData() self.lowerLinks()
def crawl(self, origin):
    """Fetch ``origin.url`` and return a unique, sanitized list of the
    links found on that page."""
    response = requests.get(origin.url)
    document = html.fromstring(response.content)
    # deduplicate by collecting Links in a set
    found = set()
    for href in document.xpath('//a/@href'):
        if href.startswith('http'):
            found.add(Link(url=href))
        else:
            # relative href: resolve against the origin's root
            found.add(Link(base=origin.root, path=href))
    return list(found)
def supetransition(self):
    """Eliminate "#" (epsilon) transitions from the automaton.

    For every epsilon link: propagate final/initial status across it,
    re-route incoming links around it, drop the "#" tag, and finally
    remove links left with no tags at all.
    """
    rm2 = []  # epsilon links that end up empty and must be removed
    for link in self.links:
        if "#" in link.tag:
            # status propagation across the epsilon transition
            if link.destination.isFinal():
                link.origin.final = True
            if link.origin.isInitial():
                link.destination.initial = True
            result = False
            # re-route every link entering this epsilon link's origin so it
            # reaches the epsilon link's destination directly
            for lk in self.links:
                if link != lk and lk.destination == link.origin:
                    anotherLink = self.isLink(lk.origin, link.destination)
                    if anotherLink[0]:
                        # NOTE(review): sibling code paths use addTag() here —
                        # confirm Link.add(lk) is the intended API call
                        anotherLink[1].add(lk)
                    else:
                        nlk = Link(lk.origin, link.destination, lk.tag[0])
                        for t in lk.tag[1:]:
                            nlk.addTag(t)
                        self.links.append(nlk)
                    result = True
            if result:
                link.delTag("#")
            if len(link.tag) == 0:
                rm2.append(link)
    for l in rm2:
        self.links.remove(l)
    self.syncState()
def addLink(self, manager):
    """Append a new Link, built from the "link" command argument, to the
    topic selected by ``self.index`` (1-based)."""
    if self.index == -1:
        raise AssertionError("Index not found in command string")
    target_url = self.getStringArgument("link")
    topic = manager.get(self.index - 1)
    fresh = Link.Link()
    fresh.url = target_url
    topic.links.append(fresh)
    print("Added link number " + str(len(topic.links)))
def updateLink(self, active_id, id, object_id, name, node1, node2, direction, capacity, distance):
    """Update the link identified by *active_id* in place, but only when
    the supplied attributes actually differ from its current ones."""
    # build a throwaway link to compare property-for-property
    candidate = lk.Link(id, object_id, name, node1, node2, direction, capacity, distance)
    current = self.getLinkById(active_id)
    if candidate.getAllProperties() != current.getAllProperties():
        current.updateLink(id, object_id, name, node1, node2,
                           direction, capacity, distance)
def initProgram():
    """Read links and demands from their text files, build the Variable
    cross product, and print the per-demand constraint breakdown.

    Creates/truncates ``output.lp`` (nothing is written to it yet).
    """
    linksTxt = "txtFiles/links.txt"
    demandsTxt = "txtFiles/demands.txt"
    links = []
    demands = []
    variables = []
    # each input line holds three space-separated fields
    with open(linksTxt, "r") as f:
        for line in f:
            fields = line.strip().split(" ")
            links.append(Link(fields[0], fields[1], fields[2]))
    with open(demandsTxt, "r") as f:
        for line in f:
            fields = line.strip().split(" ")
            demands.append(Demand(fields[0], fields[1], fields[2]))
    # a Variable exists for every (link, demand) pair except when the link
    # directly closes the demand (sink->source or source->sink endpoints)
    for link in links:
        for demand in demands:
            if (link.lastElement != demand.firstElement
                    and link.firstElement != demand.lastElement):
                variables.append(Variable(link, demand))
    outputFile = "output.lp"
    # NOTE: the file is created/truncated but not written to (the string
    # building was left commented out in the original); dead locals
    # (temp, string, discarded getVolume() call) have been removed
    with open(outputFile, "w+") as f:
        for demand in demands:
            print("DEMAND: " + str(demand))
            variablesByDemand = getVariablesByDemand(variables, demand)
            # variables whose link starts at the demand's source
            for variableDemand in variablesByDemand:
                if variableDemand.link.firstElement == demand.firstElement:
                    print(variableDemand)
            # variables whose link ends at the demand's sink
            for variableDemand in variablesByDemand:
                if variableDemand.link.lastElement == demand.lastElement:
                    print(variableDemand)
            intermediateVariables = getIntermediateVariables(variables, demand)
            print("INTERMEDIATES")
            for element in intermediateVariables:
                print("POSITIVE")
                for y in element[0]:
                    print(y)
                print("NEGATIVE")
                for k in element[1]:
                    print(k)
def __init__(self):
    """Construct the fixed 18-21-21-21-24 feed-forward network topology.

    Layer widths were originally 18-10-10-2 (see inline notes); each link
    array is the full cross product between two consecutive layers.
    """
    self.InputLayerWidth = 18
    self.HiddenLayerWidth = 21   # was 10
    self.OutputLayerWidth = 24   # was 2
    self.NumInputToHiddenLinks = 18 * 21     # was 18 * 10
    self.NumHiddenToHiddenLinks = 21 * 21    # was 10 * 10
    self.NumHiddenToOutputLinks = 21 * 24    # was 10 * 2
    # node layers
    self.InputLayer = [Node() for _ in range(self.InputLayerWidth)]
    self.InputLayer[0].CurrentActivation = Decimal(0)
    self.HiddenLayer1 = [Node() for _ in range(self.HiddenLayerWidth)]
    self.HiddenLayer2 = [Node() for _ in range(self.HiddenLayerWidth)]
    self.HiddenLayer3 = [Node() for _ in range(self.HiddenLayerWidth)]
    self.OutputLayer = [Node() for _ in range(self.OutputLayerWidth)]
    # fully-connected link arrays between consecutive layers
    self.InputToHidden1Links = [Link() for _ in range(self.NumInputToHiddenLinks)]
    self.Hidden1ToHidden2Links = [Link() for _ in range(self.NumHiddenToHiddenLinks)]
    self.Hidden2ToHidden3Links = [Link() for _ in range(self.NumHiddenToHiddenLinks)]
    self.Hidden3ToOutputLinks = [Link() for _ in range(self.NumHiddenToOutputLinks)]
def parse_topology(generate_json):
    """Parse the Abilene topology XML into a Graph.

    When *generate_json* is True, also dump node coordinates and link
    endpoint coordinates to ``data.json`` for visualization.
    """
    tree = ET.parse("abilene-TM" + os.sep + "topo" + os.sep + "Abilene-Topo-10-04-2004.xml")
    root = tree.getroot()
    topology = root.find("topology")
    node_list = []
    link_list = []
    # Graph keeps live references to these lists, so appending below also
    # populates the graph
    graph = Graph(node_list, link_list)
    if generate_json:
        f = open("data.json", "w")
        output = {"nodes":{}, "links":[]}
    # nodes; lat/lng is only read when JSON output is requested
    for node in topology.iter("node"):
        new_node = Node(node.attrib["id"])
        node_list.append(new_node)
        if generate_json:
            location = node.find("location")
            new_node.set_location(float(location.attrib["latitude"]), float(location.attrib["longitude"]))
            output["nodes"][new_node.node_id] =\
                (float(location.attrib["latitude"]), float(location.attrib["longitude"]))
    # physical links with their bandwidth
    for link in topology.iter("link"):
        link_id = link.attrib["id"]
        link_from = graph.find_node(link.find("from").attrib["node"])
        link_to = graph.find_node(link.find("to").attrib["node"])
        bw = int(link.find("bw").text)
        new_link = Link(link_id, link_from, link_to, bw)
        link_list.append(new_link)
        if generate_json:
            output["links"].append(\
                ((link_from.lat, link_from.lng), (link_to.lat, link_to.lng)))
    # IGP metrics live in a separate section, matched to links by id
    igp = root.find("igp").find("links")
    for link in igp.iter("link"):
        link_id = link.attrib["id"]
        link_obj = graph.find_link_by_id(link_id)
        if link_obj != None:
            link_obj.metric = float(link.find("static").find("metric").text)
    if generate_json:
        json.dump(output, f)
        f.close()
    return graph
def config(self, path):
    """Load kinematic chain definitions from the JSON file at *path*.

    Each top-level entry describes one chain as a list of link dicts with
    keys ``name``, ``pos``, ``theta``, ``lb``, ``ub``; every chain is
    appended to ``self.chains``.
    """
    # use a context manager so the config file is closed promptly
    # (previously the handle from open() was never closed explicitly)
    with open(path, mode='r') as fp:
        data = json.load(fp)
    for chain_spec in data:
        chain = KineChain(self.root)
        for entry in chain_spec:
            link = Link(entry['name'], entry['pos'], entry['theta'],
                        entry['lb'], entry['ub'])
            chain.add_link(link)
        self.chains.append(chain)
def __init__(self, root):
    """Build the calendar window: month navigation bar plus the day table."""
    self.root = root
    # month names (Greek) — runtime strings, kept as-is
    months = ['Ιανουάριος','Φεβρουάριος','Μάρτιος','Απρίλιος','Μάιος','Ιουνίος',
              'Ιούλιος','Αύγουστος','Σεπτέμβριος','Οκτώβριος','Νοέμβριος','Δεκέμβριος']
    self.year = time.localtime()[0]
    # build a linked list with the month names, positioned on the current month
    self.months = Link.Link(str)
    for i in range(12):
        self.months.add(months[i])
    self.months.setDefault(months[time.localtime()[1]-1])
    # build the frame for the months (navigation bar: < label >)
    self.monthsFrame = tk.Frame(self.root)
    self.monthsFrame.pack(expand = False,side = 'top',anchor = 'n')
    self.month = tk.StringVar()
    self.month.set(self.months.getData() + ' ' + str(self.year))
    self.left = tk.Button(self.monthsFrame, text = '<', bg = 'lightgreen', command = self.back)
    self.left.pack(fill = 'both',side = 'left')
    self.right = tk.Button(self.monthsFrame, text = '>', bg = 'lightgreen', command = self.next)
    self.right.pack(fill = 'both',side = 'right')
    self.label = tk.Label(self.monthsFrame,textvariable = self.month, font = 'Arial 10',width = 45, height = 2, bg = 'lightblue')
    self.label.pack(fill = 'both',side = 'bottom')
    # build a linked list with the month numbers, positioned on the current month
    numbers = [1,2,3,4,5,6,7,8,9,10,11,12]
    self.numbers = Link.Link(int)
    for i in range(12):
        self.numbers.add(numbers[i])
    self.numbers.setDefault(time.localtime()[1])
    # build the frame that hosts the day table
    self.tableFrame = tk.Frame(self.root)
    self.tableFrame.pack(expand = True,side = 'top',anchor = 'n')
    self.table = Table(self.tableFrame,self.numbers.getData(),self.year)
def __init__(self, url, html=""):
    """Wrap a web page: download it when no HTML is supplied, otherwise use
    the given markup, then extract its links."""
    self.url = url
    self.link = Link(self.url)
    self.response_code = 0
    self.download_sec = 0.0
    if html == "":
        # no markup supplied: fetch the page ourselves
        self.html_page = False
        self.html = ""
        self.download()
    else:
        self.html_page = True
        self.html = html
    if self.html_page:
        self.extracted_links = self.__extract_links()
def loadFromFile(self, filename, jok=False):
    """Load an Automate from a description file.

    File layout (one item per line): alphabet, state count, initial state
    ids, final state ids, then one transition per line as
    ``origin destination tag``.

    Parameters
    ----------
    filename : str
        path to the automaton description file
    jok : bool
        when True, add "#" (e-transition marker) to the alphabet

    Returns
    -------
    None on success, False when the file cannot be read
    """
    # read everything up front; `with` guarantees the handle is closed and
    # OSError replaces the original bare except (which also swallowed
    # KeyboardInterrupt/SystemExit)
    try:
        with open(filename, "r") as file:
            lines = [ln.rstrip() for ln in file.readlines()]
    except OSError:
        print("Cannot open the file, abort")
        return False
    self.alphabet = [e for e in lines[0]]
    if jok:
        self.alphabet.append("#")
    self.nbState = int(lines[1])
    self.initial = [int(e) for e in lines[2].split()]
    self.final = [int(e) for e in lines[3].split()]
    self.states = [State(i, self.alphabet) for i in range(0, self.nbState)]
    self.current = self.states[self.initial[0]]
    self.initStateStatus()
    # transitions start on line 5 of the file
    for i in range(4, len(lines)):
        line = lines[i].split()
        origin = self.states[int(line[0])]
        destination = self.states[int(line[1])]
        link = self.isLink(origin, destination)
        # a "#" self-loop is a no-op epsilon transition: skip it
        if not (line[2] == '#' and origin.id == destination.id):
            if not link[0]:
                self.links.append(Link(origin, destination, line[2]))
            else:
                # merge into the existing link between these two states
                link[1].addTag(line[2])
            origin.nbLink += 1
            origin.addLink(line[2], destination)
            if origin != destination:
                destination.nbLink += 1
def updatebyMoore(self, list):
    """Rebuild this automaton from a Moore-minimization table.

    Parameters
    ----------
    list : []
        table produced by minMoore (note: parameter shadows the builtin
        ``list``; kept for interface compatibility)

    Returns
    -------
    None
    """
    list = self.minusOne2D(list)
    # collect the distinct class ids (column 0) — these become the new states
    newState = []
    for i in range(len(list)):
        if list[i][0] not in newState:
            newState.append(list[i][0])
    nbnewState = len(newState)
    states = [State(e, self.alphabet) for e in newState]
    finals = []
    inits = []
    # a merged state is final/initial only if every original state folded
    # into it was final/initial
    for i in range(nbnewState):
        isFinal = True
        isInit = True
        for j in range(self.nbState):
            if list[j][0] == i:
                if not self.states[j].isFinal():
                    isFinal = False
                if not self.states[j].isInitial():
                    isInit = False
        if isFinal:
            finals.append(i)
        if isInit:
            inits.append(i)
    links = []
    # columns 1..len(alphabet) give the target class per alphabet symbol
    for i in range(len(states)):
        for j in range(1, len(self.alphabet) + 1):
            origin = states[list[i][0]]
            dest = states[list[i][j]]
            link = self.isLink(origin, dest, links, True)
            if not link[0]:
                links.append(Link(states[list[i][0]], states[list[i][j]], self.alphabet[j - 1]))
            else:
                # reuse the existing link and just add the symbol
                link[1].addTag(self.alphabet[j - 1])
            origin.nbLink += 1
            if origin != dest:
                dest.nbLink += 1
            origin.addLink(self.alphabet[j - 1], dest)
    self.initMachine(nbnewState, inits, finals, states, links)
def _addlinks(self, category, cat):
    '''
    Parse and add all links to a category

    @param category: a collection of XML Elements
    @param cat: the Category to add the collections to
    '''
    for link in category.findall("links/link"):
        linkid = link.get("id")
        if linkid is None:  # was `== None`; identity test is the correct idiom
            # logging.warn is a deprecated alias of logging.warning
            logging.warning("Category " + cat.term + " - invalid link")
            continue
        logging.info("Category " + cat.term + " link " + linkid)
        try:
            cat.addlink(Link.Link(linkid))
        except Exception:
            # narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; failures are logged and the loop continues
            logging.error("Category " + cat.term + " Problem processing link " + linkid)
            logging.error(sys.exc_info())
def get_index_page(self, filter=None):
    """Render the index page with one Link per file in the pages directory."""
    file_names = sorted(self.file_reader.get_files_list(self.pages_dir))
    # build each Link directly from its file name (label + target)
    links = [
        Link.Link(self.file_name_to_link_label(name),
                  self.file_name_to_link_target(name))
        for name in file_names
    ]
    return render_template('index.j2', links=links)
def read_network(filename):
    """Parse a Euro16 topology file.

    Returns ``(node_count, link_count, adjacency_matrix, link_objects)``;
    one Link object is created per non-zero matrix cell, numbered in
    row-major order.
    """
    with open('Euro16/' + filename) as handle:
        lines = handle.readlines()
    nodes = int(lines[0])
    links = int(lines[1])
    # adjacency matrix: tab-separated integers starting at line 3
    network = [[int(cell) for cell in row.split('\t')] for row in lines[2:]]
    link_objs = []
    next_id = 0
    for row in lines[2:]:
        for col, cell in enumerate(row.split('\t')):
            weight = int(cell)
            if weight != 0:
                link_objs.append(Link.Link(next_id, col, weight, cores_num))
                next_id += 1
    return nodes, links, network, link_objs
def __init__(self, connections, terminals, inhibN,threshold1, tau1, timeStep,dinc):
    """Create a neuron with *connections* synapses to the previous layer.

    Stores the module-level threshold/tau from the supplied values and
    builds one Link per incoming connection.
    """
    global threshold,tau
    # firing times and post-synaptic potentials recorded during simulation
    self.fireTime = list()
    self.psp = list()
    threshold = threshold1
    tau = tau1
    # inhibitory neurons are marked -1, excitatory +1
    self.type = -1 if inhibN > 0 else 1
    # one Link per connection to the previous layer, each carrying its own
    # weights and delays
    self.synapses = np.empty((connections), dtype=object)
    for idx in range(connections):
        self.synapses[idx] = Link(terminals, connections,threshold, tau,timeStep,dinc)
def __extract_links(self):
    """Extract a Link for every usable ``<a href>`` in ``self.html``.

    Returns a list of Link objects; anchors without an href, with an empty
    qualified URL, or whose Link construction fails are skipped.
    """
    # bug fix: bind these before the try so the final `return links` and the
    # outer except's log call can never hit an unbound name
    links = []
    log = structlog.get_logger()
    try:
        log.info("__extracting_links", base_url=self.url)
        soup = BeautifulSoup(self.html, "html.parser")
        anchors = soup.find_all('a')
        count = 0
        for href in anchors:
            count = count + 1
            url = href.get("href")
            log.info("got_href", url=url)
            if url is None:
                # Link has no href and is placeholder for future frontend processing
                continue
            # bug fix: defined before the inner try so the except-handler's
            # log call cannot raise NameError when Link() itself fails
            formatted_url = ""
            try:
                link = Link(url, self.url, href.text)
                formatted_url = link.url_qualified
                log.info("got_link", formatted_url=formatted_url)
                if link.url_qualified != "":
                    # classification currently unused downstream; kept for parity
                    link_location = "external" if link.is_external_link else "relative"
                    links.append(link)
            except Exception as e:
                exception_name = type(e).__name__
                log.exception("formatting_url_exception", exception_name=exception_name,
                              formatted_url=formatted_url)
                traceback.print_exc()
                continue
    except Exception as e:
        exception_name = type(e).__name__
        log.exception("basic_extract_url_exception", exception_name=exception_name)
        traceback.print_exc()
    return links
Python implementation by: Luis Fernando Lara Tobar and Peter Corke.
Based on original Robotics Toolbox for Matlab code by Peter Corke.
Permission to use and copy is granted provided that acknowledgement of the authors is made.

@author: Luis Fernando Lara Tobar and Peter Corke
"""

from numpy import *
from Robot import *
from Link import *

# two-link arm: unit link lengths (A=1), unit masses, centre of mass at the
# link tip, zero inertia/motor inertia, unit gear ratio on joint 1
L = []
L.append(Link(A=1))
L.append(Link(A=1))
L[0].m = 1
L[1].m = 1
L[0].r = mat([1,0,0])
L[1].r = mat([1,0,0])
L[0].I = mat([0,0,0,0,0,0])
L[1].I = mat([0,0,0,0,0,0])
L[0].Jm = 0
L[1].Jm = 0
L[0].G = 1
def cycle_detect(link):
    """Detect whether the linked list contains a cycle (Floyd's
    tortoise-and-hare).

    :param link: the list to test
    :return: boolean, True when a cycle exists
    """
    p = link.get_first_node()
    p_one, p_two = p, p
    while p_one and p_two:
        # slow pointer advances one node, fast pointer two (or None at tail)
        p_one = p_one.p_next
        p_two = p_two.p_next.p_next if p_two.p_next else None
        # meeting on a non-None node proves a cycle
        if p_one == p_two and p_one:
            return True
    return False


if __name__ == '__main__':
    # build 0..4, test (no cycle), then close the list into a ring and retest
    link = Link()
    for i in range(5):
        link.append(i)
    print(link)
    print(cycle_detect(link))
    p = link.get_last_node()
    p.p_next = link.get_first_node()
    print(cycle_detect(link))
# $Log: twolink.m,v $
# Revision 1.2 2002/04/01 11:47:20 pic
# General cleanup of code: help comments, see also, copyright, remnant dh/dyn
# references, clarification of functions.
#
# $Revision: 1.2 $
# Copyright (C) 2000-2002, by Peter I. Corke

from numpy import *
from SerialLink import *
from Link import *

# two-link arm: unit link lengths (a=1), unit masses, centre of mass at the
# link tip, zero inertia/motor inertia, unit gear ratio on joint 1
L = []
L.append(Link(a=1))
L.append(Link(a=1))
L[0].m = 1
L[1].m = 1
L[0].r = mat([1, 0, 0])
# consistency fix: was a bare list; every other r/I assignment in these
# scripts wraps the vector in mat()
L[1].r = mat([1, 0, 0])
L[0].I = mat([0, 0, 0, 0, 0, 0])
L[1].I = mat([0, 0, 0, 0, 0, 0])
L[0].Jm = 0
L[1].Jm = 0
L[0].G = 1
# Revision 1.3 2002/04/01 11:47:16 pic # General cleanup of code: help comments, see also, copyright, remnant dh/dyn # references, clarification of functions. # # $Revision: 1.3 $ # Copyright (C) 1993-2002, by Peter I. Corke from numpy import * from Link import * from Robot import * print "in puma560" L = []; L.append( Link(alpha=pi/2, A=0, D=0) ) L.append( Link(alpha=0, A=0.4318, D=0) ) L.append( Link(alpha=-pi/2, A=0.0203, D=0.15005) ) L.append( Link(alpha=pi/2, A=0, D=0.4318) ) L.append( Link(alpha=-pi/2, A=0, D=0) ) L.append( Link(alpha=0, A=0, D=0) ) L[0].m = 0 L[1].m = 17.4 L[2].m = 4.8 L[3].m = 0.82 L[4].m = 0.34 L[5].m = .09 L[0].r = mat([ 0, 0, 0 ])
from Link import *


class Solution:
    def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:
        """Remove the n-th node from the end of the list and return the
        (possibly new) head."""
        # collect every node behind a dummy head, then relink around the
        # target at node_list[-n]
        node_list: list = []
        empty_node = ListNode()
        empty_node.next = head
        head = empty_node
        head_node = head
        while head:
            node_list.append(head)
            head = head.next
        if len(node_list) > 1:
            # predecessor points past the target; when n == 1 the target is
            # the tail, so the predecessor's next becomes None
            node_list[-n - 1].next = node_list[-n + 1] if n > 1 else None
            del node_list[-n]
            return head_node.next
        return None


# ad-hoc demo: remove the 2nd-from-end node of [1, 2]
s = Solution()
a = Link([1, 2])
x = s.removeNthFromEnd(a.head_node, 2)
a.show(x)
while fast_point and slow_point != fast_point: for _ in range(2): if fast_point.next: fast_point = fast_point.next else: return False slow_point = slow_point.next return True if fast_point else False return False def dp(self, fast: ListNode, slow: ListNode): if fast and slow: next_ = fast.next next__ = next_.next if next__ and next_: return fast == slow or self.dp(next__, slow.next) else: return False return False def DP(self, head: ListNode): if head: return self.dp(head.next, head) return False a = Link([1, 2, 3, 4]) a.end_node.next = a.head_node s = Solution() print(s.DP(a.head_node))
# See also: ROBOT, PUMA560, PUMA560AKB, TWOLINK. # $Log: stanford.m,v $ # Revision 1.2 2002/04/01 11:47:18 pic # General cleanup of code: help comments, see also, copyright, remnant dh/dyn # references, clarification of functions. # # $Revision$ # Copyright (C) 1990-2002, by Peter I. Corke from numpy import * from Link import * from Chain import * L = [] L.append(Link(alpha=-pi / 2, A=0, theta=0, D=0.412, sigma=0)) L.append(Link(alpha=pi / 2, A=0, theta=0, D=0.154, sigma=0)) L.append(Link(alpha=0, A=0, theta=-pi / 2, D=0, sigma=1)) L.append(Link(alpha=-pi / 2, A=0, theta=0, D=0, sigma=0)) L.append(Link(alpha=pi / 2, A=0, theta=0, D=0, sigma=0)) L.append(Link(alpha=0, A=0, theta=0, D=0.263, sigma=0)) L[0].m = 9.29 L[1].m = 5.01 L[2].m = 4.25 L[3].m = 1.08 L[4].m = 0.63 L[5].m = 0.51 L[0].r = mat([0, .0175, -0.1105]) L[1].r = mat([0, -1.054, 0])
import sys
sys.path.append('./')
import Topic
import Link
import Searching

# fixture: one topic titled "abracadabra" holding a single tagged link
manager = Topic.TopicsManger()
topic = Topic.Topic()
topic.title = "abracadabra"
link = Link.Link()
link.url = "link_ura"
link.tags.append("arad")
link.tags.append("iasi")
topic.links.append(link)
manager.add(topic)


def test1():
    # substring search on the title should find the fixture topic
    queries = [('arg0', 'abra')]
    result = Searching.generalSearch(queries, manager)
    if result[0][0].title == "abracadabra":
        print "Test1 OK"
    else:
        print "Test1 Failed"


def test2():
    # NOTE(review): this function performs the search but never checks
    # `result` — it appears truncated at this chunk boundary; confirm
    # against the full file
    queries = [('arg0', 'ura')]
    result = Searching.generalSearch(queries, manager)
def addLink(self, node, value):
    """Register a weighted connection from this object to *node*."""
    new_link = Link(node, value)
    self.connections.append(new_link)