def build_order(projects, dependencies):
    """Return a valid build order for *projects* (a topological sort).

    Each dependency is a pair ``(first, second)`` meaning *first* must be
    built before *second*.  Uses Kahn's algorithm: repeatedly emit a
    project whose prerequisites have all been built.

    The previous implementation had three defects: it appended to an
    undefined name (``task_queue`` instead of ``task_list``), its edge
    reset assigned to a misspelled attribute (``adjacenct``), and its
    marked-BFS could emit a project before all of its prerequisites
    (e.g. deps a->b, a->c, c->b produced a, b, c).  It also returned a
    silently truncated list when the graph contained a cycle.

    Args:
        projects: iterable of hashable project identifiers.
        dependencies: iterable of ``(first, second)`` pairs.

    Returns:
        A list containing every project in a dependency-respecting order.

    Raises:
        ValueError: if the dependency graph contains a cycle (no valid
            build order exists).
    """
    # Forward adjacency (prerequisite -> dependents) plus in-degree counts.
    successors = {p: [] for p in projects}
    in_degree = {p: 0 for p in projects}
    for first, second in dependencies:
        successors[first].append(second)
        in_degree[second] += 1

    # Seed the queue with every project that has no prerequisites.
    ready = deque(p for p in projects if in_degree[p] == 0)
    order = []
    while ready:
        project = ready.popleft()
        order.append(project)
        # "Build" the project: release its dependents.
        for dependent in successors[project]:
            in_degree[dependent] -= 1
            if in_degree[dependent] == 0:
                ready.append(dependent)

    if len(order) != len(successors):
        # Some projects never reached in-degree zero: a cycle exists.
        raise ValueError('dependency cycle: no valid build order exists')
    return order
class App(PCMSessionDelegate):
    """Tk application that captures PCM audio input and displays a running
    level graph, a spectrum view and a spectrogram.

    The app acts as its own ``PCMSession`` delegate: captured sample
    blocks arrive via ``__call__`` and are periodically folded into the
    displays by a ``MultiTimer`` driving ``update``.
    """

    BUFFER_LENGTH = 64
    # dB offset for 16-bit full-scale samples (10*log10 of 32768).
    DB_OFFSET = -10.0 * math.log10(32768.0)

    def __init__(self):
        self.root = tk.Tk()
        # Route window-close through shutdown() so capture is stopped first.
        self.root.protocol('WM_DELETE_WINDOW', self.shutdown)
        self.content = ttk.Frame(self.root, width=300, height=500)
        self.content.grid(column=0, row=0, sticky=(tk.N, tk.S, tk.E, tk.W))
        self.root.columnconfigure(0, weight=1)
        self.root.rowconfigure(0, weight=1)

        # Enumerate capture devices, keyed by name in discovery order.
        pcm = PCMSystem()
        self.devices = OrderedDict()
        for dev in pcm.inputs():
            self.devices[dev.name] = dev
            print(f'Device : {dev.name} {dev.maxIn} {dev.rate}')
        self.currentDevice = tk.StringVar()
        self.currentDevice.set(self[0].name)
        self.cards = ttk.Combobox(self.content, textvariable=self.currentDevice,
                                  values=self.names, justify=tk.LEFT)
        self.cards.bind('<<ComboboxSelected>>', self.changeCard)
        self.cards.grid(column=0, row=0, columnspan=4, sticky=(tk.N, tk.S, tk.E, tk.W))

        self.graph = Graph(self.content)
        self.graph.grid(column=0, row=1, columnspan=4, sticky=(tk.N, tk.S, tk.E, tk.W))
        self.graph.bind('<Button-1>', self.onClick)

        # Spectrum window.
        self.spec = tk.Toplevel(self.root, width=800, height=300)
        self.spectrum = SpectrumView(self.spec, bounds=Range(-120, -60), xflen=513)
        self.spectrum.configure(width=800, height=300)
        self.spectrum.pack()

        # Spectrogram window with a blue->green->yellow->red intensity ramp.
        gradient = Gradient(Stop(Colour.Blue, offset=0),
                            Stop(Colour.Green, offset=0.5),
                            Stop(Colour.Yellow, offset=0.8),
                            Stop(Colour.Red, offset=0.9))
        self.spectro = tk.Toplevel(self.root, width=800, height=400)
        self.spectrogram = Spectrogram(self.spectro, bounds=Range(-120, -60),
                                       gradient=gradient, xflen=513)
        self.spectrogram.configure(width=800, height=400)
        self.spectrogram.pack()

        self.fft = SpectralBase(fftSize=1024, viewers=[self.spectrum, self.spectrogram])

        self.startButton = ttk.Button(self.content, text='Start', command=self.start)
        self.stopButton = ttk.Button(self.content, text='Stop', command=self.stop)
        self.clearButton = ttk.Button(self.content, text='Clear', command=self.graph.clear)
        self.clearButton.grid(column=0, row=2, sticky=(tk.N, tk.S, tk.W))
        self.startButton.grid(column=2, row=2, sticky=(tk.N, tk.S, tk.E))
        self.stopButton.grid(column=3, row=2, sticky=(tk.N, tk.S, tk.E, tk.W))
        for column in [0, 2, 3]:
            self.content.columnconfigure(column, weight=1)
        self.content.rowconfigure(1, weight=1)

        self.mean = []
        self.timer = None
        self.samples = []
        self.raw = []
        # Capture from the first device by default; we are the delegate.
        self.session = PCMSession(self[0], delegate=self)

    def onClick(self, event):
        """Debug handler: log canvas coordinates of a click on the graph."""
        print(f'Click on {event.widget}')
        canvas = event.widget
        x = canvas.canvasx(event.x)
        y = canvas.canvasy(event.y)
        print(f'{self.graph.size} : ({event.x},{event.y}) -> ({x},{y})')

    @property
    def names(self):
        """Device names, in discovery order."""
        return list(self.devices.keys())

    def __getitem__(self, index):
        """Look up a device by name (str) or by position (int)."""
        if isinstance(index, str):
            return self.devices[index]
        else:
            return self.devices[self.names[index]]

    def changeCard(self, event=None):
        """Switch capture to the device currently selected in the combobox."""
        try:
            if self.session is None:
                return
            dev = self[self.currentDevice.get()]
            if dev == self.session.pcm:
                return  # already capturing from this device
            else:
                print(f'Changing to {dev}')
                self.stop()
                # Pass delegate=self so the new session keeps feeding
                # __call__ (the original omitted it, so no data would
                # arrive after switching devices).
                self.session = PCMSession(dev, delegate=self)
                # NOTE(review): setSampleRate is invoked on the Toplevel
                # (self.spec); it looks like it was meant for a view or
                # the FFT object — confirm before changing.
                self.spec.setSampleRate(self.session.samplerate)
                self.start()
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit
            # still propagate; a failed switch is logged, not fatal.
            print(f'{event}')

    def __call__(self, n, time, data=None):
        """PCMSession delegate callback: buffer a captured sample block.

        ``data`` defaults to None rather than the original mutable ``[]``
        (a shared-default-list bug); absent/empty blocks are ignored.
        """
        if data is not None and len(data) > 0:
            self.samples.extend(data)
            self.fft.add(data)

    def update(self):
        """Timer callback: fold buffered samples into the level graph."""
        if len(self.samples) > 0:
            # Average across channels, then across the buffered interval.
            data = np.mean(self.samples, axis=1)
            self.samples = []
            value = np.mean(data, axis=0)
            self.graph.add(value)

    def start(self):
        """Begin capture, FFT processing and the display refresh timer."""
        self.stop()  # make sure we're in a known state
        self.timer = MultiTimer(interval=0.05, function=self.update, runonstart=False)
        self.timer.start()
        self.fft.start()
        self.session.start()
        print(f'Started {self.session}')

    def stop(self):
        """Stop capture, the refresh timer and FFT processing (idempotent)."""
        if self.session:
            self.session.stop()
        if self.timer:
            self.timer.stop()
        if self.fft:
            self.fft.stop()
        self.timer = None

    def shutdown(self):
        """Window-close handler: stop everything, then tear down Tk."""
        self.stop()
        self.root.destroy()

    def run(self):
        """Start capture and enter the Tk main loop; always stop on exit."""
        try:
            self.start()
            self.root.mainloop()
        except Exception:
            # Narrowed from a bare except: UI errors are swallowed, but
            # KeyboardInterrupt/SystemExit propagate after cleanup.
            pass
        finally:
            self.stop()
class TestGraph(unittest.TestCase):
    """Unit tests for the word ``Graph`` (an rdflib-backed lexical graph)."""

    txt_out_file = "_.txt"
    xlsx_out_file = "_.xlsx"

    def setUp(self):
        self.g = Graph()
        # Pre-create the output files so tearDown can remove them
        # unconditionally, even for tests that never write them.
        touch(self.txt_out_file)
        touch(self.xlsx_out_file)

    def tearDown(self):
        os.remove(self.txt_out_file)
        os.remove(self.xlsx_out_file)

    def test_add_word(self):
        self.g.add_word("test", 5, "synonym", "target_word")

    def test___contains(self):
        self.g.add_word("test", 5, "synonym", "target_word")
        self.assertTrue("test" in self.g)
        self.assertFalse("notest" in self.g)

    def test_str(self):
        """str(graph) must be valid Turtle that rdflib can re-parse."""
        self.g.add_word("test", 5, "synonym", "target_word")
        self.assertIsInstance(self.g.to_str(), str)
        g2 = rdflib.Graph()
        g2.parse(data=str(self.g), format="ttl")

    def test_word_in_graph(self):
        self.g.add_word("test", 5, "synonym", "target_word")
        self.assertTrue(self.g.word_in_graph("test"))
        self.assertFalse(self.g.word_in_graph("tfdfdfest"))

    def test_to_text_file(self):
        # NOTE(review): graph_test_path is not defined in this class —
        # presumably provided by a fixture/base class elsewhere; confirm.
        self.g.parse(self.graph_test_path, format="ttl")
        self.g.to_text_file(self.txt_out_file)
        with open(self.txt_out_file) as f:
            words = sorted([line.strip() for line in f if line.strip()])
        self.assertEqual(words, self.g.to_list())

    def test_good_words(self):
        """Every listed word is non-empty, lower-case, stripped ASCII."""
        self.g.parse(self.graph_test_path, format="ttl")
        for word in self.g.to_list():
            self.assertTrue(word)  # no empty words
            self.assertEqual(unidecode(word.lower().strip()), word)

    def test_is_empty(self):
        self.assertTrue(self.g.is_empty())
        self.assertRaises(AssertionError, self.g._set_root_word_attribute)
        rw_strings = [
            "root_word_string_1",
            "root_word_string_2",
            "root_word_string_3",
        ]
        for w in rw_strings:
            self.g.add_root_word(w)
        # NOTE(review): still "empty" after adding root words — is_empty()
        # apparently ignores root words and only counts derived words.
        self.assertTrue(self.g.is_empty())
        self.assertEqual(rw_strings, self.g.to_list())
        self.g._set_root_word_attribute()
        for w in self.g.root_words:
            self.assertTrue(isinstance(w, str))

    def test_add_several_root_words(self):
        self.g.add_root_word("root_word_string_1")
        self.g.add_root_word("root_word_string_2")
        self.g.add_root_word("root_word_string_3")
        self.g.to_text_file(self.txt_out_file)
        with open(self.txt_out_file) as f:
            words = sorted([line.strip() for line in f if line.strip()])
        self.assertEqual(words, self.g.to_list())

    def test_add_several_root_words_with_previous_graph(self):
        self.g.parse(self.graph_test_path, format="ttl")
        self.g.add_root_word("root_word_string_1")
        self.g.add_root_word("root_word_string_2")
        self.g.add_root_word("root_word_string_3")
        self.g.to_text_file(self.txt_out_file)
        with open(self.txt_out_file) as f:
            words = sorted([line.strip() for line in f if line.strip()])
        self.assertEqual(words, self.g.to_list())
        self.test_list_is_sorted()

    def test_list_is_sorted(self):
        self.assertEqual(sorted(self.g.to_list()), self.g.to_list())

    def test___len__(self):
        self.assertFalse(len(self.g))
        for i in range(1, 10):
            self.g.add_root_word(f"test_{i}")
            self.assertEqual(len(self.g), i)
        self.g.add_word("test", 5, "synonym", "target_word")
        self.assertEqual(len(self.g), i + 1)

    def test_add_relation(self):
        """All *nym relations are accepted; unknown ones raise ValueError."""
        self.g.add_root_word("root_word_string_1")
        self.g.add_word("test", 1, "synonym", "root_word_string_1")
        self.g.add_word("test", 1, "hyponym", "root_word_string_1")
        self.g.add_word("test", 1, "hypernym", "root_word_string_1")
        self.g.add_word("test", 1, "holonym", "root_word_string_1")
        self.assertRaises(
            ValueError,
            self.g.add_word,
            "test",
            1,
            "thing_that_ends_with_nym",
            "root_word_string_1",
        )

    def test_to_xlsx(self):
        self.g.add_root_word("dog")
        self.g.to_xlsx_file(self.xlsx_out_file)

    def test__get_maximum_origin(self):
        self.assertFalse(self.g._get_maximum_origin())
        for i in range(1, 5):
            self.g.add((
                rdflib.Literal("node_uri"),
                rdflib.URIRef("urn:default:baseUri:#comesFrom"),
                rdflib.Literal(f"test-{i}"),
            ))
        self.assertEqual(self.g._get_maximum_origin(), i)

    def test_pop_non_relevant_words(self):
        self.assertFalse(len(self.g))
        for i in range(10):
            self.g.add_word("test", 1, "synonym", "target_word", comesFrom=f"test-{i}")
        # Plain string, not an f-string — the original "f" prefix had no
        # placeholder (ruff F541) and was a no-op.
        self.g.add_word("test2", 1, "synonym", "target_word_2", comesFrom="test-x")
        self.assertEqual(len(self.g), 2)
        self.g.pop_non_relevant_words()
        self.assertEqual(len(self.g), 1)