def main() -> None:
    parser = argparse.ArgumentParser(
        description="Shows AI classes with non-trivial class hierarchies.")
    parser.add_argument("--type",
                        help="AI class type to visualise",
                        choices=["Action", "AI", "Behavior", "Query"],
                        required=True)
    parser.add_argument(
        "--out-names",
        help="Path to which a vtable -> name map will be written",
        required=True)
    args = parser.parse_args()

    all_vtables = ai_common.get_vtables()
    graph = Graph()
    reverse_graph = Graph()
    build_graph(all_vtables, args.type, graph, reverse_graph)

    interesting_nodes = set()
    node_colors = dict()
    colors = [
        "#c7dcff", "#ffc7c7", "#ceffc7", "#dcc7ff", "#fffdc9", "#c9fff3",
        "#ffe0cc", "#ffcffe", "#96a8ff"
    ]

    # Assign one colour per non-trivial connected component.
    components = graph.find_connected_components()
    num_nontrivial_cc = 0
    for i, comp in enumerate(components):
        if len(comp) == 2:
            continue
        for node in comp:
            node_colors[node] = colors[i % len(colors)]
        num_nontrivial_cc += 1
        interesting_nodes |= set(comp)

    # Emit a Graphviz digraph containing only the interesting nodes.
    print("digraph {")
    print("node [shape=rectangle]")
    for u in graph.nodes:
        if u not in interesting_nodes:
            continue
        for v in graph.nodes[u]:
            shape_u = "shape=component," if "[V]" not in u else ""
            shape_v = "shape=component," if "[V]" not in v else ""
            print(f'"{u}" [{shape_u}style=filled, fillcolor="{node_colors[u]}"]')
            print(f'"{v}" [{shape_v}style=filled, fillcolor="{node_colors[v]}"]')
            print(f'"{u}" -> "{v}"')
    print("}")
    print(f"# {len(components)} connected components")
    print(f"# {num_nontrivial_cc} non-trivial connected components")

    # Dump integers (vtable addresses) as hexadecimal scalars.
    yaml.add_representer(
        int,
        lambda dumper, data: yaml.ScalarNode('tag:yaml.org,2002:int', f"{data:#x}"),
        Dumper=yaml.CSafeDumper)
    with Path(args.out_names).open("w") as f:
        yaml.dump(_known_vtables, f, Dumper=yaml.CSafeDumper)
def topologically_sort_vtables(all_vtables: dict, type_: str) -> List[int]:
    graph = Graph()
    for name, vtables in all_vtables[type_].items():
        # De-duplicate while preserving order.
        classes = list(dict.fromkeys(reversed(vtables)))
        for i in range(len(classes) - 1):
            graph.add_edge(classes[i + 1], classes[i])
    return graph.topological_sort()
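A small standalone illustration of the ordered de-duplication used above (the addresses are made up for illustration, not taken from the real vtable data): dict.fromkeys keeps only the first occurrence of each key and preserves insertion order.

# Hypothetical vtable addresses; duplicates allowed in the input.
vtables = [0x300, 0x200, 0x100, 0x100]
classes = list(dict.fromkeys(reversed(vtables)))  # de-duplicate, keep reversed order
print(classes)  # [256, 512, 768], i.e. 0x100, 0x200, 0x300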
def train_model_old(config, train_set, test_set, num_training_samples,
                    num_test_samples):
    graph = Graph(skeleton_edges, is_directed=True)
    shape = (config.batch_size, 3, 300, 25, 2)
    model = create_model(config, graph, shape)
    model.compile(optimizer=keras.optimizers.SGD(learning_rate=config.base_lr,
                                                 momentum=0.9,
                                                 nesterov=True),
                  loss=keras.losses.CategoricalCrossentropy(from_logits=True),
                  metrics=["accuracy", "top_k_categorical_accuracy"])
    model.summary()

    lr_scheduler = keras.optimizers.schedules.PiecewiseConstantDecay(
        config.steps,
        [config.base_lr**i for i in range(1, len(config.steps) + 2)])
    callbacks = [
        keras.callbacks.LearningRateScheduler(lr_scheduler),
        keras.callbacks.TensorBoard(os.path.join(
            config.log_path, time.strftime("training_%Y_%m_%d-%H_%M_%S")),
                                    profile_batch="200,250"),
        keras.callbacks.ModelCheckpoint(os.path.join(config.checkpoint_path,
                                                     "weights.{epoch:02d}.h5"),
                                        save_best_only=True)
    ]

    model.fit(train_set,
              epochs=config.epochs,
              validation_data=test_set,
              callbacks=callbacks)
def _build_model(self, config: dict, data_shape: tuple,
                 num_classes: int) -> tuple:
    """
    Build the network model, loss function, optimizer and learning rate
    scheduler for the given configuration.

    :param config: hyperparameter configuration
    :return: tuple (model, loss function, optimizer, learning rate scheduler)
    """
    skeleton_edges, center_joint = import_dataset_constants(
        self._base_config.dataset, ["skeleton_edges", "center_joint"])
    graph = Graph(skeleton_edges, center_joint=center_joint)

    # https://pytorch.org/docs/stable/generated/torch.nn.Module.html
    # noinspection PyPep8Naming
    Model = import_model(self._base_config.model)
    model = Model(data_shape,
                  num_classes,
                  graph,
                  mode=self._base_config.mode,
                  **self._base_config.model_args).cuda()
    loss_function = torch.nn.CrossEntropyLoss().cuda()
    optimizer = session_helper.create_optimizer(config["optimizer"], model,
                                                config["base_lr"],
                                                **config["optimizer_args"])
    lr_scheduler = session_helper.create_learning_rate_scheduler(
        config["lr_scheduler"], optimizer, **config["lr_scheduler_args"])
    return model, loss_function, optimizer, lr_scheduler
def build_imu_graph(data_shape: tuple,
                    num_signals: int = 0,
                    temporal_back_connections: int = 1,
                    inter_signal_back_connections: bool = False) -> Graph:
    sequence_length, num_signals_0 = data_shape
    assert num_signals == 0 or (num_signals_0 % num_signals) == 0
    if num_signals == 0:
        num_signals = num_signals_0
    num_vertices = sequence_length * num_signals
    graph_edges = []
    for i in range(0, num_vertices, num_signals):
        # Spatial connections (connections between all values at a single time step).
        # IMU data is in form (sequence_length = [N + 1], num_signals = [M + 1])
        # with samples TnSm and 0 <= n <= N; 0 <= m <= M.
        # The memory layout for vertices will therefore be:
        # T0S0, T0S1, T0S2, ..., T0SM, T1S0, T1S1, ..., T1SM, ..., TNSM
        for j in range(num_signals):
            for k in range(j + 1, num_signals):
                graph_edges.append((i + j, i + k))
                graph_edges.append((i + k, i + j))
        # Temporal back connections.
        for j in range(min(i // num_signals, temporal_back_connections)):
            for k in range(num_signals):
                for m in range(num_signals):
                    if k == m or inter_signal_back_connections:
                        graph_edges.append((i - num_signals * (j + 1) + k, i + m))
    return Graph(graph_edges, num_vertices)
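The layout the comments describe places sample TnSm at index n * num_signals + m. A tiny sanity check of that indexing (the helper name is made up for illustration, not part of the original module):

def imu_vertex_index(n: int, m: int, num_signals: int) -> int:
    # Row-major layout over (time step, signal), as described above.
    return n * num_signals + m

# With 3 signals: T0S0 -> 0, T0S2 -> 2, T1S0 -> 3, T2S1 -> 7.
assert imu_vertex_index(0, 2, 3) == 2
assert imu_vertex_index(1, 0, 3) == 3
assert imu_vertex_index(2, 1, 3) == 7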
def __init__(self, fname):
    """
    2013-07-24: Create a graph of the file
    """
    self.astGraph = Graph()
    self.processFile(fname)
def create_graph(self, schedule):
    if os.path.isfile(self.graph_path):
        self.graph = GraphDecoder().load_from_file(self.graph_path)
    else:
        self.graph = Graph()
        self.create_stations(schedule.stops)
        self.create_sections(schedule)
        GraphEncoder().save_to_file(self.graph, self.graph_path)
    print('Stations: {}, Sections: {}'.format(len(self.graph.stations),
                                              len(self.graph.sections)))
def __init__(self, data_shape, num_classes: int, graph, **kwargs):
    super().__init__()
    num_layers = kwargs.get("num_layers", 10)
    edges = kwargs["rgb_patch_groups_edges"]
    edges = [tuple(map(int, edge.split(", "))) for edge in edges]
    graph = Graph(edges)
    self.agcn = agcn.Model(data_shape["rgb"],
                           num_classes,
                           graph,
                           num_layers=num_layers,
                           without_fc=kwargs.get("without_fc", False))
def index():
    api_url = "https://api.stackexchange.com/2.2/questions?page=1&pagesize=100&order=desc&sort=activity&site=datascience"
    file_url = ".//resources//"
    extract = ExtractData(api_url, file_url)
    graph = Graph()
    extract.extractData()
    graph.createGraph(extract.stack_exchange_tags)
    if request.method == 'GET':
        return render_template('index.html', show_tag=False)
    else:
        query = request.form.to_dict()
        associated_tags = graph.findNeighborsOfaTag(str(query['query']).lower())
        return redirect(url_for('tag_results', tags=associated_tags))
def main():
    g = Graph()
    v1 = Matrix([[3, 0]]).T
    v2 = Matrix([[1, 2]]).T
    g.add_vector(v1, color='tab:green')
    g.add_vector(v2, color='tab:red')

    p1 = v1.copy()
    p2 = orthogonal(p1, v2)
    pprint([p1, p2])
    g.add_vector(p1, color='tab:blue')  # same as v1 (green) so it hides it
    g.add_vector(p2, color='tab:orange')

    g.show()
def main():
    g = Graph()
    v1 = Matrix([[2, 1]]).T
    v2 = Matrix([[1, 1]]).T
    g.add_vector(v1)
    g.add_vector(v2)

    p1 = v1.copy()
    p2 = orthogonal(p1, v2)
    g.add_vector(p1, color='tab:blue')
    g.add_vector(p2, color='tab:blue')

    n1 = normalize(p1)
    n2 = normalize(p2)
    g.add_vector(n1, color='tab:green')
    g.add_vector(n2, color='tab:green')

    g.show()
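The helpers orthogonal and normalize are not defined in this snippet; a plausible NumPy sketch of the Gram-Schmidt steps they appear to perform (an assumption for illustration, not the original implementation):

import numpy as np

def orthogonal(p1, v2):
    # Component of v2 orthogonal to p1: v2 - proj_p1(v2).
    p1 = np.asarray(p1, dtype=float).reshape(-1)
    v2 = np.asarray(v2, dtype=float).reshape(-1)
    return v2 - (v2 @ p1) / (p1 @ p1) * p1

def normalize(p):
    # Scale to unit length.
    p = np.asarray(p, dtype=float).reshape(-1)
    return p / np.linalg.norm(p)

# For v1 = [2, 1], v2 = [1, 1]: p2 = [-0.2, 0.4], which is orthogonal to v1.
p2 = orthogonal([2, 1], [1, 1])
print(p2, p2 @ np.array([2, 1]))  # dot product is 0 (up to rounding)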
def get_dataset(filepath):
    with open(filepath, 'r', encoding="utf-8") as fp:
        reader = csv.reader(fp)
        source = []
        for row in reader:
            source.append(row)
    # Drop the CSV header row.
    source = source[1:]

    def prepare(source):
        print("preparing data...")
        positive_graph = nx.Graph()
        negative_graph = nx.Graph()
        for row in source:
            u = row[1].lower()
            v = row[2].lower()
            w = float(row[3])
            # if w < 0:
            #     print(u + "---" + v + "---" + str(w))
            if w > 0:
                positive_graph.add_edge(u, v, weight=w)
            if w < 0:
                negative_graph.add_edge(u, v, weight=w)
        print("positive_number_of_nodes: " + str(positive_graph.number_of_nodes()))
        print("positive_number_of_edges: " + str(positive_graph.number_of_edges()))
        print("negative_number_of_nodes: " + str(negative_graph.number_of_nodes()))
        print("negative_number_of_edges: " + str(negative_graph.number_of_edges()))
        return positive_graph, negative_graph

    positive_graph, negative_graph = prepare(source)
    my_graph = Graph(positive_graph, negative_graph)
    print("getting triplets...")
    del source
    triplets = my_graph.get_triplets()
    vocab = my_graph.vocab.getnode2id()
    return triplets, vocab
import numpy as np
from sympy import Matrix

from util.graph import Graph

u = np.array([-1, 1])
v = np.array([2, 3])

# Solve Ax = 0 where A = (u v)
A = np.array([u, v, [0, 0]]).T
A, _ = Matrix(A).rref()
A = np.array(A)
print(A)

# u and v are linearly independent: the only scalars k and c with
# k*u + c*v = 0 are k = c = 0.
g = Graph()
g.add_vector(u, color='b')
g.add_vector(v, color='g')
g.show()
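For reference, a standalone SymPy check of the expected reduced form (separate from the script above): the RREF has a pivot in each of the first two columns, so the homogeneous system has only the trivial solution.

from sympy import Matrix

# Augmented matrix (u v | 0) with u = [-1, 1], v = [2, 3].
rref, pivots = Matrix([[-1, 2, 0], [1, 3, 0]]).rref()
print(rref)    # Matrix([[1, 0, 0], [0, 1, 0]])
print(pivots)  # (0, 1)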
x, y = symbols('x y')
eq1 = Eq(2 * x - y, 0)
eq2 = Eq(-x + 2 * y, 3)
pprint(eq1)
pprint(eq2)
result = linsolve([eq1, eq2], [x, y])
print('x, y:', pretty(result))

# Row "picture": the two lines intersect at [1, 2].
g_row = plot(solve(eq1, y)[0],
             show=False,
             line_color='tab:blue',
             xlim=[-5, 5],
             ylim=[-5, 5])
g_row.append(plot(solve(eq2, y)[0], show=False, line_color='tab:blue')[0])
g_row.show()

# Column "picture"
g_col = Graph()
M, b = linear_eq_to_matrix([eq1, eq2], x, y)

# Show the column vectors in green.
u = np.array(M.col(0)).T[0]
v = np.array(M.col(1)).T[0]
g_col.add_vector(u, color='tab:green')
g_col.add_vector(v, color='tab:green')

# Show [0, 3] in orange (2x the second column, added to the first).
w = 2 * v
g_col.add_vector(w, start=u, color='tab:orange')
g_col.show()
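A quick NumPy-only check of the column picture (illustrative, separate from the plotting code above): with the solution x = 1, y = 2, one copy of the first column plus two copies of the second gives b = [0, 3].

import numpy as np

# Columns of M for eq1, eq2: u = [2, -1], v = [-1, 2]; right-hand side b = [0, 3].
u = np.array([2, -1])
v = np.array([-1, 2])
print(1 * u + 2 * v)  # [0 3]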
    lr_scheduler = keras.optimizers.schedules.PiecewiseConstantDecay(
        config.steps,
        [config.base_lr**i for i in range(1, len(config.steps) + 2)])
    callbacks = [
        keras.callbacks.LearningRateScheduler(lr_scheduler),
        keras.callbacks.TensorBoard(os.path.join(
            config.log_path, time.strftime("training_%Y_%m_%d-%H_%M_%S")),
                                    profile_batch="200,250"),
        keras.callbacks.ModelCheckpoint(os.path.join(config.checkpoint_path,
                                                     "weights.{epoch:02d}.h5"),
                                        save_best_only=True)
    ]

    model.fit(train_set,
              epochs=config.epochs,
              validation_data=test_set,
              callbacks=callbacks)


if __name__ == "__main__":
    cf = get_config()
    setattr(cf, "kernel_regularizer", keras.regularizers.l2(cf.weight_decay))
    graph = Graph(skeleton_edges, is_directed=True)
    model = create_model(cf, graph, data_shape)
    training_procedure = ModelTraining(cf, model, *load_data(cf))
    training_procedure.start()
from util.graph import Graph
from util.showing_functions import *
from util.path_finding import *
import time

graph = Graph({
    'A': ['B', 'C'],
    'B': ['D', 'A'],
    'C': ['D', 'A'],
    'D': ['E', 'F', 'B', 'C'],
    'E': ['D'],
    'F': ['D']
})

print()
print('The graph')
print(graph)
print()

print()
print('The Breadth-First Search')
start_time = time.time()
show_sorted_traversal(graph.bfs('A'))
print("--- %s milliseconds ---" % ((time.time() - start_time) * 1000))
print()

print()
print('The shortest path between two vertices')
start_time = time.time()
shortest_path('A', 'E', graph.bfs('A'))
print()
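The return format of graph.bfs and the signature of shortest_path are specific to the util helpers above. For comparison, a self-contained BFS shortest-path sketch on the same adjacency dict, using only the standard library (names and structure are illustrative assumptions):

from collections import deque

adjacency = {
    'A': ['B', 'C'], 'B': ['D', 'A'], 'C': ['D', 'A'],
    'D': ['E', 'F', 'B', 'C'], 'E': ['D'], 'F': ['D'],
}

def bfs_shortest_path(adj, start, goal):
    # Standard BFS: record each visited vertex's predecessor,
    # then walk back from the goal to rebuild the path.
    parents = {start: None}
    queue = deque([start])
    while queue:
        node = queue.popleft()
        if node == goal:
            path = []
            while node is not None:
                path.append(node)
                node = parents[node]
            return path[::-1]
        for neighbour in adj[node]:
            if neighbour not in parents:
                parents[neighbour] = node
                queue.append(neighbour)
    return None

print(bfs_shortest_path(adjacency, 'A', 'E'))  # ['A', 'B', 'D', 'E']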