def setUp(self):
    odx = ODX(0, 1)
    odx.load_gtsf()
    day = dt.datetime.strptime("01/30/18 00:00", "%m/%d/%y %H:%M")
    megas = odx.preprocess_gtsf(day)
    builder = NetworkBuilder(700)
    self.net = builder.build(megas, 1)
import numpy as np
import tensorflow as tf  # TF 1.x API
# Parser, NetworkBuilder, InputLayer, OutputLayer and read_image are project-local


def main():
    sess = tf.Session()
    image = read_image('../data/heart.jpg')
    image = np.reshape(image, [1, 224, 224, 3])  # type: numpy.ndarray
    image = image.astype(np.float32)  # astype returns a new array; assign it back
    parser = Parser('../data/alexnet.cfg')
    network_builder = NetworkBuilder("test")  # type: NetworkBuilder
    network_builder.set_parser(parser)
    network = network_builder.build()  # type: Network
    network.add_input_layer(InputLayer(tf.float32, [None, 224, 224, 3]))
    network.add_output_layer(OutputLayer())
    network.connect_each_layer()
    sess.run(tf.global_variables_initializer())
    fc_layer = sess.run(network.output, feed_dict={network.input: image})
import numpy as np
import tensorflow as tf  # TF 1.x API
from tensorflow.examples.tutorials.mnist import input_data


def main():
    parser = Parser('../data/alexnet.cfg')
    network_builder = NetworkBuilder("test")
    mnist = input_data.read_data_sets("F:/tf_net_parser/datasets/MNIST_data/",
                                      one_hot=True)  # load the data
    network_builder.set_parser(parser)
    network = network_builder.build()  # type: Network
    network.add_input_layer(InputLayer(tf.float32, [None, 28, 28, 1]))
    network.add_output_layer(OutputLayer())
    network.set_labels_placeholder(tf.placeholder(tf.float32, [None, 10]))
    network.connect_each_layer()
    network.set_accuracy()
    network.init_optimizer()
    train_tool = TrainTool()
    train_tool.bind_network(network)
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())  # initialize_all_variables() is deprecated
    for i in range(300):
        batch = mnist.train.next_batch(100)
        feed_dict = {
            network.input: np.reshape(batch[0], [-1, 28, 28, 1]),
            network.labels: batch[1]
        }
        train_tool.train(sess, network.output, feed_dict=feed_dict)
        if (i + 1) % 100 == 0:
            train_tool.print_accuracy(sess, feed_dict)
            train_tool.save_model_to_pb_file(
                sess, '../pb/alexnet-' + str(i + 1) + '/',
                input_data={'input': network.input},
                output={'predict-result': network.output})
            # train_tool.save_ckpt_model('f:/tf_net_parser/save_model/model', sess, global_step=(i+1))
    batch_test = mnist.test.next_batch(100)
    feed_dict = {
        network.input: np.reshape(batch_test[0], [100, 28, 28, 1]),
        network.labels: batch_test[1]
    }
    train_tool.print_test_accuracy(sess, feed_dict)
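# A sketch of reloading the exported .pb directory for inference, assuming
# TrainTool writes a standard TF 1.x SavedModel under the SERVING tag (an
# assumption; the tag and the tensor names below depend on how
# save_model_to_pb_file maps the input/output dicts -- inspect the graph
# to find the real names):
import numpy as np
import tensorflow as tf


def restore_and_predict(export_dir, image):
    with tf.Session(graph=tf.Graph()) as sess:
        tf.saved_model.loader.load(
            sess, [tf.saved_model.tag_constants.SERVING], export_dir)
        # hypothetical tensor names derived from the dict keys used at save time
        inp = sess.graph.get_tensor_by_name('input:0')
        out = sess.graph.get_tensor_by_name('predict-result:0')
        return sess.run(out, feed_dict={inp: np.reshape(image, [-1, 28, 28, 1])})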
class EmotionRecognition:

    def __init__(self):
        self.dataset = DatasetLoader()
        self.networkbuilder = NetworkBuilder()

    def build_network(self):
        self.model = self.networkbuilder.build_vgg()
        # self.load_model()

    def load_saved_dataset(self):
        self.dataset.load_from_save()
        print('[+] Dataset found and loaded')

    def start_training(self):
        self.load_saved_dataset()
        self.build_network()
        if self.dataset is None:
            self.load_saved_dataset()
        print('[+] Training network')
        self.model.fit(self.dataset.images, self.dataset.labels,
                       validation_set=(self.dataset.images_test,
                                       self.dataset.labels_test),
                       n_epoch=100,
                       batch_size=100,
                       shuffle=True,
                       show_metric=True,
                       snapshot_step=200,
                       snapshot_epoch=True,
                       run_id=RUN_NAME)

    def predict(self, image):
        if image is None:
            return None
        image = image.reshape([-1, SIZE_FACE, SIZE_FACE, 1])
        return self.model.predict(image)

    def save_model(self):
        self.model.save(join(SAVE_DIRECTORY, SAVE_MODEL_FILENAME))
        print('[+] Model trained and saved at ' + SAVE_MODEL_FILENAME)

    def load_model(self):
        self.model.load(join(SAVE_DIRECTORY, SAVE_MODEL_FILENAME))
        print('[+] Model loaded from ' + SAVE_MODEL_FILENAME)
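# A minimal driver for the class above -- a sketch, assuming the project
# defines DatasetLoader, build_vgg(), RUN_NAME and the SAVE_* / SIZE_FACE
# constants it references:
recognizer = EmotionRecognition()
recognizer.start_training()  # loads the saved dataset, builds the VGG model, fits
recognizer.save_model()      # persists weights to SAVE_DIRECTORY/SAVE_MODEL_FILENAME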
class Controller(object):

    def __init__(self, start_node: int, end_node: int, render: bool):
        self.__network = NetworkBuilder().build_network
        print("Graph representation build done...\n")
        print("Original node matrix: ")
        print(self.__network.node_matrix.matrix)
        print("######################################################")
        print("Virtual nodes: ")
        print([
            str(node.parent_id) + "_" +
            str(node.id % (len(constants.REQUIRED_SERVICE_FUNCTIONS) + 1))
            for node in self.__network.virtual_nodes
        ])
        print("######################################################")
        self.__start_node = start_node
        self.__end_node = end_node
        self.view = View(self.__network, render)
        self.view.render(True)

    @property
    def net(self):
        return self.__network

    def run(self):
        self.init_phase_2()
        self.view.render()
        self.pruning()
        self.view.render()
        self.dijkstra()

    def init_phase_2(self):
        rows, cols = np.where(self.__network.node_matrix.matrix != 0)
        edges = zip(rows.tolist(), cols.tolist())
        print("Adding edges to virtual nodes: ")
        for edge in edges:
            v_l = self.__network.nodes[edge[0]]
            v_k = self.__network.nodes[edge[1]]
            v_l_virtual_nodes = self.__network.get_virtual_nodes_of_node_id(v_l.id)
            v_k_virtual_nodes = self.__network.get_virtual_nodes_of_node_id(v_k.id)
            for i, v_l_virtual_node in enumerate(v_l_virtual_nodes):
                j_star = i
                for j, v_k_virtual_node in enumerate(v_k_virtual_nodes):
                    if j >= i:
                        subset = set(constants.REQUIRED_SERVICE_FUNCTIONS[i:j])
                        network_functions_v_k = set(v_k_virtual_node.network_functions)
                        # print("i: ", i, "j: ", j, "sfcs: ", subset, "nf: ", network_functions_v_k)
                        if subset.issubset(network_functions_v_k):
                            j_star = j
                print(v_l_virtual_nodes[i].parent_id, "_", i, "---->",
                      v_k_virtual_nodes[j_star].parent_id, "_", j_star)
                self.__network.virtual_node_matrix.make_edge(
                    v_l_virtual_nodes[i], v_k_virtual_nodes[j_star])
        self.view.render()
        print("######################################################")

    def pruning(self):
        print("Removing unnecessary nodes:")
        removed_any = True
        while removed_any:  # repeat passes until no dead-end node is removed
            removed_any = False
            for node in self.__network.virtual_nodes:
                if (node.parent_id != self.__start_node
                        and node.parent_id != self.__end_node
                        and not node.is_removed):
                    to_ids_length = len(
                        self.__network.virtual_node_matrix
                        .get_node_to_neighbor_ids(node))
                    from_ids_length = len(
                        self.__network.virtual_node_matrix
                        .get_node_from_neighbor_ids(node))
                    if to_ids_length == 0 or from_ids_length == 0:
                        for i in range(self.__network.virtual_node_matrix.size):
                            self.__network.virtual_node_matrix.del_edge(i, node.id)
                            self.__network.virtual_node_matrix.del_edge(node.id, i)
                        self.__network.virtual_nodes[node.id].is_removed = True
                        self.view.render()
                        print("Removed: ",
                              str(node.parent_id) + "_" +
                              str(node.id % (len(constants.REQUIRED_SERVICE_FUNCTIONS) + 1)))
                        removed_any = True
        # remove unnecessary starting and ending nodes
        print("Removing unnecessary starting nodes:")
        found = False
        for id in range(len(constants.REQUIRED_SERVICE_FUNCTIONS) + 1):
            if not self.__network.virtual_nodes[id].is_removed:
                if not found:
                    found = True
                else:
                    for i in range(self.__network.virtual_node_matrix.size):
                        self.__network.virtual_node_matrix.del_edge(i, id)
                        self.__network.virtual_node_matrix.del_edge(id, i)
                    self.__network.virtual_nodes[id].is_removed = True
                    self.view.render()
                    print("Removed: ",
                          str(self.__network.virtual_nodes[id].parent_id) + "_" +
                          str(id % (len(constants.REQUIRED_SERVICE_FUNCTIONS) + 1)))
        print("Removing unnecessary destination nodes:")
        found = False
        for id in range(len(self.__network.virtual_nodes) - 1,
                        len(self.__network.virtual_nodes) - 1 -
                        len(constants.REQUIRED_SERVICE_FUNCTIONS) - 1, -1):
            if not self.__network.virtual_nodes[id].is_removed:
                if not found:
                    found = True
                else:
                    for i in range(self.__network.virtual_node_matrix.size):
                        self.__network.virtual_node_matrix.del_edge(i, id)
                        self.__network.virtual_node_matrix.del_edge(id, i)
                    self.__network.virtual_nodes[id].is_removed = True
                    self.view.render()
                    print("Removed: ",
                          str(self.__network.virtual_nodes[id].parent_id) + "_" +
                          str(id % (len(constants.REQUIRED_SERVICE_FUNCTIONS) + 1)))
        print("######################################################")

    def dijkstra(self):
        graph = nx.MultiDiGraph()
        node_matrix = self.__network.virtual_node_matrix.matrix
        rows, cols = np.where(node_matrix != 0)
        edges = zip(rows.tolist(), cols.tolist())
        graph.add_edges_from(edges)
        print("After pruning: ")
        print(node_matrix)
        print("######################################################")
        all_active_nodes = set(rows).union(cols)
        print("Shortest path with service function chaining:")
        for node in nx.dijkstra_path(graph, min(all_active_nodes),
                                     max(all_active_nodes)):
            print(self.__network.virtual_nodes[node].parent_id)
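# A minimal sketch of driving the Controller above; the node ids are
# hypothetical and depend on the network that the constants module builds:
controller = Controller(start_node=0, end_node=5, render=False)
controller.run()  # phase-2 edge construction, pruning, then Dijkstra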
def __init__(self, input_dim, output_dim, training=True):
    self.training = training
    nb = NetworkBuilder()
    with tf.name_scope("Input"):
        self.input = tf.placeholder(tf.float32,
                                    shape=[None, input_dim, input_dim, 1],
                                    name="input")
    with tf.name_scope("Output"):
        self.output = tf.placeholder(tf.float32,
                                     shape=[None, output_dim],
                                     name="output")
    with tf.name_scope("ImageModel"):
        model = self.input
        model = nb.add_batch_normalization(model, self.training)
        model = nb.add_conv_layer(model, output_size=64, feature_size=(4, 4),
                                  padding='SAME', activation=tf.nn.relu)
        model = nb.add_max_pooling_layer(model)
        model = nb.add_dropout(model, 0.1, self.training)
        model = nb.add_conv_layer(model, 64, feature_size=(4, 4),
                                  activation=tf.nn.relu, padding='VALID')
        model = nb.add_max_pooling_layer(model)
        model = nb.add_dropout(model, 0.3, self.training)
        model = nb.flatten(model)
        model = nb.add_dense_layer(model, 256, tf.nn.relu)
        model = nb.add_dropout(model, 0.5, self.training)
        model = nb.add_dense_layer(model, 64, tf.nn.relu)
        model = nb.add_batch_normalization(model, self.training)
        self.logits = nb.add_dense_layer(model, output_dim,
                                         activation=tf.nn.softmax)
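# Wiring a loss for the model above -- a sketch, assuming the enclosing
# class is named ImageModel (the snippet omits the class line) and TF 1.x.
# Note that self.logits already has softmax applied, so a plain
# cross-entropy is appropriate here; tf.nn.softmax_cross_entropy_with_logits
# would apply softmax a second time:
import tensorflow as tf

model = ImageModel(input_dim=28, output_dim=10)
cross_entropy = tf.reduce_mean(
    -tf.reduce_sum(model.output * tf.log(model.logits + 1e-8), axis=1))
train_op = tf.train.AdamOptimizer(1e-3).minimize(cross_entropy)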
class Enterprise:

    def __init__(self, path):
        self.model = Model("ontology.graph")
        self.nodes = dict()
        self.actions = []
        self.network = NetworkBuilder()
        with open(path) as f:
            for line in f:
                if line[0] == '#':
                    continue
                tokens = line.strip().split(' ')
                if len(tokens) < 3:
                    continue
                if tokens[1] == 'type':
                    class_name = self.model.get_class_info(tokens[-1])
                    if class_name == 'N/A':
                        print('UNKNOWN CLASS : ' + tokens[-1])
                        continue
                    else:
                        node = Node(tokens[-1], tokens[0])
                        self.nodes[tokens[0]] = node
                elif tokens[1] == 'isGroupOf':
                    node_grp = NodeGroup(tokens[2], tokens[0], tokens[4])
                    self.nodes[tokens[0]] = node_grp
                elif tokens[1] == 'uses':
                    action = Action(tokens[0], tokens[2], tokens[4])
                    self.actions.append(action)
                elif tokens[1] == 'networkType':
                    self.network.build(tokens)
                elif tokens[1] == 'attachTo':
                    attach_src = tokens[0]
                    if attach_src in self.nodes:
                        src = self.nodes[attach_src]
                        if src.get_entity_type() == 'NodeGroup':
                            members = src.get_members()
                            for member in members:
                                # copy the list: plain assignment would alias
                                # tokens and clobber it for later members
                                new_tokens = list(tokens)
                                new_tokens[0] = member
                                self.network.build(new_tokens)
                        else:
                            self.network.build(tokens)
                    else:
                        self.network.build(tokens)

    def get_entity_type(self, entity_name):
        return self.nodes[entity_name].get_entity_type()

    def print_app_graph(self):
        # for (k, node) in self.nodes.iteritems():
        #     qname = self.model.get_class_info(node.get_class_id())
        #     node_id = node.get_node_id()
        #     print('--- ' + node.get_entity_type())
        #     print(qname + ',' + node_id)
        print('digraph dataflow {')
        for a in self.actions:
            src = self.nodes[a.get_src()]
            target = self.nodes[a.get_target()]
            relation = a.get_relation()
            if src.get_entity_type() == 'NodeGroup':
                src.expand_actions(True, target, relation)
            elif target.get_entity_type() == 'NodeGroup':
                target.expand_actions(False, src, relation)
            else:
                print_edge(a.get_src(), relation, a.get_target())
            if a.get_src() == 'SalesTeam_0':
                print(a.get_src() + '#############' + a.get_target())
        print('}')

    def print_topology(self):
        print('graph topology {')
        self.network.print_topology()
        print('}')
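# A hypothetical input file for the parser above, illustrating the five
# directives it recognizes. Token positions are inferred from the code
# (tokens[1] is the directive; 'isGroupOf' and 'uses' also read tokens[4]),
# but the exact field meanings are assumptions:
#
#   web1 type WebServer
#   team1 isGroupOf Employee size 5
#   team1 uses web1 rel reads
#   lan0 networkType ethernet
#   team1 attachTo lan0 via port1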
def test_build(self):
    builder = NetworkBuilder(700)
    net = builder.build(self.megas, 1)
def __init__(self,
             env_type=Env.MULTI,
             env_dict=None,
             env_file_name=None,
             training_dict=None,
             training_file_name=None,
             env_dict_string=False,
             training_name=None,
             model_load_name=None,
             load_model=False,
             network_type=Network.SA_TO_Q,
             custom_network=None,
             training_mode=Modes.TRAINING,
             use_tensorboard=True,
             print_training=True,
             require_all_params=False,
             gym_env_name=None):
    self.env_type = env_type
    self.training_name = training_name
    self.training_mode = training_mode
    self.network_type = network_type
    self.print_training = print_training
    self.env_file_name = env_file_name
    self.env_dict = env_dict
    self.env_dict_string = env_dict_string
    self.gym_env_name = gym_env_name

    # check for valid environment settings
    if env_type == Env.LEGACY:
        self.env_import = environment
    elif env_type == Env.MULTI:
        self.env_import = multienvironment
    else:
        # a gym environment
        self.env_import = None

    # remove after testing
    # # Load environment arguments
    # if env_file_name is not None:
    #     self.env_args = self.read_dict('./args/environment/' + env_file_name + '.txt')
    #     if env_dict is not None:
    #         for key in env_dict:
    #             self.env_args[key] = env_dict[key]
    # elif env_dict is not None:
    #     if env_dict_string:
    #         env_dict = eval(env_dict)
    #     self.env_args = env_dict
    # else:
    #     self.env_args = None
    #
    # # initialize the environment
    # self.env = self.env_import.Environment(**self.env_args)
    self.initialize_environment()
    # self.num_actions = self.env.action_space()
    self.num_actions = self.action_space()

    # load training parameters and set them as class fields
    if training_mode is not Modes.TESTING:
        # load training arguments
        if training_file_name is not None:
            self.parameter_dict = self.read_dict('./args/training/' +
                                                 training_file_name + '.txt')
            if training_dict is not None:
                for key in training_dict:
                    self.parameter_dict[key] = training_dict[key]
        elif training_dict is not None:
            self.parameter_dict = training_dict
        else:
            self.parameter_dict = None
            raise Exception('no training argument parameters specified')

        # instead of a boolean which turns the requirement on/off, there will
        # be a list of params that are crucial for testing
        # if require_all_params:
        #     for parameter in Parameters:
        #         if parameter not in parameter_dict:
        #             raise Exception('parameter list is missing required parameters for training')

        # initialize all training parameters as None, so those that are not
        # used are still defined
        for parameter in Parameters:
            setattr(self, parameter.name, None)
        # convert the parameter list to class fields
        for parameter in self.parameter_dict:
            # in case string based parameters are supported, this may not be needed
            setattr(self, parameter.name, self.parameter_dict[parameter])

    # Set up the network; custom_network should pass an uncompiled model built
    # with keras layers. Either load a pre-trained model or create a new one.
    if load_model:
        self.model = tf.keras.models.load_model('./models/' + model_load_name + '.h5')
    else:
        # create a new model following one of the preset models, or a custom model
        # image_shape = np.shape(self.env.screenshot())
        image_shape = self.observation_space()
        num_actions = self.num_actions
        if custom_network is not None:
            argument_dict = {
                'image_shape': image_shape,
                'num_actions': num_actions
            }
            if network_type == Network.SM_TO_QA:
                argument_dict['stack_size'] = self.env.stacker.stack_size
            if network_type == Network.SR_TO_QA:
                argument_dict['trace_length'] = self.TRACE_LENGTH
            network_builder = NetworkBuilder(custom_network, network_type,
                                             argument_dict)
            self.model = network_builder.get_model()
        else:
            if self.env_type is not Env.GYM:
                # fix this
                if self.env.frame_stacking:
                    base_size = image_shape[0:2]  # a tuple
                    channels = image_shape[2]     # a scalar
                    stack_size = self.env.stacker.stack_size
                    # example stack - image dimensions: 30 x 40, stack size: 4, channels: 3
                    if self.env.concatenate:
                        # should be width by height by (channels * stack size),
                        # so 30 x 40 x 12
                        image_shape = base_size + (channels * stack_size, )
                    # else:
                    #     should be stack size by height by width by channels,
                    #     so 4 x 30 x 40 x 3
                    #     image_shape = (stack_size,) + base_size
                    # print(image_shape)

            kernel_size = (5, 5)
            # default models for each network type
            if network_type == Network.SA_TO_Q:
                image_input = tf.keras.Input(shape=image_shape)
                conv1 = tf.keras.layers.Conv2D(
                    32, kernel_size=kernel_size,
                    activation=tf.keras.activations.relu,
                    strides=1)(image_input)
                pooling1 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv1)
                drop1 = tf.keras.layers.Dropout(0.25)(pooling1)
                conv2 = tf.keras.layers.Conv2D(
                    64, kernel_size=kernel_size, strides=1,
                    activation=tf.keras.activations.relu)(drop1)
                pooling2 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv2)
                drop2 = tf.keras.layers.Dropout(0.25)(pooling2)
                flat = tf.keras.layers.Flatten()(drop2)
                conv_dense = tf.keras.layers.Dense(
                    100, activation=tf.keras.activations.relu)(flat)
                action_input = tf.keras.Input(shape=(num_actions, ))
                action_dense = tf.keras.layers.Dense(
                    num_actions**2,
                    activation=tf.keras.activations.relu)(action_input)
                merged_dense = tf.keras.layers.concatenate(
                    [conv_dense, action_dense])
                dense1 = tf.keras.layers.Dense(
                    10, activation=tf.keras.activations.relu)(merged_dense)
                output = tf.keras.layers.Dense(
                    1, activation=tf.keras.activations.linear)(dense1)
                self.model = tf.keras.Model(inputs=[image_input, action_input],
                                            outputs=output)
            elif network_type == Network.S_TO_QA:
                image_input = tf.keras.Input(shape=image_shape)
                conv1 = tf.keras.layers.Conv2D(
                    32, kernel_size=kernel_size,
                    activation=tf.keras.activations.relu,
                    strides=1)(image_input)
                pooling1 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv1)
                drop1 = tf.keras.layers.Dropout(0.25)(pooling1)
                conv2 = tf.keras.layers.Conv2D(
                    64, kernel_size=kernel_size, strides=1,
                    activation=tf.keras.activations.relu)(drop1)
                pooling2 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv2)
                drop2 = tf.keras.layers.Dropout(0.25)(pooling2)
                flat = tf.keras.layers.Flatten()(drop2)
                conv_dense = tf.keras.layers.Dense(
                    100, activation=tf.keras.activations.relu)(flat)
                output = tf.keras.layers.Dense(
                    num_actions,
                    activation=tf.keras.activations.linear)(conv_dense)
                self.model = tf.keras.Model(inputs=image_input, outputs=output)
            elif network_type == Network.SM_TO_QA:
                # concat should be false
                stack_size = self.env.stacker.stack_size
                input_layer_list = []
                dense_layer_list = []
                for i in range(stack_size):
                    image_input = tf.keras.Input(shape=image_shape)
                    conv1 = tf.keras.layers.Conv2D(
                        32, kernel_size=kernel_size,
                        activation=tf.keras.activations.relu,
                        strides=1)(image_input)
                    pooling1 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv1)
                    drop1 = tf.keras.layers.Dropout(0.25)(pooling1)
                    conv2 = tf.keras.layers.Conv2D(
                        64, kernel_size=kernel_size, strides=1,
                        activation=tf.keras.activations.relu)(drop1)
                    pooling2 = tf.keras.layers.MaxPooling2D(pool_size=(2, 2))(conv2)
                    drop2 = tf.keras.layers.Dropout(0.25)(pooling2)
                    flat = tf.keras.layers.Flatten()(drop2)
                    # add to the layer lists
                    input_layer_list.append(image_input)
                    dense_layer_list.append(flat)
                merged_dense = tf.keras.layers.concatenate(dense_layer_list)
                dense1 = tf.keras.layers.Dense(
                    100, activation=tf.keras.activations.relu)(merged_dense)
                dense2 = tf.keras.layers.Dense(
                    100, activation=tf.keras.activations.relu)(dense1)
                output = tf.keras.layers.Dense(
                    num_actions,
                    activation=tf.keras.activations.linear)(dense2)
                self.model = tf.keras.Model(inputs=input_layer_list,
                                            outputs=output)
            else:
                raise Exception(
                    'invalid network type or no default model for network type')

    # other variables for training
    self.target_model = None
    self.tensorboard = None
    self.replay_memory = None
    self.reward_list = []
    self.epsilon = 1
    self.epsilon_decay_function = None

    # self.model should now be defined; compile the model if training
    if training_mode is not Modes.TESTING:
        # copy the model if using double Q-learning
        if self.DOUBLE:
            self.target_model = tf.keras.models.clone_model(self.model)
        # redo this logic eventually, support all tf optimizers
        if self.OPTIMIZER == Optimizer.ADAM:
            self.OPTIMIZER = tf.keras.optimizers.Adam
        elif self.OPTIMIZER == Optimizer.SGD:
            self.OPTIMIZER = tf.keras.optimizers.SGD
        if self.OPTIMIZER is None:
            self.OPTIMIZER = tf.keras.optimizers.Adam
        self.model.compile(loss=tf.keras.losses.mean_squared_error,
                           optimizer=self.OPTIMIZER(lr=self.ALPHA))
        if self.DOUBLE:
            self.update_target()

        # test custom directory thing
        # initialize the epsilon decay function; right now, all functions
        # should take 2 args, epsilon and the total number of epochs
        if self.EPSILON_DECAY == Decay.LINEAR:
            self.epsilon_decay_function = self.linear_decay
        else:
            raise Exception('Decay function not found')

        if use_tensorboard:
            self.tensorboard = tf.keras.callbacks.TensorBoard(
                log_dir='logs/{}/{}'.format(training_name, time()),
                batch_size=self.BATCH_SIZE,
                write_grads=True,
                write_images=True)
            self.tensorboard.set_model(self.model)

        if self.UPDATE_FREQUENCY is None:
            self.UPDATE_FREQUENCY = 1
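# A minimal sketch of constructing the agent above. The class name Agent is
# hypothetical (the snippet omits the class line), and the Parameters keys
# shown are inferred from the fields the constructor reads (ALPHA,
# BATCH_SIZE, DOUBLE, OPTIMIZER, EPSILON_DECAY, UPDATE_FREQUENCY); Env,
# Network, Modes, Optimizer and Decay are the project's own enums:
agent = Agent(env_type=Env.MULTI,
              training_name='demo_run',
              network_type=Network.S_TO_QA,
              training_dict={
                  Parameters.ALPHA: 1e-3,
                  Parameters.BATCH_SIZE: 32,
                  Parameters.DOUBLE: False,
                  Parameters.OPTIMIZER: Optimizer.ADAM,
                  Parameters.EPSILON_DECAY: Decay.LINEAR,
                  Parameters.UPDATE_FREQUENCY: 1,
              })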
# painter.draw_raw_road_network(splitter.old_road_network)
# painter.draw_calculated_road_network(splitter.road_network)
# cost = utils.compute_road_network_cost(splitter.road_network)
# print(f"Road network construction cost: {cost}")

import numpy as np

# project-local modules (import paths assumed from the identifiers used below)
import utils
import painter
from grid import Grid
from edges_splitter import EdgesSplitter
from network_builder import NetworkBuilder

t, q = utils.read_terminal_points("input_terminal_points")
p = np.vstack((t, q))
g = Grid(p, set(range(len(t), len(p))))
g.generate()
# painter.draw_grid(g)
# print(g.distance_matrix[0, 1])

nb = NetworkBuilder(g)
network = nb.build_network()

splitter = EdgesSplitter(network)
splitter.calculate()

painter.draw_raw_road_network(splitter.old_road_network)
painter.draw_calculated_road_network(splitter.road_network)

cost = utils.compute_road_network_cost(splitter.road_network)
print(f"Road network construction cost: {cost}")
def do(self, matrix, transcriptome, metabolome, depth, filter, limit,
       queries, subparser_name, starting_compounds, steps,
       number_of_queries, output_directory):
    '''
    Parameters
    ----------
    depth
    filter
    limit
    metabolome
    queries
    subparser_name
    transcriptome
    output_directory
    '''
    nb = NetworkBuilder(self.metadata.keys())
    km = KeggMatrix(matrix, transcriptome)

    abundances_metagenome = \
        {key: km.group_abundances(self.metadata[key], km.reaction_matrix)
         for key in self.metadata.keys()}

    if transcriptome:
        abundances_transcriptome = \
            {key: km.group_abundances(self.metadata[key],
                                      km.reaction_matrix_transcriptome)
             for key in self.metadata.keys()}
        abundances_expression = \
            {key: km.group_abundances(self.metadata[key],
                                      km.reaction_matrix_expression)
             for key in self.metadata.keys()}
    else:
        abundances_transcriptome = None
        abundances_expression = None

    if metabolome:
        abundances_metabolome = Matrix(metabolome)
        # ~ TODO: This WILL NOT WORK - Matrix is no longer an existing class.
        # ~ TODO: I've added a note in the help for network analyzer
        # ~ TODO: that warns the user about this.
    else:
        abundances_metabolome = None

    if subparser_name == self.TRAVERSE:
        logging.info('Traversing network')
        output_lines = \
            nb.traverse(abundances_metagenome,
                        abundances_transcriptome,
                        limit,
                        filter,
                        starting_compounds,
                        steps,
                        number_of_queries)
        self._write_results(
            os.path.join(output_directory, self.TRAVERSE_OUTPUT_FILE),
            output_lines)
    elif subparser_name == self.EXPLORE:
        logging.info("Using supplied queries (%s) to explore network" % queries)
        network_lines, node_metadata = \
            nb.query_matrix(abundances_metagenome,
                            abundances_transcriptome,
                            abundances_expression,
                            queries,
                            depth)
        self._write_results(
            os.path.join(output_directory, self.NETWORK_OUTPUT_FILE),
            network_lines)
        self._write_results(
            os.path.join(output_directory, self.METADATA_OUTPUT_FILE),
            node_metadata)
    elif subparser_name == self.PATHWAY:
        logging.info('Generating pathway network')
        network_lines, node_metadata = \
            nb.pathway_matrix(abundances_metagenome,
                              abundances_transcriptome,
                              abundances_expression,
                              abundances_metabolome,
                              limit,
                              filter)
        self._write_results(
            os.path.join(output_directory, self.NETWORK_OUTPUT_FILE),
            network_lines)
        self._write_results(
            os.path.join(output_directory, self.METADATA_OUTPUT_FILE),
            node_metadata)
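# The method above assumes self.metadata maps a group name to the sample
# columns belonging to that group in the KEGG matrix, e.g. (names are
# illustrative):
#     self.metadata = {'treatment': ['sample_1', 'sample_2'],
#                      'control':   ['sample_3', 'sample_4']}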
from network_builder import NetworkBuilder

nets_available = ["Net1", "Net2", "Net4", "Net5", "Net7", "Net8", "Net9",
                  "Net10", "Net10v2", "Net11", "Net11v2", "Net11v3"]
images_in = 10

for name in nets_available:
    if name == "Net9":
        bn = NetworkBuilder(name, depth=32, num_input_im=images_in,
                            max_batch_size=6, epochs=500,
                            steps_per_epoch=50).build_net()
    else:
        bn = NetworkBuilder(name, depth=32, max_batch_size=6,
                            epochs=500, steps_per_epoch=5).build_net()
    bn.build()
    bn.fit()
    bn.gen_raport()
    del bn  # release the model before building the next network