Example #1
# Imports needed by this snippet; `timeout` and `Quota` are assumed to come from
# the `interruptingcow` package, and `make_graph` from the surrounding module.
import time

import networkx as nx

def calc_ged(recepie1, recepie2, timeout_val=600):
    """Approximate the graph edit distance (GED) between two recipe graphs."""
    start_time = time.time()
    G1 = make_graph(recepie1)
    G2 = make_graph(recepie2)
    ged = None

    try:
        status = "OK"
        with timeout(Quota(timeout_val), exception=RuntimeError):
            # optimize_graph_edit_distance yields successively better approximations;
            # iterate until it converges or the time quota expires.
            for ged in nx.optimize_graph_edit_distance(
                    G1, G2, node_match=lambda n1, n2: n1['op'] == n2['op']):
                pass

    except RuntimeError as e:
        status = "Timeout"

    except Exception as e:
        status = "Exception: " + str(e)

    return {
        "recepie_i": recepie1,
        "recepie_j": recepie2,
        "ged": ged,
        "time": time.time() - start_time,
        "status": status
    }
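
A minimal usage sketch for this helper (the `recepies` collection and the 60-second quota are illustrative assumptions, not part of the example above):

from itertools import combinations

# Hypothetical driver loop; `recepies` is whatever sequence of recipe objects make_graph accepts.
results = []
for r1, r2 in combinations(recepies, 2):
    row = calc_ged(r1, r2, timeout_val=60)
    # On "Timeout", `ged` holds the best approximation reached before interruption (or None).
    results.append(row)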
Example #2
def main(distribution, count):
    """Fit a linear regression to past BTC prices, predict `count` future points
    spaced `distribution` days apart, and plot past and predicted values together."""
    past_days, past_values, fieldnames = datasets.btc_csv_data()

    model = models.linear_regression(past_days, past_values)

    future_days = [[x * distribution + past_days[-1][0]]
                   for x in range(count)]
    # Convert the prediction to a plain list so the `+` below concatenates
    # rather than attempting element-wise NumPy addition.
    future_values = list(model.predict(future_days))

    total_days = past_days + future_days
    total_values = past_values + future_values

    utils.make_graph(total_days, total_values, fieldnames[0], fieldnames[1])
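
A possible command-line entry point for this function (the flag names, types and defaults below are assumptions, not taken from the example):

# Hypothetical CLI wrapper around main(); flag names and defaults are made up.
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Extrapolate BTC prices and plot them.')
    parser.add_argument('--distribution', type=int, default=1,
                        help='spacing (in days) between predicted points')
    parser.add_argument('--count', type=int, default=30,
                        help='number of future points to predict')
    args = parser.parse_args()
    main(args.distribution, args.count)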
Example #3
def goto(start: int, end: int) -> Dict[str, Any]:
    sess = Session(bind=engine)
    graph = make_graph(sess)
    # Try a breadth-first path first, then fall back to depth-first search.
    try:
        paths = bf_paths(graph, start)
        path = paths[(start, end)]
    except KeyError:
        try:
            paths = df_paths(graph, start)
            path = paths[(start, end)]
        except KeyError:
            print("search for path failed.")
            return "search for path failed"
           
    print(f"it will take roughly under {9 * len(path) / 60} minutes to make {len(path)} moves")
    for direction in path:
        moved_to = move(direction)
        curr_room_id = moved_to['room_id']
        exits = moved_to['exits']
        curr_room = sess.query(Room).filter(Room.room_id == curr_room_id).first()
        directions = dict()
        for exit_dir in exits:
            # Resolve the `<direction>_to` column on the Room row via getattr instead of eval().
            directions[exit_dir] = getattr(curr_room, f'{exit_dir}_to')

        for direction, room_id in directions.items():
            if not room_id:
                goto_then_move_dir.append((curr_room_id, direction))
                print('appending a tuple! ', end='')

        print(f"backtracking... {moved_to['room_id']}: {moved_to['title']}")
    return moved_to
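
A hypothetical call site, assuming the module-level `goto_then_move_dir` list referenced above is defined next to the function (the room ids are made up):

# Hypothetical usage; on failure goto() returns the string "search for path failed".
destination = goto(start=0, end=55)
if isinstance(destination, dict):
    print('Arrived at room', destination['room_id'])
print('Rooms with unexplored exits recorded so far:', goto_then_move_dir)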
Example #4
    def select_isomer(self, mother, scaffold, latent_vector):
        """\
        Return an isomer-selection vector and the answer one-hot.

        Returns
        -------
        retval: isomer-selection latent vector of shape (len(isomers),)
        target: answer one-hot of shape (len(isomers),)
            where `isomers` are the isomers of `mother`.
        """
        #sample possible isomer
        m_mother = Chem.MolFromSmiles(mother)
        isomer_candidates = utils.enumerate_molecule(
            mother)  # list of isomer SMILESs
        isomers = []
        for s in isomer_candidates:
            m = Chem.MolFromSmiles(s)
            if m.HasSubstructMatch(Chem.MolFromSmiles(scaffold),
                                   useChirality=True):
                isomers.append(s)
        graph_vectors = []

        #make graph for each isomer
        for s in isomers:
            g, h = utils.make_graph(s,
                                    extra_atom_feature=True,
                                    extra_bond_feature=True)
            self.embede_graph(g, h)
            for k in range(len(self.prop_select_isomer_U)):
                self.mpnn(g, h, self.prop_select_isomer_U[k],
                          self.prop_select_isomer_C[k], latent_vector)
            graph_vectors.append(utils.average_node_state(h))
        graph_vectors = torch.cat(graph_vectors, 0)
        # -> (len(isomers), dim_of_node_vector)
        latent_vectors = latent_vector.repeat(len(isomers), 1)
        # -> (len(isomers), dim_of_node_vector + N_conditions)
        retval = torch.cat([graph_vectors, latent_vectors], -1)
        # -> (len(isomers), 2*dim_of_node_vector + N_conditions)

        #FC layer
        retval = F.relu(self.select_isomer1(retval))
        retval = F.relu(self.select_isomer2(retval))
        retval = self.select_isomer3(retval)
        retval = retval.view(-1)  # (len(isomers),)
        retval = torch.sigmoid(retval)
        target = []

        #check which isomer is same as mother
        for s in isomers:
            if m_mother.HasSubstructMatch(Chem.MolFromSmiles(s),
                                          useChirality=True):
                target.append(1)
            else:
                target.append(0)
        target = utils.create_var(torch.Tensor(target))  # (len(isomers),)

        return retval, target, isomers
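
One way the returned tensors might feed into training (a sketch; the loss choice and the surrounding objects are assumptions, not taken from the example):

# Hypothetical training step; `model`, `mother`, `scaffold`, `latent` are created elsewhere.
import torch.nn.functional as F

retval, target, isomers = model.select_isomer(mother, scaffold, latent)
# Both retval and target have shape (len(isomers),) and retval is already sigmoid-activated,
# so a plain binary cross-entropy is a natural fit.
isomer_loss = F.binary_cross_entropy(retval, target)
isomer_loss.backward()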
Example #5
def callback_inline(call):
    date_str, today = call.data.split(' ')
    # proxies = settings.proxy
    data_tb = utils.get_data_tb(today)  #, proxies=proxies)
    plt = utils.make_graph(data_tb, date_str, save=False)
    plot_buffer = BytesIO()
    with plot_buffer as plot:
        plt.savefig(plot, format='png')
        plot.seek(0)
        bot.send_photo(call.message.chat.id, plot.getvalue())
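
The handler above presumably runs under pyTelegramBotAPI; a sketch of the surrounding setup (the token placeholder and the catch-all filter are assumptions):

# Hypothetical registration of the callback above with pyTelegramBotAPI.
import telebot

bot = telebot.TeleBot('YOUR_BOT_TOKEN')               # placeholder token

@bot.callback_query_handler(func=lambda call: True)   # catch-all filter is an assumption
def callback_inline(call):
    ...  # body as shown in the example above

bot.polling()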
Example #6
    # precision_recall_fscore_support returns (precision, recall, f-score, support);
    # call it once and unpack instead of indexing two separate calls.
    p, r, f, _ = precision_recall_fscore_support(dev_x, pred_x, average='micro')
    total_precision.append(p)
    total_recall.append(r)
    total_f1.append(f)
    print("intermediate metrics: ", p, r, f)

    X = []
    y = []
    X_pred = []
    y_pred = []
    graph = defaultdict(dict)
    graph2 = defaultdict(dict)

    data_val = val
    make_graph(data_val, val_y, X, y, pred_x, X_pred, y_pred)
    characters = entities_2_graph(X, y)
    characters2 = entities_2_graph(X, y_pred)

    graph = prepare_eval(characters, y, graph, param='gold')
    graph2 = prepare_eval(characters2, y_pred, graph2, param='pred')

    if setting != "2class":
        eval(graph, graph2, graph_total_precision, graph_total_recall,
             graph_total_f1)
    else:
        eval_2class(graph, graph2, graph_total_precision, graph_total_recall,
                    graph_total_f1)

print(setting, 'indicators:', indicators, 'window_size:', window_size)
print(np.mean(total_precision), np.mean(total_recall), np.mean(total_f1))
Example #7
import sys

sys.path.append('midterm_presentation/scripts')

from utils import make_graph

make_graph(with_red_circle=True)
Example #8
    def __init__(self, pathToPlugin, pathToFolder, parameterString, osSystem):

        self.pathToFolder = pathToFolder
        self.parameterString = parameterString
        self.osSystem = int(osSystem)
        self.originalData = []
        self.randomData = []
        if self.osSystem == 1:
            self.pathToPlugin = '\\'.join(pathToPlugin.split('\\')[:-1])
        else:
            self.pathToPlugin = '/'.join(pathToPlugin.split('/')[:-1])

        # set parameters
        self.randn, self.sigma, self.block, self.small, self.factr = self.parameterString.split(
            ",")

        self.randn = int(self.randn)
        self.sigma = float(self.sigma)
        self.block = float(self.block)
        self.small = float(self.small)
        self.factr = float(self.factr)
        self.get_parameters()

        # go to selected folder and search filtered image
        os.chdir(self.pathToFolder)
        self.filterImg = glob.glob('*_filter.tif')

        if len(self.filterImg) == 0:
            print(
                "ERROR: No pre-processed image ('*_filter.tif') was found. Select pre-processing to create the pre-processed image."
            )
        else:
            self.imgRaw = skimage.io.imread(self.filterImg[0],
                                            plugin='tifffile')
            if len(self.imgRaw.shape) > 2:
                if self.imgRaw.shape[2] in (3, 4):
                    self.imgRaw = np.swapaxes(self.imgRaw, -1, -3)
                    self.imgRaw = np.swapaxes(self.imgRaw, -1, -2)
                self.slices = len(self.imgRaw)
            else:
                self.slices = 1

            # find and open image mask
            self.maskImg = glob.glob('*_mask.tif')
            self.mask = skimage.io.imread(self.maskImg[0],
                                          plugin='tifffile') > 0

            print('Start extraction of image', self.filterImg[0])
            for i in range(self.slices):
                print('\n', i + 1, 'of', self.slices)
                if self.slices == 1:
                    self.imgSlice = self.imgRaw.copy()
                else:
                    self.imgSlice = self.imgRaw[i]
                self.imgGaussian = skimage.filters.gaussian(
                    self.imgSlice, self.sigma)
                self.imgTube, self.imgSkeleton = utils.skeletonize_graph(
                    self.imgGaussian, self.mask, self.sigma, self.block,
                    self.small, self.factr)
                if np.sum(self.imgSkeleton) == 0:
                    print(
                        "ERROR: No skeleton was extracted from the selected image. Check the parameters and try again."
                    )
                else:
                    self.imgNodes = utils.node_graph(self.imgSkeleton > 0,
                                                     self.imgGaussian)

                    self.originalGraph, self.originalPosition = utils.make_graph(
                        self.imgNodes, self.imgGaussian)
                    self.originalNormalizedGraph, self.originalProperties, self.unifiedGraph = self.processGraph(
                        self.originalGraph, self.originalPosition,
                        self.imgGaussian, self.mask)
                    self.originalData.append([
                        i, self.originalNormalizedGraph, self.originalPosition,
                        self.originalProperties
                    ])

                    for r in range(self.randn):
                        self.randomGraph, self.randomPosition = utils.randomize_graph(
                            self.unifiedGraph, self.originalPosition,
                            self.mask)
                        self.randomNormalizedGraph, self.randomProperties, _ = self.processGraph(
                            self.randomGraph, self.randomPosition,
                            self.imgGaussian, self.mask)
                        self.randomData.append([
                            i, self.randomNormalizedGraph, self.randomPosition,
                            self.randomProperties
                        ])

                    if i == 0:
                        print('Export plot.')
                        self.plotSkeleton(self.originalData, self.randomData)

            if np.sum(self.imgSkeleton) != 0:
                print('\nExport data.')
                self.saveData(self.originalData, self.randomData)
Example #9
import sys

sys.path.append('midterm_presentation/scripts')

from utils import make_graph

make_graph(with_red_circle=False)
Example #10
imO=skimage.io.imread(path+aa,plugin='tifffile')[0]                             # open first frame of actin image
imT=skimage.io.imread(path+gg,plugin='tifffile')[0]                             # open Golgi image
mask=skimage.io.imread(path+'mask.tif',plugin='tifffile')>0                     # open mask

track=utils.xmlread(path+'track.xml')                                           # read Golgi tracking results
T=len(track)                                                                    # get number of tracks

print('extract','segment')  
imI=utils.im2d3d(imO)                                                           # if 2D image convert to 3D
imG=skimage.filters.gaussian(imI,sigma)                                         # apply Gaussian filter                                    
imR,imA=utils.skeletonize_graph(imG,mask,sigma,block,small,factr)               # filter and skeletonize actin image
imE=utils.node_graph(imA>0,imG)                                                 # detect filaments and network nodes
     
print('extract','graph')     
gBo,pos=utils.make_graph(imE,imG)                                               # construct graph from filament and node image
gBu=utils.unify_graph(gBo)                                                      # project multigraph to simple graph
gBc=utils.connect_graph(gBu,pos,imG)                                            # connect disconnected components of graph
gBx=utils.centralize_graph(gBc)                                                 # compute edge centrality measures
gBn=utils.normalize_graph(gBx)                                                  # normalize total edge capacity to one    
         
#%%############################################################################# plot data

print('export','plot')  

aspect=2.0                                                                      # set aspect ratio                
alpha=1.0                                                                       # set transparency
lw=1.5                                                                          # set line width        

plt.clf()
gs=mpl.gridspec.GridSpec(1,2,width_ratios=[1,1],height_ratios=[1],left=0.01,bottom=0.01,right=0.99,top=0.99,wspace=0.1,hspace=0.1)                                                       
Example #11
    def sample(self,
               s1=None,
               s2=None,
               latent_vector=None,
               condition1=None,
               condition2=None,
               stochastic=False):
        """\
        Parameters
        ----------
        s1: whole SMILES str
            If given, its graph becomes a latent vector to be decoded.
        s2: scaffold SMILES str
            Required; must not be None.
        latent_vector: None | torch.autograd.Variable
            A latent vector to be decoded.
            Not used if `s1` is given.
            If both `latent_vector` and `s1` are None,
            a latent vector is sampled from the standard normal.
        condition1: list[float] | None
            [ target_value1, target_value2, ... ]
            If None, target values are sampled from uniform [0, 1].
            Can be an empty list for unconditional sampling.
        condition2: list[float] | None
            [ scaffold_value1, scaffold_value2, ... ]
            If None, scaffold values are sampled from uniform [0, 1].
            Can be an empty list for unconditional sampling.
        stochastic: bool
            See `utils.probability_to_one_hot`.

        Returns
        -------
        str | None
            The SMILES string of the isomer selected for the generated molecule,
            or None if no scaffold is given, graph construction fails,
            or the decoded graph is not a valid molecule.
        """
        max_add_nodes = 100
        max_add_edges = 5

        if s2 is None:
            print('A scaffold SMILES (`s2`) must be given when sampling.')
            return None

        # Embed the scaffold edge/node vectors.
        # If `s1` is given, convert its graph to a latent vector.
        if s1 is not None:
            g_save, h_save, scaffold_g_save, scaffold_h_save = utils.make_graphs(
                s1, s2)
            if g_save is None and h_save is None:
                return None
            g, h, scaffold_g, scaffold_h = utils.make_graphs(
                s1, s2, extra_atom_feature=True, extra_bond_feature=True)

            self.embede_graph(g, h)
            self.embede_graph(scaffold_g, scaffold_h)

            self.encode(g, h)
            encoded_vector = self.cal_encoded_vector(h)
            latent_vector, mu, logvar = self.reparameterize(encoded_vector)
            # `mu` and `logvar` are not used further.

        # If `s1` is None, sample a latent vector from the standard normal.
        elif s1 is None:
            scaffold_g_save, scaffold_h_save = utils.make_graph(s2)
            if scaffold_g_save is None and scaffold_h_save is None:
                return None
            scaffold_g, scaffold_h = utils.make_graph(s2,
                                                      extra_atom_feature=True,
                                                      extra_bond_feature=True)

            self.embede_graph(scaffold_g, scaffold_h)
            if latent_vector is None:  # Sampling
                latent_vector = utils.create_var(
                    torch.randn(1, self.dim_of_node_vector))

        # Sample condition values if not given. They are kept as plain lists so that
        # `condition1 + condition2` below concatenates instead of adding element-wise.
        if condition1 is None or condition2 is None:
            assert not self.N_conditions % 2
            condition1 = list(np.random.rand(self.N_conditions // 2))
            condition2 = list(np.random.rand(self.N_conditions // 2))

        # A condition torch.FloatTensor of shape (1, N_conditions):
        condition = utils.create_var(torch.Tensor(condition1 + condition2))
        if condition.shape:
            condition = condition.unsqueeze(0)
            latent_vector = torch.cat([latent_vector, condition], -1)
        # -> (1, dim_of_node_vector + N_conditions)
        self.init_scaffold_state(scaffold_g, scaffold_h, condition)

        for null_index1 in range(max_add_nodes):
            new_node = self.add_node(scaffold_g, scaffold_h,
                                     latent_vector)  # (1, N_atom_features)
            new_node = utils.probability_to_one_hot(new_node, stochastic)

            # Recall our definition of the termination vector:
            if np.argmax(new_node.data.cpu().numpy().ravel()
                         ) == N_atom_features - 1:
                break

            idx = len(scaffold_h)
            scaffold_h_save[idx] = new_node
            scaffold_h[idx] = self.init_node_state(scaffold_h, new_node)

            for null_index2 in range(max_add_edges):
                new_edge = self.add_edge(scaffold_g, scaffold_h,
                                         latent_vector)  # (1, N_bond_features)
                new_edge = utils.probability_to_one_hot(new_edge, stochastic)

                # Recall our definition of the termination vector:
                if np.argmax(new_edge.data.cpu().numpy().ravel()
                             ) == N_bond_features - 1:
                    break

                selected_node = self.select_node(scaffold_g, scaffold_h,
                                                 latent_vector).view(1, -1)
                # -> (1, len(scaffold_h)-1)
                # Index of the selected node (int)
                selected_node = list(scaffold_h.keys())[np.argmax(
                    utils.probability_to_one_hot(
                        selected_node, stochastic).data.cpu().numpy().ravel())]
                if idx not in scaffold_g_save:
                    scaffold_g_save[idx] = []
                    scaffold_g[idx] = []
                scaffold_g_save[idx].append((new_edge, selected_node))
                scaffold_g[idx].append(
                    (self.init_edge_state(scaffold_h,
                                          new_edge), selected_node))

                # Add the same edge in the opposite direction.
                if selected_node not in scaffold_g_save:
                    scaffold_g_save[selected_node] = []
                    scaffold_g[selected_node] = []
                scaffold_g_save[selected_node].append((new_edge, idx))
                scaffold_g[selected_node].append(
                    (self.init_edge_state(scaffold_h, new_edge), idx))

        try:
            new_smiles = utils.graph_to_smiles(scaffold_g_save,
                                               scaffold_h_save)
            new_smiles = Chem.MolToSmiles(Chem.MolFromSmiles(new_smiles),
                                          isomericSmiles=False)
        except Exception:
            # The decoded graph may not correspond to a valid molecule.
            return None
        selected_isomer, target, isomers = self.select_isomer(
            new_smiles, s2, latent_vector)
        selected_isomer = np.argmax(
            utils.probability_to_one_hot(selected_isomer,
                                         stochastic).data.cpu().numpy())

        return isomers[selected_isomer]
        """ 
Example #12
            print('Acc: {:.6f}'.format(WA))

            logging.info(
                "epoch: {}, train_loss: {:.6f}, valid_loss: {:.6f}, Valid Acc: {:.6f}"
                .format(epoch, train_loss, valid_loss, WA))

        if valid_loss < loss_min:
            torch.save(net.state_dict(), MODEL_PATH + NAME + '.net')
            loss_min = valid_loss
            best_epoch = epoch

    # train fin
    print("\nbest_epoch : {}".format(best_epoch))
    logging.info("\nbest epoch : {}".format(best_epoch))

    print("\nTest")
    logging.info("\nTest")

    net = model.lld_blstm_attn(hidden_dim=lstm_hidden).to(device)
    net.load_state_dict(torch.load(MODEL_PATH + NAME + '.net'))

    mat_test, test_loss = test(net, criterion, test_loader)
    WA, _ = utils.evaluate(mat_test, 4)
    print(np.array(mat_test))
    print(WA)

    logging.info(np.array(mat_test))
    logging.info(WA)

    # Plot the training curves; tls, vls and vac are presumably the accumulated
    # train losses, validation losses and validation accuracies from the loop above.
    utils.make_graph(tls, vls, vac, GRAPH)