Example no. 1
import numpy as np


def crop_region(I, label, bg=0.5):
    """Crop the region described by `label` (normalized tl/br corners) out of
    image `I`, padding any out-of-bounds area with the constant value `bg`.
    Returns None for a degenerate (zero-area) region."""
    # Image size as (width, height); label corners are in normalized coordinates.
    wh = np.array(I.shape[1::-1])

    ch = I.shape[2] if len(I.shape) == 3 else 1
    tl = np.floor(label.tl() * wh).astype(int)
    br = np.ceil(label.br() * wh).astype(int)
    outwh = br - tl

    if np.prod(outwh) == 0:
        return None

    outsize = (outwh[1], outwh[0], ch) if ch > 1 else (outwh[1], outwh[0])
    if (np.array(outsize) < 0).any():
        pause()  # debug breakpoint for negative crop sizes (pause() is assumed to be defined elsewhere)
    Iout = np.zeros(outsize, dtype=I.dtype) + bg

    # Clip the crop window to the image and remember the offset into the output.
    offset = np.minimum(tl, 0) * (-1)
    tl = np.maximum(tl, 0)
    br = np.minimum(br, wh)
    wh = br - tl

    Iout[offset[1]:(offset[1] + wh[1]),
         offset[0]:(offset[0] + wh[0])] = I[tl[1]:br[1], tl[0]:br[0]]

    return Iout
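
A minimal usage sketch for crop_region, assuming a hypothetical label object whose tl() and br() methods return normalized (x, y) corner coordinates as numpy arrays; the BoxLabel class below is an illustration, not part of the original code.

import numpy as np


class BoxLabel:
    def __init__(self, tl, br):
        self._tl, self._br = np.asarray(tl), np.asarray(br)

    def tl(self):
        return self._tl

    def br(self):
        return self._br


I = np.random.rand(480, 640, 3).astype(np.float32)   # H x W x C image
label = BoxLabel((0.25, 0.25), (0.75, 0.75))          # central box
patch = crop_region(I, label, bg=0.5)
print(patch.shape)                                    # approximately (240, 320, 3)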
Example no. 2
    def __call__(self, outputs, targets):
        pause()  # debug breakpoint (pause() is assumed to be defined elsewhere)
        # Weight the pixel-wise NLL loss by (1 - IoU) of the predictions.
        loss = (1 - self.IoU(outputs, targets)) * self.nll_loss(
            outputs.argmax(dim=1), targets)

        # if self.jaccard_weight:
        #     loss += self.jaccard_weight * (1 - soft_jaccard(outputs, targets))

        return loss
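
The __call__ above weights a pixel-wise NLL term by (1 - IoU), but the surrounding class (self.IoU, self.nll_loss) is not shown. Below is a minimal self-contained sketch of that idea, assuming per-class logits and binary segmentation targets; the class name IoUWeightedNLLLoss and the soft-IoU definition are assumptions, not the author's code.

import torch
import torch.nn as nn


class IoUWeightedNLLLoss(nn.Module):
    """Sketch: scale a pixel-wise NLL loss by (1 - soft IoU)."""

    def __init__(self):
        super().__init__()
        self.nll = nn.NLLLoss()

    def soft_iou(self, outputs, targets, eps=1e-6):
        # Soft IoU on the predicted foreground probability (binary case assumed).
        probs = torch.softmax(outputs, dim=1)[:, 1]
        targets = targets.float()
        inter = (probs * targets).sum()
        union = probs.sum() + targets.sum() - inter
        return (inter + eps) / (union + eps)

    def forward(self, outputs, targets):
        # outputs: (N, 2, H, W) logits; targets: (N, H, W) integer labels.
        nll = self.nll(torch.log_softmax(outputs, dim=1), targets)
        return (1 - self.soft_iou(outputs, targets)) * nll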
    def evaluate_pano(self, checkpoint_path):
        print('Evaluating model....')

        # Load the checkpoint to evaluate
        self.load_checkpoint(checkpoint_path, True, True)

        # Put the model in eval mode
        self.network = self.network.eval()

        # print(self.network.module.input0_0.conv.bias)
        # print(self.network.module.input0_0.conv.bias.shape)
        # exit()

        # Reset meter
        self.reset_eval_metrics()

        # Load data
        s = time.time()
        with torch.no_grad():
            rgb = io.imread(
                '/home/paulo/datasets/lab/lab_original/SAM_100_0130.jpg'
            ).astype(np.float32) / 255.
            rgb = cv2.resize(rgb, (256, 512))
            rgb = torch.from_numpy(rgb.transpose(2, 0, 1)).float()
            rgb = rgb.to(self.device)  # Tensor.to() is not in-place; reassign
            inputs = [rgb]
            # inputs, gt, other = self.parse_data(data)

            # Run a forward pass

            output = self.forward_pass(inputs)
            pause()  # debug breakpoint (pause() is assumed to be defined elsewhere)
            # Compute the evaluation metrics. Note: `gt`, `other`, and `i` below
            # are only defined when iterating a dataloader through parse_data.
            self.compute_eval_metrics(output, gt)

            # If trying to save intermediate outputs
            if self.validation_sample_freq >= 0:
                # Save the intermediate outputs
                if i % self.validation_sample_freq == 0:
                    self.save_samples(inputs, gt, other, output)

        # Print a report on the validation results
        print('Evaluation finished in {} seconds'.format(time.time() - s))
        self.print_validation_report()
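
For reference, a self-contained sketch of the single-image preprocessing performed inside evaluate_pano (read, scale to [0, 1], resize, HWC to CHW, add a batch dimension, move to the device); the helper name load_image_as_batch is hypothetical, not part of the original code.

import cv2
import numpy as np
import torch
from skimage import io


def load_image_as_batch(path, size_wh=(256, 512), device='cpu'):
    # Read the image and scale it to [0, 1].
    rgb = io.imread(path).astype(np.float32) / 255.
    # cv2.resize expects the output size as (width, height).
    rgb = cv2.resize(rgb, size_wh)
    # HWC -> CHW, add a leading batch dimension, move to the target device.
    return torch.from_numpy(rgb.transpose(2, 0, 1)).float().unsqueeze(0).to(device)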
Example no. 4
import sys

import gurobipy as gb
import numpy as np

# get_shortest_paths, split_ratio, and cartesian_product are assumed to be
# defined or imported elsewhere in this module.


def softmin_routing(G, D, gamma=2, hard_cap=False, verbose=False):
    '''
    Return a routing policy given a directed graph with weighted edges and a
    demand matrix.
    input parameters:
        G is a networkx graph with nodes and edges. Edges must have both a
        'capacity' attribute and a 'weight' attribute. Edge capacity denotes the
        maximum possible traffic utilization for an edge. It can be set as a
        hard or soft optimization constraint through the 'hard_cap' parameter.
        The edge 'weight' attribute is used for determining shortest paths.
        Edges may additionally have a 'cost' attribute used for weighting the
        maximum link utilization.

        D is a |V| x |V| demand matrix, represented as a 2D numpy array. |V|
        here denotes the number of vertices in the graph G.

        gamma is a floating point number used as a parameter for the softmin
        function (exponential scaling). The larger the value of gamma, the
        closer the method is to shortest path routing.

        hard_cap is a boolean flag which determines whether edge capacities are
        treated as hard or soft optimization constraints.

        verbose is a boolean flag enabling/disabling optimizer printing.

    return values:
        f_sol is a routing policy, represented as a numpy array of size
        |V| x |V| x |E| such that f_sol[s, t, i, j] yields the amount of traffic
        from source s to destination t that goes through edge (i, j).

        l_sol is a numpy array of size |E| such that l_sol[i, j] represents the
        total amount of traffic that flows through edge (i, j) under the given
        flow.

        g_sol is a numpy array of size |V| x |V| such that g_sol[i, j] is the
        total amount of traffic destined for node j that ever arrives at node i,
        which includes the initial demand from i to j.

        m_cong is the maximal congestion for any link, weighted by cost,
        i.e. max_{(i, j) in E} cost[i, j] * l[i, j] / cap[i, j].
    '''
    nV = G.number_of_nodes()
    nE = G.number_of_edges()
    np.fill_diagonal(D, 0)

    sps = get_shortest_paths(G)

    m = gb.Model('netflow')

    verboseprint = print

    if not verbose:
        verboseprint = lambda *a, **kw: None  # also swallow kwargs such as file=
        m.setParam('OutputFlag', False)
        m.setParam('LogToConsole', False)

    V = np.array([i for i in G.nodes()])

    cost = {}
    for e in G.edges():
        # If costs aren't specified, make them uniform.
        cost[e] = G[e[0]][e[1]].get('cost', 1.0)

    cap = {}
    for e in G.edges():
        cap[e] = G[e[0]][e[1]]['capacity']

    arcs, capacity = gb.multidict(cap)
    #pause()
    # Create variables.
    f = m.addVars(V, V, arcs, lb=0.0, name='flow')
    g = m.addVars(V, V, lb=0.0, name='traf_at_node')
    l = m.addVars(arcs, lb=0.0, name='tot_traf_across_link')

    # Link utilization is sum of flows.
    m.addConstrs(
        (l[i, j] == f.sum('*', '*', i, j) for i, j in arcs),
        'l_sum_traf',
    )

    # Arc capacity constraints
    if hard_cap:
        verboseprint('Capacity constraints set as hard constraints.')
        m.addConstrs(
            (l[i, j] <= capacity[i, j] for i, j in arcs),
            'traf_below_cap',
        )

    # Total commodity at node is sum of incoming commodities times split
    # ratios plus the source demand.
    for s, t in cartesian_product(V, V):
        qs = gb.quicksum(g[u, t] * split_ratio(G, u, v, t, gamma, sps)
                         for (u, v) in G.in_edges(s))
        #pause()
        m.addConstr(g[s, t] == qs + D[s, t], 'split_ratio_{}_{}'.format(s, t))

    # Total commodity is sum of incoming flows plus outgoing source.
    for s, t in cartesian_product(V, V):
        m.addConstr(g[s, t] == (f.sum('*', t, '*', s) + D[s, t]))

    # Flow conservation constraints.
    for s, t, u in cartesian_product(V, V, V):
        d = D[int(s), int(t)]
        if u == s:
            m.addConstr(
                f.sum(s, t, u, '*') - f.sum(s, t, '*', u) == d, 'conserv')
        elif u == t:
            m.addConstr(
                f.sum(s, t, u, '*') - f.sum(s, t, '*', u) == -d, 'conserv')
        else:
            m.addConstr(
                f.sum(s, t, u, '*') - f.sum(s, t, '*', u) == 0, 'conserv')

    # Compute max-link utilization (congestion).
    max_cong = m.addVar(name='congestion')
    m.addConstrs(
        ((cost[i, j] * l[i, j]) / capacity[i, j] <= max_cong for i, j in arcs))

    # Compute optimal solution.
    m.optimize()
    # Print solution.
    if m.status == gb.GRB.Status.OPTIMAL:
        l_sol = m.getAttr('x', l)
        g_sol = m.getAttr('x', g)
        f_sol = m.getAttr('x', f)
        m_cong = float(max_cong.x)

        verboseprint('\nOptimal traffic flows.')
        verboseprint('f_{i -> j}(s, t) denotes amount of traffic from source'
                     ' s to destination t that goes through link (i, j) in E.')

        for s, t in cartesian_product(V, V):
            for i, j in arcs:
                p = f_sol[s, t, i, j]
                if p > 0:
                    verboseprint('f_{%s -> %s}(%s, %s): %g bytes.' %
                                 (i, j, s, t, p))

        verboseprint('\nTotal traffic at node.')
        verboseprint('g(i, j) denotes the total amount of traffic destined for'
                     ' node j that passes through node i.')

        for s, t in cartesian_product(V, V):
            p = g_sol[s, t]
            if p > 0:
                verboseprint('g({}, {}): {} bytes.'.format(s, t, p))

        verboseprint('\nTotal traffic through link.')
        verboseprint('l(i, j) denotes the total amount of traffic that passes'
                     ' through edge (i, j).')

        for i, j in arcs:
            p = l_sol[i, j]
            if p > 0:
                verboseprint('l({}, {}): {} bytes.'.format(i, j, p))

        verboseprint('\nMaximum weighted link utilization (or congestion):',
                     format(m_cong, '.4f'))

    else:
        print(D, m.status)
        np.savetxt("demand.txt", D)
        w = np.zeros(nE)
        cap = np.zeros(nE)
        cost = np.zeros(nE)
        e0 = np.zeros(nE)
        e1 = np.zeros(nE)
        for k, e in enumerate(G.edges()):
            w[k] = G[e[0]][e[1]]['weight']
            cap[k] = G[e[0]][e[1]]['capacity']
            cost[k] = G[e[0]][e[1]].get('cost', 1.0)
            e0[k] = e[0]
            e1[k] = e[1]
            print(e, cost[k], cap[k], w[k])

        np.savetxt("w.txt", w)
        np.savetxt("capacity.txt", cap)
        np.savetxt("cost.txt", cost)
        np.savetxt("e0.txt", e0)
        np.savetxt("e1.txt", e1)
        pause()  # debug breakpoint before returning the failure sentinel
        verboseprint('\nERROR: Flow Optimization Failed!', file=sys.stderr)
        return None, None, None, None

    return f_sol, l_sol, g_sol, m_cong
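
A minimal usage sketch for softmin_routing, assuming gurobipy is installed and that the helpers get_shortest_paths, split_ratio, and cartesian_product used above are available in the same module; the bidirectional ring topology and demand values here are illustrative only.

import networkx as nx
import numpy as np

G = nx.DiGraph()
for u, v in [(0, 1), (1, 2), (2, 3), (3, 0), (1, 0), (2, 1), (3, 2), (0, 3)]:
    G.add_edge(u, v, capacity=10.0, weight=1.0, cost=1.0)

D = np.zeros((4, 4))
D[0, 2] = 4.0  # 4 units of traffic from node 0 to node 2
D[1, 3] = 2.0

f_sol, l_sol, g_sol, m_cong = softmin_routing(G, D, gamma=2, verbose=True)
print('max weighted link utilization: {:.4f}'.format(m_cong))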
Example no. 5
    G[7][4]['weight'] = 3.26726909324
    G[7][5]['weight'] = 4.30541960772
    G[7][6]['weight'] = 3.9939494995
    G[8][4]['weight'] = 0.506047611233
    G[9][8]['weight'] = 4.38215269704
    G[9][1]['weight'] = 4.05885254022
    G[9][10]['weight'] = 4.2463743546
    G[9][11]['weight'] = 2.82527709364
    G[9][5]['weight'] = 0.2
    G[10][4]['weight'] = 4.83718017961
    G[11][0]['weight'] = 3.97261233893
    G[11][8]['weight'] = 4.2321296212
    G[11][7]['weight'] = 4.30096320225

    #for k, e in enumerate(G.edges()):
    #    G[e[0]][e[1]]['weight']=a[k]
    #print(D, D.shape)
    # bk_edge is assumed to be a fixed edge ordering defined earlier; w collects
    # the edge weights in that order.
    w = torch.zeros(32, dtype=torch.float64, device=torch.device('cuda'))
    for i, e in enumerate(G.edges()):
        if i != bk_edge.index((e[0], e[1])):
            print(i, e)  # warn when the graph's edge order differs from bk_edge
        w[bk_edge.index((e[0], e[1]))] = G[e[0]][e[1]]['weight']
    test_soft(G, w, D)
    m_cong_t = softmin_routing_torch(G, w, D, GAMMA, verbose=True)
    #_,_,_,m_cong = softmin_routing(G, D, GAMMA, verbose=True)

    pause()  # debug breakpoint (pause() is assumed to be defined elsewhere)
    print(D)
    for k, e in enumerate(G.edges()):
        print(e, G[e[0]][e[1]]['capacity'], G[e[0]][e[1]]['weight'])