Example #1
def solver(graph, demand, g=None, od=None, max_iter=100, eps=1e-8, q=None, \
    display=0, past=None, stop=1e-8):

    if g is None: g = construct_igraph(graph)
    if od is None: od = construct_od(demand)
    f = np.zeros(graph.shape[0],
                 dtype="float64")  # initial flow assignment is null
    K = total_free_flow_cost(g, od)
    if K < eps:
        K = np.sum(demand[:, 2])
    elif display >= 1:
        print 'average free-flow travel time', K / np.sum(demand[:, 2])

    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)
        # construct weighted graph with latest flow assignment
        L, grad = search_direction(f, graph, g, od)
        if i >= 1:
            error = grad.dot(f - L) / K
            if error < stop: return f
        f = f + 2. * (L - f) / (i + 2.)
    return f
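For orientation: solver is plain Frank-Wolfe. Each iteration computes an all-or-nothing direction L, takes the open-loop step 2/(i+2), and stops once the normalized duality gap grad.dot(f - L) / K falls below stop. A minimal, hypothetical usage sketch (the array layouts follow the docstrings later in this listing; solver's helpers such as construct_igraph are assumed to be importable from the surrounding module, and the a0..a4 columns are assumed to be latency polynomial coefficients):

import numpy as np

# toy network: two parallel links from node 0 to node 1
#                     link_id from to  a0  a1  a2  a3  a4
toy_graph = np.array([[0.,    0.,  1., 1., 0., 0., 0., 1.],
                      [1.,    0.,  1., 2., 0., 0., 0., 0.]])
toy_demand = np.array([[0., 1., 1.5]])  # 1.5 units of flow from node 0 to node 1

flows = solver(toy_graph, toy_demand, display=1)  # equilibrium link-flow vector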
def solver_2(graph, demand, g=None, od=None, max_iter=100, eps=1e-8, q=10, \
    display=0, past=None, stop=1e-8):
    
    if g is None: g = construct_igraph(graph)
    if od is None: od = construct_od(demand)
    f = np.zeros(graph.shape[0],dtype="float64") # initial flow assignment is null
    ls = max_iter/q # every ls-th iteration runs an exact line search
    K = total_free_flow_cost(g, od)
    if K < eps:
        K = np.sum(demand[:,2])
    elif display >= 1:
        print 'average free-flow travel time', K / np.sum(demand[:,2])

    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i+1)
            else:
                print 'iteration: {}, error: {}'.format(i+1, error)
        # construct weighted graph with latest flow assignment
        L, grad = search_direction(f, graph, g, od)
        if i >= 1:
            # w = f - L
            # norm_w = np.linalg.norm(w,1)
            # if norm_w < eps: return f
            error = grad.dot(f - L) / K
            if error < stop: return f
        # s = line_search(lambda a: potential(graph, (1.-a)*f+a*L)) if i>max_iter-q \
        #     else 2./(i+2.)
        s = line_search(lambda a: potential(graph, (1.-a)*f+a*L)) if i%ls==(ls-1) \
            else 2./(i+2.)
        if s < eps: return f
        f = (1.-s) * f + s * L
    return f
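solver_2 differs from solver only in the step size: every ls-th iteration it swaps the open-loop step 2/(i+2) for an exact line search of the potential along the segment from f to L. line_search itself is not part of this listing; a plausible stand-in, assuming it minimizes a scalar objective over [0, 1], is a golden-section search:

def line_search_sketch(obj, n_iter=20):
    # Hypothetical stand-in for line_search: golden-section
    # minimization of obj over the interval [0, 1].
    phi = (5 ** 0.5 - 1) / 2.
    lo, hi = 0., 1.
    c, d = hi - phi * (hi - lo), lo + phi * (hi - lo)
    fc, fd = obj(c), obj(d)
    for _ in range(n_iter):
        if fc < fd:  # minimum lies in [lo, d]
            hi, d, fd = d, c, fc
            c = hi - phi * (hi - lo)
            fc = obj(c)
        else:        # minimum lies in [c, hi]
            lo, c, fc = c, d, fd
            d = lo + phi * (hi - lo)
            fd = obj(d)
    return (lo + hi) / 2.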
def OD_non_routed_costs(alphas, net, net2, demand, inputs, output, verbose=0):
    '''
    from input files of equilibrium flows for the heterogeneous game
    outputs the travel times of non-routed users
    net  : network with travel time cost functions
    net2 : network with perceived cost functions
    '''
    od = construct_od(demand)
    num_ods = demand.shape[0]
    out = np.zeros((num_ods, len(alphas)+2))
    out[:,:2] = demand[:,:2]
    g = construct_igraph(net)
    for i, alpha in enumerate(alphas):
        fs = np.loadtxt(inputs.format(int(alpha*100)), delimiter=',', skiprows=1)
        c = cost(np.sum(fs, axis=1), net2) # vector of non-routed edge costs
        g.es["weight"] = c.tolist() 
        tt = cost(np.sum(fs, axis=1), net) # vector of edge travel times
        if verbose >= 1: 
            print 'computing OD costs for alpha = {} ...'.format(alpha)
        costs = path_cost_non_routed(net2, c, tt, demand, g, od)
        for j in range(num_ods):
            out[j,i+2] = costs[(int(out[j,0]), int(out[j,1]))]
    header = ['o,d']
    for alpha in alphas: 
        header.append(str(int(alpha*100)))
    np.savetxt(output, out, delimiter=',', header=','.join(header), comments='')
def all_or_nothing_assignment(cost, net, demand):
    # given vector of edge costs, graph, and demand, computes the AoN
    # assignment
    g = construct_igraph(net)
    od = construct_od(demand)
    g.es["weight"] = cost.tolist()
    return all_or_nothing(g, od)
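all_or_nothing_assignment only wires together the module's helpers, whose shapes can be read off the call sites in this listing: construct_igraph builds a directed igraph from rows [link_id from to a0 a1 a2 a3 a4] (a0 is assumed to be the free-flow weight), and construct_od builds the dict {origin: ([destinations], [demands])} documented in OD_routed_costs. Rough sketches under those assumptions, not the project's actual implementations:

import igraph
import numpy as np
from collections import defaultdict

def construct_igraph_sketch(graph):
    # directed graph with one edge per row of 'graph'
    n = int(np.max(graph[:, 1:3])) + 1
    edges = [(int(u), int(v)) for u, v in graph[:, 1:3]]
    g = igraph.Graph(n, edges, directed=True)
    g.es["weight"] = graph[:, 3].tolist()  # free-flow cost a0 (assumed)
    return g

def construct_od_sketch(demand):
    # {origin: ([destinations], [flows])}
    od = defaultdict(lambda: ([], []))
    for o, d, flow in demand:
        od[int(o)][0].append(int(d))
        od[int(o)][1].append(flow)
    return dict(od)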
def OD_routed_costs(alphas, net, demand, inputs, output, verbose=0):
    '''
    from input files of equilibrium flows for the heterogeneous game
    outputs the travel times of routed users
    '''
    od = construct_od(
        demand)  # od is a dict {origin: ([destination],[demand])}
    num_ods = demand.shape[0]
    out = np.zeros((num_ods, len(alphas) + 2))
    out[:, :2] = demand[:, :2]
    g = construct_igraph(net)  # construct igraph object
    for i, alpha in enumerate(alphas):
        fs = np.loadtxt(inputs.format(int(alpha * 100)),
                        delimiter=',',
                        skiprows=1)
        c = cost(np.sum(fs, axis=1), net)
        g.es["weight"] = c.tolist()
        # get shortest path and returns {(o, d): path_cost}
        if verbose >= 1:
            print 'computing OD costs for alpha = {} ...'.format(alpha)
        costs = path_cost(net, c, demand, g, od)
        for j in range(num_ods):
            out[j, i + 2] = costs[(int(out[j, 0]), int(out[j, 1]))]
    header = ['o,d']
    for alpha in alphas:
        header.append(str(int(alpha * 100)))
    np.savetxt(output,
               out,
               delimiter=',',
               header=','.join(header),
               comments='')
def path_cost_non_routed(net, nr_cost, tt, d, g=None, od=None):
    '''
    given
    net:     graph
    nr_cost: vector of edge costs
    tt:      vector of free-flow travel times
    d:       demand
    returns travel time cost for non-routed users
    for all ODs in d (or od) in the format
    {(o, d): path_cost}
    '''
    if g is None:
        g = construct_igraph(net)
    g.es["weight"] = nr_cost.tolist()
    if od is None:
        od = construct_od(d)
    out = {}
    for o in od.keys():
        p = g.get_shortest_paths(o,
                                 to=od[o][0],
                                 weights="weight",
                                 output="epath")
        for i, d in enumerate(od[o][0]):
            out[(o, d)] = np.sum(tt[p[i]])
    return out
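Put as a formula, the function routes each OD pair on the shortest path under the perceived costs nr_cost and then prices that path with the travel times tt:

\[
p^*(o,d) = \arg\min_{p \in P_{od}} \sum_{e \in p} \mathrm{nr\_cost}_e,
\qquad
\mathrm{out}[(o,d)] = \sum_{e \in p^*(o,d)} tt_e .
\]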
Example #8
def solver_2(graph, demand, g=None, od=None, max_iter=100, eps=1e-8, q=10, \
    display=0, past=None, stop=1e-8):

    if g is None: g = construct_igraph(graph)
    if od is None: od = construct_od(demand)
    f = np.zeros(graph.shape[0],
                 dtype="float64")  # initial flow assignment is null
    ls = max_iter / q  # every ls-th iteration runs an exact line search
    K = total_free_flow_cost(g, od)
    if K < eps:
        K = np.sum(demand[:, 2])
    elif display >= 1:
        print 'average free-flow travel time', K / np.sum(demand[:, 2])

    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)
        # construct weighted graph with latest flow assignment
        L, grad = search_direction(f, graph, g, od)
        if i >= 1:
            # w = f - L
            # norm_w = np.linalg.norm(w,1)
            # if norm_w < eps: return f
            error = grad.dot(f - L) / K
            if error < stop: return f
        # s = line_search(lambda a: potential(graph, (1.-a)*f+a*L)) if i>max_iter-q \
        #     else 2./(i+2.)
        s = line_search(lambda a: potential(graph, (1.-a)*f+a*L)) if i%ls==(ls-1) \
            else 2./(i+2.)
        if s < eps: return f
        f = (1. - s) * f + s * L
    return f
def check__LA_connectivity():
    graph, demand, node = load_LA()
    print np.min(graph[:,1:3])
    print np.max(graph[:,1:3])
    print np.min(demand[:,:2])
    print np.max(demand[:,:2])
    od = construct_od(demand)
    g = construct_igraph(graph)
    f = np.zeros((graph.shape[0],))
    print average_cost_all_or_nothing(f, graph, demand)
def gauss_seidel(graphs,
                 demands,
                 solver,
                 max_cycles=10,
                 max_iter=100,
                 by_origin=False,
                 q=10,
                 display=0,
                 past=10,
                 stop=1e-8,
                 eps=1e-8,
                 stop_cycle=None):
    # we are given a list of graphs and demands that are specific
    # for different types of players
    # the gauss-seidel scheme updates cyclically for each type at a time
    if stop_cycle is None:
        stop_cycle = stop
    g = construct_igraph(graphs[0])
    ods = [construct_od(d) for d in demands]
    types = len(graphs)
    fs = np.zeros((graphs[0].shape[0], types), dtype="float64")
    g2 = np.copy(graphs[0])
    K = total_ff_costs_heterogeneous(graphs, g, ods)
    if K < eps:
        K = np.sum([np.sum(d[:, 2]) for d in demands])
    elif display >= 1:
        print 'average free-flow travel time', \
            K / np.sum([np.sum(d[:, 2]) for d in demands])

    for cycle in range(max_cycles):
        if display >= 1:
            print 'cycle:', cycle
        for i in range(types):
            # construct graph with updated latencies
            shift = np.sum(fs[:, range(i) + range(i + 1, types)], axis=1)
            shift_graph(graphs[i], g2, shift)
            g.es["weight"] = g2[:, 3].tolist()
            # update flow assignment for this type
            fs[:, i] = solver(g2,
                              demands[i],
                              g,
                              ods[i],
                              max_iter=max_iter,
                              q=q,
                              display=display,
                              past=past,
                              stop=stop,
                              eps=eps)
        # check if we have convergence
        r = residual(graphs, demands, g, ods, fs) / K
        if display >= 1:
            print 'error:', r
        if r < stop_cycle and r > 0:
            return fs
    return fs
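In equation form, each inner step of gauss_seidel is a best response: type i re-solves its own assignment against the fixed background flow of all the other types, which is exactly the sum that shift_graph bakes into the latencies:

\[
s_i = \sum_{j \neq i} f_j, \qquad
f_i \leftarrow \mathrm{solver}\big(x \mapsto \ell_e(x + s_{i,e})\big),
\]

and the outer loop repeats the cycle until the normalized residual drops below stop_cycle.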
Example #12
def solver(graph, demand, g=None, od=None, max_iter=100, eps=1e-8, q=None, \
    display=0, past=None, stop=1e-8):

    if g is None: g = construct_igraph(graph)
    if od is None: od = construct_od(demand)
    f = np.zeros(graph.shape[0],
                 dtype='float64')  # initial flow assignment is null
    h = defaultdict(np.float64)  # initial path flow assignment is null
    K = total_free_flow_cost(g, od)
    if K < eps:
        K = np.sum(demand[:, 2])
    elif display >= 1:
        print 'average free-flow travel time', K / np.sum(demand[:, 2])

    start_time = timeit.default_timer()
    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)
        # construct weighted graph with latest flow assignment
        L, grad, path_flows = search_direction(f, graph, g, od)
        if i >= 1:
            error = grad.dot(f - L) / K
            if error < stop: return f, h
        f = f + 2. * (L - f) / (i + 2.)
        for k in set(h.keys()).union(set(path_flows.keys())):
            h[k] = h[k] + 2. * (path_flows[k] - h[k]) / (i + 2.)
        print 'iteration', i
        print 'time(sec):', timeit.default_timer() - start_time
        print 'num path flows:', len(h)

        f_h = np.zeros(graph.shape[0],
                       dtype='float64')  # rebuild link flows from path flows
        for k in h:
            flow = h[k]
            for link in k[2]:
                f_h[link] += flow
        print "path vs link flow diff:", np.sum(np.abs(f_h - f)), f.shape

    # find how many paths each od pair really has
    od_paths = defaultdict(int)
    most_paths = 0
    for k in h.keys():
        od_paths[(k[:2])] += 1
        most_paths = max(most_paths, od_paths[(k[:2])])
    path_counts = [0 for i in range(most_paths + 1)]
    for k in od_paths.keys():
        path_counts[od_paths[k]] += 1
    for i in range(len(path_counts)):
        print i, path_counts[i]
    return f, h
def LA_free_flow_costs(thres, cog_costs):
    '''
    study aiming at comparing the OD costs of all-or-nothing assignment
    between costs = travel times, and costs with multiplicative cognitive costs
    '''
    net, demand, node, geom = load_LA_2()
    g = construct_igraph(net)
    g2 = construct_igraph(net)
    od = construct_od(demand)
    print np.array(g.es["weight"]).dot(all_or_nothing(g, od))/ (np.sum(demand[:,2])*60.)
    for K in cog_costs:
        net2, small_capacity = multiply_cognitive_cost(net, geom, thres, K)
        g2.es["weight"] = net2[:,3]
        print np.array(g.es["weight"]).dot(all_or_nothing(g2, od))/ (np.sum(demand[:,2])*60.)
Example #14
def fw_heterogeneous_1(graphs,
                       demands,
                       max_iter=100,
                       eps=1e-8,
                       q=None,
                       display=0,
                       past=None,
                       stop=1e-8):
    '''
    Frank-Wolfe algorithm on the heterogeneous game
    given a list of graphs in the format
    g = [[link_id from to a0 a1 a2 a3 a4]]
    and demand in the format
    d = [[o d flow]]
    '''
    # construct graph and demand objects suitable for AoN_igraph
    gs = [construct_igraph(graph) for graph in graphs]
    ods = [construct_od(demand) for demand in demands]
    # construct empty vector to be filled in with values
    links = graphs[0].shape[0]
    types = len(graphs)
    # initial flow assignment is null
    f = np.zeros(links * types, dtype="float64")
    L = np.zeros(links * types, dtype="float64")
    grad = np.zeros(links * types, dtype="float64")
    error = 'N/A'
    # compute re-normalization constant
    K = sum([total_free_flow_cost(g, od) for g, od in zip(gs, ods)])
    if K < eps:
        K = sum([np.sum(demand[:, 2]) for demand in demands])
    elif display >= 1:
        print 'average free-flow travel time', \
            K / sum([np.sum(demand[:, 2]) for demand in demands])
    # compute iterations
    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)
        # construct weighted graph with latest flow assignment
        L, grad = search_direction_multi(f, graphs, gs, ods, L, grad)
        if i >= 1:
            error = grad.dot(f - L) / K
            if error < stop:
                return np.reshape(f, (types, links)).T
        f = f + 2. * (L - f) / (i + 2.)
    return np.reshape(f, (types, links)).T
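The stacked layout of f is worth spelling out: the flows of all player types are concatenated block by block, and np.reshape(f, (types, links)).T recovers a links-by-types matrix. A quick check with made-up numbers:

import numpy as np

links, types = 3, 2
f = np.array([10., 11., 12.,   # type-0 flow on links 0..2
              20., 21., 22.])  # type-1 flow on links 0..2
m = np.reshape(f, (types, links)).T  # shape (links, types)
total_f = np.sum(m, 1)               # total link flow: array([30., 32., 34.])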
def LA_free_flow_costs(thres, cog_costs):
    '''
    study aiming at comparing the OD costs of all-or-nothing assignment
    between costs = travel times, and costs with multiplicative cognitive costs
    '''
    net, demand, node, geom = load_LA_2()
    g = construct_igraph(net)
    g2 = construct_igraph(net)
    od = construct_od(demand)
    print np.array(g.es["weight"]).dot(all_or_nothing(
        g, od)) / (np.sum(demand[:, 2]) * 60.)
    for K in cog_costs:
        net2, small_capacity = multiply_cognitive_cost(net, geom, thres, K)
        g2.es["weight"] = net2[:, 3]
        print np.array(g.es["weight"]).dot(all_or_nothing(
            g2, od)) / (np.sum(demand[:, 2]) * 60.)
def path_cost_non_routed(net, nr_cost, tt, d, g=None, od=None):
    '''
    given vector of edge costs, graph, demand
    returns travel time cost for non-routed users for all ODs in d (or od) in the format
    {(o, d): path_cost}
    '''
    if g is None: 
        g = construct_igraph(net)
        g.es["weight"] = nr_cost.tolist()
    if od is None:
        od = construct_od(d)
    out = {}
    for o in od.keys():
        p = g.get_shortest_paths(o, to=od[o][0], weights="weight", output="epath")
        for i,d in enumerate(od[o][0]):
            out[(o,d)] = np.sum(tt[p[i]])
    return out
def path_cost(net, cost, d, g=None, od=None):
    '''
    given graph, vector of edge costs, demand
    returns shortest path cost for all ODs in d (or od) in the format
    {(o, d): path_cost}
    '''
    if g is None: 
        g = construct_igraph(net)
        g.es["weight"] = cost.tolist()
    if od is None:
        od = construct_od(d)
    out = {}
    for o in od.keys():
        c = g.shortest_paths_dijkstra(o, target=od[o][0], weights="weight")
        for i,d in enumerate(od[o][0]):
            out[(o,d)] = c[0][i]
    return out
def free_flow_OD_costs(net, costs, demand, output, verbose=0):
    '''
    do all-or-nothing assignments following list of arc costs 'costs'
    output OD costs under arc costs contained in 'net'
    '''
    od = construct_od(demand)
    num_ods = demand.shape[0]
    out = np.zeros((num_ods, len(costs)+3))
    out[:,:2] = demand[:,:2]
    g = construct_igraph(net)
    out[:,2] = demand[:,2]
    for i,c in enumerate(costs):
        if verbose >= 1: 
            print 'computing OD costs for the {}-th cost vector ...'.format(i+1)
        cost = path_cost_non_routed(net, c, net[:,3], demand, g, od)
        for j in range(num_ods):
            out[j,i+3] = cost[(int(out[j,0]), int(out[j,1]))]
    header = ['o,d,demand'] + [str(i) for i in range(len(costs))]
    np.savetxt(output, out, delimiter=',', header=','.join(header), comments='')
def OD_routed_costs(alphas, net, demand, inputs, output, verbose=0):
    '''
    from input files of equilibrium flows for the heterogeneous game
    outputs the travel times of routed users
    '''
    od = construct_od(demand) # od is a dict {origin: ([destination],[demand])}
    num_ods = demand.shape[0]
    out = np.zeros((num_ods, len(alphas)+2))
    out[:,:2] = demand[:,:2]
    g = construct_igraph(net) # construct igraph object
    for i, alpha in enumerate(alphas):
        fs = np.loadtxt(inputs.format(int(alpha*100)), delimiter=',', skiprows=1)
        c = cost(np.sum(fs, axis=1), net)
        g.es["weight"] = c.tolist()    
        # get shortest path and returns {(o, d): path_cost}
        if verbose >= 1: 
            print 'computing OD costs for alpha = {} ...'.format(alpha)
        costs = path_cost(net, c, demand, g, od) 
        for j in range(num_ods):
            out[j,i+2] = costs[(int(out[j,0]), int(out[j,1]))]
    header = ['o,d']
    for alpha in alphas: 
        header.append(str(int(alpha*100)))
    np.savetxt(output, out, delimiter=',', header=','.join(header), comments='')
def gauss_seidel(graphs, demands, solver, max_cycles=10, max_iter=100, \
    by_origin=False, q=10, display=0, past=10, stop=1e-8, eps=1e-8, \
    stop_cycle=None):
    # we are given a list of graphs and demands that are specific for different types of players
    # the gauss-seidel scheme updates cyclically for each type at a time
    if stop_cycle is None: 
        stop_cycle = stop
    g = construct_igraph(graphs[0])
    ods = [construct_od(d) for d in demands]
    types = len(graphs)
    fs = np.zeros((graphs[0].shape[0],types),dtype="float64")
    g2 = np.copy(graphs[0])
    K =  total_ff_costs_heterogeneous(graphs, g, ods)
    if K < eps:
        K = np.sum([np.sum(d[:,2]) for d in demands])
    elif display >= 1:
        print 'average free-flow travel time', \
            K / np.sum([np.sum(d[:,2]) for d in demands])

    for cycle in range(max_cycles):
        if display >= 1: print 'cycle:', cycle
        for i in range(types):
            # construct graph with updated latencies
            shift = np.sum(fs[:,range(i)+range(i+1,types)], axis=1)
            shift_graph(graphs[i], g2, shift)
            g.es["weight"] = g2[:,3].tolist()
            # update flow assignment for this type
            fs[:,i] = solver(g2, demands[i], g, ods[i], max_iter=max_iter, q=q, \
                display=display, past=past, stop=stop, eps=eps)
        # check if we have convergence
        r = residual(graphs, demands, g, ods, fs) / K
        if display >= 1:
            print 'error:', r
        if r < stop_cycle and r > 0:
            return fs
    return fs
Example #22
def fw_heterogeneous_2(graphs,
                       demands,
                       past=10,
                       max_iter=100,
                       eps=1e-8,
                       q=50,
                       display=0,
                       stop=1e-8):
    '''
    Frank-Wolfe algorithm on the heterogeneous game
    given a list of graphs in the format
    g = [[link_id from to a0 a1 a2 a3 a4]]
    and demand in the format
    d = [[o d flow]]
    '''
    assert past <= q, "'q' must be greater than or equal to 'past'"
    # construct graph and demand objects suitable for AoN_igraph
    gs = [construct_igraph(graph) for graph in graphs]
    ods = [construct_od(demand) for demand in demands]
    # construct empty vector to be filled in with values
    links = graphs[0].shape[0]
    types = len(graphs)
    # initial flow assignment is null
    f = np.zeros(links * types, dtype="float64")
    fs = np.zeros((links * types, past), dtype="float64")
    L = np.zeros(links * types, dtype="float64")
    grad = np.zeros(links * types, dtype="float64")
    L2 = np.zeros(links * types, dtype="float64")
    grad2 = np.zeros(links * types, dtype="float64")
    error = 'N/A'
    # compute re-normalization constant
    K = sum([total_free_flow_cost(g, od) for g, od in zip(gs, ods)])
    if K < eps:
        K = sum([np.sum(demand[:, 2]) for demand in demands])
    elif display >= 1:
        print 'average free-flow travel time', \
            K / sum([np.sum(demand[:, 2]) for demand in demands])
    # compute iterations
    for i in range(max_iter):
        if display >= 1:
            print 'iteration: {}, error: {}'.format(i + 1, error)
        # construct weighted graph with latest flow assignment
        # print 'f', f
        # print 'reshape', np.reshape(f,(links,types))
        total_f = np.sum(np.reshape(f, (types, links)).T, 1)
        # print 'total flow', total_f
        for j, (graph, g, od) in enumerate(zip(graphs, gs, ods)):
            l, gr = search_direction(total_f, graph, g, od)
            L[(j * links):((j + 1) * links)] = l
            grad[(j * links):((j + 1) * links)] = gr
        # print 'L', L
        # print 'grad', grad
        fs[:, i % past] = L
        w = L - f
        if i >= 1:
            error = -grad.dot(w) / K
            # if error < stop and error > 0.0:
            if error < stop:
                if display >= 1:
                    print 'stop with error: {}'.format(error)
                return np.reshape(f, (types, links)).T
        if i > q:
            # step 3 of Fukushima
            v = np.sum(fs, axis=1) / min(past, i + 1) - f
            norm_v = np.linalg.norm(v, 1)
            if norm_v < eps:
                if display >= 1:
                    print 'stop with norm_v: {}'.format(norm_v)
                return np.reshape(f, (types, links)).T
            norm_w = np.linalg.norm(w, 1)
            if norm_w < eps:
                if display >= 1:
                    print 'stop with norm_w: {}'.format(norm_w)
                return np.reshape(f, (types, links)).T
            # step 4 of Fukushima
            gamma_1 = grad.dot(v) / norm_v
            gamma_2 = grad.dot(w) / norm_w
            if gamma_2 > -eps:
                if display >= 1:
                    print 'stop with gamma_2: {}'.format(gamma_2)
                return np.reshape(f, (types, links)).T
            d = v if gamma_1 < gamma_2 else w
            # step 5 of Fukushima
            s = line_search(
                lambda a: merit(f + a * d, graphs, gs, ods, L2, grad2))
            # print 'step', s
            if s < eps:
                if display >= 1:
                    print 'stop with step_size: {}'.format(s)
                return np.reshape(f, (types, links)).T
            f = f + s * d
        else:
            f = f + 2. * w / (i + 2.)
    return np.reshape(f, (types, links)).T
def all_or_nothing_assignment(cost, net, demand):
    # given vector of edge costs, graph, and demand, computes the AoN assignment
    g = construct_igraph(net)
    od = construct_od(demand)
    g.es["weight"] = cost.tolist()    
    return all_or_nothing(g, od)
def solver_3(graph, demand, g=None, od=None, past=10, max_iter=100, eps=1e-8, \
    q=50, display=0, stop=1e-8):
    '''
    this is an adaptation of Fukushima's algorithm
    graph:    numpy array of the format [[link_id from to a0 a1 a2 a3 a4]]
    demand:   numpy array of the format [[o d flow]]
    g:        igraph object constructed from graph
    od:       od in the format {from: ([to], [rate])}
    past:     search direction is the mean over the last 'past' directions
    max_iter: maximum number of iterations
    eps:      used as a stopping criterion if some quantities are too close to 0
    q:        the first 'q' iterations use the open-loop step size 2/(i+2)
    display:  controls the display of information in the terminal
    stop:     stops the algorithm if the error is less than 'stop'
    '''
    assert past <= q, "'q' must be greater than or equal to 'past'"
    if g is None: g = construct_igraph(graph)
    if od is None: od = construct_od(demand)
    f = np.zeros(graph.shape[0],dtype="float64") # initial flow assignment is null
    fs = np.zeros((graph.shape[0],past),dtype="float64")
    K = total_free_flow_cost(g, od)
    if K < eps:
        K = np.sum(demand[:,2])
    elif display >= 1:
        print 'average free-flow travel time', K / np.sum(demand[:,2])

    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i+1)
            else:            
                print 'iteration: {}, error: {}'.format(i+1, error)
        # construct weighted graph with latest flow assignment
        L, grad = search_direction(f, graph, g, od)
        fs[:,i%past] = L
        w = L - f
        if i >= 1:
            error = -grad.dot(w) / K
            # if error < stop and error > 0.0:
            if error < stop:
                if display >= 1: print 'stop with error: {}'.format(error)
                return f
        if i > q:
            # step 3 of Fukushima
            v = np.sum(fs,axis=1) / min(past,i+1) - f
            norm_v = np.linalg.norm(v,1)
            if norm_v < eps: 
                if display >= 1: print 'stop with norm_v: {}'.format(norm_v)
                return f
            norm_w = np.linalg.norm(w,1)
            if norm_w < eps: 
                if display >= 1: print 'stop with norm_w: {}'.format(norm_w)
                return f
            # step 4 of Fukushima
            gamma_1 = grad.dot(v) / norm_v
            gamma_2 = grad.dot(w) / norm_w
            if gamma_2 > -eps: 
                if display >= 1: print 'stop with gamma_2: {}'.format(gamma_2)
                return f
            d = v if gamma_1 < gamma_2 else w
            # step 5 of Fukushima
            s = line_search(lambda a: potential(graph, f+a*d))
            if s < eps: 
                if display >= 1: print 'stop with step_size: {}'.format(s)
                return f
            f = f + s*d
        else:
            f = f + 2. * w/(i+2.)
    return f
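The "steps of Fukushima" in the comments follow the modified Frank-Wolfe method of Fukushima (1984): keep the last 'past' all-or-nothing solutions, form the averaged direction v next to the plain direction w = L - f, and take whichever descends faster per unit of l1-length:

\[
v = \frac{1}{\min(\mathrm{past},\, i+1)} \sum_{k} L_k - f, \qquad
\gamma_1 = \frac{\nabla F(f)^{\top} v}{\lVert v \rVert_1}, \quad
\gamma_2 = \frac{\nabla F(f)^{\top} w}{\lVert w \rVert_1}, \qquad
d = \begin{cases} v, & \gamma_1 < \gamma_2 \\ w, & \text{otherwise,} \end{cases}
\]

followed by a line search of F along d. Reading F as the Beckmann potential \(F(f) = \sum_e \int_0^{f_e} c_e(x)\,dx\), whose minimizers are the user-equilibrium flows, is an assumption consistent with the names potential and line_search, not something this listing states.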
Example #25
def solver_3(graph, demand, g=None, od=None, past=10, max_iter=100, eps=1e-16, \
    q=50, display=0, stop=1e-8):
    '''
    this is an adaptation of Fukushima's algorithm
    graph:    numpy array of the format [[link_id from to a0 a1 a2 a3 a4]]
    demand:   numpy array of the format [[o d flow]]
    g:        igraph object constructed from graph
    od:       od in the format {from: ([to], [rate])}
    past:     search direction is the mean over the last 'past' directions
    max_iter: maximum number of iterations
    eps:      used as a stopping criterion if some quantities are too close to 0
    q:        the first 'q' iterations use the open-loop step size 2/(i+2)
    display:  controls the display of information in the terminal
    stop:     stops the algorithm if the error is less than 'stop'
    '''

    assert past <= q, "'q' must be greater than or equal to 'past'"
    if g is None:
        g = construct_igraph(graph)
    if od is None:
        od = construct_od(demand)
    f = np.zeros(graph.shape[0],
                 dtype='float64')  # initial flow assignment is null
    fs = np.zeros((graph.shape[0], past),
                  dtype='float64')  # stores the last 'past' search directions
    h = defaultdict(np.float64)  # initial path flow assignment is null
    hs = defaultdict(
        lambda: [0.
                 for _ in range(past)])  # initial path flow assignment is null
    K = total_free_flow_cost(g, od)

    # fall back to total demand so the error normalization stays well-defined
    if K < eps:
        K = np.sum(demand[:, 2])
    elif display >= 1:
        print 'average free-flow travel time', K / np.sum(demand[:, 2])

    start_time = timeit.default_timer()
    for i in range(max_iter):

        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)

        # construct weighted graph with latest flow assignment
        L, grad, path_flows = search_direction(f, graph, g, od)

        fs[:, i % past] = L
        for k in set(h.keys()).union(set(path_flows.keys())):
            hs[k][i % past] = path_flows[k]
        w = L - f
        w_h = defaultdict(np.float64)
        for k in set(h.keys()).union(set(path_flows.keys())):
            w_h[k] = path_flows[k] - h[k]
        if i >= 1:
            error = -grad.dot(w) / K
            # if error < stop and error > 0.0:
            if error < stop:
                if display >= 1: print 'stop with error: {}'.format(error)
                return f, h
        if i > q:
            # step 3 of Fukushima
            v = np.sum(fs, axis=1) / min(past, i + 1) - f
            v_h = defaultdict(np.float64)
            for k in set(hs.keys()).union(set(path_flows.keys())):
                v_h[k] = sum(hs[k]) / min(past, i + 1) - h[k]
            norm_v = np.linalg.norm(v, 1)
            if norm_v < eps:
                if display >= 1: print 'stop with norm_v: {}'.format(norm_v)
                return f, h
            norm_w = np.linalg.norm(w, 1)
            if norm_w < eps:
                if display >= 1: print 'stop with norm_w: {}'.format(norm_w)
                return f, h
            # step 4 of Fukushima
            gamma_1 = grad.dot(v) / norm_v
            gamma_2 = grad.dot(w) / norm_w
            if gamma_2 > -eps:
                if display >= 1: print 'stop with gamma_2: {}'.format(gamma_2)
                return f, h
            d = v if gamma_1 < gamma_2 else w
            d_h = v_h if gamma_1 < gamma_2 else w_h
            # step 5 of Fukushima
            s = line_search(lambda a: potential(graph, f + a * d))
            if s < eps:
                if display >= 1: print 'stop with step_size: {}'.format(s)
                return f, h
            f = f + s * d
            for k in set(hs.keys()).union(set(path_flows.keys())):
                h[k] = h[k] + s * d_h[k]
        else:
            f = f + 2. * w / (i + 2.)
            for k in set(h.keys()).union(set(path_flows.keys())):
                h[k] = h[k] + 2. * (w_h[k]) / (i + 2.)
        print 'iteration', i
        print 'time(sec):', timeit.default_timer() - start_time
        print 'num path flows:', len(h)

        f_h = np.zeros(graph.shape[0],
                       dtype='float64')  # rebuild link flows from path flows
        for k in h:
            flow = h[k]
            for link in k[2]:
                f_h[link] += flow
        print "path vs link flow diff:", np.sum(np.abs(f_h - f)), f.shape

    # find how many paths each od pair really has
    od_paths = defaultdict(int)
    most_paths = 0
    for k in h.keys():
        od_paths[(k[:2])] += 1
        most_paths = max(most_paths, od_paths[(k[:2])])
    path_counts = [0 for i in range(most_paths + 1)]
    for k in od_paths.keys():
        path_counts[od_paths[k]] += 1
    for i in range(len(path_counts)):
        print i, path_counts[i]

    return f, h
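One detail worth making explicit: the path-flow keys k are tuples (origin, destination, edge sequence), which is why k[:2] recovers the OD pair and k[2] the edges. The f_h consistency check printed every iteration is then simply

\[
f^h_e = \sum_{(o,d,p)\,:\, e \in p} h_{(o,d,p)},
\]

which should track the link-flow vector f up to the accumulated error reported as "path vs link flow diff".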
Example #26
def solver_2(graph, demand, g=None, od=None, max_iter=100, eps=1e-8, q=10, \
    display=0, past=None, stop=1e-8):

    if g is None: g = construct_igraph(graph)
    if od is None: od = construct_od(demand)
    f = np.zeros(graph.shape[0],
                 dtype='float64')  # initial flow assignment is null
    h = defaultdict(np.float64)  # initial path flow assignment is null
    ls = max_iter / q  # every ls-th iteration runs an exact line search
    K = total_free_flow_cost(g, od)
    if K < eps:
        K = np.sum(demand[:, 2])
    elif display >= 1:
        print 'average free-flow travel time', K / np.sum(demand[:, 2])

    start_time = timeit.default_timer()
    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)
        # construct weighted graph with latest flow assignment
        L, grad, path_flows = search_direction(f, graph, g, od)
        if i >= 1:
            # w = f - L
            # norm_w = np.linalg.norm(w,1)
            # if norm_w < eps: return f, h
            error = grad.dot(f - L) / K
            if error < stop: return f, h
        # s = line_search(lambda a: potential(graph, (1.-a)*f+a*L)) if i>max_iter-q \
        #     else 2./(i+2.)
        s = line_search(lambda a: potential(graph, (1.-a)*f+a*L)) if i%ls==(ls-1) \
            else 2./(i+2.)
        if s < eps: return f, h
        f = (1. - s) * f + s * L
        for k in set(h.keys()).union(set(path_flows.keys())):
            h[k] = (1. - s) * h[k] + s * path_flows[k]
        print 'iteration', i
        print 'time(sec):', timeit.default_timer() - start_time
        print 'num path flows:', len(h)

        f_h = np.zeros(graph.shape[0],
                       dtype='float64')  # rebuild link flows from path flows
        for k in h:
            flow = h[k]
            for link in k[2]:
                f_h[link] += flow
        print "path vs link flow diff:", np.sum(np.abs(f_h - f)), f.shape

    # find how many paths each od pair really has
    od_paths = defaultdict(int)
    most_paths = 0
    for k in h.keys():
        od_paths[(k[:2])] += 1
        most_paths = max(most_paths, od_paths[(k[:2])])
    path_counts = [0 for i in range(most_paths + 1)]
    for k in od_paths.keys():
        path_counts[od_paths[k]] += 1
    for i in range(len(path_counts)):
        print i, path_counts[i]
    return f, h
Example #27
def fw_heterogeneous_1(graphs, demands, r, max_iter=100, eps=1e-8, q=None, \
    display=0, past=None, stop=1e-8):
    '''
    Frank-Wolfe algorithm on the heterogeneous game
    given a list of graphs in the format 
    g = [[link_id from to a0 a1 a2 a3 a4]]
    and demand in the format
    d = [[o d flow]]
    r = % app users
    '''
    # construct graph and demand objects suitable for AoN_igraph
    gs = [construct_igraph(graph) for graph in graphs]
    ods = [construct_od(demand) for demand in demands]
    # construct empty vector to be filled in with values
    links = graphs[0].shape[0]
    types = len(graphs)
    f = np.zeros(links * types,
                 dtype="float64")  # initial flow assignment is null
    L = np.zeros(links * types, dtype="float64")
    grad = np.zeros(links * types, dtype="float64")
    h = defaultdict(np.float64)  # initial path flow assignment is null
    # compute re-normalization constant
    K = sum([total_free_flow_cost(g, od) for g, od in zip(gs, ods)])
    if K < eps:
        K = sum([np.sum(demand[:, 2]) for demand in demands])
    elif display >= 1:
        print 'average free-flow travel time', \
            K / sum([np.sum(demand[:,2]) for demand in demands])

    # compute iterations
    start_time = timeit.default_timer()
    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)

        # construct weighted graph with latest flow assignment
        L, grad, path_flows = search_direction_multi(f, graphs, gs, ods, L,
                                                     grad)

        if i >= 1:
            error = grad.dot(f - L) / K
            if error < stop:
                return np.reshape(f, (types, links)).T, h, np.dot(
                    grad[:links],
                    np.sum(np.reshape(f, (types, links)).T, 1) -
                    np.sum(np.reshape(L, (types, links)).T, 1) * r)

        f = f + 2. * (L - f) / (i + 2.)
        # print type(h), type(path_flows)
        for k in set(h.keys()).union(set(path_flows.keys())):
            h[k] = h[k] + 2. * (path_flows[k] - h[k]) / (i + 2.)
        print 'iteration', i
        print 'time(sec):', timeit.default_timer() - start_time
        print 'num path flows:', len(h)

        f_h = np.zeros(links,
                       dtype='float64')  # rebuild link flows from path flows
        for k in h:
            flow = h[k]
            for link in k[2]:
                f_h[link] += flow
        print "path vs link flow diff:", np.sum(
            np.abs(f_h - np.sum(np.reshape(f, (types, links)).T, 1))), f.shape

    # find how many paths each od pair really has
    od_paths = defaultdict(int)
    most_paths = 0
    for k in h.keys():
        od_paths[(k[:2])] += 1
        most_paths = max(most_paths, od_paths[(k[:2])])
    path_counts = [0 for i in range(most_paths + 1)]
    for k in od_paths.keys():
        path_counts[od_paths[k]] += 1
    for i in range(len(path_counts)):
        print i, path_counts[i]

    L, grad, path_flows = search_direction_multi(f, graphs, gs, ods, L, grad)
    return np.reshape(f, (types, links)).T, h, np.dot(
        grad[:links],
        np.sum(np.reshape(f, (types, links)).T, 1) -
        np.sum(np.reshape(L, (types, links)).T, 1) * r)
def solver_social_optimum(graph, demand, g=None, od=None, past=10, max_iter=100, eps=1e-16, \
    q=50, display=0, stop=1e-8):
    '''
    this is an adaptation of Fukushima's algorithm
    graph:    numpy array of the format [[link_id from to a0 a1 a2 a3 a4]]
    demand:   numpy array of the format [[o d flow]]
    g:        igraph object constructed from graph
    od:       od in the format {from: ([to], [rate])}
    past:     search direction is the mean over the last 'past' directions
    max_iter: maximum number of iterations
    eps:      used as a stopping criterion if some quantities are too close to 0
    q:        the first 'q' iterations use the open-loop step size 2/(i+2)
    display:  controls the display of information in the terminal
    stop:     stops the algorithm if the error is less than 'stop'
    '''

    assert past <= q, "'q' must be greater than or equal to 'past'"
    if g is None:
        g = construct_igraph(graph)
    if od is None:
        od = construct_od(demand)
    f = np.zeros(graph.shape[0],
                 dtype="float64")  # initial flow assignment is null
    fs = np.zeros((graph.shape[0], past),
                  dtype="float64")  # stores the last 'past' search directions
    K = total_free_flow_cost(g, od)

    # fall back to total demand so the error normalization stays well-defined
    if K < eps:
        K = np.sum(demand[:, 2])
    elif display >= 1:
        print 'average free-flow travel time', K / np.sum(demand[:, 2])

    for i in range(max_iter):

        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)

        # search direction using social optimum
        L, grad = search_direction_social_optimum(f, graph, g, od)

        fs[:, i % past] = L
        w = L - f
        if i >= 1:
            error = -grad.dot(w) / K
            # if error < stop and error > 0.0:
            if error < stop:
                if display >= 1: print 'stop with error: {}'.format(error)
                #printing the final total cost
                average_cost = total_cost(graph, L, grad) / np.sum(demand[:, 2])
                print("average delay %s seconds" % average_cost)

                return f
        if i > q:
            # step 3 of Fukushima
            v = np.sum(fs, axis=1) / min(past, i + 1) - f
            norm_v = np.linalg.norm(v, 1)
            if norm_v < eps:
                if display >= 1: print 'stop with norm_v: {}'.format(norm_v)
                #printing the final total cost
                average_cost = total_cost(graph, L, grad) / np.sum(demand[:, 2])
                print("average delay %s seconds" % average_cost)
                return f
            norm_w = np.linalg.norm(w, 1)
            if norm_w < eps:
                if display >= 1: print 'stop with norm_w: {}'.format(norm_w)
                #printing the final total cost
                average_cost = total_cost(graph, L, grad) / np.sum(demand[:, 2])
                print("average delay %s seconds" % average_cost)

                return f
            # step 4 of Fukushima
            gamma_1 = grad.dot(v) / norm_v
            gamma_2 = grad.dot(w) / norm_w
            if gamma_2 > -eps:
                if display >= 1: print 'stop with gamma_2: {}'.format(gamma_2)
                average_cost = total_cost(graph, L, grad) / np.sum(demand[:, 2])
                print("average delay %s seconds" % average_cost)
                return f
            d = v if gamma_1 < gamma_2 else w
            # step 5 of Fukushima
            s = line_search(
                lambda a: potential_Social_Optimum(graph, f + a * d))
            if s < eps:
                if display >= 1: print 'stop with step_size: {}'.format(s)
                #printing the final total cost
                average_cost = total_cost(graph, L, grad) / np.sum(demand[:, 2])
                print("average delay %s seconds" % average_cost)
                return f
            f = f + s * d
        else:
            f = f + 2. * w / (i + 2.)

    return f
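solver_social_optimum is the same Fukushima scheme as solver_3, except that search_direction_social_optimum and potential_Social_Optimum presumably price each link at its marginal social cost instead of its latency, the standard substitution that turns the Frank-Wolfe user-equilibrium iteration into a system-optimum one:

\[
\tilde{c}_e(x) = c_e(x) + x\, c_e'(x) = \frac{d}{dx}\big(x\, c_e(x)\big).
\]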
def fw_heterogeneous_2(graphs, demands, past=10, max_iter=100, eps=1e-8, q=50, \
    display=0, stop=1e-8):
    '''
    Frank-Wolfe algorithm on the heterogeneous game
    given a list of graphs in the format 
    g = [[link_id from to a0 a1 a2 a3 a4]]
    and demand in the format
    d = [[o d flow]]
    '''
    assert past <= q, "'q' must be greater than or equal to 'past'"
    # construct graph and demand objects suitable for AoN_igraph
    gs = [construct_igraph(graph) for graph in graphs]
    ods = [construct_od(demand) for demand in demands]
    # construct empty vector to be filled in with values
    links = graphs[0].shape[0]
    types = len(graphs)
    f = np.zeros(links*types,dtype="float64") # initial flow assignment is null
    fs = np.zeros((links*types,past),dtype="float64")
    L = np.zeros(links*types,dtype="float64")
    grad = np.zeros(links*types,dtype="float64")
    L2 = np.zeros(links*types,dtype="float64")
    grad2 = np.zeros(links*types,dtype="float64")
    # compute re-normalization constant
    K = sum([total_free_flow_cost(g, od) for g,od in zip(gs, ods)])
    if K < eps:
        K = sum([np.sum(demand[:,2]) for demand in demands])
    elif display >= 1:
        print 'average free-flow travel time', \
            K / sum([np.sum(demand[:,2]) for demand in demands])
    # compute iterations
    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i+1)
            else:            
                print 'iteration: {}, error: {}'.format(i+1, error)
        # construct weighted graph with latest flow assignment
        #print 'f', f
        #print 'reshape', np.reshape(f,(links,types))
        total_f = np.sum(np.reshape(f,(types,links)).T,1)
        #print 'total flow', total_f
        for j, (graph, g, od) in enumerate(zip(graphs, gs, ods)):
            l, gr = search_direction(total_f, graph, g, od)
            L[(j*links) : ((j+1)*links)] = l
            grad[(j*links) : ((j+1)*links)] = gr
        #print 'L', L
        #print 'grad', grad
        fs[:,i%past] = L
        w = L - f
        if i >= 1:
            error = -grad.dot(w) / K
            # if error < stop and error > 0.0:
            if error < stop:
                if display >= 1: print 'stop with error: {}'.format(error)
                return np.reshape(f,(types,links)).T
        if i > q:
            # step 3 of Fukushima
            v = np.sum(fs,axis=1) / min(past,i+1) - f
            norm_v = np.linalg.norm(v,1)
            if norm_v < eps: 
                if display >= 1: print 'stop with norm_v: {}'.format(norm_v)
                return np.reshape(f,(types,links)).T
            norm_w = np.linalg.norm(w,1)
            if norm_w < eps: 
                if display >= 1: print 'stop with norm_w: {}'.format(norm_w)
                return np.reshape(f,(types,links)).T
            # step 4 of Fukushima
            gamma_1 = grad.dot(v) / norm_v
            gamma_2 = grad.dot(w) / norm_w
            if gamma_2 > -eps: 
                if display >= 1: print 'stop with gamma_2: {}'.format(gamma_2)
                return np.reshape(f,(types,links)).T
            d = v if gamma_1 < gamma_2 else w
            # step 5 of Fukushima
            s = line_search(lambda a: merit(f+a*d, graphs, gs, ods, L2, grad2))
            # print 'step', s
            if s < eps: 
                if display >= 1: print 'stop with step_size: {}'.format(s)
                return np.reshape(f,(types,links)).T
            f = f + s*d
        else:
            f = f + 2. * w/(i+2.)
    return np.reshape(f,(types,links)).T
Example #30
# whether or not to produce graphs
graph = False
# whether or not to plot specific graphs
graph_path_flow_vs_app_usage = True
graph_travel_time_vs_app_usage = True
graph_total_path_flow_vs_app_usage = True

# compile path flow data into single file focused on one OD pair
if compile_od_data:
    graph = np.loadtxt("data/LA_net.csv", skiprows=1, delimiter=',')
    graph[10787, -1] = graph[10787, -1] / (1.5**4)
    graph[3348, -1] = graph[3348, -1] / (1.2**4)
    demand = np.loadtxt('data/LA_od_2.csv', delimiter=',', skiprows=1)
    demand[:, 2] = 0.5 * demand[:, 2] / 4000
    # most_demand = np.argmax(demand[:, 2])
    od = construct_od(demand)

    # sort ODs based on demand
    argsorted = np.argsort(demand[:, 2])
    # print 'most_demand', argsorted[-1]

    # pick an OD to aggregate path data on, -1 is highest demand
    sorted_od = demand[argsorted[-3]]

    # nash distances
    nds = []
    # path flows for app users
    path_flows_x = defaultdict(
        lambda: np.array([0. for _ in range(granularity)]))
    # path flows for non app users
    path_flows_y = defaultdict(
        lambda: np.array([0. for _ in range(granularity)]))
Example #31
def fw_heterogeneous_2(graphs, demands, past=10, max_iter=100, eps=1e-8, q=50, \
    display=0, stop=1e-8):
    '''
    Frank-Wolfe algorithm on the heterogeneous game
    given a list of graphs in the format 
    g = [[link_id from to a0 a1 a2 a3 a4]]
    and demand in the format
    d = [[o d flow]]
    '''
    assert past <= q, "'q' must be greater than or equal to 'past'"
    # construct graph and demand objects suitable for AoN_igraph
    gs = [construct_igraph(graph) for graph in graphs]
    ods = [construct_od(demand) for demand in demands]
    # construct empty vector to be filled in with values
    links = graphs[0].shape[0]
    types = len(graphs)
    f = np.zeros(links * types,
                 dtype="float64")  # initial flow assignment is null
    fs = np.zeros((links * types, past), dtype="float64")
    L = np.zeros(links * types, dtype="float64")
    grad = np.zeros(links * types, dtype="float64")
    L2 = np.zeros(links * types, dtype="float64")
    grad2 = np.zeros(links * types, dtype="float64")
    h = defaultdict(np.float64)  # initial path flow assignment is null
    hs = defaultdict(
        lambda: [0.
                 for _ in range(past)])  # initial path flow assignment is null
    # compute re-normalization constant
    K = sum([total_free_flow_cost(g, od) for g, od in zip(gs, ods)])
    if K < eps:
        K = sum([np.sum(demand[:, 2]) for demand in demands])
    elif display >= 1:
        print 'average free-flow travel time', \
            K / sum([np.sum(demand[:,2]) for demand in demands])
    # compute iterations
    start_time = timeit.default_timer()
    for i in range(max_iter):
        if display >= 1:
            if i <= 1:
                print 'iteration: {}'.format(i + 1)
            else:
                print 'iteration: {}, error: {}'.format(i + 1, error)
        # construct weighted graph with latest flow assignment
        #print 'f', f
        #print 'reshape', np.reshape(f,(links,types))
        total_f = np.sum(np.reshape(f, (types, links)).T, 1)
        #print 'total flow', total_f
        L, grad, path_flows = search_direction_multi(f, graphs, gs, ods, L,
                                                     grad)
        #print 'L', L
        #print 'grad', grad
        fs[:, i % past] = L
        for k in set(h.keys()).union(set(path_flows.keys())):
            hs[k][i % past] = path_flows[k]
        w = L - f
        w_h = defaultdict(np.float64)
        for k in set(h.keys()).union(set(path_flows.keys())):
            w_h[k] = path_flows[k] - h[k]
        if i >= 1:
            error = -grad.dot(w) / K
            # if error < stop and error > 0.0:
            if error < stop:
                if display >= 1: print 'stop with error: {}'.format(error)
                return np.reshape(f, (types, links)).T
        if i > q:
            # step 3 of Fukushima
            v = np.sum(fs, axis=1) / min(past, i + 1) - f
            v_h = defaultdict(np.float64)
            for k in set(hs.keys()).union(set(path_flows.keys())):
                v_h[k] = sum(hs[k]) / min(past, i + 1) - h[k]
            norm_v = np.linalg.norm(v, 1)
            if norm_v < eps:
                if display >= 1: print 'stop with norm_v: {}'.format(norm_v)
                return np.reshape(f, (types, links)).T
            norm_w = np.linalg.norm(w, 1)
            if norm_w < eps:
                if display >= 1: print 'stop with norm_w: {}'.format(norm_w)
                return np.reshape(f, (types, links)).T
            # step 4 of Fukushima
            gamma_1 = grad.dot(v) / norm_v
            gamma_2 = grad.dot(w) / norm_w
            if gamma_2 > -eps:
                if display >= 1: print 'stop with gamma_2: {}'.format(gamma_2)
                return np.reshape(f, (types, links)).T
            d = v if gamma_1 < gamma_2 else w
            d_h = v_h if gamma_1 < gamma_2 else w_h
            # step 5 of Fukushima
            s = line_search(
                lambda a: merit(f + a * d, graphs, gs, ods, L2, grad2))
            # print 'step', s
            if s < eps:
                if display >= 1: print 'stop with step_size: {}'.format(s)
                return np.reshape(f, (types, links)).T
            f = f + s * d
            for k in set(hs.keys()).union(set(path_flows.keys())):
                h[k] = h[k] + s * d_h[k]
        else:
            f = f + 2. * w / (i + 2.)
            for k in set(h.keys()).union(set(path_flows.keys())):
                h[k] = h[k] + 2. * (w_h[k]) / (i + 2.)
        print 'iteration', i
        print 'time(sec):', timeit.default_timer() - start_time
        print 'num path flows:', len(h)

        f_h = np.zeros(links,
                       dtype='float64')  # rebuild link flows from path flows
        for k in h:
            flow = h[k]
            for link in k[2]:
                f_h[link] += flow
        print "path vs link flow diff:", np.sum(
            np.abs(f_h - np.sum(np.reshape(f, (types, links)).T, 1))), f.shape

    # find how many paths each od pair really has
    od_paths = defaultdict(int)
    most_paths = 0
    for k in h.keys():
        od_paths[(k[:2])] += 1
        most_paths = max(most_paths, od_paths[(k[:2])])
    path_counts = [0 for i in range(most_paths + 1)]
    for k in od_paths.keys():
        path_counts[od_paths[k]] += 1
    for i in range(len(path_counts)):
        print i, path_counts[i]

    return np.reshape(f, (types, links)).T