Code Example #1
def main():
    parser = get_parser()
    parser.add_argument('--degree', type=int, default=4, help='node degree')
    parser.add_argument('--graphs', '-g', type=int, default=1, help='number of random graphs to generate and use')

    args = parser.parse_args()

    options = get_options(args)
    get_layouts(args.layout, options)
    options['type'] = 'random'
    options['graphs'] = args.graphs
    options['degree'] = args.degree

    logging.basicConfig(level=options['log_level'])

    graphs = create_random(options)
    options['graph'] = graphs
    graphToNED(options)
    graphToCSV(options)
    if args.graph_only:
        sys.exit(0)
    write_header(options)

    # repeat each (ps, pb) tuple once per graph, then pair tuples with graphs
    ptuples = get_tuples(options)
    ptuples = numpy.array(ptuples).repeat(len(graphs), axis=0)
    params = zip(graphs * len(ptuples), ptuples)

    # worker pool plus a manager-backed lock that is passed along to every run
    pool = Pool(processes=options['processes'])
    manager = Manager()
    lock = manager.Lock()

    data = [(G, options, i, (ps, pb), lock) for i, (G, (ps, pb)) in enumerate(params)]
    run_process(pool, data, options)
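The repeat()/zip() idiom above pairs every (ps, pb) probability tuple with every generated graph: repeat() duplicates each tuple once per graph, and multiplying the graph list cycles the graphs so that zip() lines them up. A minimal, self-contained sketch of that pairing, with placeholder strings standing in for the graph objects and hard-coded tuples standing in for get_tuples():

import numpy

graphs = ['G0', 'G1']                # stand-ins for two generated graphs
ptuples = [(0.1, 0.2), (0.5, 0.6)]   # stand-ins for the (ps, pb) tuples

# each tuple is repeated once per graph: (0.1, 0.2), (0.1, 0.2), (0.5, 0.6), (0.5, 0.6)
ptuples = numpy.array(ptuples).repeat(len(graphs), axis=0)

# cycling the graph list makes zip() pair every repeated tuple with a graph
params = zip(graphs * len(ptuples), ptuples)
for i, (G, (ps, pb)) in enumerate(params):
    print(i, G, ps, pb)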
Code Example #2
def main():
    parser = get_parser()
    parser.add_argument('--dimensions', '-d', type=int, default=2, help='dimension of the grid')
    args = parser.parse_args()

    options = get_options(args)
    get_layouts(args.layout, options)
    options['type'] = 'grid'
    options['graphs'] = 1
    options['dimensions'] = args.dimensions

    logging.basicConfig(level=options['log_level'])

    graphs = create_grid(options)
    options['graph'] = graphs
    graphToNED(options)
    graphToCSV(options)
    if args.graph_only:
        sys.exit(0)
    write_header(options)

    pool = Pool(processes=options['processes'])
    manager = Manager()
    lock = manager.Lock()
    # one work item per parameter tuple, all on the single grid graph
    data = [(graphs[0], options, i, ptuple, lock) for i, ptuple in enumerate(get_tuples(options))]
    run_process(pool, data, options)
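A Manager().Lock() is used instead of a plain multiprocessing.Lock() because the manager's lock proxy can be pickled and shipped to Pool workers inside each work tuple. run_process() itself is not shown in these examples; the following is only a hypothetical sketch of how such a function could hand the work items to the pool (the worker function and its body are assumptions, not the project's code):

from multiprocessing import Pool, Manager

def worker(item):
    # unpack one work item as built in the examples above
    G, options, i, ptuple, lock = item
    with lock:  # serialize anything that must not interleave, e.g. writing results
        print('run', i, ptuple)

def run_process_sketch(pool, data, options):
    # map the worker over the work items and wait for completion
    pool.map(worker, data)
    pool.close()
    pool.join()

if __name__ == '__main__':
    pool = Pool(processes=2)
    lock = Manager().Lock()
    data = [('G', {}, i, (0.1, 0.2), lock) for i in range(4)]
    run_process_sketch(pool, data, None)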
Code Example #3
def main():
    parser = get_parser()
    parser.add_argument('--db', help='topology database')
    parser.add_argument('--size',
                        '-b',
                        type=int,
                        default=100,
                        help='packet size; determines the graph in the db')
    parser.add_argument('--mode',
                        default='pb',
                        help='[pb, uu, du, dw] (default=pb)')

    args = parser.parse_args()

    options = get_options(args)
    get_layouts(args.layout, options)
    options['type'] = 'DES-Testbed'
    options['db'] = args.db
    options['size'] = args.size
    options['mode'] = args.mode

    logging.basicConfig(level=options['log_level'])

    logging.info('connecting to database')
    db = options['db']
    # check first: sqlite3.connect() would silently create an empty database file
    if not os.path.exists(db):
        logging.critical('Database file %s does not exist' % db)
        sys.exit(2)
    options['db_conn'] = sqlite3.connect(db)
    graphs = create_graph_db(options)
    options['graph'] = graphs
    options['graphs'] = 1
    graphToNED(options)
    graphToCSV(options)
    if args.graph_only:
        sys.exit(0)
    write_header(options)

    pool = Pool(processes=options['processes'])
    manager = Manager()
    lock = manager.Lock()
    data = [(graphs[0], options, i, ptuple, lock)
            for i, ptuple in enumerate(get_tuples(options))]
    logging.info('Starting percolation simulation')
    run_process(pool, data, options)
Code Example #4
def main():
    parser = get_parser()
    parser.add_argument('--files',
                        '-f',
                        nargs='+',
                        default=[],
                        help='provide graphs as file')
    args = parser.parse_args()

    options = get_options(args)
    get_layouts(args.layout, options)
    options['type'] = 'files'
    # keys other graph types would fill in; not applicable when graphs come from files
    options['nodes'] = None
    options['degree'] = None
    options['db'] = None
    options['size'] = None
    options['files'] = args.files
    logging.basicConfig(level=options['log_level'])

    if len(options['files']) == 0:
        logging.critical('no files specified: aborting')
        sys.exit(1)

    graphs = parse_files(options)
    options['graph'] = graphs
    options['graphs'] = len(graphs)
    graphToNED(options)
    graphToCSV(options)
    write_header(options)

    ptuples = get_tuples(options)
    ptuples = numpy.array(ptuples).repeat(len(graphs), axis=0)
    params = zip(graphs * len(ptuples), ptuples)

    pool = Pool(processes=options['processes'])
    manager = Manager()
    lock = manager.Lock()

    data = [(G, options, i, (ps, pb), lock)
            for i, (G, (ps, pb)) in enumerate(params)]
    run_process(pool, data, options)
Code Example #5
def main():
    parser = get_parser()
    parser.add_argument('--db', help='topology database')
    parser.add_argument('--size', '-b', type=int, default=100, help='packet size; determines the graph in the db')
    parser.add_argument('--mode', default='pb', help='[pb, uu, du, dw] (default=pb)')

    args = parser.parse_args()

    options = get_options(args)
    get_layouts(args.layout, options)
    options['type'] = 'DES-Testbed'
    options['db'] = args.db
    options['size'] = args.size
    options['mode'] = args.mode

    logging.basicConfig(level=options['log_level'])

    logging.info('connecting to database')
    db = options['db']
    # check first: sqlite3.connect() would silently create an empty database file
    if not os.path.exists(db):
        logging.critical('Database file %s does not exist' % db)
        sys.exit(2)
    options['db_conn'] = sqlite3.connect(db)
    graphs = create_graph_db(options)
    options['graph'] = graphs
    options['graphs'] = 1
    graphToNED(options)
    graphToCSV(options)
    if args.graph_only:
        sys.exit(0)
    write_header(options)

    pool = Pool(processes=options['processes'])
    manager = Manager()
    lock = manager.Lock()
    data = [(graphs[0], options, i, ptuple, lock) for i, ptuple in enumerate(get_tuples(options))]
    logging.info('Starting percolation simulation')
    run_process(pool, data, options)
Code Example #6
def main():
    parser = get_parser()
    parser.add_argument('--degree', type=int, default=4, help='node degree')
    parser.add_argument('--graphs',
                        '-g',
                        type=int,
                        default=1,
                        help='number of random graphs to generate and use')

    args = parser.parse_args()

    options = get_options(args)
    get_layouts(args.layout, options)
    options['type'] = 'random regular'
    options['degree'] = args.degree
    options['graphs'] = args.graphs

    logging.basicConfig(level=options['log_level'])

    graphs = create_random_regular(options)
    options['graph'] = graphs
    graphToNED(options)
    graphToCSV(options)
    if args.graph_only:
        sys.exit(0)
    write_header(options)

    ptuples = get_tuples(options)
    ptuples = numpy.array(ptuples).repeat(len(graphs), axis=0)
    params = zip(graphs * len(ptuples), ptuples)

    pool = Pool(processes=options['processes'])
    manager = Manager()
    lock = manager.Lock()

    data = [(G, options, i, (ps, pb), lock)
            for i, (G, (ps, pb)) in enumerate(params)]
    run_process(pool, data, options)
Code Example #7
def main():
    parser = get_parser()
    parser.add_argument('--range', type=float, default=250.0, help='radio range for nodes in unit-square')
    parser.add_argument('--size', type=float, default=1000.0, help='physical length of the unit-square')
    parser.add_argument('--dim', type=int, default=2, help='dimension of the graph')
    parser.add_argument('--graphs', '-g', type=int, default=1, help='number of random graphs to generate and use')

    args = parser.parse_args()

    options = get_options(args)
    get_layouts(args.layout, options)
    options['type'] = 'random geometric'
    options['graphs'] = args.graphs
    options['range'] = args.range
    options['dim'] = args.dim
    options['size'] = args.size

    logging.basicConfig(level=options['log_level'])

    graphs = create_random_geometric(options)
    options['graph'] = graphs
    graphToNED(options)
    graphToCSV(options)
    graphToNEDMixim(options)
    if args.graph_only:
        sys.exit(0)
    write_header(options)

    ptuples = get_tuples(options)
    ptuples = numpy.array(ptuples).repeat(len(graphs), axis=0)
    params = zip(graphs * len(ptuples), ptuples)

    pool = Pool(processes=options['processes'])
    manager = Manager()
    lock = manager.Lock()

    data = [(G, options, i, (ps, pb), lock) for i, (G, (ps, pb)) in enumerate(params)]
    run_process(pool, data, options)
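create_random_geometric() is not shown here; presumably it builds on something like networkx.random_geometric_graph, with the connection radius derived from the '--range' and '--size' options. A purely hypothetical sketch under that assumption (the radius scaling and the use of options['nodes'] from the base parser are guesses, not the project's code):

import networkx as nx

def create_random_geometric_sketch(options):
    # assumed scaling: map the physical radio range onto the unit square
    radius = options['range'] / options['size']
    return [nx.random_geometric_graph(options['nodes'], radius, dim=options['dim'])
            for _ in range(options['graphs'])]

graphs = create_random_geometric_sketch({'nodes': 50, 'range': 250.0,
                                         'size': 1000.0, 'dim': 2, 'graphs': 1})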
Code Example #8
def main():
    parser = get_parser()
    parser.add_argument('--files', '-f', nargs='+', default=[], help='provide graphs as file')
    args = parser.parse_args()

    options = get_options(args)
    get_layouts(args.layout, options)
    options['type'] = 'files'
    # keys other graph types would fill in; not applicable when graphs come from files
    options['nodes'] = None
    options['degree'] = None
    options['db'] = None
    options['size'] = None
    options['files'] = args.files
    logging.basicConfig(level=options['log_level'])

    if len(options['files']) == 0:
        logging.critical('no files specified: aborting')
        sys.exit(1)

    graphs = parse_files(options)
    options['graph'] = graphs
    options['graphs'] = len(graphs)
    graphToNED(options)
    graphToCSV(options)
    write_header(options)

    ptuples = get_tuples(options)
    ptuples = numpy.array(ptuples).repeat(len(graphs), axis=0)
    params = zip(graphs * len(ptuples), ptuples)

    pool = Pool(processes=options['processes'])
    manager = Manager()
    lock = manager.Lock()

    data = [(G, options, i, (ps, pb), lock) for i, (G, (ps, pb)) in enumerate(params)]
    run_process(pool, data, options)