help='Time to monotir inventory after flows are configured in seconds; default=60')
    parser.add_argument('--deconfig_monitor', type=int, default=60,
                        help='Time to monitor inventory after flows are deconfigured in seconds; default=60')
    parser.add_argument('--monitor_period', type=int, default=10,
                        help='Monitor period of triggering inventory crawler in seconds; default=10')
    parser.add_argument('--monitor_outfile', default=None, help='Output file(if specified)')

    in_args = parser.parse_args()

    # Initialize: load the flow template from the JSON file when one was
    # supplied, otherwise fall back to the blaster's built-in template.
    if in_args.file != '':
        flow_template = get_json_from_file(in_args.file)
    else:
        flow_template = None

    # Crawler reads the 'operational' store; blaster programs the flows.
    ic = InventoryCrawler(in_args.host, in_args.port, 0, 'operational', in_args.auth, False)

    fct = FlowConfigBlaster(in_args.host, in_args.port, in_args.cycles, in_args.threads, in_args.fpr,
                            16, in_args.flows, in_args.startflow, in_args.auth)
    # Get the baseline stats. Required in Step 3 to validate if the delete
    # function gets the controller back to the baseline
    ic.crawl_inventory()
    reported = ic.reported_flows
    found = ic.found_flows

    # These counters come from reported_flows/found_flows, so label them as
    # flows (the original labels said "nodes", which was misleading).
    # print(...) with a single argument behaves identically under Python 2.
    print('Baseline:')
    print('   Reported flows: %d' % reported)
    print('   Found flows:    %d' % found)

    # Seed the stats series with the baseline sample.
    stats = [(time.time(), ic.nodes, ic.reported_flows, ic.found_flows)]
# Example #2
    parser.add_argument(
        '--auth',
        dest='auth',
        action='store_true',
        help=
        "Use authenticated access to REST (username: '******', password: '******'); default=False"
    )
    parser.add_argument('--startflow',
                        type=int,
                        default=0,
                        help='The starting Flow ID; default=0')

    in_args = parser.parse_args()

    # Initialize: crawler reads the 'operational' store; blaster programs
    # flows into the controller using the JSON_FLOW_MOD1 template.
    ic = InventoryCrawler(in_args.host, in_args.port, 0, 'operational',
                          in_args.auth, False)

    fct = FlowConfigBlaster(in_args.host, in_args.port, in_args.cycles,
                            in_args.threads, in_args.nodes, in_args.flows,
                            in_args.startflow, in_args.auth, JSON_FLOW_MOD1)

    # Get baseline stats; used later to verify that deleting the flows
    # returns the controller to this state.
    ic.crawl_inventory()
    reported = ic.reported_flows
    found = ic.found_flows

    # These counters come from reported_flows/found_flows, so label them as
    # flows (the original labels said "nodes", which was misleading).
    # print(...) with a single argument behaves identically under Python 2.
    print('Baseline:')
    print('   Reported flows: %d' % reported)
    print('   Found flows:    %d' % found)

    # Run through <cycles>, where <threads> are started in each cycle and <flows> are added from each thread