# Standard-library modules used by the examples below. Project-specific
# helpers (Pointer_HT, Pointer_Poisson, MinlanAlgo, construct, read_hhh_nodes,
# get_constant, get_pntr, split_tree, load_lambdas, findHHH, generate_traffic,
# generate_ht, error_function, precision, recall) are assumed to be imported
# from the surrounding package.
import collections
import logging
import math


def main():
    infile = 'traffic_8nodes_ht.txt'
    L = 3
    # Parameters
    p_max = 1.0 - 1.0/float(2**(1.0/3))
    p0 = 0.9*p_max
    error, eta = 0.4, 25
    b, u = 2, 810
    # Scale for test functions
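    # Fval transcribes 4*u**(1/b) * (log(2*1**3/p0) / 1)**((b-1)/b); the two
    # 1.0 factors appear to be placeholders for level/time terms fixed at 1
    # here. cF is chosen so that cF*Fval equals the detection threshold eta.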
    Fval = 4*u**(1/float(b))*(math.log(2.0*1.0**3/float(p0))/float(1.0))**((b-1)/float(b))
    cF = eta/float(Fval)
    # Sorted leaf tags
    sorted_leaf_tags = [(L, idx) for idx in range(8)]

    # Instantiate a pointer
    l, k = 0, 0
    #scale = 3.0*L*get_constant(p0)
    scale = 1.0

    pntr = Pointer_HT(l, k, L, p0, eta, error, scale, logging.DEBUG, b, u, cF)
    time_interval = 0

    with open(infile, 'rb') as ff:
        for line in ff:
            time_interval += 1
            values = [int(k) for k in line.split(',')]
            dvalues = construct(values, sorted_leaf_tags)
            pntr.run(dvalues)
            if not pntr.isActive():
                break
    print "Time interval: %d" % time_interval
def run_one_instance(tHHH, infile, L, sorted_leaf_tags, eta, Smax,
                     logging_level):
    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    monitor = MinlanAlgo(L, eta, Smax, logging_level)
    time_interval, ret = 0, []
    with open(infile, 'rb') as ff:
        for line in ff:
            time_interval += 1
            values = [int(k) for k in line.rstrip().split(',')]
            dvalues = construct(values, sorted_leaf_tags)
            hhh_nodes = monitor.run(dvalues)

            mHHH = sorted([(node.l, node.k) for node in hhh_nodes],
                          key=lambda x: x[0],
                          reverse=True)
            o = error_function(mHHH, tHHH)
            p = precision(mHHH, tHHH)
            r = recall(mHHH, tHHH)
            ret.append([o, p, r])
            logger.debug(
                "Level %d, At time interval %d, Error_function %d, Precision %f, Recall %f",
                L, time_interval, o, p, r)
            logger.debug("True HHHes: %s", ', '.join([str(k) for k in tHHH]))
            logger.debug("Measured HHHes: %s",
                         ', '.join([str(k) for k in mHHH]))
    return ret
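
# Illustrative usage of run_one_instance() above; the true-HHH list and the
# averaging at the end are hypothetical, not values or code from the repo.
def example_drive_minlan():
    L = 3
    sorted_leaf_tags = [(L, idx) for idx in range(8)]
    tHHH = [(0, 0)]  # hypothetical true-HHH set
    eta, Smax = 25, 2
    results = run_one_instance(tHHH, 'traffic_8nodes_poisson.txt', L,
                               sorted_leaf_tags, eta, Smax, logging.WARNING)
    # results holds [error_function, precision, recall] per time interval.
    if results:
        avg_p = sum(r[1] for r in results) / float(len(results))
        avg_r = sum(r[2] for r in results) / float(len(results))
        print "Avg precision %f, recall %f" % (avg_p, avg_r)
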
def main():
    infile = 'traffic_8nodes_poisson.txt'
    L = 3
    # Parameters
    p_max = 1.0 - 1.0 / float(2**(1.0 / 3))
    p0 = 0.9 * p_max
    error, eta = 0.4, 25
    xi = 1.0

    # Sorted leaf tags
    sorted_leaf_tags = [(L, idx) for idx in range(8)]

    # Instantiate a pointer
    #scale = 3.0*L*get_constant(p0)
    scale = 1.0
    l, k = 0, 0
    pntr = Pointer_Poisson(l, k, L, p0, eta, error, scale, logging.DEBUG, xi)
    time_interval = 0

    with open(infile, 'rb') as ff:
        for line in ff:
            time_interval += 1
            values = [int(k) for k in line.split(',')]
            dvalues = construct(values, sorted_leaf_tags)
            pntr.run(dvalues)
            if not pntr.isActive():
                break
    print "Time interval: %d" % time_interval
def run_one_instance(infile, L, leaf_tags, name, p0, eta, error, scale,
                     logging_level, T, *args, **kwargs):
    #infile = 'traffic_tmp.txt'
    # Maintain a list of Pointer objects
    pntrs = collections.deque()

    # Instantiate initial pointers
    starting_nodes = split_tree(L, T)
    for idx, (l, k) in enumerate(starting_nodes):
        pntr = get_pntr(name, l, k, L, p0, eta, error, scale, logging_level,
                        idx, *args, **kwargs)
        #pntr = Pointer_Poisson(l, k, L, p0, eta, error_0, xi, logging.WARNING, idx)
        pntrs.append(pntr)
    time_interval = 0

    # Keep monitoring detected HHH nodes
    # key: (l,k), val: newNode object
    HHH_nodes = {}

    isActive = True
    with open(infile, 'rb') as ff:
        for line in ff:
            time_interval += 1
            values = [int(k) for k in line.split(',')]
            dvalues = construct(values, leaf_tags)

            # Keep monitoring detected HHH nodes
            read_hhh_nodes(dvalues, HHH_nodes)

            # Moving the pointers
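            # Each pointer is popped, stepped once, and re-queued; a pointer
            # that has just converged on a new HHH node is not re-queued, so
            # it leaves the rotation and keeps monitoring that node (below).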
            length = len(pntrs)
            for i in range(length):
                pntr = pntrs.popleft()
                hhh_node = pntr.run(dvalues, HHH_nodes)

                # If found a new HHH node, add it to the HHH_nodes set.
                if hhh_node and (hhh_node.l, hhh_node.k) not in HHH_nodes:
                    HHH_nodes[(hhh_node.l, hhh_node.k)] = hhh_node
                    # Assign this pointer to keep monitoring this HHH node
                else:
                    pntrs.append(pntr)

                if not pntr.isActive():
                    isActive = False
                    break
            if not isActive:
                break
    return [HHH_nodes, time_interval]


def main():
    infile = 'traffic_8nodes_poisson_multi.txt'
    eta = 25
    L = 3

    # Sorted leaf tags
    sorted_leaf_tags = [(L, idx) for idx in range(8)]

    monitor = MinlanAlgo(L, eta, 2, logging.DEBUG)
    time_interval = 0
    with open(infile, 'rb') as ff:
        for line in ff:
            time_interval += 1
            values = [int(k) for k in line.split(',')]
            dvalues = construct(values, sorted_leaf_tags)
            monitor.run(dvalues)
def main():
    infile = 'traffic_8nodes_ht_multi.txt'
    L = 3
    # Parameters
    p_max = 1.0 - 1.0 / float(2**(1.0 / 3))
    p0 = 0.9 * p_max
    error, eta = 0.4, 25
    b, u = 2.0, 500
    # Scale for test functions
    Fval = 4 * u**(1 / float(b)) * (math.log(2.0 * 1.0**3 / float(p0)) /
                                    float(1.0))**((b - 1) / float(b))
    cF = eta / float(Fval) * 1.02
    cS = 3.0
    Smax = 2
    error_0 = error / float(2.0 * Smax)

    # Instantiate a pointer
    scale = 3.0 * L * get_constant(p0)
    #scale = 1.0
    l, k = 0, 0
    pntr = Pointer_HT(l, k, L, p0, eta, error_0, scale, logging.DEBUG, 0, b, u,
                      cF, cS)
    time_interval = 0

    # Sorted leaf tags
    sorted_leaf_tags = [(L, idx) for idx in range(8)]

    # Keep monitoring detected HHH nodes
    # key: (l,k), val: newNode object
    HHH_nodes = {}

    with open(infile, 'rb') as ff:
        for line in ff:
            time_interval += 1
            values = [int(k) for k in line.split(',')]
            dvalues = construct(values, sorted_leaf_tags)

            # Keep monitoring detected HHH nodes
            read_hhh_nodes(dvalues, HHH_nodes)
            hhh_node = pntr.run(dvalues, HHH_nodes)
            # If found a new HHH node, add it to the HHH_nodes set.
            if hhh_node:
                HHH_nodes[(hhh_node.l, hhh_node.k)] = hhh_node
                print HHH_nodes
            if not pntr.isActive():
                break
    print "Time interval: %d" % time_interval


def loop_inner(L, infile, iters):
    # Logger
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    # leaves: A dictionary with key = (l,k), value = lambda
    leaves = load_lambdas(infile)
    # Preprocess lambda range
    for k in leaves:
        if leaves[k] > 1000:
            leaves[k] /= 1000
    total_count = sum([leaves[k] for k in leaves])

    # Find set of true HHHes based on leaf-node lambdas.
    ratio = 0.05
    eta = total_count * ratio
    hhh_nodes = findHHH(leaves, eta)
    Smax = len(hhh_nodes)
    tHHH = [(node.l, node.k) for node in hhh_nodes]
    tHHH = sorted(tHHH, key=lambda x: x[0], reverse=True)

    tHHH_vals = [((node.l, node.k), node.val) for node in hhh_nodes]
    tHHH_vals = sorted(tHHH_vals, key=lambda x: x[0][0], reverse=True)
    #logger.info("True HHHes: %s", \
    #        ','.join(str(k) for k in tHHH))

    # Run Algorithms.
    sorted_leaf_tags = sorted(leaves.keys(), key=lambda x: x[1])
    lambda_lists = [leaves[k] for k in sorted_leaf_tags]

    for i in range(iters):
        # Generate one realization of traffic under Poisson distribution
        # leaf node lambdas are set as above.
        generate_traffic(lambda_lists, 1000)
        """ RWCB ALGO API """
        # Parameters
        p_max = 1.0 - 1.0 / float(2**(1.0 / 3))
        p0 = 0.9 * p_max
        error, xi = 0.4, 1.0
        error_0 = error / float(2.0 * Smax)

        # Instantiate a pointer
        scale = 3.0 * L * get_constant(p0)
        #scale = 1.0
        l, k = 0, 0
        pntr = Pointer_Poisson(l, k, L, p0, eta, error_0, scale, logging.DEBUG,
                               0, xi)
        time_interval = 0

        # Keep monitoring detected HHH nodes
        # key: (l,k), val: newNode object
        HHH_nodes = {}
        infile = "traffic_tmp.txt"

        with open(infile, 'rb') as ff:
            for line in ff:
                time_interval += 1
                values = [int(k) for k in line.split(',')]
                dvalues = construct(values, sorted_leaf_tags)

                # Keep monitoring detected HHH nodes
                read_hhh_nodes(dvalues, HHH_nodes)
                hhh_node = pntr.run(dvalues, HHH_nodes)
                # If found a new HHH node, add it to the HHH_nodes set.
                if hhh_node:
                    HHH_nodes[(hhh_node.l, hhh_node.k)] = hhh_node
                    print HHH_nodes
                if not pntr.isActive():
                    break
        print "Time interval: %d" % time_interval

        # Calculating metrics
        mHHH = sorted(HHH_nodes.keys(), key=lambda x: x[0], reverse=True)
        mHHH_vals = [(k, HHH_nodes[k].x_mean_net) for k in HHH_nodes]
        mHHH_vals = sorted(mHHH_vals, key=lambda x: x[0][0], reverse=True)
        print "True HHHes: ", tHHH_vals
        print "Measured HHHes: ", mHHH_vals

        p, r = precision(mHHH, tHHH), recall(mHHH, tHHH)
        print "Iter {0}, Level {1}: Time interval {2}, Precision {3}, Recall {4}".format(
            i, L, time_interval, p, r)
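
# For reference, a minimal sketch of set-based precision/recall helpers
# consistent with how they are called above; the project's own precision()
# and recall() live elsewhere and may differ in detail.
def precision_sketch(measured, true):
    """Fraction of measured HHH tags that are also true HHH tags."""
    if not measured:
        return 0.0
    return len(set(measured) & set(true)) / float(len(measured))

def recall_sketch(measured, true):
    """Fraction of true HHH tags that were measured."""
    if not true:
        return 0.0
    return len(set(measured) & set(true)) / float(len(true))
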
def loop_inner(L, infile, iters):
    # Logger
    logger = logging.getLogger()
    logger.setLevel(logging.INFO)

    # leaves: A dictionary with key = (l,k), value = lambda
    leaves = load_lambdas(infile)
    # Preprocess lambda range
    for k in leaves:
        if leaves[k] > 1000:
            leaves[k] /= 1000
    total_count = sum([leaves[k] for k in leaves])

    # Find set of true HHHes based on leaf-node lambdas.
    ratio = 0.05
    eta = total_count * ratio
    hhh_nodes = findHHH(leaves, eta)
    Smax = len(hhh_nodes)
    tHHH = [(node.l, node.k) for node in hhh_nodes]
    tHHH = sorted(tHHH, key=lambda x: x[0], reverse=True)

    tHHH_vals = [((node.l, node.k), node.val) for node in hhh_nodes]
    tHHH_vals = sorted(tHHH_vals, key=lambda x: x[0][0], reverse=True)
    print tHHH
    print tHHH_vals
    #logger.info("True HHHes: %s", \
    #        ','.join(str(k) for k in tHHH))

    # Run Algorithms.
    sorted_leaf_tags = sorted(leaves.keys(), key=lambda x: x[1])
    lambda_lists = [leaves[k] for k in sorted_leaf_tags]

    # Calculate for parameter u
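    # For a lognormal variate X with parameters (mu, sigma), E[X] = exp(mu + sigma^2/2)
    # and E[X^2] = exp(2*mu + 2*sigma^2). Choosing mu = log(lambda) - sigma^2/2 makes
    # E[X] = lambda, so the loop below sets u to the largest leaf second moment.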
    sigma2 = 0.70
    u = 0
    for curr_l in lambda_lists:
        sigma = math.sqrt(sigma2)
        mu = math.log(curr_l)-(sigma**2)/2.0
        tmp = math.exp(2.0*mu + 2.0*sigma**2)
        if u < tmp:
            u = tmp
    print "u: ", u
    # Parameters
    p_max = 1.0 - 1.0/float(2**(1.0/3))
    p0 = 0.9*p_max
    b = 2.0
    # Scale for test functions
    Fval = 4*u**(1/float(b))*(math.log(2.0*1.0**3/float(p0))/float(1.0))**((b-1)/float(b))
    cF = eta/float(Fval)*1.05
    # Scale for truncated threshold
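    # threshold transcribes (u*t / log(1/p0))**(1/b) with the t factor fixed at
    # 1 here; cS is then set so that cS*threshold equals half the total count.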
    threshold = (u*float(1.0)/math.log(1.0/float(p0)))**(1.0/float(b))
    cS = total_count/float(threshold)/2.0
    print "cF, cS", cF, cS

    for i in range(iters):
        # Generate one realization of traffic under Poisson distribution
        # leaf node lambdas are set as above.
        generate_ht(lambda_lists, sigma2, 5000)

        """ RWCB ALGO API """
        # Parameters
        error = 0.4
        error_0 = error/float(2.0*Smax)

        # Instantiate a pointer
        scale = 3.0*L*get_constant(p0)
        #scale = 1.0
        l, k = 0, 0
        pntr = Pointer_HT(l, k, L, p0, eta, error_0, scale, logging.DEBUG, 0, b, u, cF, cS)
        time_interval = 0

        # Keep monitoring detected HHH nodes
        # key: (l,k), val: newNode object
        HHH_nodes = {}
        infile = "traffic_tmp.txt"

        with open(infile, 'rb') as ff:
            for line in ff:
                time_interval += 1
                values = [int(k) for k in line.split(',')]
                dvalues = construct(values, sorted_leaf_tags)

                # Keep monitoring detected HHH nodes
                read_hhh_nodes(dvalues, HHH_nodes)
                hhh_node = pntr.run(dvalues, HHH_nodes)
                # If found a new HHH node, add it to the HHH_nodes set.
                if hhh_node:
                    HHH_nodes[(hhh_node.l, hhh_node.k)] = hhh_node
                    print HHH_nodes
                if not pntr.isActive():
                    break
        print "Time interval: %d" % time_interval

        # Calculating metrics
        mHHH = sorted(HHH_nodes.keys(), key=lambda x: x[0], reverse=True)
        mHHH_vals = [(k, HHH_nodes[k].x_mean_net) for k in HHH_nodes]
        mHHH_vals = sorted(mHHH_vals, key=lambda x: x[0][0], reverse=True)
        print "True HHHes: ", tHHH_vals
        print "Measured HHHes: ", mHHH_vals

        p, r = precision(mHHH, tHHH), recall(mHHH, tHHH)
        print "Iter {0}, Level {1}: Time interval {2}, Precision {3}, Recall {4}".format(i, L, time_interval, p, r)