Example #1
from functools import reduce  # needed under Python 3


def get_sim_pkt(data):
    """
    1. Determine the number of packets broadcast across all nodes
    2. Determine the total number of packets received by each node
    3. Determine, for each node, which packets were lost
    """
    broadcasts = [set(bcast) for bcast in dao.select(data, BROADCAST_ENTRY)]
    pkt_tx = reduce(lambda x, y: x.union(y), broadcasts)
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, pkt_rx>
    # if node n has flood entry f, then it received at least one packet
    # during that flood
    pkt_rx = {node: set(dao.select(nodes_data[node], SEQNO_ATTR))
              for node in nodes_data}

    not_received = {}  # one entry per node, filled below
    for nid, rx in pkt_rx.items():
        if not rx.issubset(pkt_tx):
            msg = ("Received packets are not a subset of those sent "
                   "in node %s" % str(nid))
            logger.error(msg)
            raise DAException(msg)
        not_received[nid] = pkt_tx.difference(rx)

    nodes_pkt = {node_id: len(rx) for node_id, rx in pkt_rx.items()}
    return len(pkt_tx), nodes_pkt, not_received
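A minimal sketch of the set logic at the core of get_sim_pkt, with made-up sequence numbers and node ids; a lost packet is simply one that was broadcast but never received:

pkt_tx = {1, 2, 3, 4}                            # union of all broadcast seqnos
pkt_rx = {"n1": {1, 2, 4}, "n2": {1, 2, 3, 4}}   # per-node received seqnos
not_received = {nid: pkt_tx - rx for nid, rx in pkt_rx.items()}
assert not_received == {"n1": {3}, "n2": set()}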
Example #2
from functools import reduce  # needed under Python 3


def get_sim_pdr(data):
    """
    1. Determine the number of packets broadcast across all nodes
    2. Determine the total number of packets received by each node
    3. For each node, divide the number of packets received by the
       number of packets sent computed in (1)
    """
    broadcasts = [set(bcast) for bcast in dao.select(data, BROADCAST_ENTRY)]
    pkt_tx = reduce(lambda x, y: x.union(y), broadcasts)
    nodes_data = dao.group_by(data, NODE_ENTRY)
    pkt_rx = {node: set(dao.select(nodes_data[node], SEQNO_ATTR))
              for node in nodes_data}

    # check that received packets are a subset of those transmitted
    for nid, rx in pkt_rx.items():
        if not rx.issubset(pkt_tx):
            msg = ("Received packets are not a subset of those sent "
                   "in node %s" % str(nid))
            logger.error(msg)
            raise DAException(msg)

    # return pdr for each node
    nodes_pdr = {
        node_id: len(rx) / len(pkt_tx)
        for node_id, rx in pkt_rx.items()
    }
    return nodes_pdr
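The per-node PDR is just the ratio of received to broadcast packets. A sketch with hypothetical numbers, assuming Python 3 (under Python 2 the division would need a float operand):

pkt_tx = {1, 2, 3, 4}
pkt_rx = {"n1": {1, 2, 4}, "n2": {1, 2, 3, 4}}
nodes_pdr = {nid: len(rx) / len(pkt_tx) for nid, rx in pkt_rx.items()}
assert nodes_pdr == {"n1": 0.75, "n2": 1.0}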
Example #3
from functools import reduce  # needed under Python 3


def clean_data(data, offset):
    """Drop initial and terminal floods information.

    1. the set of packets considered is the set of packet
       broadcast by any node, reduced by an offset to remove
       marginal floods

    2. Any flood corresponding to a sequence number different from
      the set given by 1, is discarded
    2. Remove references to discarded packet also from the broadcast
       packets sets.
    """
    pkt_bcast = [
        set(pkt_node) for pkt_node in dao.select(data, BROADCAST_ENTRY)
    ]
    pkt_bcast_some = reduce(lambda x, y: x.union(y), pkt_bcast)
    # sort so the slice deterministically drops the first/last floods
    pkt_considered = sorted(pkt_bcast_some)[offset:-offset]

    pkt_received = [set(pkt_node) for pkt_node in dao.select(data, SEQNO_ATTR)]
    pkt_received_some = reduce(lambda x, y: x.union(y), pkt_received)

    to_remove = pkt_bcast_some.union(pkt_received_some).difference(
        pkt_considered)
    dao.delete(data, SEQNO_ATTR, to_remove)

    # remove packets not considered from node broadcasts
    paths = dao.get_paths_for_field(data, BROADCAST_ENTRY)
    path = paths[0]
    bcast_structures = dao.get_pointer_to(data, path)
    for bcast_structure in bcast_structures:
        new_pkt_bcast = (set(bcast_structure[BROADCAST_ENTRY])
                         .difference(to_remove))
        bcast_structure[BROADCAST_ENTRY] = list(new_pkt_bcast)

    # remove first and last epoch estimates
    paths = dao.get_paths_for_field(data, RTIMER_EPOCH_ATTR)
    path = paths[0]
    bcast_structures = dao.get_pointer_to(data, path)
    for bcast_structure in bcast_structures:
        new_epoch_rtimer = bcast_structure[RTIMER_EPOCH_ATTR][offset:-offset]
        bcast_structure[RTIMER_EPOCH_ATTR] = new_epoch_rtimer

    # check that the packets broadcast match those received, and vice versa
    pkt_bcast = [
        set(pkt_node) for pkt_node in dao.select(data, BROADCAST_ENTRY)
    ]
    pkt_bcast_some = reduce(lambda x, y: x.union(y), pkt_bcast)
    pkt_received = [set(pkt_node) for pkt_node in dao.select(data, SEQNO_ATTR)]
    pkt_received_some = reduce(lambda x, y: x.union(y), pkt_received)
    tx_rx_diff = pkt_bcast_some.symmetric_difference(pkt_received_some)
    if len(tx_rx_diff) != 0:
        logger.debug("Packets received and sent differ!")
        logger.debug("Differing packets: {}".format(tx_rx_diff))
        raise DAException("Packets received and sent differ!")

    return True
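The trimming step in (1) can be shown in isolation; sorting first is what makes the slice drop exactly the marginal floods (the sequence numbers here are hypothetical):

seqnos = {6, 1, 3, 2, 5, 4}      # union of broadcast sequence numbers
offset = 1
considered = sorted(seqnos)[offset:-offset]   # drop marginal floods
assert considered == [2, 3, 4, 5]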
Example #4
def run():
    while True:
        print("Start processing a task")
        task = dao.select(state=0)
        cv.acquire()
        while task is None:
            cond_wait(cv)
            task = dao.select(state=0)  # re-check for work after waking up
        cv.release()
        ret = dao.update(state=1, update_time=now(), id=task.id)
        if ret == 0:
            print("Task already claimed elsewhere, skip to the next one")
            continue
        page = http_crawler(task.link, task.type)
        if task.type == 0:
            print("Processing list task....")
            for item in page:
                prefix = "http://yue.ifeng.com/news/detail_"
                link = prefix + item[0]
                new_task = build_task(link)
                dao.insert(new_task)
                cond_signal(cv)
            dao.update(state=2, update_time=now(), id=task.id)
        if task.type == 1:
            file_name = task.link.split("/")[-1]
            print("Saving page....", task.link, file_name)
            save_page(page, file_name)
            ret = dao.update(state=2, update_time=now(), id=task.id)
        print("Task done")
Example #5
def get_sim_trx_errors(data):
    """Retrieve the number of transmission and reception
    errors, besides the number of bad packet errors.

    * # transmission errors is given by
        # RX errors + # TIMEOUTS errors

    * # bad packet errors is given by
        # BAD_LEN + # BAD HEADER + # BAD PAYLOAD
    """
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, glossy_stats>
    node_gstats = {node: dao.select(nodes_data[node], GLOSSY_ENTRY)
                   for node in nodes_data}
    nerr = {}
    nbad_pkt = {}
    for node_id in node_gstats:
        try:
            nerr[node_id] = (node_gstats[node_id][N_RX_ERR_ATTR] +
                             node_gstats[node_id][N_RX_TIMEOUT_ATTR])

            nbad_pkt[node_id] = (node_gstats[node_id][BAD_LEN_ATTR] +
                                 node_gstats[node_id][BAD_HEADER_ATTR] +
                                 node_gstats[node_id][BAD_PAYLOAD_ATTR])
        except KeyError:
            nerr[node_id] = 0
            nbad_pkt[node_id] = 0
    return nerr, nbad_pkt
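With hypothetical counter values (the key names below are stand-ins for the *_ATTR constants), the two aggregates reduce to plain sums:

gstats = {"n_rx_err": 2, "n_rx_timeout": 5,
          "bad_len": 1, "bad_header": 0, "bad_payload": 3}
nerr = gstats["n_rx_err"] + gstats["n_rx_timeout"]                       # 7
nbad = gstats["bad_len"] + gstats["bad_header"] + gstats["bad_payload"]  # 4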
Example #6
def get_sim_slot_estimation(data):
    """Retrieve the set of values assumed by the
    slot estimation across floods, by each node.

    NOTE:
    -----
    Slot values are returned with the transceiver precision, where
    1 unit corresponds to 31.25ns.
    To note this, the y label of this chart reports
    a "x31 ns".

        1 DWT_TU ~= 32ns
    """
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, <T_slot>>
    node_slots = {node: dao.select(nodes_data[node], T_SLOT_ATTR)
                  for node in nodes_data}
    # keep only slot estimates greater than 0
    def higher_zero_elements(list_):
        return [el for el in list_ if el > 0]

    x = sorted(node_slots.keys(), key=int)  # node ids, numerically sorted
    y = [higher_zero_elements(node_slots[node_id]) for node_id in x]  # slots
    return dict(zip(x, y))
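Converting the returned values to nanoseconds is a single multiplication; a sketch assuming the 31.25 ns unit from the docstring, with made-up raw estimates:

DWT_TU_NS = 31.25                       # one transceiver unit, per the docstring
slots = [520, 0, 519, 521]              # hypothetical raw estimates
valid = [s for s in slots if s > 0]     # same > 0 filter as above
slots_ns = [s * DWT_TU_NS for s in valid]
assert slots_ns == [16250.0, 16218.75, 16281.25]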
Example #7
def plot_num_initiator(data):
    """Produce a boxplot counting how many times each node has been
    the initiator within the total execution time."""
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, num_broadcasts>
    broadcast = {node: len(dao.select(nodes_data[node], BROADCAST_ENTRY))
                 for node in nodes_data}
    return broadcast
Example #8
def get_sim_epoch_estimates(data):
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, app_stats>
    node_appstats = {node: dao.select(nodes_data[node], RTIMER_EPOCH_ATTR)
                     for node in nodes_data}
    return node_appstats
Example #9
def get_sim_first_relay_counter(data):
    """Retrieve for each node the set of values
    assumed by the first relay counter during
    the considered floods."""
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, <ref_relay_counter>>
    ref_relay_cnt = {node: dao.select(nodes_data[node], REF_RELAY_CNT_ATTR)
                     for node in nodes_data}
    return ref_relay_cnt
Example #10
import numpy as np


def get_sim_flood_trx(data):
    """Return number of transmission and reception at each flood,
    showing possible variations in their distributions.
    """
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, floods_stats>
    node_fstats = {node: dao.select(nodes_data[node], FLOODS_ENTRY)
                   for node in nodes_data}

    # for each node, collect info on rx and tx within each flood.
    node_tx = {
        node: np.array(dao.select(node_fstats[node], N_TX_ATTR))
        for node in node_fstats
    }
    node_rx = {
        node: np.array(dao.select(node_fstats[node], N_RX_ATTR))
        for node in node_fstats
    }
    return node_tx, node_rx
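Since the per-flood counts come back as numpy arrays, distribution summaries are one-liners; a sketch with made-up counts:

import numpy as np

node_tx = {"n1": np.array([2, 3, 2, 3]), "n2": np.array([3, 3, 3, 3])}
tx_summary = {n: (tx.mean(), tx.std()) for n, tx in node_tx.items()}
# {'n1': (2.5, 0.5), 'n2': (3.0, 0.0)}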
Example #11
def get_sim_sync_counters(data):
    """Retrieve for each node how many times it was able to synchronize,
    and how many to desync."""
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, app_stats>
    node_appstats = {node: dao.select(nodes_data[node], APP_ENTRY)
                     for node in nodes_data}

    nsync = {node_id: node_appstats[node_id][N_SYNC_ATTR]
             for node_id in node_appstats}

    ndesync = {node_id: node_appstats[node_id][N_NO_SYNC_ATTR]
               for node_id in node_appstats}
    return nsync, ndesync
Example #12
def hit_me():
    cursor1 = dao.select()
    for row in cursor1:
        varTrue.set(row[6])
        # l['text'] = row[1]
        text.config(state='normal')
        text.delete(1.0, tkinter.END)
        text.insert("insert", row[1])
        text.config(state='disabled')
        varA.set(row[2])
        varB.set(row[3])
        varC.set(row[4])
        varD.set(row[5])
    var.set('cha')  # nothing selected at the start

    check()
Example #13
def get_sim_trx(data):
    """Show total number of "service" packets (NOT application
    packets) transmitted and received from each node.
    """
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, glossy_stats>
    node_gstats = {node: dao.select(nodes_data[node], GLOSSY_ENTRY)
                   for node in nodes_data}
    nodes_tx = {}
    nodes_rx = {}
    for node_id in node_gstats:
        try:
            nodes_rx[node_id] = node_gstats[node_id][N_RX_ATTR]
            nodes_tx[node_id] = node_gstats[node_id][N_TX_ATTR]
        except KeyError:
            nodes_rx[node_id] = 0
            nodes_tx[node_id] = 0
    return nodes_tx, nodes_rx
Example #14
def get_sim_failed_slot_estimation(data):
    """Return how many times a node failed to
    estimate the slot (producing 0 as estimation).
    """
    nodes_data = dao.group_by(data, NODE_ENTRY)
    # compute map <node, <T_slot>>
    node_slots = {node: dao.select(nodes_data[node], T_SLOT_ATTR)
                  for node in nodes_data}

    # keep only slot estimates equal to 0 (failed estimations)
    def zero_elements(list_):
        return [el for el in list_ if el == 0]

    x = sorted(node_slots.keys(), key=int)  # node ids, numerically sorted
    slots = [node_slots[node_id] for node_id in x]
    zero_list = list(map(zero_elements, slots))
    # count how many zeros there are in each list
    y = list(map(len, zero_list))
    return dict(zip(x, y))
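Counting the failures could also use list.count, which avoids building the intermediate zero lists; an equivalent sketch with hypothetical estimates:

node_slots = {"1": [520, 0, 519], "2": [0, 0, 518]}
failed = {nid: slots.count(0) for nid, slots in node_slots.items()}
assert failed == {"1": 1, "2": 2}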
Example #15
from functools import reduce  # needed under Python 3


def get_sim_trx_error_details(data):
    # collect all glossy_stats
    node_gstats = dao.select(data, GLOSSY_ENTRY)
    # filter nodes without glossy_stats
    node_gstats = filter(lambda gstats: len(gstats) > 0, node_gstats)
    # aggregate results
    results = reduce(
        lambda x, y: {k: x[k] + y[k]
                      for k in set(list(x.keys()) + list(y.keys()))},
        node_gstats)

    # total number of reception errors
    nerrs = (results[N_RX_ERR_ATTR] +
             results[N_RX_TIMEOUT_ATTR])
    # drop every counter that is not a detailed error cause
    for attr in (N_RX_ERR_ATTR, N_RX_TIMEOUT_ATTR, BAD_LEN_ATTR,
                 BAD_HEADER_ATTR, BAD_PAYLOAD_ATTR, N_RX_ATTR,
                 N_TX_ATTR, REL_CNT_FIRST_RX_ATTR):
        results.pop(attr)
    detailed_errors = sum(results.values())
    # errors not accounted for by the detailed counters are unknown
    results["unknown_err"] = nerrs - detailed_errors
    return results
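Note the reduce-based merge raises KeyError if any node is missing one of the counters; collections.Counter offers a more forgiving way to sum dicts of counts, shown here with hypothetical stats:

from collections import Counter
from functools import reduce

node_gstats = [{"n_rx": 10, "bad_len": 1}, {"n_rx": 7, "bad_header": 2}]
results = dict(reduce(lambda x, y: x + y, map(Counter, node_gstats)))
assert results == {"n_rx": 17, "bad_len": 1, "bad_header": 2}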
Example #16
def selectAllBookIns():
    return dao.select("testDB", "BookIn", "*", "")
Example #17
def selectBookInsByName(name):
    return dao.select("testDB", "BookIn", "*", "WHERE name == '%s'" % name)
Example #18

def deleteTestDb():
    if os.path.exists(ROOT_DIR + '/../db/{}.db'.format(test_movie_db)):
        os.remove(ROOT_DIR + '/../db/{}.db'.format(test_movie_db))

    if os.path.exists(ROOT_DIR + '/../db/{}.db'.format(test_user_db)):
        os.remove(ROOT_DIR + '/../db/{}.db'.format(test_user_db))


deleteTestDb()

print("Checking creating test user db...")
assert (dao.create("CREATE TABLE IF NOT EXISTS test(id int, name varchar(64))",
                   test_user_db) == True)
assert (len(dao.select("SELECT * FROM test", test_user_db, False)) == 0)

print("Checking insert test user db...")
assert (dao.insert("INSERT INTO test(id, name) VALUES(?,?)", [123, "taro"],
                   test_user_db) == True)
assert (len(dao.select("SELECT * FROM test", test_user_db, False)) == 1)

print("Checking update test user db...")
assert (dao.update("UPDATE test SET name=? WHERE id=?", ["goro", 123],
                   test_user_db) == True)
assert (len(dao.select("SELECT * FROM test", test_user_db, False)) == 1)
assert (dao.select("SELECT * FROM test", test_user_db, False)[0][1] == "goro")

print("Checking delete a column test test db...")
assert (dao.delete("DELETE FROM test WHERE id=?", [123], test_user_db) == True)
assert (len(dao.select("SELECT * FROM test", test_user_db, False)) == 0)