import os


def report_generate(obj, timestamp, iteration):
    report = StatisticalReport(obj, timestamp, iteration)
    hostname = "host%d" % obj.id
    base_directory = obj.configuration("test_directory")
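    # Report path layout: <test_directory>/host<id>/<timestamp %04d>/<iteration %04d>.txt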
    filepath = os.path.join(base_directory, hostname, "%04d" % timestamp, "%04d.txt" % iteration)
    dir_name = os.path.dirname(filepath)
    if not os.path.exists(dir_name):
        os.makedirs(dir_name)

    with open(filepath,"w") as f:
        f.write("## INPUT\n")
        f.write(str(obj.input.to_string()) + "\n")
        f.write("## DB\n")
        f.write(obj.context_database.to_string(timestamp) + "\n")
        f.write("## ASSORTED CONTEXTS\n")
        f.write(obj.assorted_context_database.to_string(timestamp) + "\n")
        f.write("## FILTERED SINGLES\n")
        r = contexts_to_standard(obj.filtered_singles)
        f.write("%s\n" % r[0])
        f.write("## NEW AGGREGATES\n")
        if obj.new_aggregate is None:
            aggr_string = "*"
            count = 0
        else:
            aggr_string = obj.new_aggregate
            count = len(obj.new_aggregate.get_cohorts_as_set())
        f.write("size(%d)-%s\n" % (count, aggr_string))
        f.write("## CONTEXT HISTORY\n")
        f.write(str(obj.context_history.get(timestamp)) + "\n")
        f.write("## OUTPUT\n")
        f.write(str(obj.output.to_string()) + "\n")
        f.write("## ACTUAL OUTPUT\n")
        f.write(str(obj.output.to_string(True)))  # passing True makes to_string() report the actual output

        f.write("\n\n-------------------\n")
        f.write("## STATISTICS\n")
        f.write("%s" % report.run())

def run(config, timestamp=0):
        """
        Runs the aggregation simulation on the hosts in config until no host has
        anything left to send, writing a per-host report at every iteration.
        """
        hosts = config["hosts"]
        neighbors = config["neighbors"]
        test_directory = config["test_directory"]

        assert hosts is not None
        assert neighbors is not None

        # No packets are dropped unless a drop_rate is configured
        drop_rate = 0.0
        if "drop_rate" in config:
            drop_rate = float(config["drop_rate"])
        disconnection_rate = 0.0
        if "disconnection_rate" in config:
            disconnection_rate = float(config["disconnection_rate"])

        # configurations
        for h in hosts:
            h.context_aggregator.set_config(config)

        # The timestamp stays fixed for the whole run; only the iteration counter advances.

        disconnection_count = 0
        sent_count = 0
        count = 0
        while True:
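            # Each iteration: (1) every host computes its output, (2) hosts exchange
            # messages subject to disconnection/drop simulation, (3) a per-host report
            # is written, and (4) the loop ends once no host has anything left to send.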
            print "Iteration [%d]: at timestamp (%d)" % (count, timestamp)

            ## sample
            for h in hosts:
                n = neighbors[h.id]
                h.context_aggregator.process_to_set_output(neighbors=n, timestamp=timestamp, iteration=count)

            ## communication
            ### Check if there is anything to send
            if not AggregationSimulator.stop_simulation(hosts):
                from_to_map = {}
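                # Buffer every outgoing message keyed by (sender, receiver) so that
                # deliveries happen only after all hosts have finished sending this round.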
                ### We need neighbors computation code here
                for h in hosts:
                    if not h.context_aggregator.is_nothing_to_send():
                        ns = neighbors[h.id]

                        for n in ns:

                            # When host has nothing to send to neighbors, just skip it
                            if not h.context_aggregator.output.dictionary[n]:
                                continue

                            if disconnection_rate > 0.0:
                                if check_drop(disconnection_rate):
                                    disconnection_count += 1
                                    #print "no connection from %d to %d; couldn't send %s" % (h.id, n, h.context_aggregator.output.dictionary[n])
                                    continue

                            sent_count += 1
                            sends = h.context_aggregator.send(neighbor=n, timestamp=timestamp)

                            # sends is a dictionary that maps id -> contexts
                            for k, value in sends.items():
                                if not value: continue
                                key = AggregationSimulator.encode_key(h.id, k)
                                from_to_map[key] = value
                                # store what is actually sent
                                h.context_aggregator.output.actual_sent_dictionary[k] = contexts_to_standard(value)

                #print from_to_map
                for i, value in from_to_map.items():
                    from_node, to_node = AggregationSimulator.decode_key(i)
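                    # decode_key() reverses encode_key(), recovering the sender and receiver ids.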
                    h = next(x for x in hosts if x.id == to_node)

                    if drop_rate > 0.0:
                        if check_drop(drop_rate):
                            print "dropping packets %s" % i
                            continue
                    h.context_aggregator.receive(from_node=from_node,contexts=value,timestamp=timestamp)

            for h in hosts:
                report_generate(h.context_aggregator, timestamp, count)

            if AggregationSimulator.stop_simulation(hosts):
                break

            count += 1

        print "sent count:(%d) disconnection count:(%d)" % (sent_count, disconnecion_count)