Example #1
def main():
    for i in range(p.Runs):
        clock = 0  # set clock to 0 at the start of the simulation
        if p.hasTrans:
            if p.Ttechnique == "Light": LT.create_transactions()  # generate pending transactions
            elif p.Ttechnique == "Full": FT.create_transactions()  # generate pending transactions

        Node.generate_gensis_block()  # generate the genesis block for all miners
        BlockCommit.generate_initial_events()  # generate the initial events (at least one) to start the simulation

        while not Queue.isEmpty() and clock <= p.simTime:
            next_event = Queue.get_next_event()
            clock = next_event.time  # move the clock to the time of the event
            BlockCommit.handle_event(next_event)
            Queue.remove_event(next_event)

        Consensus.fork_resolution()  # apply the longest-chain rule to resolve forks
        Incentives.distribute_rewards()  # distribute the rewards among the participating nodes
        Statistics.calculate()  # calculate the simulation results (e.g., block statistics and miners' rewards)

        ########## reset all global variables before the next run #############
        Statistics.reset()  # reset all variables used to calculate the results
        Node.resetState()  # reset the state (blockchain) of every node in the network

    fname = os.getenv('OUTPUT', "(Allverify)1day_{0}M_{1}K".format(
        p.Bsize / 1000000, p.Tn / 1000)) + ".xlsx"
    Statistics.print_to_excel(fname)  # print all the simulation results to an Excel file
    Statistics.reset2()  # reset profit results
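Each of these mains follows the same discrete-event loop: pop the earliest pending event, move the clock to its timestamp, handle it, and stop once the queue is empty or the clock passes p.simTime. Below is a minimal, self-contained sketch of that pattern; Event, run_simulation and the heapq-based queue are illustrative stand-ins, not the simulator's actual Queue/BlockCommit classes.

import heapq
from dataclasses import dataclass, field

@dataclass(order=True)
class Event:
    time: float                      # events are ordered by their timestamp
    kind: str = field(compare=False)

def run_simulation(pending, sim_time):
    """Drain a time-ordered event queue, advancing the clock to each event."""
    heap = list(pending)
    heapq.heapify(heap)
    clock = 0.0
    while heap and clock <= sim_time:
        next_event = heapq.heappop(heap)   # earliest pending event
        clock = next_event.time            # jump the clock to its timestamp
        print(f"t={clock:.2f}: handling a {next_event.kind} event")
    return clock

run_simulation([Event(2.0, "block"), Event(1.0, "transaction"), Event(5.0, "block")], sim_time=4.0)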
Example #2
def main():
    for i in range(p.Runs):
        clock = 0  # set clock to 0 at the start of the simulation
        if p.hasTrans:
            if p.Ttechnique == "Light":
                LT.create_transactions()  # generate pending transactions
            elif p.Ttechnique == "Full":
                FT.create_transactions()  # generate pending transactions

        Node.generate_gensis_block()  # generate the genesis block for all miners
        BlockCommit.generate_initial_events()  # generate the initial events (at least one) to start the simulation

        while not Queue.isEmpty() and clock <= p.simTime:
            next_event = Queue.get_next_event()
            clock = next_event.time  # move clock to the time of the event
            BlockCommit.handle_event(next_event)
            Queue.remove_event(next_event)

        # for the AppendableBlock model, process the gateway transaction pools
        # and optionally verify the model implementation
        if p.model == 3:
            BlockCommit.process_gateway_transaction_pools()

            if i == 0 and p.VerifyImplemetation:
                Verification.perform_checks()

        Consensus.fork_resolution()  # apply the longest-chain rule to resolve forks
        Incentives.distribute_rewards()  # distribute the rewards among the participating nodes
        Statistics.calculate()  # calculate the simulation results (e.g., block statistics and miners' rewards)

        if p.model == 3:
            Statistics.print_to_excel(i, True)
            Statistics.reset()
        else:
            ########## reset all global variables before the next run #############
            Statistics.reset()  # reset all variables used to calculate the results
            Node.resetState()  # reset the state (blockchain) of every node in the network
            fname = "(Allverify)1day_{0}M_{1}K.xlsx".format(
                p.Bsize / 1000000, p.Tn / 1000)
            Statistics.print_to_excel(fname)  # print all the simulation results to an Excel file
            Statistics.reset2()  # reset profit results
Example #3
def main(dir, batch_size):
    # if not os.path.isabs(dir):
    #     dir = str(Path(os.getcwd())) + os.sep + dir + os.sep
    os.chdir(dir)
    # # start = time.time()
    # for epoch in epoch_range:
    #     Plot.create_plots(epoch, batch_size)
    # # end = time.time()
    # # print("create plots: " + str(end - start))
    # # Plot.create_video(batch_range, epoch_num, dir)
    # # end = time.time()
    # # print("create video: " + str(end - start))
    # # start = time.time()
    stats = Statistics(dir)
    stats.calculate(epoch_range, batch_range)
    stats.calculate_goal_success(epoch_range)
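Unlike the other examples, this main takes the output directory and batch size as arguments and relies on module-level epoch_range/batch_range globals. A hedged usage sketch follows; the directory name and batch size are placeholders, not values from the project:

if __name__ == "__main__":
    # hypothetical invocation; "results" and 64 are placeholder values
    main("results", batch_size=64)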
Example #4
def main():
    for i in range(p.Runs):
        print('-' * 10, f'Run: {i+1}', '-' * 10)
        print(p.sim_type)
        print('No. of Miners:', len(p.NODES))

        hash_power = 0
        # Give every pool a reference to the nodes it contains and add up each pool's total hash power.
        print('SOLO Nodes: ', end='')
        for node in p.NODES:
            hash_power += node.hashPower
            if node.pool:
                node.pool.nodes.append(node)
                node.pool.hash_power += node.hashPower
            else:
                print(node.id, end=', ')
        print()

        print('Pools:')
        for pool in p.POOLS:
            print(' -', pool.id, pool.strategy, 'Fee Rate:', pool.fee_rate,
                  'Nodes:', [node.id for node in pool.nodes], 'Hash power:',
                  pool.hash_power)
        print('Total hash power:', hash_power, '\n')

        clock = 0  # set clock to 0 at the start of the simulation
        if p.hasTrans:
            if p.Ttechnique == "Light":
                LT.create_transactions()  # generate pending transactions
            elif p.Ttechnique == "Full":
                FT.create_transactions()  # generate pending transactions

        Node.generate_gensis_block()  # generate the genesis block for all miners
        BlockCommit.generate_initial_events()  # generate the initial events (at least one) to start the simulation

        while not Queue.isEmpty() and clock <= p.simTime:
            next_event = Queue.get_next_event()
            clock = next_event.time  # move clock to the time of the event
            BlockCommit.handle_event(next_event)
            Queue.remove_event(next_event)

        # for the AppendableBlock model, process the gateway transaction pools
        # and optionally verify the model implementation
        if p.model == 3:
            BlockCommit.process_gateway_transaction_pools()

            if i == 0 and p.VerifyImplemetation:
                Verification.perform_checks()

        Consensus.fork_resolution()  # apply the longest-chain rule to resolve forks
        Incentives.distribute_rewards()  # distribute the rewards among the participating nodes
        Statistics.calculate(i)  # calculate the simulation results (e.g., block statistics and miners' rewards)

        if p.model == 3:
            Statistics.print_to_excel(i, True)
            Statistics.reset()
        else:
            ########## reset all global variables before the next run #############
            Statistics.reset()  # reset all variables used to calculate the results
            Node.resetState()  # reset the state (blockchain) of every node in the network
            Pool.resetState()  # reset all pools in the network

    # set file name for results
    fname = f"{p.sim_type}_{int(p.simTime/(24*60*60))}days_{datetime.now()}.xlsx".replace(
        ':', '_')
    # fname = f"(Allverify)1day_{p.Bsize/1000000}M_{p.Tn/1000}K-{i}-{datetime.now()}.xlsx".replace(':', '_')
    # print all the simulation results to an Excel file
    Statistics.print_to_excel(fname)
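Before the run starts, Example #4 wires the mining pools together: every node that belongs to a pool is registered with it and its hash power is added to the pool's total, while pool-less nodes are reported as SOLO miners. A stripped-down sketch of that bookkeeping, using hypothetical Node/Pool containers rather than the simulator's p.NODES/p.POOLS:

from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class Pool:
    id: str
    hash_power: float = 0.0
    nodes: List["Node"] = field(default_factory=list)

@dataclass
class Node:
    id: int
    hashPower: float
    pool: Optional[Pool] = None   # None means the node mines solo

def register_pools(nodes):
    """Attach each node to its pool and accumulate the pool's hash power."""
    total, solo = 0.0, []
    for node in nodes:
        total += node.hashPower
        if node.pool:
            node.pool.nodes.append(node)
            node.pool.hash_power += node.hashPower
        else:
            solo.append(node.id)
    return total, solo

pool = Pool("p1")
total, solo = register_pools([Node(0, 10, pool), Node(1, 5), Node(2, 7, pool)])
print(total, solo, pool.hash_power)   # 22.0 [1] 17.0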
Example #5
File: Report.py Project: ericpotvin/BTK
    def handle_files(self):
        """ Process files
            :return: None
        """
        if not self.files:
            return

        total_lines = 0
        covered_lines = 0
        dirs = {}

        for file_ in self.files:
            Statistics.calculate(file_)

            dir_name = os.path.dirname(file_.path)
            if dir_name not in dirs:
                dirs[dir_name] = Folder(dir_name)
            dirs[dir_name].add_file(file_)

        for dir_ in dirs.values():
            total_lines = total_lines + dir_.total_lines
            covered_lines = covered_lines + dir_.covered_lines

        open_file = open(
            os.path.join(CC_BASE, HTML.INDEX_FILE + HTML.FILE_EXT), "w")

        root_link = HTML.get_link(CC_BASE, HTML.LINK_ROOT, "")

        open_file.write(HTML.get_header(root_link, HTML.TABLE_HEADER))

        for dir_ in dirs.values():
            path = os.path.join(self.ROOT_PATH, dir_.name[1:],
                                HTML.INDEX_FILE + HTML.FILE_EXT).replace(
                                    '//', '/')

            last_pos = dir_.name.rfind("/")

            open_file.write(
                HTML.get_line(path, dir_.name[last_pos + 1:], dir_.total_lines,
                              dir_.covered_lines))

        open_file.write(HTML.get_footer(total_lines, covered_lines))
        open_file.close()

        for dir_ in dirs.values():

            path = (CC_BASE + "/" + self.ROOT_PATH + dir_.name)\
                .replace('//', '/')

            if not os.path.exists(path):
                os.makedirs(path)

            open_file = open(
                os.path.join(path, HTML.INDEX_FILE + HTML.FILE_EXT), "w")

            open_file.write(
                HTML.get_header(root_link + dir_.basename, HTML.TABLE_HEADER))

            for file_ in dir_.files:
                open_file.write(
                    HTML.get_line(file_.basename + HTML.FILE_EXT,
                                  file_.basename, file_.total_lines,
                                  file_.covered_lines))

            open_file.write(
                HTML.get_footer(dir_.total_lines, dir_.covered_lines))
            open_file.close()

        for file_ in self.files:
            self.handle_file(file_)
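handle_files runs Statistics.calculate on every file and then groups the files by directory so that directory-level totals can be written to each index page. A stripped-down sketch of that grouping step, with a hypothetical Folder accumulator standing in for the report's Folder/HTML machinery:

import os

class Folder:
    """Hypothetical per-directory accumulator."""
    def __init__(self, name):
        self.name = name
        self.files = []
        self.total_lines = 0
        self.covered_lines = 0

    def add_file(self, path, total, covered):
        self.files.append(path)
        self.total_lines += total
        self.covered_lines += covered

def group_by_directory(file_stats):
    """file_stats: iterable of (path, total_lines, covered_lines) tuples."""
    dirs = {}
    for path, total, covered in file_stats:
        dir_name = os.path.dirname(path)
        if dir_name not in dirs:
            dirs[dir_name] = Folder(dir_name)
        dirs[dir_name].add_file(path, total, covered)
    return dirs

for folder in group_by_directory([("src/a.py", 100, 80), ("src/b.py", 50, 25), ("lib/c.py", 10, 10)]).values():
    print(folder.name, folder.total_lines, folder.covered_lines)   # src 150 105 / lib 10 10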