Example #1
class PercolationStats:

    def __init__(self, gridSize: int, trials: int):
        self.gridSize = gridSize
        self.trials = trials
        self.statistic = Statistics([])
        
    def simulePercolation(self):
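        # Monte Carlo simulation: open random sites until the grid percolates,
        # then record the fraction of open sites for each trial.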
        trialsFraction = []
        for trial in range(self.trials):
            grid = Percolation(self.gridSize, WeightedQuickUnion())
            while not grid.percolates():
                row = random.randint(1, self.gridSize)
                col = random.randint(1, self.gridSize)
                grid.open(row,col)
            trialsFraction.append(grid.numberOfOpenSites()/(self.gridSize*self.gridSize))
        self.statistic = Statistics(trialsFraction)

    def getAverage(self):
        return self.statistic.getAverage()
    
    def getSquareStdDeviation(self):
        return math.sqrt(self.statistic.getSquareStdDeviation())

    def getHighConf(self):
        return self.statistic.getHighConf()

    def getLowConf(self):
        return self.statistic.getLowConf()
Example #2
    async def check(real_roomid, raffle_name):
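        # Query the room for pending TV-type raffles, skip ids already seen,
        # and schedule a join for each new raffle via the Timer job list.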
        if not await Utils.is_normal_room(real_roomid):
            return
        data = await BasicRequest.tv_req_check(real_roomid)
        checklen = data["data"]["list"]
        list_available_raffleid = []
        try:
            for j in checklen:
                raffle_id = j["raffleId"]
                raffle_type = j["type"]
                time_wanted = j["time_wait"] + int(time.time())

                if not Statistics.is_raffleid_duplicate(raffle_id):
                    Log.raffle("本次获取到 %s 的抽奖id为: %s" %
                               (raffle_name, raffle_id))
                    list_available_raffleid.append(
                        (raffle_id, raffle_type, time_wanted))
                    Statistics.add2raffle_ids(raffle_id)
        except:
            Log.error("检测到无效的小电视类抽奖")
        # not useful for now
        # num_aviable = len(list_available_raffleid)
        for raffle_id, raffle_type, time_wanted in list_available_raffleid:
            Timer.add2list_jobs(
                TvRaffleHandler.join, time_wanted,
                (real_roomid, raffle_id, raffle_type, raffle_name))
Example #3
def selective_repeat_test(file_name: str, repetitions: int) -> None:
    for i in range(repetitions):
        #Print progress
        print(f"Run {i + 1}/{repetitions}")
        stats = Statistics()

        #Setup test specific statistics
        CommunicationSettings.check_sum = CheckSum.CRC
        CommunicationSettings.window_size = 4

        #Create sender and receiver
        sender = SenderSR("Sender", stats)
        reciever = RecieverSR("Reciever", stats)

        #Set up result image name
        reciever.set_recreated_image_name(f"Img/res_sr{i + 1}.png")

        #Bind them
        sender.bind(reciever)
        reciever.bind(sender)

        #Start them
        sender.start()
        reciever.start()

        #Start transmission
        sender.send_image(file_name)

        wait_for_simulation_end()
        print(stats.get_statistics())
        CommunicationSettings.reset_sumulation_state()
Example #4
    def __init__(self, parent=None):
        super().__init__(parent)

        self.statistics = Statistics(resourcePath(DB_PATH))

        self.setupUi(self)
        self.initUi()
Example #5
 def getVehicleTypeAmount(self):
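     # Count allied and enemy vehicles per class for the players tracked by
     # Statistics, collect their tiers, and return (amounts, tiers, currentTier).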
     from Statistics import Statistics
     player = BigWorld.player()
     vehicles = player.arena.vehicles
     if player.playerVehicleID not in vehicles:
         return
     curVeh = vehicles[player.playerVehicleID]
     Statistics.getInfos(curVeh['accountDBID'])
     vehicles[player.playerVehicleID]['team']
     amounts = {VEHICLE_CLASS_NAME.HEAVY_TANK:{'ally':0,'enemy':0},
             VEHICLE_CLASS_NAME.MEDIUM_TANK:{'ally':0,'enemy':0},
             VEHICLE_CLASS_NAME.LIGHT_TANK:{'ally':0,'enemy':0},
             VEHICLE_CLASS_NAME.AT_SPG:{'ally':0,'enemy':0},
             VEHICLE_CLASS_NAME.SPG:{'ally':0,'enemy':0}}
     tiers = []
     for accountDBID,entityObj in Statistics.getEmo().getAll().iteritems():
         vID = g_sessionProvider.getCtx().getVehIDByAccDBID(accountDBID)
         if vID in vehicles:
             v_info = vehicles[vID]
             tiers.append(v_info['vehicleType'].level)
             if not BattleUtils.isMyTeam(v_info['team']):
                 tag = 'enemy'
             else:
                 tag = 'ally'
             for vehicleType in VEHICLE_TYPES_ORDER:
                 if vehicleType in v_info['vehicleType'].type.tags:
                     amounts[vehicleType][tag] += 1 
     currentTier = curVeh['vehicleType'].level       
     return (amounts,tiers,currentTier)
Example #6
    async def join(real_roomid, raffle_id, raffle_type, raffle_name):
        await BasicRequest.enter_room(real_roomid)
        data = await BasicRequest.tv_req_join(real_roomid, raffle_id,
                                              raffle_type)
        Log.raffle("参与了房间 %s 的 %s 抽奖" % (real_roomid, raffle_name))
        Log.raffle("%s 抽奖状态: %s" %
                   (raffle_name, "OK" if data["code"] == 0 else data["msg"]))
        Statistics.add2joined_raffles("小电视类(合计)")

        code = data["code"]
        # tasklist = []
        if not code:
            # await asyncio.sleep(random.randint(170,190))
            # task = asyncio.ensure_future(TvRaffleHandler.notice(raffle_id,real_roomid,raffle_name))
            # tasklist.append(task)
            # await asyncio.wait(tasklist, return_when=asyncio.FIRST_COMPLETED)
            Log.raffle("房间 %s %s 抽奖结果: %s X %s" %
                       (real_roomid, raffle_name, data["data"]["award_name"],
                        data["data"]["award_num"]))
            Statistics.add2results(data["data"]["award_name"],
                                   int(data["data"]["award_num"]))
        elif code == -500:
            Log.error("-500繁忙,稍后重试")
            return False
        elif code == -403 or data["msg"] == "访问被拒绝":
            Log.error("当前账号正在小黑屋中")
            return False
Example #7
    async def check(real_roomid, raffle_id=None):
        if not await Utils.is_normal_room(real_roomid):
            return
        if raffle_id is not None:
            data = {"data": [{"id": raffle_id, "time": 65}]}
        else:
            for i in range(10):
                data = await BasicRequest.guard_req_check(real_roomid)
                if data["data"]:
                    break
                await asyncio.sleep(1)
            else:
                Log.warning("%s 没有guard或guard已领取" % real_roomid)
                return

        list_available_raffleid = []

        for j in data["data"]:
            raffle_id = j["id"]
            if not Statistics.is_raffleid_duplicate(raffle_id):
                Log.raffle("本次获取到的 大航海 抽奖id为 %s" % raffle_id)
                list_available_raffleid.append(raffle_id)
                Statistics.add2raffle_ids(raffle_id)

        tasklist = []
        num_available = len(list_available_raffleid)
        for raffle_id in list_available_raffleid:
            task = asyncio.ensure_future(GuardRaffleHandler.join(num_available, real_roomid, raffle_id))
            tasklist.append(task)
        if tasklist:
            raffle_results = await asyncio.gather(*tasklist)
            if False in raffle_results:
                Log.error("繁忙提示,稍后重新尝试")
                RaffleHandler.push2queue((real_roomid,), GuardRaffleHandler.check)
Example #8
def do_clustering():
    try:
        j = JenkinsClustering()
        # get the data and dataframe to perform clustering
        data, df = j.prepare_data_for_clustering()
        print('Proceeding to cluster the data')
        estimator = KMeans(n_clusters=30, init='k-means++', max_iter=1000)
        estimator.fit(data)
        print(estimator.labels_)
        print(collections.Counter(estimator.labels_))
        df['labels'] = estimator.labels_
        os.chdir('../')
        # Gather data for a particular cluster and build a word cloud
        for item in set(estimator.labels_):
            con_string = ''
            for i in range(len(df)):
                if df['labels'][i] == item:
                    con_string += df['text'][i]
            Statistics.create_word_cloud(con_string, "Cluster %s" % (item),
                                         './cluster_%s.png' % (item))
            print('Created the word cloud for the cluster: ' + str(item))

    except Exception:
        print(traceback.format_exc())
        print('Error performing clustering')
Example #9
    def setUp(self):
        self.config = {
            "PARENT_THREAD_SLEEP_TIME": 60,
            "TWITCH_THREAD_SLEEP_TIME": 0.75,
            "IRC_THREAD_SLEEP_TIME": 1,
            "TIMEOUT": 5,
            "MATCH_PHRASES": [
                "BabyRage NEVER LUCKY BabyRage",
            ],
            "LOGS_FOLDER": "/logs/",
            "STATS_FOLDER": "/stats/",
            "CSV_FOLDER": "/CSV/",
            "DATE_TIME_FORMAT": "%B %d %Y %H:%M:%S",
            "TIME_FORMAT": "%H:%M:%S",
            "GRAPH_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.csv",
            "JSON_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.json",
            "CHAT_LOG_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.log",
            "RECONNECT_TIME": 360,

            #enable bot that reads stream IRC?
            "IRC_BOT": True,
            #if true, this will cause the IRC Bot to never stop reading chat, even if the streamer goes offline
            "ALWAYS_ONLINE": False,
            #enable bot that grabs stream data using Twitch API?
            "TWITCH_BOT": True,
        }

        self.jsonFile = JsonEditor.JsonEditor(
            "./data/_TEST_/stats/D13_M05_Y2015_H20_m00_s45.json",
            "./data/_TEST_/logs/_TEST_.json")
        self.stats = Statistics(
            "_TEST_", "./data/_TEST_/CSV/D13_M05_Y2015_H20_m00_s45.csv",
            "./data/_TEST_/stats/D13_M05_Y2015_H20_m00_s45.json",
            "./data/_TEST_/logs/D13_M05_Y2015_H20_m00_s45.log",
            "./data/_TEST_/logs/_TEST_.json", self.config)
Example #10
 def showStatistics(self):
     """Shows general statistics of the analyzed data over all frames."""
     if self.contacts is None or len(self.contacts) == 0:
         box = ErrorBox(ErrorMessages.NOSCORES_PROMPTANALYSIS)
         box.exec_()
         return
     self.statisticsView = Statistics(self.contacts)
     self.statisticsView.showNormal()
Example #11
 def __init__(self):
     self.flightList = FlightListAdministration(
     )  #Manipulate the flight list/dict -> Fernan
     self.statistic = Statistics(
     )  #Update attributes to track sales -> Fernan
     self.flightAdmin = FlightAdministration(self.flightList)
     self.Admin = Administrator("Lluvia", "Manilla", 10000.555,
                                "1999/04/24")
     self.principalMenu()
Example #12
 async def check(real_roomid):
     data = await BasicRequest.anchor_req_chcek(real_roomid)
     if not data["code"]:
         data = data["data"]
         if Utils.have_win_award(data["award_users"]):
             Log.raffle("%s 天选时刻抽奖结果: %s" % (real_roomid, data["name"]))
             Statistics.add2results(data["name"], 1)
         else:
             Log.raffle("%s 天选时刻抽奖结果: 没有中奖" % real_roomid)
Example #13
 def recordStats(self):
     #create all of our stats for the stream; runs after the stream is over
     jsonFile = JsonEditor(self.JSONfp, self.globalPath)
     stats = Statistics(self.stream, self.CSVfp, self.JSONfp, self.LOGfp,
                        self.globalPath, self.config)
     dailyStats = stats.doDaily()
     jsonFile.toJSON(dailyStats)
     print self.stream + ": Tally Emotes started!"
     stats.tallyEmotes()
     print self.stream + ": End of stream tasks finished!"
Example #14
File: UI.py Project: paweus/pite
 def LoadDataForVisualisation(self):
     print 'Loading data for visualisation...'
     Reader.read(self.filePath2)
     self.data = Reader.load
     self.dataStr = Reader.load2 #date and time
     self.data = Operations.CalculateToMetrics(self.data)
     stat = Statistics(self.data,self.dataStr)
     stat.makeStats()
     self.UpdateStatsGUI(stat)
     print 'All done.'
Example #15
File: UI.py Project: paweus/pite
 def LoadDataForVisualisation(self):
     print 'Loading data for visualisation...'
     Reader.read(self.filePath2)
     self.data = Reader.load
     self.dataStr = Reader.load2  #date and time
     self.data = Operations.CalculateToMetrics(self.data)
     stat = Statistics(self.data, self.dataStr)
     stat.makeStats()
     self.UpdateStatsGUI(stat)
     print 'All done.'
Example #16
 async def join(room_id, raffle_id):
     await BasicRequest.enter_room(room_id)
     data = await BasicRequest.storm_req_join(raffle_id)
     Statistics.add2joined_raffles("节奏风暴(合计)")
     if not data["code"]:
         data = data["data"]
         gift_name = data["gift_name"]
         gift_num = data["gift_num"]
         Log.critical("房间 %s 节奏风暴抽奖结果: %s X %s" % (room_id, gift_name, gift_num))
         Statistics.add2results(gift_name, int(gift_num))
Example #17
 def simulePercolation(self):
     trialsFraction = []
     for trial in range(self.trials):
         grid = Percolation(self.gridSize, WeightedQuickUnion())
         while not grid.percolates():
             row = random.randint(1, self.gridSize)
             col = random.randint(1, self.gridSize)
             grid.open(row,col)
         trialsFraction.append(grid.numberOfOpenSites()/(self.gridSize*self.gridSize))
     self.statistic = Statistics(trialsFraction)
Example #18
    def DocumentsAnalysis(self, naturalText='', generatedText=''):
        natGraph = GraphBuilder()
        natTextSize = len(sent_tokenize(naturalText))
        genTextSize = len(sent_tokenize(generatedText))
        genGraph = GraphBuilder()

        natGraph.CreateGraph(naturalText)
        genGraph.CreateGraph(generatedText)
        self.__plot.StatisticsComp(Statistics(natGraph.Graph(), natTextSize),
                                   Statistics(genGraph.Graph(), genTextSize))
        self.__textCount += 1
Example #19
class TestStatistics(unittest.TestCase):
    def setUp(self):
        self.config = {
            "PARENT_THREAD_SLEEP_TIME": 60,
            "TWITCH_THREAD_SLEEP_TIME": 0.75,
            "IRC_THREAD_SLEEP_TIME": 1,
            "TIMEOUT": 5,
            "MATCH_PHRASES": [
                "BabyRage NEVER LUCKY BabyRage",
            ],
            "LOGS_FOLDER": "/logs/",
            "STATS_FOLDER": "/stats/",
            "CSV_FOLDER": "/CSV/",
            "DATE_TIME_FORMAT": "%B %d %Y %H:%M:%S",
            "TIME_FORMAT": "%H:%M:%S",
            "GRAPH_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.csv",
            "JSON_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.json",
            "CHAT_LOG_FILE_FORMAT": "D%d_M%m_Y%Y_H%H_m%M_s%S.log",
            "RECONNECT_TIME": 360,

            #enable bot that reads stream IRC?
            "IRC_BOT": True,
            #if true, this will cause the IRC Bot to never stop reading chat, even if the streamer goes offline
            "ALWAYS_ONLINE": False,
            #enable bot that grabs stream data using Twitch API?
            "TWITCH_BOT": True,
        }

        self.jsonFile = JsonEditor.JsonEditor(
            "./data/_TEST_/stats/D13_M05_Y2015_H20_m00_s45.json",
            "./data/_TEST_/logs/_TEST_.json")
        self.stats = Statistics(
            "_TEST_", "./data/_TEST_/CSV/D13_M05_Y2015_H20_m00_s45.csv",
            "./data/_TEST_/stats/D13_M05_Y2015_H20_m00_s45.json",
            "./data/_TEST_/logs/D13_M05_Y2015_H20_m00_s45.log",
            "./data/_TEST_/logs/_TEST_.json", self.config)

    def test_tallyEmotes(self):
        #self.stats.tallyEmotes()
        print "Tally emotes done!"

    def test_doDaily(self):
        dailyStats = self.stats.doDaily()
        self.jsonFile.toJSON(dailyStats)

    def test_peakViewers(self):
        self.assertEqual(self.stats.getPeakViewers()[1], 3576)

    def test_averageViewers(self):
        print "AV: " + str(self.stats.getAverageViewers())

    def test_timeStreamed(self):
        stream_ses = self.stats.getSessions()[0]
        self.assertEqual(self.stats.getTimeStreamed(stream_ses)[1], "7:18:45")
Example #20
 async def join(num, real_roomid, raffle_id):
     await asyncio.sleep(random.uniform(0.5, min(30, num * 1.3)))
     data = await BasicRequest.guard_req_join(real_roomid, raffle_id)
     Log.raffle("参与了房间 %s 的大航海抽奖" % (real_roomid))
     if not data["code"]:
         Statistics.add2results(data["data"]["award_name"],
                                data["data"]["award_num"])
         Log.raffle("房间 %s 大航海抽奖结果: %s" % (real_roomid, data["message"]))
         Statistics.add2joined_raffles("大航海(合计)")
     else:
         Log.info(data)
Example #21
def main():
    arrange_data = ArrangeData()
    create_plot = CreatePlot()
    statistics = Statistics()

    arrange_data.load_data()
    arrange_data.count_values()

    create_plot.connect_small_values()
    create_plot.bar_plot()

    statistics.percentage()
Example #22
def make_solution(final_state):
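    # Walk parent links back from the final state to rebuild the solution path,
    # then hand the ordered list to Statistics.show_result.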
    list_of_solution = list()
    list_of_solution.insert(0,  final_state)

    while 1:
        if final_state.parent:
            final_state = final_state.parent
            list_of_solution.insert(0, final_state)
        else:
            break

    Statistics.show_result(list_of_solution)
Example #23
 async def notice(raffleid, real_roomid, raffle_name):
     data = await BasicRequest.tv_req_notice(real_roomid, raffleid)
     if not data["code"]:
         if data["data"]["gift_id"] == "-1":
             return
         elif data["data"]["gift_id"] != "-1":
             data = data["data"]
             Log.critical("房间 %s %s 抽奖结果: %s X %s" %
                          (real_roomid, raffle_name, data["gift_name"],
                           data["gift_num"]))
             Statistics.add2results(data["gift_name"],
                                    int(data["gift_num"]))
Example #24
 async def join(num, real_roomid, raffle_id):
     await asyncio.sleep(random.uniform(0.5, min(30, num * 1.3)))
     data = await BasicRequest.guard_req_join(real_roomid, raffle_id)
     Log.raffle("参与了房间 %s 的大航海抽奖" % (real_roomid))
     if not data["code"]:
         for award in data["data"]["award_list"]:
             result = re.search(r"(^获得|^)(.*)<%(\+|X)(\d*)%>", award['name'])
             Statistics.add2results(result.group(2), result.group(4))
         Log.raffle("房间 %s 大航海抽奖结果: %s" % (real_roomid, data["data"]["message"]))
         Statistics.add2joined_raffles("大航海(合计)")
     else:
         Log.info(data)
Example #25
def simulate(episode, workers, model, optim, rewardQueue, batch_save, path):
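    # Start the worker processes, then aggregate rewards from the queue into
    # Statistics, checkpointing model/optimizer state every batch_save episodes.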
    [w.start() for w in workers]

    stats = Statistics(episode)

    while True:
        episode += 1
        if episode % batch_save == 0:
            torch.save(model.state_dict(), path + "models/" + str(episode))
            torch.save(optim.state_dict(), path + "optims/" + str(episode))

        reward = rewardQueue.get()
        stats.update(reward)
Example #26
 async def join(real_roomid, name, raffle_id, expireAt):
     if not await Utils.is_normal_room(real_roomid):
         return
     if not Utils.is_normal_anchor(name):
         Log.error("检测到 %s 的异常天选时刻" % real_roomid)
         return
     data = await BasicRequest.anchor_req_join(raffle_id)
     if not data["code"]:
         Log.raffle("参与了 %s 的 天选时刻" % real_roomid)
         Statistics.add2joined_raffles("天选时刻(合计)")
         Timer.add2list_jobs(AnchorRaffleHandler.check, expireAt + 3, [real_roomid])
     else:
         Log.error("%s 天选时刻错误: %s" % (real_roomid, data["message"]))
Example #27
    def __init__(self, config, pipe_in):
        self.device = torch.device(
            'cuda:0' if torch.cuda.is_available() else 'cpu')
        print(config)
        seed = 23
        game = LunarLander.LunarLander()
        max_steps = 1000
        self.env = TimeLimit.TimeLimit(game, max_episode_steps=max_steps)
        self.env.seed(seed)
        random.seed(seed)
        torch.manual_seed(seed)
        self.config = config
        self.pipe_in = pipe_in
        self.num_o_ep = config['num_o_ep']
        self.num_o_input = self.env.observation_space.shape[0]
        self.num_o_output = self.env.action_space.n
        self.NN = NeuralNetwork(self.num_o_input, self.num_o_output,
                                self.config).to(self.device)
        self.targetNN = NeuralNetwork(self.num_o_input, self.num_o_output,
                                      self.config).to(self.device)
        self.stats_output_file = config['stats_output_file']
        if self.config['load_weights_enabled']:
            self.NN.load_state_dict(
                torch.load(self.config['load_weights_filename']))
            self.targetNN.load_state_dict(
                torch.load(self.config['load_weights_filename']))

        self.lossFunc = nn.MSELoss()
        self.optimizer = opt.Adam(params=self.NN.parameters(),
                                  lr=self.config['learn_rate'])
        self.batch_size = self.config['batch_size']
        self.memory = ExperienceReplay(self.config['memory_size'])
        self.statistics = Statistics()
        self.egreedy = self.config['egreedy']
        self.egreedy_final = self.config['egreedy_final']
        self.decay = self.config['decay']
        self.statistics.set('egreedy', self.egreedy)
        self.update_target_counter = 0

        if config['variable_updating_enabled']:
            self.update_frequency = config['update_target_frequency_base']
            self.update_frequency_float = config[
                'update_target_frequency_base']
            self.update_frequency_multiplicator = config[
                'update_target_frequency_multiplicator']
            self.update_frequency_limit = config[
                'update_target_frequency_limit']
        else:
            self.update_frequency = config['update_target_frequency']
        self.statistics.add('update_target_frequency', self.update_frequency,
                            True)
Example #28
    def handle_message(self, data):
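        # Route a raffle notification from the monitor server to the matching
        # handler queue based on its category, and record it in Statistics.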
        cmd = data["category"]
        raffle_name = data["name"]
        raffle_id = data["id"]
        room_id = data["roomid"]
        expireAt = data["expireAt"]

        # Guard raffle (大航海)
        if cmd == "guard":
            if config["Raffle_Handler"]["GUARD"] != "False":
                Log.raffle("监控服务器检测到 %s 的 %s" % (room_id, raffle_name))
                RaffleHandler.push2queue((room_id,), GuardRaffleHandler.check)
                # if not "总督" (governor), broadcast type is 2 (this room only)
                broadcast_type = 0 if raffle_name == "总督" else 2
                Statistics.add2pushed_raffles(raffle_name, broadcast_type)
        # PK(WIP)
        elif cmd == "pk":
            if config["Raffle_Handler"]["PK"] != "False":
                Log.raffle("监控服务器检测到 %s 的 %s" % (room_id, raffle_name))
                RaffleHandler.push2queue((room_id,), PkRaffleHandler.check)
                Statistics.add2pushed_raffles(raffle_name, 1)
        # Storm raffle (节奏风暴)
        elif cmd == "storm":
            if config["Raffle_Handler"]["STORM"] != "False":
                Log.raffle("监控服务器检测到 %s 的 %s" % (room_id, raffle_name))
                RaffleHandler.push2queue((room_id,), StormRaffleHandler.check)
                Statistics.add2pushed_raffles(raffle_name, 1)
        # Anchor raffle (天选)
        elif cmd == "anchor":
            if config["Raffle_Handler"]["ANCHOR"] != "False":
                Log.raffle("监控服务器检测到 %s 的 天选时刻, 奖品为: %s" % (room_id, raffle_name))
                RaffleHandler.push2queue((room_id, raffle_name, raffle_id, expireAt), AnchorRaffleHandler.join)
                Statistics.add2pushed_raffles("天选时刻", 1)
Example #29
def main():
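    # Run the simulator p.Runs times: generate transactions, process queued events
    # until simTime, resolve forks, distribute rewards, and collect Statistics.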
    for i in range(p.Runs):
        clock = 0  # set clock to 0 at the start of the simulation
        if p.hasTrans:
            if p.Ttechnique == "Light": LT.create_transactions() # generate pending transactions
            elif p.Ttechnique == "Full": FT.create_transactions() # generate pending transactions

        Node.generate_gensis_block() # generate the genesis block for all miners
        BlockCommit.generate_initial_events() # initiate initial events >= 1 to start with

        while  not Queue.isEmpty() and clock <= p.simTime:
            next_event = Queue.get_next_event()
            clock = next_event.time # move clock to the time of the event
            BlockCommit.handle_event(next_event)
            Queue.remove_event(next_event)

        Consensus.fork_resolution() # apply the longest chain to resolve the forks
        Incentives.distribute_rewards() # distribute the rewards between the participating nodes
        Statistics.calculate() # calculate the simulation results (e.g., block statistics and miners' rewards)

		########## reset all global variable before the next run #############
        Statistics.reset() # reset all variables used to calculate the results
        Node.resetState() # reset all the states (blockchains) for all nodes in the network
    fname = os.getenv('OUTPUT', "(Allverify)1day_{0}M_{1}K".format(
    	p.Bsize/1000000, p.Tn/1000))+".xlsx"
    Statistics.print_to_excel(fname) # print all the simulation results in an excel file
    Statistics.reset2() # reset profit results
Example #30
    def run_Stat(self, Distribution_By_Category, Distribution_By_Bins):

        S = Statistics(self.asset_pool)
        S.general_statistics_1()
        S.loop_Ds_ret_province_profession(Distribution_By_Category,
                                          Distribution_By_Bins)
        S.cal_income2debt_by_ID()
Example #31
    def __init__(self, genome):
        """ The GPopulation Class creator """

        if isinstance(genome, GPopulation):
            self.oneSelfGenome = genome.oneSelfGenome
            self.internalPop = []
            self.internalPopRaw = []
            self.popSize = genome.popSize
            self.sortType = genome.sortType
            self.sorted = False
            self.minimax = genome.minimax
            self.scaleMethod = genome.scaleMethod
            self.allSlots = [self.scaleMethod]

            self.internalParams = genome.internalParams
            self.multiProcessing = genome.multiProcessing
            try:
                logging.debug("do I have a comm?")
                self.mpi_comm = genome.mpi_comm
                self.mpi_myeval = genome.mpi_myeval
                self.mpi_full_copy = genome.mpi_full_copy
                logging.debug("I do")
            except:
                logging.debug("I do not")
                pass

            self.statted = False
            self.stats = Statistics()
            return

        logging.debug("New population instance, %s class genomes.",
                      genome.__class__.__name__)
        self.oneSelfGenome = genome
        self.internalPop = []
        self.internalPopRaw = []
        self.popSize = 0
        self.sortType = Consts.CDefPopSortType
        self.sorted = False
        self.minimax = Consts.CDefPopMinimax
        self.scaleMethod = FunctionSlot("Scale Method")
        self.scaleMethod.set(Consts.CDefPopScale)
        self.allSlots = [self.scaleMethod]

        self.internalParams = {}
        self.multiProcessing = (False, False)

        # Statistics
        self.statted = False
        self.stats = Statistics()
Example #32
    def Dispatcher(self):
        self.processor = FrameProcessor(self.SharedData)
        self.SharedData.MyThreadPool.start(self.processor)
        self.SharedData.current_frame.connect(self.update_frame)
        self.SharedData.frame_processor_finished.connect(
            self.processor.cleanup)
        print("Frame Dispatcher ready")

        self.dispatcher = ThreadDispatcher(self.SharedData)
        self.SharedData.dispatcher_finished.connect(self.dispatcher.cleanup)
        self.dispatcher.start()
        print("Network Reader ready")

        self.stats = Statistics(self.SharedData)
        self.stats.start(5000)
Example #33
   def __init__(self, genome):
      """ The GPopulation Class creator """

      if isinstance(genome, GPopulation):
         self.oneSelfGenome  = genome.oneSelfGenome
         self.internalPop    = []
         self.internalPopRaw = []
         self.popSize       = genome.popSize
         self.sortType      = genome.sortType
         self.sorted        = False
         self.minimax       = genome.minimax
         self.scaleMethod   = genome.scaleMethod
         self.allSlots      = [self.scaleMethod]

         self.internalParams = genome.internalParams
         self.multiProcessing = genome.multiProcessing
         try:
            logging.debug("do I have a comm?")
            self.mpi_comm = genome.mpi_comm
            self.mpi_myeval = genome.mpi_myeval
            self.mpi_full_copy = genome.mpi_full_copy
            logging.debug("I do")
         except:
            logging.debug("I do not")
            pass

         self.statted = False
         self.stats   = Statistics()
         return

      logging.debug("New population instance, %s class genomes.", genome.__class__.__name__)
      self.oneSelfGenome  = genome
      self.internalPop    = []
      self.internalPopRaw = []
      self.popSize       = 0
      self.sortType      = Consts.CDefPopSortType
      self.sorted        = False
      self.minimax       = Consts.CDefPopMinimax
      self.scaleMethod   = FunctionSlot("Scale Method")
      self.scaleMethod.set(Consts.CDefPopScale)
      self.allSlots      = [self.scaleMethod]

      self.internalParams = {}
      self.multiProcessing = (False, False)

      # Statistics
      self.statted = False
      self.stats   = Statistics()
Example #34
   def __init__(self, genome):
      """ The GPopulation Class creator """

      if isinstance(genome, GPopulation):
          #Cloning a population?
         self.oneSelfGenome  = genome.oneSelfGenome
         self.internalPop    = []
         self.internalPopRaw = []
         self.popSize       = genome.popSize
         self.sortType      = genome.sortType
         self.sorted        = False
         self.minimax       = genome.minimax
         self.scaleMethod   = genome.scaleMethod
         self.allSlots      = [self.scaleMethod]

         self.internalParams = genome.internalParams
         self.multiProcessing = genome.multiProcessing

         self.statted = False
         self.stats   = Statistics()
         self.proc_pool = genome.proc_pool
         return

      logging.debug("New population instance, %s class genomes.", genome.__class__.__name__)
      self.oneSelfGenome  = genome
      self.internalPop    = []
      self.internalPopRaw = []
      self.popSize       = 0
      self.proc_pool = None
      self.sortType      = Consts.CDefPopSortType
      self.sorted        = False
      self.minimax       = Consts.CDefPopMinimax
      self.scaleMethod   = FunctionSlot("Scale Method")
      self.scaleMethod.set(Consts.CDefPopScale)
      self.allSlots      = [self.scaleMethod]

      self.internalParams = {}
      self.multiProcessing = (False, False)

      # Statistics
      self.statted = False
      self.stats   = Statistics()
Example #35
    def generateChart(self):
        self.graphView.setScrollView(self.scrollView)

        stat = Statistics(self.startDate, self.endDate)
        
        if self.reportType == "tasks":
            self.graphView.setData(stat.countTasks(), self.reportType) 
                
        elif self.reportType == "projects":
            self.graphView.setData(stat.countProjects(), self.reportType) 
                
        elif self.reportType == "slacking":
            self.graphView.setData(stat.countSlacking(), self.reportType) 
    
        self.graphView.setScale(stat.maxValue)
        self.lblWorkTotal.setStringValue_(secToTimeStr(stat.totalWork))
        self.lblAvgWork.setStringValue_(secToTimeStr(stat.avgWork))
        self.lblSlackTotal.setStringValue_(secToTimeStr(stat.totalSlacking))
        self.lblAvgSlack.setStringValue_(secToTimeStr(stat.avgSlacking))
        self.graphView.setNeedsDisplay_(True)
Example #36
   def __init__(self, genome):
      """ The GPopulation Class creator """

      logging.debug("New population instance, %s class genomes.", genome.__class__.__name__)
      self.oneSelfGenome = genome
      self.internalPop   = []
      self.popSize       = 0
      self.sortType      = Consts.CDefPopSortType
      self.sorted        = False
      self.minimax       = Consts.CDefPopMinimax
      self.scaleMethod   = FunctionSlot("Scale Method")
      self.scaleMethod.set(Consts.CDefPopScale)
      self.allSlots      = [self.scaleMethod]

      # Statistics
      self.statted = False
      self.stats   = Statistics()
Example #37
def setup(env, num_gates, wait_time, t_inter):
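    # SimPy process: spawn an initial plane, then keep generating arrivals at
    # random intervals, recording arrivals and busy time in the shared stats object.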
    airport = Airport(env, num_gates, wait_time)
    for i in range(1):
        env.process(plane(env, 'Plane %d' % i, airport))
        stats.addArrivals()

    usage_res = env.now
    while True:
        yield env.timeout(random.randint(t_inter-2, t_inter+2))
        i += 1
        env.process(plane(env, 'Plane %d' % i, airport))
        stats.addArrivals()

        #print(airport.num_gates.count)
        if not airport.open:
            stats.addBusyTime(env.now - usage_res)
            usage_res = env.now

stats = Statistics(SIM_TIME)

stats.setNumRecursos(NUM_GATES)

random.seed(RANDOM_SEED)

env = simpy.Environment()
env.process(setup(env, NUM_GATES, WAIT_TIME, T_INTER))

env.run(until=SIM_TIME)

stats.printStats("Airport")
Example #38
class GPopulation:
   """ GPopulation Class - The container for the population

   **Examples**
      Get the population from the :class:`GSimpleGA.GSimpleGA` (GA Engine) instance
         >>> pop = ga_engine.getPopulation()

      Get the best fitness individual
         >>> bestIndividual = pop.bestFitness()

      Get the best raw individual
         >>> bestIndividual = pop.bestRaw()

      Get the statistics from the :class:`Statistics.Statistics` instance
         >>> stats = pop.getStatistics()
         >>> print stats["rawMax"]
         10.4

      Iterate, get/set individuals
         >>> for ind in pop:
         >>>   print ind
         (...)
         
         >>> for i in xrange(len(pop)):
         >>>    print pop[i]
         (...)

         >>> pop[10] = newGenome
         >>> pop[10].fitness
         12.5

   :param genome: the :term:`Sample genome`, or a GPopulation object, when cloning.

   """

   def __init__(self, genome):
      """ The GPopulation Class creator """

      if isinstance(genome, GPopulation):
          #Cloning a population?
         self.oneSelfGenome  = genome.oneSelfGenome
         self.internalPop    = []
         self.internalPopRaw = []
         self.popSize       = genome.popSize
         self.sortType      = genome.sortType
         self.sorted        = False
         self.minimax       = genome.minimax
         self.scaleMethod   = genome.scaleMethod
         self.allSlots      = [self.scaleMethod]

         self.internalParams = genome.internalParams
         self.multiProcessing = genome.multiProcessing

         self.statted = False
         self.stats   = Statistics()
         self.proc_pool = genome.proc_pool
         return

      logging.debug("New population instance, %s class genomes.", genome.__class__.__name__)
      self.oneSelfGenome  = genome
      self.internalPop    = []
      self.internalPopRaw = []
      self.popSize       = 0
      self.proc_pool = None
      self.sortType      = Consts.CDefPopSortType
      self.sorted        = False
      self.minimax       = Consts.CDefPopMinimax
      self.scaleMethod   = FunctionSlot("Scale Method")
      self.scaleMethod.set(Consts.CDefPopScale)
      self.allSlots      = [self.scaleMethod]

      self.internalParams = {}
      self.multiProcessing = (False, False)

      # Statistics
      self.statted = False
      self.stats   = Statistics()

   #---------------------------------------------------------------------------------
   def setMultiProcessing(self, flag=True, full_copy=False, number_of_processes=None):
        """ Sets the flag to enable/disable the use of python multiprocessing module.
        Use this option when you have more than one core on your CPU and when your
        evaluation function is very slow.
        The parameter "full_copy" defines where the individual data should be copied back
        after the evaluation or not. This parameter is useful when you change the
        individual in the evaluation function.

        :param flag: True (default) or False
        :param full_copy: True or False (default)
        :param number_of_processes: None = use the default, or specify the number

        .. warning:: Use this option only when your evaluation function is slow, so you
                   will get a good tradeoff between the process communication speed and the
                   parallel evaluation.
        """
        #Save the parameters
        old_settings = self.multiProcessing
        self.multiProcessing = (flag, full_copy, number_of_processes)
        #Re-initialize if anything changed.
        if (old_settings != self.multiProcessing):
            self.initializeMultiProcessing()

   #---------------------------------------------------------------------------------
   def initializeMultiProcessing(self):
        """Initialize the multiprocessing interface. Create the process pool."""
        #Close the pool if it exists (we'll be creating a new one)
        self.cleanupMultiProcessing()

        if self.multiProcessing[0]:
            t1 = time.time()
            #Create the process pool with the # of processes
            num_proc = self.multiProcessing[2]
            if num_proc is None:
                self.proc_pool = Pool()
            elif num_proc > 0:
                self.proc_pool = Pool(processes=num_proc)
            else:
                self.proc_pool = Pool()
            print "Multiprocessing initialized in %03.3f sec; will use %d processors." % ( (time.time()-t1), num_proc )

   #---------------------------------------------------------------------------------
   def cleanupMultiProcessing(self):
       """Clean up process pools."""
       if self.proc_pool is not None:
            self.proc_pool.close()

   def setMinimax(self, minimax):
      """ Sets the population minimax

      Example:
         >>> pop.setMinimax(Consts.minimaxType["maximize"])
   
      :param minimax: the minimax type

      """
      self.minimax = minimax

   def __repr__(self):
      """ Returns the string representation of the population """
      ret =  "- GPopulation\n"
      ret += "\tPopulation Size:\t %d\n" % (self.popSize,)
      ret += "\tSort Type:\t\t %s\n" % (Consts.sortType.keys()[Consts.sortType.values().index(self.sortType)].capitalize(),)
      ret += "\tMinimax Type:\t\t %s\n" % (Consts.minimaxType.keys()[Consts.minimaxType.values().index(self.minimax)].capitalize(),)
      for slot in self.allSlots:
         ret+= "\t" + slot.__repr__()
      ret+="\n"
      ret+= self.stats.__repr__()
      return ret

   def __len__(self):
      """ Return the length of population """
      return len(self.internalPop)
      
   def __getitem__(self, key):
      """ Returns the specified individual from population """
      return self.internalPop[key]

   def __iter__(self):
      """ Returns the iterator of the population """
      return iter(self.internalPop)

   def __setitem__(self, key, value):
      """ Set an individual of population """
      self.internalPop[key] = value
      self.clearFlags()

   def clearFlags(self):
      """ Clear the sorted and statted internal flags """
      self.sorted = False
      self.statted = False

   def getStatistics(self):
      """ Return a Statistics class for statistics

      :rtype: the :class:`Statistics.Statistics` instance

      """
      self.statistics()
      return self.stats      

   def statistics(self):
      """ Do statistical analysis of population and set 'statted' to True """
      if self.statted: return
      logging.debug("Running statistical calculations")
      raw_sum = 0
      fit_sum = 0

      len_pop = len(self)
      for ind in xrange(len_pop):
         raw_sum += self[ind].score
         #fit_sum += self[ind].fitness

      self.stats["rawMax"] = max(self, key=key_raw_score).score
      self.stats["rawMin"] = min(self, key=key_raw_score).score
      self.stats["rawAve"] = raw_sum / float(len_pop)
      #self.stats["rawTot"] = raw_sum
      #self.stats["fitTot"] = fit_sum
      
      tmpvar = 0.0
      for ind in xrange(len_pop):
         s = self[ind].score - self.stats["rawAve"]
         s*= s
         tmpvar += s

      tmpvar/= float((len(self) - 1))
      try:
         self.stats["rawDev"] = math_sqrt(tmpvar)
      except:
         self.stats["rawDev"] = 0.0

      self.stats["rawVar"] = tmpvar

      self.statted = True

   def bestFitness(self, index=0):
      """ Return the best scaled fitness individual of population

      :param index: the *index* best individual
      :rtype: the individual

      """
      self.sort()
      return self.internalPop[index]

   def bestRaw(self, index=0):
      """ Return the best raw score individual of population

      :param index: the *index* best raw individual
      :rtype: the individual

      .. versionadded:: 0.6
         The parameter `index`.
      
      """
      if self.sortType == Consts.sortType["raw"]:
         return self.internalPop[index]
      else:
         self.sort()
         return self.internalPopRaw[index]

   def sort(self):
      """ Sort the population """
      if self.sorted: return
      rev = (self.minimax == Consts.minimaxType["maximize"])

      if self.sortType == Consts.sortType["raw"]:
         self.internalPop.sort(cmp=Util.cmp_individual_raw, reverse=rev)
      else:
         self.scale()
         self.internalPop.sort(cmp=Util.cmp_individual_scaled, reverse=rev)
         self.internalPopRaw = self.internalPop[:]
         self.internalPopRaw.sort(cmp=Util.cmp_individual_raw, reverse=rev)

      self.sorted = True

   def setPopulationSize(self, size):
      """ Set the population size

      :param size: the population size

      """
      self.popSize = size

   def setSortType(self, sort_type):
      """ Sets the sort type

      Example:
         >>> pop.setSortType(Consts.sortType["scaled"])

      :param sort_type: the Sort Type

      """
      self.sortType = sort_type

   def create(self, **args):
      """ Clone the example genome to fill the population """
      self.minimax = args["minimax"]
      self.internalPop = [self.oneSelfGenome.clone() for i in xrange(self.popSize)]
      self.clearFlags()

   def __findIndividual(self, individual, end):
      for i in xrange(end):
         if individual.compare(self.internalPop[i]) == 0:
            return True

   def initialize(self, **args):
      """ Initialize all individuals of population,
      this calls the initialize() of individuals """
      logging.debug("Initializing the population")
   
      if self.oneSelfGenome.getParam("full_diversity", True) and hasattr(self.oneSelfGenome, "compare"):
         for i in xrange(len(self.internalPop)):
            curr = self.internalPop[i]
            curr.initialize(**args)
            while self.__findIndividual(curr, i):
               curr.initialize(**args)
      else:
         for gen in self.internalPop:
            gen.initialize(**args)
      self.clearFlags()

   def evaluate(self, **args):
      """ Evaluate all individuals in population, calls the evaluate() method of individuals
   
      :param args: this params are passed to the evaluation function

      """
      # We have multiprocessing
      if self.multiProcessing[0] and MULTI_PROCESSING:
         logging.debug("Evaluating the population using the multiprocessing method")

         #Make sure we have a process pool.
         if self.proc_pool is None:
             self.initializeMultiProcessing()

         # Multiprocessing full_copy parameter
         if self.multiProcessing[1]:
            results = self.proc_pool.map(multiprocessing_eval_full, self.internalPop)
            for i in xrange(len(self.internalPop)):
               self.internalPop[i] = results[i]
         else:
            results = self.proc_pool.map(multiprocessing_eval, self.internalPop)
            for individual, score in zip(self.internalPop, results):
               individual.score = score
      else:
         #Direct evaluation (no multiprocessing)
         for ind in self.internalPop:
            ind.evaluate(**args)

      self.clearFlags()

   def scale(self, **args):
      """ Scale the population using the scaling method

      :param args: this parameter is passed to the scale method

      """
      for it in self.scaleMethod.applyFunctions(self, **args):
         pass

      fit_sum = 0
      for ind in xrange(len(self)):
         fit_sum += self[ind].fitness

      self.stats["fitMax"] = max(self, key=key_fitness_score).fitness
      self.stats["fitMin"] = min(self, key=key_fitness_score).fitness
      self.stats["fitAve"] = fit_sum / float(len(self))

      self.sorted = False

   def printStats(self):
      """ Print statistics of the current population """
      message = ""
      if self.sortType == Consts.sortType["scaled"]:
         message = "Max/Min/Avg Fitness(Raw) [%(fitMax).2f(%(rawMax).2f)/%(fitMin).2f(%(rawMin).2f)/%(fitAve).2f(%(rawAve).2f)]" % self.stats
      else:
         message = "Max/Min/Avg Raw [%(rawMax).2f/%(rawMin).2f/%(rawAve).2f]" % self.stats
      logging.info(message)
      print message
      return message

   def copy(self, pop):
      """ Copy current population to 'pop'

      :param pop: the destination population

      .. warning:: this method does not copy the individuals, only the population logic

      """
      pop.popSize = self.popSize
      pop.sortType = self.sortType
      pop.minimax = self.minimax
      pop.scaleMethod = self.scaleMethod
      #pop.internalParams = self.internalParams.copy()
      pop.internalParams = self.internalParams
      pop.multiProcessing = self.multiProcessing
   
   def getParam(self, key, nvl=None):
      """ Gets an internal parameter

      Example:
         >>> population.getParam("tournamentPool")
         5

      :param key: the key of param
      :param nvl: if the key doesn't exist, the nvl will be returned

      """
      return self.internalParams.get(key, nvl)


   def setParams(self, **args):
      """ Gets an internal parameter

      Example:
         >>> population.setParams(tournamentPool=5)

      :param args: parameters to set

      .. versionadded:: 0.6
         The `setParams` method.
      """
      self.internalParams.update(args)

   def clear(self):
      """ Remove all individuals from population """
      del self.internalPop[:]
      del self.internalPopRaw[:]
      self.clearFlags()
      
   def clone(self):
      """ Return a brand-new cloned population """
      newpop = GPopulation(self.oneSelfGenome)
      self.copy(newpop)
      return newpop
Example #39
class Road:
    def __init__(self):

        # all variables here
        self.TOTAL_TIME = 15000
        self.NUM_OF_CARS = 200000
        self.DELTA_TIME = 0.05
        self.PERCENT_OF_CARS = [0.3,0.7]

        # Units: m/delta_t
        self.AVG_SPEED_OF_CARS = {'s': 30, 'm': 25, 'b': 20}
        self.STD_DEV_OF_CARS = {'s': 10, 'm':10, 'b': 10}
        
        # Units: m
        self.LENGTHS_OF_CARS = {'s': 5, 'm': 7, 'b': 10}
        self.VISIBLE_DISTANCE_OF_CARS = {'s': 70, 'm': 80, 'b': 90}

        # Units: m/delta_t
        self.MIN_SPEED = 15
        self.MAX_SPEED = 60

        # Units: m
        self.ROAD_LENGTH = 5000
        self.SAFE_DISTANCE_BETWEEN_CARS = 20
        ''' this affects accident rate a lot'''
        self.INITIAL_DISTANCE_BETWEEN_CARS = 80

        self._recycledIndexes = set()
        
        self.all_cars = []

        self.current_num_of_cars = 0

        self.stat = Statistics()
        
        self.generateCar(self.generateIndex())
        for time in range(self.TOTAL_TIME):
            if self.needToGenerateCars():
                self.generateCar(self.generateIndex())
##                print("New Car generated")
##                print(self._recycledIndexes)
##                print()
                
            num_of_cars_on_r = 0
            num_of_cars_on_l = 0
            total_speed_on_r = 0
            total_speed_on_l = 0
            for i in range(len(self.all_cars)):
                each = self.all_cars[i]
                if each == None:
                    self._recycledIndexes.add(i)
                    continue
                if each.position > self.ROAD_LENGTH:
                    self.all_cars[i] = None
                    self._recycledIndexes.add(i)
##                    print(each.index, "th car reaches the end of the road!")
##                    print(self._recycledIndexes)
                    continue
##                print(each.index, "th car: ", each.position, " lane: ", each.lane)
##                print()
                each.move(self.all_cars)

###################################################Statistics################################
                if each.lane == 'r':
                    num_of_cars_on_r += 1
                    total_speed_on_r += each.Vcurrent
                elif each.lane == 'l':
                    num_of_cars_on_l += 1
                    total_speed_on_l += each.Vcurrent

            self.stat.listIncrease("density_of_r", time, num_of_cars_on_r / self.ROAD_LENGTH)
            self.stat.listIncrease("density_of_l", time, num_of_cars_on_l / self.ROAD_LENGTH)

            if num_of_cars_on_r == 0:
                    self.stat.listIncrease("avg_speed_on_r", time, 0)
            else:
                self.stat.listIncrease("avg_speed_on_r", time, total_speed_on_r / num_of_cars_on_r)
                    
            if num_of_cars_on_l == 0:
                    self.stat.listIncrease("avg_speed_on_l", time, 0)
            else:
                self.stat.listIncrease("avg_speed_on_l", time, total_speed_on_l / num_of_cars_on_l)
            
            time += self.DELTA_TIME


        print("accident rate: ", self.stat.getRate("num_of_accidents", "num_of_cars_generated"))
        print("succussful passing rate: ", self.stat.getRate("successful_passings", "passing_intents"))

        writeToFile(self.stat.getList("density_of_r"), "density_of_r.txt")
        writeToFile(self.stat.getList("density_of_l"), "density_of_l.txt")
        writeToFile(self.stat.getList("avg_speed_on_r"), "avg_speed_on_r.txt")
        writeToFile(self.stat.getList("avg_speed_on_l"), "avg_speed_on_l.txt")
##        print("density of lane r: ", self.stat.getList("density_of_r"))
##        print("density of lane l: ", self.stat.getList("density_of_l"))
##        print("Average speed on lane r: ", self.stat.getList("avg_speed_on_r"))
##        print("Average speed on lane l: ", self.stat.getList("avg_speed_on_l"))        

    #############
##        f.write(s)
##        f.close()
  ####################      

    def generateIndex(self):
        if len(self._recycledIndexes) > 0:
            return self._recycledIndexes.pop()
        else:
            return len(self.all_cars)

        
    def needToGenerateCars(self):
        # base cases
        if len(self.all_cars) == 0:
            return True
        if len(self.all_cars) >= self.NUM_OF_CARS:
            return False

        
        car = self.all_cars[len(self.all_cars) - 1]
        if car == None or car.position >= self.INITIAL_DISTANCE_BETWEEN_CARS:
            return True
        return False

    
    def generateRandomType(self):
        a = random.random()
        percent_of_cars = self.PERCENT_OF_CARS
        if a < percent_of_cars[0]:
            return 's'
        elif a >= percent_of_cars[0] and a < percent_of_cars[1]:
            return 'm'
        else:
            return 'b'

    def generateCar(self, index):
        typeOfCar = self.generateRandomType()
        Vcurrent = -1
        
        while(Vcurrent < self.MIN_SPEED or Vcurrent > self.MAX_SPEED):
            u = self.AVG_SPEED_OF_CARS[typeOfCar]
            std_dev = self.STD_DEV_OF_CARS[typeOfCar]
            u1 = random.random()
            u2 = random.random()
            z = (-2*math.log(u1))**(0.5)*math.cos(2*math.pi*u2)
            Vcurrent = u + std_dev*z
        
        Vexpected = Vcurrent
        length = self.LENGTHS_OF_CARS[typeOfCar]
        visible_distance = self.VISIBLE_DISTANCE_OF_CARS[typeOfCar]
        car = Car(typeOfCar, Vexpected, Vcurrent, length, 'r', 0, visible_distance, index, self.stat)
        if index >= len(self.all_cars):
            self.all_cars += [car]
        else:
            self.all_cars[index] = car
Example #41
 def __init__(self):
     self.dataset = None
     self.Graph = nx.DiGraph()
     self.K =2
     self.items = dict()
     self.stats = Statistics()
Example #42
def test_statistics(clean=False, language='en'):
    lib = Library(cleaning=clean)
    lib.load_library(language=language)
    st = Statistics(books=lib.get_books(),authors=lib.get_authors())
    st.collect_statistics()
Example #43
 def processParagraphs(self):
     allNouns=[]
     nounCountDictList=[]
     #The following loop counts each noun in each paragraph
     #and builds a dictionary (nounCountDict) per paragraph;
     #each dictionary is appended to the list nounCountDictList.
     #Another output of this loop is allNouns,
     #the list of all nouns across all paragraphs,
     #which may still contain duplicates at the end of the loop.
     #Duplicates are removed in self.removeDuplicates().
     for paragraph in self.paragraphs:
         sentences = nltk.sent_tokenize(paragraph)
         tags = []
         for sentence in sentences:
             tokens = nltk.word_tokenize(sentence)
             tags.extend(nltk.pos_tag(tokens))
         nouns=self.getNouns(tags)
         filteredNouns=self.removeStopWords(nouns)
         allNouns.extend(filteredNouns)
         nounCountDict=self.getNounsCounts(filteredNouns)
         nounCountDictList.append(nounCountDict)
     allNouns=self.removeDuplicates(allNouns)
     
     #Creates occurrenceVector. See createOccurenceVector() for more details
     occurenceVectorDict=self.createOccurenceVector(allNouns,nounCountDictList)
     weightVectorDict=self.createNounWeightDict(occurenceVectorDict)
     numberOfParagraphs=len(self.paragraphs)
     pointList=[]
     for key in weightVectorDict.keys():
         totalOccurrences=sum(weightVectorDict[key])
         averageCount=totalOccurrences/numberOfParagraphs
         variance=numpy.var(weightVectorDict[key])
         #TODO: have to replace the following line with a better formula that balances mean and variance 
         point=averageCount-variance
         pointList.append((key,point))
     
     #Sort keywords according to weight
     pointList.sort(key=itemgetter(1),reverse=True)
     print(pointList)
     
     #Take the ten most important words
     print("Important words")
     keyWords = [pair[0] for pair in pointList[:10]]
     
     s=Statistics()
     s.keywords=keyWords
     s.title=keyWords[0]
     
     '''
     The following code implements the paragraph scoring algorithm based on
     eigenvectors of the similarity matrix.
     '''
     #Create the similarity matrix used to rate the paragraphs
     similarityMatrix=[]
     for i in range(0,numberOfParagraphs):
         currentRow=[]
         for j in range(0,numberOfParagraphs):
             freq=0
             nounsInThisPara=0
             for k in allNouns:
                 currentOccurenceVector=occurenceVectorDict[k]
                 if currentOccurenceVector[i]>0:
                     nounsInThisPara=nounsInThisPara+1
                 if currentOccurenceVector[i]>0 and currentOccurenceVector[j]>0:
                     freq=freq+1
             if nounsInThisPara==0:
                 similarity=0
             else:
                 similarity=float(freq)/float(nounsInThisPara)
             currentRow.append(similarity)
         similarityMatrix.append(currentRow)
     
     print("Similarity Matrix")
     self.printMatrix(similarityMatrix)
     
     similarityArray=numpy.array(similarityMatrix)
     
     #Calculate eigenvalues and eigenvectors of the similarity matrix
     eigenvalues, eigenvectors = numpy.linalg.eig(similarityArray)

     #Only for the purpose of printing the eigenvectors
     print("Eigen Vectors")
     for i in range(0,numberOfParagraphs):
         print(self.paragraphs[i])
         print(eigenvectors[i])
         
     paragraphRatings=[]
     k=0
     for i in eigenvectors:
         count=0
         for j in i:
             if j>0.001: #checking for positive value
                 count=count+1
         paragraphRatings.append((count,k))
         k=k+1
     #The following lines are to locate the most important paragraph
     
     #Sort paragraphs according to rating
     paragraphRatings.sort(key=itemgetter(0),reverse=True)
     
     s.importantPara=self.paragraphs[paragraphRatings[0][1]]
     s.importantParaRating=paragraphRatings[0][0]
     return s
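
The paragraph-rating step above is easier to see on a toy input. The snippet below uses a made-up 3x3 similarity matrix (the values are illustrative only) and reproduces just that step: compute the eigenvectors, count the positive components in each row of the eigenvector matrix, and treat the row with the most positive components as the most important paragraph.

import numpy

# Made-up similarity matrix for three paragraphs (illustrative values only).
similarityMatrix = [
    [1.0, 0.6, 0.2],
    [0.6, 1.0, 0.1],
    [0.2, 0.1, 1.0],
]
similarityArray = numpy.array(similarityMatrix)

eigenvalues, eigenvectors = numpy.linalg.eig(similarityArray)

# Same rating rule as processParagraphs(): one rating per row, counting
# components above the 0.001 threshold.
paragraphRatings = []
for rowIndex, row in enumerate(eigenvectors):
    positive = sum(1 for component in numpy.real(row) if component > 0.001)
    paragraphRatings.append((positive, rowIndex))

paragraphRatings.sort(key=lambda pair: pair[0], reverse=True)
print("most important paragraph index:", paragraphRatings[0][1])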
Example #44
0
CONFIG = Configuration()


#Load Hashcodes from File
DATABASE = set()
try:
    DATABASE_FILE = open(CONFIG.DATABASE_FILE_PATH, 'r')
except IOError:
    DATABASE_FILE = open(CONFIG.DATABASE_FILE_PATH, 'w+')
    logging.info("Created New Database %s", CONFIG.DATABASE_FILE_PATH)

print "Loading Database...."
for line in DATABASE_FILE:
    DATABASE.add(line.replace('\n', ''))
logging.info("Loaded %i mail hash values from database", len(DATABASE))
STATS = Statistics(len(DATABASE))
DATABASE_FILE.close()
#Open Database-File for appending new HashCodes
DATABASE_FILE = open(CONFIG.DATABASE_FILE_PATH, 'a')

print "Connecting to Server..."
#Init Mail Connection
MAIL_CONNECTION = imaplib.IMAP4_SSL(CONFIG.MAIL_SERVER, CONFIG.MAIL_PORT) if CONFIG.MAIL_PORT else imaplib.IMAP4_SSL(CONFIG.MAIL_SERVER)
try:
    MAIL_CONNECTION.login(CONFIG.MAIL_USER, CONFIG.MAIL_PASSWORD)
    logging.info("Successfully connected to %s@%s", CONFIG.MAIL_USER, CONFIG.MAIL_SERVER)
except imaplib.IMAP4.error as e:
    print "Failed to connect"
    logging.error("Could not connect to %s@%s", CONFIG.MAIL_USER, CONFIG.MAIL_SERVER)
    logging.error("Reason: %s", e)
    exit(1)
class GraphDrawer():

    def __init__(self):
        self.dataset = None
        self.Graph = nx.DiGraph()
        self.K = 2
        self.items = dict()
        self.stats = Statistics()


    def build_graph(self, dataset, keys):
        self.dataset = dataset
        for sessionID in keys:
            try:
                session_purchases = self.dataset[sessionID]
            except KeyError:
                print sessionID
                continue
            session_states = self.extract_states(session_purchases)
            self.insert_states(session_states)
        # self.print_edges_data()
        self.mark_popular_item()
        self.stats.num_of_edges = self.Graph.number_of_edges()
        self.stats.num_of_nodes = self.Graph.number_of_nodes()

    def mark_popular_item(self):
        max_count = 0
        for item in self.items.keys():
            if self.items[item] > max_count:
                max_count = self.items[item]
                self.most_popular_item = item

    def print_all_nodes(self):
        for n in self.Graph.nodes(): print str(n)

    def print_all_edges(self):
        for e in self.Graph.edges(): print e

    def print_successors(self,source_state):
        succ_list = self.Graph.successors(source_state)
        for succ in succ_list: print succ

    def print_edges_data(self):
        for edge in self.Graph.edges():
            print '{0} -->  {1}    {2}'.format(edge[0], edge[1],self.Graph.get_edge_data(edge[0], edge[1], None))

    def draw(self):
        # nx.draw(self.Graph)
        # nx.draw_networkx(self.Graph)
        pos = nx.shell_layout(self.Graph)
        nx.draw(self.Graph, pos)

        # show graph
        plt.show()
        raw_input('press any key to continue')

    def fit(self):
        """
        this method goes over all nodes and counts the number of out edges,
        then, it normalize the weight of each edge according to:
            for each node t in successor(s) do:
                sum += weight(s,t)
            for each node t in successors(s) do:
                normalized_weight(s,t)  =   weight(s,t)/sum
        :return: void
        """
        nodes = self.Graph.nodes()
        print "---- number of nodes in the graph = {0}".format(len(nodes))
        for curr_node in nodes:
            node_successors = self.Graph.successors(curr_node)
            total = 0
            for s in node_successors:
                edge_data = self.Graph.get_edge_data(curr_node, s, None)
                # print edge_data
                count = edge_data[Config.COUNT]
                total += count

            # print total
            # print 'curr:  ' + str(curr_node)
            for s in node_successors:
                count = self.Graph[curr_node][s][Config.COUNT]
                self.Graph[curr_node][s][Config.WEIGHT] = float(count) / float(total)
                # print 'succ:   ' + str(s) + ' ' + str(self.Graph[curr_node][s])

    def predict(self,testset):
        """
        :param testset: a map - 'itemIP' --> list<Purchase>

        the main idea is:
            1. for each session
                1.1 hide the last item purchased
                1.2 predict the last item using the model
                1.3 save the
        """
        y_true = []
        y_pred = []
        y_score = []

        for key in testset.keys():
            sequence = testset[key]
            seq_length = len(sequence)
            if seq_length > 1:
                actual = sequence[-1].itemID
                prediction, is_popularity_prediction = self.__predict_sequence__(sequence[:-1])

                if prediction is not None and is_popularity_prediction:
                    y_true.append(1)
                    if actual in prediction[Config.PRED_ITEMS]:
                        y_pred.append(1)
                        y_score.append(prediction[Config.PRED_PROB]) #distanse 0
                        self.stats.correct_prediction()
                    else:
                        y_score.append(0)
                        y_pred.append(0)
                        self.stats.incorrect_prediction()
                    # print '**************'
                    # raw_seq = [x.itemID for x in sequence]
                    # print raw_seq
                    # print 'prediction[{0}]  actual[{1}]  success[{2}]'.format(prediction[Config.PRED_ITEMS], actual, actual in prediction[Config.PRED_ITEMS])
                    # print '----------------------'
                    # print ' '
        return y_true,y_score, y_pred

    def roc(self,y_true, y_score):
        self.stats.draw_ROC_curve(y_true,y_score)



    def print_prediction_stats(self,y_true, y_score,y_pred):
        self.stats.print_prediction_stats(y_true, y_score,y_pred)

    def __predict_sequence__(self,sequence):
        states = self.extract_states(sequence)
        last_state = states[-1]
        best_succ = dict()
        best_succ[Config.PRED_PROB] = 0.0
        best_succ[Config.PRED_ITEMS] = []

        is_popularity_prediction = False
        if self.Graph.__contains__(last_state):
            successors = self.Graph.successors(last_state)
            max_weight = 0.0
            for succ in successors:
                edge = self.Graph[last_state][succ]
                weight = float(edge[Config.WEIGHT])
                if weight > max_weight:
                    max_weight = weight
                    best_succ[Config.PRED_PROB] = max_weight
                    best_succ[Config.PRED_ITEMS] = [succ[-1]]
                elif weight == max_weight:
                    best_succ[Config.PRED_PROB] = max_weight
                    best_succ[Config.PRED_ITEMS].append(succ[-1])

                # print '{0}  --->   {1}  count[{2}]  weight[{3}]'.format(last_state,succ,edge[Config.COUNT], edge[Config.WEIGHT])
                is_popularity_prediction = True
        else:
            best_succ[Config.PRED_PROB] = 0.0
            best_succ[Config.PRED_ITEMS].append(self.most_popular_item)
            # print 'popularity:  last state: {0} best: {1}'.format(last_state, best_succ)

        return best_succ, is_popularity_prediction


    def insert_states(self, states):

        # insert all states:
        for state in states:
            self.Graph.add_node(state)

        # insert all edges:
        for i in range(len(states) -1):
            self.add_to_edge(states[i], states[i+1], Config.COUNT, 1)
        # when a session yields only a single state it has no successor,
        # so we connect it to the closest matching node(s) with a new edge
        if len(states) == 1:
            for node in states:
                if self.K == 3:
                    if node[0] == '-1' and node[1] == '-1':
                        # this is the case of there is only one item in a state(sequence of purchases)
                        # and now we're going to find all state that this item is in
                        # then, add edges accordingly
                        item = node[2]
                        for s in self.Graph.nodes():
                            if s[0] == '-1' and item == s[1] and s != node:
                                self.add_to_edge(node,s,Config.COUNT,1)
                    else:
                        first_item = node[1]
                        second_item = node[2]
                        for s in self.Graph.nodes():
                            if s[0] == first_item and s[1] == second_item:
                                if s != node:
                                    self.add_to_edge(node,s,Config.COUNT,1)
                if self.K == 2:
                    if node[0] == '-1':
                        item = node[1]
                        for s in self.Graph.nodes():
                            if s[1] == item and s != node:
                                self.add_to_edge(node,s,Config.COUNT,1)





    def add_to_edge(self, u, v, attr, value):
        if self.Graph.get_edge_data(u, v, None) is None:
            self.Graph.add_edge(u, v, {attr: value})
        else:
            self.Graph[u][v][attr] += value

    def extract_states(self,purchases):
        items = []
        for purchase in purchases:
            items.append(purchase.itemID)
            self.count_item(purchase.itemID)
        items_length = len(items)
        # pad short sessions with '-1' so at least one K-length state is produced
        if items_length < 3:
            for i in range(self.K - items_length):
                items.insert(0,'-1')

        states = zip(*(items[i:] for i in range(self.K)))
        # for p in purchases: print p
        # print states
        return states

    def count_item(self, itemID):
        if itemID in self.items:
            self.items[itemID] += 1
        else:
            self.items[itemID] = 1
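
For context, a typical way to drive the GraphDrawer class above would be roughly the following. The Purchase record and the train/test maps are stand-ins invented for this sketch; the real project presumably supplies its own purchase objects (anything with an itemID attribute works) and its own Config constants.

from collections import namedtuple

# Stand-in purchase record; only the itemID attribute is actually used here.
Purchase = namedtuple("Purchase", ["sessionID", "itemID"])

# Toy train/test maps: sessionID -> ordered list of purchases.
train = {
    "s1": [Purchase("s1", "A"), Purchase("s1", "B"), Purchase("s1", "C")],
    "s2": [Purchase("s2", "A"), Purchase("s2", "B"), Purchase("s2", "D")],
}
test = {
    "s3": [Purchase("s3", "A"), Purchase("s3", "B"), Purchase("s3", "C")],
}

drawer = GraphDrawer()
drawer.build_graph(train, list(train.keys()))   # build the K-gram state graph
drawer.fit()                                    # normalize edge counts into weights
y_true, y_score, y_pred = drawer.predict(test)  # hide and predict each last item
drawer.print_prediction_stats(y_true, y_score, y_pred)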
Example #46
0
class GPopulation:
   """ GPopulation Class - The container for the population

   **Examples**
      Get the population from the :class:`GSimpleGA.GSimpleGA` (GA Engine) instance
         >>> pop = ga_engine.getPopulation()

      Get the best fitness individual
         >>> bestIndividual = pop.bestFitness()

      Get the best raw individual
         >>> bestIndividual = pop.bestRaw()

      Get the statistics from the :class:`Statistics.Statistics` instance
         >>> stats = pop.getStatistics()
         >>> print stats["rawMax"]
         10.4

      Iterate, get/set individuals
         >>> for ind in pop:
         >>>   print ind
         (...)
         
         >>> for i in xrange(len(pop)):
         >>>    print pop[i]
         (...)

         >>> pop[10] = newGenome
         >>> pop[10].fitness
         12.5

   :param genome: the :term:`Sample genome`

   """

   def __init__(self, genome):
      """ The GPopulation Class creator """

      logging.debug("New population instance, %s class genomes.", genome.__class__.__name__)
      self.oneSelfGenome = genome
      self.internalPop   = []
      self.popSize       = 0
      self.sortType      = Consts.CDefPopSortType
      self.sorted        = False
      self.minimax       = Consts.CDefPopMinimax
      self.scaleMethod   = FunctionSlot("Scale Method")
      self.scaleMethod.set(Consts.CDefPopScale)
      self.allSlots      = [self.scaleMethod]

      # Statistics
      self.statted = False
      self.stats   = Statistics()

   def setMinimax(self, minimax):
      """ Sets the population minimax

      Example:
         >>> pop.setMinimax(Consts.minimaxType["maximize"])
   
      :param minimax: the minimax type

      """
      self.minimax = minimax

   def __repr__(self):
      """ Returns the string representation of the population """
      ret =  "- GPopulation\n"
      ret += "\tPopulation Size:\t %d\n" % (self.popSize,)
      ret += "\tSort Type:\t\t %s\n" % (Consts.sortType.keys()[Consts.sortType.values().index(self.sortType)].capitalize(),)
      ret += "\tMinimax Type:\t\t %s\n" % (Consts.minimaxType.keys()[Consts.minimaxType.values().index(self.minimax)].capitalize(),)
      for slot in self.allSlots:
         ret+= "\t" + slot.__repr__()
      ret+="\n"
      ret+= self.stats.__repr__()
      return ret

   def __len__(self):
      """ Return the length of population """
      return len(self.internalPop)
      
   def __getitem__(self, key):
      """ Returns the specified individual from population """
      return self.internalPop[key]

   def __iter__(self):
      """ Returns the iterator of the population """
      return iter(self.internalPop)

   def __setitem__(self, key, value):
      """ Set an individual of population """
      self.internalPop[key] = value
      self.__clear_flags()

   def __clear_flags(self):
      self.sorted = False
      self.statted = False

   def getStatistics(self):
      """ Return a Statistics class for statistics

      :rtype: the :class:`Statistics.Statistics` instance

      """
      self.statistics()
      return self.stats      

   def statistics(self):
      """ Do statistical analysis of population and set 'statted' to True """
      if self.statted: return
      logging.debug("Running statistical calc.")
      raw_sum = 0

      len_pop = len(self)
      for ind in xrange(len_pop):
         raw_sum += self[ind].score

      self.stats["rawMax"] = max(self, key=key_raw_score).score
      self.stats["rawMin"] = min(self, key=key_raw_score).score
      self.stats["rawAve"] = raw_sum / float(len_pop)
      
      tmpvar = 0.0
      for ind in xrange(len_pop):
         s = self[ind].score - self.stats["rawAve"]
         s *= s
         tmpvar += s

      tmpvar /= float(len_pop - 1)
      self.stats["rawDev"] = math_sqrt(tmpvar)
      self.stats["rawVar"] = tmpvar

      self.statted = True

   def bestFitness(self, index=0):
      """ Return the best scaled fitness individual of population

      :param index: the *index* best individual
      :rtype: the individual

      """
      self.sort()
      return self.internalPop[index]

   def bestRaw(self):
      """ Return the best raw score individual of population

      :rtype: the individual
      
      """
      if self.minimax == Consts.minimaxType["minimize"]:
         return min(self, key=key_raw_score)
      else:
         return max(self, key=key_raw_score)

   def sort(self):
      """ Sort the population """
      if self.sorted: return
      rev = (self.minimax == Consts.minimaxType["maximize"])

      if self.sortType == Consts.sortType["raw"]:
         self.internalPop.sort(cmp=cmp_individual_raw, reverse=rev)
      else:
         self.scale()
         self.internalPop.sort(cmp=cmp_individual_scaled, reverse=rev)

      self.sorted = True

   def setPopulationSize(self, size):
      """ Set the population size

      :param size: the population size

      """
      self.popSize = size

   def setSortType(self, sort_type):
      """ Sets the sort type

      Example:
         >>> pop.setSortType(Consts.sortType["scaled"])

      :param sort_type: the Sort Type

      """
      self.sortType = sort_type

   def create(self, **args):
      """ Clone the example genome to fill the population """
      self.clear()
      self.minimax = args["minimax"]
      for i in xrange(self.popSize):
         self.internalPop.append(self.oneSelfGenome.clone())
      self.__clear_flags()

   def initialize(self):
      """ Initialize all individuals of population,
      this calls the initialize() of individuals """
      for gen in self.internalPop:
         gen.initialize()
      self.__clear_flags()

   def evaluate(self, **args):
      """ Evaluate all individuals in population, calls the evaluate() method of individuals
   
      :param args: this params are passed to the evaluation function

      """
      for ind in self.internalPop:
         ind.evaluate(**args)
      self.__clear_flags()

   def scale(self, **args):
      """ Scale the population using the scaling method

      :param args: this parameter is passed to the scale method

      """
      for it in self.scaleMethod.applyFunctions(self, **args):
         pass

      fit_sum = 0
      for ind in xrange(len(self)):
         fit_sum += self[ind].fitness

      self.stats["fitMax"] = max(self, key=key_fitness_score).fitness
      self.stats["fitMin"] = min(self, key=key_fitness_score).fitness
      self.stats["fitAve"] = fit_sum / float(len(self))

      self.sorted = False

   def printStats(self):
      """ Print statistics of the current population """
      message = ""
      if self.sortType == Consts.sortType["scaled"]:
         message =  "Max/Min/Avg Fitness(Raw) [%.2f(%.2f)/%.2f(%.2f)/%.2f(%.2f)]" % (self.stats["fitMax"], self.stats["rawMax"], self.stats["fitMin"], self.stats["rawMin"], self.stats["fitAve"], self.stats["rawAve"])
      else:
         message = "Max/Min/Avg Raw [%.2f/%.2f/%.2f]" % (self.stats["rawMax"], self.stats["rawMin"], self.stats["rawAve"])
      logging.info(message)
      print message
      return message

   def copy(self, pop):
      """ Copy current population to 'pop'

      :param pop: the destination population

      .. warning:: this method does not copy the individuals, only the population logic

      """
      pop.popSize = self.popSize
      pop.sortType = self.sortType
      pop.sorted = self.sorted
      pop.statted = self.statted
      pop.minimax = self.minimax
      pop.scaleMethod = self.scaleMethod
   
   def clear(self):
      """ Remove all individuals from population """
      del self.internalPop[:]
      self.__clear_flags()
      
   def clone(self):
      """ Return a brand-new cloned population """
      newpop = GPopulation(self.oneSelfGenome.clone())
      self.copy(newpop)
      return newpop
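
The class above also references a few module-level helpers (key_raw_score, key_fitness_score, cmp_individual_raw, cmp_individual_scaled) that are not shown in this example. They ship alongside GPopulation in Pyevolve; reconstructed sketches consistent with how they are used here would look roughly like this, though the originals may differ in detail.

def key_raw_score(individual):
   """ Key function returning the raw score (used with max()/min()) """
   return individual.score

def key_fitness_score(individual):
   """ Key function returning the scaled fitness (used with max()/min()) """
   return individual.fitness

def cmp_individual_raw(a, b):
   """ Comparator on raw score (used by sort() when sortType is "raw") """
   return cmp(a.score, b.score)

def cmp_individual_scaled(a, b):
   """ Comparator on scaled fitness (used by sort() otherwise) """
   return cmp(a.fitness, b.fitness)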
Example #47
0
        myShaper = Shaper({
                'capacity': capacity,        
                'full_watermark': full_watermark,
                'hot_watermark': hot_watermark,
                'write_seq_threshold': write_seq_threshold
            })
        myShaper.run('./output/gen.csv','./output/optimize.csv')
        myShaper.print_io_count()

        ##############################################################
        #400GB SSD with optimizer
        myStat = Statistics({
            'file'          : './output/optimize.csv',
            'ssd_size'      : 200*1024*1024,        
            'ssd_seq_write' : 400*1024*1024,
            'ssd_seq_read'  : 500*1024*1024,
            'ssd_ran_write' : 80*1000,
            'ssd_ran_read'  : 100*1000,
            'ssd_read_hit'   : 0.91,                
            'hdd_seq_write' : 400*1024*1024,
            'hdd_seq_read'  : 500*1024*1024,
            'hdd_ran_write' : 1400,
            'hdd_ran_read'  : 1400
        })
        myStat.run()
        text_file.write("%d,%d,%f\n" % (genRange,bufSize,myStat.total_time))

text_file.close()
exit(1)
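
The dictionary passed to Statistics above is a performance model for the two storage tiers. Assuming the ran_* values are IOPS and the seq_* values are bytes per second (an assumption, since the Statistics implementation is not shown here), a quick back-of-the-envelope comparison of the random-write gap that the shaper is trying to close looks like this:

# Hypothetical back-of-the-envelope calculation using the parameters above;
# units are assumed to be IOPS for ran_* and bytes/second for seq_* values.
SSD_RAN_WRITE_IOPS = 80 * 1000
HDD_RAN_WRITE_IOPS = 1400
RANDOM_WRITES = 1000000

ssd_seconds = RANDOM_WRITES / float(SSD_RAN_WRITE_IOPS)   # ~12.5 s
hdd_seconds = RANDOM_WRITES / float(HDD_RAN_WRITE_IOPS)   # ~714 s
print("SSD: %.1f s  HDD: %.1f s  gap: %.0fx" % (ssd_seconds, hdd_seconds, hdd_seconds / ssd_seconds))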