def mas_get_html(url):
    """Fetch *url* through a proxy from the pool, retrying up to 5 times.

    Returns the requests.Response on success, or None after five failures
    (in which case the failing proxy is evicted from the pool).
    """
    retry_count = 5
    proxy = mas_get_proxy()
    # mas_get_proxy() may return several addresses separated by newlines;
    # use only the first one.  The original formatted the whole list into
    # the URL, producing an invalid proxy like "http://['1.2.3.4:80']".
    proxy = proxy.splitlines()[0]
    while retry_count > 0:
        try:
            html = requests.get(
                url,
                headers=ToolBox.tool_get_random_headers(),
                proxies={"http": "http://{}".format(proxy)},
            )
            if html is None:
                # Defensive: requests.get normally never returns None.
                time.sleep(1)
                continue
            return html
        except Exception as e:
            log = 'mas_get_html error {} {}'.format(url, e)
            ToolBox.tool_log_info(level='error', message=log)
            print('mas_get_html error {}'.format(e))
            retry_count -= 1
    # Failed 5 times: remove this proxy from the pool.
    mas_delete_proxy(proxy)
    return None
def __init__(self, filename, nb_times):
    """Load the match CSV at *filename*, indexing team names and dates.

    :param filename: path to a CSV whose first two columns are team names
        and whose fifth column (index 4) is the match date.
    :param nb_times: number of time slices used to bucket match dates.
    """
    self.filename = filename
    self.country_to_id = {}
    self.id_to_country = []
    self.nb_teams = 0
    self.nb_times = nb_times
    max_time = 0
    # Sentinel later than any real match date.
    min_time = datetime.date(2100, 1, 1).toordinal()
    # csv.reader needs a text-mode file on Python 3; the original opened
    # with 'rb', which makes csv.reader raise on bytes input.
    with open(filename, 'r', newline='') as csvfile:
        spamreader = csv.reader(csvfile, delimiter=',')
        for row in spamreader:
            if not row:
                continue
            for s in row[:2]:
                s_cropped = ToolBox.format_name(s)
                if s_cropped not in self.country_to_id:
                    self.country_to_id[s_cropped] = self.nb_teams
                    self.id_to_country.append(s_cropped)
                    self.nb_teams += 1
            # Renamed from `time` to avoid shadowing the time module.
            match_time = ToolBox.date_to_number(row[4])
            max_time = max(max_time, match_time)
            min_time = min(min_time, match_time)
    # Map a date onto a time-slice id; captures the observed date range.
    self.date_to_id = lambda x: ToolBox.gen_date_to_id(
        x, self.nb_times, max_time, min_time)
    self.train = {}
    self.test = {}
    self.elo = None
    self.metadata = {'nb_teams': self.nb_teams, 'nb_times': self.nb_times}
def __init__(self, driver):
    """Build the map view widget on top of a BusyBoxSQL *driver*.

    Reads map configuration from the database, initialises view state
    (resize/zoom/offset/hopsize) and assembles the Gtk widget tree.
    """
    # isinstance instead of `type(x) == T`: accepts subclasses and is the
    # idiomatic type check.
    assert isinstance(driver, BusyBoxSQL.BusyBoxSQL)
    self.driver = driver
    project = self.driver.get_config_by_name("map_project")
    assert project == 1, "(e) not supported map projection!"
    self.width = self.driver.get_config_by_name("map_width")
    self.height = self.driver.get_config_by_name("map_height")
    ToolBox.print_output(f"original map size: {self.width} x {self.height}")
    self.diagram = self.driver.get_vector_diagram()
    self.set_resize(1)
    self.set_zoom(None)
    self.set_offset(0)
    self.border = False
    self.set_hopsize(50)
    # Only asserts the attribute exists (a bound method is always truthy).
    assert self.refresh
    fix = Gtk.Fixed()
    self.add(fix)
    ebox = Gtk.EventBox()
    fix.put(ebox, 0, 0)
    # ebox.connect('scroll-event', self.on_scroll)
    ebox.connect('button-press-event', self.on_click)
    ebox.add_events(Gdk.EventMask.SCROLL_MASK | Gdk.EventMask.SMOOTH_SCROLL_MASK)
    self.img = Gtk.Image()
    ebox.add(self.img)
    self.show_all()
def process_raw_video_info(aid, spider=default_spider):
    '''
    Fetch one video's statistics and flatten them into a list.

    :param spider: an object of bilibili_spider
    :param aid: video id to fetch
    :return: list of values for
        ['view', 'danmaku', 'reply', 'favorite', 'coin', 'share', 'like']
    '''
    import copy
    fields = ['view', 'danmaku', 'reply', 'favorite', 'coin', 'share', 'like']

    def fallback():
        # Deep-copy the shared template: the original mutated the
        # module-level `error_raw` in place, leaking the last failed aid
        # into every later caller.
        raw = copy.deepcopy(error_raw)
        raw['data']['aid'] = aid
        return raw

    try:
        raw = spider.get_raw_video_info(aid)
        if raw.get('data') is None:
            raw = fallback()
    except Exception as e:
        log = 'ERROR IN process raw video info aid {} {}'.format(aid, e)
        ToolBox.tool_log_info(level='error', message=log)
        print(log)
        raw = fallback()
    data = raw.get('data')
    return [data.get(field) for field in fields]
def __init__(self, filename, nb_times):
    """Load the match CSV at *filename*, indexing team names and dates.

    :param filename: CSV path; columns 0-1 are team names, column 4 the date.
    :param nb_times: number of time slices used to bucket match dates.
    """
    self.filename = filename
    self.country_to_id = {}
    self.id_to_country = []
    self.nb_teams = 0
    self.nb_times = nb_times
    max_time = 0
    # Sentinel later than any realistic match date.
    min_time = datetime.date(2100, 1, 1).toordinal()
    # Text mode, not 'rb': csv.reader raises on bytes rows in Python 3.
    with open(filename, 'r', newline='') as csvfile:
        spamreader = csv.reader(csvfile, delimiter=',')
        for row in spamreader:
            if not row:
                continue
            for s in row[:2]:
                s_cropped = ToolBox.format_name(s)
                if s_cropped not in self.country_to_id:
                    self.country_to_id[s_cropped] = self.nb_teams
                    self.id_to_country.append(s_cropped)
                    self.nb_teams += 1
            # `match_time` rather than `time`, to avoid shadowing the module.
            match_time = ToolBox.date_to_number(row[4])
            max_time = max(max_time, match_time)
            min_time = min(min_time, match_time)
    self.date_to_id = lambda x: ToolBox.gen_date_to_id(
        x, self.nb_times, max_time, min_time)
    self.train = {}
    self.test = {}
    self.elo = None
    self.metadata = {'nb_teams': self.nb_teams, 'nb_times': self.nb_times}
def __init__(self, train_set, test_set, dictparam, type='elostd'):
    """Build the TensorFlow graph for an Elo-style match prediction model.

    :param train_set: dict of training tensors plus 'nb_times'/'nb_teams'.
    :param test_set: dict of testing tensors with the same keys.
    :param dictparam: meta-parameter name -> value (becomes non-trainable
        tf.Variables); expected keys include 'bais_ext' and 'metaparam0..2'.
    :param type: model label stored on the instance.

    NOTE(review): written against pre-1.0 TensorFlow (tf.inv,
    reduction_indices=, tf.initialize_all_variables are all long removed).
    """
    self.type = type
    # Define constants
    self.nb_times = train_set['nb_times']
    self.nb_teams = train_set['nb_teams']
    first_time = ToolBox.first_time(self.nb_times)
    timediff = ToolBox.timediff_gen(self.nb_times)
    # Define meta-parameters (frozen: trainable=False)
    self.param = {}
    for key in dictparam:
        self.param[key] = tf.Variable(dictparam[key], trainable=False)
    # Define training and testing set as non-trainable variables
    self.train_data = {}
    self.test_data = {}
    for key in train_set:
        self.train_data[key] = tf.Variable(train_set[key], validate_shape=False, trainable=False)
    for key in test_set:
        self.test_data[key] = tf.Variable(test_set[key], validate_shape=False, trainable=False)
    # Define parameters: one elo value per (team, time slice)
    self.elo = tf.Variable(tf.zeros([self.nb_teams, self.nb_times]))
    # Define the model: sigmoid of the elo difference at the match's time
    self.res = {}
    for key, proxy in [('train', self.train_data), ('test', self.test_data)]:
        # One-hot team vectors select each side's elo trajectory.
        elomatch = tf.matmul(proxy['team_h'] - proxy['team_a'], self.elo)
        # One-hot time vector selects the elo at the match date.
        elomatch = tf.reduce_sum(elomatch * proxy['time'], reduction_indices=[1])
        # 'bais_ext' is presumably a typo for 'bias_ext', but it must match
        # the key callers put in dictparam — do not rename unilaterally.
        elomatch += self.param['bais_ext']
        # Logistic link: tf.inv(1 + exp(.)) == 1 / (1 + exp(.)).
        self.res[key] = tf.inv(1. + tf.exp(ELOCONST * elomatch))
    # Define the costs
    self.cost_entropy = {}
    self.cost_regularized = {}
    for key, proxy in [('train', self.train_data), ('test', self.test_data)]:
        costs = []
        # Cross-entropy; 1e-9 guards log(0).
        entropies = proxy['res']*tf.log(self.res[key]+1e-9) + (1-proxy['res'])*tf.log(1-self.res[key]+1e-9)
        self.cost_entropy[key] = tf.reduce_mean(-entropies)
        costs.append(self.cost_entropy[key])
        # L2 penalties on the initial elo and on elo magnitude.
        cost_rawelo = tf.reduce_mean(tf.square(tf.matmul(self.elo, first_time)))
        cost_rawelo *= self.param['metaparam1'] * ELOCONST ** 2
        cost_rawelo += tf.reduce_mean(tf.square(self.elo)) * self.param['metaparam0'] * ELOCONST ** 2
        costs.append(cost_rawelo)
        if self.nb_times > 1:
            # Smoothness penalty on elo changes between consecutive slices.
            cost_diffelo = tf.reduce_mean(tf.square(tf.matmul(self.elo, timediff)))
            cost_diffelo *= self.param['metaparam2'] * ELOCONST ** 2
            costs.append(cost_diffelo)
        self.cost_regularized[key] = tf.add_n(costs)
    # Define the cost minimization method (only the training cost is optimized)
    self.train_step = tf.train.AdamOptimizer(0.1).minimize(self.cost_regularized['train'])
    # Create the session
    self.session = tf.Session()
    self.session.run(tf.initialize_all_variables())
def on_press(self, widget, event):
    """Keyboard handler: zoom keys go to shift_zoom(); Return reloads the diagram."""
    key = Gdk.keyval_name(event.keyval)
    ToolBox.print_output(f"Key val: {event.keyval}, ", f"Key name: {key}")
    if self.shift_zoom(key):
        return
    if key == "Return":
        self.diagram = self.driver.get_vector_diagram()
        self.refresh()
def get_rank_video_info(self, rank_type='origin', video_type='all', rank_time_type='day'):
    '''
    Scrape one bilibili ranking page and extract per-video fields.

    :param rank_type: ranking flavour, looked up in self.rank_category.
    :param video_type: string, the category of rank, default is origin, the category of the video is all
    :param rank_time_type: time window, looked up in self.rank_time_category.
    :return: list of rows; row 0 is a header, then
        [rank_type, video_type, rank (1-based), aid, title, up_mid, points]
        per ranked video.

    NOTE(review): the extraction relies on regex slicing of rendered HTML
    (e.g. aid parsed out of "av<digits>/" in the title anchor) and assumes
    points/titles/author anchors appear in the same order — brittle against
    any page-layout change.
    '''
    info = []
    # Header row; the digit prefixes just encode column order.
    info.append([
        '0rank_type', '1video_type', '2video_rank', '3video_aid',
        '4video_title', '5up_mid', '6points'
    ])
    suffix = self.rank_time_category.get(rank_time_type)
    try:
        # Unknown categories make .get() return None; format/concat then raises.
        url = self.api_rank.format(
            self.rank_category.get(rank_type),
            self.video_category.get(video_type)) + suffix
    except Exception as e:
        log = 'ERROR IN {} VIDEO_TYPE={} {}'.format(
            sys._getframe().f_code.co_name, video_type, e)
        ToolBox.tool_log_info(level='error', message=log)
        print(log)
        # Hard-exits the process on a bad category.
        os._exit(-1)
    else:
        res = self.__get_html_requests(url)
        html = res.text
        soup = BeautifulSoup(html, features='html5lib')
        points = soup.find_all('div', {'class': 'pts'})
        titles = soup.find_all('a', {'class': 'title', 'target': '_blank'})
        # NOTE(review): the dots are unescaped, so this regex matches any
        # character there — loose, but kept as-is.
        author_ids = soup.find_all(
            'a', target='_blank', href=re.compile('//space.bilibili.com/'))
        for i in range(len(points)):
            # aid: strip the leading "av" and trailing "/".
            aid = re.findall(r'av\d+/', str(titles[i]))
            aid = aid[0][2:-1]
            up_mid = re.findall(r'//space.bilibili.com/\d+', str(author_ids[i]))
            up_mid = re.findall(r'\d+', up_mid[0])[0]
            # title: text between ">" and "<" of the anchor.
            title = re.findall(r'>[\S\s]+<', str(titles[i]))
            title = title[0][1:-1]
            point = re.findall(r'\d+', str(points[i]))
            point = point[0]
            info.append(
                [rank_type, video_type, i + 1, aid, title, up_mid, point])
    return info
def set_distribution(self, column):
    """Write a freshly obtained value into *column* for every land node.

    Returns the number of land nodes updated.
    """
    diagram = self.driver.get_vector_diagram()
    land_nodes = [
        name for name in self.driver.get_node_names_as_set()
        if diagram.check_land(name)
    ]
    for name in land_nodes:
        new_value = self.obtain_new_value(name)
        ToolBox.print_output("distribution:", name, column, new_value)
        self.driver.set_distribution_by_node(name, column, new_value)
    return len(land_nodes)
def on_click(self, box, event):
    """Resolve a click to a node; open node.py on it, or report its value."""
    xo, y = Diagram.Diagram.on_click(self, box, event)
    node = self.diagram[xo, y][1]
    if self.nvalues is None:
        db_name = self.driver.db_name
        os.system(f"./node.py -f {db_name} -p {node}")
        return
    try:
        val = self.nvalues[node]
    except KeyError:
        val = "-"
    ToolBox.print_output(f"{xo}x{y} = {node} -> {val}")
def ShowAnimals(self, p):
    """Print the animal counts per habitat of park *p*.

    The original was machine-translated C#: str has no PadRight, and
    iterating a dict yields keys (no .Key/.Value) — it crashed at runtime.
    """
    ToolBox.WriteLineBlue(
        "----------------------------------------------------------------------------------------------"
    )
    ToolBox.WriteLineBlue("Current Population:")
    for habs in p.arkOne.GetHabitats():
        habs.SumAnimals()
        # NOTE(review): assumes HabitatName is a str and AnimalDict a plain
        # dict mapping species name -> count — confirm against the model.
        print(habs.HabitatName.ljust(18))
        for species, count in habs.AnimalDict.items():
            tempString = species.ljust(11) + " " + str(count)
            print(tempString.ljust(17))
        print()
def __init__(self, fname):
    """Open the sqlite database at *fname*; rows are returned as dicts."""
    if not os.path.exists(fname):
        ToolBox.print_error(f"path {fname} not exists!")
        sys.exit(-1)
    self.db_name = fname

    def to_dict(cursor, row):
        # Pair each column name from cursor.description with its value.
        return {col[0]: row[idx] for idx, col in enumerate(cursor.description)}

    self.conn = sqlite3.connect(fname)
    self.conn.row_factory = to_dict
    self.cur = self.conn.cursor()
    self.vdiag = self.get_vector_diagram()
def ShowRequiredResources(self, p):
    """Print one table row of required resources per habitat of park *p*.

    The original was machine-translated C# (`.ToString().PadRight(...)`)
    and printed each value on its own line, breaking the tabular header;
    each habitat is now one aligned row.
    """
    ToolBox.WriteLineBlue(
        "---------------------------------------------------------------------------------------------"
    )
    ToolBox.WriteLineBlue("Required Resources:")
    ToolBox.WriteLineBlue(
        "Zone               Energy(kW)  Heat(kJ)    Food(unit)     Water(m3)    Oxigen(m3)"
    )
    for member in p.arkOne.GetHabitats():
        # Column widths mirror the original PadRight/PadLeft calls.
        print(str(member.HabitatName).ljust(19)
              + str(member.SumReqEnergy).rjust(9)
              + str(member.SumReqHeat).rjust(11)
              + str(member.SumReqFood).rjust(11)
              + str(member.SumReqWater).rjust(14)
              + str(member.SumReqOxigen).rjust(13))
def __get_html_requests(self, url):
    """GET *url*, via the proxy pool when enabled, else directly.

    Falls back to a direct request after the proxy pool fails three times,
    so the caller always receives a requests.Response.
    """
    if not self.mas_proxy_flag:
        return requests.get(url, headers=ToolBox.tool_get_random_headers())
    try_count = 0
    html = bilibiliSpider.mas_get_html(url)
    while html is None:  # `is None`, not `== None`
        try_count += 1
        if try_count == 3:
            # Proxy pool keeps failing: go direct.
            html = requests.get(
                url, headers=ToolBox.tool_get_random_headers())
        else:
            html = bilibiliSpider.mas_get_html(url)
            time.sleep(1)
    return html
def Dying(self):
    """Random mortality: ~1% chance per tick to remove one random animal
    while the population exceeds 50.
    """
    if random.randint(0, 100) > 98:
        # len() is used elsewhere in this method, so AnimalList is a plain
        # list — the C#-style .Count / .RemoveAt would raise AttributeError.
        if len(AnimalList) > 50:
            # randrange(n) never returns n; the original randint(0, len(...))
            # could produce an out-of-range index.
            index = random.randrange(len(AnimalList))
            ToolBox.WriteLineRed("A " + AnimalList[index].SpeciesName
                                 + " has died!")
            del AnimalList[index]
def set_zoom(self, zoom):
    """Set the view window, clamped to the map; *zoom* may be None for the full map."""
    # Start from the whole map, then shrink toward the requested window.
    self.zoom = {
        "west": 0,
        "north": 0,
        "east": self.width - 1,
        "south": self.height - 1,
    }
    if zoom:
        self.zoom["west"] = max(self.zoom["west"], zoom["west"])
        self.zoom["east"] = min(self.zoom["east"], zoom["east"])
        self.zoom["north"] = max(self.zoom["north"], zoom["north"])
        self.zoom["south"] = min(self.zoom["south"], zoom["south"])
    self.zoom_width = self.zoom["east"] - self.zoom["west"] + 1
    self.zoom_height = self.zoom["south"] - self.zoom["north"] + 1
    ToolBox.print_output(f"map size: {self.zoom_width} x {self.zoom_height}")
def StarterPopulation(self, p):
    """Seed park *p* with ten of each starter species per habitat.

    Retries the whole batch on HabitatNotExistException (C#'s
    `time.Sleep(1500)` — milliseconds — is translated to `time.sleep(1.5)`).
    """
    counter = 0
    while counter < 10:
        try:
            p.arkOne.AddNewAnimal("Tiger", "Carnivore", "Rainforest")
            p.arkOne.AddNewAnimal("Panda", "Herbivore", "Rainforest")
            p.arkOne.AddNewAnimal("Chimp", "Omnivore", "Rainforest")
            p.arkOne.AddNewAnimal("Zebra", "Herbivore", "Savannah")
            p.arkOne.AddNewAnimal("Lion", "Carnivore", "Savannah")
            p.arkOne.AddNewAnimal("Antilop", "Herbivore", "Savannah")
            p.arkOne.AddNewAnimal("Wolf", "Carnivore", "Temperate Forest")
            p.arkOne.AddNewAnimal("Beaver", "Herbivore", "Temperate Forest")
            p.arkOne.AddNewAnimal("Bald Eagle", "Carnivore", "Temperate Forest")
            p.arkOne.AddNewAnimal("Polar bear", "Carnivore", "Arctic")
            p.arkOne.AddNewAnimal("Seal", "Carnivore", "Arctic")
            p.arkOne.AddNewAnimal("Penguin", "Carnivore", "Arctic")
            p.arkOne.AddNewAnimal("Dolphin", "Carnivore", "Sea")
            p.arkOne.AddNewAnimal("Turtle", "Herbivore", "Sea")
            p.arkOne.AddNewAnimal("Seagull", "Herbivore", "Sea")
            counter += 1
        except HabitatNotExistException:
            ToolBox.WriteLineRed("Habitat is not exist!")
            time.sleep(1.5)
async def __check_available(self, proxy: ProxyPool.proxy):
    """Probe one proxy against the test URL.

    Returns a ``(address, ok)`` tuple where *address* is the proxy's
    "host:port" string and *ok* is True only when the test endpoint
    answered 200 with an "origin" field in its JSON body.
    """
    # Rebinds the parameter: from here on `proxy` is the address string.
    proxy = proxy.get_string_address()
    print(f'checking {proxy}')
    try:
        # async with semaphore:
        async with aiohttp.ClientSession() as session:
            response = await session.get(
                url=self.__test_ip_url,
                proxy=proxy,
                timeout=default_check_proxy_timeout,
                headers=ToolBox.tool_get_random_headers()
            )
            response_status = response.status
    except Exception as e:
        # Any connect/timeout error counts as unavailable.
        print(proxy, '11111111111111111', e)
        return (proxy, False)
    if response_status == 503:
        print(proxy, '22222222222222222')
        return (proxy, False)
    try:
        # NOTE(review): the body is read after the ClientSession context has
        # exited; this relies on the payload already being buffered and can
        # raise on some aiohttp versions — confirm, or move inside the
        # session block.
        temp = await response.json()
        if response_status == 200 and temp.get("origin"):
            return (proxy, True)
        else:
            print(proxy, '3333333333333333')
            return (proxy, False)
    except Exception as e:
        # Non-JSON or truncated body: treat as unavailable.
        print(proxy, '4444444444444444444', e)
        return (proxy, False)
def __drive_timer_check(self):
    """Run one asynchronous check pass over the pool, then schedule the next."""
    asyncio.run(self.__timer_check())
    print('Check done!', ToolBox.tool_get_current_time())
    # Re-arm: every pass schedules its successor on a fresh timer thread.
    threading.Timer(self.__check_interval, self.__drive_timer_check).start()
def __evaluate_pool(self):
    """Apply DB feedback to proxy scores, evict bad proxies, persist good
    ones, and re-arm the evaluation timer.

    Scoring: +1 per 'increase' mention, -2 per 'decrease' mention; proxies
    below -3 points are removed, above +1 are persisted.
    """
    feedback = self.__db.get_feedback()
    increase_proxies = feedback.get('increase')
    decrease_proxies = feedback.get('decrease')
    for proxy in self.__pool:
        if proxy.get_string_address() in increase_proxies:
            proxy.points += 1
        elif proxy.get_string_address() in decrease_proxies:
            proxy.points -= 2
    delete_proxies = []
    add_proxies = []
    # Iterate over a snapshot: the original removed from self.__pool while
    # iterating it, silently skipping the element after every removal.
    for proxy in list(self.__pool):
        if proxy.points < -3:
            self.__pool.remove(proxy)
            delete_proxies.append(proxy)
            print(f'Delete {proxy}')
        elif proxy.points > 1:
            add_proxies.append(proxy)
            print(f'Add {proxy}')
    self.__db.delete_proxies(delete_proxies)
    self.__db.add_proxies(add_proxies)
    print('Evaluate done!', ToolBox.tool_get_current_time())
    thread = threading.Timer(self.__evaluate_interval, self.__evaluate_pool)
    thread.start()
def next_atom_generator(self, x, y):
    """Yield the neighbours of (x, y): x wraps around the map, y is clipped
    at the top and bottom edges.
    """
    height = self.config["map_height"]
    width = self.config["map_width"]
    for dx, dy in ToolBox.unit_generator():
        ny = y + dy
        if 0 <= ny < height:
            yield (x + dx) % width, ny
def SubMenuStatistics(self):
    """Clear the screen and print the statistics sub-menu."""
    os.system("clear")
    ToolBox.WriteLineBlue("Statistics panel (please choose a number)")
    print()
    options = (
        "(1) Information about the Habitats",
        "(2) Information about the animals",
        "(0) Back to the main panel",
    )
    for option in options:
        print(option)
    print()
def __init__(self, argv, parentQWidget=None):
    """Assemble the main widget (header + scrollable diagram + toolbox)
    wired to a Kitchen controller.

    :param argv: command-line arguments; recognised flags are
        -i (interactive) / -b (batch) / -f FILE, plus -s SRC_PATH and -c CFG.
    :param parentQWidget: unused; kept for interface compatibility.
    """
    QWidget.__init__(self)
    const.mode_interactive = 1
    const.mode_file = 2
    const.mode_batch = 3
    const.mode_default = const.mode_batch
    # Defaults first: the original left `mode` and `cfg_file` unbound when
    # the corresponding flag was absent, raising UnboundLocalError later
    # (const.mode_default existed but was never applied).
    mode = const.mode_default
    arg_file = ''
    cfg_file = ''
    if '-i' in argv:
        mode = const.mode_interactive
    elif '-b' in argv:
        mode = const.mode_batch
    elif '-f' in argv:
        mode = const.mode_file
        idx = argv.index('-f')
        arg_file = argv[idx + 1]
    src_path = None
    if '-s' in argv:
        idx = argv.index('-s')
        src_path = argv[idx + 1]
    if '-c' in argv:
        idx = argv.index('-c')
        cfg_file = argv[idx + 1]
    hbox = QHBoxLayout()
    vbox = QVBoxLayout()
    scrollView = ScrollArea()
    headerView = Header.Header(self)
    scrollView.connectHeaderView(headerView)
    headerView.connectMainView(scrollView.mainView.drawer)
    vbox.addWidget(headerView)
    vbox.addWidget(scrollView)
    toolBox = ToolBox.ToolBox(mode)
    hbox.addLayout(vbox)
    hbox.addLayout(toolBox)
    self.controller = Kitchen.Kitchen(mode, arg_file, cfg_file)
    self.controller.connectView(scrollView.mainView.drawer)
    self.controller.connectToolBox(toolBox)
    self.controller.start()
    srcViewer = SourceViewer.SourceViewer()
    srcViewer.createIndex(src_path)
    # Fan the toolbox messages out to every interested component.
    toolBox.connectMsgRcv(headerView)
    toolBox.connectMsgRcv(scrollView.mainView.drawer)
    toolBox.connectMsgRcv(self.controller)
    toolBox.connectDiagramView(scrollView.mainView.drawer)
    scrollView.mainView.drawer.setToolBox(toolBox)
    scrollView.mainView.drawer.connectSourceViewer(srcViewer)
    self.setLayout(hbox)
def phxRefTemp(teff, numDeps, tauRos): logE = math.log10(math.e) #//Theoretical radiative/convective model from Phoenix V15: phxRefTemp64 = [ 3.15213572679982190e+03, 3.15213572679982190e+03, 3.17988621810632685e+03, 3.21012887128011243e+03, 3.24126626267038500e+03, 3.27276078893546673e+03, 3.30435725697820226e+03, 3.33589185632140106e+03, 3.36724151725549154e+03, 3.39831714195318273e+03, 3.42906935013664861e+03, 3.45949368388945595e+03, 3.48962758169505923e+03, 3.51953742647688796e+03, 3.54929791042697934e+03, 3.57896962155466872e+03, 3.60858205550851335e+03, 3.63812646699481775e+03, 3.66755983657917068e+03, 3.69681905522719444e+03, 3.72583932497757132e+03, 3.75457006928661031e+03, 3.78298372918123914e+03, 3.81109104721021231e+03, 3.83893072914395862e+03, 3.86656355962043835e+03, 3.89408059675027425e+03, 3.92160316230741546e+03, 3.94927225929978204e+03, 3.97726284805320847e+03, 4.00584847611869327e+03, 4.03531360317989993e+03, 4.06591896438200047e+03, 4.09802860937899732e+03, 4.13221207874272022e+03, 4.16915227717330799e+03, 4.20937593060261861e+03, 4.25369220113429128e+03, 4.30330739566306784e+03, 4.36035870964639616e+03, 4.42601579216115442e+03, 4.50281614584142153e+03, 4.59386420090837146e+03, 4.70448179136501403e+03, 4.83727710376560208e+03, 4.99516189027659129e+03, 5.19102132587796405e+03, 5.40505223548941285e+03, 5.67247302987449984e+03, 5.95695843497286933e+03, 6.27957483223234703e+03, 6.71365960956718118e+03, 7.06828382342861460e+03, 7.34157936910693206e+03, 7.56939938735570740e+03, 7.77138428264261165e+03, 7.95656000812699585e+03, 8.13006721530056711e+03, 8.29523535580475982e+03, 8.45429779465689171e+03, 8.60879260449185131e+03, 8.75981713693203528e+03, 8.90838141718757288e+03, 9.05361290415211806e+03 ] logPhxRefTau64 = getLogPhxRefTau64() #// interpolate onto gS3 tauRos grid and re-scale with Teff: phxRefTemp = [0.0 for i in range(numDeps)] scaleTemp = [[0.0 for i in range(numDeps)] for j in range(2)] #for i in range(numDeps): # phxRefTemp[i] = 
ToolBox.interpol(logPhxRefTau64, phxRefTemp64, tauRos[1][i]) # scaleTemp[0][i] = teff * phxRefTemp[i] / phxRefTeff() # scaleTemp[1][i] = math.log(scaleTemp[0][i]); phxRefTemp = [ ToolBox.interpol(logPhxRefTau64, phxRefTemp64, x) for x in tauRos[1] ] scaleTemp[0] = [teff * x / phxRefTeff() for x in phxRefTemp] scaleTemp[1] = [math.log(x) for x in scaleTemp[0]] #//System.out.println("tauRos[1][i] " + logE * tauRos[1][i] + " scaleTemp[1][i] " + logE * scaleTemp[1][i]); return scaleTemp
def Birth(self):
    """Random births: ~9% chance per tick that a random animal reproduces."""
    if random.randint(0, 100) > 90:
        if len(AnimalList) > 0:
            # randrange(n) never returns n; the original randint(0, len(...))
            # could index one past the end of the list.
            index = random.randrange(len(AnimalList))
            ToolBox.WriteLineGreen("A " + AnimalList[index].SpeciesName
                                   + " baby has born!")
            # NOTE(review): AddNewAnimal is a free name here (not self.);
            # assumed to resolve at module/class scope — confirm.
            AddNewAnimal(AnimalList[index].SpeciesName,
                         AnimalList[index].Type,
                         AnimalList[index].IdealEnvironment)
def __getattr__(self, name):
    """Proxy unknown attribute access to the wrapped tool.

    First tries to read *name* as a tool variable; if that fails, returns
    a callable that invokes *name* as a tool function.
    """
    try:
        return ToolBox.toolGetVariable(self.tool_name_, name)
    except Exception:  # narrowed from bare `except:` so SystemExit etc. escape
        def method(*args):
            # Return the call's result; the original discarded it and the
            # wrapper always yielded None.
            return ToolBox.toolFunction(self.tool_name_, name, args)
        return method
def append_match(self, proxy, team1, team2, score1, score2, date):
    """Encode one match as one-hot vectors and append it to *proxy*'s columns."""
    home_id = self.country_to_id[ToolBox.format_name(team1)]
    away_id = self.country_to_id[ToolBox.format_name(team2)]
    proxy['time'].append(ToolBox.make_vector(date, self.nb_times))
    proxy['team_h'].append(ToolBox.make_vector(home_id, self.nb_teams))
    proxy['team_a'].append(ToolBox.make_vector(away_id, self.nb_teams))
    # Goal counts are capped at 9 to fit the 10-way one-hot encoding.
    goals_home = min(int(score1), 9)
    goals_away = min(int(score2), 9)
    proxy['score_h'].append(ToolBox.make_vector(goals_home, 10))
    proxy['score_a'].append(ToolBox.make_vector(goals_away, 10))
    proxy['res'].append(ToolBox.result(int(score1) - int(score2)))
def append_match(self, proxy, team1, team2, score1, score2, date):
    """Append one encoded match (time, teams, capped scores, result) to *proxy*."""
    vec = ToolBox.make_vector
    fmt = ToolBox.format_name
    proxy['time'].append(vec(date, self.nb_times))
    proxy['team_h'].append(vec(self.country_to_id[fmt(team1)], self.nb_teams))
    proxy['team_a'].append(vec(self.country_to_id[fmt(team2)], self.nb_teams))
    # Scores above 9 are clamped into the last bucket of the 10-way encoding.
    proxy['score_h'].append(vec(min(int(score1), 9), 10))
    proxy['score_a'].append(vec(min(int(score2), 9), 10))
    proxy['res'].append(ToolBox.result(int(score1) - int(score2)))
def phxRefTemp(teff, numDeps, tauRos): logE = math.log10(math.e) #//Theoretical radiative/convective model from Phoenix V15: phxRefTemp64 = [ 6.07574016685149309E+03, 6.07574016685149309E+03, 6.13264671606194861E+03, 6.20030362747541585E+03, 6.27534705504544127E+03, 6.35396254937768026E+03, 6.43299900128272293E+03, 6.51018808525609893E+03, 6.58411555606889124E+03, 6.65406717610081068E+03, 6.71983498258185136E+03, 6.78154367852633823E+03, 6.83954193198123903E+03, 6.89437231818902364E+03, 6.94676889243451842E+03, 6.99759489202792247E+03, 7.04769490055547158E+03, 7.09773520027041195E+03, 7.14812062339764907E+03, 7.19901426577775601E+03, 7.25041827414427917E+03, 7.30225171801659872E+03, 7.35440093819652611E+03, 7.40675066225539558E+03, 7.45920456139609178E+03, 7.51166464185182758E+03, 7.56404228766520191E+03, 7.61627005664532771E+03, 7.66833575187113820E+03, 7.72034173334201841E+03, 7.77258785750414881E+03, 7.82555139374063583E+03, 7.87986936059489017E+03, 7.93639246968124371E+03, 7.99620846303960116E+03, 8.06052820253916161E+03, 8.13047124123426238E+03, 8.20741189262034641E+03, 8.29307358429898159E+03, 8.38980788216330802E+03, 8.49906053657168923E+03, 8.62314483632361771E+03, 8.76456384216990409E+03, 8.92693370905029224E+03, 9.11177170396923248E+03, 9.32167977041711492E+03, 9.56236981551314602E+03, 9.82432656703466455E+03, 1.01311427939962559E+04, 1.04299661074183350E+04, 1.08355089220389909E+04, 1.12094886773674716E+04, 1.16360710406256258E+04, 1.20991237739366334E+04, 1.25891111265208237E+04, 1.31070008299570563E+04, 1.36522498965801387E+04, 1.42233473670298790E+04, 1.48188302103200131E+04, 1.54423659243804523E+04, 1.60892587452310745E+04, 1.67828517694842230E+04, 1.74930217234773954E+04, 1.82922661949382236E+04 ] logPhxRefTau64 = getLogPhxRefTau64() #// interpolate onto gS3 tauRos grid and re-scale with Teff: phxRefTemp = [0.0 for i in range(numDeps)] scaleTemp = [[0.0 for i in range(numDeps)] for j in range(2)] #for i in range(numDeps): # phxRefTemp[i] = 
ToolBox.interpol(logPhxRefTau64, phxRefTemp64, tauRos[1][i]) # scaleTemp[0][i] = teff * phxRefTemp[i] / phxRefTeff() # scaleTemp[1][i] = math.log(scaleTemp[0][i]) phxRefTemp = [ ToolBox.interpol(logPhxRefTau64, phxRefTemp64, x) for x in tauRos[1] ] scaleTemp[0] = [teff * x / phxRefTeff() for x in phxRefTemp] scaleTemp[1] = [math.log(x) for x in scaleTemp[0]] #//System.out.println("tauRos[1][i] " + logE * tauRos[1][i] + " scaleTemp[1][i] " + logE * scaleTemp[1][i]); return scaleTemp
def SubMenuHabitat(self):
    """Clear the screen and print the habitat management sub-menu."""
    os.system("clear")
    ToolBox.WriteLineBlue("Habitat panel (please choose a number)")
    print()
    options = (
        "(1) Building a new Habitat zone",
        "(2) Information about the current zones",
        "(3) Recalibrating the zones",
        "(4) Demolish a zone",
        "(0) Back to the main panel",
    )
    for option in options:
        print(option)
    print()
def process_multi_tasks(tasks=default_tasks, rank_tyoe='origin'):
    '''
    Multi processors spider: run process_one_task for every category
    on a process pool and log a summary when all finish.

    :param tasks: video categories like ['guochuang', 'movie']
    :param rank_tyoe: unused; kept for interface compatibility.
        NOTE(review): almost certainly a typo for `rank_type` — confirm no
        caller passes it by keyword before renaming.
    :return: None
    '''
    # max_cpu_count = multiprocessing.cpu_count()
    max_cpu_count = default_multi_processor_num
    if max_cpu_count is None:  # `is None`, not `== None`
        max_cpu_count = 1
    pool = Pool(max_cpu_count)
    for task in tasks:
        pool.apply_async(process_one_task, args=(task, ))
    pool.close()
    pool.join()
    log = f'done ! spider videos failed in {default_spider.get_error_count()} videos'
    ToolBox.tool_log_info(level='info', message=log)
def __version__():
    """Return this package's git commit plus its dependencies' version strings."""
    import ToolBox as tb
    parts = ["straindata" + ": " + tb.get_git_commit("straindata")]
    for dep in (tb,):
        parts.append(dep.__version__())
    return "\n\t".join(parts)
def __version__():
    """Version banner: straindata's git commit followed by dependency versions."""
    import ToolBox as tb
    commit = tb.get_git_commit("straindata")
    lines = [f"straindata: {commit}"]
    lines.extend(module.__version__() for module in [tb])
    return "\n\t".join(lines)
def ShowResourceGenerators(self, p):
    """Print the running facilities of park *p* with block counts and loads.

    The original was machine-translated C#: `"literal".PadRight(19)` is not
    a Python str method and `str + int` raises TypeError, so it crashed.
    """
    ToolBox.WriteLineBlue(
        "---------------------------------------------------------------------------------------------"
    )
    ToolBox.WriteLineBlue("Running Facilities:")
    # NOTE(review): `.Count` on the facility collections is also a C#-ism;
    # assumed to be plain lists — confirm against the model classes.
    print(f"{'Heatcollectors':<19}{len(p.arkOne.HeatCollectors)}"
          f" block, Load: {p.arkOne.HeatLoad} %")
    print(f"{'Solarpanels':<19}{len(p.arkOne.SolarPanels)}"
          f" block, Load: {p.arkOne.EnergyLoad} %")
    print(f"{'Foodreplicators':<19}{len(p.arkOne.FoodReplicators)}"
          f" block, Load: {p.arkOne.FoodLoad} %")
    print(f"{'Oxigen generators':<19}{len(p.arkOne.OxigenGenerators)}"
          f" block, Load: {p.arkOne.OxigenLoad} %")
    print(f"{'Water filters: ':<19}{len(p.arkOne.WaterFilters)}"
          f" block, Load: {p.arkOne.WaterLoad} %")
    ToolBox.WriteLineBlue(
        "---------------------------------------------------------------------------------------------"
    )
    ToolBox.WriteLineBlue("Messages:")
def __init__(self, train_set, test_set, dictparam): super(Elostd, self).__init__(train_set, test_set, dictparam) # Define parameters self.elo = tf.Variable(tf.zeros([self.nb_teams, self.nb_times])) # Define the model for key, proxy in [('train', self.train_data), ('test', self.test_data)]: elomatch = ToolBox.get_elomatch(proxy['team_h'] - proxy['team_a'], proxy['time'], self.elo) elomatch += self.param['bais_ext'] self.res[key] = tf.inv(1. + tf.exp(-elomatch)) # Define the costs self.init_cost() for key in ['train', 'test']: cost = ToolBox.get_raw_elo_cost(self.param['metaparam0'], self.param['metaparam1'], self.elo, self.nb_times) self.regulizer[key].append(cost) cost = ToolBox.get_timediff_elo_cost(self.param['metaparam2'], self.elo, self.nb_times) self.regulizer[key].append(cost) # Finish the initialization super(Elostd, self).finish_init()
def __init__(self, train_set, test_set, dictparam):
    """Elosplit model: separate attack/defense Elo ratings per team; goals
    on each side are modelled as Poisson with rate exp(bias + atk - def),
    truncated to 0..9 goals.

    NOTE(review): pre-1.0 TensorFlow (tf.inv-era: tf.batch_matmul,
    reduction_indices=); also, `tf.constant(map(...))` only works on
    Python 2 where map returns a list — on Python 3 it gets an iterator.
    """
    super(Elosplit, self).__init__(train_set, test_set, dictparam)
    # k = [0.0, 1.0, ..., 9.0]: the support of the truncated goal count.
    k = tf.constant(map(lambda x: float(x), range(10)))
    last_vect = tf.expand_dims(ToolBox.last_vector(10), 0)
    win_vector = ToolBox.win_vector(10)
    # Define parameters: attack and defense tables, one value per (team, time).
    self.elo_atk = tf.Variable(tf.zeros([self.nb_teams, self.nb_times]))
    self.elo_def = tf.Variable(tf.zeros([self.nb_teams, self.nb_times]))
    # Define the model
    for key, proxy in [('train', self.train_data), ('test', self.test_data)]:
        elo_atk_h = ToolBox.get_elomatch(proxy['team_h'], proxy['time'], self.elo_atk)
        elo_def_h = ToolBox.get_elomatch(proxy['team_h'], proxy['time'], self.elo_def)
        elo_atk_a = ToolBox.get_elomatch(proxy['team_a'], proxy['time'], self.elo_atk)
        elo_def_a = ToolBox.get_elomatch(proxy['team_a'], proxy['time'], self.elo_def)
        # Poisson rates for home/away goals.
        lambda_h = tf.expand_dims(tf.exp(self.param['goals_bias'] + elo_atk_h - elo_def_a), 1)
        lambda_a = tf.expand_dims(tf.exp(self.param['goals_bias'] + elo_atk_a - elo_def_h), 1)
        # Poisson pmf via exp(-lambda + k*log(lambda) - log(k!)).
        score_h = tf.exp(-lambda_h + tf.log(lambda_h) * k - tf.lgamma(k + 1))
        score_a = tf.exp(-lambda_a + tf.log(lambda_a) * k - tf.lgamma(k + 1))
        # Dump the truncated tail mass (>9 goals) into the last bucket.
        score_h += tf.matmul(tf.expand_dims((1. - tf.reduce_sum(score_h, reduction_indices=[1])), 1), last_vect)
        score_a += tf.matmul(tf.expand_dims((1. - tf.reduce_sum(score_a, reduction_indices=[1])), 1), last_vect)
        # Outer product: joint 10x10 scoreline distribution per match.
        self.score[key] = tf.batch_matmul(tf.expand_dims(score_h, 2), tf.expand_dims(score_a, 1))
        # Expected result via the win-probability mask over scorelines.
        self.res[key] = tf.reduce_sum(self.score[key] * win_vector, reduction_indices=[1, 2])
    # Define the costs
    self.init_cost()
    for key in ['train', 'test']:
        # The same regularizers applied to both the attack and defense tables.
        for proxy in [self.elo_atk, self.elo_def]:
            cost = ToolBox.get_raw_elo_cost(self.param['metaparam0'], self.param['metaparam1'], proxy, self.nb_times)
            self.regulizer[key].append(cost)
            cost = ToolBox.get_timediff_elo_cost(self.param['metaparam2'], proxy, self.nb_times)
            self.regulizer[key].append(cost)
    # Finish the initialization
    super(Elosplit, self).finish_init()