def run(self):
    if IS_DEV_VERSION:
        return
    try:
        stage = file("stage.txt").read().strip()
        url = "http://jlpo.free.fr/soundrts/%sversion.txt" % stage
        rev = urllib.urlopen(url).read().strip()
        if (rev != VERSION) and (rev.find("404") == -1):
            voice.important(mp.UPDATE_AVAILABLE)
        stats.Stats(OLD_STATS_PATH, METASERVER_URL).send()
        stats.Stats(STATS_PATH, METASERVER_URL).send()
    except:
        pass
def __init__(self, lvl, scheme, target=None, **kwargs):
    super().__init__(**kwargs)
    allChamps.append(self)
    self.ID = self.champID.pop(0)
    self.lvl = lvl
    self.target = target
    self.scheme = scheme
    self.shields = []
    self.pshields = []
    self.mshields = []
    self.onhits = []
    self.dmg_reductions = []
    self.STATS = stats.Stats(CHAMP=self)
    # self.REGEN = regen.Regen(CHAMP=self)
    self.ABILITY_USED = observer.AbilityUsed()  ### this design seems bad
    # self.EFFECT_HANDLER = effecthandler.EffectHandler(CHAMP=self)
    self.AUTO = self.autoclass_map[self.autoclass](CHAMP=self)
    self.P = self._load_skill('p')
    self.Q = self._load_skill('q')
    self.W = self._load_skill('w')
    self.E = self._load_skill('e')
    self.R = self._load_skill('r')
def main():
    env = simpy.Environment()
    eth = ether.Ether(env)
    statistics = stats.Stats()
    nodes = []
    for i in range(0, parameters.NUMBER_OF_NODES):
        name = "Node" + str(i)
        nodes.append(node.Node(env, name, eth,
                               random.randint(0, 40), random.randint(0, 40),
                               statistics))
    for i in range(0, parameters.NUMBER_OF_NODES):
        destinations = []
        for j in range(0, parameters.NUMBER_OF_NODES):
            if i != j:
                destinations.append(nodes[j].name)
        env.process(nodes[i].keepSendingIncreasing(parameters.STARTING_RATE,
                                                   parameters.TARGET_RATE,
                                                   destinations))
        # env.process(nodes[i].keepSending(parameters.TARGET_RATE, destinations))
    if not parameters.PRINT_LOGS:
        env.process(printProgress(env))
    env.run(until=parameters.SIM_TIME)
    statistics.plotCumulativePackets()
    statistics.plotThroughput()
    statistics.plotDelays()
    statistics.plotRetransmissions()
def Optimize(self, npoints=15):
    import stats as CalcStatistics
    # Loop over alpha and do the optimization
    bestAlpha = 0.
    bestEnergy = 0.
    oldene = 100.
    alpha = np.linspace(0.15, 0.85, num=npoints, endpoint=True)
    optimizeList = np.zeros((npoints, 3))
    for i in range(npoints):
        self.SetParams([alpha[i]])
        energyList = self.VMC(100000)
        energy, variance, error, corrtime = CalcStatistics.Stats(
            np.array(energyList))
        optimizeList[i, 0] = alpha[i]
        optimizeList[i, 1] = energy
        optimizeList[i, 2] = error
        print(" alpha= %g , energy= %g , var = %g , err = %g , ac-time= %g"
              % (alpha[i], energy, variance, error, corrtime))
        if energy <= oldene:
            bestAlpha = alpha[i]
            bestEnergy = energy
            oldene = energy
    return (optimizeList, bestAlpha, bestEnergy)
def process_stats(founddir, faildir, files):
    numcandidates = 256 * 256 * 128
    charsetid = 1
    for i in range(len(files)):
        fname = files[i]
        print ">> [%d/%d] Reading %s" % (i, len(files), fname)
        s = stats.Stats().readfile(fname)
        offset = int(s.options["offset"])
        cookiepos = int(s.options["cookiepos"])
        maxgap = int(s.options["maxgap"])
        count = s.count
        if count.dtype != "uint32":
            count = count.astype("uint32")
        print ">> Processing", fname
        firstcorrect, foundpos = rc4decrypt.process_simultlscookie(
            count, offset, cookiepos, maxgap, numcandidates, charsetid)
        dest = os.path.join(founddir if foundpos >= 0 else faildir, fname)
        print ">> Moving file to", dest
        os.rename(fname, dest)
        options = s.options.copy()
        options["samples"] = s.numsamples()
        options["numcand"] = numcandidates
        options["charsetid"] = charsetid
        save_results("simultlscookie", options, firstcorrect, [foundpos])
        del s
def index(self):
    c = p.toolkit.c
    stats = stats_lib.Stats()
    rev_stats = stats_lib.RevisionStats()
    c.top_rated_packages = stats.top_rated_packages()
    c.most_edited_packages = stats.most_edited_packages()
    c.largest_groups = stats.largest_groups()
    c.top_tags = stats.top_tags()
    c.top_package_creators = stats.top_package_creators()
    c.new_packages_by_week = rev_stats.get_by_week('new_packages')
    c.deleted_packages_by_week = rev_stats.get_by_week('deleted_packages')
    c.num_packages_by_week = rev_stats.get_num_packages_by_week()
    c.package_revisions_by_week = rev_stats.get_by_week('package_revisions')

    c.raw_packages_by_week = []
    for week_date, num_packages, cumulative_num_packages in c.num_packages_by_week:
        c.raw_packages_by_week.append(
            {'date': h.date_str_to_datetime(week_date),
             'total_packages': cumulative_num_packages})

    c.all_package_revisions = []
    c.raw_all_package_revisions = []
    for week_date, revs, num_revisions, cumulative_num_revisions in c.package_revisions_by_week:
        c.all_package_revisions.append(
            '[new Date(%s), %s]' % (week_date.replace('-', ','), num_revisions))
        c.raw_all_package_revisions.append(
            {'date': h.date_str_to_datetime(week_date),
             'total_revisions': num_revisions})

    c.new_datasets = []
    c.raw_new_datasets = []
    for week_date, pkgs, num_packages, cumulative_num_packages in c.new_packages_by_week:
        c.new_datasets.append(
            '[new Date(%s), %s]' % (week_date.replace('-', ','), num_packages))
        c.raw_new_datasets.append(
            {'date': h.date_str_to_datetime(week_date),
             'new_packages': num_packages})

    return p.toolkit.render('ckanext/stats/index.html')
def brute_capture(fname, plainfile):
    # Get the known plaintext
    plaintext = open(plainfile).read()

    # Example stats generation: ./stats.py generate rc4decrypt simultlscookie 30 cookiepos=306 absabmaxgap=129
    s = stats.Stats().readfile(fname)
    count = s.count
    if count.dtype != "uint32":
        count = count.astype("uint32")

    # Default values correspond to previously hardcoded generation options
    offset = int(s.options.get("offset"))
    cookiepos = int(s.options.get("cookiepos"))
    absabmaxgap = int(s.options.get("maxgap"))

    # Generate the candidate list (and free the statistics once we have a list)
    numcandidates = 256 * 64
    charsetid = 1
    returnlist = 1
    candidates = rc4decrypt.process_simultlscookie(count, offset, cookiepos,
                                                   absabmaxgap, numcandidates,
                                                   charsetid, plaintext, returnlist)
    del count
    del s

    # Pass the generated candidates to the bruteforcer
    hosts = filter(lambda h: h.lower().startswith("host: "), plaintext.split("\r\n"))
    cookiename = "auth"
    sentinel = "logged in as"
    bruter.brutecookie(hosts[0][6:], cookiename, candidates, sentinel)
def test_differentConfidenceLevel(self):
    s = stats.Stats()
    s.add(1.0)
    s.add(2.0)
    s.add(3.0)
    s.add(4.0)
    self.assertAlmostEqual(s.conf(0.99), 3.4905535)
def render_game(color, game_id, game, opponent):
    access = chess.accessibility_map(game.current_board)
    if game.to_play is None:
        to_play = "nobody"
    else:
        to_play = chess.color_names[game.to_play]
    if game.termination is None:
        termination = "undefined"
        termination_msg = None
    else:
        termination = "'%s'" % game.termination
        termination_msg = termination_str(game, color)
    if color is None:
        color_name = None
    else:
        color_name = chess.color_names[color]
    stat_obj = stats.Stats(game, eco_data)
    return flask.render_template(
        "game.html",
        game=json.dumps(game.to_json_dict()),
        to_play=to_play,
        termination=termination,
        termination_msg=termination_msg,
        color_name=color_name,
        player=color,
        summary=linkify_summary(game),
        accessibility=json.dumps(access),
        opponent=opponent,
        stats=stat_obj,
    )
def main(self, player_name):
    self.player_name = player_name
    game_running = True
    while game_running:
        tick = self.clock.tick(self.clock_tick)
        self.events = pygame.event.get()
        self.stat_class = stats.Stats(self)
        for event in self.events:
            if event.type == pygame.QUIT:
                self.stat_class.print_scores()
                return
            if event.type == pygame.KEYDOWN \
                    and event.key == pygame.K_ESCAPE:
                self.stat_class.print_scores()
                return
        enemy_count = len(self.enemies.sprites())
        if enemy_count < 1 and self.game_level <= 5:
            self.game_level += 1
        if enemy_count < 1 and self.game_level <= 4:
            self.enemies = self.__load_enemies()
            self.sprites.add(self.enemies)
        if enemy_count < 1 and self.game_level == 5:
            self.__load_boss()
        self.sprites.update(tick / 1000., self)
        screen.blit(self.background, self.bg_img_pos)
        self.sprites.draw(screen)
        self.__load_game_stats()
        self.__check_status()
        pygame.display.flip()
def __init__(self, context):
    super().__init__()
    self.context = context
    self.config = config.Config.instance()
    self.logger = logging.getLogger(logger_str(__class__) + " " + context)
    self.logger.info(f"Creating clientlet {self.context}")
    self.path = config.path_for(self.config.get(self.context, "backup"))
    assert os.path.exists(self.path), f"{self.path} does not exist!"
    # ALL source contexts (we care a lot)
    self.sources = {}
    self.scanners = {}
    self.random_source_list = []
    self.build_sources()
    lazy_write = utils.str_to_duration(
        self.config.get(context, "LAZY WRITE", 5))
    # TODO: my cache of claims should expire in rescan/2
    self.rescan = self.get_interval("rescan") // 2
    self.claims = PersistentDict(f"/tmp/cb.c{context}.json.bz2",
                                 lazy_write=5, expiry=self.rescan)
    self.drops = 0  # count the number of times I drop a file
    self.stats = stats.Stats()
    self.update_allocation()
    self.bailing = False
    self.datagrams = {}
def __init__(self, lines):
    """
    :param lines:
    """
    current_time = datetime.now().strftime("%d-%m-%Y_%Hh.%Mm.%Ss")
    user = getpass.getuser()
    self.personal_path = "gen/%s_%s/" % (user, current_time)
    self.index_line = 0
    self.completion = 0.0
    self.connections_completions = 0.0
    try:
        os.mkdir(self.personal_path)
    except Exception as e:
        print e
    print "Personal Path used for this Puzzle: %s" % self.personal_path
    arr = list(self.randomize_lines(*lines))
    Puzzle.dynamique_type()
    self.toolbox = base.Toolbox()
    self.toolbox.register("new_individual", creator.Individual,
                          self._get_line_, arr)
    self.toolbox.register("desk", tools.initRepeat, list,
                          self.toolbox.new_individual)
    self.population = self.toolbox.desk(n=len(arr))
    # Applying rotation until it's the right side
    self.fixing_outside()
    # Init the stats we want to log
    self.stats = stats.Stats(self.personal_path)
    self.evaluate()
    self.log_stats(-1, 0, 0)
def __init__(self, email, password, thread_fbid, config):
    self.config = config
    self.commands = config[consts.COMMANDS]
    self.stats = stats.Stats(STATS_FILE)
    fbchat.Client.__init__(self, email, password, thread_fbid, True, None)
    threads = self.getThreadList(0)
    for item in threads:
        if item.thread_fbid == thread_fbid:
            thread = item
    # Extracts ids from participants and gets full data
    users = []
    for user_id in thread.participants:
        if user_id.startswith("fbid:"):
            users.append(user_id[5:])
    self.full_users = self.getUserInfo(users)
    self.annoy_list = self.stats.vals["annoy_list"]
    self.onseen_list = self.stats.vals["onseen_list"]
    self.mquiz = quiz.Quiz(config["quiz_file"], self.stats)
    self.__quiz_question_count = 0
    self.__quiz_timeout_set = False
    self.__quiz_timer = None
def __init__(self, args, header):
    self.args = args
    self.stats = stats.Stats('')
    self.amplicons = amplicon.load_amplicons(args.amps, self.stats, args)
    self.clip = args.clip
    self.exclude_offtarget = args.exclude_offtarget
    AMS = []
    for amp in self.amplicons:
        AMS.append(
            json.dumps({
                'type': 'ea',
                'id': amp.external_id,
                'ac': '%s:%s-%s' % (amp.chr, amp.start, amp.end),
                'tc': '%s:%s-%s' % (amp.chr, amp.trim_start, amp.trim_end),
                'st': str(amp.strand)
            }))
    header['CO'] = header.get('CO', []) + AMS
    # create a list of lists ref by tid
    self._amps_by_chr = []
    for _ in range(args.input.nreferences):
        self._amps_by_chr.append([])
    for a in self.amplicons:
        self._amps_by_chr[args.input.gettid(a.chr)].append(a)
def __init__(self, parent=None):
    super(QMainWindow, self).__init__(parent)
    self.isLogging = False
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    self.parameters = ParamTree()
    self.ui.graphicsView.setParameters(self.parameters.tree, showTop=False)
    self.parameters.tree.sigTreeStateChanged.connect(self.change_params)
    self.config = Config(self.parameters.activeParams)
    self.resultPlots = {}
    self.plot = gui_scatter.ScatterPlot(self.config.dataset.data)
    self.plot.w.die.connect(self.untick_show_plot)
    self.graphs = gui_graphs.GraphsWrapper(self.config)
    self.graphs.w.reinit_graphs(self.config, self.parameters)
    self.graphs.w.graphsdying.connect(self.untick_show_graphs)
    self.axestable = AxesTable(self, self.ui.table_axes)
    self.stats_tab = stats.Stats(self.ui)
    self.ui.button_start.clicked.connect(self.start)
    self.ui.checkBox_plotShowing.stateChanged.connect(self.plot.show)
    self.ui.checkbox_graphs.stateChanged.connect(self.graphs.show_graphs)
    self.ui.checkBox_logging.stateChanged.connect(self.is_logging)
    self.ui.show_grid_checkbox.stateChanged.connect(self.plot.showGrid)
    self.ui.sample_size_slider.valueChanged.connect(self.plot.setSampleSize)
def swi_arrayplot(self, dir, name, measure, **keys):
    s = stats.Stats('%s/%s' % (dir, name))
    data = s.get_raw(measure)
    c = ArrayPlotConfig(keys)
    index = int(c.index)
    pylab.figure(figsize=(float(c.width), float(c.height)))
    pylab.axes((float(c.b_left), float(c.b_bot),
                1.0 - float(c.b_left) - float(c.b_right),
                1.0 - float(c.b_top) - float(c.b_bot)))
    max_index = len(data[0])
    t = pylab.arange(len(data[0])) * float(c.dt)
    if c.other_color != '':
        for i in range(len(data)):
            d = data[i]
            if i != index:
                pylab.plot(t, d, color=c.other_color,
                           linewidth=c.other_thickness)
    if c.overlay != '':
        data2 = s.get_raw(c.overlay)
        if data2 is not None:
            d = data2[index]
            scale = float(c.overlay_scale)
            d = [scale * x for x in d]
            pylab.plot(t[:len(d)], d, color=c.overlay_color,
                       linewidth=c.overlay_thickness)
    if c.line_color != '':
        d = data[index]
        pylab.plot(t, d, color=c.line_color, linewidth=c.line_thickness)
    if ',' in c.xlim:
        mn, mx = c.xlim.split(',', 1)
        pylab.xlim((float(mn), float(mx)))
    if ',' in c.ylim:
        mn, mx = c.ylim.split(',', 1)
        pylab.ylim((float(mn), float(mx)))
    if c.xlabel != '':
        pylab.xlabel(c.xlabel)
    if c.ylabel != '':
        pylab.ylabel(c.ylabel)
    else:
        pylab.ylabel(measure)
    img = StringIO.StringIO()
    dpi = c.dpi
    if type(dpi) is list:
        dpi = dpi[-1]
    pylab.savefig(img, dpi=int(dpi), format='png')
    return 'image/png', img.getvalue()
def monitor(iface="eth0", debug=False):
    import httpsmon
    cookielen = 16
    cookiepos = COOKIEPOS
    absabmaxgap = 129
    serverips = [ownip(iface)]
    clientips = []
    verbose = 0

    # Keep track of start time
    key = stats.KeyInfo()
    key.start()

    # Capture traffic
    if debug:
        counts, numrequests, offset = httpsmon.monitor_rc4cookie(
            iface, serverips, clientips, cookielen, cookiepos, absabmaxgap,
            verbose, RSAKEY_FILE, TESTCOOKIE)
    else:
        counts, numrequests, offset = httpsmon.monitor_rc4cookie(
            iface, serverips, clientips, cookielen, cookiepos, absabmaxgap,
            verbose)

    # Save the results (use a random AES key)
    key.finish(os.urandom(16), counts.shape, numrequests)
    options = {"cookiepos": cookiepos, "maxgap": absabmaxgap, "offset": offset}
    s = stats.Stats("httpsmon", "monitor", counts, options, set([key]))
    filename = s.write()
    print "Wrote captured stats to", filename
def brute_candidates(fname, plainfile):
    # Get the known plaintext
    plaintext = open(plainfile).read()

    # Example stats generation: ./stats.py generate rc4decrypt simultlscookie 30 cookiepos=306 absabmaxgap=129
    s = stats.Stats().readfile(fname)
    count = s.count
    if count.dtype != "uint32":
        count = count.astype("uint32")

    # Default values correspond to previously hardcoded generation options
    offset = int(s.options.get("offset"))
    cookiepos = int(s.options.get("cookiepos"))
    absabmaxgap = int(s.options.get("maxgap"))

    # Generate the candidate list (and free the statistics once we have a list)
    numcandidates = 256 * 64
    charsetid = 1
    returnlist = 1
    candidates = rc4decrypt.process_simultlscookie(count, offset, cookiepos,
                                                   absabmaxgap, numcandidates,
                                                   charsetid, plaintext, returnlist)
    del count
    del s

    with open("cookies.txt", "w") as fp:
        for candidate in candidates:
            fp.write(candidate + "\n")
    print("Wrote list of cookie candidates to cookies.txt!")
def swi_rawdata(self, dir, name, measure, **keys):
    s = stats.Stats('%s/%s' % (dir, name))
    data = s.get_raw(measure)
    c = ArrayPlotConfig(keys)
    page = T.div()
    config = T.form(
        action="/rawdata/%s/%s/%s" % (dir, name, measure), method="get")[
            c.index_bar("/rawdata/%s/%s/%s" % (dir, name, measure),
                        'index', len(data)),
            c.index_config(),
            T.br,
            c.plot_axis_config(),
            T.input(type='submit', value='Update'),
        ]
    params, settings, options, defaults = make_settings_table(dir)
    setting = parse_setting_name(name)
    params = T.table(border=1)
    row1 = T.tr()
    row2 = T.tr()
    for k, vv in sorted(options.items()):
        if len(vv) > 1:
            row1[T.th[k]]
            values = T.td()
            vvlist = list(sorted(vv, key=lambda x: convert_string_to_value(x)))
            for i in range(len(vvlist)):
                vvv = vvlist[i]
                sval = setting.get(k, defaults[k])
                if vvv == sval:
                    values[T.b[vvv], T.br]
                else:
                    setting2 = dict(setting)
                    setting2[k] = vvv
                    name2 = make_setting_name(dir, setting2)
                    values[T.a(href="/rawdata/%s/%s/%s?%s" %
                               (dir, name2, measure, c.url_args()))[vvv], T.br]
            row2[values]
    params[row1, row2]
    if type(data[0]) is float or type(data[0]) is int:
        table = T.table()
        for d in data:
            table[T.tr[T.td[d]]]
        page[table, T.img(src='/histogram/%s/%s/%s' % (dir, name, measure))]
        return str(page)
    elif type(data[0]) is list and type(data[0][0]) is float:
        page[T.a(href='/arrayplot/%s/%s/%s?%s' %
                 (dir, name, measure, c.url_args(dpi=300)))[
                     T.img(style='float:right',
                           src='/arrayplot/%s/%s/%s?%s' %
                           (dir, name, measure, c.url_args()))],
             params, config]
        return str(page)
def __init__(self, args):
    self.args = args
    self.stats = stats.Stats('')
    self.samfile = pysam.Samfile(args.input)
    self.amplicons = amplicon.load_amplicons_from_header(
        self.samfile.header, self.stats, self.samfile)
    self.amplicons = dict([(x.external_id, x) for x in self.amplicons])
def test_stats(self):
    s = stats.Stats()
    for i in range(0, 100):
        s['all'].incr()
        if i % 2 == 0:
            s['evens'].incr()
    self.assertEqual(int(s['evens']), 50)
    self.assertEqual(int(s['all']), 100)
def PlotETest(E, E_bin, b):
    n, bins = np.histogram(E, bins=E_bin, density=True)
    width = 2
    mid = 0.5 * (bins[1:] + bins[:-1])
    n *= 2 * width
    plt.bar(mid, n, width=width, align='center', label=r'test')
    E_mean, E_var, E_err, E_tau = stats.Stats(np.array(E))
    plt.axvline(E_mean, lw=0.25, label=r'test $\langle E \rangle$')
def PlotM2Test(M2, M2_bin, b):
    n, bins = np.histogram(M2, bins=M2_bin, density=True)
    width = 0.5 * (max(bins) - min(bins)) / (len(bins) - 1)
    mid = 0.5 * (bins[1:] + bins[:-1])
    n *= 2 * width
    plt.bar(mid, n, width=width, align='center', label=r'test')
    M2_mean, M2_var, M2_err, M2_tau = stats.Stats(np.array(M2))
    plt.axvline(M2_mean, lw=0.25, label=r'test $\langle M^2\rangle$')
def get_data(filename):
    df = pandas.read_csv(filename, header=0)
    data = stats.Stats()
    data.num_male = len(df.query('Sex == "M"').index)
    data.num_female = len(df.query('Sex == "F"').index)
    data.num_norm = len(df.query('Cat == 1.0').index)
    data.num_pre = len(df.query('Cat == 2.0').index)
    data.num_cancer = len(df.query('Cat == 3.0').index)
    data.num_screening = len(df.query('Reason == "screening"').index)
    data.num_fam_hist = len(df.query('Reason == "FHx"').index)
    data.num_hx_pol = len(df.query('Reason == "Hx Pol"').index)
    data.num_m_norm = len(df.query('Sex == "M"').query("Cat == 1.0"))
    data.num_m_pre = len(df.query('Sex == "M"').query("Cat == 2.0"))
    data.num_m_cancer = len(df.query('Sex == "M"').query("Cat == 3.0"))
    data.num_f_norm = len(df.query('Sex == "F"').query("Cat == 1.0"))
    data.num_f_pre = len(df.query('Sex == "F"').query("Cat == 2.0"))
    data.num_f_cancer = len(df.query('Sex == "F"').query("Cat == 3.0"))
    data.num_m_screening = len(
        df.query('Sex == "M"').query("Reason == 'screening'"))
    data.num_m_fam_hist = len(df.query('Sex == "M"').query("Reason == 'FHx'"))
    data.num_m_hx_pol = len(df.query('Sex == "M"').query("Reason == 'Hx Pol'"))
    data.num_f_screening = len(
        df.query('Sex == "F"').query("Reason == 'screening'"))
    data.num_f_fam_hist = len(df.query('Sex == "F"').query("Reason == 'FHx'"))
    data.num_f_hx_pol = len(df.query('Sex == "F"').query("Reason == 'Hx Pol'"))
    data.num_norm_screening = len(
        df.query('Cat == 1.0').query('Reason == "screening"'))
    data.num_norm_fam_hist = len(
        df.query('Cat == 1.0').query('Reason == "FHx"'))
    data.num_norm_hx_pol = len(
        df.query('Cat == 1.0').query('Reason == "Hx Pol"'))
    data.num_pre_screening = len(
        df.query('Cat == 2.0').query('Reason == "screening"'))
    data.num_pre_fam_hist = len(
        df.query('Cat == 2.0').query('Reason == "FHx"'))
    data.num_pre_hx_pol = len(
        df.query('Cat == 2.0').query('Reason == "Hx Pol"'))
    data.num_cancer_screening = len(
        df.query('Cat == 3.0').query('Reason == "screening"'))
    data.num_cancer_fam_hist = len(
        df.query('Cat == 3.0').query('Reason == "FHx"'))
    data.num_cancer_hx_pol = len(
        df.query('Cat == 3.0').query('Reason == "Hx Pol"'))
    data.num_patients = len(df.index)
    data.make_stats_file("data_stats.csv")
def initiliaze(self, gameGrid, level):
    self.grid = gameGrid
    self.__setMoves()
    if len(self.childrenMoves) == 0:
        return
    self.__initiliazeChildren(0, self.children)
    self.stats = stats.Stats(self.grid.gameStats)
    self.level = level
    self.initiliazed = True
def in_get_stats(outkey, desired_stats, interval, **kwargs):
    ti84 = stats.Stats()
    threading.Thread(target=fill_messages).start()
    while not self.stopper.is_set():
        if len(self.messages) > 0:
            packaged_data = ti84.package(desired_stats, self.messages, **kwargs)
            self.pub('schoolbus', packaged_data, outkey)
        time.sleep(interval)
def __init__(self, ip_address, aport, sport, measurement_folder):
    self.ip_address = ip_address
    self.robot_port = aport
    self.sport = sport
    self.robot_ip = ip_address
    self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    self.stat = stats.Stats(subfolder=measurement_folder)
    self.reord_cntr = self.stat.new_counter("controller_reord")
    self.lost_cntr = self.stat.new_counter("controller_lost")
    self.rx_cntr = self.stat.new_counter("controller_rx")
    self.tx_cntr = self.stat.new_counter("controller_tx")
    self.avg_single_hop_delay = 0
    self.tsr_ks = {}
    self.tsstx_ks = {}
    self.tssrx_ks = {}
    self.tastx_ks = {}
    self.tasrx_ks = {}
    self.taw_ks = {}
    self.gyro_rate_logger = self.stat.new_logger("gyro_rate_logger")
    self.motor_position_left_logger = self.stat.new_logger(
        "motor_position_left_logger")
    self.motor_position_right_logger = self.stat.new_logger(
        "motor_position_right_logger")
    self.gyro_offset_logger = self.stat.new_logger("gyro_offset_logger")
    self.motor_voltage_applied_left_logger = self.stat.new_logger(
        "motor_voltage_applied_left_logger")
    self.motor_voltage_applied_right_logger = self.stat.new_logger(
        "motor_voltage_applied_right_logger")
    self.robot_roundtrip_network_delay_logger = self.stat.new_logger(
        "robot_roundtrip_network_delay_logger")
    self.controller_processing_delay_logger = self.stat.new_logger(
        "controller_processing_delay_logger")
    self.tsr_k_logger = self.stat.new_logger("tsr_k_logger")
    self.tsstx_k_logger = self.stat.new_logger("tsstx_k_logger")
    self.tssrx_k_logger = self.stat.new_logger("tssrx_k_logger")
    self.tastx_k_logger = self.stat.new_logger("tastx_k_logger")
    self.tasrx_k_logger = self.stat.new_logger("tasrx_k_logger")
    self.taw_k_logger = self.stat.new_logger("taw_k_logger")
    logging.debug("Listen on address %s (port %s)", ip_address, str(self.sport))
    self.sock.setblocking(False)
    self.sock.bind(('', sport))
    self.old_timestamp = -1
    self.old_seq_number = 0
def main():
    # The following line is to make sure the script has sudo privilege to run tcpdump
    os.system('sudo echo')

    # Setting up configs
    PRINT_ACTION('Reading configs file and args', 0)
    (configs, cases, methods, testDir, resultsDir, statsDir, userDirs,
     screenshotsDir, dataPaths, netLogs, tcpdumpDir, tcpdumpFile,
     uniqeOptions, modifyEtcHosts) = initialize()
    configs.show_all()

    # Creating options
    '''
    IMPORTANT:
    --enable-benchmarking --enable-net-benchmarking: to enable the Javascript interface
    that allows chrome-har-capturer to flush the DNS cache and the socket pool before
    loading each URL. In other words, clear cache and close connections between runs!
    '''
    PRINT_ACTION('Creating options', 0)
    drivers = {}
    stat = stats.Stats(netInt=configs.get('networkInt'))
    chromeOptions = {}
    commonOptions = ['--no-first-run']
    if configs.get('clearCacheConns'):
        commonOptions += ['--enable-benchmarking', '--enable-net-benchmarking']

    # Creating driver instances and modifying /etc/hosts
    PRINT_ACTION('Creating driver options and modifying /etc/hosts', 0)
    for case in cases:
        # Modify /etc/hosts
        # modifyEtcHosts.add([configs.get('host')[case]], hostIP=configs.get('serverIP'))
        #
        # if case.endswith('proxy'):
        #     modifyEtcHosts.add([configs.get('quicProxyIP')])
        chromeOptions[case] = webdriver.ChromeOptions()
        unCommonOptions = ['--user-data-dir={}/{}'.format(userDirs, case),
                           '--data-path={}/{}'.format(dataPaths, case),
                           # '--log-net-log={}/{}.json'.format(netLogs, case),
                           ]
        for option in uniqeOptions[case] + commonOptions + unCommonOptions:
            chromeOptions[case].add_argument(option)
        drivers[case] = Driver(configs.get('chromedriver'),
                               configs.get('browserPath'),
                               chromeOptions[case],
                               pageLoadTimeOut=configs.get('pageLoadTimeout'))
        if not configs.get('closeDrivers'):
            print '\tFor: {}...\t'.format(case),
            sys.stdout.flush()
            try:
                drivers[case].open()
            except TimeoutError:
                print 'Got stuck opening driver! Drivers are persistent, so cannot continue. Exiting! :-s'
                sys.exit()
            print 'Done'
            drivers[case].sizePosition(case=case)
def __init__(self, hostname):
    super().__init__()
    self.hostname = hostname
    self.config = config.Config.instance()
    self.logger = logging.getLogger(utils.logger_str(__class__))
    # self.logger.setLevel(logging.INFO)
    self.contexts = self.get_contexts()
    self.servlets = {}
    self.build_servlets()
    self.stats = stats.Stats()
def __init__(self, health, **kwargs):
    self.name = ''
    self.health = health
    self.modifier = ''
    self.specialty = ''
    # kwargs.get(key, 0) returns 0 when the keyword was not supplied; the
    # previous `kwargs.get("explore") if "explore" else 0` form always took
    # the first branch and could pass None for missing stats.
    self.stats = stats.Stats(health,
                             kwargs.get("explore", 0),
                             kwargs.get("build", 0),
                             kwargs.get("strength", 0),
                             kwargs.get("intellect", 0))