def collect(self): self.pathsToCollect.append("/etc/profile") self.pathsToCollect.append("/etc/bashrc") self.pathsToCollect.append("/etc/sudoers") for user in self.userList: p = "/Users/{0}/.bash_profile".format(user) if os.path.isfile(p): self.pathsToCollect.append(p) p = "/Users/{0}/.bash_login".format(user) if os.path.isfile(p): self.pathsToCollect.append(p) p = "/Users/{0}/.profile".format(user) if os.path.isfile(p): self.pathsToCollect.append(p) p = "/Users/{0}/.bash_logout".format(user) if os.path.isfile(p): self.pathsToCollect.append(p) Collector.collect(self)
def collect(self):
    output = ""
    receiptPlistPaths = glob.glob("/private/var/db/receipts/*.plist")
    for onePlistPath in receiptPlistPaths:
        if os.path.isfile(onePlistPath) and onePlistPath.endswith(".plist"):
            plistDict = FoundationPlist.readPlist(onePlistPath)
            if not plistDict:
                continue
            output += str(plistDict["InstallDate"]) + "\t"
            output += plistDict["InstallPrefixPath"] + "\t"
            output += plistDict["InstallProcessName"] + "\t"
            output += plistDict["PackageFileName"] + "\t"
            output += plistDict["PackageIdentifier"] + "\t"
            output += plistDict["PackageVersion"] + "\n"
    # Sort the rows, then prepend the column headers
    output = ("Install Date\tPrefix Path\tProcess Name\tFile Name\tIdentifier\tVersion\n"
              + "\n".join(sorted(output.split("\n"))))
    filename = "installs.txt"
    filePath = self.collectionPath + filename
    f = open(filePath, "w+")
    f.write(output)
    f.close()
    self.pathsToCollect.append("/Library/Receipts/InstallHistory.plist")
    Collector.collect(self)

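# A minimal alternative sketch for reading a single receipt plist. This is an
# assumption, not the project's code: on Python 3 the standard library's
# plistlib loads both XML and binary plists, so it could stand in for
# FoundationPlist where PyObjC is unavailable.
import plistlib

def read_receipt(path):  # hypothetical helper, not part of the original module
    with open(path, "rb") as fp:
        return plistlib.load(fp)
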
def metateleport(defaults):
    collector = Collector(**defaults)
    env = Game(**defaults)
    mover = Mover(env, _extra_dim=1, **defaults)
    teleporter1 = Teleporter(env, _extra_dim=1, **defaults)
    teleporter2 = MetaTeleporter(env, **defaults)
    buffer1 = ReplayBuffer(**defaults)
    buffer2 = ReplayBuffer(**defaults)
    with Save(env, collector, mover, teleporter1, teleporter2, **defaults) as save:
        intervention_idx2, modified_board2 = teleporter2.pre_process(env)
        intervention_idx1, _ = teleporter1.pre_process(env)
        for frame in loop(env, collector, save, teleporter1, teleporter2):
            modified_board2 = teleporter2.interveen(env.board, intervention_idx2, modified_board2)
            modified_board1 = teleporter1.interveen(env.board, intervention_idx1, modified_board2)
            actions = mover(modified_board1)
            observations, rewards, dones, info = env.step(actions)
            (modified_board1, modified_board2,
             modified_rewards1, modified_rewards2,
             modified_dones1, modified_dones2,
             tele_rewards, intervention_idx1, intervention_idx2) = teleporter2.metamodify(
                observations, rewards, dones, info, teleporter1.interventions)
            buffer1.teleporter_save_data(teleporter1.boards, modified_board2,
                                         teleporter1.interventions, modified_rewards2,
                                         modified_dones2, intervention_idx1)
            buffer2.teleporter_save_data(teleporter2.boards, observations,
                                         teleporter2.interventions, tele_rewards,
                                         dones, intervention_idx2)
            mover.learn(modified_board1, actions, modified_rewards1, modified_dones1)
            board_before, board_after, intervention, tel_rewards, tele_dones = buffer1.sample_data()
            teleporter1.learn(board_after, intervention, tel_rewards, tele_dones, board_before)
            board_before, board_after, intervention, tel_rewards, tele_dones = buffer2.sample_data()
            teleporter2.learn(board_after, intervention, tel_rewards, tele_dones, board_before)
            collector.collect([rewards, modified_rewards1, modified_rewards2, tele_rewards],
                              [dones, modified_dones1, modified_dones2])

def CFagent(defaults):
    env = Game(**defaults)
    mover = Mover(env, _extra_dim=1, **defaults)
    teleporter = Teleporter(env, **defaults)
    buffer = ReplayBuffer(**defaults)
    CFagent = CFAgent(env, **defaults)
    CFbuffer = CFReplayBuffer(**defaults)
    collector = Collector(**defaults)
    with Save(env, collector, mover, teleporter, CFagent, **defaults) as save:
        intervention_idx, modified_board = teleporter.pre_process(env)
        dones = CFagent.pre_process(env)
        CF_dones, cfs = None, None
        for frame in loop(env, collector, save, teleporter):
            CFagent.counterfact(env, dones, teleporter, CF_dones, cfs)
            modified_board = teleporter.interveen(env.board, intervention_idx, modified_board)
            actions = mover(modified_board)
            observations, rewards, dones, info = env.step(actions)
            modified_board, modified_rewards, modified_dones, teleport_rewards, intervention_idx = teleporter.modify(
                observations, rewards, dones, info)
            buffer.teleporter_save_data(teleporter.boards, observations, teleporter.interventions,
                                        teleport_rewards, dones, intervention_idx)
            mover.learn(modified_board, actions, modified_rewards, modified_dones)
            board_before, board_after, intervention, tele_rewards, tele_dones = buffer.sample_data()
            teleporter.learn(board_after, intervention, tele_rewards, tele_dones, board_before)
            collector.collect([rewards, modified_rewards, teleport_rewards], [dones, modified_dones])
            CF_dones, cfs = CFagent.counterfact_check(dones, env, **defaults)
            CFbuffer.CF_save_data(CFagent.boards, observations, CFagent.counterfactuals,
                                  rewards, dones, CF_dones)
            CFboard, CFobs, cf, CFrewards, CFdones1 = CFbuffer.sample_data()
            CFagent.learn(CFobs, cf, CFrewards, CFdones1, CFboard)

def __init__(self, agent: Agent, env: UnityEnvironment, config: Dict[str, Any]):
    super().__init__(agent, env, config)

    default_config = {
        "steps": 2048,
        # Tensorboard settings
        "tensorboard_name": None,  # str, set explicitly
        # PPO
        "ppo_config": {
            # GD settings
            "optimizer": "adam",
            "optimizer_kwargs": {
                "lr": 1e-4,
                "betas": (0.9, 0.999),
                "eps": 1e-7,
                "weight_decay": 0,
                "amsgrad": False
            },
            "gamma": .99,  # Discount factor
            # PPO settings
            "ppo_steps": 25,  # Max. number of gradient updates in one iteration
            "eps": 0.1,  # PPO clip parameter
            "target_kl": 0.01,  # KL divergence limit
            "value_loss_coeff": 0.1,
            "entropy_coeff": 0.1,
            "max_grad_norm": 0.5,
            # Backpropagation settings
            "use_gpu": False,
        }
    }
    self.config = with_default_config(config, default_config)

    self.collector = Collector(agent=self.agent, env=self.env)
    self.ppo = PPOptimizer(agent=agent, config=self.config["ppo_config"])

    # Setup tensorboard
    self.writer: SummaryWriter
    if self.config["tensorboard_name"]:
        dt_string = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        self.path = Path.home() / "drlnd_logs" / f"{self.config['tensorboard_name']}_{dt_string}"
        self.writer = SummaryWriter(str(self.path))

        # Log the configs
        with open(str(self.path / "trainer_config.json"), "w") as f:
            json.dump(self.config, f)
        with open(str(self.path / "agent_config.json"), "w") as f:
            json.dump(self.agent.model.config, f)
        self.path = str(self.path)
    else:
        self.writer = None

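# One plausible implementation of with_default_config, sketched here as an
# assumption (the real helper lives elsewhere in this project): recursively
# fill in every key missing from `config` with the value from `default_config`.
def with_default_config(config, default_config):
    merged = dict(default_config)
    for key, value in (config or {}).items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = with_default_config(value, merged[key])
        else:
            merged[key] = value
    return merged
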
def make_app():
    """Instantiates Flask app, attaches collector database, installs acl."""
    LOG.info('Starting API')
    app = flask.Flask(__name__)
    app.register_blueprint(v1.blueprint, url_prefix='/v1')

    collector = Collector()
    collector.clean()
    thread.start_new_thread(listen, (collector.add,))

    @app.before_request
    def attach_config():
        flask.request.collector = collector
        collector.lock.acquire()

    @app.after_request
    def unlock(response):
        collector.lock.release()
        return response

    # Install the middleware wrapper
    if cfg.CONF.acl_enabled:
        acl.install(app, cfg.CONF)

    return app

def collect(self): filename = "history_downloads.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write( "Downloads (QuarantineEventsV2)\n-----------------------------\n") output = self.collectDownloads() f.write(output) f.close filename = "history_safari.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write("Safari history\n-----------------------------\n") output = self.collectSafari() f.write(output) f.close filename = "history_chrome.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write("Chrome history\n-----------------------------\n") output = self.collectChrome() f.write(output) f.close filename = "history_firefox.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write("Firefox history\n-----------------------------\n") output = self.collectFirefox() f.write(output) f.close Collector.collect(self)
class RankingCrawler():
    # flow capacity (MB)
    def __init__(self, cookie, capacity=1024):
        self.date = START_DATE
        self.domain = DOMAIN
        self.mode = PIXIV_MODES[PIXIV_MODE]
        # url sample: https://www.pixiv.net/ranking.php?mode=daily&date=20200801&p=1&format=json
        # ref url sample: https://www.pixiv.net/ranking.php?mode=daily&date=20200801
        self.url = 'https://www.pixiv.net/ranking.php?mode=' + self.mode
        self.cookie = cookie
        self.headers = {'x-requested-with': 'XMLHttpRequest'}
        self.collector = Collector(cookie, capacity)

    def __nextday(self):
        self.date += datetime.timedelta(days=1)

    # collect illust_id from daily json
    def collect(self):
        # note that 50 artworks per p=x
        page_num = (ARTWORKS_PER - 1) // 50 + 1  # ceil
        print("---start collecting " + self.mode + " ranking---")
        print("start with " + self.date.strftime("%Y-%m-%d"))
        print("end with " + (self.date + datetime.timedelta(days=self.domain - 1)).strftime("%Y-%m-%d") + '\n')
        # store all jsons' url in self.group
        self.group = set()
        for _i in range(DOMAIN):
            for j in range(page_num):
                self.group.add(self.url + '&date=' + self.date.strftime("%Y%m%d")
                               + '&p=' + str(j + 1) + '&format=json')
            self.__nextday()
        pool = ParallelPool(len(self.group))
        while len(self.group) or not pool.empty():
            time.sleep(THREAD_DELAY)
            # send ranking_json to parallel pool
            while not pool.full() and len(self.group):
                url = self.group.pop()
                ref = re.search('(.*)&p', url).group(1)
                # dict.update() returns None, so update in place and pass the dict itself
                self.headers.update({'Referer': ref})
                pool.add(CollectorUnit(url, self.cookie, ranking_selector, self.headers))
            # remove complete thread
            finished = pool.finished_item()
            while True:
                try:
                    ranking_json = next(finished)
                    self.collector.add(ranking_json.group)
                    if MOST_OUTPUT:
                        print("--send page " + ranking_json.url + " to collector--")
                except StopIteration:
                    break
        print("\n---collect " + self.mode + " ranking complete---")

    def run(self):
        self.collect()
        self.collector.collect()
        return self.collector.download()

def __init__(self, cookie, maxnum=200, capacity=1024):
    self.num = maxnum
    self.cookie = cookie
    self.url = "https://www.pixiv.net/ajax/user/" + USER_ID + "/illusts"
    self.headers = BROWSER_HEADER
    self.collect_cnt = 0
    self.collector = Collector(cookie, capacity)

def main():
    creds = Credentials(os.path.expanduser('~/.tweepy'))
    today = datetime.date.today()
    week = datetime.timedelta(7)

    query_terms = ['#NBAFinals2015', '#Warriors']
    query_ops = {}
    query_ops['lang'] = 'en'

    stream = False
    if stream:
        auth = tweepy.OAuthHandler(creds.consumer_key, creds.consumer_secret)
        auth.set_access_token(creds.access_token, creds.access_token_secret)
        query_ops['until'] = today + week
    else:
        # Per Ron Cordell's suggestion, use App Auth token for increased limits
        auth = tweepy.AppAuthHandler(creds.consumer_key, creds.consumer_secret)
        query_ops['since'] = today - week
        query_ops['until'] = today
        query_ops['result_type'] = 'recent'

    collector = Collector(auth)
    if stream:
        print("Starting streaming")
        collector.stream(query_terms=query_terms, query_ops=query_ops)
    else:
        print("Starting search")
        collector.search(query_terms=query_terms, query_ops=query_ops)
    print('All done! Last ID processed={0}'.format(collector.last_id))

class MainResource(object):
    def __init__(self):
        self._logger = logging.getLogger('gunicorn.error')
        self._collector = Collector()

    def on_post(self, req, resp):
        self._logger.debug(f'Handling request {req.url} with params {req.params}')
        try:
            self._collector.collect_phrase(req.params['phrase'])
            response_body = json.dumps({
                "status": "success",
                "message": "Phrase sent for collection"
            })
            resp.status = falcon.HTTP_200
            resp.body = response_body
        except Exception as e:
            self._logger.error('An error occurred when processing the request',
                               exc_info=e)
            response_body = json.dumps({
                "status": "error",
                "message": "An error occurred when processing the request"
            })
            resp.status = falcon.HTTP_500
            resp.body = response_body

def test_enhance_sibling_symbols(self):
    c = Collector()
    aeabi_drsub = {
        collector.ADDRESS: "0000009c",
        collector.SIZE: 8,
        collector.TYPE: collector.TYPE_FUNCTION,
    }
    aeabi_dsub = {
        collector.ADDRESS: "000000a4",
        collector.SIZE: 4,
        collector.TYPE: collector.TYPE_FUNCTION,
    }
    adddf3 = {
        collector.ADDRESS: "000000a8",
        collector.SIZE: 123,
        collector.TYPE: collector.TYPE_FUNCTION,
    }
    c.symbols = {int(f[collector.ADDRESS], 16): f
                 for f in [aeabi_drsub, aeabi_dsub, adddf3]}
    c.enhance_sibling_symbols()
    self.assertFalse(aeabi_drsub.has_key(collector.PREV_FUNCTION))
    self.assertEqual(aeabi_dsub, aeabi_drsub.get(collector.NEXT_FUNCTION))
    self.assertEqual(aeabi_drsub, aeabi_dsub.get(collector.PREV_FUNCTION))
    self.assertEqual(adddf3, aeabi_dsub.get(collector.NEXT_FUNCTION))
    self.assertEqual(aeabi_dsub, adddf3.get(collector.PREV_FUNCTION))
    self.assertFalse(adddf3.has_key(collector.NEXT_FUNCTION))

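# A sketch of what enhance_sibling_symbols must do to satisfy the test above,
# written as an assumption: sort the function symbols by address and link each
# one to its neighbors. The lowercase key names are illustrative stand-ins for
# the collector.NEXT_FUNCTION / collector.PREV_FUNCTION constants, and the
# real puncover implementation may additionally require the symbols to be
# contiguous (address + size reaching the next address, as in the fixtures).
def enhance_sibling_symbols(symbols):
    functions = sorted((s for s in symbols.values() if s.get("type") == "function"),
                       key=lambda s: int(s["address"], 16))
    for prev_sym, next_sym in zip(functions, functions[1:]):
        prev_sym["next_function"] = next_sym
        next_sym["prev_function"] = prev_sym
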
def run(self): print("==== Checking hosts on ====") # Here we can block by online workers (only run if haver 4 workers), # but we can work with less then 4 workers, and in my test, I have only 1. num_hosts = self._get_hosts_on(1) print("Hosts ON: %d" % num_hosts) print("==== Creating random matrices ====") self.matrix_a = self._get_random_matrix(self.a_n, self.a_m) self.matrix_b = self._get_random_matrix(self.b_n, self.b_m) print("==== Created Matrices: ====") self._print_two_matrices(self.matrix_a, self.matrix_b) self.matrix_divide() distributor = Distributor("*", "50010") collector = Collector("*", "50012", 5000) distributor.send_jobs(self._create_jobs()) # For test, check services in rasp's self._check_services(1) results = collector.collect(4) if 'err' in results: print("Error in some RasPI: %s" % results['err']) exit() print("==== Appending matrices ====") C1 = self._matrix_sum(results['A1B1'], results['A2B2']) C2 = self._matrix_sum(results['A3B1'], results['A4B2']) C = C1 + C2 print("==== Final result: ====") self._print_matrix(C)
def test_parses_assembly_and_stops_after_function(self):
    assembly = """
000034fc <window_raw_click_subscribe>:
$t():
    34fc: b40f      push  {r0, r1, r2, r3}
    34fe: 4901      ldr r1, [pc, #4]  ; (3504 <window_raw_click_subscribe+0x8>)
    3500: f7fc bdc2 b.w 88 <jump_to_pbl_function>
$d():
    3504: 000004c4  .word 0x000004c4
    3508: 00040000  .word 0x00040000
    350c: 000b008d  .word 0x000b008d

00003510 <.LC1>:
.LC1():
    3510: 69727073  .word 0x69727073
    3514: 42736574  .word 0x42736574
    3518: 31647269  .word 0x31647269
    351c: 0036      .short  0x0036
"""
    c = Collector()
    self.assertEqual(2, c.parse_assembly_text(assembly))
    self.assertTrue(c.symbols.has_key(0x000034fc))
    self.assertEqual(c.symbols[0x000034fc]["name"], "window_raw_click_subscribe")
    # print "\n".join(c.symbols["000034fc"]["asm"])
    self.assertEqual(len(c.symbols[0x000034fc]["asm"]), 8)

def collect(self): filename = "processes.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write("Processes\n-----------------------------\n") processData = os.popen( "ps axo user,pid,ppid,start,time,command").read().rstrip() f.write(processData + "\n\n") f.close filename = "processes_files.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write("Files open by process\n-----------------------------\n") output = os.popen("lsof").read().rstrip() f.write(output + "\n\n") f.close filename = "processes_network.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write( "Network connections open by process\n-----------------------------\n" ) output = os.popen("lsof -i").read().rstrip() f.write(output + "\n\n") f.close # No file paths to collect Collector.collect(self)
def collect(self): filename = "trusted_certificates.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write("Certificate trust settings\n-----------------------------\n") for user in self.userList: f.write("For user {0}:\n\n".format(user)) output = os.popen( "sudo -u {0} security dump-trust-settings 2>&1".format( user)).read().rstrip() f.write(output + "\n\n") f.write("Admin certificate info\n-----------------------------\n") output = os.popen("security dump-trust-settings -d").read().rstrip() f.write(output + "\n\n") f.write("System certificate info\n-----------------------------\n") output = os.popen("security dump-trust-settings -s").read().rstrip() f.write(output + "\n\n") f.write("All certificates\n-----------------------------\n") output = self.collectAllCerts() f.write(output) f.close # Add any paths to self.pathsToCollect Collector.collect(self)
def collect(self): filename = "network_config.txt" filePath = self.collectionPath + filename f = open(filePath, "w+") f.write("en0\n-----------------------------\n") output = os.popen("ifconfig en0").read().rstrip() f.write(output + "\n\n") f.write("en1\n-----------------------------\n") output = os.popen("ifconfig en1").read().rstrip() f.write(output + "\n\n") # Output will already have a "DNS configuration" heading, no need to add one output = os.popen("scutil --dns 2>&1").read().rstrip() f.write(output + "\n\n") f.write("Proxies\n-----------------------------\n") output = os.popen("scutil --proxy 2>&1").read().rstrip() f.write(output + "\n\n") f.write("pf rules\n-----------------------------\n") output = os.popen("sudo pfctl -s rules 2>&1").read().rstrip() f.write(output + "\n\n") f.close self.pathsToCollect.append("/etc/hosts") Collector.collect(self)
def __init__(self, artist_id, cookie, capacity=1024):
    self.url = 'https://www.pixiv.net/ajax/user/' + artist_id + '/profile/all?lang=zh'
    self.ref = 'https://www.pixiv.net/users/' + artist_id + '/illustrations'
    self.headers = {'x-user-id': USER_ID}
    self.headers.update({'Referer': self.ref})
    self.cookie = cookie
    self.collector = Collector(cookie, capacity)

def main():
    '''Function which starts the tool.'''
    # setup logging
    setupLogging()
    # get start and end dates for collecting if given from config file
    startDate, endDate = _getStartAndEnd()
    # get the list of datanodes we will be collecting measurements from
    iotTicket.getDataNodes()
    # create FIWARE entities to Orion from the buses we collect data from if not already created
    fiware.createEntities()
    if not fiware.sendToQl:
        # we will send measurements to QuantumLeap through Orion subscription(s)
        # so create them if not already created
        fiware.addSubscription()
    # create the collector that takes care of the actual collection process
    myCollector = Collector(startDate, endDate)
    try:
        # and start collecting
        myCollector.startCollecting()
    except KeyboardInterrupt:
        log.info('Got keyboard interrupt. Collecting stopped.')
    except:
        log.exception('Unexpected exception occurred.')
        exit()
    log.info('Data collection done.')

def test_enhances_caller(self):
    assembly = """
00000098 <pbl_table_addr>:
     8e4: f000 f824 bl  930 <app_log>

00000930 <app_log>:
$t():
"""
    c = Collector()
    self.assertEqual(2, c.parse_assembly_text(assembly))
    self.assertTrue(c.symbols.has_key(0x00000098))
    self.assertTrue(c.symbols.has_key(0x00000930))
    pbl_table_addr = c.symbols[0x00000098]
    app_log = c.symbols[0x00000930]

    self.assertFalse(pbl_table_addr.has_key("callers"))
    self.assertFalse(pbl_table_addr.has_key("callees"))
    self.assertFalse(app_log.has_key("callers"))
    self.assertFalse(app_log.has_key("callees"))

    c.enhance_call_tree()
    self.assertEqual(pbl_table_addr["callers"], [])
    self.assertEqual(pbl_table_addr["callees"], [app_log])
    self.assertEqual(app_log["callers"], [pbl_table_addr])
    self.assertEqual(app_log["callees"], [])

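# An illustrative sketch (an assumption, not puncover's actual code) of the
# call-tree enhancement the test above exercises: scan each symbol's
# disassembly for "bl <address>" branch-and-link instructions and cross-link
# callers and callees. The key names and regex are hypothetical.
import re

BL_PATTERN = re.compile(r"\bbl\s+([0-9a-f]+)\b")

def enhance_call_tree(symbols):
    for symbol in symbols.values():
        symbol.setdefault("callers", [])
        symbol.setdefault("callees", [])
    for symbol in symbols.values():
        for line in symbol.get("asm", []):
            match = BL_PATTERN.search(line)
            if match:
                callee = symbols.get(int(match.group(1), 16))
                if callee is not None:
                    symbol["callees"].append(callee)
                    callee["callers"].append(symbol)
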
def main():
    query_terms = ['#NBAFinals2015', '#Warriors']

    # We only need a single mongodb sink; FilteringFacet will just let us
    # avoid storing non-matching tweets
    sink = MongoDBSink('db_restT')
    sink.open('tweets')

    # closing ensures any files written get flushed/closed.
    with closing(FilteringFacet(
            RegexMatcher('(' + '|'.join(query_terms) + ')'),
            lambda key: sink)) as facet:
        creds = Credentials(os.path.expanduser('~/.tweepy'))
        auth = tweepy.AppAuthHandler(creds.consumer_key, creds.consumer_secret)
        collector = Collector(auth, facet)

        today = datetime.date.today()
        week = datetime.timedelta(7)
        query_ops = {}
        query_ops['lang'] = 'en'
        query_ops['result_type'] = 'mixed'

        print("Starting search")
        collector.search(query_terms=query_terms, query_ops=query_ops)
        print('All done! Last ID processed={0}'.format(collector.last_id))

def __init__(self, classifier, mic_params, is_audio_record=False, root_path='./'):
    # arguments
    self.classifier = classifier
    self.mic_params = mic_params
    self.is_audio_record = is_audio_record
    self.root_path = root_path

    # plot path
    self.plot_path = self.root_path + self.mic_params['plot_path']

    # create folder for plot path
    create_folder([self.plot_path])

    # shortcuts
    self.feature_params = classifier.feature_params

    # feature extractor
    self.feature_extractor = FeatureExtractor(self.feature_params)

    # windowing params
    self.N, self.hop = self.feature_extractor.N, self.feature_extractor.hop

    # queue
    self.q = queue.Queue()

    # collector
    self.collector = Collector(
        N=self.N,
        hop=self.hop,
        frame_size=self.feature_params['frame_size'],
        update_size=self.mic_params['update_size'],
        frames_post=self.mic_params['frames_post'],
        is_audio_record=self.is_audio_record)

    # device
    self.device = sd.default.device[0] if not self.mic_params['select_device'] else self.mic_params['device']

    # determine downsample
    self.downsample = self.mic_params['fs_device'] // self.feature_params['fs']

    # get input devices
    self.input_dev_dict = self.extract_devices()

    # show devices
    print("\ndevice list: \n", sd.query_devices())
    print("\ninput devs: ", self.input_dev_dict.keys())

    # stream
    self.stream = None

    # change device flag
    self.change_device_flag = False

def test_stack_usage_line2(self):
    line = "puncover.c:8:43:dynamic_stack2 16 dynamic"
    c = Collector()
    c.symbols = {"123": {
        "base_file": "puncover.c",
        "line": 8,
    }}
    self.assertTrue(c.parse_stack_usage_line(line))

def collect(self):
    for user in self.userList:
        bashHistPath = "/Users/{0}/.bash_history".format(user)
        if os.path.isfile(bashHistPath):
            self.pathsToCollect.append(bashHistPath)
    Collector.collect(self)

def test_suffix(self):
    col = Collector()
    print(col.suffix("abc.TXT"))
    print(col.suffix("./test_suite.py"))
    print(col.suffix(r"C:\Users\Qun\PycharmProjects\PhotoCollector\PhotoCollecter\test\test_suite.py"))

def __init__(self, width, height):
    game_mouse.Game.__init__(self, "Flappy Bird", width, height, 50)
    self.font_height = 12
    self.font = pygame.font.SysFont("arial", self.font_height)
    self.mCollector = Collector(width, height)
    return

def applySettings(self, settingsDict):
    if "unifiedLogArguments" in settingsDict:
        self.unifiedLogArguments = settingsDict["unifiedLogArguments"]
    if "collectAuditLogs" in settingsDict:
        self.collectAuditLogs = settingsDict["collectAuditLogs"]
    Collector.applySettings(self, settingsDict)

def collect_sample():
    c = Collector()
    while True:
        sample = sample_queue.get()
        logging.info('Processing sample {}'.format(sample.url))
        try:
            c.collect(sample)
        except Exception as e:
            # include the actual error instead of an empty message
            logging.info('Error processing sample: {}'.format(e))

def __init__(self, file_name):
    self.map = []
    self.hero = None  # waiting for the first call of spawn(hero)
    self.hero_x = -1
    self.hero_y = -1
    self.collector = Collector()  # collect spawn cells, enemies and treasures
    self.level = levels.index(file_name) + 1
    self._read_file(file_name)

def main():
    conf = parse_options()
    c = Client(conf)
    collector = Collector(c)
    start = c.get_user(conf.start_user)
    collector.collect(start)
    g = collector.graph
    nx.write_dot(g, conf.output_file)

def test1(self):
    coll = Collector()
    for x in self.data:
        coll.add(x)
    self.assertAlmostEqual(self.stdev, coll.standard_deviation())
    self.assertAlmostEqual(self.var, coll.variance(), 5)
    self.assertAlmostEqual(self.avg, coll.average(), 5)
    self.assertAlmostEqual(self.sumsq, coll.sum_squares(), 5)
    self.assertAlmostEqual(self.sum, coll.sum(), 5)
    self.assertEqual(self.count, coll.count())

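# A minimal sketch of the statistics API this test exercises (an assumption --
# the real Collector may differ, e.g. in whether variance() is the sample or
# population estimate). Welford's online algorithm keeps a running mean and a
# running sum of squared deviations, avoiding the catastrophic cancellation of
# the naive sum/sum-of-squares formula.
class RunningStats(object):  # hypothetical stand-in name
    def __init__(self):
        self._count = 0
        self._mean = 0.0
        self._m2 = 0.0   # sum of squared deviations from the current mean
        self._sum = 0.0
        self._sumsq = 0.0

    def add(self, x):
        self._count += 1
        self._sum += x
        self._sumsq += x * x
        delta = x - self._mean
        self._mean += delta / self._count
        self._m2 += delta * (x - self._mean)

    def count(self): return self._count
    def sum(self): return self._sum
    def sum_squares(self): return self._sumsq
    def average(self): return self._mean
    def variance(self): return self._m2 / (self._count - 1)  # sample variance
    def standard_deviation(self): return self.variance() ** 0.5
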
def test_stack_usage_line(self):
    line = "puncover.c:14:40:0 16 dynamic,bounded"
    c = Collector()
    c.symbols = {"123": {
        "base_file": "puncover.c",
        "line": 14,
    }}
    self.assertTrue(c.parse_stack_usage_line(line))
    self.assertEqual(16, c.symbols["123"]["stack_size"])
    self.assertEqual("dynamic,bounded", c.symbols["123"]["stack_qualifiers"])

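# The parsed lines follow GCC's -fstack-usage output format,
# "<file>:<line>:<col>:<function> <size> <qualifiers>". An illustrative
# parsing sketch (not puncover's actual implementation):
import re

STACK_USAGE_PATTERN = re.compile(r"^(.*?):(\d+):(\d+):(.+?)\s+(\d+)\s+(\S+)$")

def parse_stack_usage_line(line):
    match = STACK_USAGE_PATTERN.match(line.strip())
    if not match:
        return None
    base_file, line_no, _col, name, size, qualifiers = match.groups()
    return {
        "base_file": base_file,
        "line": int(line_no),
        "name": name,
        "stack_size": int(size),
        "stack_qualifiers": qualifiers,
    }
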
def __init__(self, config, update=False):
    self.df = pd.DataFrame()
    if update:
        cll = Collector(config)
        cll.download_data()
    self.config = config
    self.symbol = self.config['symbol']
    self.timeframe = self.config['timeframe']
    self.all_candles = []
    self.pull_data()

def collecting(args):
    logger = get_logger()
    db = create_db_connection(args.db_connection)
    collector = Collector(args.node, db)
    if args.rebuild:
        collector.clear()
    collector.stat()
    collector.play(args.limit)
    collector.stat()

def test_derive_filename_from_assembly(self):
    c = Collector()
    c.parse_assembly_text("""
000008a8 <uses_doubles2.constprop.0>:
uses_doubles2():
/Users/behrens/Documents/projects/pebble/puncover/pebble/build/../src/puncover.c:19
     8a8: b508      push  {r3, lr}
""")
    s = c.symbol_by_addr("8a8")
    self.assertEqual("/Users/behrens/Documents/projects/pebble/puncover/pebble/build/../src/puncover.c", s[collector.PATH])
    self.assertEqual("puncover.c", s[collector.BASE_FILE])
    self.assertEqual(19, s[collector.LINE])

def test_parses_assembly_and_ignores_c(self):
    assembly = """
00000098 <pbl_table_addr>:
/path/to.c:8
pbl_table_addr():
      98: a8a8a8a8  .word 0xa8a8a8a8
"""
    c = Collector()
    self.assertEqual(1, c.parse_assembly_text(assembly))
    self.assertTrue(c.symbols.has_key(0x00000098))
    self.assertEqual(c.symbols[0x00000098]["name"], "pbl_table_addr")
    self.assertEqual(len(c.symbols[0x00000098]["asm"]), 2)
    self.assertEqual(c.symbols[0x00000098]["asm"][0], "pbl_table_addr():")

def test_derive_file_elements_for_unknown_files(self):
    c = Collector()
    s = c.add_symbol("some_symbol", "00a")
    self.assertEqual("some_symbol", s[collector.NAME])
    self.assertNotIn(collector.PATH, s)
    self.assertNotIn(collector.BASE_FILE, s)
    c.derive_folders()
    self.assertEqual("<unknown>/<unknown>", s[collector.PATH])
    self.assertEqual("<unknown>", s[collector.BASE_FILE])
    self.assertIn(collector.FILE, s)
    file = s[collector.FILE]
    self.assertEqual("<unknown>", file[collector.NAME])
    folder = file[collector.FOLDER]
    self.assertEqual("<unknown>", folder[collector.NAME])

def run(self):
    while True:
        # collection management module
        logger.info("CollectorManager has successfully initiated")
        collector = Collector()
        collector.create_tenants_bills()

        # subscription management module
        logger.info("SubscriptionManager has successfully initiated")
        subs = SubscriptionManager()
        subs.manage_subscription()

        logger.info("SchedulingManager has successfully initiated")
        sched = SchedulingManager()
        sched.commence()
        time.sleep(60)

def test_enhances_assembly(self):
    assembly = """
00000098 <pbl_table_addr>:
pbl_table_addr():
 568:\tf7ff ffca \tbl\t98
"""
    c = Collector()
    self.assertEqual(1, c.parse_assembly_text(assembly))
    self.assertTrue(c.symbols.has_key(0x00000098))
    self.assertEqual(c.symbols[0x00000098]["name"], "pbl_table_addr")
    self.assertEqual(c.symbols[0x00000098]["asm"][1], " 568:\tf7ff ffca \tbl\t98")
    c.enhance_assembly()
    self.assertEqual(c.symbols[0x00000098]["asm"][1], " 568:\tf7ff ffca \tbl\t98 <pbl_table_addr>")

def test_enhance_function_size_from_assembly(self):
    c = Collector()
    c.symbols = {int("0000009c", 16): {
        collector.ADDRESS: "0000009c",
        collector.ASM: """
$t():
      9c: f081 4100 eor.w r1, r1, #2147483648  ; 0x80000000
      a0: e002      b.n a8 <__adddf3>
      a2: bf00      nop
""".split("\n")
    }}
    s = c.symbol_by_addr("9c")
    self.assertFalse(s.has_key(collector.SIZE))
    c.enhance_function_size_from_assembly()
    self.assertEqual(8, s[collector.SIZE])

def test_parses_assembly2(self):
    assembly = """
00000098 <pbl_table_addr.constprop.0>:
pbl_table_addr():
      98: a8a8a8a8  .word 0xa8a8a8a8

0000009c <__aeabi_dmul>:
__aeabi_dmul():
      9c: b570      push  {r4, r5, r6, lr}
"""
    c = Collector()
    self.assertEqual(2, c.parse_assembly_text(assembly))
    self.assertTrue(c.symbols.has_key(0x0000009c))
    self.assertEqual(c.symbols[0x0000009c]["name"], "__aeabi_dmul")
    self.assertTrue(c.symbols.has_key(0x00000098))
    self.assertEqual(c.symbols[0x00000098]["name"], "pbl_table_addr")

def __init__(self):
    conf_path = "%s/FDM/FDM.conf" % (os.getcwd())
    self.__conf = ConfigParser.ConfigParser()
    self.__conf.read(conf_path)
    self.__conn = None
    self.collector = Collector()

def __init__(self, config):
    Collector.__init__(self, config.datapool)
    self.config = config
    # logging initialization
    loggingInfo = config.loggerInfo
    logDir, logFile = os.path.split(loggingInfo['filename'])
    if not os.path.exists(logDir):
        os.makedirs(logDir)
    logging.basicConfig(
        level=loggingInfo['level'],
        format=loggingInfo['format'],
        filename=loggingInfo['filename'],
        encoding=loggingInfo['encoding']
    )
    self.collected = 0
    logging.info('collector "%s" starting...' % self.name)

def run_collector(self):
    '''Instantiates a collector, adds the specified jobs to it, then
    returns the job uuids & queue name.'''
    details = request.json or {}
    _input = details.pop('Input')
    input_type = details.pop('InputType')
    collector = Collector(**details)
    jobs = collector.schedule_jobs(_input, input_type)
    res = {
        'data': {
            'jobs': jobs,
            'log': collector.log_data,
        },
        'message': 'collector initiated successfully',
    }
    return Response(
        response=dumps(res),
        status=200,
        mimetype='application/json'
    )

class Collection:
    def __init__(self, collection_name, query_name, queue_name, table_name):
        self.collection_name = collection_name
        self.redis_conn = get_rc()
        self.table_name = table_name
        self.queue = Queue('collections', connection=self.redis_conn)
        self.query_name = query_name
        self.collector = Collector(query_name, collection_name, queue_name)

    def schedule(self, job_parameters, input_type):
        self.collector.schedule_jobs(job_parameters, input_type)
        finishings = map(self.add_finisher, self.collector.jobs)
        return finishings

    def add_finisher(self, job):
        # enqueue a follow-up job that runs only after `job` completes
        return self.queue.enqueue(
            consume_data,
            kwargs={'job_id': job.id, 'table_name': self.table_name},
            depends_on=job
        )

def test_derive_file_elements(self):
    c = Collector()
    s1 = {collector.PATH: "/Users/behrens/Documents/projects/pebble/puncover/pebble/build/../src/puncover.c"}
    s2 = {collector.PATH: "/Users/thomas/work/arm-eabi-toolchain/build/gcc-final/arm-none-eabi/thumb2/libgcc/../../../../../gcc-4.7-2012.09/libgcc/config/arm/ieee754-df.S"}
    s3 = {collector.PATH: "src/puncover.c"}
    c.symbols = {
        1: s1,
        2: s2,
        3: s3,
    }
    c.derive_folders()
    self.assertEqual("/Users/behrens/Documents/projects/pebble/puncover/pebble/src/puncover.c", s1[collector.PATH])
    self.assertIsNotNone(s1[collector.FILE])
    self.assertEqual("/Users/thomas/work/arm-eabi-toolchain/gcc-4.7-2012.09/libgcc/config/arm/ieee754-df.S", s2[collector.PATH])
    self.assertIsNotNone(s2[collector.FILE])
    self.assertEqual("src/puncover.c", s3[collector.PATH])
    self.assertIsNotNone(s3[collector.FILE])

def make_app():
    """Instantiates Flask app, attaches collector database."""
    LOG.info('Starting API')
    app = flask.Flask(__name__)
    app.register_blueprint(v1.blueprint, url_prefix='')

    collector = Collector()
    collector.clean()
    thread.start_new_thread(listen, (collector.add,))

    @app.before_request
    def attach_config():
        flask.request.collector = collector
        collector.lock.acquire()

    @app.after_request
    def unlock(response):
        collector.lock.release()
        return response

    return app

def search(keywords):
    keyword_list = keywords.split(" ")
    collector = Collector()
    if collector.is_meiju_info_file_exist():
        collector.read_all_meiju_info_from_file()
    else:
        collector.save_all_meiju_info()
        collector.write_all_meiju_info_to_file()
    searcher = Searcher()
    meiju_ename_list = searcher.search_meiju_list_by_english_name_keyword(collector, keyword_list)
    click.echo("Total %d Meiju found. Following is the list:" % len(meiju_ename_list))
    for meiju_ename in meiju_ename_list:
        click.echo("%s" % meiju_ename)
    return

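# The cache-or-fetch sequence above is repeated verbatim by show() and
# download() below; a shared helper (load_collector is a hypothetical name,
# not part of the original module) would centralize it:
def load_collector():
    collector = Collector()
    if collector.is_meiju_info_file_exist():
        collector.read_all_meiju_info_from_file()
    else:
        collector.save_all_meiju_info()
        collector.write_all_meiju_info_to_file()
    return collector
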
def __init__(self, data_file=None, data_suffix=False, cover_pylib=False,
             auto_data=False):
    """Create a new coverage measurement context.

    `data_file` is the base name of the data file to use, defaulting to
    ".coverage". `data_suffix` is appended to `data_file` to create the
    final file name. If `data_suffix` is simply True, then a suffix is
    created with the machine and process identity included.

    `cover_pylib` is a boolean determining whether Python code installed
    with the Python interpreter is measured. This includes the Python
    standard library and any packages installed with the interpreter.

    If `auto_data` is true, then any existing data file will be read when
    coverage measurement starts, and data will be saved automatically when
    measurement stops.

    """
    self.cover_pylib = cover_pylib
    self.auto_data = auto_data

    self.exclude_re = ""
    self.exclude_list = []

    self.file_locator = FileLocator()
    self.collector = Collector(self._should_trace)

    # Create the data file.
    if data_suffix:
        if not isinstance(data_suffix, basestring):
            # if data_suffix=True, use .machinename.pid
            data_suffix = ".%s.%s" % (socket.gethostname(), os.getpid())
    else:
        data_suffix = None

    self.data = CoverageData(
        basename=data_file, suffix=data_suffix,
        collector="coverage v%s" % __version__
    )

    # The default exclude pattern.
    self.exclude('# *pragma[: ]*[nN][oO] *[cC][oO][vV][eE][rR]')

    # The prefix for files considered "installed with the interpreter".
    if not self.cover_pylib:
        os_file = self.file_locator.canonical_filename(os.__file__)
        self.pylib_prefix = os.path.split(os_file)[0]

    here = self.file_locator.canonical_filename(__file__)
    self.cover_prefix = os.path.split(here)[0]

def main():
    f = open('try_3.txt', 'w')
    g = open('accs.txt', 'w')
    g.close()

    task = MarioTask("testbed", initMarioMode=2)
    task.env.initMarioMode = 2
    task.env.levelDifficulty = 1
    task.env.BASE_LEVEL = 500000

    results = []
    names = []

    iterations = 20
    rounds = 30
    learning_samples = 33
    eval_samples = 10
    # iterations = 5
    # rounds = 2
    # learning_samples = 3
    # eval_samples = 2

    if args['noisy']:
        agent = NoisySupervise(IT, useKMM=False)
        dire = './training_data_noisy/'
    else:
        agent = Supervise(IT, useKMM=False)
        dire = './training_data/'
    exp = EpisodicExperiment(task, agent)
    C = Collector(agent, exp)
    C.collect(rounds=rounds, iterations=iterations,
              learning_samples=learning_samples,
              eval_samples=eval_samples, directory=dire)

    print "finished"

def start(self):
    if self.debug:
        msg = "Package: mpx.service.logger.periodic_log\n"
        msg = msg + "Class: PeriodicLog\n"
        msg = msg + "Method: start\n"
        msg = msg + "starting the periodic log collecting..."
        mpx.lib.msglog.log("broadway", mpx.lib.msglog.types.DB, msg)
    if self.collector is not None:
        self.collector.stop()
    self.collector = Collector(self, self.period, self.trigger)
    for node in self.get_child("columns").children_nodes():
        self.collector.add_column(node)
    Log.start(self)
    self.collector.start()

def setup(self):
    config = self.config
    logger.info('setup', name=config["name"], module=config["probe_module"],
                config=pprint.pformat(config))
    # logger.warn('NODE: ' + HOSTNAME, name=config["name"], module=config["probe_module"], config=pprint.pformat(config))
    probe_mod = blipp_import(config["probe_module"])
    self.probe = probe_mod.Probe(self.service, self.measurement)
    if "." in config["collection_schedule"]:
        sched_file, sched_name = config["collection_schedule"].split('.')
    else:
        sched_file, sched_name = "builtins", config["collection_schedule"]
    logger.info('setup', sched_file=sched_file, sched_name=sched_name)
    logger.warn('NODE: ' + HOSTNAME, sched_file=sched_file, sched_name=sched_name)
    self.scheduler = blipp_import("schedules." + sched_file,
                                  fromlist=[1]).__getattribute__(sched_name)
    self.scheduler = self.scheduler(self.service, self.measurement)
    self.collector = Collector(self.service, self.measurement)

def show(name):
    collector = Collector()
    if collector.is_meiju_info_file_exist():
        collector.read_all_meiju_info_from_file()
    else:
        collector.save_all_meiju_info()
        collector.write_all_meiju_info_to_file()
    if name in collector.meiju_ename_inst_dict:
        meiju_inst = collector.meiju_ename_inst_dict[name]
        click.echo("Detailed information for Meiju - %s" % name)
        for (season_id, season_inst) in meiju_inst.season_id_inst_dict.items():
            output = "Season %d [" % season_id
            for (episode_id, episode_inst) in season_inst.episode_id_inst_dict.items():
                output += "Ep%d, " % episode_id
            output += "]"
            click.echo(output)
    else:
        click.echo("Failed to find any Meiju named %s" % name)
    return

def download(name, season, episode, path):
    collector = Collector()
    if collector.is_meiju_info_file_exist():
        collector.read_all_meiju_info_from_file()
    else:
        collector.save_all_meiju_info()
        collector.write_all_meiju_info_to_file()
    if name in collector.meiju_ename_inst_dict:
        if int(season) == 0 and int(episode) == 0:
            downloader = Downloader()
            downloader.download_meiju(collector, name, path)
        elif int(season) != 0 and int(episode) == 0:
            downloader = Downloader()
            downloader.download_meiju_season(collector, name, int(season), path)
        elif int(season) != 0 and int(episode) != 0:
            downloader = Downloader()
            downloader.download_meiju_episode(collector, name, int(season), int(episode), path)
    else:
        click.echo("Failed to find Meiju named %s" % name)
    return