def main(args):
    """Apply a memory filter to all matching COMPLETED analyses.

    Reads a filter specification (a Python dict literal) from
    ``args.filter_path``, builds a ``MemoryFilter`` from it, then updates
    every matching analysis document in MongoDB with a
    ``memory_analysis_filtered`` field.

    Args:
        args: parsed CLI namespace; uses ``debug``, ``filter_path``,
            ``db_host``, ``machine_type``, ``analysis_script``,
            ``volatility_profile``.
    """
    logging.basicConfig()

    # debug
    if args.debug:
        logger.setLevel(logging.DEBUG)

    if not args.filter_path:
        logger.error(
            "Please specify a file containing the memory filter to be applied."
        )
        # A missing required argument is an error: exit non-zero (was 0).
        sys.exit(1)

    # Read the filter; literal_eval safely parses the dict literal without
    # executing arbitrary code.  `with` guarantees the handle is closed.
    with open(args.filter_path, 'r') as f:
        filter_dict = ast.literal_eval(f.read())
    mem_filter = MemoryFilter(filter_dict)

    import lophi_automation.database.datastore as datastore

    db_uri = 'mongodb://' + args.db_host + ':27017/lophi_db'
    analysis_datastore = datastore.Datastore(db_uri + G.DB_ANALYSES)

    results = analysis_datastore.db.collection.find({
        'status': 'COMPLETED',
        "machine_type": args.machine_type,
        "analysis_script": args.analysis_script,
        "volatility_profile": args.volatility_profile
    })

    # Lazy %-args: formatting only happens if the record is emitted.
    logger.info("Processing %d analyses...", results.count())

    for result in results:
        mem_results = result['memory_analysis']
        filtered_results_dict = mem_filter.apply_filter(mem_results)
        analysis_datastore.db.collection.update(
            {'_id': result['_id']},
            {'$set': {
                'memory_analysis_filtered': filtered_results_dict
            }})
        logger.info("Filtered results for %s", result['_id'])
def main(args):
    """Filter the memory-analysis results of completed analyses.

    Loads a memory-filter dict literal from ``args.filter_path``, applies it
    to each COMPLETED analysis matching the CLI selectors, and writes the
    filtered output back to the document as ``memory_analysis_filtered``.

    Args:
        args: parsed CLI namespace; uses ``debug``, ``filter_path``,
            ``db_host``, ``machine_type``, ``analysis_script``,
            ``volatility_profile``.
    """
    logging.basicConfig()

    # debug
    if args.debug:
        logger.setLevel(logging.DEBUG)

    if not args.filter_path:
        logger.error("Please specify a file containing the memory filter to be applied.")
        # Error path: exit with a failure status (original exited 0).
        sys.exit(1)

    # Open the filter with a context manager so the handle cannot leak;
    # literal_eval parses the dict without executing code.
    with open(args.filter_path, 'r') as f:
        filter_string = f.read()
    filter_dict = ast.literal_eval(filter_string)
    mem_filter = MemoryFilter(filter_dict)

    import lophi_automation.database.datastore as datastore

    db_uri = 'mongodb://' + args.db_host + ':27017/lophi_db'
    analysis_datastore = datastore.Datastore(db_uri + G.DB_ANALYSES)

    results = analysis_datastore.db.collection.find({
        'status': 'COMPLETED',
        "machine_type": args.machine_type,
        "analysis_script": args.analysis_script,
        "volatility_profile": args.volatility_profile
    })

    # Defer string formatting to the logging framework.
    logger.info("Processing %d analyses...", results.count())

    for result in results:
        mem_results = result['memory_analysis']
        filtered_results_dict = mem_filter.apply_filter(mem_results)
        analysis_datastore.db.collection.update(
            {'_id': result['_id']},
            {'$set': {
                'memory_analysis_filtered': filtered_results_dict
            }})
        logger.info("Filtered results for %s", result['_id'])
def main(args):
    """Build a memory filter from completed analyses and save it to disk.

    For every matching COMPLETED analysis in MongoDB, obtains its memory
    diff (reusing a pickled per-analysis cache under ``args.output_path``
    unless ``--rerun`` is set), accumulates it into a ``MemoryFilter``,
    adjusts the filter, and writes the occurrence dictionary to
    ``<output_path>/filter.mem`` as a pretty-printed Python literal.

    Args:
        args: parsed CLI namespace; uses ``debug``, ``output_path``,
            ``db_host``, ``machine_type``, ``analysis_script``,
            ``volatility_profile``, ``sample_id``, ``rerun``.
    """
    logging.basicConfig()

    # debug
    if args.debug:
        logger.setLevel(logging.DEBUG)

    if not args.output_path:
        logger.error("Please specify a filename to save the filters to.")
        # Missing required argument is an error: exit non-zero (was 0).
        sys.exit(1)

    import lophi_automation.database.datastore as datastore

    db_uri = 'mongodb://' + args.db_host + ':27017/lophi_db'
    analysis_datastore = datastore.Datastore(db_uri + G.DB_ANALYSES)

    results = analysis_datastore.db.collection.find({
        'status': 'COMPLETED',
        "machine_type": args.machine_type,
        "analysis_script": args.analysis_script,
        "volatility_profile": args.volatility_profile,
        "sample": args.sample_id
    })

    logger.info("Creating filter from %d analyses...", results.count())

    # Memory
    mem_filter = MemoryFilter()

    # Only tolerate "directory already exists"; a bare except here used to
    # hide real failures (permissions, bad path) until the writes below.
    try:
        os.makedirs(args.output_path)
    except OSError:
        if not os.path.isdir(args.output_path):
            raise

    for result in results:
        logger.info("Memory Filter: Analyzing %s", result['_id'])

        # Per-analysis pickle cache of the memory diff.
        cache_path = os.path.join(args.output_path,
                                  result['_id'] + "_" + 'mem_diff.txt')

        if not args.rerun and os.path.exists(cache_path):
            # Reuse the cached diff; `with` closes the handle even on error.
            with open(cache_path, 'rb') as f:
                diff = pickle.load(f)
        else:
            diff = result['memory_analysis']
            with open(cache_path, 'wb+') as f:
                pickle.dump(diff, f)

        if not diff:
            logger.error("Error processing memory analysis for %s . . . skipping!",
                         result['_id'])
            continue

        mem_filter.add_analysis(diff)

    # adjust the filter
    mem_filter.adjust_filter(1)

    # Write filter to disk as a pretty-printed dict literal (readable back
    # via ast.literal_eval by the filter-application path).
    with open(os.path.join(args.output_path, 'filter.mem'), 'w') as f:
        f.write(pprint.pformat(mem_filter.occurrences_dict))