def __init__(self, app, folder, report_path):
    """Resolve the app name / package id pair, record the work paths,
    reset the report output folder and initialize the dump state.

    `app` may be either a friendly application name or a package id
    (detected by the presence of a dot).
    """
    # Callers pass either "com.vendor.app" (package id) or "appname".
    if '.' in app:
        self.app_id = app
        self.app = Utils.find_app_name(app)
    else:
        self.app_id = Utils.find_package(app)
        self.app = app
    # Dump locations are discovered later by initialize_dumps().
    self.internal_path = None
    self.external_path = None
    self.folder = folder
    self.report_path = report_path
    # Always start this run with a clean report folder.
    Utils.remove_folder(self.report_path)
    self.initialize_dumps()
def process_by_datasource(self, dataSource):
    """Analyze one Autopsy data source and feed every generated report
    to self.process_report.

    Three input shapes are supported, in priority order:
      1. method_importfile: pre-generated Report.json files in the source;
      2. app dumps packed as *_internal.tar.gz / *_external.tar.gz;
      3. a mounted filesystem image containing /data/data/<app_id> trees.

    Returns IngestModule.ProcessResult.OK in all cases.
    """
    # Since we may re-run ingest for the same datasource, remove the
    # previous output first — but only this datasource's folder.
    temp_directory = os.path.join(self.temp_module_path, dataSource.getName().replace(":","_"))
    Utils.remove_folder(temp_directory)
    Utils.check_and_generate_folder(self.temp_module_path)
    self.progressJob.change_text("Analyzing Information for {}".format(dataSource.getName()))
    # Mode 1: import previously generated JSON reports.
    if self.method == "method_importfile":
        json_report = "Report.json"
        # Tracks how many reports were seen per app (count, int).
        reports_by_app = {}
        # Find all Report.json files in the data source.
        json_reports = self.fileManager.findFiles(dataSource, json_report)
        for report in json_reports:
            # The report header carries the app id it belongs to.
            info = Utils.read_json(report.getLocalPath())
            app_id = info["header"]["app_id"]
            self.progressJob.next_job("Processing report {} ".format(app_id))
            # Multiple JSON files may exist per app — number them 1..n.
            if not reports_by_app.get(app_id):
                reports_by_app[app_id] = 1
            else:
                reports_by_app[app_id] += 1
            # Output folder for this particular report: <tmp>/<app>/<n>.
            report_folder_path = os.path.join(temp_directory, app_id, str(reports_by_app[app_id]))
            Utils.check_and_generate_folder(report_folder_path)
            # Copy the JSON report into the output folder.
            report_location = os.path.join(report_folder_path, "Report.json")
            copyfile(report.getLocalPath(), report_location)
            item = {}
            item["report"] = report_location
            item["file"] = report
            item["app"] = Utils.find_app_name(app_id)
            self.process_report(item, dataSource)
    # Modes 2 and 3: no pre-built JSON report available.
    else:
        # Tracks the dump folders already handled per app (list of paths).
        reports_by_app = {}
        # Look for packed app dumps anywhere in the datasource.
        internal = "%_internal.tar.gz"
        external = "%_external.tar.gz"
        dumps = []
        dumps.extend(self.fileManager.findFiles(dataSource, internal))
        dumps.extend(self.fileManager.findFiles(dataSource, external))
        # Dumps found: the datasource is a set of tarballs, not a mount path.
        if dumps:
            for base in dumps:
                # The file name prefix (before the tarball suffix) is the app id.
                app_id = base.getName().replace('_internal.tar.gz', '').replace('_external.tar.gz','')
                self.progressJob.next_job("Processing report {} ".format(app_id))
                # First dump seen for this app.
                if not reports_by_app.get(app_id):
                    reports_by_app[app_id] = []
                # internal+external tarballs share a folder: an app can have
                # several dumps, but the same folder must not be added twice.
                base_path = os.path.dirname(base.getLocalPath())
                if base_path in reports_by_app[app_id]:
                    continue
                reports_by_app[app_id].append(base_path)
                # Numbered report folder per dump: <tmp>/<app>/<n>.
                report_folder_path = os.path.join(temp_directory, app_id, str(len(reports_by_app[app_id])))
                Utils.check_and_generate_folder(report_folder_path)
                self.progressJob.change_text("Analyzing Information for {} ({})".format(dataSource.getName(), app_id))
                # Analyze the dump folder and generate Report.json in place.
                analyzer = Analyzer(app_id, base_path, report_folder_path)
                analyzer.generate_report()
                # Location of the freshly generated JSON report.
                report_location = os.path.join(report_folder_path, "Report.json")
                item = {}
                item["report"] = report_location
                item["file"] = base
                item["app"] = Utils.find_app_name(app_id)
                self.process_report(item, dataSource)
        # Mode 3: treat the datasource as a mounted filesystem.
        else:
            base_path = None
            base = None
            # Hack to learn the datasource's real path: only files expose a
            # local path (folders don't), so scan files until one under
            # /data/data/ reveals the mount root.
            files = self.fileManager.findFiles(dataSource, "%")
            for x in files:
                # Keep the first file as the artifact anchor for the
                # logical fileset.
                if not base:
                    base = x
                # A file whose parent path contains /data/data/ pins down
                # the mount root.
                if x.getLocalPath() and '/data/data/' in x.getParentPath():
                    # Normalize slashes across operating systems.
                    local = Utils.replace_slash_platform(x.getLocalPath())
                    if Utils.get_platform().startswith("windows"):
                        base_path = local.split("\\data\\data\\")[0]
                    else:
                        base_path = local.split("/data/data/")[0]
                    # Mount root found; stop scanning.
                    break
            # Only proceed if the mount root was discovered.
            if base_path:
                # Check every supported app for data under the mount.
                for app_id in Utils.get_all_packages().values():
                    if os.path.exists(os.path.join(base_path, "data", "data", app_id)):
                        # A mount yields exactly one report per app.
                        report_number = 1
                        report_folder_path = os.path.join(temp_directory, app_id, str(report_number)) #report path
                        Utils.check_and_generate_folder(report_folder_path)
                        self.progressJob.change_text("Analyzing Information for {} ({})".format(dataSource.getName(), app_id))
                        # Analyze the mounted app data folder.
                        analyzer = Analyzer(app_id, base_path, report_folder_path)
                        analyzer.generate_report()
                        report_location = os.path.join(report_folder_path, "Report.json")
                        item = {}
                        item["report"] = report_location
                        item["file"] = base
                        item["app"] = Utils.find_app_name(app_id)
                        self.process_report(item, dataSource)
    # After all reports, signal success to the ingest pipeline.
    return IngestModule.ProcessResult.OK
def start(args):
    """Command-line entry point: analyze dumps for each requested app and
    optionally generate HTML reports plus an aggregated index.

    `args` is an argparse-style namespace providing: app (list of app
    names or package ids), dump (list of dump folder names), path
    (mount path or None), adb (bool), output (output folder or falsy),
    html (bool).
    """
    Utils.setup_custom_logger()
    Utils.set_env()
    logging.info("Starting")
    extract = Extract()
    # Default the output folder to <base>/report when none was given.
    if not args.output:
        args.output = os.path.join(Utils.get_base_path_folder(), "report")
    # Best-effort removal of the previous index.html (may not exist).
    try:
        os.remove(os.path.join(args.output, "index.html"))
    except OSError:
        pass
    # Best-effort removal of previously generated assets.
    try:
        Utils.remove_folder(os.path.join(args.output, "assets"))
    except Exception:
        pass
    # Report summaries collected for the HTML index.
    reports = []
    for app in args.app:
        folders = []
        # Accept both <appname> and <com.app.id>:
        #   app    -> friendly application name
        #   app_id -> package id
        if '.' in app:
            # BUGFIX: capture the package id BEFORE overwriting `app`;
            # previously `app_id` was assigned after `app` had already been
            # replaced by the friendly name, so app_id held the wrong value.
            app_id = app
            app = Utils.find_app_name(app)
        else:
            app_id = Utils.find_package(app)
        # Per-app output folder; clean it before this run.
        app_report_base = os.path.join(args.output, app_id)
        Utils.remove_folder(app_report_base)
        if args.dump:
            # Each named dump lives under <base>/dumps/<name>.
            for dump in args.dump:
                dump_path = os.path.join(Utils.get_base_path_folder(), "dumps", dump)
                if os.path.exists(dump_path):
                    for folder in os.listdir(dump_path):
                        # Every subdirectory is one device dump to analyze.
                        if os.path.isdir(os.path.join(dump_path, folder)):
                            folders.append(os.path.join(dump_path, folder))
                        # Loose .tar.gz in the base folder: analyze the base
                        # folder itself, but add it only once.
                        elif '.tar.gz' in folder and dump_path not in folders:
                            folders.append(dump_path)
                else:
                    logging.warning("Invalid dump name: {}. Ignoring".format(dump))
        # A mount path can be used directly as input.
        if args.path:
            folders.append(args.path)
        # Contents can also be pulled over adb — one folder per device.
        if args.adb:
            for serial, folder in extract.dump_from_adb(app_id).items():
                folders.append(folder)
        # Analyze each collected folder; every dump gets its own numbered
        # report subfolder under the app's output folder.
        for index, folder in enumerate(folders, start=1):
            report_path = os.path.join(app_report_base, str(index))
            analyzer = Analyzer(app_id, folder, report_path)
            report = analyzer.generate_report()
            # Individual HTML report plus a summary entry for the index.
            if args.html and report:
                analyzer.generate_html_report(report, report_path)
                reports.append(
                    analyzer.generate_report_summary(report, str(index)))
    # Aggregated HTML index, only when at least one report was produced.
    if args.html and reports:
        item = {}
        item["reports"] = reports
        analyzer.generate_html_index(item, args.output)
    logging.info("Done")