def get_videos_publish(self):
    logging.info("Getting published videos")
    videos = []

    base_path = os.path.join(self.internal_cache_path, "cache", "aweme_publish")
    aweme_publish_files = os.listdir(base_path)

    for aweme_file in aweme_publish_files:
        dump = Utils.read_json(os.path.join(base_path, aweme_file))
        aweme_list = dump.get("aweme_list")
        if aweme_list:
            for entry in aweme_list:
                video = {}
                video["created_time"] = entry.get("create_time")
                video["video"] = str(entry.get("video"))  # .get("animated_cover").get("url_list")[0]

                timeline_event = {}
                timeline_event["url"] = video["video"]
                self.timeline.add(video["created_time"], "publish", timeline_event)

                videos.append(video)

    logging.info("{} video(s) found".format(len(videos)))
    return videos
def generateReport(self, baseReportDir, progressBar):
    logging.info("Starting Report Module")
    progressBar.setIndeterminate(True)
    self.fileManager = Case.getCurrentCase().getServices().getFileManager()

    progressBar.updateStatusLabel("Finding source data")
    self.tempDirectory = os.path.join(Case.getCurrentCase().getModulesOutputDirAbsPath(), "AndroidForensics")

    if not os.path.exists(self.tempDirectory):
        progressBar.complete(ReportStatus.ERROR)
        progressBar.updateStatusLabel("Run Ingest Module first!")
        return

    progressBar.updateStatusLabel("Creating report")

    os.environ["CASE_NAME"] = Case.getCurrentCase().getName()
    os.environ["CASE_NUMBER"] = Case.getCurrentCase().getNumber()
    os.environ["EXAMINER"] = Case.getCurrentCase().getExaminer()

    reports = {}
    reports["reports"] = []

    for fileset in os.listdir(self.tempDirectory):
        fileset_path = os.path.join(self.tempDirectory, fileset)
        for app_id in os.listdir(fileset_path):
            app_path = os.path.join(fileset_path, app_id)
            for app_report in os.listdir(app_path):
                report = os.path.join(app_path, app_report, "Report.json")
                if os.path.exists(report):
                    report_content = Utils.read_json(report)
                    report_path = Analyzer.generate_html_report(report_content, os.path.join(app_path, app_report))
                    Case.getCurrentCase().addReport(report_path, "Report", "Forensics Report")
                    reports["reports"].append(Analyzer.generate_report_summary(report_content, app_report, fileset=fileset))

    if len(reports["reports"]) == 0:
        progressBar.complete(ReportStatus.ERROR)
        progressBar.updateStatusLabel("Nothing to report!")
        return

    report_file_path = Analyzer.generate_html_index(reports, baseReportDir)
    Case.getCurrentCase().addReport(report_file_path, "Report", "Forensics Report")

    progressBar.updateStatusLabel("Done")
    progressBar.complete(ReportStatus.COMPLETE)
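# The nested loops above expect the ingest module's output laid out as
# <ModulesOutput>/AndroidForensics/<fileset>/<app_id>/<report_number>/Report.json,
# where <fileset> is the datasource folder and <report_number> counts multiple
# reports for the same app. Illustrative example only (names are placeholders,
# not from a real case):
#
#   AndroidForensics/
#       phone_image_1/
#           com.zhiliaoapp.musically/
#               1/
#                   Report.json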
def get_videos_publish(self):
    logging.info("Getting published videos")
    videos = []

    base_path = os.path.join(self.internal_cache_path, "cache", "aweme_publish")
    if not os.path.exists(base_path):
        return videos

    aweme_publish_files = os.listdir(base_path)

    for aweme_file in aweme_publish_files:
        dump = Utils.read_json(os.path.join(base_path, aweme_file))
        aweme_list = dump.get("aweme_list")
        if aweme_list:
            for entry in aweme_list:
                video = {}
                video["created_time"] = entry.get("create_time")
                video["video"] = ""
                video["duration"] = ""
                video["cover"] = ""
                video["api_address"] = ""

                if entry.get("video"):
                    if entry.get("video").get("animated_cover"):
                        video["video"] = entry.get("video").get("animated_cover").get("url_list")[0]
                    else:
                        video["video"] = str(entry)

                    video["duration"] = entry.get("video").get("duration")

                    try:
                        video["cover"] = str(entry.get("video").get("cover").get("url_list")[0])
                    except Exception:
                        pass

                    try:
                        video["api_address"] = entry.get("video").get("play_addr").get("url_list")[-1]
                    except Exception:
                        pass

                video["share_url"] = entry.get("share_url")
                video["music"] = entry.get("music").get("play_url").get("url_list")[0]

                timeline_event = {}
                timeline_event["url"] = video["video"]
                self.timeline.add(video["created_time"], "AF_publish", timeline_event)

                videos.append(video)

    logging.info("{} video(s) found".format(len(videos)))
    return videos
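# Illustrative sketch (not taken from a real device dump): a minimal "aweme_publish"
# cache entry containing only the keys get_videos_publish() reads. The field names
# follow the lookups above; every URL value is a placeholder.
SAMPLE_AWEME_PUBLISH = {
    "aweme_list": [
        {
            "create_time": 1590000000,
            "share_url": "https://example.com/share/123",
            "video": {
                "duration": 15000,
                "animated_cover": {"url_list": ["https://example.com/animated_cover.webp"]},
                "cover": {"url_list": ["https://example.com/cover.jpeg"]},
                "play_addr": {"url_list": ["https://example.com/play/cdn1", "https://example.com/play/cdn2"]},
            },
            "music": {"play_url": {"url_list": ["https://example.com/music.mp3"]}},
        }
    ]
}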
def process_report(self, datasource_name, file, report_number, path):
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    # Call the functions that index the report's artifacts
    data = Utils.read_json(path)

    self.process_messages(data.get("messages"), file)
    self.process_user_photos(data.get("user_photos"), file)
    self.process_bio_changes(data.get("bio_changes"), file)
    self.process_user_matches(data.get("matches"), file)
    self.process_credit_cards(data.get("credit_cards"), file)
    self.process_locations(data.get("locations"), file)
    self.process_drp(data.get("sqlparse"), file)
    self.process_undark(data.get("freespace"), file)
def process_report(self, datasource_name, file, report_number, path):
    # Check if the user pressed cancel while we were busy
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK

    data = Utils.read_json(path)
    self.uid = data.get("profile").get("uid")

    self.process_messages(data.get("messages"), file)
    self.process_user_profile(data.get("profile"), file)
    self.process_users(data.get("users"), file)
    self.process_searches(data.get("searches"), file)
    self.process_undark(data.get("freespace"), file)
    self.process_drp(data.get("sqlparse"), file)
    # self.process_videos(data.get("videos"), report_number, file, os.path.dirname(path), datasource_name)
    self.process_logs(data.get("log"), file)
    self.process_published_videos(data.get("published_videos"), file)
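# Both process_report() variants above assume Utils.read_json() returns the parsed
# Report.json as a dict. A minimal sketch of such a helper, assuming it simply wraps
# json.load and swallows read/parse errors (this is not the project's actual
# implementation):
import json

def read_json_sketch(path):
    # Read and parse a JSON file, returning an empty dict when the file is missing or invalid
    try:
        with open(path, "r") as handle:
            return json.load(handle)
    except (IOError, ValueError):
        return {}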
def generateReport(self, baseReportDir, progressBar):
    logging.info("Starting Report Module")
    progressBar.setIndeterminate(True)
    self.fileManager = Case.getCurrentCase().getServices().getFileManager()

    progressBar.updateStatusLabel("Finding source data")
    self.tempDirectory = os.path.join(Case.getCurrentCase().getModulesOutputDirAbsPath(), "FAMA")

    if not os.path.exists(self.tempDirectory):
        progressBar.complete(ReportStatus.ERROR)
        progressBar.updateStatusLabel("Run Ingest Module first!")
        return

    progressBar.updateStatusLabel("Creating report")

    os.environ["CASE_NAME"] = Case.getCurrentCase().getName()
    os.environ["CASE_NUMBER"] = Case.getCurrentCase().getNumber()
    os.environ["EXAMINER"] = Case.getCurrentCase().getExaminer()

    reports = {}
    reports["reports"] = []

    # Android Analyzer smart report
    for fileset in os.listdir(self.tempDirectory):
        fileset_path = os.path.join(self.tempDirectory, fileset)
        for app_id in os.listdir(fileset_path):
            app_path = os.path.join(fileset_path, app_id)
            for app_report in os.listdir(app_path):
                report = os.path.join(app_path, app_report, "Report.json")
                if os.path.exists(report):
                    report_content = Utils.read_json(report)
                    report_path = Analyzer.generate_html_report(report_content, os.path.join(app_path, app_report))
                    Case.getCurrentCase().addReport(report_path, "Report", "Forensics Report")
                    reports["reports"].append(Analyzer.generate_report_summary(report_content, app_report, fileset=fileset))

    # Classic report built from blackboard artifacts when no smart report was generated
    if len(reports["reports"]) == 0:
        report = {}
        report["header"] = {
            "report_name": "Generated Report",
            "report_date": int(time.time()) * 1000,
            "app_name": "Generic",
            "app_id": "Generated Report"
        }

        has_row = False

        for artifact in PsyUtils.get_artifacts_list():
            artifact_name = artifact.getDisplayName()
            report[artifact_name] = []

            command = "WHERE (blackboard_artifacts.artifact_type_id = '{}')".format(artifact.getTypeID())
            rows = Case.getCurrentCase().getSleuthkitCase().getMatchingArtifacts(command)

            for row in rows:
                has_row = True
                item = {}
                atts = row.getAttributes()
                for att in atts:
                    item[att.getAttributeTypeDisplayName()] = str(att.getDisplayString().encode('utf-8', 'ignore'))

                report[artifact_name].append(item)

        if not has_row:
            progressBar.complete(ReportStatus.ERROR)
            progressBar.updateStatusLabel("Nothing to report!")
            return

        report_path = os.path.join(baseReportDir, report["header"]["app_id"], "Generic")
        reporthtml = Analyzer.generate_html_report(report, report_path)
        Case.getCurrentCase().addReport(reporthtml, "Report", "Forensics Report")
        reports["reports"].append(Analyzer.generate_report_summary(report, "Generic"))

    report_file_path = Analyzer.generate_html_index(reports, baseReportDir)
    Case.getCurrentCase().addReport(report_file_path, "Report", "Forensics Report")

    progressBar.updateStatusLabel("Done")
    progressBar.complete(ReportStatus.COMPLETE)
def process_by_datasource(self, dataSource):
    # Since we are re-running ingest for the same datasource, remove its output folder first (only for this datasource!)
    temp_directory = os.path.join(self.temp_module_path, dataSource.getName().replace(":", "_"))
    Utils.remove_folder(temp_directory)
    Utils.check_and_generate_folder(self.temp_module_path)

    self.progressJob.change_text("Analyzing Information for {}".format(dataSource.getName()))

    # Import a previously generated json report (or other data)
    if self.method == "method_importfile":
        json_report = "Report.json"
        reports_by_app = {}

        # Find all Report.json files in the data source
        json_reports = self.fileManager.findFiles(dataSource, json_report)

        # Process every json report found in the datasource
        for report in json_reports:
            # Get the app id of the json report
            info = Utils.read_json(report.getLocalPath())
            app_id = info["header"]["app_id"]
            self.progressJob.next_job("Processing report {} ".format(app_id))

            # Since we can have multiple json files for multiple apps, track how many reports exist for each app
            if not reports_by_app.get(app_id):
                reports_by_app[app_id] = 1
            else:
                reports_by_app[app_id] += 1

            # Path for every report
            report_folder_path = os.path.join(temp_directory, app_id, str(reports_by_app[app_id]))
            Utils.check_and_generate_folder(report_folder_path)

            # Copy the json report to the output folder
            report_location = os.path.join(report_folder_path, "Report.json")
            copyfile(report.getLocalPath(), report_location)

            item = {}
            item["report"] = report_location
            item["file"] = report
            item["app"] = Utils.find_app_name(app_id)
            self.process_report(item, dataSource)

    # Not using a json report
    else:
        reports_by_app = {}

        # Find all dumps in the datasource
        internal = "%_internal.tar.gz"
        external = "%_external.tar.gz"
        dumps = []
        dumps.extend(self.fileManager.findFiles(dataSource, internal))
        dumps.extend(self.fileManager.findFiles(dataSource, external))

        # Dumps were found, so the datasource is not a mounted path
        if dumps:
            # Check every dump
            for base in dumps:
                # Get the app id of the dump
                app_id = base.getName().replace('_internal.tar.gz', '').replace('_external.tar.gz', '')
                self.progressJob.next_job("Processing report {} ".format(app_id))

                # No reports for this app yet
                if not reports_by_app.get(app_id):
                    reports_by_app[app_id] = []

                # We can have multiple dumps for the same app; this ensures we don't add the same folder twice
                base_path = os.path.dirname(base.getLocalPath())
                if base_path in reports_by_app[app_id]:
                    continue

                # Add the folder to the list of reports for this app
                reports_by_app[app_id].append(base_path)

                # Multiple dumps per app, so the report folder is numbered by the report count
                report_folder_path = os.path.join(temp_directory, app_id, str(len(reports_by_app[app_id])))
                Utils.check_and_generate_folder(report_folder_path)

                self.progressJob.change_text("Analyzing Information for {} ({})".format(dataSource.getName(), app_id))

                # Analyze the dump and generate the report
                analyzer = Analyzer(app_id, base_path, report_folder_path)
                analyzer.generate_report()

                # Location of the generated json report
                report_location = os.path.join(report_folder_path, "Report.json")

                # Add to the reports list
                item = {}
                item["report"] = report_location
                item["file"] = base
                item["app"] = Utils.find_app_name(app_id)
                self.process_report(item, dataSource)
        else:
            base_path = None
            base = None

            # Little hack to find the datasource's real path:
            # only files expose a local path (folders don't), so search every file until one reveals it
            files = self.fileManager.findFiles(dataSource, "%")
            for x in files:
                # Artifacts must be attached to a file, so keep the first one as a reference
                if not base:
                    base = x

                # The script needs files inside /data/data/.
                # Once we find a file under that path, we have the base path.
                if x.getLocalPath() and '/data/data/' in x.getParentPath():
                    # Normalize slashes across operating systems
                    local = Utils.replace_slash_platform(x.getLocalPath())
                    if Utils.get_platform().startswith("windows"):
                        base_path = local.split("\\data\\data\\")[0]
                    else:
                        base_path = local.split("/data/data/")[0]
                    # We already have the base folder, stop searching
                    break

            # If we have the base folder
            if base_path:
                # For all supported apps
                for app_id in Utils.get_all_packages().values():
                    # If app data exists in the mount
                    if os.path.exists(os.path.join(base_path, "data", "data", app_id)):
                        # Create the report folder
                        report_number = 1
                        report_folder_path = os.path.join(temp_directory, app_id, str(report_number))
                        Utils.check_and_generate_folder(report_folder_path)

                        self.progressJob.change_text("Analyzing Information for {} ({})".format(dataSource.getName(), app_id))

                        # Analyze the mounted folder
                        analyzer = Analyzer(app_id, base_path, report_folder_path)
                        analyzer.generate_report()

                        # Generated report location
                        report_location = os.path.join(report_folder_path, "Report.json")

                        item = {}
                        item["report"] = report_location
                        item["file"] = base
                        item["app"] = Utils.find_app_name(app_id)
                        self.process_report(item, dataSource)

    # After all reports, post a message to the ingest inbox.
    return IngestModule.ProcessResult.OK
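# Illustrative sketch of the base-path detection used above: given the local path of any
# file under data/data/, splitting on the platform-specific "data/data" separator yields
# the root of the mounted filesystem. The paths in the examples are made up.
def find_base_path_sketch(local_path, is_windows):
    # Split a file path at the data/data directory to recover the mount root
    separator = "\\data\\data\\" if is_windows else "/data/data/"
    return local_path.split(separator)[0]

# e.g. "/mnt/image/data/data/com.zhiliaoapp.musically/cache/x" -> "/mnt/image"
# e.g. "C:\\dumps\\image\\data\\data\\com.zhiliaoapp.musically\\cache\\x" -> "C:\\dumps\\image"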