def dump_from_adb(self, app_package):
    """Pull internal and external data of `app_package` from every device.

    Returns a dict mapping each device serial number to the folder where
    that device's dumps were written.
    """
    folders = {}
    Utils.check_and_generate_folder(self.path_dump_folder)
    for serial_number in DeviceCommunication.list_devices():
        # One sub-folder per device; unsafe filename characters become "_".
        device_folder = os.path.join(
            self.path_dump_folder,
            Utils.clean_invalid_filename(serial_number, character="_"))
        Utils.check_and_generate_folder(device_folder)

        # Internal (root) data dump.
        logging.info("[{}] Extracting internal {} (root) data!".format(
            serial_number, app_package))
        internal_target = os.path.join(
            device_folder, self.internal_data_dump_name.format(app_package))
        self.extract_from_device(serial_number,
                                 self.internal_data_path.format(app_package),
                                 internal_target)

        # External (sdcard) data dump.
        logging.info("[{}] Extracting external {} data!".format(
            serial_number, app_package))
        external_target = os.path.join(
            device_folder, self.external_data_dump_name.format(app_package))
        self.extract_from_device(serial_number,
                                 self.external_data_path.format(app_package),
                                 external_target)

        folders[serial_number] = device_folder
    return folders
def generate_html_report(reports, report_path):
    """Render the analysis report as an HTML bundle under `report_path`.

    Copies the HTML template tree, injects case metadata from the
    environment into the report header, and writes the report data as a
    JavaScript file consumed by the template.

    Returns the path to the generated report.html file.
    """
    logging.info("Generating HTML report")
    Utils.copy_tree(os.path.join(Utils.get_base_path_folder(), "template"),
                    report_path)
    report_file_path = os.path.join(report_path, "report.html")
    try:
        # The template ships an index.html meant only for the index page;
        # it must not appear inside an individual report folder.
        os.remove(os.path.join(report_path, "index.html"))
    except OSError:
        pass  # already absent -- nothing to clean up
    if not reports.get("header"):
        reports["header"] = {}
    # Case metadata is exported by the report module via environment vars.
    reports["header"]["case_name"] = os.environ.get("CASE_NAME")
    reports["header"]["case_number"] = os.environ.get("CASE_NUMBER")
    reports["header"]["examiner"] = os.environ.get("EXAMINER")
    js_code = "var reportData = " + json.dumps(reports, indent=2)
    assets_folder = os.path.join(report_path, "assets")
    Utils.check_and_generate_folder(assets_folder)
    # "with" guarantees the handle is closed even if the write fails.
    with open(os.path.join(assets_folder, "Report.js"), "w") as handler:
        handler.write(js_code)
    return report_file_path
def set_header(self):
    """Reset the report dict and fill its header with app metadata."""
    header = {
        "report_name": Utils.get_current_time(),
        "report_date": Utils.get_current_millis(),
        "app_name": self.app_name,
        "app_id": self.app_id,
    }
    self.report = {"header": header}
def add(self, path):
    """Register a media item; the type is resolved only for paths that
    exist on disk or look like URLs, otherwise it stays "unknown"."""
    entry = {"path": path, "type": "unknown"}
    reachable = os.path.exists(path) or Utils.is_url(path)
    if reachable:
        entry["type"] = Utils.get_media_type(path)
    self.media.append(entry)
def __init__(self):
    """Prepare the ADB dump command templates and the on-disk session folder."""
    # Remote paths on the device; {} is filled with the app package id.
    self.internal_data_path = "/data/data/{}"
    self.external_data_path = "/sdcard/Android/data/{}"
    # Local archive names; {} is filled with the app package id.
    self.internal_data_dump_name = "{}_internal.tar.gz"
    self.external_data_dump_name = "{}_external.tar.gz"
    #Dump internal data https://android.stackexchange.com/questions/85564/need-one-line-adb-shell-su-push-pull-to-access-data-from-windows-batch-file
    # NOTE(review): the double quote opened after "shell" is never closed
    # inside this command string -- confirm the command works as intended.
    self.check_root_command = """{} -s {} shell "su -c 'echo HASROOT'"""
    # Tar the app folder on-device, base64 it over adb, decode locally.
    # Placeholders: adb binary, serial, remote folder, local base64 binary.
    self.magic_root_command = """{} -s {} shell "su -c 'cd {} && tar czf - ./ --exclude='./files'| base64' 2>/dev/null" | {} -d"""
    self.magic_noroot_command = """{} -s {} shell "cd {} && tar czf - ./ --exclude='./files'| base64 2>/dev/null" | {} -d"""
    if not (
            Utils.get_platform().startswith("windows")
            or Utils.get_platform().startswith("darwin")
    ):  #some linux versions doesn't output if contains errors, so we ignore it. but base64 for windows doesn't have this attribute
        self.magic_root_command += "i"  #add -i flag to base64 decode to avoid some encoding issues
        self.magic_noroot_command += "i"  #add -i flag to base64 decode to avoid some encoding issues
    self.adb_location = Utils.get_adb_location()
    self.base64_location = Utils.get_base64_location()
    # All dumps of this session live under <base>/dumps/<current time>/.
    self.dumps_path = os.path.join(Utils.get_base_path_folder(), "dumps")
    Utils.check_and_generate_folder(self.dumps_path)
    self.path_dump_folder = os.path.join(self.dumps_path,
                                         Utils.get_current_time())
def extract_dumps(self, serial, folder):
    """Unpack the tar.gz dumps found in `folder` into an emulated Android
    data layout.

    Internal archives land under data/data/<package>, external ones under
    data/media/0/Android/data/<package>.  Returns the data root folder.
    """
    archives = os.listdir(folder)
    self.internal_path = os.path.join(folder, "data", "data")
    self.external_path = os.path.join(folder, "data", "media", "0",
                                      "Android", "data")
    Utils.check_and_generate_folder(self.internal_path)
    Utils.check_and_generate_folder(self.external_path)
    # Report progress through whichever progress object this run uses.
    if self.dsprocessor:
        self.progress.setProgressText(
            ' Handling extracted data from {}.\n Please wait.'.format(
                serial))
    else:
        self.progress.progress(
            'Handling extracted data from {}'.format(serial))
    for name in archives:
        if '_internal.tar.gz' in name:
            destination = os.path.join(
                self.internal_path, name.replace('_internal.tar.gz', ''))
        elif '_external.tar.gz' in name:
            destination = os.path.join(
                self.external_path, name.replace('_external.tar.gz', ''))
        else:
            continue  # not one of our archives
        Utils.extract_tar(os.path.join(folder, name), destination)
    return os.path.join(folder, "data")
def initComponents(self):
    """Build the Swing settings panel: title, two info labels, the
    analysis-method radio buttons and one checkbox per supported app."""
    self.apps_checkboxes_list = []
    self.setLayout(BoxLayout(self, BoxLayout.PAGE_AXIS))
    # title
    self.p_title = SettingsUtils.createPanel()
    self.lb_title = JLabel("Android Forensics")
    self.lb_title.setFont(self.lb_title.getFont().deriveFont(Font.BOLD, 11))
    self.p_title.add(self.lb_title)
    self.add(self.p_title)
    # end of title
    # info menu: two empty labels, filled later with status messages
    self.p_info = SettingsUtils.createPanel()
    self.lb_info = JLabel("")
    self.lb_info2 = JLabel("")
    self.p_info.add(self.lb_info)
    self.p_info.add(self.lb_info2)
    self.add(self.p_info)
    # end of info menu
    # method menu: mutually exclusive radio buttons inside one ButtonGroup
    self.p_method = SettingsUtils.createPanel()
    self.bg_method = ButtonGroup()
    self.rb_selectedDatasource = SettingsUtils.createRadioButton(
        "Analyse selected datasource", "method_datasource",
        self.onMethodChange)
    self.rb_importReportFile = SettingsUtils.createRadioButton(
        "Import previous generated report file", "method_importfile",
        self.onMethodChange)
    self.rb_liveExtraction = SettingsUtils.createRadioButton(
        "Live extraction with ADB", "method_adb", self.onMethodChange)
    self.rb_selectedDatasource.setSelected(True)  # default method
    self.bg_method.add(self.rb_selectedDatasource)
    self.bg_method.add(self.rb_importReportFile)
    self.bg_method.add(self.rb_liveExtraction)
    self.p_method.add(JLabel("Analysis method"))
    self.p_method.add(self.rb_selectedDatasource)
    self.p_method.add(self.rb_importReportFile)
    self.p_method.add(self.rb_liveExtraction)
    self.add(self.p_method)
    # end of method menu
    #app checkboxes menu -- one checkbox per known package, sorted by name
    self.p_apps = SettingsUtils.createPanel()
    sorted_items = OrderedDict(sorted(Utils.get_all_packages().items()))
    for app, app_id in sorted_items.iteritems():  #(app, app_id)
        checkbox = SettingsUtils.addApplicationCheckbox(
            app, app_id, self.getSelectedApps)
        self.add(checkbox)
        self.apps_checkboxes_list.append(checkbox)
        self.p_apps.add(checkbox)
    self.add(self.p_apps)
def __init__(self, app, folder, report_path):
    """Resolve the app name/package pair and reset the report folder.

    `app` may be either a package id (contains a dot) or a friendly name.
    """
    if '.' in app:
        # Looks like a package id; resolve the display name from it.
        self.app_id = app
        self.app = Utils.find_app_name(app)
    else:
        self.app = app
        self.app_id = Utils.find_package(app)
    self.internal_path = None
    self.external_path = None
    self.folder = folder
    self.report_path = report_path
    # Start every run from a clean report folder.
    Utils.remove_folder(self.report_path)
    self.initialize_dumps()
def list_devices():
    """Return serial numbers of authorized devices from `adb devices`.

    Unauthorized devices are logged and skipped; the header line and
    blank lines of the adb output are ignored.
    """
    logging.info("Getting list of devices")
    adb_location = Utils.get_adb_location()
    command = """{} devices""".format(adb_location)
    # communicate() reads stdout fully and reaps the child process,
    # avoiding the leaked pipe / zombie left by .stdout.read().
    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
    info, _ = process.communicate()
    devices = []
    for device in info.decode().splitlines():
        if 'devices attached' in device:
            continue  # header line of the adb output
        device_serial = device.split('\t')[0].strip()
        if not device_serial:
            continue  # blank trailing line
        if '\tunauthorized' in device:
            logging.warning(
                "{} unauthorized. Trust this device. Ignoring...".format(
                    device_serial))
            continue
        devices.append(device_serial)
    message = "Found {} device".format(len(devices))
    if (len(devices) != 1):
        message += "s"
    logging.info("{}".format(message))
    return devices
def get_videos_publish(self):
    """Collect published videos from the aweme_publish cache files.

    Returns a list of dicts with creation time and raw video payload;
    each video is also pushed onto the timeline as a "publish" event.
    """
    logging.info("Getting published videos")
    videos = []
    base_path = os.path.join(self.internal_cache_path, "cache",
                             "aweme_publish")
    # BUGFIX: the cache folder is absent when the user never published
    # anything; os.listdir would raise and abort the extraction.
    if not os.path.exists(base_path):
        return videos
    for aweme_file in os.listdir(base_path):
        dump = Utils.read_json(os.path.join(base_path, aweme_file))
        aweme_list = dump.get("aweme_list")
        if not aweme_list:
            continue
        for entry in aweme_list:
            video = {}
            video["created_time"] = entry.get("create_time")
            # Raw payload kept as a string; nested fields are not parsed here.
            video["video"] = str(entry.get("video"))
            timeline_event = {}
            timeline_event["url"] = video["video"]
            self.timeline.add(video["created_time"], "publish",
                              timeline_event)
            videos.append(video)
    logging.info("{} video(s) found".format(len(videos)))
    return videos
def get_videos(self):
    """List cached videos from video.db, dating each entry via the
    Last-Modified header stored with its HTTP response."""
    logging.info("Getting Videos...")
    videos = []
    db = os.path.join(self.internal_cache_path, "databases", "video.db")
    database = Database(db)
    rows = database.execute_query(
        "select key, extra from video_http_header_t")
    for row in rows:
        video = {"key": row[0]}
        headers = json.loads(row[1])
        for line in headers["responseHeaders"].splitlines():
            if 'Last-Modified:' not in line:
                continue
            video["last_modified"] = Utils.date_parser(
                line.split(": ")[1], "%a, %d %b %Y %H:%M:%S %Z")
            self.timeline.add(video["last_modified"], "video",
                              {"video": video["key"]})
            break  # only the first Last-Modified header matters
        self.media.add(
            os.path.join("internal", "cache", "cache", video["key"]))
        videos.append(video)
    logging.info("{} video(s) found".format(len(videos)))
    return videos
def set_shared_preferences(self):
    """Return all XML files located inside a shared_prefs folder
    (both Unix and Windows path separators are handled)."""
    return [
        xmlfile
        for xmlfile in Utils.list_files(self.internal_cache_path, [".xml"])
        if '/shared_prefs/' in xmlfile or '\\shared_prefs\\' in xmlfile
    ]
def get_logged_users(self):
    """Extract every user profile recorded in aweme_user.xml.

    Each `*_significant_user_info` attribute holds a JSON blob describing
    one logged account; a public profile URL is derived from unique_id
    when it is present.
    """
    logging.info("Get User Profile...")
    xml_file = os.path.join(self.internal_cache_path, "shared_prefs",
                            "aweme_user.xml")
    user_profiles = []
    values = Utils.xml_attribute_finder(xml_file)
    for key, value in values.items():
        if not key.endswith("_significant_user_info"):
            continue
        dump = json.loads(value)
        # FIX: "nickname" was listed twice in the original; the duplicate
        # assignment was redundant and has been dropped.
        attributes = [
            "uid", "short_id", "unique_id", "nickname", "avatar_url"
        ]
        user_profile = {}
        for attribute in attributes:
            user_profile[attribute] = dump.get(attribute)
        if user_profile.get("unique_id"):
            user_profile["url"] = "https://www.tiktok.com/@{}".format(
                user_profile["unique_id"])
        user_profiles.append(user_profile)
    return user_profiles
def get_user_profile(self):
    """Extract the active user's profile from aweme_user.xml.

    Reads the `*_aweme_user_info` JSON blob, derives the public profile
    URL from unique_id, and records an AF_user timeline event at the
    account's registration time.
    """
    logging.info("Get User Profile...")
    xml_file = os.path.join(self.internal_cache_path, "shared_prefs",
                            "aweme_user.xml")
    user_profile = {}
    values = Utils.xml_attribute_finder(xml_file)
    for key, value in values.items():
        if key.endswith("_aweme_user_info"):
            dump = json.loads(value)
            attributes = [
                "account_region", "follower_count", "following_count",
                "gender", "google_account", "is_blocked", "is_minor",
                "nickname", "register_time", "sec_uid", "short_id", "uid",
                "unique_id"
            ]
            for attribute in attributes:
                user_profile[attribute] = dump.get(attribute)
            break  # only one user-info blob is expected
    if user_profile.get("unique_id"):
        user_profile["url"] = "https://www.tiktok.com/@{}".format(
            user_profile["unique_id"])
    # BUGFIX: original tested "uniqueid" (missing underscore), so this
    # timeline event was never recorded.
    if user_profile.get("unique_id") and user_profile.get("url"):
        timeline_event = {}
        timeline_event["uniqueid"] = user_profile["unique_id"]
        timeline_event["url"] = user_profile["url"]
        self.timeline.add(user_profile["register_time"], "AF_user",
                          timeline_event)
    return user_profile
def generateReport(self, baseReportDir, progressBar):
    """Generate an HTML report for every Report.json produced by the
    ingest module and register the reports with the current case."""
    logging.info("Starting Report Module")
    progressBar.setIndeterminate(True)
    self.fileManager = Case.getCurrentCase().getServices().getFileManager()
    progressBar.updateStatusLabel("Finding source data")
    self.tempDirectory = os.path.join(
        Case.getCurrentCase().getModulesOutputDirAbsPath(),
        "AndroidForensics")
    if not os.path.exists(self.tempDirectory):
        progressBar.complete(ReportStatus.ERROR)
        progressBar.updateStatusLabel("Run Ingest Module first!")
        return
    progressBar.updateStatusLabel("Creating report")
    # Case metadata is handed to the HTML generator via the environment.
    os.environ["CASE_NAME"] = Case.getCurrentCase().getName()
    os.environ["CASE_NUMBER"] = Case.getCurrentCase().getNumber()
    os.environ["EXAMINER"] = Case.getCurrentCase().getExaminer()
    reports = {}
    reports["reports"] = []
    # Layout: <temp>/<fileset>/<app_id>/<app_report>/Report.json
    for fileset in os.listdir(self.tempDirectory):
        fileset_path = os.path.join(self.tempDirectory, fileset)
        for app_id in os.listdir(fileset_path):
            app_path = os.path.join(fileset_path, app_id)
            for app_report in os.listdir(app_path):
                report = os.path.join(app_path, app_report, "Report.json")
                if os.path.exists(report):
                    report_content = Utils.read_json(report)
                    report_path = Analyzer.generate_html_report(
                        report_content, os.path.join(app_path, app_report))
                    Case.getCurrentCase().addReport(
                        report_path, "Report", "Forensics Report")
                    reports["reports"].append(
                        Analyzer.generate_report_summary(
                            report_content, app_report, fileset=fileset))
    # BUGFIX: the original tested len(reports), which is always 1 because
    # the "reports" key is created above; test the collected list instead.
    if len(reports["reports"]) == 0:
        progressBar.complete(ReportStatus.ERROR)
        progressBar.updateStatusLabel("Nothing to report!")
        return
    report_file_path = Analyzer.generate_html_index(reports, baseReportDir)
    Case.getCurrentCase().addReport(report_file_path, "Report",
                                    "Forensics Report")
    progressBar.updateStatusLabel("Done")
    progressBar.complete(ReportStatus.COMPLETE)
def generate_report(self):
    """Run every artifact extractor and persist the result as Report.json."""
    sections = (
        ("freespace", self.get_undark_db),
        ("sqlparse", self.get_sqlparse),
        ("user_photos", self.get_user_photos),
        ("matches", self.get_user_matches),
        ("bio_changes", self.get_bio_changes),
        ("messages", self.get_user_messages),
        ("credit_cards", self.get_credit_cards),
        ("locations", self.get_locations),
    )
    for name, extractor in sections:
        self.report[name] = extractor()
    # Models aggregate events produced by the extractors above.
    self.report["timeline"] = self.timeline.get_sorted_timeline()
    self.report["tracking"] = self.locations.get_sorted_locations()
    self.report["media"] = self.media.get_media()
    logging.info("Report Generated")
    Utils.save_report(os.path.join(self.report_path, "Report.json"),
                      self.report)
    return self.report
def get_videos_publish(self):
    """Collect published videos (cover, duration, play address, music)
    from the aweme_publish cache files and push each onto the timeline."""
    logging.info("Getting published videos")
    videos = []
    base_path = os.path.join(self.internal_cache_path, "cache",
                             "aweme_publish")
    # Cache folder is absent when the user never published anything.
    if not os.path.exists(base_path):
        return videos
    aweme_publish_files = os.listdir(base_path)
    for aweme_file in aweme_publish_files:
        dump = Utils.read_json(os.path.join(base_path, aweme_file))
        aweme_list = dump.get("aweme_list")
        if not aweme_list:
            continue
        for entry in aweme_list:
            video = {}
            video["created_time"] = entry.get("create_time")
            # Defaults keep the report keys present even when the payload
            # lacks the nested structures handled below.
            video["video"] = ""
            video["duration"] = ""
            video["cover"] = ""
            video["api_address"] = ""
            if entry.get("video"):
                if entry.get("video").get("animated_cover"):
                    video["video"] = entry.get("video").get(
                        "animated_cover").get("url_list")[0]
                else:
                    video["video"] = str(entry)
                video["duration"] = entry.get("video").get("duration")
                try:
                    video["cover"] = str(
                        entry.get("video").get("cover").get("url_list")[0])
                except Exception:
                    pass  # best effort -- cover may be missing
                try:
                    video["api_address"] = entry.get("video").get(
                        "play_addr").get("url_list")[-1]
                except Exception:
                    pass  # best effort -- play address may be missing
            video["share_url"] = entry.get("share_url")
            # BUGFIX: a missing "music" object used to raise AttributeError
            # and abort the whole extraction; treat it as best effort too.
            try:
                video["music"] = entry.get("music").get("play_url").get(
                    "url_list")[0]
            except Exception:
                video["music"] = ""
            timeline_event = {}
            timeline_event["url"] = video["video"]
            self.timeline.add(video["created_time"], "AF_publish",
                              timeline_event)
            videos.append(video)
    logging.info("{} video(s) found".format(len(videos)))
    return videos
def generate_report(self):
    """Run all TikTok extractors and persist the report as Report.json."""
    extractors = (
        ("freespace", self.get_undark_db),
        ("sqlparse", self.get_sqlparse),
        ("profile", self.get_user_profile),
        ("messages", self.get_user_messages),
        ("users", self.get_user_profiles),
        ("searches", self.get_user_searches),
        ("videos", self.get_videos),
        ("published_videos", self.get_videos_publish),
        ("log", self.get_last_session),
    )
    for section, extractor in extractors:
        self.report[section] = extractor()
    # Timeline and media collect events emitted by the extractors above.
    self.report["timeline"] = self.timeline.get_sorted_timeline()
    self.report["media"] = self.media.get_media()
    logging.info("Report Generated")
    Utils.save_report(os.path.join(self.report_path, "Report.json"),
                      self.report)
    return self.report
def generate_report(self):
    """Run all extractors through get_info and persist Report.json."""
    sections = (
        ("freespace", self.get_undark_db),
        ("sqlparse", self.get_sqlparse),
        ("user_photos", self.get_user_photos),
        ("matches", self.get_user_matches),
        ("bio_changes", self.get_bio_changes),
        ("messages", self.get_user_messages),
        ("credit_cards", self.get_credit_cards),
        ("locations", self.get_locations),
    )
    for name, extractor in sections:
        self.report[name] = self.get_info(extractor)
    # Shared models (timeline, locations, media) are merged afterwards.
    self.add_model(self.timeline)
    self.add_model(self.locations)
    self.add_model(self.media)
    logging.info("Report Generated")
    Utils.save_report(os.path.join(self.report_path, "Report.json"),
                      self.report)
    return self.report
def appsBlock(self):
    """Add one application checkbox per known package, sorted by name."""
    packages = OrderedDict(sorted(Utils.get_all_packages().items()))
    for app, app_id in packages.iteritems():
        checkbox = SettingsUtils.addApplicationCheckbox(
            app, app_id, self.updateCheckboxes, visible=True)
        self.apps_checkboxes_list.append(checkbox)
        self.p_apps.add(checkbox)
def __init__(self, settings):
    """Wire the ingest module to the current Autopsy case.

    Routes module logging into the case's autopsy log, keeps the ingest
    settings chosen by the user and prepares the module output folder.
    """
    # Route logging into the case's own autopsy log file.
    Utils.setup_custom_logger(
        os.path.join(Case.getCurrentCase().getLogDirectoryPath(),
                     "autopsy.log.0"))
    self.context = None  # ingest context, assigned later
    self.settings = settings  # options from the ingest settings panel
    self.utils = PsyUtils()  # Autopsy helper methods
    self.fileManager = Case.getCurrentCase().getServices().getFileManager()
    # All module artifacts are written below this folder.
    self.temp_module_path = os.path.join(
        Case.getCurrentCase().getModulesOutputDirAbsPath(),
        "AndroidForensics")
    Utils.check_and_generate_folder(self.temp_module_path)
def generate_report(self):
    """Run all TikTok extractors (via get_info) and write Report.json."""
    sections = (
        ("freespace", self.get_undark_db),
        ("sqlparse", self.get_sqlparse),
        ("profile", self.get_user_profile),
        ("messages", self.get_user_messages),
        ("users", self.get_user_profiles),
        ("logged_users", self.get_logged_users),
        ("searches", self.get_user_searches),
        ("videos", self.get_videos),
        ("published_videos", self.get_videos_publish),
        ("log", self.get_last_session),
        ("cache_images", self.get_fresco_cache),
        ("open_events", self.get_open_events),
    )
    for name, extractor in sections:
        self.report[name] = self.get_info(extractor)
    # Shared models (timeline, media) are merged afterwards.
    self.add_model(self.timeline)
    self.add_model(self.media)
    logging.info("Report Generated")
    Utils.save_report(os.path.join(self.report_path, "Report.json"),
                      self.report)
    return self.report
def get_user_searches(self):
    """Return the list of recent search keywords stored in search.xml.

    A missing file/attribute or malformed JSON yields an empty list.
    """
    logging.info("Getting User Search History...")
    xml_file = os.path.join(self.internal_cache_path, "shared_prefs",
                            "search.xml")
    searches = []
    # The recent_history attribute only exists once a search was made;
    # best effort: any parse/lookup failure simply means "no history".
    # (Bare except narrowed to Exception so KeyboardInterrupt/SystemExit
    # are no longer swallowed.)
    try:
        dump = json.loads(
            Utils.xml_attribute_finder(
                xml_file, "recent_history")["recent_history"])
        for item in dump:
            searches.append(item["keyword"])
    except Exception:
        pass
    logging.info("{} search entrys found".format(len(searches)))
    return searches
def process(self, dataSource, progressBar):
    """Entry point of the ingest job: builds the list of data sources
    (either live-extracted over ADB or the one selected by the user)
    and processes each of them."""
    #Set progressbar to an scale of 100%
    self.progressBar = progressBar
    progressBar.switchToDeterminate(100)
    #Initialize list of possible data sources
    data_sources = []
    max_apps = len(Utils.get_all_packages().values())
    #Extract method for adb selected
    #THIS IS ONLY USED IN <= AUTOPSY 4.16
    if self.method == "method_adb":
        #Get list of selected apps to extract
        self.apps = json.loads(self.settings.getSetting('apps'))
        jobs = max_apps * 3  #extract, analyser, index
        self.progressJob = ProgressJob(progressBar, jobs)
        self.progressJob.next_job("Extracting from ADB")
        logging.info("Starting ADB")
        #Variable used to store all folders for each device
        folders = Extractor(self.apps, DeviceCommunication.list_devices(),
                            self.progressJob, dsprocessor=False).dump_apps()
        # Add one datasource for each device, with the list of the possible folders
        for serial, folders_list in folders.items():
            # Unique name per device and run (epoch-seconds suffix).
            datasource_name = "ADB_{}_{}".format(serial, int(time.time()))
            self.utils.add_to_fileset(datasource_name, folders_list)
            # Add data source to case to be analised
            for case_datasources in Case.getCurrentCase().getDataSources():
                if case_datasources.getName() == datasource_name:
                    data_sources.append(case_datasources)
                    break
        logging.info("Ending ADB")
    # Add the selected files for the datasource (json, dumps or mount case)
    else:
        logging.info("Using Selected Datasource")
        data_sources.append(dataSource)
        self.progressJob = ProgressJob(progressBar,
                                       max_apps * 2)  #indexing and analying
    # For each data source, we will process it each one
    for source in data_sources:
        self.process_by_datasource(source)
    self.progressJob.next_job("Done")
def get_last_session(self):
    """Reconstruct app session activity from the ss_app_log event table.

    Each event keeps only a whitelist of relevant JSON body fields and is
    also pushed onto the timeline as an AF_system event.
    """
    logging.info("Getting last session...")
    session = []
    relevant_keys = [
        "device", "name", "status", "ab_sdk_version",
        "storage_available_internal_size",
        "storage_available_external_size", "app_storage_size", "brand",
        "page", "request_method", "is_first", "duration", "is_first",
        "rip", "duration", "author_id", "access2", "video_duration",
        "video_quality", "access", "page_uid", "previous_page",
        "enter_method", "enter_page", "key_word", "search_keyword",
        "next_tab", "search_type", "play_duration", "content",
        "manufacturer", "os_version"
    ]
    db = os.path.join(self.internal_cache_path, "databases",
                      "ss_app_log.db")
    database = Database(db)
    results = database.execute_query(
        "select tag, ext_json, datetime(timestamp/1000, 'unixepoch', 'localtime'), session_id from event order by timestamp"
    )
    for row in results:
        session_entry = {"action": row[0]}
        # Parse the body before recording anything, matching the original
        # fail-fast behavior on malformed JSON.
        body_dump = json.loads(row[1])
        session_entry["time"] = Utils.date_parser(row[2],
                                                  "%Y-%m-%d %H:%M:%S")
        session_entry["session_id"] = row[3]
        self.timeline.add(session_entry["time"], "AF_system",
                          {"action": session_entry["action"]})
        session.append(session_entry)
        # Keep only the whitelisted fields of the JSON body.
        session_entry["body"] = {
            key: value
            for key, value in body_dump.items() if key in relevant_keys
        }
    logging.info("{} entrys found".format(len(results)))
    return session
def get_undark_output(databases, report_path):
    """Run undark over each database and collect the recovered rows.

    Returns {relative_db_path: [recovered lines]}; databases with no
    recovered content are omitted.
    """
    output = {}
    for name in databases:
        recovered = Utils.run_undark(name).decode().splitlines()
        if recovered:
            # Strip the report folder prefix so paths are report-relative.
            relative_name = os.path.normpath(name.replace(report_path, ""))
            output[relative_name] = recovered
    return output
def process_report(self, datasource_name, file, report_number, path):
    """Index every artifact category of a report JSON into Autopsy."""
    # Bail out quickly if the user cancelled the ingest job.
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK
    data = Utils.read_json(path)
    handlers = (
        (self.process_messages, "messages"),
        (self.process_user_photos, "user_photos"),
        (self.process_bio_changes, "bio_changes"),
        (self.process_user_matches, "matches"),
        (self.process_credit_cards, "credit_cards"),
        (self.process_locations, "locations"),
        (self.process_drp, "sqlparse"),
        (self.process_undark, "freespace"),
    )
    for handler, section in handlers:
        handler(data.get(section), file)
def process_report(self, datasource_name, file, report_number, path):
    """Index every artifact category of a TikTok report into Autopsy."""
    # Check if the user pressed cancel while we were busy
    if self.context.isJobCancelled():
        return IngestModule.ProcessResult.OK
    data = Utils.read_json(path)
    # BUGFIX: a report without a "profile" section used to raise
    # AttributeError on .get("uid"); fall back to an empty dict.
    profile = data.get("profile") or {}
    self.uid = profile.get("uid")
    self.process_messages(data.get("messages"), file)
    self.process_user_profile(data.get("profile"), file)
    self.process_users(data.get("users"), file)
    self.process_searches(data.get("searches"), file)
    self.process_undark(data.get("freespace"), file)
    self.process_drp(data.get("sqlparse"), file)
    #self.process_videos(data.get("videos"), report_number, file, os.path.dirname(path), datasource_name)
    self.process_logs(data.get("log"), file)
    self.process_published_videos(data.get("published_videos"), file)
def __init__(self, internal_path, external_path, report_path, app_name,
             app_id):
    """Prepare the analysis folders, database/prefs listings and the
    report skeleton for one application."""
    self.report_path = report_path
    self.internal_cache_path = internal_path
    self.external_cache_path = external_path
    # Make sure every working folder exists before scanning.
    for folder in (self.report_path, self.internal_cache_path,
                   self.external_cache_path):
        Utils.check_and_generate_folder(folder)
    self.databases = self.set_databases()
    self.shared_preferences = self.set_shared_preferences()
    self.report = {}
    self.app_name = app_name
    self.app_id = app_id
    self.set_header()
def generateReport(self, baseReportDir, progressBar):
    """Generate HTML reports for every Report.json produced by the FAMA
    ingest module; when none exists, fall back to a "classic" report
    built from the case's blackboard artifacts."""
    logging.info("Starting Report Module")
    progressBar.setIndeterminate(True)
    self.fileManager = Case.getCurrentCase().getServices().getFileManager()
    progressBar.updateStatusLabel("Finding source data")
    self.tempDirectory = os.path.join(
        Case.getCurrentCase().getModulesOutputDirAbsPath(), "FAMA")
    if not os.path.exists(self.tempDirectory):
        progressBar.complete(ReportStatus.ERROR)
        progressBar.updateStatusLabel("Run Ingest Module first!")
        return
    progressBar.updateStatusLabel("Creating report")
    # Case metadata is handed to the HTML generator via the environment.
    os.environ["CASE_NAME"] = Case.getCurrentCase().getName()
    os.environ["CASE_NUMBER"] = Case.getCurrentCase().getNumber()
    os.environ["EXAMINER"] = Case.getCurrentCase().getExaminer()
    reports = {}
    reports["reports"] = []
    #Android Analyzer Smart Report
    # Layout: <temp>/<fileset>/<app_id>/<app_report>/Report.json
    for fileset in os.listdir(self.tempDirectory):
        fileset_path = os.path.join(self.tempDirectory, fileset)
        for app_id in os.listdir(fileset_path):
            app_path = os.path.join(fileset_path, app_id)
            for app_report in os.listdir(app_path):
                report = os.path.join(app_path, app_report, "Report.json")
                if os.path.exists(report):
                    report_content = Utils.read_json(report)
                    report_path = Analyzer.generate_html_report(
                        report_content, os.path.join(app_path, app_report))
                    Case.getCurrentCase().addReport(report_path, "Report",
                                                    "Forensics Report")
                    reports["reports"].append(
                        Analyzer.generate_report_summary(
                            report_content, app_report, fileset=fileset))
    #Classic Report: no smart reports found, dump blackboard artifacts
    if len(reports["reports"]) == 0:
        report = {}
        report["header"] = {
            "report_name": "Generated Report",
            "report_date": int(time.time()) * 1000,
            "app_name": "Generic",
            "app_id": "Generated Report"
        }
        has_row = False
        # One report section per artifact type present in the case.
        for artifact in PsyUtils.get_artifacts_list():
            artifact_name = artifact.getDisplayName()
            report[artifact_name] = []
            command = "WHERE (blackboard_artifacts.artifact_type_id = '{}')".format(
                artifact.getTypeID())
            rows = Case.getCurrentCase().getSleuthkitCase(
            ).getMatchingArtifacts(command)
            for row in rows:
                has_row = True
                item = {}
                atts = row.getAttributes()
                for att in atts:
                    item[att.getAttributeTypeDisplayName()] = str(
                        att.getDisplayString().encode('utf-8', 'ignore'))
                report[artifact_name].append(item)
        if not has_row:
            progressBar.complete(ReportStatus.ERROR)
            progressBar.updateStatusLabel("Nothing to report!")
            return
        report_path = os.path.join(baseReportDir,
                                   report["header"]["app_id"], "Generic")
        reporthtml = Analyzer.generate_html_report(report, report_path)
        Case.getCurrentCase().addReport(reporthtml, "Report",
                                        "Forensics Report")
        reports["reports"].append(
            Analyzer.generate_report_summary(report, "Generic"))
    report_file_path = Analyzer.generate_html_index(reports, baseReportDir)
    Case.getCurrentCase().addReport(report_file_path, "Report",
                                    "Forensics Report")
    progressBar.updateStatusLabel("Done")
    progressBar.complete(ReportStatus.COMPLETE)