def do_output(self, record):
    """Emit one parsed MFT record to the selected output (CSV or JSON).

    Also logs a progress message roughly every 20% of records processed.
    """
    if self.output is not None and not self.json:
        self.file_csv.writerow(mft.mft_to_csv(record, False))
    elif self.output is not None and self.json:
        write_to_json(self.header, mft.mft_to_csv(record, False), self.json_writer)
    # Progress step is mftsize / 5 (every ~20%); max(..., 1) prevents a
    # ZeroDivisionError in the modulo when mftsize < 5.
    step = max(self.mftsize / 5, 1)
    if self.num_records % step == 0 and self.num_records > 0:
        self.logger.info('Building MFT: {0:.0f}'.format(
            100.0 * self.num_records / self.mftsize) + '%')
def _json_list_running_process(self, list_running):
    """Append one JSON row per running process (pid, name, command, path).

    :param list_running: iterable of sequences (pid, name, cmd, exe_path, ...)
    """
    self.logger.info("Health : Listing running processes")
    if self.destination == 'local':
        # Bug fix: os.path.join needs directory and filename as separate
        # arguments; the original concatenated them, losing the separator
        # when output_dir has no trailing slash.
        path = os.path.join(self.output_dir,
                            '%s_list_running.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "PID", "PROCESS_NAME",
                       "COMMAND", "EXEC_PATH"]
            for p in list_running:
                pid, name, cmd, exe_path = p[0], p[1], p[2], p[3]
                write_to_json(headers,
                              [self.computer_name, 'processes', unicode(pid),
                               name, unicode(cmd), unicode(exe_path)],
                              json_writer)
def _json_list_scheduled_jobs(self, is_at_available=False):
    """Log scheduled jobs (and optionally AT jobs) as JSON rows, then
    record the output file's SHA-256.

    :param is_at_available: also collect legacy at.exe jobs when True
    """
    self.logger.info('Health : Listing scheduled jobs')
    if self.destination != 'local':
        return
    tasks_path = (self.output_dir
                  + '%s_scheduled_jobs' % self.computer_name
                  + self.rand_ext)
    header = ["COMPUTER_NAME", "TYPE", 'TASK_NAME', 'NEXT_SCHEDULE', "STATUS"]
    with open(tasks_path, 'wb') as tasks_logs:
        json_writer = get_json_writer(tasks_logs)
        for entry in self._list_scheduled_jobs():
            row = ([self.computer_name, 'Scheduled Jobs']
                   + entry.replace('"', '').split(','))
            write_to_json(header, row, json_writer)
        if is_at_available:
            for fields in self._list_at_scheduled_jobs():
                row = [self.computer_name, 'scheduled_jobs', fields[4],
                       fields[2] + ' ' + fields[3], fields[0]]
                write_to_json(header, row, json_writer)
        close_json_writer(json_writer)
    record_sha256_logs(
        self.output_dir + self.computer_name + '_scheduled_jobs' + self.rand_ext,
        self.output_dir + self.computer_name + '_sha256.log')
def _json_list_scheduled_jobs(self):
    """Dump the `schtasks.exe /query /fo CSV` listing to <computer>_tasks.json."""
    self.logger.info('Health : Listing scheduled jobs')
    if self.destination == 'local':
        file_tasks = os.path.join(self.output_dir,
                                  '%s_tasks.json' % self.computer_name)
        with open(file_tasks, 'wb') as tasks_logs:
            json_writer = get_json_writer(tasks_logs)
            proc = subprocess.Popen(["schtasks.exe", '/query', '/fo', 'CSV'],
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
            res = proc.communicate()
            res = get_terminal_decoded_string(res[0])
            # clean and write the command output
            header = ["COMPUTER_NAME", "TYPE", 'TASK_NAME', 'NEXT_SCHEDULE',
                      "STATUS"]
            column_names = None
            for line in res.split('\r\n'):
                if line == "":
                    continue
                # Data rows are quoted CSV; skip anything else.
                if line[0] != '"':
                    continue
                # The first quoted line is the column header; schtasks
                # repeats it per task folder, so skip those repeats too.
                if not column_names:
                    column_names = line
                    continue
                elif column_names == line:
                    continue
                # Bug fix: list.extends() does not exist (and extend()
                # returns None), so the original passed a broken row.
                write_to_json(header,
                              [self.computer_name, 'Scheduled Jobs']
                              + line.split(','),
                              json_writer)
def _json_windows_prefetch(self, wpref):
    """Log prefetch-file metadata as JSON rows, then record the SHA-256.

    :param wpref: iterable of 9-tuples of parsed prefetch attributes
    """
    if self.destination == 'local':
        with open(self.output_dir + self.computer_name + '_prefetch'
                  + self.rand_ext, 'wb') as output:
            json_writer = get_json_writer(output)
            header = ["COMPUTER_NAME", "TYPE", "FILE", "VERSION", "SIZE",
                      "EXEC_NAME", "CREATE_TIME", "MODIFICATION_TIME",
                      "RUN_COUNT", "START_TIME", "DURATION",
                      "AVERAGE_DURATION", "DLL_LIST"]
            for (pref_file, format_version, file_size, exec_name, tc, tm,
                 run_count, hash_table_a, list_str_c) in wpref:
                # ';'-terminated DLL list; join is linear where the
                # original += loop was quadratic.
                str_c = ''.join(s.replace('\0', '') + ';' for s in list_str_c)
                write_to_json(header,
                              [self.computer_name, 'prefetch', pref_file,
                               unicode(format_version), unicode(file_size),
                               exec_name.replace('\00', ''), unicode(tc),
                               unicode(tm), unicode(run_count),
                               unicode(hash_table_a['start_time']),
                               unicode(hash_table_a['duration']),
                               unicode(hash_table_a['average_duration']),
                               str_c],
                              json_writer)
            close_json_writer(json_writer)
        record_sha256_logs(
            self.output_dir + self.computer_name + '_prefetch' + self.rand_ext,
            self.output_dir + self.computer_name + '_sha256.log')
def json_recycle_bin(self):
    """Enumerate the recycle bin through the shell namespace, log each
    entry's display and parsing names as JSON, then record the SHA-256."""
    if self.destination != 'local':
        return
    with open(self.output_dir + self.computer_name + '_recycle_bin'
              + self.rand_ext, 'wb') as output:
        json_writer = get_json_writer(output)
        header = ["COMPUTER_NAME", "TYPE", "NAME_1", "NAME_2"]
        bitbucket = shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_BITBUCKET)
        folder = shell.SHGetDesktopFolder().BindToObject(
            bitbucket, None, shell.IID_IShellFolder)
        for item in folder:
            row = [self.computer_name, 'recycle_bin',
                   folder.GetDisplayNameOf(item, shellcon.SHGDN_NORMAL),
                   folder.GetDisplayNameOf(item, shellcon.SHGDN_FORPARSING)]
            write_to_json(header, row, json_writer)
        close_json_writer(json_writer)
    record_sha256_logs(
        self.output_dir + self.computer_name + '_recycle_bin' + self.rand_ext,
        self.output_dir + self.computer_name + '_sha256.log')
def _json_list_sockets_network(self, connections):
    """Append one JSON row per open network socket.

    :param connections: iterable of (pid, name, laddr, lport, raddr, rport, status)
    """
    self.logger.info('Health : Listing sockets networks')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_sockets.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "PID", "PROCESS_NAME",
                       "LOCAL_ADDR", "SOURCE_PORT", "REMOTE_ADDR",
                       "REMOTE_PORT", "STATUS"]
            for pid, name, local_address, source_port, remote_addr, \
                    remote_port, status in connections:
                write_to_json(headers,
                              [self.computer_name, 'sockets', unicode(pid),
                               unicode(name), unicode(local_address),
                               unicode(source_port), unicode(remote_addr),
                               unicode(remote_port), unicode(status)],
                              json_writer)
def _json_hash_running_process(self, list_running):
    """Hash each running process executable (MD5/SHA1) and append a JSON
    row with its filesystem timestamps.

    :param list_running: iterable of sequences (pid, name, cmd, exe_path, ...)
    """
    self.logger.info("Health : Hashing running processes")
    if self.destination == 'local':
        # Bug fix: separator lost by string concatenation inside
        # os.path.join; pass dir and filename separately.
        # NOTE(review): the filename says 'list_share' although this logs
        # process hashes -- looks like a copy/paste slip; kept for output
        # compatibility. TODO confirm.
        path = os.path.join(self.output_dir,
                            '%s_list_share.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "PID", "PROCESS_NAME",
                       "EXEC_PATH", "MD5", "SHA1", "CTIME", "MTIME", "ATIME"]
            for p in list_running:
                pid, name, exe_path = p[0], p[1], p[3]
                # Only hash entries that point at a real file; some
                # processes report no executable path.
                if exe_path and os.path.isfile(exe_path):
                    ctime = datetime.datetime.fromtimestamp(
                        os.path.getctime(exe_path))
                    mtime = datetime.datetime.fromtimestamp(
                        os.path.getmtime(exe_path))
                    atime = datetime.datetime.fromtimestamp(
                        os.path.getatime(exe_path))
                    md5 = process_md5(unicode(exe_path))
                    sha1 = process_sha1(unicode(exe_path))
                    write_to_json(headers,
                                  [self.computer_name, 'processes',
                                   unicode(pid), name, unicode(exe_path),
                                   md5, sha1, ctime, mtime, atime],
                                  json_writer)
def _json_infos_fs(self, files):
    """Write Filecatcher results as JSON rows, then close the archive
    (if one was opened)."""
    if self.destination != 'local':
        return
    target = os.path.join(self.output_dir,
                          '%s_Filecatcher.json' % self.computer_name)
    with open(target, 'wb') as fw:
        json_writer = get_json_writer(fw)
        header = ['COMPUTER NAME', 'TYPE', 'DATE', 'PATH', 'MD5', 'SHA1',
                  'SHA256', 'MIMETYPE', 'ZIP', 'EMPTY', 'VT']
        for f, mime, md5, sha1, sha256, zip_value, datem, empty in files:
            row = [self.computer_name, 'Filecatcher', unicode(datem),
                   unicode(f), unicode(md5), unicode(sha1), unicode(sha256),
                   unicode(mime), unicode(zip_value), unicode(empty),
                   self._get_url_VT(sha256)]
            write_to_json(header, row, json_writer)
    if self.zip_file:
        self.zip_file.close()
def _json_list_named_pipes(self, pipes):
    """Write each named pipe as one JSON row."""
    if self.destination != 'local':
        return
    target = os.path.join(self.output_dir,
                          '%s_named_pipes.json' % self.computer_name)
    with open(target, 'wb') as output:
        json_writer = get_json_writer(output)
        header = ["COMPUTER_NAME", "TYPE", "NAME"]
        for pipe_name in pipes:
            write_to_json(header,
                          [self.computer_name, 'named_pipes', pipe_name],
                          json_writer)
def _json_list_scheduled_jobs(self):
    """Dump the `schtasks.exe /query /fo CSV` listing to <computer>_tasks.json."""
    self.logger.info('Health : Listing scheduled jobs')
    if self.destination == 'local':
        file_tasks = os.path.join(self.output_dir,
                                  '%s_tasks.json' % self.computer_name)
        with open(file_tasks, 'wb') as tasks_logs:
            json_writer = get_json_writer(tasks_logs)
            proc = subprocess.Popen(
                ["schtasks.exe", '/query', '/fo', 'CSV'],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
            res = proc.communicate()
            res = get_terminal_decoded_string(res[0])
            # clean and write the command output
            header = ["COMPUTER_NAME", "TYPE", 'TASK_NAME', 'NEXT_SCHEDULE',
                      "STATUS"]
            column_names = None
            for line in res.split('\r\n'):
                if line == "":
                    continue
                # Data rows are quoted CSV; skip anything else.
                if line[0] != '"':
                    continue
                # The first quoted line is the column header; schtasks
                # repeats it per task folder, so skip those repeats too.
                if not column_names:
                    column_names = line
                    continue
                elif column_names == line:
                    continue
                # Bug fix: list.extends() does not exist (and extend()
                # returns None), so the original passed a broken row.
                write_to_json(header,
                              [self.computer_name, 'Scheduled Jobs']
                              + line.split(','),
                              json_writer)
def _json_chrome_history(self, chistory):
    """Write Chrome browsing-history entries as JSON rows.

    :param chistory: iterable of (time, url, title, user, profile)
    """
    if self.destination == 'local':
        # Bug fix: the original applied `% self.computer_name` to the
        # result of os.path.join, which breaks if output_dir itself
        # contains a '%' sequence; format the filename first.
        target = os.path.join(self.output_dir,
                              '%s_chrome_history.json' % self.computer_name)
        with open(target, 'wb') as output:
            json_writer = get_json_writer(output)
            header = ["COMPUTER_NAME", "TYPE", "TIME", "URL", "TITLE",
                      "USER", "PROFILE"]
            for time, url, title, user, profile in chistory:
                write_to_json(header,
                              [self.computer_name, 'chrome_history', time,
                               url, title, user, profile],
                              json_writer)
def _json_list_route_table(self, routes):
    """Append one JSON row per routing-table entry (ip, mask)."""
    self.logger.info('Health : Listing routes tables')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_routes_tables.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "NAME", "MASK"]
            for ip, mask in routes:
                write_to_json(headers,
                              [self.computer_name, 'routes_tables',
                               unicode(ip), unicode(mask)],
                              json_writer)
def _json_list_kb(self, kbs):
    """Append one JSON row per installed Windows update (KB)."""
    self.logger.info('Health : Listing KB installed on computer')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_kb.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "CAPTION", "CS_NAME",
                       "FIX_COMMENTS", "HOTFIX_ID", "INSTALL_DATE",
                       "INSTALLED_ON", "NAME", "SERVICE_PACK", "STATUS"]
            for Caption, CSName, FixComments, HotFixID, InstallDate, \
                    InstalledOn, Name, ServicePackInEffect, Status in kbs:
                write_to_json(headers,
                              [self.computer_name, 'kb', Caption, CSName,
                               FixComments, HotFixID, InstallDate,
                               InstalledOn, Name, ServicePackInEffect,
                               Status],
                              json_writer)
def _json_list_network_drives(self, drives):
    """Append one JSON row per network drive (caption, fs, partition name)."""
    self.logger.info("Health : Listing network drives")
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_networks_drives.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "DISK", "FILESYSTEM",
                       "PARTITION_NAME"]
            for diskCapt, diskFs, diskPName in drives:
                write_to_json(headers,
                              [self.computer_name, 'list_networks_drives',
                               diskCapt, diskFs, diskPName],
                              json_writer)
def _json_list_drives(self, drives):
    """Append one JSON row per physical/logical drive mapping."""
    self.logger.info("Health : Listing drives")
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_list_drives.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "FAB", "PARTITIONS",
                       "DISK", "FILESYSTEM"]
            for phCapt, partCapt, logicalCapt, fs in drives:
                write_to_json(headers,
                              [self.computer_name, 'list_drives', phCapt,
                               partCapt, logicalCapt, fs],
                              json_writer)
def _json_list_share(self, share):
    """Append one JSON row per network share (name, path)."""
    self.logger.info("Health : Listing shares")
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_list_share.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "SHARE_NAME", "SHARE_PATH"]
            for name, share_path in share:
                write_to_json(headers,
                              [self.computer_name, 'shares', name,
                               share_path],
                              json_writer)
def _json_infos_fs(self, files):
    """Write Filecatcher results as JSON rows, then close the archive.

    :param files: iterable of (path, mime, md5, sha1, sha256, zip, date, empty)
    """
    if self.destination == 'local':
        with open(os.path.join(self.output_dir,
                               '%s_Filecatcher.json' % self.computer_name),
                  'wb') as fw:
            json_writer = get_json_writer(fw)
            header = ['COMPUTER NAME', 'TYPE', 'DATE', 'PATH', 'MD5', 'SHA1',
                      'SHA256', 'MIMETYPE', 'ZIP', 'EMPTY', 'VT']
            for f, mime, md5, sha1, sha256, zip_value, datem, empty in files:
                write_to_json(header,
                              [self.computer_name, 'Filecatcher',
                               unicode(datem), unicode(f), unicode(md5),
                               unicode(sha1), unicode(sha256), unicode(mime),
                               unicode(zip_value), unicode(empty),
                               self._get_url_VT(sha256)],
                              json_writer)
        # Bug fix: guard against zip_file being None (no archive opened),
        # consistent with the sibling _json_infos_fs implementation.
        if self.zip_file:
            self.zip_file.close()
def _json_list_sessions(self, sessions):
    """Append one JSON row per logon session."""
    self.logger.info('Health : Listing sessions')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_sessions.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "LOGON_ID", "AUTH_PACKAGE",
                       "START_TIME", "LOGON_TYPE"]
            for logonID, authenticationPackage, startime, logontype in sessions:
                # Start time is truncated at the '.' to drop fractional
                # seconds.
                write_to_json(headers,
                              [self.computer_name, 'sessions',
                               unicode(logonID), authenticationPackage,
                               unicode(startime.split('.')[0]),
                               unicode(logontype)],
                              json_writer)
def _json_list_services(self, services):
    """Append one JSON row per Windows service."""
    self.logger.info('Health : Listing services')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_list_services.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "CAPTION", "PID",
                       "SERVICE_TYPE", "PATH_NAME", "STATUS", "STATE",
                       "START_MODE"]
            # `name` is unpacked but intentionally not written; the row
            # starts at caption.
            for name, caption, processId, pathName, serviceType, status, \
                    state, startMode in services:
                write_to_json(headers,
                              [self.computer_name, 'services', caption,
                               unicode(processId), serviceType, pathName,
                               unicode(status), state, startMode],
                              json_writer)
def _json_list_network_adapters(self, ncs):
    """Append one JSON row per network adapter, substituting placeholders
    for missing (None) fields."""
    self.logger.info('Health : Listing network adapters')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_networks_cards.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "NETWORK_CARD",
                       "ADAPTER_TYPE", "DESCRIPTION", "MAC_ADDR",
                       "PRODUCT_NAME", "PHYSICAL_ADAPTER", "SPEED", "IPv4",
                       "IPv6", "DHCP_SERVER", "DNS_SERVER", "DATABASE_PATH",
                       "NBTSTAT_VALUE"]
            # NOTE(review): product_name appears twice in the unpacking;
            # the second occurrence overwrites the first -- likely one of
            # them was meant to be a different field. TODO confirm the
            # upstream tuple shape.
            for netcard, adapter_type, description, mac_address, \
                    product_name, physical_adapter, product_name, speed, \
                    IPv4, IPv6, DHCP_server, DNS_server, database_path, \
                    nbtstat_value in ncs:
                if netcard is None:
                    netcard = ' '
                if adapter_type is None:
                    adapter_type = ''
                if description is None:
                    description = ' '
                if mac_address is None:
                    mac_address = ' '
                if physical_adapter is None:
                    physical_adapter = ' '
                # Bug fix: the original evaluated the bare name instead of
                # assigning, leaving product_name as None in the output.
                if product_name is None:
                    product_name = ' '
                if speed is None:
                    speed = ' '
                if IPv4 is None:
                    IPv4 = ' '
                if IPv6 is None:
                    IPv6 = ''
                if DHCP_server is None:
                    DHCP_server = ' '
                if DNS_server is None:
                    DNS_server = ' '
                if database_path is None:
                    database_path = ' '
                if nbtstat_value is None:
                    nbtstat_value = ' '
                try:
                    write_to_json(headers,
                                  [self.computer_name, 'networks_cards',
                                   netcard, adapter_type, description,
                                   mac_address, product_name,
                                   physical_adapter, speed, IPv4, IPv6,
                                   DHCP_server, DNS_server, database_path,
                                   nbtstat_value],
                                  json_writer)
                except IOError:
                    self.logger.error(traceback.format_exc())
def _json_list_share(self, share):
    """Append one JSON row per network share (name, path)."""
    self.logger.info("Health : Listing shares")
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_list_share.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "SHARE_NAME", "SHARE_PATH"]
            for name, share_path in share:
                write_to_json(headers,
                              [self.computer_name, 'shares', name,
                               share_path],
                              json_writer)
def _json_list_sockets_network(self, connections):
    """Append one JSON row per open network socket.

    :param connections: iterable of (pid, name, laddr, lport, raddr, rport, status)
    """
    self.logger.info('Health : Listing sockets networks')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_sockets.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "PID", "PROCESS_NAME",
                       "LOCAL_ADDR", "SOURCE_PORT", "REMOTE_ADDR",
                       "REMOTE_PORT", "STATUS"]
            for pid, name, local_address, source_port, remote_addr, \
                    remote_port, status in connections:
                write_to_json(headers,
                              [self.computer_name, 'sockets', unicode(pid),
                               unicode(name), unicode(local_address),
                               unicode(source_port), unicode(remote_addr),
                               unicode(remote_port), unicode(status)],
                              json_writer)
def json_recycle_bin(self):
    """Enumerate the recycle bin through the shell namespace and write
    each entry's display and parsing names as one JSON row."""
    if self.destination != 'local':
        return
    out_file = os.path.join(self.output_dir,
                            '%s_recycle_bin.json' % self.computer_name)
    with open(out_file, 'wb') as output:
        json_writer = get_json_writer(output)
        header = ["COMPUTER_NAME", "TYPE", "NAME_1", "NAME_2"]
        bin_idl = shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_BITBUCKET)
        folder = shell.SHGetDesktopFolder().BindToObject(
            bin_idl, None, shell.IID_IShellFolder)
        for item in folder:
            write_to_json(header,
                          [self.computer_name, 'recycle_bin',
                           folder.GetDisplayNameOf(item, shellcon.SHGDN_NORMAL),
                           folder.GetDisplayNameOf(item, shellcon.SHGDN_FORPARSING)],
                          json_writer)
def _json_list_running_process(self, list_running):
    """Append one JSON row per running process (pid, name, command, path).

    :param list_running: iterable of sequences (pid, name, cmd, exe_path, ...)
    """
    self.logger.info("Health : Listing running processes")
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_list_running.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "PID", "PROCESS_NAME",
                       "COMMAND", "EXEC_PATH"]
            for p in list_running:
                pid, name, cmd, exe_path = p[0], p[1], p[2], p[3]
                write_to_json(headers,
                              [self.computer_name, 'processes', unicode(pid),
                               name, unicode(cmd), unicode(exe_path)],
                              json_writer)
def _json_get_startup_files(self, path):
    """Log startup files (name, user, hashes) as JSON rows, then record
    the output file's SHA-256."""
    out_file = (self.output_dir + self.computer_name + '_startup_files'
                + self.rand_ext)
    with open(out_file, 'wb') as output:
        json_writer = get_json_writer(output)
        header = ["COMPUTER_NAME", "TYPE", "FILENAME", "USER", "MD5",
                  "SHA1", "SHA256"]
        # Each row from the helper is already in header order.
        for row in self._get_startup_files(path):
            write_to_json(header, row, json_writer)
        close_json_writer(json_writer)
    record_sha256_logs(
        self.output_dir + self.computer_name + '_startup_files' + self.rand_ext,
        self.output_dir + self.computer_name + '_sha256.log')
def _json_infos_fs(self, files):
    """Write Filecatcher results (paths rebased onto the system drive) as
    JSON rows, record the SHA-256, and close the archive if any."""
    if self.destination != 'local':
        return
    out_file = (self.output_dir + '\\' + self.computer_name + '_Filecatcher'
                + self.rand_ext)
    with open(out_file, 'wb') as fw:
        json_writer = get_json_writer(fw)
        headers = ['COMPUTER NAME', 'TYPE', 'DATE', 'PATH', 'MD5', 'SHA1',
                   'SHA256', 'MIMETYPE', 'ZIP', 'EMPTY', 'VT']
        for f, mime, md5, sha1, sha256, zip_value, datem, empty in files:
            # Rebase the captured path onto the system drive letter.
            f = (os.path.splitdrive(self.systemroot)[0] + '\\'
                 + f.split('\\', 6)[-1])
            row = [self.computer_name, 'Filecatcher', unicode(datem),
                   unicode(f), unicode(md5), unicode(sha1), unicode(sha256),
                   unicode(mime), unicode(zip_value), unicode(empty),
                   self._get_url_VT(sha256)]
            write_to_json(headers, row, json_writer)
        close_json_writer(json_writer)
    record_sha256_logs(
        self.output_dir + '\\' + self.computer_name + '_Filecatcher' + self.rand_ext,
        self.output_dir + '\\' + self.computer_name + '_sha256.log')
    if self.zip_file:
        self.zip_file.close()
def _json_list_named_pipes(self, pipes):
    """Log every named pipe as a JSON row and record the file SHA-256."""
    if self.destination != 'local':
        return
    out_file = (self.output_dir + self.computer_name + '_named_pipes'
                + self.rand_ext)
    with open(out_file, 'wb') as output:
        json_writer = get_json_writer(output)
        header = ["COMPUTER_NAME", "TYPE", "NAME"]
        for pipe_name in pipes:
            write_to_json(header,
                          [self.computer_name, 'named_pipes', pipe_name],
                          json_writer)
        close_json_writer(json_writer)
    record_sha256_logs(
        self.output_dir + self.computer_name + '_named_pipes' + self.rand_ext,
        self.output_dir + self.computer_name + '_sha256.log')
def _json_list_arp_table(self, arp):
    """Append ARP-table entries (IP, MAC, status) as JSON rows."""
    self.logger.info('Health : Listing routes tables')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_arp_table.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "IP", "MAC_ADDR", "STATUS"]
            for entry in arp:
                # Bug fix: str.replace returns a new string; the original
                # discarded the result, so '\xff' bytes were never stripped.
                entry = entry.replace('\xff', '')
                tokens = entry.split()
                entry_to_write = ''
                if len(tokens) == 3:
                    entry_to_write = ('"' + self.computer_name
                                      + '"|"arp_table"|"'
                                      + '"|"'.join(tokens) + '"\n')
                # NOTE(review): find('\.') != 1 looks like it was meant to
                # be != -1 (or a regex match); kept as-is because changing
                # it would alter which rows are emitted. TODO confirm.
                if entry_to_write.find('\.') != 1 and len(entry_to_write) > 0:
                    arr_to_write = [self.computer_name, 'arp_table'] + tokens
                    write_to_json(headers, arr_to_write, json_writer)
def _json_list_network_adapters(self, ncs):
    """Append one JSON row per network adapter, substituting placeholders
    for missing (None) fields."""
    self.logger.info('Health : Listing network adapters')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_networks_cards.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "NETWORK_CARD",
                       "ADAPTER_TYPE", "DESCRIPTION", "MAC_ADDR",
                       "PRODUCT_NAME", "PHYSICAL_ADAPTER", "SPEED", "IPv4",
                       "IPv6", "DHCP_SERVER", "DNS_SERVER", "DATABASE_PATH",
                       "NBTSTAT_VALUE"]
            # NOTE(review): product_name appears twice in the unpacking;
            # the second occurrence overwrites the first -- likely one of
            # them was meant to be a different field. TODO confirm the
            # upstream tuple shape.
            for netcard, adapter_type, description, mac_address, \
                    product_name, physical_adapter, product_name, speed, \
                    IPv4, IPv6, DHCP_server, DNS_server, database_path, \
                    nbtstat_value in ncs:
                if netcard is None:
                    netcard = ' '
                if adapter_type is None:
                    adapter_type = ''
                if description is None:
                    description = ' '
                if mac_address is None:
                    mac_address = ' '
                if physical_adapter is None:
                    physical_adapter = ' '
                # Bug fix: the original evaluated the bare name instead of
                # assigning, leaving product_name as None in the output.
                if product_name is None:
                    product_name = ' '
                if speed is None:
                    speed = ' '
                if IPv4 is None:
                    IPv4 = ' '
                if IPv6 is None:
                    IPv6 = ''
                if DHCP_server is None:
                    DHCP_server = ' '
                if DNS_server is None:
                    DNS_server = ' '
                if database_path is None:
                    database_path = ' '
                if nbtstat_value is None:
                    nbtstat_value = ' '
                try:
                    write_to_json(headers,
                                  [self.computer_name, 'networks_cards',
                                   netcard, adapter_type, description,
                                   mac_address, product_name,
                                   physical_adapter, speed, IPv4, IPv6,
                                   DHCP_server, DNS_server, database_path,
                                   nbtstat_value],
                                  json_writer)
                except IOError:
                    self.logger.error(traceback.format_exc())
def _json_list_network_drives(self, drives):
    """Append one JSON row per network drive (caption, fs, partition name)."""
    self.logger.info("Health : Listing network drives")
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_networks_drives.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "DISK", "FILESYSTEM",
                       "PARTITION_NAME"]
            for diskCapt, diskFs, diskPName in drives:
                write_to_json(headers,
                              [self.computer_name, 'list_networks_drives',
                               diskCapt, diskFs, diskPName],
                              json_writer)
def _json_list_route_table(self, routes):
    """Append one JSON row per routing-table entry (ip, mask)."""
    self.logger.info('Health : Listing routes tables')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_routes_tables.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "NAME", "MASK"]
            for ip, mask in routes:
                write_to_json(headers,
                              [self.computer_name, 'routes_tables',
                               unicode(ip), unicode(mask)],
                              json_writer)
def _json_list_drives(self, drives):
    """Append one JSON row per physical/logical drive mapping."""
    self.logger.info("Health : Listing drives")
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_list_drives.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "FAB", "PARTITIONS",
                       "DISK", "FILESYSTEM"]
            for phCapt, partCapt, logicalCapt, fs in drives:
                write_to_json(headers,
                              [self.computer_name, 'list_drives', phCapt,
                               partCapt, logicalCapt, fs],
                              json_writer)
def _json_firefox_history(self, fhistory):
    """Write Firefox history entries as JSON rows, then record the output
    file's SHA-256."""
    out_file = (self.output_dir + self.computer_name + '_firefox_history'
                + self.rand_ext)
    with open(out_file, 'wb') as output:
        json_writer = get_json_writer(output)
        header = ["COMPUTER_NAME", "TYPE", "TIME", "URL", "USER", "PROFILE"]
        for time, url, user, profile in fhistory:
            row = [self.computer_name, 'firefox_history', time, url, user,
                   profile]
            write_to_json(header, row, json_writer)
        close_json_writer(json_writer)
    record_sha256_logs(
        self.output_dir + self.computer_name + '_firefox_history' + self.rand_ext,
        self.output_dir + self.computer_name + '_sha256.log')
def _json_windows_prefetch(self, wpref):
    """Log prefetch-file metadata as JSON rows.

    :param wpref: iterable of 9-tuples of parsed prefetch attributes
    """
    if self.destination == 'local':
        with open(os.path.join(self.output_dir,
                               '%s_prefetch.json' % self.computer_name),
                  'wb') as output:
            json_writer = get_json_writer(output)
            header = ["COMPUTER_NAME", "TYPE", "FILE", "VERSION", "SIZE",
                      "EXEC_NAME", "CREATE_TIME", "MODIFICATION_TIME",
                      "RUN_COUNT", "START_TIME", "DURATION",
                      "AVERAGE_DURATION", "DLL_LIST"]
            for (pref_file, format_version, file_size, exec_name, tc, tm,
                 run_count, hash_table_a, list_str_c) in wpref:
                # ';'-terminated DLL list; join is linear where the
                # original += loop was quadratic.
                str_c = ''.join(s.replace('\0', '') + ';' for s in list_str_c)
                write_to_json(header,
                              [self.computer_name, 'prefetch', pref_file,
                               unicode(format_version), unicode(file_size),
                               exec_name.replace('\00', ''), unicode(tc),
                               unicode(tm), unicode(run_count),
                               unicode(hash_table_a['start_time']),
                               unicode(hash_table_a['duration']),
                               unicode(hash_table_a['average_duration']),
                               str_c],
                              json_writer)
def _json_list_services(self, services):
    """Append one JSON row per Windows service."""
    self.logger.info('Health : Listing services')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_list_services.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "CAPTION", "PID",
                       "SERVICE_TYPE", "PATH_NAME", "STATUS", "STATE",
                       "START_MODE"]
            # `name` is unpacked but intentionally not written; the row
            # starts at caption.
            for name, caption, processId, pathName, serviceType, status, \
                    state, startMode in services:
                write_to_json(headers,
                              [self.computer_name, 'services', caption,
                               unicode(processId), serviceType, pathName,
                               unicode(status), state, startMode],
                              json_writer)
def _json_list_kb(self, kbs):
    """Append one JSON row per installed Windows update (KB)."""
    self.logger.info('Health : Listing KB installed on computer')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_kb.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "CAPTION", "CS_NAME",
                       "FIX_COMMENTS", "HOTFIX_ID", "INSTALL_DATE",
                       "INSTALLED_ON", "NAME", "SERVICE_PACK", "STATUS"]
            for Caption, CSName, FixComments, HotFixID, InstallDate, \
                    InstalledOn, Name, ServicePackInEffect, Status in kbs:
                write_to_json(headers,
                              [self.computer_name, 'kb', Caption, CSName,
                               FixComments, HotFixID, InstallDate,
                               InstalledOn, Name, ServicePackInEffect,
                               Status],
                              json_writer)
def _json_list_sessions(self, sessions):
    """Append one JSON row per logon session."""
    self.logger.info('Health : Listing sessions')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_sessions.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "LOGON_ID", "AUTH_PACKAGE",
                       "START_TIME", "LOGON_TYPE"]
            for logonID, authenticationPackage, startime, logontype in sessions:
                # Start time is truncated at the '.' to drop fractional
                # seconds.
                write_to_json(headers,
                              [self.computer_name, 'sessions',
                               unicode(logonID), authenticationPackage,
                               unicode(startime.split('.')[0]),
                               unicode(logontype)],
                              json_writer)
def _json_chrome_history(self, chistory):
    """Write Chrome history entries as JSON rows, then record the output
    file's SHA-256."""
    if self.destination != 'local':
        return
    out_file = (self.output_dir + self.computer_name + '_chrome_history'
                + self.rand_ext)
    with open(out_file, 'wb') as output:
        json_writer = get_json_writer(output)
        header = ["COMPUTER_NAME", "TYPE", "TIME", "URL", "TITLE", "USER",
                  "PROFILE"]
        for time, url, title, user, profile in chistory:
            row = [self.computer_name, 'chrome_history', time, url, title,
                   user, profile]
            write_to_json(header, row, json_writer)
        close_json_writer(json_writer)
    record_sha256_logs(
        self.output_dir + self.computer_name + '_chrome_history' + self.rand_ext,
        self.output_dir + self.computer_name + '_sha256.log')
def _json_event_logs(self, is_win_xp): server = None # name of the target computer to get event logs, None to get logs from current computer if self.destination == 'local': with open(self.output_dir + '\\' + self.computer_name + '_evts' + self.rand_ext, 'wb') as fw: json_writer = get_json_writer(fw) header = ['COMPUTER', 'TYPE', 'SOURCE', 'CATEGORY', 'SOURCE NAME', 'ID', 'EVENT_TYPE', 'LOG'] if is_win_xp: for eventCategory, sourceName, eventID, eventType, date, log in self._list_evt_xp(server, 'Security'): write_to_json(header, [self.computer_name, 'Logs', 'Security', eventCategory, sourceName, eventID, eventType, date] + log, json_writer) for eventCategory, sourceName, eventID, eventType, date, log in self._list_evt_xp(server, 'Application'): write_to_json(header, [self.computer_name, 'Logs', 'Application', eventCategory, sourceName, eventID, eventType, date, log], json_writer) for eventCategory, sourceName, eventID, eventType, date, log in self._list_evt_xp(server, 'System'): write_to_json(header, [self.computer_name, 'Logs', 'System', eventCategory, sourceName, eventID, eventType, date, log], json_writer) else: # Exports everything from the event viewer evt_handle = win32evtlog.EvtOpenChannelEnum() os.mkdir(self.output_dir + r"\evt") while True: # opening channel for enumeration logtype = win32evtlog.EvtNextChannelPath(evt_handle) if logtype is None: break # fw.write('"Computer Name"|"Type"|"Date"|"logtype"|"log data"\n') self._list_evt_vista(server, logtype) close_json_writer(json_writer)
def _json_list_arp_table(self, arp):
    """Append ARP-table entries (IP, MAC, status) as JSON rows."""
    self.logger.info('Health : Listing routes tables')
    if self.destination == 'local':
        # Bug fix: directory and filename passed as separate os.path.join
        # arguments (the original concatenated them).
        path = os.path.join(self.output_dir,
                            '%s_arp_table.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "IP", "MAC_ADDR", "STATUS"]
            for entry in arp:
                # Bug fix: str.replace returns a new string; the original
                # discarded the result, so '\xff' bytes were never stripped.
                entry = entry.replace('\xff', '')
                tokens = entry.split()
                entry_to_write = ''
                if len(tokens) == 3:
                    entry_to_write = ('"' + self.computer_name
                                      + '"|"arp_table"|"'
                                      + '"|"'.join(tokens) + '"\n')
                # NOTE(review): find('\.') != 1 looks like it was meant to
                # be != -1 (or a regex match); kept as-is because changing
                # it would alter which rows are emitted. TODO confirm.
                if entry_to_write.find('\.') != 1 and len(entry_to_write) > 0:
                    arr_to_write = [self.computer_name, 'arp_table'] + tokens
                    write_to_json(headers, arr_to_write, json_writer)
def _json_hash_running_process(self, list_running):
    """Hash each running process executable (MD5/SHA1) and append a JSON
    row with its filesystem timestamps.

    :param list_running: iterable of sequences (pid, name, cmd, exe_path, ...)
    """
    self.logger.info("Health : Hashing running processes")
    if self.destination == 'local':
        # Bug fix: separator lost by string concatenation inside
        # os.path.join; pass dir and filename separately.
        # NOTE(review): the filename says 'list_share' although this logs
        # process hashes -- looks like a copy/paste slip; kept for output
        # compatibility. TODO confirm.
        path = os.path.join(self.output_dir,
                            '%s_list_share.json' % self.computer_name)
        with open(path, 'ab') as fw:
            json_writer = get_json_writer(fw)
            headers = ["COMPUTER_NAME", "TYPE", "PID", "PROCESS_NAME",
                       "EXEC_PATH", "MD5", "SHA1", "CTIME", "MTIME", "ATIME"]
            for p in list_running:
                pid, name, exe_path = p[0], p[1], p[3]
                # Only hash entries that point at a real file; some
                # processes report no executable path.
                if exe_path and os.path.isfile(exe_path):
                    ctime = datetime.datetime.fromtimestamp(
                        os.path.getctime(exe_path))
                    mtime = datetime.datetime.fromtimestamp(
                        os.path.getmtime(exe_path))
                    atime = datetime.datetime.fromtimestamp(
                        os.path.getatime(exe_path))
                    md5 = process_md5(unicode(exe_path))
                    sha1 = process_sha1(unicode(exe_path))
                    write_to_json(headers,
                                  [self.computer_name, 'processes',
                                   unicode(pid), name, unicode(exe_path),
                                   md5, sha1, ctime, mtime, atime],
                                  json_writer)