Пример #1
0
 def csv_sha256(self, path=os.environ['SYSTEMDRIVE'] + '\\'):
     """Recursively walk *path* and append one 'sha256' CSV row per file
     to <output_dir>\\<computer_name>_sha256.csv.

     NOTE(review): the default path is computed from %SYSTEMDRIVE% at
     import time; pass *path* explicitly if the environment may change.
     """
     try:
         list_files = os.listdir(unicode(path))
     except Exception:
         # Best effort: unreadable directories are logged and skipped.
         self.logger.warn("Cannot list " + path)
         return
     for f in list_files:
         d = os.path.join(path, f)
         if os.path.isdir(d):
             self.csv_sha256(d)
         elif os.path.isfile(d):
             try:
                 sha = process_sha256(d)
                 # Opened in append mode: rows accumulate across the
                 # recursive calls into a single CSV.
                 with open(
                         self.output_dir + '\\' + self.computer_name +
                         '_sha256.csv', 'ab') as output:
                     csv_writer = get_csv_writer(output)
                     write_to_csv(
                         ['sha256', d, sha.hexdigest()], csv_writer)
             except (UnicodeError, IOError, ValueError):
                 # Undecodable names / unreadable files are deliberately
                 # skipped (previously three identical except clauses).
                 pass
Пример #2
0
def generate_data():
    """Fetch ipblocks-unblock revision data for every wiki and write a
    one-row summary to the 'ipboptions' CSV.

    Wikis whose fetch fails are recorded in failed_wikis and skipped.
    """
    print("Starting ipblocks unblocks...")

    # get db names
    wikis = utils.get_wikis_url()
    failed_wikis = []
    data = []

    # get data for each wiki (previously a bare `except:` which would also
    # swallow KeyboardInterrupt/SystemExit)
    for wiki in wikis:
        try:
            data.append(fetch_revision(wiki))
        except Exception:
            failed_wikis.append(wiki)

    total = len(data)
    total_modified = sum(options is not None for options in data)
    # Guard against ZeroDivisionError when no wiki could be fetched.
    perc_modified = round(total_modified * 100 / total, 2) if total else 0

    common_lenght_options = get_common_lenght_options(data)

    headers = ('Total modified', '% modified', 'Common length options')
    csv_data = [(total_modified, perc_modified, common_lenght_options)]

    utils.write_to_csv('ipboptions', headers, csv_data)
Пример #3
0
	def _csv_list_named_pipes(self,pipes):
		"""Write one CSV row per named pipe and log the file's SHA-256."""
		csv_path = self.output_dir + '\\' + self.computer_name + '_named_pipes.csv'
		with open(csv_path, 'wb') as output:
			writer = get_csv_writer(output)
			for named_pipe in pipes:
				write_to_csv([self.computer_name, 'PIPES', named_pipe], writer)
		record_sha256_logs(csv_path, self.output_dir + '\\' + self.computer_name + '_sha256.log')
def generate_data():
    """Gather per-wiki reblock statistics and write them to the
    'block_modifications' CSV."""
    print("Starting block edits...")

    db_names = utils.get_db_names()
    conn = conn_manager.get_conn()
    data = []

    # One row per wiki; a failing wiki is reported and skipped.
    for dbname in db_names:
        try:
            conn.select_db(dbname)
            data.append(
                (dbname, get_total_reblocks(), get_perc_reblock_author()))
        except Exception as err:
            print('Something wrong with %s, %s' % (dbname, err))

    # create csv
    headers = ('Wiki', 'Total reblocks', 'Same author reblock %')
    utils.write_to_csv('block_modifications', headers, data)

    print('Fin...')
Пример #5
0
def generate_data():
    """Collect per-wiki block statistics and write them to the
    'blocks_per_wiki' CSV (one row per wiki, 7 columns)."""
    print("Starting blocks per wiki...")

    # get db names
    db_names = utils.get_db_names()
    conn = conn_manager.get_conn()
    data = []

    # get data for each wiki; failures are reported and skipped
    for dbname in db_names:
        try:
            conn.select_db(dbname)

            wiki = dbname
            avg_blocks_per_hour = get_avg_blocks_per_hour()
            total_active_autoblocks = get_total_active_blocks(True)
            total_active_blocks = get_total_active_blocks()
            block_length_distribution = get_block_length_distribution()
            block_reasons = get_common_block_reasons()
            perc_blocks_prevent_talk_page = get_blocks_prevent_talk_page()

            data.append(
                (wiki, avg_blocks_per_hour, total_active_blocks,
                 total_active_autoblocks, perc_blocks_prevent_talk_page,
                 block_length_distribution, block_reasons))
        except Exception as err:
            print('Something wrong with %s, %s' % (dbname, err))

    # create csv
    # BUG FIX: a missing comma after 'Block length distribution' made
    # Python concatenate the last two strings, producing only 6 headers
    # for the 7-column rows appended above.
    headers = ('Wiki', 'Avg blocks per hour', 'Total active blocks',
               'Total active autoblocks', '% blocks prevent edit talk page',
               'Block length distribution',
               'Common unblock reasons')
    utils.write_to_csv('blocks_per_wiki', headers, data)
Пример #6
0
    def _csv_all_modules_dll(self):
        ''' Outputs all processes and their opened dll in a csv '''
        # Snapshot all running processes via the ToolHelp32 API.
        hProcessSnap = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0)

        pe32 = PROCESSENTRY32()
        pe32.dwSize = sizeof(PROCESSENTRY32)
        ret = Process32First(hProcessSnap, pointer(pe32))

        with open(
                self.output_dir + '\\' + self.computer_name +
                '_processes_dll.csv', 'wb') as output:
            csv_writer = get_csv_writer(output)
            #output.write('"Computer Name"|"Type"|"PID"|"Name"|"Module"\r\n')
            # Walk the snapshot until Process32Next reports no more entries.
            while ret:
                self.logger.info("  process ID		= %d" % pe32.th32ProcessID)

                modules = self._GetProcessModules(pe32.th32ProcessID, False)
                if len(modules) > 0:
                    process_name = modules.pop(
                        0)  # first element is the name of the process
                    # One CSV row per module (DLL) loaded by this process.
                    for module in modules:
                        write_to_csv([
                            self.computer_name, 'DLL',
                            unicode(pe32.th32ProcessID), process_name, module
                        ], csv_writer)
                #ListProcessThreads( pe32.th32ProcessID )

                ret = Process32Next(hProcessSnap, pointer(pe32))
Пример #7
0
def save_to_file(filename, records):
	"""Persist *records* to a CSV derived from *filename* ('_email' suffix).

	*records* is mutated in place: fields outside *headers* are stripped
	before writing.
	"""
	output_file = generate_output_filename(filename, '_email')
	headers = ('contact salutation company title department phone mobile '
	           'city province country website email').split()
	strip_extra_fields(headers, records)
	write_to_csv(output_file, headers, records)
Пример #8
0
def generate_data():
    """Fetch partial-block rows (itwiki only) and write them to the
    'partial_blocks' CSV."""
    print("Starting partial blocks...")

    conn = conn_manager.get_conn()
    data = []

    # Only itwiki is inspected for partial blocks.
    for dbname in ['itwiki_p']:
        try:
            conn.select_db(dbname)
            data.extend(get_partial_blocks())
        except Exception as err:
            print('Something wrong with %s, %s' % (dbname, err))

    # create csv
    headers = ('blocker', 'blockee', 'date', 'reason', 'params')
    utils.write_to_csv('partial_blocks', headers, data)

    print('Fin...')
Пример #9
0
	def csv_clipboard(self):
		''' Exports the clipboard contents '''
		# TODO check if it is the same for older windows
		self.logger.info('Getting clipboard contents')
		with open(self.output_dir + '\\' + self.computer_name + '_clipboard.csv', 'wb') as output:
			csv_writer = get_csv_writer(output)
			r = None
			try:
				r = Tk() # Using Tk instead because it supports exotic characters
				data = r.selection_get(selection='CLIPBOARD')
				r.destroy()
				write_to_csv([self.computer_name, 'String data in clipboard', unicode(data)], csv_writer)
			except Exception:
				# BUG FIX: if Tk() itself raised, r was unbound and the old
				# unconditional r.destroy() masked the real error with a
				# NameError. Guard it, and never let cleanup itself raise.
				if r is not None:
					try:
						r.destroy()
					except Exception:
						pass
				# Fallback: raw win32 clipboard API for non-text contents.
				win32clipboard.OpenClipboard()
				clip = win32clipboard.EnumClipboardFormats(0)
				while clip:
					try: format_name = win32clipboard.GetClipboardFormatName(clip)
					except win32api.error: format_name = "?"
					self.logger.info('format ' + unicode(clip) + ' ' + unicode(format_name))
					if clip == 15: # 15 seems to be a list of filenames (CF_HDROP)
						filenames = win32clipboard.GetClipboardData(clip)
						for filename in filenames:
							write_to_csv([self.computer_name, 'List of files in clipboard', filename], csv_writer)
					clip = win32clipboard.EnumClipboardFormats(clip)
				win32clipboard.CloseClipboard()
Пример #10
0
	def csv_recent_docs(self):
		"""Dump every user's RecentDocs registry MRU entries to CSV.

		One row per decoded RecentDocs value, prefixed with the username
		and the SID it belongs to.
		"""
		# Shows where recently opened files are saved and when they were opened
		self.logger.info('Getting recent_docs from registry')
		path = '\Software\Microsoft\Windows\CurrentVersion\Explorer\RecentDocs\\'
		aReg = ConnectRegistry(None,HKEY_USERS)
		with open(self.output_dir + '\\' + self.computer_name + '_recent_docs.csv', 'wb') as output:
			csv_writer = get_csv_writer(output)
			for index_sid in range(QueryInfoKey(aReg)[0]): # the number of subkeys (SIDs)
				str_sid = EnumKey(aReg, index_sid)
				full_path = str_sid + path
				try:
					username = str_sid2username(str_sid)
					result = [username, str_sid]
					reg_recent_docs = OpenKey(aReg, full_path)
					# Get values of RecentDocs itself
					for index_value in range(QueryInfoKey(reg_recent_docs)[1]): # the number of values (RecentDocs)
						str_value_name = EnumValue(reg_recent_docs, index_value)[0]
						str_value_datatmp = EnumValue(reg_recent_docs, index_value)[1]
						# MRUListEx is the ordering blob, not a document entry.
						if str_value_name != "MRUListEx":
							value_decoded = self.__decode_recent_docs_MRU(str_value_datatmp)
							write_to_csv(result + value_decoded, csv_writer)
					# Get values of RecentDocs subkeys
					for index_recent_docs_subkey in range(QueryInfoKey(reg_recent_docs)[0]): # the number of subkeys (RecentDocs)
						recent_docs_subkey = EnumKey(reg_recent_docs, index_recent_docs_subkey)
						reg_recent_docs_subkey = OpenKey(aReg, full_path + recent_docs_subkey)
						for index_value in range(QueryInfoKey(reg_recent_docs_subkey)[1]): # the number of values (RecentDocs subkeys)
							str_value_name = EnumValue(reg_recent_docs_subkey, index_value)[0]
							str_value_datatmp = EnumValue(reg_recent_docs_subkey, index_value)[1]
							if str_value_name != "MRUListEx":
								value_decoded = self.__decode_recent_docs_MRU(str_value_datatmp)
								write_to_csv(result + value_decoded, csv_writer)
					#self._dump_csv_registry_to_output('HKEY_USERS', full_path, aReg, csv_writer, username)
				except WindowsError:
					# SIDs without a readable RecentDocs key are skipped.
					pass
		CloseKey(aReg)
Пример #11
0
 def __print_regkey_csv(self, bKey, key_path, csv_writer, is_recursive,
                        subkey_type_to_query, additional_info_function):
     ''' Recursive method that will parse the registry and write in the output file '''
     ''' The subkey_type_to_query is a string that will be checked against the subkeys name if it is not None '''
     for i in range(QueryInfoKey(bKey)[0]):
         try:
             subkey_name = EnumKey(bKey, i)
             if subkey_type_to_query is None or subkey_type_to_query in subkey_name:
                 # if it is None, then we go inside no matter what, else we check if it is in the name
                 key_info = ''
                 if additional_info_function:
                     # this function is a parameter, it is None by default
                     key_info = additional_info_function(subkey_name)
                 subkey = OpenKey(bKey, subkey_name)
                 subkey_path = key_path + subkey_name + '\\'
                 node_type = 'Key'
                 date_last_mod = convert_windate(QueryInfoKey(subkey)[2])
                 #self.logger.info(date_last_mod + ' : ' + subkey_name)
                 write_to_csv([
                     self.computer_name, date_last_mod,
                     'HKEY_LOCAL_MACHINE', subkey_path, node_type, key_info
                 ], csv_writer)
                 if is_recursive:
                     self.__print_regkey_values_csv(
                         subkey, date_last_mod, 'HKEY_LOCAL_MACHINE',
                         subkey_path, csv_writer, is_recursive,
                         subkey_type_to_query)  # print the values first
                     # BUG FIX: the recursive call used to pass only
                     # (subkey, subkey_path, csv_writer); since
                     # is_recursive, subkey_type_to_query and
                     # additional_info_function have no defaults this
                     # raised TypeError on the first recursion.
                     self.__print_regkey_csv(
                         subkey, subkey_path, csv_writer, is_recursive,
                         subkey_type_to_query,
                         additional_info_function)  # and then go deeper in the tree
         except EnvironmentError:
             break
def main():
    """Using the specified border crossing entry data (input file),
       returns the desired statistics. """

    # Read in the border_crossing data
    args = parse_args()
    # NOTE(review): ImportError is an unusual choice for missing CLI args
    # (ValueError would be conventional) — kept as-is since callers may
    # catch it.
    if args.input is None:
        raise ImportError('Did not specify the correct input file!')
    if args.output is None:
        raise ImportError('Did not specify the correct output file!')

    with open(args.input, mode='r') as csv_file:

        result = NestedDict()
        csv_reader = csv.DictReader(csv_file, delimiter=',')
        for row in csv_reader:

            # These are the keys
            # NOTE(review): the int-converted 'Value' is part of the key
            # path; assigning 0 presumably creates the nested entry —
            # confirm NestedDict.__setitem__ semantics for list keys.
            path = [
                row['Border'], row['Measure'], row['Date'],
                int(row['Value'])
            ]

            # The integer values
            result[path] = 0

        final_list = find_average(result)

    write_to_csv(args.output, final_list)
Пример #13
0
    def _csv_windows_prefetch(self, wpref):
        """Dump parsed prefetch entries to <computer_name>_prefetch.csv and
        record the resulting file's SHA-256."""
        csv_path = (self.output_dir + '\\' + self.computer_name +
                    '_prefetch.csv')
        with open(csv_path, 'wb') as output:
            writer = get_csv_writer(output)
            for (prefetch_file, format_version, file_size, exec_name, tc, tm,
                 run_count, hash_table_a, list_str_c) in wpref:
                # Flatten the string list into one ';'-terminated field,
                # stripping embedded NULs.
                joined = ''.join(s.replace('\0', '') + ';' for s in list_str_c)

                write_to_csv([
                    self.computer_name, 'Prefetch', prefetch_file,
                    unicode(format_version),
                    unicode(file_size),
                    exec_name.replace('\00', ''),
                    unicode(tc),
                    unicode(tm),
                    unicode(run_count),
                    unicode(hash_table_a['start_time']),
                    unicode(hash_table_a['duration']),
                    unicode(hash_table_a['average_duration']), joined
                ], writer)
        record_sha256_logs(
            csv_path,
            self.output_dir + '\\' + self.computer_name + '_sha256.log')
Пример #14
0
	def _csv_list_network_drives(self,drives):
		"""Write one CSV row per network drive (caption, fs, partition name)."""
		self.logger.info("Health : Listing network drives")
		csv_path = self.output_dir + '_list_networks_drives.csv'
		with open(csv_path,'wb') as fw:
			writer = get_csv_writer(fw)
			for caption, fs, partition_name in drives:
				write_to_csv([self.computer_name, 'Network drives', caption, fs, partition_name], writer)
		record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #15
0
	def _csv_list_drives(self,drives):
		"""Write one CSV row per drive (fab, partition, disk, file system)."""
		self.logger.info("Health : Listing drives")
		csv_path = self.output_dir + '_list_drives.csv'
		with open(csv_path,'wb') as fw:
			writer = get_csv_writer(fw)
			for physical, partition, logical, fs in drives:
				write_to_csv([self.computer_name, 'Drives', physical, partition, logical, fs], writer)
		record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #16
0
	def _csv_list_share(self,share):
		"""Write one CSV row per network share (name, path)."""
		self.logger.info("Health : Listing shares")
		csv_path = self.output_dir + '_shares.csv'
		with open(csv_path,'wb') as fw:
			writer = get_csv_writer(fw)
			for share_name, share_path in share:
				write_to_csv([self.computer_name, 'Shares', share_name, share_path], writer)
		record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #17
0
def output_replies_to_ids(cf,path,ids):
    """For each id in *ids* (comma-separated string), fetch all replies and
    write them to <path><id>.csv.

    NOTE(review): assumes *path* ends with a separator, since files are
    created at path + id + ".csv" — confirm with callers.
    """
    os.makedirs(path, exist_ok=True)
    # BUG FIX: the loop previously iterated the global args.ids, silently
    # ignoring the 'ids' parameter. Also renamed the loop variable so it
    # no longer shadows the builtin id().
    for item_id in ids.split(','):
        comments = general_storage.get_item_comments(
            cf, {"id": item_id, "object_type": "post"})
        with open(path + item_id + ".csv", 'w') as csvfile:
            for tweet in comments:
                utils.write_to_csv(csvfile, [
                    tweet['post_id'], tweet['object_id'],
                    tweet['original_data']['created_at'],
                    tweet['original_data'].get('user').get('name'),
                    tweet['message']
                ])
Пример #18
0
	def _csv_list_route_table(self,routes):
		"""Write one CSV row per route-table entry (name, mask)."""
		self.logger.info('Health : Listing routes tables')
		csv_path = self.output_dir + "_routes_tables.csv"
		with open(csv_path,'ab') as fw:
			writer = get_csv_writer(fw)
			for route_name, route_mask in routes:
				write_to_csv([self.computer_name, 'Route table', unicode(route_name), unicode(route_mask)], writer)
		record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #19
0
	def _csv_list_sessions(self,sessions):
		"""Write one CSV row per active logon session."""
		self.logger.info('Health : Listing sessions')
		csv_path = self.output_dir + '_sessions.csv'
		with open(csv_path,'ab') as fw:
			writer = get_csv_writer(fw)
			for logon_id, auth_package, start_time, logon_type in sessions:
				# split('.')[0] drops the fractional-seconds suffix.
				write_to_csv([self.computer_name, 'Active sessions', unicode(logon_id),
						auth_package, unicode(start_time.split('.')[0]), unicode(logon_type)], writer)
		record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #20
0
    def _csv_open_save_MRU(self, str_opensaveMRU):
        ''' Extracts information from OpenSaveMRU registry key which contains information about opened and saved windows '''
        # TODO : Win XP
        self.logger.info('Getting open_save_MRU from registry')
        aReg = ConnectRegistry(None, HKEY_USERS)

        with open(
                self.output_dir + '\\' + self.computer_name +
                '_opensaveMRU.csv', 'wb') as output:
            csv_writer = get_csv_writer(output)
            for index_sid in range(
                    QueryInfoKey(aReg)[0]):  # the number of subkeys
                # in HKEY_USERS, we have a list of subkeys which are SIDs
                str_sid = EnumKey(aReg, index_sid)
                try:
                    username = str_sid2username(str_sid)
                    path = str_sid + '\\' + str_opensaveMRU
                    reg_opensaveMRU = OpenKey(aReg, path)
                    # One subkey per file type (extension) under OpenSaveMRU.
                    for index_clsid in range(QueryInfoKey(reg_opensaveMRU)
                                             [0]):  # the number of subkeys
                        str_filetype = EnumKey(reg_opensaveMRU, index_clsid)
                        reg_filetype = OpenKey(aReg,
                                               path + '\\' + str_filetype)
                        date_last_mod = convert_windate(
                            QueryInfoKey(reg_filetype)[2])
                        # now get the value from the SID subkey
                        for index_value in range(
                                QueryInfoKey(reg_filetype)
                            [1]):  # the number of values
                            value_filetype = EnumValue(reg_filetype,
                                                       index_value)
                            # Here, it is quite... dirty, it is a binary MRU list in which we have to extract the interesting values
                            if value_filetype[0] != 'MRUListEx':
                                l_printable = self.__extract_filename_from_PIDLMRU(
                                    value_filetype[1])

                                # VERY DIRTY, if the list is empty it's probably because the string is off by 1...
                                if len(l_printable) == 0:
                                    # So we take away the first char to have a correct offset (modulo 2)
                                    l_printable = self.__extract_filename_from_PIDLMRU(
                                        value_filetype[1][1:])
                                if len(l_printable) != 0:
                                    # Last decoded element is the filename of interest.
                                    str_printable = l_printable[-1]
                                    write_to_csv([
                                        username, str_sid, str_filetype,
                                        date_last_mod, str_printable
                                    ], csv_writer)
                                else:  # if the length is still 0 then... I'm at a loss for words
                                    write_to_csv([
                                        username, str_sid, str_filetype,
                                        date_last_mod
                                    ], csv_writer)
                        CloseKey(reg_filetype)
                    CloseKey(reg_opensaveMRU)
                except WindowsError:
                    # SIDs lacking the OpenSaveMRU key (or unreadable) are skipped.
                    pass
        CloseKey(aReg)
Пример #21
0
def output_items_random(cf,path,query,random_no):
    """Randomly pick up to *random_no* items matching *query* (search capped
    at 10*random_no hits) and dump them to <path><query>.csv."""
    os.makedirs(path, exist_ok=True)
    total, items = query_items(cf, query, random_no * 10)
    if total > 0:
        candidate_ids = es_outputs_to_ids(items)
        sample_size = min(random_no, len(items))
        chosen_ids = random.sample(candidate_ids, sample_size)
        with open(path + query + ".csv", 'w') as csvfile:
            for item in general_storage.get_items_by_ids(cf, chosen_ids):
                utils.write_to_csv(csvfile, [
                    item['post_id'], item['object_id'],
                    item['original_data']['created_at'],
                    item['original_data'].get('user').get('name'),
                    item['message']
                ])
Пример #22
0
	def _csv_list_sockets_network(self,connections):
		"""Write one CSV row per open network socket."""
		self.logger.info('Health : Listing sockets networks')
		csv_path = self.output_dir + '_sockets.csv'
		with open(csv_path,'ab') as fw:
			writer = get_csv_writer(fw)
			for pid, name, local_address, source_port, remote_addr, remote_port, status in connections:
				row = [self.computer_name, 'Sockets', unicode(pid),
						unicode(name), unicode(local_address), unicode(source_port),
						unicode(remote_addr), unicode(remote_port), unicode(status)]
				write_to_csv(row, writer)
		record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #23
0
	def _csv_list_services(self,services):
		"""Write one CSV row per Windows service.

		Note: 'name' is unpacked but not written, matching the original
		output format exactly.
		"""
		self.logger.info('Health : Listing services')
		csv_path = self.output_dir + '_services.csv'
		with open(csv_path,'ab') as fw:
			writer = get_csv_writer(fw)
			for name, caption, processId, pathName, serviceType, status, state, startMode in services:
				write_to_csv([self.computer_name, 'Services', caption,
						unicode(processId), serviceType, pathName,
						unicode(status), state, startMode], writer)
		record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #24
0
 def _csv_list_share(self, share):
     """Write one CSV row per network share (name, path)."""
     self.logger.info("Health : Listing shares")
     csv_path = self.output_dir + '_shares.csv'
     with open(csv_path, 'wb') as fw:
         writer = get_csv_writer(fw)
         for share_name, share_path in share:
             write_to_csv(
                 [self.computer_name, 'Shares', share_name, share_path],
                 writer)
     record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
def main():
    """Using the specified border crossing entry data (input file),
        returns the desired statistics. """

    # Input and Output files Error-Handling
    args = parse_args()
    if args.input is None:
        raise ImportError('Did not specify the correct input file!')
    if args.output is None:
        raise ImportError('Did not specify the correct output file!')

    # Read in the border_crossing data
    with open(args.input, mode='r') as csv_file:

        # Read the CSV data into a list of lists
        csv_reader = csv.reader(csv_file, delimiter=',')

        # Sort the list by Border, Date, and Measure in descending order
        # NOTE(review): itemgetter(3, 5) sorts on only two columns (indices
        # 3 and 5), which must match the groupby key x[3:6] below — confirm
        # the input column layout.
        sorted_list = sorted(csv_reader, key=itemgetter(3, 5))

        # Make sure the sorted_list rows are not empty
        if check_all_there(sorted_list):
            pass

        # Let's group the sorted list via the keys--border names, dates,
        # and measures, so that there are rows with the same border name, date,
        # measure, but different values! In each row, check if the
        # 6th index (this is our value) is a number and is not 0! If true, then
        # add those values together and create a new list, which holds this aggregated
        # summation of values for each border name, date, and measure
        list_with_agg_values = [
            key + [
                sum([
                    int(r[6])
                    for r in rows if r[6].isdigit() and int(r[6]) != 0
                ])
            ] for key, rows in groupby(sorted_list, key=lambda x: x[3:6])
        ]

        # x number of months -- could be a dictionary or int
        num_of_months = count_the_months(list_with_agg_values)

        # calculate the average crossing per month and per measure
        list_with_avg = calculate_average_crossing_per_month_and_measure(
            num_of_months, list_with_agg_values)

        # Sort the list by Date, Value, Measure, Border in descending order
        sorted_list_with_vbm = sorted(list_with_avg,
                                      key=itemgetter(3, 2, 0),
                                      reverse=True)
        # NOTE(review): '%d/%m/%Y %H:%M:%S %p' mixes 24-hour %H with %p
        # (AM/PM) — %p is effectively ignored by strptime here; confirm the
        # timestamp format of the input data.
        final_sorted_list = sorted(
            sorted_list_with_vbm,
            key=lambda x: datetime.strptime(x[1], '%d/%m/%Y %H:%M:%S %p'),
            reverse=True)
    write_to_csv(args.output, final_sorted_list)
Пример #26
0
def label_to_ans(path_label, path_ans):
    """Convert a label CSV into an answer CSV of 'user_id,item_id' pairs.

    Keeps only rows whose third column is '4' and whose item id is among
    the known items; duplicate pairs collapse via the set.
    """
    known_items = retrieve_item_ids()
    pairs = set()
    with open(path_label, 'r') as f_label:
        for line in f_label:
            # Skip the header row.
            if line.startswith('user'):
                continue
            fields = line.split(',')
            if fields[2] == '4' and fields[1] in known_items:
                pairs.add(','.join(fields[:2]))
    write_to_csv(path_ans, 'user_id,item_id', pairs, func=lambda x: x + '\n')
Пример #27
0
 def _csv_list_named_pipes(self, pipes):
     """Write one CSV row per named pipe and log the file's SHA-256."""
     csv_path = (self.output_dir + '\\' + self.computer_name +
                 '_named_pipes.csv')
     with open(csv_path, 'wb') as output:
         writer = get_csv_writer(output)
         for named_pipe in pipes:
             write_to_csv([self.computer_name, 'PIPES', named_pipe], writer)
     record_sha256_logs(
         csv_path,
         self.output_dir + '\\' + self.computer_name + '_sha256.log')
Пример #28
0
 def _csv_list_network_drives(self, drives):
     """Write one CSV row per network drive (caption, fs, partition name)."""
     self.logger.info("Health : Listing network drives")
     csv_path = self.output_dir + '_list_networks_drives.csv'
     with open(csv_path, 'wb') as fw:
         writer = get_csv_writer(fw)
         for caption, fs, partition_name in drives:
             write_to_csv(
                 [self.computer_name, 'Network drives', caption, fs,
                  partition_name], writer)
     record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #29
0
 def _csv_list_drives(self, drives):
     """Write one CSV row per drive (fab, partition, disk, file system)."""
     self.logger.info("Health : Listing drives")
     csv_path = self.output_dir + '_list_drives.csv'
     with open(csv_path, 'wb') as fw:
         writer = get_csv_writer(fw)
         for physical, partition, logical, fs in drives:
             write_to_csv(
                 [self.computer_name, 'Drives', physical, partition,
                  logical, fs], writer)
     record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #30
0
	def _csv_list_running_process(self,list_running):
		"""Write one CSV row per running process (pid, name, cmd, exe path)."""
		self.logger.info("Health : Listing running processes")
		csv_path = self.output_dir + '_processes.csv'
		with open(csv_path,'ab') as fw:
			writer = get_csv_writer(fw)
			for proc in list_running:
				# proc is indexable: [pid, name, cmd, exe_path, ...]
				write_to_csv([self.computer_name, 'Running processes',
						unicode(proc[0]), proc[1], unicode(proc[2]), unicode(proc[3])], writer)
		record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #31
0
def test(train_features, train_labels, test_features, optimal_lambda):
    """Train the softmax model with the given L2 lambda, log progress, and
    write test-set predictions to a CSV.

    NOTE(review): relies on module-level NUM_CLASS, MAX_EPISODES, log_path,
    output_path and helpers build_model / write_to_file / write_to_csv.
    """
    features, labels, y_pred_softmax, train_step, loss, accuracy = build_model(
        optimal_lambda)

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())

        # print('Training start ...')
        local_log_path = log_path + 'lr_l2_log.txt'
        write_to_file(local_log_path, 'Training logs\n', True)
        train_time = time.time()

        # Full-batch training: one-hot encode the labels once.
        batch_xs, batch_ys = train_features, np.eye(NUM_CLASS)[train_labels]
        for episode_i in range(MAX_EPISODES):
            _, loss_step = sess.run([train_step, loss],
                                    feed_dict={
                                        features: batch_xs,
                                        labels: batch_ys
                                    })
            print(episode_i, ' | ', loss_step)
            write_to_file(local_log_path,
                          str(episode_i) + ' | ' + str(loss_step) + '\n',
                          False)

            # Report training accuracy every 200 episodes.
            if episode_i % 200 == 0:
                train_accuracy = sess.run(accuracy,
                                          feed_dict={
                                              features: batch_xs,
                                              labels: batch_ys
                                          })
                str_i = 'step, loss, accuracy = {0} | {1:.4f} | {2:.4%}'.format(
                    episode_i, loss_step, train_accuracy)
                print(str_i)
                write_to_file(local_log_path, str_i + '\n', False)

        write_to_file(local_log_path,
                      'Training times {0}s\n'.format(time.time() - train_time),
                      False)

        test_time = time.time()

        # Build the submission rows: argmax class per test example.
        result_list = ['id,categories\n']
        y_pred = sess.run(y_pred_softmax, feed_dict={features: test_features})
        pred_cls = np.argmax(y_pred, 1)
        for i in range(len(pred_cls)):
            result_list.append(str(i) + ',' + str(pred_cls[i]) + '\n')

        write_to_file(local_log_path,
                      'Test times {0}s\n'.format(time.time() - test_time),
                      False)

        write_to_csv(output_path, result_list)
Пример #32
0
	def csv_recycle_bin(self):
		''' Exports the filenames contained in the recycle bin '''
		with open(self.output_dir + '\\' + self.computer_name + '_recycle_bin.csv', 'wb') as output:
			csv_writer = get_csv_writer(output)
			#output.write('"Computer Name"|"Type"|"Name 1"|"Name 2"\n')
			# Resolve the recycle-bin virtual folder through the shell namespace.
			idl = shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_BITBUCKET)
			desktop = shell.SHGetDesktopFolder()
			files = desktop.BindToObject(idl, None, shell.IID_IShellFolder)

			# One row per deleted item: display name plus full parsing name.
			for bin_file in files:
				write_to_csv([	self.computer_name, 'Recycle Bin', files.GetDisplayNameOf(bin_file, shellcon.SHGDN_NORMAL),
								files.GetDisplayNameOf(bin_file, shellcon.SHGDN_FORPARSING)], csv_writer)
		record_sha256_logs(self.output_dir + '\\' + self.computer_name + '_recycle_bin.csv',self.output_dir +'\\'+self.computer_name+'_sha256.log')
Пример #33
0
 def _csv_list_services(self, services):
     """Write one CSV row per Windows service.

     Note: 'name' is unpacked but not written, matching the original
     output format exactly.
     """
     self.logger.info('Health : Listing services')
     csv_path = self.output_dir + '_services.csv'
     with open(csv_path, 'ab') as fw:
         writer = get_csv_writer(fw)
         for name, caption, processId, pathName, serviceType, status, state, startMode in services:
             write_to_csv(
                 [self.computer_name, 'Services', caption,
                  unicode(processId), serviceType, pathName,
                  unicode(status), state, startMode], writer)
     record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #34
0
 def _csv_list_route_table(self, routes):
     """Write one CSV row per route-table entry (name, mask)."""
     self.logger.info('Health : Listing routes tables')
     csv_path = self.output_dir + "_routes_tables.csv"
     with open(csv_path, 'ab') as fw:
         writer = get_csv_writer(fw)
         for route_name, route_mask in routes:
             write_to_csv(
                 [self.computer_name, 'Route table',
                  unicode(route_name),
                  unicode(route_mask)], writer)
     record_sha256_logs(csv_path, self.output_dir + '_sha256.log')
Пример #35
0
    def _csv_all_modules_opened_files(self):
        ''' Outputs all processes and their opened files in a csv '''
        # Snapshot all running processes via the ToolHelp32 API.
        hProcessSnap = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0)

        pe32 = PROCESSENTRY32()
        pe32.dwSize = sizeof(PROCESSENTRY32)
        ret = Process32First(hProcessSnap, pointer(pe32))

        with open(
                self.output_dir + '\\' + self.computer_name +
                '_processes_opened_files.csv', 'wb') as output:
            csv_writer = get_csv_writer(output)

            #output.write('"Computer Name"|"PID"|"Process Name"|"File Opened"\r\n')
            # Walk the snapshot until Process32Next reports no more entries.
            while ret:
                #print "  process ID		= %d" % pe32.th32ProcessID
                try:
                    p = psutil.Process(pe32.th32ProcessID)
                    process_name = p.name()
                    self.logger.info('Getting opened files for : ' +
                                     process_name + '(' +
                                     unicode(pe32.th32ProcessID) + ')')
                    # Here, we need open a subprocess because get_open_files may hang forever
                    q = Queue()
                    process = Process(target=timer_open_files, args=(
                        p,
                        q,
                    ))
                    process.start()
                    # We wait for 2 seconds
                    process.join(2)
                    if process.is_alive():
                        # If the subprocess is still alive, assume it is hanged and kill it
                        q.close()
                        process.terminate()
                    else:
                        # Otherwise, continue normal processing
                        # (one CSV row per file handle held by the process)
                        opened_files = q.get()
                        if isinstance(opened_files, list):
                            for opened_file in opened_files:
                                write_to_csv([
                                    self.computer_name, 'Files Opened',
                                    unicode(pe32.th32ProcessID), process_name,
                                    opened_file[0]
                                ], csv_writer)
                except psutil.AccessDenied:
                    # Protected/system processes are logged and skipped.
                    self.logger.warn('Could not open handle for PID : ' +
                                     unicode(pe32.th32ProcessID))
                #ListProcessThreads( pe32.th32ProcessID )

                ret = Process32Next(hProcessSnap, pointer(pe32))
Пример #36
0
def generate_data():
    """Collect the 30 most active blocking admins per wiki (2018-08-01 to
    2018-10-31) and write the combined rows to ``top_admins.csv``.

    For each database in ``db_names`` the query counts ``block`` log actions
    per user, excluding members of the ``bot``/``flow-bot`` groups, and also
    reports whether the user accepts e-mail (``disablemail`` preference).
    Wikis that fail are reported on stdout and skipped.
    """
    print("Starting top admins...")

    # BUG FIX: 'ptwiki_p' was listed twice in the original, so its rows were
    # fetched and written to the CSV twice; the duplicate has been removed.
    db_names = [
        'plwiki_p', 'svwiki_p', 'jawiki_p', 'nlwiki_p', 'zhwiki_p', 'ptwiki_p',
        'nnwiki_p', 'ukwiki_p', 'hewiki_p', 'cswiki_p', 'tawiki_p', 'itwiki_p',
        'enwiki_p', 'dewiki_p', 'frwiki_p', 'eswiki_p', 'ruwiki_p',
        'kowiki_p', 'arwiki_p', 'fawiki_p', 'commonswiki_p', 'wikidatawiki_p',
        'metawiki_p'
    ]

    conn = conn_manager.get_conn()
    data = []

    # get data for each wiki
    for dbname in db_names:
        try:
            conn.select_db(dbname)

            # The interpolated '%s' is the wiki label (dbname minus the
            # trailing '_p'); it comes from the fixed list above, not from
            # user input, so plain %-formatting is safe here.
            sql = """
                SELECT
                    '%s' AS wiki,
                    CAST(log_user_text as CHAR) as user,
                    log_user,
                    (
                        SELECT IF (COUNT(*) > 0, 'No', 'Yes')
                        FROM user_properties
                        WHERE up_property = 'disablemail'
                        AND up_value = 1 AND up_user = log_user
                    ) AS disablemail,
                    COUNT(*) AS total_blocks
                FROM logging
                LEFT JOIN user_groups ON ug_user = log_user
                    AND ug_group IN ('bot', 'flow-bot')
                WHERE log_action = 'block' AND log_type = 'block'
                AND log_timestamp >= 20180801000000 AND log_timestamp <= 20181031235959
                AND ug_group IS NULL
                GROUP BY log_user ORDER BY total_blocks DESC LIMIT 30
            """ % dbname[:-2]

            results = fetch_all(sql)

            data.extend(results)

        except Exception as err:
            # Best-effort: a broken wiki must not abort the whole report.
            print('Something wrong with %s, %s' % (dbname, err))

    # create csv
    headers = ('wiki', 'username', 'user id', 'allow email',
               '# blocks performed')
    utils.write_to_csv('top_admins', headers, data)
	def csv_list_scheduled_jobs(self):
		"""List Windows scheduled tasks via ``schtasks.exe``.

		The raw schtasks CSV output is kept in '<prefix>_tasks.csv'; each
		CSV line is then re-emitted, prefixed with the computer name and the
		'Scheduled jobs' tag, into '<prefix>_scheduled_jobs.csv'.
		"""
		self.logger.info('Health : Listing scheduled jobs')
		file_tasks = self.output_dir + '_tasks.csv'
		with open(file_tasks, 'wb') as tasks_logs:
			# schtasks ships with Windows and can emit its listing as CSV.
			proc = subprocess.Popen(["schtasks.exe", '/query', '/fo', 'CSV'],
			                        stdout=subprocess.PIPE)
			res = proc.communicate()
			res = get_terminal_decoded_string(res[0])
			write_to_output(res, tasks_logs, self.logger)
		# BUG FIX: the original opened self.output_dir itself for the rows,
		# so they landed in the bare output prefix path instead of a named
		# CSV file (every other writer appends a '_<name>.csv' suffix).
		with open(file_tasks, "r") as fr, open(self.output_dir + '_scheduled_jobs.csv', 'ab') as fw:
			csv_writer = get_csv_writer(fw)
			for l in fr.readlines():
				# Strip the trailing newline and the surrounding quotes.
				l = l.decode('utf8')[:-1].replace('"', '')
				if l.find(',') != -1:  # skip blank/banner lines with no CSV fields
					write_to_csv([self.computer_name, 'Scheduled jobs'] + l.split(','), csv_writer)
Пример #38
0
	def _csv_windows_prefetch(self, wpref):
		"""Dump parsed Windows prefetch entries to '<computer>_prefetch.csv'
		and record the resulting file's SHA-256 in the sha256 log.

		wpref -- iterable of 9-tuples: (prefetch file, format version, size,
		executable name, create time, modify time, run count, timing dict,
		list of embedded strings).
		"""
		csv_path = self.output_dir + '\\' + self.computer_name + '_prefetch.csv'
		with open(csv_path, 'wb') as output:
			csv_writer = get_csv_writer(output)
			for prefetch_file, format_version, file_size, exec_name, tc, tm, run_count, hash_table_a, list_str_c in wpref:
				# Join the embedded strings, dropping NUL padding; every
				# entry (including the last) is terminated by ';'.
				str_c = ''.join(s.replace('\0', '') + ';' for s in list_str_c)
				row = [
					self.computer_name, 'Prefetch', prefetch_file,
					unicode(format_version), unicode(file_size),
					exec_name.replace('\00', ''),
					unicode(tc), unicode(tm), unicode(run_count),
					unicode(hash_table_a['start_time']),
					unicode(hash_table_a['duration']),
					unicode(hash_table_a['average_duration']),
					str_c,
				]
				write_to_csv(row, csv_writer)
		record_sha256_logs(csv_path, self.output_dir + '\\' + self.computer_name + '_sha256.log')
Пример #39
0
 def _csv_list_sessions(self, sessions):
     """Append the active logon sessions to '<prefix>_sessions.csv' and
     record the file's SHA-256 in the sha256 log.

     sessions -- iterable of (logon id, authentication package, start time,
     logon type) tuples.
     """
     self.logger.info('Health : Listing sessions')
     sessions_csv = self.output_dir + '_sessions.csv'
     with open(sessions_csv, 'ab') as fw:
         csv_writer = get_csv_writer(fw)
         for logon_id, auth_package, start_time, logon_type in sessions:
             # The sub-second fraction after '.' is dropped from the start time.
             row = [
                 self.computer_name, 'Active sessions',
                 unicode(logon_id), auth_package,
                 unicode(start_time.split('.')[0]),
                 unicode(logon_type),
             ]
             write_to_csv(row, csv_writer)
     record_sha256_logs(sessions_csv, self.output_dir + '_sha256.log')
Пример #40
0
	def _csv_list_arp_table(self, arp):
		"""Write the ARP table entries to '<prefix>_arp_table.csv' and record
		the file's SHA-256.

		arp -- iterable of raw text lines; a valid entry splits into exactly
		three whitespace-separated tokens (IP, MAC, status).
		"""
		self.logger.info('Health : Listing ARP tables')
		with open(self.output_dir + "_arp_table.csv", 'wb') as fw:
			csv_writer = get_csv_writer(fw)
			for entry in arp:
				# BUG FIX: str.replace returns a new string; the original
				# discarded the result, so 0xFF padding bytes were never
				# actually removed before splitting.
				entry = entry.replace('\xff', '')
				tokens = entry.split()
				# Only well-formed "<ip> <mac> <status>" lines are written.
				# (The original also built a pipe-delimited string and checked
				# it for a literal '\.' at index 1 -- a condition that could
				# never trigger; that dead code has been removed.)
				if len(tokens) == 3:
					write_to_csv([self.computer_name, 'ARP table'] + tokens, csv_writer)
		record_sha256_logs(self.output_dir + '_arp_table.csv', self.output_dir + '_sha256.log')
Пример #41
0
	def __print_regkey_values_csv(self, bKey, date_last_mod, hive_name, key_path, csv_writer, additional_data=None, optional_function=None):
		''' Write every value stored under the already-opened registry key to the CSV output.

		bKey              -- open registry key handle (as returned by OpenKey)
		date_last_mod     -- last-modified date of the key, already formatted as text
		hive_name         -- hive label prepended to the key path in each row
		key_path          -- textual path of the key, used as prefix for value names
		csv_writer        -- destination writer passed to write_to_csv
		additional_data   -- optional extra column inserted right after the computer name
		optional_function -- optional callback for REG_BINARY values; called with
		                     (value name, raw data) and expected to return a list
		Enumeration stops at the first EnvironmentError (access denied, key gone).
		'''
		for i in range(QueryInfoKey(bKey)[1]): # the number of values
			try:
				value_name=EnumValue(bKey,i)
				# 0xA0 (non-breaking space) is swapped for a plain space so paths stay readable
				subkey_path = key_path + value_name[0].replace(b'\xa0', b' ')
				node_type = ''
				values = []
				if value_name[2] == REG_MULTI_SZ: # the value is already a list of strings
					node_type = 'REG_MULTI_SZ'
					values += value_name[1] # concat both lists
				elif value_name[2] == REG_QWORD: # 64-bit value delivered as a raw byte string
					node_type = 'REG_QWORD'
					hex_str = '0x'
					for c in value_name[1]:
						hex_str += c.encode('hex') 
					values.append(hex_str) # get hexadecimal from string
				elif value_name[2] == REG_BINARY:
					node_type = 'REG_BINARY'
					if optional_function:
						# caller-supplied decoder; only its (possibly empty) result is written
						res = optional_function(value_name[0], value_name[1])
						if res:
							values += res
					else:
						values.append('')
				else:
					if value_name[2] == REG_SZ:
						node_type = 'REG_SZ'
					elif value_name[2] == REG_DWORD:
						node_type = 'REG_DWORD'
					values.append(unicode(value_name[1])) # just add the element to the list
				for value in values:
					'''if node_type != 'REG_BINARY':
						value_tmp = value.replace('","', '_')
					else:
						value_tmp = value'''
					if isinstance(value, list):
						# we want to concat list for the csv, so if it is not a list, put it in a list...
						value_tmp = value
					else:
						value_tmp = [value]
					if additional_data:
						arr_output = [self.computer_name, additional_data, date_last_mod, hive_name+'\\'+subkey_path, node_type] + value_tmp
						write_to_csv(arr_output, csv_writer)
					else:
						write_to_csv([self.computer_name, date_last_mod, hive_name+'\\'+subkey_path, node_type] + value_tmp, csv_writer)
			except EnvironmentError:
				# no more values / access denied: stop enumerating this key
				break
 def csv_list_scheduled_jobs(self):
     """List Windows scheduled tasks via ``schtasks.exe``.

     The raw schtasks CSV output is kept in '<prefix>_tasks.csv'; each CSV
     line is then re-emitted, prefixed with the computer name and the
     'Scheduled jobs' tag, into '<prefix>_scheduled_jobs.csv'.
     """
     self.logger.info('Health : Listing scheduled jobs')
     file_tasks = self.output_dir + '_tasks.csv'
     with open(file_tasks, 'wb') as tasks_logs:
         # schtasks ships with Windows and can emit its listing as CSV.
         proc = subprocess.Popen(["schtasks.exe", '/query', '/fo', 'CSV'],
                                 stdout=subprocess.PIPE)
         res = proc.communicate()
         res = get_terminal_decoded_string(res[0])
         write_to_output(res, tasks_logs, self.logger)
     # BUG FIX: the original opened self.output_dir itself for the rows, so
     # they landed in the bare output prefix path instead of a named CSV
     # file (every other writer appends a '_<name>.csv' suffix).
     with open(file_tasks, "r") as fr, open(self.output_dir + '_scheduled_jobs.csv', 'ab') as fw:
         csv_writer = get_csv_writer(fw)
         for l in fr.readlines():
             # Strip the trailing newline and the surrounding quotes.
             l = l.decode('utf8')[:-1].replace('"', '')
             if l.find(',') != -1:  # skip blank/banner lines with no CSV fields
                 write_to_csv([self.computer_name, 'Scheduled jobs'] +
                              l.split(','), csv_writer)
Пример #43
0
def get_and_save_orders(force=False, force_lookups=False, safe_regions=True):
    """Download EVE market orders region by region, annotate each order with
    region/system/type names, utf-8 encode all values, and dump everything to
    ./data/orders/orders.csv.

    force         -- bypass the on-disk order cache
    force_lookups -- bypass the cached name lookups
    safe_regions  -- when True, only regions listed in SAFE_REGIONS are fetched
    """
    lookups = get_name_lookups(force_lookups)
    region_name_by_region = lookups["regions"]
    system_name_by_system = lookups["systems"]
    type_name_by_type = lookups["types"]

    print("\n------------- Getting Orders -------------\n")
    orders = []
    num_regions = len(region_name_by_region.keys())
    for i, region in enumerate(region_name_by_region.keys()):
        region_name = region_name_by_region[region]
        if safe_regions and region_name not in SAFE_REGIONS:
            continue
        u.overwrite_print("---> Working on region: " + region_name + ". " + str(i + 1) + "/" + str(num_regions))
        url = "https://esi.evetech.net/latest/markets/" + region + "/orders/?datasource=tranquility&order_type=all"
        orders_for_region = u.get_data(
            url=url,
            fileloc="./data/orders/" + region_name + ".json",
            force=force,
            paged=True
        )
        # Drop any 'error' markers the pager may have appended (in place,
        # since get_data may hand back a shared/cached list).
        orders_for_region[:] = [o for o in orders_for_region if o != u'error']
        for order in orders_for_region:
            order["region"] = region
            order["region_name"] = region_name
            order["system_name"] = system_name_by_system[str(order["system_id"])]
            order["type_name"] = type_name_by_type[str(order["type_id"])]

        orders.extend(orders_for_region)

    # Encode everything to ensure we can write to csv
    print("\nEncoding to utf-8")
    for i, order in enumerate(orders):
        orders[i] = {k: unicode(v).encode("utf-8") for k, v in order.iteritems()}

    u.write_to_csv(
        [
            "region", "region_name", "duration", "is_buy_order", "issued", "location_id",
            "min_volume", "order_id", "price", "range", "system_id", "system_name",
            "type_id", "type_name", "volume_remain", "volume_total"
        ],
        orders,
        "./data/orders/orders.csv",
        d=True
    )

    return
Пример #44
0
 def _csv_list_arp_table(self, arp):
     """Write the ARP table entries to '<prefix>_arp_table.csv' and record
     the file's SHA-256.

     arp -- iterable of raw text lines; a valid entry splits into exactly
     three whitespace-separated tokens (IP, MAC, status).
     """
     self.logger.info('Health : Listing ARP tables')
     with open(self.output_dir + "_arp_table.csv", 'wb') as fw:
         csv_writer = get_csv_writer(fw)
         for entry in arp:
             # BUG FIX: str.replace returns a new string; the original
             # discarded the result, so 0xFF padding bytes were never
             # actually removed before splitting.
             entry = entry.replace('\xff', '')
             tokens = entry.split()
             # Only well-formed "<ip> <mac> <status>" lines are written.
             # (The original also built a pipe-delimited string and checked
             # it for a literal '\.' at index 1 -- a condition that could
             # never trigger; that dead code has been removed.)
             if len(tokens) == 3:
                 arr_to_write = [self.computer_name, 'ARP table'] + tokens
                 write_to_csv(arr_to_write, csv_writer)
     record_sha256_logs(self.output_dir + '_arp_table.csv',
                        self.output_dir + '_sha256.log')
Пример #45
0
    def collect_page(self, page):
        """Scrape the currently visible posts of the Facebook page *page*.

        Navigates to facebook.com/<page>/, scrolls ``self.depth`` times
        (waiting ``self.delay`` seconds between scrolls) to trigger lazy
        loading, expands every "See more" link, then writes one row per post
        -- poster name, post unix time, stripped post text -- to
        ``self.out_file`` via ``utils.write_to_csv``.
        """
        # navigate to page
        self.browser.get(

            'https://www.facebook.com/' + page + '/')

        # Scroll down depth-times and wait delay seconds to load
        # between scrolls
        for scroll in range(self.depth):

            # Scroll down to bottom
            self.browser.execute_script(
                "window.scrollTo(0, document.body.scrollHeight);")

            # Wait to load page
            time.sleep(self.delay)

        # Once the full page is loaded, we can start scraping
        links = self.browser.find_elements_by_link_text("See more")
        for link in links:
            link.click()
        posts = self.browser.find_elements_by_class_name(
            "userContentWrapper")
        poster_names = self.browser.find_elements_by_xpath(
            "//a[@data-hovercard-referer]")

        # NOTE(review): assumes posts and poster_names line up one-to-one;
        # an extra hovercard link would raise IndexError -- verify.
        for count, post in enumerate(posts):
            # Creating first CSV row entry with the poster name (eg. "Donald Trump")
            analysis = [poster_names[count].text]

            # Creating a time entry.
            time_element = post.find_element_by_css_selector("abbr")
            utime = time_element.get_attribute("data-utime")
            analysis.append(utime)

            # Creating post text entry
            text = post.find_element_by_class_name("userContent").text
            status = utils.strip(text)
            analysis.append(status)

            # Write row to csv
            utils.write_to_csv(self.out_file, analysis)
            # NOTE(review): the block below looks like leftover debug code:
            # it reopens post_csv.csv with mode 'w+' on EVERY iteration
            # (truncating it each time), and csv.writerow() on a bare string
            # writes each character as its own column. Confirm intent and
            # remove or fix.
            with open('post_csv.csv', 'w+', newline='', encoding='utf-8') as csv_file:
                writer = csv.writer(csv_file, delimiter=";")
                writer.writerow('my_utf8_string')
Пример #46
0
    def f(network, t, seed, max_procs):
        """Run every pre-built nengo_mpi network file for *network* whose
        process count does not exceed *max_procs*, timing each run and
        appending the wall-clock result to total_runtimes.csv.

        Note: *seed* is accepted but unused here (kept for interface parity).
        """
        loc = os.path.join(bench_loc, network)
        if not os.path.isdir(loc):
            os.makedirs(loc)

        command_template = "mpirun -np {nprocs} nengo_mpi {netfile} {t}"
        pattern = "{0}_p(?P<p>.+)_e(?P<e>.+)_c(?P<c>.+).net".format(network)

        # nengo_mpi reads these env vars to decide where to log its own timings.
        os.environ['NENGO_MPI_LOADTIMES_FILE'] = os.path.join(loc, "loadtimes.csv")
        os.environ['NENGO_MPI_RUNTIMES_FILE'] = os.path.join(loc, "runtimes.csv")

        for entry in os.listdir(loc):
            dir_path = os.path.join(loc, entry)
            if not os.path.isdir(dir_path):
                continue

            for netfile in os.listdir(dir_path):
                m = re.match(pattern, netfile)
                if not m:
                    print("** Ignoring non-network %s" % netfile)
                    continue

                n_procs = int(m.groupdict()['p'])
                if n_procs > max_procs:
                    print("** Ignoring %s because maxprocs (%s) "
                          "exceeded" % (netfile, max_procs))
                    continue

                print("** Running %s" % netfile)
                netfile_path = os.path.join(dir_path, netfile)
                command = command_template.format(
                    nprocs=n_procs, netfile=netfile_path, t=t)

                t0 = time.time()
                output = subprocess.check_output(command.split(' '))
                t1 = time.time()

                print(output)
                print("** The command took " + str(t1 - t0) + " seconds.")

                vals = dict(
                    runtimes=t1 - t0, label=netfile,
                    t=t, nprocs=n_procs)
                runtimes_file = os.path.join(loc, 'total_runtimes.csv')
                write_to_csv(runtimes_file, vals)
Пример #47
0
	def _csv_list_scheduled_jobs(self):
		"""Export Windows scheduled tasks: raw ``schtasks`` CSV output goes to
		'<prefix>_tasks.csv', the parsed rows (prefixed with the computer name)
		to '<prefix>_scheduled_jobs.csv', whose SHA-256 is then logged."""
		self.logger.info('Health : Listing scheduled jobs')
		file_tasks = self.output_dir + '_tasks.csv'
		with open(file_tasks, 'wb') as tasks_logs:
			# schtasks ships with Windows and can emit its listing as CSV.
			proc = subprocess.Popen(["schtasks.exe", '/query', '/fo', 'CSV'],
			                        stdout=subprocess.PIPE)
			res = proc.communicate()
			res = get_terminal_decoded_string(res[0])
			write_to_output(res, tasks_logs, self.logger)
		jobs_csv = self.output_dir + "_scheduled_jobs.csv"
		with open(file_tasks, "r") as fr, open(jobs_csv, 'wb') as fw:
			csv_writer = get_csv_writer(fw)
			for raw_line in fr.readlines():
				raw_line = raw_line.decode('utf8')
				# Only task lines (they contain a backslash in the task path)
				# are kept; headers and blank lines are skipped.
				if raw_line.find('\\') > 0:
					cleaned = raw_line[:-1].replace('"', '')  # drop newline + quotes
					arr_write = [self.computer_name, 'Scheduled jobs'] + cleaned.split(',')
					write_to_csv(arr_write, csv_writer)
		record_sha256_logs(self.output_dir + '_scheduled_jobs.csv', self.output_dir + '_sha256.log')
Пример #48
0
	def _csv_list_network_adapters(self, ncs):
		"""Write one CSV row per network adapter to '<prefix>_networks_cards.csv'
		and record the file's SHA-256.

		ncs -- iterable of 14-element tuples (netcard, adapter_type,
		description, mac_address, product_name, physical_adapter,
		product_name, speed, IPv4, IPv6, DHCP_server, DNS_server,
		database_path, nbtstat_value).
		NOTE(review): the producer's tuple carries product_name twice; the
		second occurrence wins in the unpack -- kept as-is to match the data.
		"""
		self.logger.info('Health : Listing network adapters')
		with open(self.output_dir + "_networks_cards.csv", 'wb') as fw:
			csv_writer = get_csv_writer(fw)
			for netcard, adapter_type, description, mac_address, product_name, physical_adapter, product_name, speed, IPv4, IPv6, DHCP_server, DNS_server, database_path, nbtstat_value in ncs:
				# Substitute missing (None) fields so every row keeps its
				# full column count. (IPv6/adapter_type intentionally use ''
				# rather than ' ' to match the original output exactly.)
				if netcard is None:
					netcard = ' '
				if adapter_type is None:
					adapter_type = ''
				if description is None:
					description = ' '
				if mac_address is None:
					mac_address = ' '
				if physical_adapter is None:
					physical_adapter = ' '
				if product_name is None:
					# BUG FIX: the original line was the bare expression
					# "product_name" (a no-op), so None leaked into the CSV.
					product_name = ' '
				if speed is None:
					speed = ' '
				if IPv4 is None:
					IPv4 = ' '
				if IPv6 is None:
					IPv6 = ''
				if DHCP_server is None:
					DHCP_server = ' '
				if DNS_server is None:
					DNS_server = ' '
				if database_path is None:
					database_path = ' '
				if nbtstat_value is None:
					nbtstat_value = ' '
				try:
					write_to_csv([self.computer_name,
					'Network adapter', netcard, adapter_type,
					description, mac_address, product_name,
					physical_adapter, speed, IPv4,
					IPv6, DHCP_server, DNS_server,
					database_path, nbtstat_value], csv_writer)
				except Exception:
					# One malformed adapter must not abort the whole listing.
					self.logger.error(traceback.format_exc())
		record_sha256_logs(self.output_dir + '_networks_cards.csv', self.output_dir + '_sha256.log')
Пример #49
0
def main(filename):
	"""Augment an employee CSV with each company's website.

	Reads *filename*, resolves a website per company, writes the augmented
	rows to a '_web'-suffixed output file and, once that output exists,
	removes the input file.
	"""
	output_filename = generate_output_filename(filename, "_web")
	headers = [
		'contact', 'company', 'title',
		'city', 'province', 'country', 'website',
	]

	rows = dict_read_csv(filename)                      # read data from csv
	enriched = get_websites(rows, filename, headers)    # look up company websites
	write_to_csv(output_filename, headers, enriched)    # persist results

	# The input is only deleted when the output was actually produced.
	if os.path.exists(output_filename):
		os.remove(filename)

	return
Пример #50
0
	def _csv_open_save_MRU(self, str_opensaveMRU):
		''' Extracts information from the OpenSaveMRU registry key, which records
		files recently opened/saved through the common Open/Save dialogs.

		str_opensaveMRU -- path of the OpenSaveMRU key relative to each user SID.
		Walks every SID under HKEY_USERS, then every file-type subkey of
		OpenSaveMRU, decodes each PIDL MRU value and writes one CSV row per
		entry (username, SID, file type, key last-modified date, file name)
		to '<computer>_opensaveMRU.csv'. SIDs without the key are skipped.
		'''
		# TODO : Win XP
		self.logger.info('Getting open_save_MRU from registry')
		aReg = ConnectRegistry(None,HKEY_USERS)
		
		with open(self.output_dir + '\\' + self.computer_name + '_opensaveMRU.csv', 'wb') as output:
			csv_writer = get_csv_writer(output)
			for index_sid in range(QueryInfoKey(aReg)[0]): # the number of subkeys
				# in HKEY_USERS, we have a list of subkeys which are SIDs
				str_sid = EnumKey(aReg, index_sid)
				try:
					username = str_sid2username(str_sid)
					path = str_sid + '\\' + str_opensaveMRU
					reg_opensaveMRU = OpenKey(aReg, path)
					for index_clsid in range(QueryInfoKey(reg_opensaveMRU)[0]): # the number of subkeys
						# each subkey of OpenSaveMRU is a file extension / type
						str_filetype = EnumKey(reg_opensaveMRU, index_clsid)
						reg_filetype = OpenKey(aReg, path + '\\' + str_filetype)
						date_last_mod = convert_windate(QueryInfoKey(reg_filetype)[2])
						# now get the value from the SID subkey
						for index_value in range(QueryInfoKey(reg_filetype)[1]): # the number of values
							value_filetype = EnumValue(reg_filetype, index_value)
							# Here, it is quite... dirty, it is a binary MRU list in which we have to extract the interesting values
							if value_filetype[0] != 'MRUListEx':
								l_printable = self.__extract_filename_from_PIDLMRU(value_filetype[1])
								
								# VERY DIRTY, if the list is empty it's probably because the string is off by 1...
								if len(l_printable) == 0:
									# So we take away the first char to have a correct offset (modulo 2)
									l_printable = self.__extract_filename_from_PIDLMRU(value_filetype[1][1:])
								if len(l_printable) != 0:
									# the last printable chunk is the file name itself
									str_printable = l_printable[-1]
									write_to_csv([username, str_sid, str_filetype, date_last_mod, str_printable], csv_writer)
								else: # if the length is still 0 then... I'm at a loss for words
									write_to_csv([username, str_sid, str_filetype, date_last_mod], csv_writer)
						CloseKey(reg_filetype)
					CloseKey(reg_opensaveMRU)
				except WindowsError:
					# SID has no OpenSaveMRU key (or access denied): skip it
					pass
		CloseKey(aReg)
Пример #51
0
 def _csv_event_logs(self, is_win_xp):
     """Prints the event logs in a csv, the called method is different for WinXP and lower.

     On XP the Security/Application/System logs are read via _list_evt_xp and
     written to '<computer>_evts.csv'. On Vista+ every channel of the event
     viewer is enumerated (EvtOpenChannelEnum / EvtNextChannelPath) and each
     channel is exported by _list_evt_vista into the 'evt' subfolder.
     """
     server = None  # name of the target computer to get event logs, None to get logs from current computer
     with open(self.output_dir + '\\' + self.computer_name + '_evts.csv', 'wb') as fw:
         csv_writer = get_csv_writer(fw)
         if is_win_xp:
             # One pass per classic XP log; each event becomes one CSV row.
             for eventCategory, sourceName, eventID, eventType, date, log in self._list_evt_xp(server, 'Security'):
                 write_to_csv([self.computer_name, 'Logs', 'Security', eventCategory, sourceName, eventID, eventType,
                               date] + log, csv_writer)
             for eventCategory, sourceName, eventID, eventType, date, log in self._list_evt_xp(server,
                                                                                               'Application'):
                 write_to_csv(
                     [self.computer_name, 'Logs', 'Application', eventCategory, sourceName, eventID, eventType,
                      date] + log, csv_writer)
             for eventCategory, sourceName, eventID, eventType, date, log in self._list_evt_xp(server, 'System'):
                 write_to_csv([self.computer_name, 'Logs', 'System', eventCategory, sourceName, eventID, eventType,
                               date] + log, csv_writer)
         else:
             # Exports everything from the event viewer
             evt_handle = win32evtlog.EvtOpenChannelEnum()
             os.mkdir(self.output_dir + r"\evt")
             while True:
                 # opening channel for enumeration
                 logtype = win32evtlog.EvtNextChannelPath(evt_handle)
                 if logtype is None:
                     # no more channels to enumerate
                     break
                     # fw.write('"Computer Name"|"Type"|"Date"|"logtype"|"log data"\n')
                 self._list_evt_vista(server, logtype)
Пример #52
0
 def csv_yara(self, path=os.environ['SYSTEMDRIVE'] + '\\'):
     """Recursively scan files under *path* with self.rules (compiled YARA
     rules) and append one row per 'main'-namespace match -- rule name plus
     the file's SHA-256 -- to '<computer>_yara.csv'.
     """
     # BUG FIX: list_files was only assigned when *path* was a directory, so
     # a plain-file path crashed with NameError in the loop below.
     list_files = []
     try:
         if os.path.isdir(path):
             list_files = os.listdir(unicode(path))
     except Exception:
         self.logger.warn(traceback.format_exc().decode(sys.stdin.encoding))
         return
     for f in list_files:
         d = os.path.join(path, f)
         if os.path.isdir(d):
             self.csv_yara(d)
         try:
             if os.path.isfile(d):
                 # 'with' closes the handle immediately (the original leaked
                 # it until garbage collection).
                 with open(d, 'rb') as fh:
                     data = fh.read()
                 matches = self.rules.match(data=data)
                 if matches:
                     sha = process_sha256(d)
                     for m in matches.get('main', []):
                         with open(self.output_dir + '\\' + self.computer_name + '_yara.csv', 'ab') as output:
                             csv_writer = get_csv_writer(output)
                             write_to_csv(['yara', d, f, m, sha.hexdigest()], csv_writer)
         except Exception:
             # Best-effort scan: log and keep going on unreadable files.
             self.logger.error(traceback.format_exc())
Пример #53
0
	def csv_sha256(self, path=os.environ['SYSTEMDRIVE'] + '\\'):
		"""Recursively walk *path* and append the SHA-256 of every regular
		file to '<computer>_sha256.csv'. Unlistable directories are logged
		and skipped; unreadable/undecodable files are silently skipped."""
		try:
			entries = os.listdir(unicode(path))
		except Exception:
			self.logger.warn("Cannot list " + path)
			return
		for entry in entries:
			full_path = os.path.join(path, entry)
			if os.path.isdir(full_path):
				self.csv_sha256(full_path)
				continue
			if not os.path.isfile(full_path):
				continue
			try:
				digest = process_sha256(full_path)
				csv_path = self.output_dir + '\\' + self.computer_name + '_sha256.csv'
				with open(csv_path, 'ab') as output:
					writer = get_csv_writer(output)
					write_to_csv(['sha256', full_path, digest.hexdigest()], writer)
			except (UnicodeError, IOError, ValueError):
				# best-effort hashing: skip files we cannot read or decode
				pass
Пример #54
0
	def _csv_all_modules_dll(self):
		''' Outputs all processes and their opened dll in a csv.

		Iterates the toolhelp process snapshot; for each process the module
		list comes from self._GetProcessModules (its first element is the
		process name, the remaining ones the loaded modules) and each module
		becomes one row of '<computer>_processes_dll.csv'.
		NOTE(review): the snapshot handle hProcessSnap is never closed here --
		confirm whether CloseHandle is called elsewhere.
		'''
		hProcessSnap = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS , 0 )
	
		pe32 = PROCESSENTRY32()
		pe32.dwSize = sizeof(PROCESSENTRY32)
		ret = Process32First(hProcessSnap, pointer(pe32))
		
		with open(self.output_dir + '\\' + self.computer_name + '_processes_dll.csv', 'wb') as output:
			csv_writer = get_csv_writer(output)
			#output.write('"Computer Name"|"Type"|"PID"|"Name"|"Module"\r\n')
			while ret:
				self.logger.info("  process ID		= %d" % pe32.th32ProcessID)
				
				modules = self._GetProcessModules(pe32.th32ProcessID, False)
				if len(modules) > 0:
					process_name = modules.pop(0) # first element is the name of the process
					for module in modules:
						write_to_csv([self.computer_name, 'DLL', unicode(pe32.th32ProcessID), process_name, module], csv_writer)
				#ListProcessThreads( pe32.th32ProcessID )
				
				ret = Process32Next(hProcessSnap, pointer(pe32))
Пример #55
0
	def _csv_all_modules_opened_files(self):
		''' Outputs all processes and their opened files in a csv.

		Walks the toolhelp process snapshot and, for each process, queries its
		open file handles via psutil in a separate worker process
		(timer_open_files), because get_open_files() can hang forever: the
		worker gets 2 seconds, after which it is terminated and the process
		is skipped. Results go to '<computer>_processes_opened_files.csv'.
		NOTE(review): the snapshot handle hProcessSnap is never closed here --
		confirm whether CloseHandle is called elsewhere.
		'''
		hProcessSnap = CreateToolhelp32Snapshot( TH32CS_SNAPPROCESS , 0 )
	
		pe32 = PROCESSENTRY32()
		pe32.dwSize = sizeof(PROCESSENTRY32)
		ret = Process32First(hProcessSnap, pointer(pe32))
		
		with open(self.output_dir + '\\' + self.computer_name + '_processes_opened_files.csv', 'wb') as output:
			csv_writer = get_csv_writer(output)
			
			#output.write('"Computer Name"|"PID"|"Process Name"|"File Opened"\r\n')
			while ret:
				#print "  process ID		= %d" % pe32.th32ProcessID
				try:
					p = psutil.Process(pe32.th32ProcessID)
					process_name = p.name()
					self.logger.info('Getting opened files for : ' + process_name + '(' + unicode(pe32.th32ProcessID) + ')')
					# Here, we need open a subprocess because get_open_files may hang forever
					q = Queue()
					process = Process(target=timer_open_files, args=(p,q,))
					process.start()
					# We wait for 2 seconds
					process.join(2)
					if process.is_alive():
						# If the subprocess is still alive, assume it is hanged and kill it
						q.close()
						process.terminate()
					else:
						# Otherwise, continue normal processing.
						# NOTE(review): q.get() blocks; this assumes the worker
						# always puts exactly one item before exiting -- verify
						# timer_open_files.
						opened_files = q.get()
						if isinstance(opened_files, list):
							for opened_file in opened_files:
								write_to_csv([self.computer_name, 'Files Opened', unicode(pe32.th32ProcessID), process_name, opened_file[0]], csv_writer)
				except psutil.AccessDenied:
					# expected for protected/system processes; skip them
					self.logger.warn('Could not open handle for PID : ' + unicode(pe32.th32ProcessID))
				#ListProcessThreads( pe32.th32ProcessID )
				
				ret = Process32Next(hProcessSnap, pointer(pe32))
Пример #56
0
	def __print_regkey_csv(self, bKey, key_path, csv_writer, is_recursive, subkey_type_to_query, additional_info_function):
		'''Recursively walk the registry below *bKey* and write one CSV row per key.

		bKey                     -- open registry key handle to enumerate
		key_path                 -- textual path prefix used in the output rows
		csv_writer               -- writer passed to write_to_csv
		is_recursive             -- when True, also dump the key's values and descend into subkeys
		subkey_type_to_query     -- only visit subkeys whose name contains this string (None = all)
		additional_info_function -- optional callback producing an extra info column from a subkey name
		Enumeration stops at the first EnvironmentError (access denied, key vanished, ...).
		'''
		for i in range(QueryInfoKey(bKey)[0]):
			try:
				subkey_name=EnumKey(bKey,i)
				if subkey_type_to_query is None or subkey_type_to_query in subkey_name:
					# if it is None, then we go inside no matter what, else we check if it is in the name
					key_info = ''
					if additional_info_function:
						# this function is a parameter, it is None by default
						key_info = additional_info_function(subkey_name)
					subkey=OpenKey(bKey,subkey_name)
					subkey_path = key_path + subkey_name + '\\'
					node_type = 'Key'
					date_last_mod = convert_windate(QueryInfoKey(subkey)[2])
					write_to_csv([self.computer_name, date_last_mod, 'HKEY_LOCAL_MACHINE', subkey_path, node_type, key_info], csv_writer)
					if is_recursive:
						self.__print_regkey_values_csv(subkey, date_last_mod, 'HKEY_LOCAL_MACHINE', subkey_path, csv_writer, is_recursive, subkey_type_to_query) # print the values first
						# BUG FIX: the original recursion passed only 3 of the 6
						# required arguments, raising TypeError on the first
						# recursive call; forward the caller's options unchanged.
						self.__print_regkey_csv(subkey, subkey_path, csv_writer, is_recursive, subkey_type_to_query, additional_info_function) # and then go deeper in the tree
			except EnvironmentError:
				break
Пример #57
0
 def csv_sha256(self, path=os.environ['SYSTEMDRIVE'] + '\\'):
     """Recursively hash every regular file under *path* (SHA-256) and append
     the results to '<computer>_sha256.csv'; when self.extractCerfif is set,
     the signature issuer/subject are extracted per file as extra columns.
     """
     # BUG FIX: list_files was only assigned when *path* was a directory, so
     # a plain-file path crashed with NameError in the loop below.
     list_files = []
     try:
         if os.path.isdir(path):
             list_files = os.listdir(unicode(path))
     except Exception:
         self.logger.error(traceback.format_exc().decode(sys.stdin.encoding))
         return
     for f in list_files:
         d = os.path.join(path, f)
         if os.path.isdir(d):
             self.csv_sha256(d)
         elif os.path.isfile(d):
             try:
                 sha = process_sha256(d)
                 issuer = ''
                 subject = ''
                 if self.extractCerfif:
                     issuer, subject = self._extractSignature(d)
                 with open(self.output_dir + '\\' + self.computer_name + '_sha256.csv', 'ab') as output:
                     csv_writer = get_csv_writer(output)
                     write_to_csv(['sha256', d, sha.hexdigest(), issuer, subject], csv_writer)
             except Exception:
                 # Best-effort hashing: log and continue with the next file.
                 self.logger.error(traceback.format_exc())