Code Example #1
File: directives.py  Project: vaginessa/analyzeEVTX
 def _prepare_args(self):
     '''
     @ParseDirectiveMixin._prepare_args
     '''
     self.conn_string = self._prepare_conn_string(self.args)
     self.manager = DBManager(conn_string=self.conn_string, metadata=db.BaseTable.metadata)
     self.manager.initialize(bootstrap=True)
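Example #1 bootstraps the database schema by handing the project's SQLAlchemy metadata to DBManager and calling initialize(bootstrap=True). As a point of reference, a rough equivalent using plain SQLAlchemy (with a placeholder table and connection string, not the project's own) might look like this sketch:

# Hedged sketch of a "bootstrap" step with plain SQLAlchemy; the table and
# connection string below are illustrative, not taken from analyzeEVTX.
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class ExampleLedger(Base):
    # Placeholder standing in for the project's db.BaseTable subclasses
    __tablename__ = 'example_ledger'
    id = Column(Integer, primary_key=True)
    sha2hash = Column(String)

def bootstrap(conn_string='sqlite:///example.db'):
    # Create the engine and emit CREATE TABLE for any missing tables,
    # which is the usual meaning of a bootstrap/initialize step.
    engine = create_engine(conn_string)
    Base.metadata.create_all(engine)
    return engine, sessionmaker(bind=engine)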
Code Example #2
File: directives.py  Project: vaginessa/analyzeEVTX
 def run(self):
     '''
     Args:
         N/A
     Procedure:
         Query EVTX information from database
     Preconditions:
         @BaseDirective.run_directive
         @ParseDBDirective.run (args.db_*)
         @ParseCSVDirective.run (args.target, args.sep)
         self.args.query is of type String
         self.args.title is of type String
     '''
     assert isinstance(self.args.query, str), 'Query is not of type String'
     if self.args.target is not None:
         assert path.isdir(path.dirname(self.args.target)), 'Target does not point to existing directory'
     conn_string = self._prepare_conn_string(self.args)
     manager = DBManager(conn_string=conn_string, metadata=db.BaseTable.metadata)
     manager.initialize(create_session=True)
     try:
         result_proxy = manager.session.execute(text(self.args.query))
     except Exception as e:
         Logger.error('Failed to submit query to database (%s)'%(str(e)))
     else:
         headers = result_proxy.keys()
         resultset = result_proxy.fetchall()
         if len(resultset) > 0:
             if self.args.target is not None:
                 self.args.target = path.abspath(self.args.target)
                 try:
                     with open(self.args.target, 'a') as target:
                         target.write(self.args.sep.join(headers) + '\n')
                         for result in resultset:
                             try:
                                 target.write(self.args.sep.join([str(item) for item in result]) + '\n')
                             except Exception as e:
                                 Logger.error('Failed to write result to output file %s (%s)'%(self.args.target, str(e)))
                 except Exception as e:
                     Logger.error('Failed to write results to output file %s (%s)'%(self.args.target, str(e)))
             else:
                 if sys.stdout.isatty():
                     table_data = [headers]
                     for result in resultset:
                         table_data.append([str(item) for item in result])
                     table = AsciiTable(table_data)
                     if self.args.title:
                         table.title = self.args.title
                     print(table.table)
                 else:
                     print(self.args.sep.join(headers))
                     for result in resultset:
                         print(self.args.sep.join([str(item) for item in result]))
         else:
             Logger.info('No results found for query %s'%self.args.query)
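Example #2 switches output format on whether stdout is a terminal: interactive runs get a terminaltables AsciiTable, piped runs get separator-joined lines. The same pattern in isolation, as a self-contained sketch (render_results is a hypothetical helper, not part of the project):

import sys
from terminaltables import AsciiTable

def render_results(headers, rows, sep=',', title=None):
    # Pretty table for a terminal, machine-readable lines when piped.
    if sys.stdout.isatty():
        table_data = [list(headers)] + [[str(item) for item in row] for row in rows]
        table = AsciiTable(table_data)
        if title:
            table.title = title
        print(table.table)
    else:
        print(sep.join(headers))
        for row in rows:
            print(sep.join(str(item) for item in row))

# Example usage with dummy data:
# render_results(['id', 'source'], [(1, 'Security'), (2, 'System')], sep='|')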
Code Example #3
File: directives.py  Project: vaginessa/analyzePF
 def _prepare_worker_pools(self):
     '''
     @ParseDirectiveMixin._prepare_worker_pools
     '''
     if self.pools is None:
         self.pools = Container()
     self.pools.progress = parallel.WorkerPool(\
         parallel.JoinableQueue(-1),
         tasks.ParseDBTaskStage2,
         daemonize=False,
         worker_class=parallel.DBProgressTrackerWorker,
         worker_count=1,
         worker_kwargs=dict(\
             log_path=self.args.log_path,
             pcount=len(self.frontier),
             pdesc='Total',
             punit='files',
             manager=DBManager(conn_string=self.conn_string)\
         )\
     )
     self.pools.parser = parallel.WorkerPool(\
         parallel.JoinableQueue(-1),
         tasks.ParseDBTaskStage1,
         daemonize=False,
         worker_count=self.args.threads,
         worker_kwargs=dict(\
             result_queue=self.pools.progress.queue,
             log_path=self.args.log_path\
         )
     )
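Example #3 chains two worker pools into a pipeline: the stage-1 parser workers push their output into the queue that feeds the single stage-2 progress-tracker/DB-writer worker. WorkerPool and the task classes are project-specific; the same two-stage layout with the standard multiprocessing module (poison pills signalled with None) could be sketched as:

# Minimal two-stage pipeline sketch; not the project's WorkerPool implementation.
import multiprocessing as mp

def parser_worker(task_queue, result_queue):
    # Stage 1: transform raw tasks and forward them to stage 2.
    while True:
        task = task_queue.get()
        if task is None:            # poison pill: stop this worker
            task_queue.task_done()
            break
        result_queue.put(('parsed', task))
        task_queue.task_done()

def writer_worker(result_queue):
    # Stage 2: single consumer that would persist results (here it just prints).
    while True:
        result = result_queue.get()
        if result is None:
            result_queue.task_done()
            break
        print(result)
        result_queue.task_done()

if __name__ == '__main__':
    tasks_q, results_q = mp.JoinableQueue(), mp.JoinableQueue()
    writer = mp.Process(target=writer_worker, args=(results_q,))
    parsers = [mp.Process(target=parser_worker, args=(tasks_q, results_q)) for _ in range(2)]
    writer.start()
    for p in parsers:
        p.start()
    for item in ['a.pf', 'b.pf']:
        tasks_q.put(item)
    tasks_q.join()                  # wait for stage 1 to drain
    results_q.join()                # wait for stage 2 to drain
    results_q.put(None)             # poison pill for stage 2
    for _ in parsers:
        tasks_q.put(None)           # poison pills for stage 1
    for p in parsers:
        p.join()
    writer.join()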
Code Example #4
File: directives.py  Project: vaginessa/analyzePF
class ParseDBDirective(ParseDirectiveMixin, BaseDirective, DBConnectionMixin):
    '''
    Directive for parsing Prefetch file to DB format
    '''
    def __init__(self, args):
        self._frontier = None
        self._pools = None
        self._conn_string = None
        self._manager = None
        super(ParseDBDirective, self).__init__(args)

    @property
    def conn_string(self):
        '''
        @conn_string.getter
        '''
        return self._conn_string

    @conn_string.setter
    def conn_string(self, value):
        '''
        @conn_string.setter
        Preconditions:
            value is of type String
        '''
        assert isinstance(value, str), 'Value is not of type String'
        self._conn_string = value

    @property
    def manager(self):
        '''
        @manager.getter
        '''
        return self._manager

    @manager.setter
    def manager(self, value):
        '''
        @manager.setter
        Preconditions:
            value is of type DBManager  (assumed True)
        '''
        self._manager = value

    def _prepare_args(self):
        '''
        @ParseDirectiveMixin._prepare_args
        '''
        self.conn_string = self._prepare_conn_string(self.args)
        self.manager = DBManager(conn_string=self.conn_string,
                                 metadata=BaseTable.metadata)
        self.manager.initialize(bootstrap=True)
        self.manager.engine.dispose()
        self.manager = None

    def _prepare_frontier(self):
        '''
        @ParseDirectiveMixin._prepare_frontier
        '''
        self.frontier = self._get_frontier(self.args.sources)

    def _should_parse(self):
        '''
        @ParseDirectiveMixin._should_parse
        '''
        return len(self.frontier) > 0

    def _prepare_worker_pools(self):
        '''
        @ParseDirectiveMixin._prepare_worker_pools
        '''
        if self.pools is None:
            self.pools = Container()
        self.pools.progress = parallel.WorkerPool(\
            parallel.JoinableQueue(-1),
            tasks.ParseDBTaskStage2,
            daemonize=False,
            worker_class=parallel.DBProgressTrackerWorker,
            worker_count=1,
            worker_kwargs=dict(\
                log_path=self.args.log_path,
                pcount=len(self.frontier),
                pdesc='Total',
                punit='files',
                manager=DBManager(conn_string=self.conn_string)\
            )\
        )
        self.pools.parser = parallel.WorkerPool(\
            parallel.JoinableQueue(-1),
            tasks.ParseDBTaskStage1,
            daemonize=False,
            worker_count=self.args.threads,
            worker_kwargs=dict(\
                result_queue=self.pools.progress.queue,
                log_path=self.args.log_path\
            )
        )

    def _parse_preamble(self):
        '''
        @ParseDirectiveMixin._parse_preamble
        '''
        tqdm.set_lock(parallel.RLock())

    def _parse_loop(self):
        '''
        @ParseDirectiveMixin._parse_loop
        '''
        self.pools.progress.start()
        self.pools.parser.start()
        for nodeidx, node in enumerate(self.frontier):
            Logger.info('Parsing prefetch file %s (node %d)' % (node, nodeidx))
            self.pools.parser.add_task(node)
        self.pools.parser.join_tasks()
        self.pools.progress.join_tasks()
        self.pools.progress.add_poison_pills()
        self.pools.progress.join_workers()
        self.pools.parser.add_poison_pills()
        self.pools.parser.join_workers()

    def _parse_postamble(self):
        '''
        @ParseDirectiveMixin._parse_postamble
        '''
        pass

    def run(self):
        '''
        Args:
            N/A
        Procedure:
            Parse Prefetch information to database
        Preconditions:
            @BaseDirective.run_directive
            self.args.db_driver is of type String
            self.args.db_name is of type String
            self.args.db_conn_string is of type String
            self.args.db_user is of type String
            self.args.db_passwd is of type String
            self.args.db_host is of type String
            self.args.db_port is of type String
            one of the following conditions must be true:
                1) self.args.db_driver is sqlite and self.args.db_name is a valid path
                2) self.args.db_conn_string is not None and is valid connection string 
                3) self.args.db_user, self.args.db_passwd, self.args.db_host, and self.args.db_port are not None
        '''
        super(ParseDBDirective, self).run()
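Example #4 defines only hook methods; the actual control flow lives in ParseDirectiveMixin.run(), which these excerpts do not show. Judging from the hook names, it plausibly follows a template-method pattern along these lines (the ordering here is inferred, not taken from the analyzePF source):

class ParseDirectiveMixinSketch(object):
    # Hypothetical reconstruction of the mixin's template method; the real
    # ParseDirectiveMixin in analyzePF/analyzeEVTX may differ in detail.
    def run(self):
        self._prepare_args()            # build conn string, bootstrap schema
        self._prepare_frontier()        # collect the files to parse
        if not self._should_parse():    # nothing to do: bail out early
            return
        self._prepare_worker_pools()    # wire up parser and progress pools
        self._parse_preamble()          # e.g. install the shared tqdm lock
        self._parse_loop()              # feed tasks and join the pools
        self._parse_postamble()         # cleanup hook (no-op in example #4)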
Code Example #5
File: directives.py  Project: vaginessa/analyzeEVTX
 def _parse_loop(self):
     '''
     @ParseDirectiveMixin._parse_loop
     '''
     try:
         self.pools.parser.start()
         record_count = 0
         with tqdm(total=len(self.frontier), desc='Total', unit='files') as node_progress:
             for nodeidx, node in enumerate(self.frontier):
                 Logger.info('Parsing EVTX file %s (node %d)'%(node, nodeidx))
                 evtx_file = EventLogX(node)
                 try:
                     recordidx = 0
                     remaining_count = self._get_remaining_count(node, record_count, self.args.count)
                     if remaining_count > 0:
                         try:
                             if self.manager.session is None:
                                 try:
                                     self.manager.create_session()
                                 except Exception as e:
                                     Logger.critical('Failed to establish database session (%s)'%str(e))
                                     break
                             metadata = evtx_file.get_metadata()
                             fileledger = self.manager.query(db.FileLedger, sha2hash=metadata.sha2hash).first()
                             if fileledger is not None:
                                 for field in metadata:
                                     metadata[field] = getattr(fileledger, field)
                                 metadata.id = fileledger.id
                             else:
                                 fileledger = db.FileLedger().populate_fields(metadata)
                                 try:
                                     self.manager.add(fileledger, commit=True)
                                 except Exception as e:
                                     Logger.error('Failed to add metadata for %s to database (%s)'%(node, str(e)))
                                     continue
                                 else:
                                     metadata.id = fileledger.id
                         except Exception as e:
                             Logger.error('Failed to get metadata for file %s (%s)'%(node, str(e)))
                             continue
                         else:
                             self.manager.close_session()
                             self.manager.engine.dispose()
                             self.pools.progress.worker_kwargs = dict(\
                                 log_path=self.args.log_path,
                                 pcount=remaining_count,
                                 pdesc='%d. %s'%(nodeidx, path.basename(node)),
                                 punit='records',
                                 manager=DBManager(conn_string=self.conn_string)\
                             )
                             self.pools.progress.refresh()
                             self.pools.progress.start()
                             for evtx_record in evtx_file.records:
                                 if remaining_count == 0:
                                     break
                                 self.pools.parser.add_task(evtx_record, nodeidx, recordidx, metadata)
                                 recordidx += 1
                                 remaining_count -= 1
                 finally:
                     record_count += (recordidx + 1)
                 self.pools.parser.join_tasks()
                 self.pools.progress.join_tasks()
                 self.pools.progress.add_poison_pills()
                 self.pools.progress.join_workers()
                 node_progress.update(1)
                 if record_count >= self.args.count:
                     break
         self.pools.parser.add_poison_pills()
         self.pools.parser.join_workers()
     finally:
         self.manager.close_session()
Code Example #6
File: directives.py  Project: vaginessa/analyzeEVTX
class ParseDBDirective(ParseDirectiveMixin, BaseDirective, DBConnectionMixin):
    '''
    Directive for parsing EVTX file to DB format
    '''
    def __init__(self, args):
        self._frontier = None
        self._pools = None
        self._conn_string = None
        self._manager = None
        super(ParseDBDirective, self).__init__(args)
    @property
    def conn_string(self):
        '''
        @conn_string.getter
        '''
        return self._conn_string
    @conn_string.setter
    def conn_string(self, value):
        '''
        @conn_string.setter
        Preconditions:
            value is of type String
        '''
        assert isinstance(value, str), 'Value is not of type String'
        self._conn_string = value
    @property
    def manager(self):
        '''
        @manager.getter
        '''
        return self._manager
    @manager.setter
    def manager(self, value):
        '''
        @manager.setter
        Preconditions:
            value is of type DBManager  (assumed True)
        '''
        self._manager = value
    def _prepare_args(self):
        '''
        @ParseDirectiveMixin._prepare_args
        '''
        self.conn_string = self._prepare_conn_string(self.args)
        self.manager = DBManager(conn_string=self.conn_string, metadata=db.BaseTable.metadata)
        self.manager.initialize(bootstrap=True)
    def _prepare_frontier(self):
        '''
        @ParseDirectiveMixin._prepare_frontier
        '''
        self.frontier = self._get_frontier(self.args.sources)
    def _should_parse(self):
        '''
        @ParseDirectiveMixin._should_parse
        '''
        return len(self.frontier) > 0
    def _prepare_worker_pools(self):
        '''
        @ParseDirectiveMixin._prepare_worker_pools
        '''
        if self.pools is None:
            self.pools = Container()
        self.pools.progress = parallel.WorkerPool(\
            parallel.JoinableQueue(-1), 
            tasks.ParseDBTaskStage2,
            daemonize=False, 
            worker_class=parallel.DBProgressTrackerWorker,
            worker_count=1\
        )
        self.pools.parser = parallel.WorkerPool(\
            parallel.JoinableQueue(-1), 
            tasks.ParseDBTaskStage1, 
            daemonize=False, 
            worker_count=self.args.threads,
            worker_kwargs=dict(\
                result_queue=self.pools.progress.queue, 
                log_path=self.args.log_path\
            )
        )
    def _parse_preamble(self):
        '''
        @ParseDirectiveMixin._parse_preamble
        '''
        tqdm.set_lock(parallel.RLock())
    def _parse_loop(self):
        '''
        @ParseDirectiveMixin._parse_loop
        '''
        try:
            self.pools.parser.start()
            record_count = 0
            with tqdm(total=len(self.frontier), desc='Total', unit='files') as node_progress:
                for nodeidx, node in enumerate(self.frontier):
                    Logger.info('Parsing EVTX file %s (node %d)'%(node, nodeidx))
                    evtx_file = EventLogX(node)
                    try:
                        recordidx = 0
                        remaining_count = self._get_remaining_count(node, record_count, self.args.count)
                        if remaining_count > 0:
                            try:
                                if self.manager.session is None:
                                    try:
                                        self.manager.create_session()
                                    except Exception as e:
                                        Logger.critical('Failed to establish database session (%s)'%str(e))
                                        break
                                metadata = evtx_file.get_metadata()
                                fileledger = self.manager.query(db.FileLedger, sha2hash=metadata.sha2hash).first()
                                if fileledger is not None:
                                    for field in metadata:
                                        metadata[field] = getattr(fileledger, field)
                                    metadata.id = fileledger.id
                                else:
                                    fileledger = db.FileLedger().populate_fields(metadata)
                                    try:
                                        self.manager.add(fileledger, commit=True)
                                    except Exception as e:
                                        Logger.error('Failed to add metadata for %s to database (%s)'%(node, str(e)))
                                        continue
                                    else:
                                        metadata.id = fileledger.id
                            except Exception as e:
                                Logger.error('Failed to get metadata for file %s (%s)'%(node, str(e)))
                                continue
                            else:
                                self.manager.close_session()
                                self.manager.engine.dispose()
                                self.pools.progress.worker_kwargs = dict(\
                                    log_path=self.args.log_path,
                                    pcount=remaining_count,
                                    pdesc='%d. %s'%(nodeidx, path.basename(node)),
                                    punit='records',
                                    manager=DBManager(conn_string=self.conn_string)\
                                )
                                self.pools.progress.refresh()
                                self.pools.progress.start()
                                for evtx_record in evtx_file.records:
                                    if remaining_count == 0:
                                        break
                                    self.pools.parser.add_task(evtx_record, nodeidx, recordidx, metadata)
                                    recordidx += 1
                                    remaining_count -= 1
                    finally:
                        record_count += (recordidx + 1)
                    self.pools.parser.join_tasks()
                    self.pools.progress.join_tasks()
                    self.pools.progress.add_poison_pills()
                    self.pools.progress.join_workers()
                    node_progress.update(1)
                    if record_count >= self.args.count:
                        break
            self.pools.parser.add_poison_pills()
            self.pools.parser.join_workers()
        finally:
            self.manager.close_session()
    def _parse_postamble(self):
        '''
        @ParseDirectiveMixin._parse_postamble
        '''
        pass
    def run(self):
        '''
        Args:
            N/A
        Procedure:
            Parse EVTX information to database
        Preconditions:
            @BaseDirective.run_directive
            self.args.db_driver is of type String
            self.args.db_name is of type String
            self.args.db_conn_string is of type String
            self.args.db_user is of type String
            self.args.db_passwd is of type String
            self.args.db_host is of type String
            self.args.db_port is of type String
            one of the following conditions must be true:
                1) self.args.db_driver is sqlite and self.args.db_name is a valid path
                2) self.args.db_conn_string is not None and is valid connection string 
                3) self.args.db_user, self.args.db_passwd, self.args.db_host, and self.args.db_port are not None
        '''
        super(ParseDBDirective, self).run()
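The run() preconditions in examples #4 and #6 allow three routes to a connection string: a sqlite driver plus database path, an explicit connection string, or individual user/password/host/port fields. The project's DBConnectionMixin._prepare_conn_string is not shown in these excerpts, but that selection logic could be sketched with SQLAlchemy's URL builder roughly as follows (illustrative only):

from sqlalchemy.engine import URL

def prepare_conn_string(args):
    # Illustrative only; the real DBConnectionMixin._prepare_conn_string may differ.
    if args.db_conn_string is not None:
        return args.db_conn_string                      # case 2: explicit connection string
    if args.db_driver == 'sqlite':
        return 'sqlite:///%s' % args.db_name            # case 1: sqlite file path (relative form)
    return str(URL.create(                              # case 3: assembled from parts
        drivername=args.db_driver,
        username=args.db_user,
        password=args.db_passwd,
        host=args.db_host,
        port=int(args.db_port),
        database=args.db_name,
    ))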