Example #1
    def get_blocks(self, kw, regex):
        """ Updates variable self.blocks, that stores set of blocks for kw and partition, creating new 'block' and 'hits' files """
        self.blocks_file_path = os.path.join(self.search_path,
                                             "blocks_{}".format(kw))
        hits_file = os.path.join(self.search_path, "hits_%s" % kw)

        # Create hits file if not found
        if not check_file(hits_file) or os.path.getsize(hits_file) == 0:
            self.logger().debug('Creating {} file'.format("hits_%s" % kw))
            extra_args = {'write_header': True, 'file_exists': 'OVERWRITE'}
            save_csv(self.search_strings(kw, regex),
                     config=self.config,
                     outfile=hits_file,
                     **extra_args)

        # Create the blocks file if not found; otherwise load it
        if not check_file(self.blocks_file_path) or os.path.getsize(
                self.blocks_file_path) == 0:
            self.blocks[kw] = defaultdict(list)
            cmd = "sed -n '1!p' {} | cut -d ';' -f1,3 | sort | uniq".format(
                hits_file)
            for line in yield_command(cmd, logger=self.logger()):
                part, blk = line.split(';')
                part = part.strip('"')
                self.blocks[kw][part].append(int(blk.strip('"').rstrip('\n')))
            self.save_blocks_file(self.blocks[kw], kw)
        else:
            self.logger().info('Loading {} file'.format("blocks_%s" % kw))
            try:
                with open(self.blocks_file_path, "r") as block_file:
                    self.blocks[kw] = json.load(block_file)
            except Exception as exc:
                self.logger().error('Cannot load {}'.format(
                    self.blocks_file_path))
                raise exc
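
All the examples on this page feed save_csv an iterable (usually a generator) of dictionaries together with keyword arguments such as outfile, file_exists, write_header and quoting. As a rough orientation only, here is a minimal sketch of what such a helper could look like; save_csv_sketch, its ';' delimiter and its defaults are assumptions inferred from these calls, not the toolkit's actual implementation.

import csv


def save_csv_sketch(rows, outfile, config=None, file_exists='OVERWRITE',
                    write_header=True, quoting=csv.QUOTE_MINIMAL,
                    delimiter=';', **kwargs):
    """Hypothetical stand-in for save_csv: writes an iterable of dicts to a CSV file.

    Only the keyword arguments visible in the calls on this page are mirrored;
    'config' and any extra kwargs are accepted but ignored in this sketch.
    """
    rows = iter(rows)
    try:
        first = next(rows)
    except StopIteration:
        return  # nothing to write, leave the output file untouched
    mode = 'a' if file_exists == 'APPEND' else 'w'
    with open(outfile, mode, newline='') as out:
        writer = csv.DictWriter(out, fieldnames=list(first.keys()),
                                delimiter=delimiter, quoting=quoting)
        if write_header:
            writer.writeheader()
        writer.writerow(first)
        for row in rows:
            writer.writerow(row)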
Example #2
    def parse_SysCache_hive(self):
        outfolder = self.myconfig('voutdir') if self.vss else self.myconfig(
            'outdir')
        # self.tl_file = os.path.join(self.myconfig('timelinesdir'), "%s_BODY.csv" % self.myconfig('source'))
        check_directory(outfolder, create=True)
        SYSC = self.search.search(r"/System Volume Information/SysCache.hve$")

        ripcmd = self.config.get('plugins.common', 'rip',
                                 '/opt/regripper/rip.pl')

        for f in SYSC:
            p = f.split('/')[2]
            output_text = run_command([
                ripcmd, "-r",
                os.path.join(self.myconfig('casedir'), f), "-p", "syscache_csv"
            ],
                                      logger=self.logger())
            output_file = os.path.join(outfolder, "syscache_%s.csv" % p)

            self.path_from_inode = FileSystem(
                config=self.config).load_path_from_inode(self.myconfig,
                                                         p,
                                                         vss=self.vss)

            save_csv(self.parse_syscache_csv(p, output_text),
                     outfile=output_file,
                     file_exists='OVERWRITE')

        self.logger().info("Finished extraction from SysCache")
Example #3
    def run(self, path=""):
        vss = self.myflag('vss')
        self.search = GetFiles(self.config, vss=vss)

        outfolder = self.myconfig('voutdir') if vss else self.myconfig(
            'outdir')
        check_directory(outfolder, create=True)

        amcache_hives = [path] if path else self.search.search("Amcache.hve$")
        for am_file in amcache_hives:
            self.amcache_path = os.path.join(self.myconfig('casedir'), am_file)
            partition = am_file.split("/")[2]
            self.logger().info("Parsing {}".format(am_file))
            self.outfile = os.path.join(outfolder,
                                        "amcache_{}.csv".format(partition))

            try:
                reg = Registry.Registry(
                    os.path.join(self.myconfig('casedir'), am_file))
                entries = self.parse_amcache_entries(reg)
                save_csv(entries,
                         outfile=self.outfile,
                         file_exists='OVERWRITE',
                         quoting=0)
            except KeyError:
                self.logger().warning(
                    "Expected subkeys not found in hive file: {}".format(
                        am_file))
            except Exception as exc:
                self.logger().warning("Problems parsing: {}. Error: {}".format(
                    am_file, exc))

        self.logger().info("Amcache.hve parsing finished")
        return []
Example #4
    def run(self, path=""):
        self.search = GetFiles(self.config, vss=self.myflag("vss"))
        self.vss = self.myflag('vss')
        self.logger().info("Parsing ShimCache from registry")

        outfolder = self.myconfig('voutdir') if self.vss else self.myconfig(
            'outdir')
        SYSTEM = list(self.search.search(r"windows/System32/config/SYSTEM$"))
        check_directory(outfolder, create=True)

        partition_list = set()
        for f in SYSTEM:
            aux = re.search(r"([vp\d]*)/windows/System32/config", f, re.I)
            partition_list.add(aux.group(1))

        output_files = {
            p: os.path.join(outfolder, "shimcache_%s.csv" % p)
            for p in partition_list
        }

        for f in SYSTEM:
            save_csv(self.parse_ShimCache_hive(f),
                     outfile=output_files[f.split("/")[2]],
                     file_exists='OVERWRITE',
                     quoting=0)

        self.logger().info("Finished extraction from ShimCache")
        return []
Example #5
    def parse_BITS(self):
        if self.vss:
            base_path = self.myconfig('voutdir')
            bitsdb = self.search.search(
                r"v\d+p\d+/ProgramData/Microsoft/Network/Downloader/qmgr0.dat$"
            )
        else:
            base_path = self.myconfig('outdir')
            bitsdb = self.search.search(
                r"p\d+/ProgramData/Microsoft/Network/Downloader/qmgr0.dat$")
        check_directory(base_path, create=True)

        fields = OrderedDict([('job_id', None), ('name', None), ('desc', None),
                              ('type', None), ('priority', None),
                              ('sid', None), ('state', None), ('cmd', None),
                              ('args', None), ('file_count', 0),
                              ('file_id', 0), ('dest_fn', None),
                              ('src_fn', None), ('tmp_fn', None),
                              ('download_size', -1), ('transfer_size', -1),
                              ('drive', None), ('vol_guid', None),
                              ('ctime', None), ('mtime', None),
                              ('other_time0', None), ('other_time1', None),
                              ('other_time2', None), ('carved', False)])

        for f in bitsdb:
            analyzer = bits.Bits.load_file(
                os.path.join(self.myconfig('casedir'), f))
            jobs = analyzer.parse()
            res_generator = (OrderedDict([(field, j.get(field, fields[field]))
                                          for field in fields]) for j in jobs)
            output_file = os.path.join(base_path,
                                       "bitsdb_%s.csv" % f.split("/")[2])
            save_csv(res_generator,
                     outfile=output_file,
                     file_exists='OVERWRITE')
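
The generator expression above normalises each parsed BITS job against the fields template, so every row carries the same columns and missing keys fall back to the declared defaults. A tiny standalone illustration of that pattern (the job data is made up):

from collections import OrderedDict

fields = OrderedDict([('job_id', None), ('file_count', 0), ('download_size', -1)])
jobs = [{'job_id': 'abc', 'file_count': 3}]  # made-up parsed job
rows = (OrderedDict([(k, j.get(k, fields[k])) for k in fields]) for j in jobs)
print(list(rows))
# [OrderedDict([('job_id', 'abc'), ('file_count', 3), ('download_size', -1)])]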
Example #6
    def run(self, path=""):
        """ Parses lnk files, jumlists and customdestinations

        """
        self.logger().info("Extraction of lnk files")

        self.Files = GetFiles(self.config, vss=self.myflag("vss"))
        self.filesystem = FileSystem(self.config)
        self.mountdir = self.myconfig('mountdir')

        lnk_path = self.myconfig('{}outdir'.format('v' if self.vss else ''))
        check_folder(lnk_path)

        users = get_user_list(self.mountdir, self.vss)
        artifacts = {
            'lnk': {
                'filename': "{}_lnk.csv",
                'regex': r"{}/.*\.lnk$",
                'function': self.lnk_parser
            },
            'autodest': {
                'filename': "{}_jl.csv",
                'regex': r"{}/.*\.automaticDestinations-ms$",
                'function': self.automaticDest_parser
            },
            'customdest': {
                'filename': "{}_jlcustom.csv",
                'regex': r"{}/.*\.customDestinations-ms$",
                'function': self.customDest_parser
            }
        }

        for user in users:
            usr = "******".format(user.split("/")[0], user.split("/")[2])

            for a_name, artifact in artifacts.items():
                out_file = os.path.join(lnk_path,
                                        artifact['filename'].format(usr))
                files_list = list(
                    self.Files.search(artifact['regex'].format(user)))
                self.logger().info(
                    "Founded {} {} files for user {} at {}".format(
                        len(files_list), a_name,
                        user.split("/")[-1],
                        user.split("/")[0]))
                if len(files_list) > 0:
                    save_csv(artifact['function'](files_list),
                             config=self.config,
                             outfile=out_file,
                             quoting=0,
                             file_exists='OVERWRITE')
                    self.logger().info(
                        "{} extraction done for user {} at {}".format(
                            a_name,
                            user.split("/")[-1],
                            user.split("/")[0]))

        self.logger().info("RecentFiles extraction done")
        return []
Example #7
    def parse_schedlgu(self):
        sched_files = list(self.search.search(r"schedlgu\.txt$"))
        for file in sched_files:
            partition = file.split("/")[2]
            save_csv(self._parse_schedlgu(
                         os.path.join(self.myconfig('casedir'), file)),
                     outfile=os.path.join(self.outfolder,
                                          'schedlgu_{}.csv'.format(partition)),
                     file_exists='OVERWRITE',
                     quoting=0)
        self.logger().info("Finished extraction from schedlgu.txt")
Example #8
    def _parse_usnjrnl(self, pname):
        """ Get and parses UsnJrnl file for a partition """
        inode = self.filesystem.get_inode_from_path('/$Extend/$UsnJrnl:$J',
                                                    pname)

        if inode == -1:
            self.logger().warning(
                "Problem getting UsnJrnl from partition {}. File may not exist"
                .format(pname))
            return

        # Dumps UsnJrnl file from the data stream $J
        self.logger().info(
            "Dumping journal file of partition {}".format(pname))
        if self.vss:
            self.filesystem.icat(inode,
                                 pname,
                                 output_filename=self.usn_jrnl_file,
                                 attribute="$J",
                                 vss=True)
        else:
            self.filesystem.icat(inode,
                                 pname,
                                 output_filename=self.usn_jrnl_file,
                                 attribute="$J")
        self.logger().info(
            "Extraction of journal file completed for partition {}".format(
                pname))

        self.logger().info("Creating file {}".format(
            os.path.join(self.usn_path, "UsnJrnl_{}.csv".format(pname))))
        if os.stat(self.usn_jrnl_file).st_size > 0:
            # Create dump file
            records = self.parseUsn(infile=self.usn_jrnl_file, partition=pname)
            outfile = os.path.join(self.usn_path,
                                   "UsnJrnl_dump_{}.csv".format(pname))
            save_csv(records,
                     outfile=outfile,
                     file_exists='OVERWRITE',
                     quoting=0)
            # Create summary file from dump file
            filtered_records = self.summaryUsn(infile=outfile, partition=pname)
            out_summary = os.path.join(self.usn_path,
                                       "UsnJrnl_{}.csv".format(pname))
            save_csv(filtered_records,
                     outfile=out_summary,
                     file_exists='OVERWRITE',
                     quoting=0)
Example #9
    def save_recycle_files(self, output_file, partition=None, sorting=True):
        """ Sort recycle bin files by date and save to 'output_file' csv. """
        if not (len(self.i_files) or len(self.r_files)):
            self.logger().info('No RecycleBin files found{}.'.format(
                ' in partition {}'.format(partition) if partition else ''))
            return
        if sorting:
            self.RB_files = list(self.i_files.values()) + self.r_files
            self.RB_files = sorted(self.RB_files, key=lambda it: it['Date'])
        else:
            self.RB_files = chain(self.i_files.values(), self.r_files)

        check_file(output_file, delete_exists=True)
        save_csv(self.RB_files,
                 outfile=output_file,
                 quoting=0,
                 file_exists='OVERWRITE')
Example #10
    def write_tables(self):
        """ Write csv files containing information for each of defined tables. """
        for typ in self.table:
            values = self.table[typ]['data']
            if self.table[typ].get('sort'):
                values = sorted(self.table[typ]['data'],
                                key=lambda x: x[self.table[typ]['sort']])

            # Generator to update with user and partition and yield to save to csv
            def update_gen(elements):
                for element in elements:
                    element.update({
                        'partition': self.partition,
                        'user': self.user
                    })
                    yield element

            save_csv(update_gen(values),
                     outfile=os.path.join(self.outdir,
                                          self.table[typ]['out_file']),
                     file_exists='APPEND')
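
For orientation, write_tables appears to expect self.table to map a table type to a dict with 'data', an optional 'sort' key and an 'out_file' name. The shape below is purely illustrative; the 'visits' type and its record fields are invented for the example.

# Purely illustrative: keys inferred from the accesses in write_tables above;
# 'visits' and its record fields are made-up placeholders.
table = {
    'visits': {
        'data': [
            {'url': 'http://example.org', 'last_visit': '2020-01-01 00:00:00'},
            {'url': 'http://example.com', 'last_visit': '2020-01-02 03:04:05'},
        ],
        'sort': 'last_visit',             # optional: key used to sort 'data'
        'out_file': 'browser_visits.csv'  # csv appended under self.outdir
    }
}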
Example #11
    def run(self, path=""):
        """ Creates a report based on the output of LnkExtract.

        """
        vss = self.myflag('vss')
        self.logger().info("Generating lnk files report")

        self.mountdir = self.myconfig('mountdir')

        # 'v' * vss evaluates to 'v' when vss is True and '' otherwise
        lnk_path = self.config.get('plugins.windows.RVT_lnk.LnkExtract',
                                   '{}outdir'.format('v' * vss))
        report_lnk_path = self.myconfig('{}outdir'.format('v' * vss))

        check_directory(lnk_path, error_missing=True)
        check_folder(report_lnk_path)

        outfile = os.path.join(report_lnk_path, 'recentfiles.csv')
        save_csv(self.report_recent(lnk_path),
                 config=self.config,
                 outfile=outfile,
                 quoting=0)

        return []
Example #12
    def run(self, path=""):
        """ Generator of INDX entries as dictionaries. Also writes to csv files"""
        self.disk = getSourceImage(self.myconfig)
        self.sector_size = self.disk.sectorsize

        self.parseINDX_ROOTFiles = self.myflag(
            'root', False)  # Parse also INDX_ROOT records if set
        self.skip_short_filenames = self.myflag('skip_short', False)
        self.only_slack = self.myflag('only_slack', False)

        outdir = self.myconfig('outdir')
        check_directory(outdir, create=True)

        for p in self.disk.partitions:
            if not p.isMountable:
                continue

            # Get a dictionary {inode: list of names} from 'fls' to later relate inodes to a path. 'inode' keys are strings, not int.
            part_name = ''.join(['p', p.partition])
            try:
                self.inode_fls = FileSystem(
                    self.config).load_path_from_inode(partition=part_name)
                self.logger().debug(
                    'Correctly loaded inode-name relation file for partition {}'
                    .format(part_name))
            except Exception as e:
                self.logger().error(e)
                continue

            # Resume the carving at the last block parsed in the previous execution
            outfile = os.path.join(
                outdir, '{}{}_INDX_timeline.csv'.format(
                    part_name, '_slack' if self.only_slack else ''))
            self.lastParsedBlk = 0
            if self.myflag('use_localstore'):
                self.lastParsedBlk = int(
                    self.config.store_get(
                        'last_{}_block_parsed'.format(part_name), 0))
            self.logger().debug('lastParsedBlk: {}'.format(self.lastParsedBlk))

            csv_args = {'file_exists': 'APPEND', 'write_header': True}
            if self.lastParsedBlk:
                if not os.path.exists(outfile):
                    self.logger().warning(
                        'Starting new file {0} at an advanced offset. Set "last_{0}_block_parsed" at 0 in "store.ini" if a fresh start is desired'
                        .format(outfile))
                else:
                    csv_args['write_header'] = False
            else:
                if os.path.exists(outfile):
                    self.logger().warning(
                        'Overwriting file {}'.format(outfile))
                    csv_args['file_exists'] = 'OVERWRITE'

            # Write the parsed entries to a csv file for each partition.
            save_csv(self.parse_INDX(p),
                     config=self.config,
                     outfile=outfile,
                     quoting=0,
                     **csv_args)
        return []