Example no. 1
    def _modifier_wrapper(self, tmp_path: Path, name: str, impact: str,
                          ask: bool, modifier: Callable[[Path, int], None]) -> None:
        count = count_lines(tmp_path)
        # The impact string (e.g. '*2' or '**2') is appended to the current
        # count and evaluated to preview the projected wordlist size.
        if ask and not pause(
                f'{cyan(name)} keywords (impact: {impact} => {eval(f"{count}{impact}")})',
                cancel=True):
            return
        modifier(tmp_path, count)
        new_count = count_lines(tmp_path)
        pr(f'{name} added {cyan(new_count - count)} new keywords, total: {cyan(new_count)}')
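count_lines, pause, pr and cyan are helpers from the surrounding project and are not shown among these examples. For reference only, a minimal sketch of what a count_lines helper could look like, assuming a newline-delimited wordlist file (an assumption, not the project's actual implementation):

from pathlib import Path

def count_lines(path: Path) -> int:
    # Assumed behaviour: one keyword per line, so counting lines
    # approximates the number of keywords in the wordlist.
    with path.open('rb') as file:
        return sum(1 for _ in file)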
Example no. 2
    def crawl_tree(self, directory: Path, depth: int) -> dict:
        total = {}
        for file_name in listdir(directory):
            # Discard excluded names and, unless dotfiles are included, hidden entries
            if file_name in self.exclude:
                continue
            if not self.inc_df and file_name.startswith('.'):
                continue
            path = directory.joinpath(file_name)

            # If subpath is a dir - go deeper
            if path.is_dir():
                if depth == 0:
                    continue
                try:
                    sub_results = self.crawl_tree(path, depth - 1)
                    # Merge subdirectory results into this level's totals
                    for suf, v in sub_results.items():
                        if suf not in total:
                            total[suf] = {}
                        total[suf] = {**total[suf], **v}
                except (PermissionError, OSError) as e:
                    pr(f'Error "{e}" in directory: "{path}"', '!')
                continue

            # Check suffix
            suf = path.suffix[1:]  # Get last suffix without dot
            if self.exc_suffixes:
                if suf in self.suffixes:
                    continue
            else:
                if self.suffixes and suf not in self.suffixes:
                    continue

            # Update total
            try:
                lc = count_lines(path)
            except (PermissionError, OSError) as e:
                pr(f'Error "{e}" in file: "{path}"', '!')
                continue
            if suf not in total:
                total[suf] = {}
            total[suf][path.relative_to(self.target)] = lc

        return total
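For orientation, the return value maps a file suffix to a dict of {path relative to self.target: line count}. A hypothetical call, where crawler is assumed to be an instance of the class that defines crawl_tree (names and paths here are illustrative only):

from pathlib import Path

# crawler = <instance of the class shown above>  # assumed to already exist
results = crawler.crawl_tree(Path('/tmp/project'), depth=3)
for suffix, files in results.items():
    total_lines = sum(files.values())
    print(f'{suffix or "<no suffix>"}: {total_lines} lines across {len(files)} files')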
Example no. 3
    def print_all(self, args: tuple) -> None:
        f = self._get_wordlist_path()
        if not f:
            return

        lc = count_lines(f)
        if not lc:
            return pr('Wordlist is empty!', '!')

        # Get arguments
        total = False
        if args:
            total = args[0] == 'total'

        # Print relevant info
        if not total:
            if lc > self.config['list_treshold']:
                if not pause(f'show all {cyan(lc)} keywords', cancel=True):
                    return
            for v in self._gen_wordlist(f):
                cprint('  ' + v, 'yellow')
        pr(f'Total keywords count: {cyan(lc)}')
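_gen_wordlist is used by several of these methods but is not among the examples. A minimal sketch of a generator with the same role, assuming one keyword per line (hypothetical, not the project's actual code):

from pathlib import Path
from typing import Iterator

def gen_wordlist(path: Path) -> Iterator[str]:
    # Hypothetical stand-in for self._gen_wordlist: lazily yield one keyword
    # per non-empty line so large wordlists never have to fit in memory.
    with path.open(encoding='utf8') as file:
        for line in file:
            keyword = line.rstrip('\n')
            if keyword:
                yield keyword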
Example no. 4
    def expand(self, args: tuple) -> None:
        f = self._get_wordlist_path()
        if not f:
            return

        lc = count_lines(f)
        if not lc:
            return pr('Wordlist is empty!', '!')

        # Get new wordlist name
        try:
            print('Enter name for new expanded wordlist:')
            save_path = input(colored('>', 'yellow'))
            if not save_path:
                return
            save_path = self.workspace.joinpath('expand_' + str(save_path))
            if save_path.is_file():
                pr(f'File {cyan(save_path)} already exists, overwrite?', '!')
                if not pause(cancel=True):
                    return
        except KeyboardInterrupt:
            return

        # Get arguments
        auto_all = False
        if args:
            auto_all = args[0] == 'all'

        # mkstemp returns (fd, path); the open descriptor is discarded here
        # and the temporary file is reused only through its path
        _, tmp_path = mkstemp('stargen')
        tmp_path = Path(tmp_path)

        # Copy initial content
        tmp_path.write_bytes(f.read_bytes())

        def _capitalize(tmp_p: Path, count: int) -> None:
            with tmp_p.open('a', encoding='utf8') as file:
                for k in self._gen_wordlist(f):
                    file.write(k.capitalize() + '\n')

        self._modifier_wrapper(tmp_path, 'Capitalize', '*2', not auto_all,
                               _capitalize)

        def _leetify(tmp_p: Path, count: int) -> None:
            with tmp_p.open('a', encoding='utf8') as file:
                for k in self._gen_wordlist(f):
                    file.write(leetify(k) + '\n')

        self._modifier_wrapper(tmp_path, '13371fy', '*2', not auto_all,
                               _leetify)

        def _mockify(tmp_p: Path, count: int) -> None:
            with tmp_p.open('a', encoding='utf8') as file:
                for k in self._gen_wordlist(f):
                    file.write(mockify(k, True) + '\n')
                    file.write(mockify(k, False) + '\n')

        self._modifier_wrapper(tmp_path, 'MoCkIfY', '*3', not auto_all,
                               _mockify)

        def _intermix(tmp_p: Path, count: int) -> None:
            itmr = IterationTimer(count**2)
            with tmp_p.open('a', encoding='utf8') as file:
                for a in self._gen_wordlist(f):
                    for b in self._gen_wordlist(f):
                        file.write(a + b + '\n')
                        file.write(b + a + '\n')
                        itmr.tick()

        self._modifier_wrapper(tmp_path, 'Intermix', '**2', not auto_all,
                               _intermix)

        # Save as
        pr(f'Saving as: {cyan(save_path)}')
        move(tmp_path, save_path)
        new_lc = count_lines(save_path)

        # Show current status
        print()
        a = []
        if new_lc > self.config['list_treshold']:
            a += ['total']
        self.print_all(a)
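Each helper above re-reads the original wordlist f, so for N starting keywords the appended counts are roughly N (capitalize), N (leetify), 2N (mockify) and 2N**2 (intermix), assuming every step is confirmed. A quick illustration of that arithmetic (function name and figures here are illustrative only):

def expanded_size(n: int) -> int:
    # original + capitalize + leetify + mockify + intermix
    return n + n + n + 2 * n + 2 * n ** 2

print(expanded_size(1000))  # 2,005,000 entries: the intermix step dominates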