def __init__(self, args: Namespace):
    """Bootstrap Stargen: load config, prepare workspace/modules, then run the REPL.

    args -> parsed CLI arguments; only args.config is read here.
    Loops until Ctrl+C / EOF, then persists the config.
    """
    # Load config (fall back to packaged default path/setup when -c not given)
    self.config = DictConfig(
        args.config if args.config else Stargen.DEFAULT_CONFIG_PATH,
        Stargen.DEFAULT_CONFIG_SETUP)
    # Create workspace dir (no error if it already exists)
    self.workspace = Path(self.config['workspace'])
    Path(self.workspace).mkdir(exist_ok=True)
    # Initialize modules
    self.modules = (Keyword(self), Crunch(self),
                    Download(self), Combination(self))
    # Initialize menu: merge every module's command table into one dict.
    # Entries map command -> (handler, description) — see the v[1]/v[0] uses below.
    menu = {}
    for mod in self.modules:
        mod: Module
        menu.update(mod.menu())
    # Welcoming message & enter main menu
    cprint(banner('Stargen'), choice(('red', 'green', 'blue')))
    pr(
        f'Enter "{cyan("help")}" to see list of available commands,\n' +
        ' [Ctrl + C] to exit', '?')
    while 1:
        try:
            # Configure prompt: modules[0] is the Keyword module; when a
            # wordlist is active, substitute $u/$U with its stem/name.
            current = self.modules[0].current
            if current:
                prompt = self.config['use_prompt']
                prompt = prompt.replace('$u', current.stem)
                prompt = prompt.replace('$U', current.name)
            else:
                prompt = self.config['prompt']
            # Get user input
            inp = input(colored(prompt, 'red', attrs=['bold']))
            if not inp:
                continue
            # Help the user
            # NOTE(review): substring match — any input containing "help"
            # triggers the listing; confirm this is intended.
            if 'help' in inp:
                for k, v in menu.items():
                    print(f' {cyan(k)} -> {colored(v[1], "yellow")}')
                continue
            # Get command (first whitespace-separated token)
            pts = inp.split(' ')
            cmd = pts[0]
            if cmd not in menu:
                pr(f'No such command! try "{cyan("help")}".', '!')
                continue
            # Call menu entry with the remaining non-empty tokens as args
            menu.get(cmd)[0](tuple([i for i in pts[1:] if i]))
            print()
        except (KeyboardInterrupt, EOFError):
            print()
            break
    # Persist any config changes made during the session
    self.config.save()
def menu(args, definitions) -> int:
    """Interactive fetch loop with optional auto-refresh mode.

    args        -> parsed CLI namespace (automode, automode_time, quiet)
    definitions -> definitions to fetch on each iteration
    Returns 2 on API connection failure, 0 on normal exit.
    """
    auto_mode = args.automode
    auto_time = 0.5
    if args.automode_time:
        # Enforce a sane lower bound on the refresh delay
        auto_time = max(0.5, args.automode_time)
        pr(f'Using Auto-mode time delay: {auto_time}', '*')
    while 1:
        try:
            fetch_definitions(args, definitions)
            if auto_mode:
                sleep(auto_time)
        except KeyboardInterrupt:
            print()
            # First Ctrl+C leaves auto mode; a second one exits the loop
            if auto_mode:
                auto_mode = False
                if not args.quiet:
                    pr('Auto-mode stopped!')
            else:
                if not args.quiet:
                    pr('Interrupted!', '!')
                break
        except RequestException:
            pr("Couldn't connect to API @ api.cryptowat.ch", 'X')
            return 2
        if not auto_mode:
            c = choose(['Reload values', 'Enter auto mode'])
            if c == 0:
                continue
            elif c == 1:
                auto_mode = True
            else:
                break
    if not args.quiet:
        pr('Bye!')
    # BUG FIX: function is annotated -> int (used as an exit code by main())
    # but previously fell off the end returning None on a clean exit.
    return 0
def _select_wordlist(title: str):
    """Prompt the user to pick a wordlist from the workspace.

    Returns (path, size_bytes, line_count) — based on file_volume's 3-tuple.
    Raises KeyboardInterrupt when the user cancels the selection.

    NOTE(review): this references a free variable `self` but is defined as a
    plain function — it appears to be extracted from a method (see the
    identical nested helper in ask_two_wl); confirm it is bound appropriately.
    """
    pr(f'Select {title} wordlist:')
    f = choose_file(self.workspace)
    if not f:
        raise KeyboardInterrupt
    fsb, flc, ftxt = file_volume(f)
    pr(f' {ftxt}')
    return f, fsb, flc
def parse_proxies_file(path: Path) -> list:
    """Build a Proxy object from every line of *path*.

    Malformed lines are reported and skipped rather than aborting the parse.
    """
    collected = []
    for line in path.read_text().splitlines():
        try:
            parsed = Proxy(line)
        except InvalidProxyFormatError:
            pr(f'Invalid proxy line: {line} in file: {str(path)}', '!')
        else:
            collected.append(parsed)
    return collected
def prompt_mask() -> 'str | None':
    """Ask the user for a crunch mask string.

    Returns the entered mask, or None if reading input failed.
    (Annotation fixed: the old `(str, None)` tuple is not a valid type.)
    """
    try:
        return input(
            colored(
                'Mask special chars: \n\t@ will insert lower case characters\n\t, will insert upper case characters\n\t% will insert numbers\n\t^ will insert symbols\n\n> ',
                'yellow'))
    except Exception as e:
        # Broad catch is deliberate: any console failure is reported, not raised
        pr('An exception caught!', 'X')
        print(e)
        return None
def mod(mode: str, proxies: Iterable[str], submode: str):
    """Add or remove proxies given as literal addresses and/or proxy-list files.

    mode    -> 'add' adds; anything else removes
    proxies -> arguments to interpret (file paths or proxy strings)
    submode -> '' (auto-detect per argument), 'literal', or 'file'
    """
    def _mod_from_file(action: str, path: Path) -> int:
        '''
        Attempt to read file and for each line update/remove proxy in the DB.
        Catches exceptions.
        returns -> int: Count of proxies modified
        '''
        pr(f'{action}ing from a proxy list: {cyan(str(path))}')
        method = ProxyDB.update_some if action == 'Add' else ProxyDB.remove_some
        count = method(parse_proxies_file(path))
        if not count:
            return pr('No proxies added!', '!')
        pr(f'{action}ed {cyan(count)} proxies!')
        return count

    def _mod_single_proxy(action: str, proxy_ip: str) -> bool:
        '''
        Attempt to update/remove single proxy in the DB.
        Catches exceptions.
        returns -> bool: Success
        '''
        pr(f'{action}ing a single proxy: {cyan(proxy_ip)}')
        method = ProxyDB.update_one if action == 'Add' else ProxyDB.remove_one
        try:
            method(Proxy(proxy_ip))
            return True
        except KeyError:
            pr(f'Proxy {cyan(str(proxy_ip))} not found in DB!', '!')
        except InvalidProxyFormatError:
            pr(f'Invalid proxy format: {cyan(str(proxy_ip))}', '!')
        return False

    action = 'Add' if mode == 'add' else 'Remov'
    for arg in proxies:
        try:
            if not submode:
                # Auto-detect: an existing file wins over a literal proxy
                arg_path = Path(arg)
                if arg_path.is_file():
                    _mod_from_file(action, arg_path)
                elif is_proxy_format(arg):
                    _mod_single_proxy(action, arg)
                else:
                    raise ValueError
            elif submode == 'literal':
                if not is_proxy_format(arg):
                    raise ValueError
                _mod_single_proxy(action, arg)
            elif submode == 'file':
                file_path = Path(arg)
                if not file_path.is_file():
                    raise ValueError
                # BUG FIX: pass a Path object — _mod_from_file calls
                # path.read_text(), which a plain str does not have.
                _mod_from_file(action, file_path)
        except ValueError:
            pr(f'Invalid argument: {cyan(arg)}', '!')
def _modifier_wrapper(self, tmp_path: Path, name: str, impact: str,
                      ask: bool, modifier: Callable[[int], None]) -> None:
    """Run a wordlist *modifier* on tmp_path and report how many lines it added.

    tmp_path -> working wordlist file the modifier edits in place
    name     -> display name of the modifier (used in messages)
    impact   -> expression suffix describing growth, e.g. '*2' or '+100'
                (presumably — inferred from the eval below; TODO confirm)
    ask      -> when True, require confirmation before running
    modifier -> callable invoked as modifier(tmp_path, count)
                NOTE(review): annotation says Callable[[int], None] but the
                call passes (Path, int) — annotation looks stale.
    """
    count = count_lines(tmp_path)
    # SECURITY NOTE: eval() on a constructed string — safe only while
    # `impact` comes from trusted, hard-coded module definitions; never
    # feed user input through this path.
    if ask and not pause(
            f'{cyan(name)} keywords (impact: {impact} => {eval(f"{count}{impact}")})',
            cancel=True):
        return
    modifier(tmp_path, count)
    # Re-count to report the delta produced by the modifier
    new_count = count_lines(tmp_path)
    pr(f'{name} added {cyan(new_count - count)} new keywords, total: {cyan(new_count)}'
       )
def _crunch(self, crunch_method) -> None:
    """Invoke *crunch_method* and record its result; Ctrl+C aborts quietly."""
    try:
        generated = crunch_method()
        if not generated:
            return pr("Crunch was NOT generated!", 'X')
        # Remember the produced crunch for later listing/use
        self.crunches.append(generated)
    except KeyboardInterrupt:
        print()
        pr('Interrupted!', '!')
        return
def clear(self, args: tuple) -> None:
    """Empty the current wordlist file after user confirmation.

    args -> unused; present for menu-handler signature uniformity.
    (User-facing typos fixed: 'truncatete' / 'truncateted'.)
    """
    if not pause('truncate wordlist', cancel=True):
        return
    f = self._get_wordlist_path()
    if not f:
        return
    # Opening with 'w' already truncates; truncate() makes the intent explicit
    with f.open('w') as file:
        file.truncate()
    pr('Wordlist truncated!')
def show_ebt(algos: dict, tlc: int):
    """
    Show estimated burn time
    tlc   -> Total lines count (must be positive)
    algos -> maps algorithm name to its hashes-per-second estimate (elps)
    Raises ValueError for a non-positive tlc.
    """
    # BUG FIX: was `assert tlc > 0` — asserts are stripped under `python -O`,
    # so validate explicitly instead.
    if tlc <= 0:
        raise ValueError(f'Total lines count must be positive, got {tlc}')
    for algo, elps in algos.items():
        # Calculate EBT; NOTE: strftime('%H:%M:%S') wraps for durations >= 24h
        ebt = ((tlc * 2) / elps)
        ebt = strftime('%H:%M:%S', gmtime(ebt))
        pr(f'Estimated {cyan(algo)} time: {cyan(ebt)} (assuming elps={elps})')
def get_pypi_ver(name):
    """Scrape PyPI for the latest published version of *name*; '0' when absent."""
    pr('Checking PyPI: ', end='')
    res = get(f'https://pypi.org/project/{name}/')
    if res.status_code != 200:
        print('Not found!')
        return '0'
    soup = BeautifulSoup(res.text, features='html.parser')
    header = soup.find('h1', {"class": 'package-header__name'})
    # Header text is "<name> <version>"; the last token is the version
    version = header.get_text(strip=True).split()[-1]
    print(version)
    return version
def _gen(self, file_name: str, cmd: list) -> 'str | None':
    """Run a crunch command, writing output to dest_dir/file_name.

    file_name -> target file name inside self.dest_dir
    cmd       -> crunch argv prefix; '-o <path>' is appended here
    Returns file_name on success (or when it already exists), None on failure.
    (Fixed: `cmd: iter` annotated with the builtin function, and the tuple
    return annotation `(str, None)` is not a valid type.)
    """
    # Verify destination dir
    self.dest_dir.mkdir(exist_ok=True)
    ap: Path = self.dest_dir / file_name
    if ap.is_file():
        pr('Skipping, crunch already exist!')
        return file_name
    pr('Generating crunch: ' + cyan(file_name))
    # BUG FIX: build the final argv without mutating the caller's list
    # (the old `cmd += [...]` extended it in place).
    call(list(cmd) + ['-o', ap])
    if not ap.is_file():
        # crunch failed to produce the output file
        return None
    return file_name
def handle_checker_loop(self, procs: Iterable[mp.Process], jobs_count: int):
    """Wait for all worker processes, printing periodic progress when verbose."""
    REPORT_EVERY = 3  # seconds between progress reports
    last_report = time()
    while self.active_children(procs):
        sleep(0.25)
        if self.verbose and time() - last_report > REPORT_EVERY:
            last_report = time()
            pr('Jobs Progress: [%d/%d] = %d%%' %
               (self.jobs_done.value, jobs_count,
                self.jobs_done.value * 100 / jobs_count), '*')
    # All workers exited — show the final tally
    self.show_status()
def get_aur_ver(name):
    """Scrape the AUR page of python-<name> for its current version; '0' when absent."""
    pr('Checking AUR: ', end='')
    res = get(f'https://aur.archlinux.org/packages/python-{name}/')
    if res.status_code != 200:
        print('Not found!')
        return '0'
    soup = BeautifulSoup(res.text, features='html.parser')
    details = soup.find('div', {"id": 'pkgdetails'})
    # The details <h2> reads "Package Details: <pkg> <version>"
    version = details.find('h2').get_text(strip=True).split()[-1]
    print(version)
    return version
def isin(self, args: tuple) -> None:
    """Report whether each given keyword exists in the current wordlist.

    args -> keywords to look up.
    """
    if not args:
        # BUG FIX: usage message previously said 'add' (copy-paste from add())
        return pr('Usage: isin <keyword...>', '*')
    f = self._get_wordlist_path()
    if not f:
        return
    for a in args:
        # _gen_wordlist is regenerated per keyword: the membership test
        # consumes the generator
        if a in self._gen_wordlist(f):
            pr(f'Found a match for {cyan(a)}')
        else:
            pr(f'No match for {cyan(a)}', '!')
def _mod_from_file(action: str, path: Path) -> int:
    '''
    Attempt to read file and for each line update/remove proxy in the DB.
    Catches exceptions.
    returns -> int: Count of proxies modified (None when nothing changed)
    '''
    pr(f'{action}ing from a proxy list: {cyan(str(path))}')
    if action == 'Add':
        db_op = ProxyDB.update_some
    else:
        db_op = ProxyDB.remove_some
    count = db_op(parse_proxies_file(path))
    if not count:
        return pr('No proxies added!', '!')
    pr(f'{action}ed {cyan(count)} proxies!')
    return count
def main() -> int:
    """CLI entry point: collect definitions from file/arguments, then dispatch."""
    def _warn_suspicious(defs, origin):
        # Flag any definition that fails the sanity check
        for d in defs:
            if not _check_def(d):
                pr(f'Suspicious definition: "{d}" from {origin}', '!')

    args = parse_args()
    definitions = []
    # Definitions from a file, one per line
    if args.file:
        path = Path(args.file)
        if path.is_file():
            definitions = path.read_text().splitlines()
            _warn_suspicious(definitions, 'file')
        else:
            pr('Specified file not found!', '!')
    # Definitions from the command line (a single comma-joined arg is split)
    if args.defs:
        if len(args.defs) == 1 and ',' in args.defs[0]:
            args.defs = args.defs[0].split(',')
        definitions += args.defs
        _warn_suspicious(args.defs, 'arguments')
    if not definitions:
        pr('No definitions defined, exiting', '!')
        exit(1)
    if args.interactive or args.automode:
        return menu(args, definitions)
    return fetch_definitions(args, definitions)
def duplicate(self, args: tuple) -> None:
    """Copy the active wordlist to a new file, confirming before overwrite.

    args -> (copy_name,)
    """
    if not args:
        return pr('Usage: duplicate <copy_name>', '*')
    src = self._get_wordlist_path()
    if not src:
        return
    dst = Path(args[0])
    if dst.is_file():
        # Destination exists — require explicit confirmation
        pr(f'File {cyan(dst)} already exists, overwrite?', '!')
        if not pause(cancel=True):
            return
    copy(src, dst)
    pr(f'Copied to {cyan(dst)}!')
def add(self, args: tuple) -> None:
    """Append keywords to the current wordlist, skipping duplicates.

    args -> keywords to add.
    """
    if not args:
        return pr('Usage: add <keyword...>', '*')
    f = self._get_wordlist_path()
    if not f:
        return
    for a in args:
        # Re-scan the file each time so keywords added earlier in this
        # call are also seen as duplicates
        if a in self._gen_wordlist(f):
            pr(f'Skipping duplicate "{cyan(a)}"', '*')
            continue
        pr(f'Adding "{a}"')
        with f.open('a') as file:
            # BUG FIX: terminate each keyword with a newline — the old
            # file.write(a) concatenated all additions onto one line,
            # breaking the line-based wordlist format.
            file.write(a + '\n')
def crawl_tree(self, directory: Path, depth: int) -> dict:
    """Recursively tally line counts of files under *directory*, grouped by suffix.

    directory -> root to scan
    depth     -> remaining recursion depth; 0 stops descending into subdirs
    Returns {suffix: {path_relative_to_self.target: line_count}}.
    Unreadable files/dirs are reported and skipped, never raised.
    """
    total = {}
    for file_name in listdir(directory):
        # Discard some: explicit excludes, and dotfiles unless inc_df is set
        if file_name in self.exclude:
            continue
        if not self.inc_df and file_name.startswith('.'):
            continue
        path = directory.joinpath(file_name)
        # If subpath is a dir - go deeper
        if path.is_dir():
            if depth == 0:
                continue
            try:
                sub_results = self.crawl_tree(path, depth - 1)
                # Merge subdir results with this-level's ones
                # (later entries overwrite on key collision, but keys are
                # distinct relative paths so no data is lost)
                for suf, v in sub_results.items():
                    if suf not in total:
                        total[suf] = {}
                    total[suf] = {**total[suf], **v}
            except (PermissionError, OSError) as e:
                pr(f'Error "{e}" in directory: "{path}"', '!')
            continue
        # Check suffix
        suf = path.suffix[1:]  # Get last suffix without dot
        # Suffix filter works as a blocklist (exc_suffixes) or allowlist
        if self.exc_suffixes:
            if suf in self.suffixes:
                continue
        else:
            if self.suffixes and suf not in self.suffixes:
                continue
        # Update total
        try:
            lc = count_lines(path)
        except (PermissionError, OSError) as e:
            pr(f'Error "{e}" in file: "{path}"', '!')
            continue
        if suf not in total:
            total[suf] = {}
        total[suf][path.relative_to(self.target)] = lc
    return total
def crunch(self, args: tuple) -> None:
    """Generate a wordlist with the external `crunch` tool.

    args -> ('mask',) switches to mask mode; otherwise charset mode asks
            for min/max lengths and a charset from CHAR_FILE.
    """
    if not is_package('crunch'):
        return pr('Package "crunch" not installed!', 'X')
    # Get args
    is_mask = args and args[0] == 'mask'
    # Mode switch: [Mask / Charset]
    if is_mask:
        mask = prompt_mask()
        if not mask:
            return pr('Invalid mask!', '!')
        file_name = f'mask_{ask("Enter save name:")}.dict'
        # crunch wants min and max length; for a mask both equal its length
        mask_len = str(len(mask))
        return self._crunch(
            lambda: self._gen(file_name,
                              ['crunch', mask_len, mask_len, '-t', mask]))
    # Get min and max seq len
    min_len = prompt_int('Enter minimum length')
    max_len = prompt_int('Enter maximum length')
    if not min_len or not max_len or \
            min_len < 1 or max_len < min_len:
        # Typo fixed: was 'paramenters'
        return pr('Invalid parameters!', '!')
    # Ask for a charset to use ('sv'-tagged lines are not plain charsets)
    with CHAR_FILE.open(encoding='utf-8') as char_file:
        sets = [n[:-1] for n in char_file if '=' in n and 'sv' not in n]
    select = choose(sets, 'Choose charset:', default=26)  # Actually 27
    if select < 0:
        exit(-1)
    charset = sets[select].split(' ')[0]
    # Confirm
    if not pause(
            f'generate ({cyan(min_len)}-{cyan(max_len)}) length dict via "{cyan(charset)}" charset',
            cancel=True):
        return
    # BUG FIX: the output name previously contained the literal '36,180'
    # instead of the chosen maximum length.
    file_name = f'{charset}_{min_len}-{max_len}.dict'
    return self._crunch(lambda: self._gen(file_name, [
        'crunch', str(min_len), str(max_len), '-f', CHAR_FILE, charset
    ]))
def use(self, args: tuple) -> None:
    """Select a wordlist from the workspace and make it the active one."""
    if self.current:
        # A wordlist is already active — confirm the switch first
        if not pause(
                f'Switching from {cyan(self.current)} to new wordlist',
                cancel=True):
            return
    selected = choose_file(self.workspace)
    if not selected:
        return
    size_bytes, line_count, summary = file_volume(selected)
    pr(summary)
    # NOTE: a guard rejecting files over ~1 GB was considered here but is
    # intentionally disabled
    self.current = selected
    pr(f'Now using {cyan(self.current)} wordlist!')
def ask_two_wl(self,
               _total_calc: Callable[[int, int], int],
               _write_action: Callable[[Path, Path, Path, IterationTimer], None]):
    """Ask the user for two wordlists, validate the disk impact, then combine them.

    _total_calc   -> combines two per-file metrics (bytes or lines) into the
                     expected total for the output file
    _write_action -> writes the combined wordlist; called as
                     (path1, path2, open_output_file, timer).
                     NOTE(review): the third argument is actually an open
                     text file object, not a Path — annotation kept as found.

    BUG FIX: the parameters previously used `=Callable[...]` (typing aliases
    as *default values*, which are not callable) instead of `: Callable[...]`
    annotations. Working callers always passed both arguments, so removing
    the broken defaults is backward-compatible.
    """
    def _select_wordlist(title: str):
        # Prompt for one wordlist; KeyboardInterrupt signals cancellation
        pr(f'Select {title} wordlist:')
        f = choose_file(self.workspace)
        if not f:
            raise KeyboardInterrupt
        fsb, flc, ftxt = file_volume(f)
        pr(f' {ftxt}')
        return f, fsb, flc

    # Get wordlists
    try:
        f1, f1sb, f1lc = _select_wordlist('first')
        # Typo fixed: was 'secund'
        f2, f2sb, f2lc = _select_wordlist('second')
    except KeyboardInterrupt:
        print()
        return pr('Interrupted', '!')
    # Calculate impact and let the user accept the facts
    tsb = _total_calc(f1sb, f2sb)
    tlc = _total_calc(f1lc, f2lc)
    pr(f'Mixing will allocate {cyan(human_bytes(tsb))} for {cyan("{:,}".format(tlc))} lines')
    free = disk_usage(self.workspace.resolve()).free
    pr(f"Available space on workspace's disk: " + cyan(human_bytes(free)))
    if tsb > free:
        pr('Not enough space on the workspace disk for allocation!', '!')
        return
    max_size = self.config['max_created_file_size']
    if tsb > max_size:
        return pr(f'Calculation resulted in an oversized file (>{human_bytes(max_size)}), aborting!', '!')
    if not pause(cancel=True):
        return
    out_path = self.dest_dir.joinpath(f'{f1.stem}_{f2.stem}')
    with out_path.open('w', encoding='utf-8') as out_file:
        itmr = IterationTimer(tlc, init_interval=1, max_interval=15)
        _write_action(f1, f2, out_file, itmr)
    # Finalize
    pr('Wordlist written into: ' + cyan(out_path.name))
    show_ebt({
        # TODO Move to config
        'WPA2': 57000
    }, tlc)
def show(self, args: tuple) -> None:
    """List the crunches generated so far and where they are stored."""
    pr(f'Destination directory: "{cyan(str(self.dest_dir))}"')
    if not self.crunches:
        return pr('No crunches downloaded yet!', '!')
    pr('Available crunches:')
    for crunch_name in self.crunches:
        cprint(' ' + crunch_name, 'yellow')
    pr(f'Crunches count: ' + cyan(len(self.crunches)))
def __init__(self,
             checklist: Iterable[Proxy],
             max_threads: int = Defaults.checker_max_threads,
             timeout: int = Defaults.checker_timeout,
             checker_filter: CheckerFilter = None,
             no_shuffle: bool = False,
             verbose: bool = False):
    """Run a multiprocess proxy check over *checklist*.

    checklist      -> proxies to test; must be non-empty
    max_threads    -> worker process cap (>= 1), trimmed to the job count
    timeout        -> per-check timeout, stored for workers
    checker_filter -> builds the job list from the checklist.
                      NOTE(review): the None default is unusable —
                      build_joblist is called unconditionally below, so
                      passing None raises AttributeError; confirm callers
                      always supply a filter.
    no_shuffle     -> forwarded to build_joblist
    verbose        -> enables progress reporting in the loop handler
    Raises ValueError on a bad thread count or empty checklist.
    """
    self.timeout = timeout
    self.verbose = verbose
    if max_threads < 1:
        raise ValueError(f'Invalid thread count: {max_threads}')
    if not checklist:
        raise ValueError('No proxies to check!')
    # Build job queue based on filter options
    self.queue = mp.Queue()
    jobs_count = 0
    for job in checker_filter.build_joblist(checklist, no_shuffle):
        self.queue.put(job)
        jobs_count += 1
    # Never spawn more workers than there are jobs
    max_threads = min(max_threads, jobs_count)
    pr('Checking %s proxies (%s jobs) on %s threads' %
       (cyan(len(checklist)), cyan(jobs_count), cyan(max_threads)))
    self._terminate_flag = False
    with mp.Manager() as manager:
        # Shared state: collected working proxies and a done-counter
        self.up = manager.list()
        self.jobs_done = manager.Value('i', 0)
        procs = []
        for _ in range(max_threads):
            # Daemon workers: they die with the parent process
            procs.append(p := mp.Process(target=self.worker, daemon=True))
            p.start()
        try:
            self.handle_checker_loop(procs, jobs_count)
        except KeyboardInterrupt:
            self.handle_checker_interruption(procs, jobs_count)
        finally:
            pr('All children exited')
    self.show_status()
def show_status(self) -> None:
    ''' Show status (using the collected results) '''
    def _sort_protocols(working: List[Proxy]) -> Dict[str, list]:
        ''' Sort proxies by ProxyType'''
        # One bucket per known protocol; a proxy lands in every bucket
        # matching one of its protocols
        buckets = {proto: [] for proto in Defaults.checker_proxy_protocols}
        for prx in working:
            for proto in prx.protos:
                buckets[proto].append(prx)
        return buckets

    text = 'Working:'
    for proto, proxies in _sort_protocols(self.up).items():
        if proxies:
            text += f' {cyan(proto.upper())}:{cyan(len(proxies))}'
    pr(text)
    print()
def update_pkgbuild_version(pkgbuild: Path, directory: Path,
                            title: str, new_ver: str):
    """Rewrite a PKGBUILD in place for a new release version.

    pkgbuild  -> path to the PKGBUILD file to edit
    directory -> project root containing dist/<title>-<new_ver>.tar.gz
    title     -> package name (used for the tarball file name)
    new_ver   -> version string to write into pkgver=
    Returns True on success; None when the tarball could not be built.
    """
    # Calculate source targz checksums
    pr('Calculating SHA256 sum of distributed tarball')
    tarball = directory.joinpath('dist', f'{title}-{new_ver}.tar.gz')
    if not tarball.is_file():
        pr('Tarball not found, building..')
        if not build_wheel(directory):
            return
    targz_checksum = sha256(tarball.read_bytes()).hexdigest()
    # update_pkgbuild
    pr(f'Updating PKGBUILD version to {new_ver}')
    # FileInput(inplace=True) redirects stdout into the file: every print()
    # below writes a line of the new PKGBUILD.
    with FileInput(pkgbuild, inplace=True) as file:
        for line in file:
            if line.startswith('pkgver='):
                old_ver = line.strip().split('=')[1]
                print(line.replace(old_ver, new_ver), end='')
            elif line.startswith('pkgrel='):
                # Reset the package release counter for the new version
                print(line.replace(line.strip().split('=')[1], '1'), end='')
            elif line.startswith('sha256sums=('):
                # Replace the whole checksum array with the new single sum
                s = line.split('(')[0] + '("'
                print(s + targz_checksum + '")')
            else:
                print(line, end='')
    return True  # Success
def init(cls) -> int:
    """Parse CLI arguments and configure class-level settings.

    Returns the parsed arguments namespace.
    NOTE(review): annotated -> int but returns the args object — annotation
    kept as found; confirm callers' expectation before changing it.
    """
    args = Args.parse_arguments()
    # General
    cls.verbose = args.verbose
    # Workspace
    cls.store = Defaults.store
    if args.store:
        try:
            args.store = Path(args.store)
            # BUG FIX: Path has no .isdir() — the old call raised
            # AttributeError (uncaught) instead of reaching this handler.
            if not args.store.is_dir():
                raise FileNotFoundError
        except FileNotFoundError:
            pr(f'No such directory: {colored(str(args.store), "cyan")} , using default!', '!')
        else:
            cls.store = args.store
    cls.store.mkdir(exist_ok=True)
    cls.db_file = args.db_file
    if cls.verbose:
        pr(f'Using store directory in: {cyan(str(cls.store))}', '*')
        pr(f'Using proxies DB named: {cyan(cls.db_file)}', '*')
    return args
def print_all(self, args: tuple) -> None:
    """Dump every keyword in the current wordlist, or just the count with 'total'."""
    f = self._get_wordlist_path()
    if not f:
        return
    lc = count_lines(f)
    if not lc:
        return pr('Wordlist is empty!', '!')
    # Get arguments: 'total' limits output to the count line
    total_only = bool(args) and args[0] == 'total'
    # Print relevant info
    if not total_only:
        # Large listings need explicit confirmation first
        if lc > self.config['list_treshold'] and not pause(
                f'show all {cyan(lc)} keywords', cancel=True):
            return
        for word in self._gen_wordlist(f):
            cprint(' ' + word, "yellow")
    pr(f'Total keywords count: ' + cyan(lc))
def aur_deploy(args):
    """Build the project and publish it to PyPI and the AUR as needed.

    args -> namespace with directory, force, no_aur, aur_depends.
    Returns 1 on any failure; None on success or skip.
    """
    # Resolve the working directory (a file path means its parent dir)
    if args.directory:
        directory = Path(args.directory)
        if directory.is_file():
            directory = directory.parent
    else:
        directory = Path.cwd()
    if not directory.is_dir():
        # Typo fixed: was 'Cnnot'
        pr(f'Cannot run in directory, No such directory {directory} !', 'X')
        return 1
    pr(f'Running in: {directory} directory')
    if not directory.joinpath('setup.py').is_file():
        pr(
            'No setup.py found in directory, ' +
            'Please prepare setup.py for deployment!', 'X')
        return 1
    # Load setup.py metadata: one subprocess call yields three lines
    title, new_ver, description = check_output(
        ['python3', 'setup.py', '--name', '--version', '--description'],
        cwd=directory).decode().splitlines()
    pr(f'Project {title} {new_ver} in: {directory}')
    # Check PyPI: publish only when newer (or forced)
    if args.force or version.parse(new_ver) > version.parse(
            get_pypi_ver(title)):
        if not pause('publish to PyPI', cancel=True) \
                or not pypi_procedure(directory):
            return 1
    # Check AUR
    if args.no_aur:
        return
    aur_ver = get_aur_ver(title)
    if args.force or version.parse(new_ver) > version.parse(aur_ver):
        # aur_ver == '0' means the package does not exist yet (first upload)
        if not pause('publish to AUR', cancel=True) \
                or not aur_procedure(aur_ver == '0', args.aur_depends,
                                     directory, title, new_ver):
            return 1