def scraper_magic(target=paths.scrapersites_path, only_scratch=False):
    """Scrape words from one or more sites and store them as a wordlist.

    :param target: path to a file of site entries (one per line; lines
                   beginning with ``pyoptions.annotator`` are skipped as
                   comments), or a single entry passed straight to checkurl().
    :param only_scratch: if True, only write the raw scratched words and
                         report the result file; otherwise also feed the
                         words to get_extend_dic() for extension.
    """
    sites = []
    if os.path.isfile(target):
        with open(target, 'r') as f:
            for line in f.readlines():
                # Skip annotator-prefixed comment lines; keep everything else.
                if not line.startswith(pyoptions.annotator):
                    sites.append(checkurl(line))
    else:
        # Not a file on disk: treat the argument itself as a single entry.
        sites.append(checkurl(target))

    rawlist = scratchword(sites)

    # Both branches previously duplicated this store-path construction and
    # write-out loop verbatim; hoisted here once (behavior unchanged: the
    # raw list is always written before the branch-specific step).
    storepath = os.path.join(
        paths.results_path,
        "%s_%s%s" % (pystrs.SCFATCH_prefix, mybuildtime(), pyoptions.filextension))
    with open(storepath, "a") as f:
        for line in rawlist:
            f.write(str(line) + pyoptions.CRLF)

    if only_scratch:
        # Raw-words-only mode: just report the finished result file.
        finishprinter(finishcounter(storepath), storepath)
    else:
        # Extend the scratched words into a larger dictionary.
        get_extend_dic(rawlist, need_extendscratch=True)
def scraper_magic(target=paths.scrapersites_path):
    """Scrape words from one or more sites and feed them to get_extend_dic().

    NOTE(review): this re-defines ``scraper_magic`` and shadows the earlier
    definition in this file (which takes an extra ``only_scratch`` flag) —
    confirm which version is the intended one.

    :param target: path to a file of site entries (one per line; lines
                   beginning with ``pyoptions.annotator`` are skipped as
                   comments), or a single entry passed straight to checkurl().
    """
    sites = []
    if os.path.isfile(target):
        with open(target, 'r') as f:
            for line in f.readlines():
                # Skip annotator-prefixed comment lines; keep everything else.
                if not line.startswith(pyoptions.annotator):
                    sites.append(checkurl(line))
    else:
        # Not a file on disk: treat the argument itself as a single entry.
        sites.append(checkurl(target))
    get_extend_dic(scratchword(sites), need_passcratch=True)
# NOTE(review): this `except` belongs to a `try:` that opens above this
# chunk — the indentation level of the clause is reconstructed and should
# be confirmed against the full file.
except WindowsError:
    # Results directory/file could not be created (Windows-specific error);
    # abort with a colored message.
    exit(pyoptions.CRLF + cool.red("[-] Cannot create result file: %s " % paths.results_path))


if __name__ == '__main__':
    # Entry point: print the banner, initialize, then dispatch exactly one
    # generator/tool based on whichever CLI argument group was supplied.
    print("{}".format(cool.green(pydictor_art_text)))
    init()
    if pyoptions.args_base:
        get_base_dic(pyoptions.args_base)
    elif pyoptions.args_char:
        get_char_dic(pyoptions.args_char)
    elif pyoptions.args_chunk:
        get_chunk_dic(pyoptions.args_chunk)
    elif pyoptions.args_extend:
        get_extend_dic(pyoptions.args_extend)
    elif pyoptions.args_plug:
        plug_parser()
    elif pyoptions.args_sedb:
        # Interactive shell mode; any failure during setup or the command
        # loop terminates the program, passing the exception to exit().
        try:
            sedb_tricks()
            shell = SEDB()
            shell.cmdloop()
        except Exception as e:
            exit(e)
    elif pyoptions.args_conf != 'default':
        conf_parser()
    elif pyoptions.args_pattern != 'default':
        pattern_parser()
    elif pyoptions.args_tool:
        tool_parser()