def fetch_handler():
    source_name = "CVESearch"
    colors.print_info("[-] Downloading %s" % source_name)

    source = sourcehelper.SourceHelper(
        "https://cve.circl.lu/static/circl-cve-search-expanded.json.gz")
    source_data = source.fetch()
    source_data = source_data.decode("utf8")

    # Reformat the newline-delimited JSON document into a single JSON array
    source_data = source_data.replace("\n", ",\n")
    source_data = "[" + source_data
    source_data = source_data[:-2] + "]\n"

    # sourcehelper.write_source(source_name, source_data)
    # TODO: Reconvert data to Vulners JSON model
    source_data = json.dumps(json.loads(source_data))

    colors.print_success("Saving source %s" % source_name)
    sourcehelper.write_source(source_name, source_data)
    sourcehelper.write_source_sig(source_name,
                                  sourcehelper.make_sig(source_data))
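
# `sourcehelper.make_sig` is defined elsewhere in the project; a minimal
# sketch of what it is assumed to do (a plain content digest, so two
# fetches can be compared without diffing whole files):
import hashlib

def make_sig(source_data):
    # Hypothetical implementation: hash the raw source contents
    if isinstance(source_data, str):
        source_data = source_data.encode("utf8")
    return hashlib.sha256(source_data).hexdigest()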
def source_update(src_name):
    # Need a new connection for the new process
    phdb = prohacktivedb.ProHacktiveDB()

    src_sig = sourcehelper.read_source_sig(src_name).decode("utf8")
    src_dat = sourcehelper.read_file_bytes(
        sourcehelper.get_fetched_srcs_dir() + src_name + ".dat").decode("utf8")

    colors.print_info("[-] Inserting source %s signature %s" %
                      (src_name, src_sig))
    phdb.insert_src_sig(src_name, src_sig)

    colors.print_info("[-] Inserting source %s dat %s" % (src_name, src_dat))
    phdb.insert_src_dat(src_name, src_dat)

    colors.print_info("[-] Inserting source %s" % src_name)
    src_data = json.loads(sourcehelper.read_source(src_name))

    colors.print_info("[-] Erasing old vulnerabilities %s ..." % src_name)
    phdb.collections.drop_collection(src_name)

    colors.print_info("[-] Inserting vulnerabilities of %s ..." % src_name)
    phdb.insert_vulnerabilities(src_data, src_name)

    colors.print_success("[x] Updated %s" % src_name)
def main():
    colors.print_info("[-] ProHacktive updating running...")

    srcs_name = SourcesManager().list_all()
    phdb = prohacktivedb.ProHacktiveDB()

    if len(srcs_name) == 0:
        colors.print_warn("[-] No sources to update!")
    else:
        colors.print_warn("[-] Updating on host %s with port %s" %
                          (phdb.host, phdb.port))

        colors.print_info("[-] Updating sources")

        processes_list = list()

        # Prepare a process for each source
        for src_name in srcs_name:
            processes_list.append(
                processes.CProcess(src_name, source_update, src_name))

        process_limit_update = config.current_config.process_limit_update

        # Run the source-update processes, bounded by the configured limit
        processes.handle_processes(processes_list, process_limit_update, 0.01)

        colors.print_success(
            "[x] ProHacktive database has been updated successfully!")
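
# `processes.handle_processes` is a project helper; a minimal sketch of
# what it is assumed to do, keeping at most `limit` worker processes
# alive and polling every `delay` seconds (CProcess is assumed to expose
# the usual multiprocessing start()/is_alive() interface):
import time

def handle_processes(processes_list, limit, delay):
    pending = list(processes_list)
    running = []
    while pending or running:
        # Drop finished workers, then top back up to the limit
        running = [p for p in running if p.is_alive()]
        while pending and len(running) < limit:
            proc = pending.pop(0)
            proc.start()
            running.append(proc)
        time.sleep(delay)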
def unzip(path, extract_to="./", pwd=""):
    try:
        # The with statement closes the archive; no explicit close needed
        with zipfile.ZipFile(path) as file:
            file.extractall(path=extract_to, pwd=bytes(pwd, "utf-8"))
        colors.print_success("[+] FILES EXTRACTED:")
        return True
    except Exception as err:
        raise err
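
# Example usage (hypothetical archive name and password):
if unzip("dump.zip", extract_to="./extracted", pwd="infected"):
    print("Archive extracted to ./extracted")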
def hash_file(filepath):
    md5_hash = hashlib.md5()

    # Read the file in 4 KiB blocks so large files don't exhaust memory
    with open(filepath, 'rb') as f:
        for byte_block in iter(lambda: f.read(4096), b''):
            md5_hash.update(byte_block)

    file_hash = md5_hash.hexdigest()

    colors.print_success(f'[+] File: {filepath}')
    colors.print_header(f' [~] Size: {os.path.getsize(filepath)} bytes')
    colors.print_header(f' [~] Hash: {file_hash}')
    check_hash(file_hash)
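
# `check_hash` is defined elsewhere; a minimal sketch, assuming it looks
# the digest up in a local set of known-bad MD5 hashes (the real helper
# may query an online service instead):
KNOWN_BAD_HASHES = set()  # hypothetical: loaded from a signature file

def check_hash(file_hash):
    if file_hash in KNOWN_BAD_HASHES:
        colors.print_fail(f"[!] Known malicious hash: {file_hash}")
    else:
        colors.print_info("[-] Hash not found in the local signature list")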
def fetch_source(self, module, module_name):
    # Get the fetch function from the module; default to None so a
    # missing handler doesn't raise AttributeError
    fetch_func = getattr(module, self.str_fetch_func, None)

    # If it's present, call it
    if fetch_func:
        # Fetch data from the source
        colors.print_warn("[-] Loading source %s" % module_name)
        fetch_func()
        colors.print_success("[x] Loaded source %s" % module_name)
    else:
        colors.print_error(
            "[!] fetch_handler function isn't available on module %s" %
            module_name)
def main():
    colors.print_info("[-] ProHacktive fetching running...")

    srcsmanager = SourcesManager()

    if srcsmanager.fetch_all():
        colors.print_info("[-] Sources generated signatures:")
        for source in srcsmanager.read_srcs_sigs():
            colors.print_info("    Source Name: %s -> %s" %
                              (source["_id"], source["sig"]))
        colors.print_success("[x] ProHacktive fetching done!")
    else:
        colors.print_error("[!] ProHacktive fetching failed")
def source_update(src_name):
    # Need a new connection for the new process
    phdb = prohacktivedb.ProHacktiveDB()

    # Compare the local source signature against the remote one
    source_local_sig = sourcehelper.read_source_sig(src_name).decode("utf8")
    source_remote_sig = phdb.find_src_sig_from_name(src_name)

    if source_local_sig == source_remote_sig:
        colors.print_info("[-] Same file signature on %s (%s-%s), skipping" %
                          (src_name, source_local_sig, source_remote_sig))
        return

    # Get the date of the newest update on the remote
    update_date_remote = phdb.find_src_dat_from_name(src_name)
    update_date_remote = datetime.strptime(update_date_remote,
                                           "%Y-%m-%dT%H:%M:%S")

    # Read the local source data to find entries newer than the remote
    source_data = json.loads(sourcehelper.read_source(src_name).decode("utf8"))

    vulnerabilities_to_update = list()

    for vulnerability in source_data:
        vulnerability_lastseen_date = vulnerability["_source"]["lastseen"]
        vulnerability_published_date = vulnerability["_source"]["published"]
        vulnerability_modified_date = vulnerability["_source"]["modified"]

        # Take the most recent of the three dates
        vulnerability_update_date = max(vulnerability_lastseen_date,
                                        vulnerability_modified_date,
                                        vulnerability_published_date)

        # If the date is newer than the last source fetch on the remote,
        # queue the vulnerability for update/insertion
        vulnerability_date_local = datetime.strptime(
            vulnerability_update_date, "%Y-%m-%dT%H:%M:%S")

        if vulnerability_date_local > update_date_remote:
            vulnerabilities_to_update.append(vulnerability)

    if len(vulnerabilities_to_update) == 0:
        raise Exception(
            "File signature has changed but no vulnerabilities to update found")

    # Push every queued vulnerability, then record the new signature
    for vulnerability in vulnerabilities_to_update:
        phdb.update_vulnerability(vulnerability, src_name)

    phdb.update_src_sig(src_name, source_local_sig)

    colors.print_success("[x] Updated %s" % src_name)
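
# Note: taking max() over the raw strings works because ISO-8601
# timestamps ("%Y-%m-%dT%H:%M:%S") sort lexicographically, e.g.:
assert max("2019-03-01T10:00:00",
           "2019-06-15T08:30:00",
           "2018-12-31T23:59:59") == "2019-06-15T08:30:00"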
def main():
    srcs_name = SourcesManager().list_all()
    phdb = prohacktivedb.ProHacktiveDB()

    if len(srcs_name) == 0:
        colors.print_warn("[-] No sources to update!")
    else:
        colors.print_warn("[-] Full updating on host %s with port %s" %
                          (phdb.host, phdb.port))

        colors.print_info("[-] Erasing old signatures")
        phdb.collections.drop_collection(phdb.get_srcs_sigs_collection_name())

        colors.print_info("[-] Erasing old data information")
        phdb.collections.drop_collection(phdb.get_srcs_dat_collection_name())

        colors.print_info("[-] Erasing old statistics")
        phdb.drop_remote_stats()

        colors.print_info("[-] Updating sources")

        processes_list = list()

        # Prepare a process for each source
        for src_name in srcs_name:
            processes_list.append(
                processes.CProcess(src_name, source_update, src_name))

        process_limit_update = config.current_config.process_limit_update

        # Run the source-update processes, bounded by the configured limit
        processes.handle_processes(processes_list, process_limit_update, 0.01)

        colors.print_success(
            "[x] ProHacktive database has been fully updated successfully!")
def connect_ssh(hostname, username, password, timeout_attempts=0):
    # timeout_attempts tracks rate-limit retries across recursive calls
    client = paramiko.SSHClient()

    try:
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    except Exception:
        pass

    try:
        colors.print_warning(
            f"[ATTEMPTING CONNECTION] || {hostname} || {username}:{password}")
        client.connect(hostname=hostname, username=username,
                       password=password, timeout=3)
    except socket.timeout as err:
        colors.print_fail(f"[!] Invalid Host: {hostname}")
        raise err
    except paramiko.AuthenticationException:
        return None
    except paramiko.SSHException as err:
        # The server is rate limiting us: wait, then retry up to 5 times
        timeout_attempts += 1
        if timeout_attempts < 5:
            colors.print_info(f"Time Locked retrying... {timeout_attempts}/5")
            time.sleep(60)
            return connect_ssh(hostname, username, password, timeout_attempts)
        else:
            raise err
    except Exception as err:
        raise err

    colors.print_success("[+] CONNECTION ESTABLISHED:")
    print(f"""
    {colors.colors.ENDC}HOSTNAME: {colors.colors.HEADER}{hostname}
    {colors.colors.ENDC}USERNAME: {colors.colors.HEADER}{username}
    {colors.colors.ENDC}PASSWORD: {colors.colors.HEADER}{password}
    {colors.colors.ENDC}""")
    return client
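
# Hypothetical usage: try each candidate password from a wordlist until
# connect_ssh() returns a client (None means authentication failed):
def brute_force(hostname, username, wordlist_path):
    with open(wordlist_path) as wordlist:
        for password in wordlist:
            client = connect_ssh(hostname, username, password.strip())
            if client is not None:
                return client
    return None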
def get_cookies(url):
    session = requests.Session()
    session.get(url)
    colors.print_header(f"--- {url} ---")
    colors.print_success(f"    [~] COOKIES: {session.cookies}")
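
# If callers need the values rather than printed output, a hypothetical
# variant could return the cookies as a plain dict
# (requests.utils.dict_from_cookiejar does the conversion):
def get_cookies_dict(url):
    session = requests.Session()
    session.get(url)
    return requests.utils.dict_from_cookiejar(session.cookies)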
# Count all dictionaries (count_dicts populates the global dict_count)
count_dicts(json_data)

pbar = colors.print_progress_start(dict_count)

colors.print_info("[-] Analyzing %i dictionaries..." % dict_count)

# Reset counter
dict_count = 0

# For each object, collect its keys
json_keys = make_dictionary_of_keys(json_data)

pbar.finish()
colors.print_success("")

# There are multiple possibilities because key values sometimes store
# different data types
"""if isinstance(json_keys, list):
    list_number = 1
    for jk in json_keys:
        colors.print_info("--- Possibility %i ----" % list_number)
        colors.print_success(json.dumps(jk, sort_keys=True, indent=4))
        list_number += 1
else:"""
colors.print_success(json.dumps(json_keys, sort_keys=True, indent=4))
colors.print_success("")

colors.print_success("[x] The json file has been successfully analyzed!")
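
# `count_dicts` is defined elsewhere; a minimal sketch, assuming it
# recursively counts every dictionary in the parsed JSON and stores the
# total in the global `dict_count` used above:
dict_count = 0

def count_dicts(node):
    global dict_count
    if isinstance(node, dict):
        dict_count += 1
        for value in node.values():
            count_dicts(value)
    elif isinstance(node, list):
        for item in node:
            count_dicts(item)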