Code Example #1
def fetch_handler():
    source_name = "CVESearch"
    colors.print_info("[-] Downloading %s" % source_name)
    source = sourcehelper.SourceHelper(
        "https://cve.circl.lu/static/circl-cve-search-expanded.json.gz")
    source_data = source.fetch()
    source_data = source_data.decode("utf8")
    source_data = source_data.replace("\n", ",\n")
    source_data = "[" + source_data
    source_data = source_data[:-2] + "]\n"
    # Reformat document
    # sourcehelper.write_source(source_name, source_data)
    # TODO: Reconvert data to Vulners JSON model
    source_data = json.dumps(json.loads(source_data))
    colors.print_success("Saving  source %s" % source_name)
    sourcehelper.write_source(source_name, source_data)
    sourcehelper.write_source_sig(source_name,
                                  sourcehelper.make_sig(source_data))
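The string handling above turns the newline-delimited JSON dump from cve.circl.lu into a single JSON array before it is re-serialized. A minimal standalone sketch of the same reshaping, using hypothetical two-line input data:

import json

# Hypothetical newline-delimited input: one JSON object per line (as in the
# decoded CVE dump), each line terminated by "\n".
raw = '{"id": "CVE-0000-0001"}\n{"id": "CVE-0000-0002"}\n'

# Add a comma after every object, wrap in brackets, drop the trailing ",\n".
reshaped = "[" + raw.replace("\n", ",\n")
reshaped = reshaped[:-2] + "]\n"

records = json.loads(reshaped)
print(len(records))  # -> 2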
Code Example #2
def source_update(src_name):
    # Need new connection for the new process
    phdb = prohacktivedb.ProHacktiveDB()
    src_sig = sourcehelper.read_source_sig(src_name).decode("utf8")
    src_dat = sourcehelper.read_file_bytes(
        sourcehelper.get_fetched_srcs_dir() + src_name + ".dat").decode("utf8")

    colors.print_info("[-] Inserting source %s signature %s" %
                      (src_name, src_sig))

    phdb.insert_src_sig(src_name, src_sig)

    colors.print_info("[-] Inserting source %s dat %s" %
                      (src_name, src_dat))

    phdb.insert_src_dat(src_name, src_dat)

    colors.print_info("[-] Inserting source %s" % src_name)
    src_data = json.loads(sourcehelper.read_source(src_name))

    colors.print_info("[-] Erasing old vulnerabilities %s ..." % src_name)
    phdb.collections.drop_collection(src_name)

    colors.print_info("[-] Inserting vulnerabilities of %s ..." % src_name)

    phdb.insert_vulnerabilities(src_data, src_name)

    colors.print_success("[x] Updated %s" % src_name)
Code Example #3
def main():
    colors.print_info("[-] ProHacktive updating running...")

    srcs_name = SourcesManager().list_all()

    phdb = prohacktivedb.ProHacktiveDB()

    if len(srcs_name) == 0:
        colors.print_warn("[-] No sources to update!")
    else:
        colors.print_warn("[-] updating on host %s with port %s" %
                          (phdb.host, phdb.port))

        colors.print_info("[-] Updating sources")

        processes_list = list()

        # Prepare a process for each source
        for src_name in srcs_name:
            processes_list.append(
                processes.CProcess(src_name, source_update, src_name))

        process_limit_update = config.current_config.process_limit_update

        # Process sources updating
        processes.handle_processes(processes_list, process_limit_update, 0.01)

        colors.print_success(
            "[x] ProHacktive database has been updated successfully!")
Code Example #4
def download(url, path):
    if os.path.exists(path):
        print_info("File %s already exists. Skipping download." % path)
        return
    u = urllib2.urlopen(url)

    make_dirs_if_needed(os.path.dirname(path))
    f = open(path, 'wb')
    meta = u.info()
    file_size = int(meta.getheaders("Content-Length")[0])

    print_info("Path: %s" % path)

    file_size_dl = 0
    block_sz = 8192
    while True:
        buffer = u.read(block_sz)
        if not buffer:
            break
        file_size_dl += len(buffer)
        f.write(buffer)
        percent = file_size_dl * 100. / file_size
        msg = "Downloading: [%s]" % sizeof_fmt(file_size) + " ... " + str(
            int(percent))
        sys.stdout.write("\r%s%%" % msg)
        sys.stdout.flush()
    print
    f.close()
Code Example #5
File: downloader.py Project: r00tarded/giffy
def reporthook(blocknum, blocksize, totalsize):
    # The final block can overshoot the total size, so clamp before computing
    readsofar = min(blocknum * blocksize, totalsize)
    percent = int((readsofar * 1e2 / totalsize) / 2)

    r_size = totalsize / 1024**2
    d_size = readsofar / 1024**2

    pgbar = '[{}{}] '.format('█' * percent, ' ' * (50 - percent)) + '[{0:.2f}/{1:.2f} MB]'.format(d_size, r_size)

    colors.print_info('Downloading: ', pgbar, start='\r', end='\r')
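Since reporthook takes (blocknum, blocksize, totalsize), it matches the callback signature that urllib.request.urlretrieve expects, so it can be plugged in directly; a usage sketch with a hypothetical URL and output path:

import urllib.request

# Hypothetical download target; reporthook is invoked after every block read.
url = "https://example.com/archive.tar.gz"
urllib.request.urlretrieve(url, "archive.tar.gz", reporthook=reporthook)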
Code Example #6
def source_update(src_name):
    # Need new connection for the new process
    phdb = prohacktivedb.ProHacktiveDB()

    # Read local sources signatures
    source_local_sig = sourcehelper.read_source_sig(src_name).decode("utf8")
    source_remote_sig = phdb.find_src_sig_from_name(src_name)

    if source_local_sig == source_remote_sig:
        colors.print_info("[-] Same file signature on %s (%s-%s), skipping" %
                          (src_name, source_local_sig, source_remote_sig))
        return

    # Get time from the top newest update
    update_date_remote = phdb.find_src_dat_from_name(src_name)
    update_date_remote = datetime.strptime(update_date_remote,
                                           "%Y-%m-%dT%H:%M:%S")

    # Find first the top newest updates on local
    # Read source data
    source_data = json.loads(sourcehelper.read_source(src_name).decode("utf8"))

    vulnerabilities_to_update = list()

    for vulnerability in source_data:
        vulnerability_lastseen_date = vulnerability["_source"]["lastseen"]
        vulnerability_published_date = vulnerability["_source"]["published"]
        vulnerability_modified_date = vulnerability["_source"]["modified"]
        # Get the max date between all those dates
        vulnerability_update_date = max(vulnerability_lastseen_date,
                                        vulnerability_modified_date,
                                        vulnerability_published_date)
        # If the date is higher than the last source fetching date on remote,
        # we append the vulnerabilities we need to update/insert
        vulnerability_date_local = datetime.strptime(vulnerability_update_date,
                                                     "%Y-%m-%dT%H:%M:%S")
        if vulnerability_date_local > update_date_remote:
            vulnerabilities_to_update.append(vulnerability)

    if len(vulnerabilities_to_update) == 0:
        raise Exception(
            "File signature has changed but no vulnerabilities to update found"
        )

    # Update all vulnerabilities into the list
    for vulnerability in vulnerabilities_to_update:
        phdb.update_vulnerability(vulnerability, src_name)

    phdb.update_src_sig(src_name, source_local_sig)
    colors.print_success("[x] Updated %s" % src_name)
Code Example #7
def check_hash(file_hash):
    try:
        res = vtotal.file_report([file_hash])
        data = res.get('json_resp')
        if (data.get('response_code') == 0):
            colors.print_header('  [~] No threat found')
            return
        positives = int(data.get("positives"))
        if (positives >= 3):
            colors.print_fail(f'  [!] {positives} Threats found ')
        else:
            colors.print_warning(f'  [!] {positives} Potential threats found')
    except Exception as err:
        colors.print_info("  [!] MAX REQUESTS EXCEEDED")
Code Example #8
    def fetch(self) -> bytes:
        try:
            response = requests.get(self.url)
        except requests.RequestException as e:
            colors.print_error("[!]" + e)
            return False

        # Get response
        data = response.content

        # Check if data is compressed
        if is_compressed(data):
            colors.print_info("[-] Decompressing %s" % self.url)
            # Write to temporary file the response
            if not os.path.exists(temp_dir):
                os.mkdir(temp_dir)

            temp_filename = temp_dir + "tempfile"
            # Sadly we need to write it to a file because pyunpack can't yet
            # decompress from binary data directly from memory
            temp_file = open(temp_filename, "wb")
            temp_file.write(data)
            temp_file.close()

            # Decompress
            filename = temp_filename
            archive_dir = temp_dir + "archive/"

            if not os.path.exists(archive_dir):
                os.mkdir(archive_dir)

            # Sometimes it's compressed multiple times
            while True:
                arch = pyunpack.Archive(filename)
                arch.extractall(archive_dir)
                os.remove(filename)
                filename = archive_dir + os.listdir(archive_dir)[0]
                compressed = is_file_compressed(filename)
                if not compressed:
                    break

            # Read the fully decompressed file back and remove the temporary copy
            temp_file = open(filename, "rb")
            data = bytes(temp_file.read())
            temp_file.close()
            os.remove(filename)

        return data
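The is_compressed and is_file_compressed helpers are not part of this excerpt; a hypothetical magic-byte check along these lines would satisfy the calls above (the project's real helpers may differ):

# Hypothetical helpers: detect common archive formats by their magic bytes.
GZIP_MAGIC = b"\x1f\x8b"
ZIP_MAGIC = b"PK\x03\x04"
BZ2_MAGIC = b"BZh"


def is_compressed(data: bytes) -> bool:
    return data.startswith((GZIP_MAGIC, ZIP_MAGIC, BZ2_MAGIC))


def is_file_compressed(path: str) -> bool:
    with open(path, "rb") as f:
        return is_compressed(f.read(4))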
Code Example #9
def connect_ssh(hostname, username, password, timeout_attempts=0):
    client = paramiko.SSHClient()
    try:
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    except:
        pass
    try:
        colors.print_warning(
            f"[ATTEMPTING CONNECTION] || {hostname} || {username}:{password}")
        client.connect(hostname=hostname,
                       username=username,
                       password=password,
                       timeout=3)
    except socket.timeout as err:
        colors.print_fail(f"[!] Invalid Host: {hostname}")
        raise err
    except paramiko.AuthenticationException:
        return None
    except paramiko.SSHException as err:
        # The retry count is carried through the recursive call below
        timeout_attempts += 1
        if timeout_attempts < 5:
            colors.print_info(f"Time Locked retrying... {timeout_attempts}/5")
            time.sleep(60)
            return connect_ssh(hostname, username, password, timeout_attempts)
        else:
            raise err
    except Exception as err:
        raise err

    colors.print_success("[+] CONNECTION ESTABLISHED:")

    print(f"""
    {colors.colors.ENDC}HOSTNAME: {colors.colors.HEADER}{hostname}
    {colors.colors.ENDC}USERNAME: {colors.colors.HEADER}{username}
    {colors.colors.ENDC}PASSWORD: {colors.colors.HEADER}{password}
    {colors.colors.ENDC}
    """)
    return client
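Once connect_ssh returns a client, commands can be run over the session with paramiko's exec_command; a usage sketch with a hypothetical target and credentials:

# Hypothetical host and credentials, for illustration only.
client = connect_ssh("192.0.2.10", "root", "toor")
if client is not None:
    stdin, stdout, stderr = client.exec_command("id")
    print(stdout.read().decode("utf8"))
    client.close()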
Code Example #10
def fetch_handler():
    vulners_api = vulners.Vulners(vulners_api_key)
    colors.print_info("[-] Vulners API loading multiples collections:\n")

    count_for_newline = 0
    print_new_line = False

    collections = vulners_api.collections()
    number_of_collections = len(collections)

    for collection_name in collections:
        if (count_for_newline > 0) and ((count_for_newline % 6) == 0):
            print("\n", end="")
            print_new_line = False

        if count_for_newline == (number_of_collections - 1):
            print(collection_name, end="")
        else:
            print(collection_name + ", ", end="")

        count_for_newline += 1
        print_new_line = True

    if print_new_line:
        print("\n")

    colors.print_warn("[-] Downloading %i sub-sources..." % len(collections))

    processes_list = list()
    for collection_name in collections:
        processes_list.append(
            processes.CProcess(collection_name, vulners_source_fetch,
                               vulners_api_key, collection_name))

    processes_count_limit = len(processes_list)
    processes.handle_processes(processes_list, processes_count_limit, 0.01)
Code Example #11
def main():

    colors.print_info("[-] ProHacktive fetching running...")

    srcsmanager = SourcesManager()
    if srcsmanager.fetch_all():
        colors.print_info("[-] Sources generated signatures:")
        for source in srcsmanager.read_srcs_sigs():
            colors.print_info("     Source Name: %s -> %s" %
                              (source["_id"], source["sig"]))
        colors.print_success("[x] ProHacktive fetching done!")
    else:
        colors.print_error("[!] ProHacktive fetching failed")
Code Example #12
def main():
    phdb = prohacktivedb.ProHacktiveDB()

    if len(srcs_name) == 0:
        colors.print_warn("[-] No sources to update!")
    else:
        colors.print_warn(
            "[-] Full updating on host %s with port %s" %
            (phdb.host, phdb.port))

        colors.print_info("[-] Erasing old signatures")
        phdb.collections.drop_collection(phdb.get_srcs_sigs_collection_name())

        colors.print_info("[-] Erasing old data informations")
        phdb.collections.drop_collection(phdb.get_srcs_dat_collection_name())

        colors.print_info("[-] Erasing old statistics")
        phdb.drop_remote_stats()

        colors.print_info("[-] Updating sources")

        processes_list = list()

        # Prepare a process for each source
        for src_name in srcs_name:
            processes_list.append(
                processes.CProcess(
                    src_name,
                    source_update,
                    src_name))

        process_limit_update = config.current_config.process_limit_update

        # Process sources updating
        processes.handle_processes(processes_list, process_limit_update, 0.01)

        colors.print_success(
            "[x] ProHacktive database has been full updated successfully!")
Code Example #13
    def __del__(self):
        # Inserts stats at the end of connection
        colors.print_info("[-] Updating stats on %s:%i" %
                          (self.host, self.port))

        self.update_remote_stats()
Code Example #14
import os
import sys
import json

runPath = os.path.dirname(os.path.realpath(__file__ + "../"))
sys.path.append(runPath + "/lib/")

import config
import colors
import prohacktivedb
import processes
import sourcehelper
from sourcesmanager import SourcesManager

colors.print_info("[-] ProHacktive full updating running...")

srcs_name = SourcesManager().list_all()


def source_update(src_name):
    # Need new connection for the new process
    phdb = prohacktivedb.ProHacktiveDB()
    src_sig = sourcehelper.read_source_sig(src_name).decode("utf8")
    src_dat = sourcehelper.read_file_bytes(
        sourcehelper.get_fetched_srcs_dir() + src_name + ".dat").decode("utf8")

    colors.print_info("[-] Inserting source %s signature %s" %
                      (src_name, src_sig))

    phdb.insert_src_sig(src_name, src_sig)
Code Example #15
def vulners_source_fetch(api_key, collection_name):
    source_name = "Vulners_" + collection_name
    source_file_data = source_name + ".dat"
    source_file_sig = source_name + ".sig"
    datetime_now = datetime.now()
    time_fmt = datetime_now.strftime("%Y-%m-%dT%H:%M:%S")
    important_files_exists = (
        os.path.isfile(fetched_srcs + source_name)
        and os.path.isfile(fetched_srcs + source_file_sig)
        and os.path.isfile(fetched_srcs + source_file_data))

    if important_files_exists:
        read_source_date = sourcehelper.read_file(fetched_srcs +
                                                  source_file_data)
        from_date = str(read_source_date)
        to_date = time_fmt

        colors.print_info("[-] Downloading %s from date %s to date %s" %
                          (collection_name, from_date, to_date))

        vulners_api = vulners.Vulners(api_key)
        source_update = vulners_api.vulners_get_request(
            'archive', {
                'type': collection_name,
                'datefrom': from_date
            })

        # Decompress zip data
        with ZipFile(BytesIO(source_update)) as zip_file:
            if len(zip_file.namelist()) > 1:
                raise Exception("Unexpected file count in Vulners ZIP archive")
            file_name = zip_file.namelist()[0]
            source_update = bytes(zip_file.open(file_name).read())

        source_update = json.loads(source_update.decode("utf8"))

        # No updates
        if len(source_update) == 0:
            colors.print_info("[-] No updates on %s, skipping" % source_name)
            sourcehelper.write_file(fetched_srcs + source_file_data, to_date)
            return

        source_data = sourcehelper.read_source(source_name).decode("utf8")
        source_data = json.loads(source_data)

        # Find every vulnerability in the file that needs an update and apply it
        for vulnerability_update in source_update:
            # By default it's not inserted
            updated_vulnerability = False
            # Check if it has been inserted already
            for vulnerability_index in range(len(source_data)):
                if source_data[vulnerability_index][
                        "_id"] == vulnerability_update["_id"]:
                    source_data[vulnerability_index] = vulnerability_update
                    updated_vulnerability = True
            # If the vulnerability isn't here, we append it into the file
            if not updated_vulnerability:
                source_data.append(vulnerability_update)

        colors.print_info("[-] Saving file signature %s" %
                          (fetched_srcs + source_file_sig))

        # Re-encode data
        source_data = bytes(json.dumps(source_data).encode("utf8"))

        # Write file signature
        sourcehelper.write_source_sig(source_name,
                                      sourcehelper.make_sig(source_data))

        # Write the file date (we could read it from the OS, but storing it explicitly is safer)
        sourcehelper.write_file(fetched_srcs + source_file_data, to_date)

        colors.print_info("[-] Saving source %s" %
                          (fetched_srcs + source_name))

        sourcehelper.write_source(source_name, source_data)

    else:

        colors.print_info("[-] Downloading %s" % source_name)
        vulners_api = vulners.Vulners(api_key)
        collection = vulners_api.vulners_get_request('archive',
                                                     {'type': collection_name})

        # Decompress zip data
        with ZipFile(BytesIO(collection)) as zip_file:
            if len(zip_file.namelist()) > 1:
                raise Exception("Unexpected file count in Vulners ZIP archive")
            file_name = zip_file.namelist()[0]
            # Re-serialize through json so the signature is computed over a normalized document, then convert to bytes
            collection = bytes(
                json.dumps(
                    json.loads(zip_file.open(file_name).read().decode(
                        "utf8"))).encode("utf8"))

        src_sig = sourcehelper.make_sig(collection)

        # Write collection date, used for faster updates by using the API
        sourcehelper.write_file(fetched_srcs + source_file_data, time_fmt)
        colors.print_info("[-] Saving file signature %s" %
                          (fetched_srcs + source_file_sig))

        # Write file signature
        sourcehelper.write_source_sig(source_name, src_sig)

        colors.print_info("[-] Saving source %s" %
                          (fetched_srcs + source_name))

        sourcehelper.write_source(source_name, collection)
Code Example #16
def count_dicts(json_obj):
    # If the top-level object is a dictionary, count it directly
    if isinstance(json_obj, dict):
        _count_dicts(json_obj)
    # Otherwise it must be a list, so count each element
    else:
        for o in json_obj:
            _count_dicts(o)


# Append a default source path so sys.argv[1] is always set
sys.argv.append("tmp/fetched_srcs/Vulners_cve")
if sys.argv[1]:

    # Read json data
    colors.print_info("[-] Loading %s" % sys.argv[1])
    json_file = open(sys.argv[1], "r")
    json_data = json.loads(json_file.read())
    json_file.close()

    colors.print_info("[-] Counting dictionaries...")

    dict_count = 0

    # Count all dictionaries
    count_dicts(json_data)

    pbar = colors.print_progress_start(dict_count)

    colors.print_info("[-] Analyzing %i dictionaries..." % dict_count)
Code Example #17
        config_gen["DATABASE"] = {
            "db_host": self.db_host,
            "db_port": str(self.db_port),
            "db_name": self.db_name,
            "db_user": self.db_user,
            "db_pass": self.db_password}
        config_gen["VULNERS"] = {"vulners_api_key": self.vulners_api_key}
        config_gen["processes"] = {
            "process_limit_update": int(self.process_limit_update), "process_limit_fetch": int(self.process_limit_fetch)}
        return config_gen


default_config = Config()
current_config = default_config

colors.print_info("[-] Parsing %s" % config_filename)
config = configparser.ConfigParser()
dataset = config.read(config_filename)
if len(dataset) == 0:
    colors.print_error("[!] Couldn't read config file at %s" % config_filename)
    colors.print_warn("[!] Using default configuration")
else:
    db = config["DATABASE"]
    vulners_api_key = config["VULNERS"]["vulners_api_key"]
    processes = config["PROCESSES"]
    current_config = Config(
        db["db_host"],
        db["db_port"],
        db["db_name"],
        db["db_user"],
        db["db_pass"],