def check_cert(cert_path):
    try:
        cache_path = "/opt/bunkerweb/cache/customcert/" + cert_path.replace("/", "_") + ".hash"
        current_hash = jobs.file_hash(cert_path)
        if not os.path.isfile(cache_path):
            with open(cache_path, "w") as f:
                f.write(current_hash)
        # Read the previously stored hash back from the cache file
        # (hashing the cache file itself would never match the cert hash)
        with open(cache_path, "r") as f:
            old_hash = f.read()
        if old_hash == current_hash:
            return False
        with open(cache_path, "w") as f:
            f.write(current_hash)
        return True
    except:
        logger.log("CUSTOM-CERT", "❌", "Exception while running custom-cert.py (check_cert) :")
        print(traceback.format_exc())
    return False
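
check_cert() relies on jobs.file_hash() to detect whether the certificate changed between runs. The helper itself is not shown in this listing; a minimal sketch of what such a function could look like, assuming it simply returns a hex digest of the file contents (the hashing algorithm here is an assumption, not necessarily what the jobs module uses):

import hashlib

def file_hash(path):
    # Hash the file in chunks so large files do not need to fit in memory
    digest = hashlib.sha512()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            digest.update(chunk)
    return digest.hexdigest()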
Example #2
    logger.log("JOBS", "ℹ️",
               "Downloading mmdb file from url " + mmdb_url + " ...")
    resp = requests.get(mmdb_url)

    # Save it to temp
    logger.log("JOBS", "ℹ️", "Saving mmdb file to tmp ...")
    with open("/opt/bunkerweb/tmp/asn.mmdb", "wb") as f:
        f.write(gzip.decompress(resp.content))

    # Try to load it
    logger.log("JOBS", "ℹ️", "Checking if mmdb file is valid ...")
    with maxminddb.open_database("/opt/bunkerweb/tmp/asn.mmdb") as reader:
        pass

    # Check if file has changed
    file_hash = jobs.file_hash("/opt/bunkerweb/tmp/asn.mmdb")
    cache_hash = jobs.cache_hash("/opt/bunkerweb/cache/asn.mmdb")
    if file_hash == cache_hash:
        logger.log(
            "JOBS", "ℹ️",
            "New file is identical to cache file, reload is not needed")
        os._exit(0)

    # Move it to cache folder
    logger.log("JOBS", "ℹ️", "Moving mmdb file to cache ...")
    cached, err = jobs.cache_file("/opt/bunkerweb/tmp/asn.mmdb",
                                  "/opt/bunkerweb/cache/asn.mmdb", file_hash)
    if not cached:
        logger.log("JOBS", "❌", "Error while caching mmdb file : " + err)
        os._exit(2)
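
jobs.cache_file() and jobs.cache_hash() are internal BunkerWeb job helpers and are not part of this listing. The usage above shows that cache_file() returns a (success, error) pair; everything else in the sketch below (the .hash sidecar layout, the copy step) is an assumption meant only to illustrate the pattern of caching a file together with its hash for the next run:

import shutil

def cache_file(src, dst, checksum):
    # Copy the freshly downloaded file into the cache and record its hash;
    # return (True, "") on success or (False, error message) on failure
    try:
        shutil.copy(src, dst)
        with open(dst + ".hash", "w") as f:
            f.write(checksum)
        return True, ""
    except OSError as e:
        return False, str(e)

def cache_hash(path):
    # Read back the hash recorded for a cached file, or None if not cached yet
    try:
        with open(path + ".hash", "r") as f:
            return f.read().strip()
    except OSError:
        return None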
Example #3
                if line == "" or line.startswith("#") or line.startswith(";"):
                    continue
                ok, data = check_line(line)
                if ok:
                    f.write(data + "\n")
                    i += 1
        except:
            status = 2
            logger.log(
                "REALIP", "❌",
                "Exception while getting RealIP list from " + url + " :")
            print(traceback.format_exc())
    f.close()

    # Check if file has changed
    file_hash = jobs.file_hash("/opt/bunkerweb/tmp/realip-combined.list")
    cache_hash = jobs.cache_hash("/opt/bunkerweb/cache/realip/combined.list")
    if file_hash == cache_hash:
        logger.log(
            "REALIP", "ℹ️",
            "New file is identical to cache file, reload is not needed")
        os._exit(0)

    # Put file in cache
    cached, err = jobs.cache_file("/opt/bunkerweb/tmp/realip-combined.list",
                                  "/opt/bunkerweb/cache/realip/combined.list",
                                  file_hash)
    if not cached:
        logger.log("REALIP", "❌", "Error while caching list : " + err)
        os._exit(2)
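
check_line() is defined earlier in the real job and is not part of this fragment. For the RealIP list it only has to accept IPv4/IPv6 addresses and networks, so a hypothetical validator built on the standard ipaddress module could be as simple as:

import ipaddress

def check_line(line):
    # Accept the line if it parses as an IP address or CIDR network
    try:
        ipaddress.ip_network(line, strict=False)
        return True, line
    except ValueError:
        return False, ""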
           "w") as f:
     for line in resp.iter_lines(decode_unicode=True):
         line = line.strip()
         if kind != "USER_AGENT":
             line = line.strip().split(" ")[0]
         if line == "" or line.startswith(
                 "#") or line.startswith(";"):
             continue
         ok, data = check_line(kind, line)
         if ok:
             f.write(data + "\n")
             i += 1
 logger.log("WHITELIST", "ℹ️",
            "Downloaded " + str(i) + " bad " + kind)
 # Check if file has changed
 file_hash = jobs.file_hash("/opt/bunkerweb/tmp/whitelist/" +
                            kind + ".list")
 cache_hash = jobs.cache_hash(
     "/opt/bunkerweb/cache/whitelist/" + kind + ".list")
 if file_hash == cache_hash:
     logger.log(
         "WHITELIST", "ℹ️", "New file " + kind +
         ".list is identical to cache file, reload is not needed"
     )
 else:
     logger.log(
         "WHITELIST", "ℹ️", "New file " + kind +
         ".list is different than cache file, reload is needed")
     # Put file in cache
     cached, err = jobs.cache_file(
         "/opt/bunkerweb/tmp/whitelist/" + kind + ".list",
         "/opt/bunkerweb/cache/whitelist/" + kind + ".list",
         file_hash)
Example #5
    logger.log("JOBS", "ℹ️",
               "Downloading mmdb file from url " + mmdb_url + " ...")
    resp = requests.get(mmdb_url)

    # Save it to temp
    logger.log("JOBS", "ℹ️", "Saving mmdb file to tmp ...")
    with open("/opt/bunkerweb/tmp/country.mmdb", "wb") as f:
        f.write(gzip.decompress(resp.content))

    # Try to load it
    logger.log("JOBS", "ℹ️", "Checking if mmdb file is valid ...")
    with maxminddb.open_database("/opt/bunkerweb/tmp/country.mmdb") as reader:
        pass

    # Check if file has changed
    file_hash = jobs.file_hash("/opt/bunkerweb/tmp/country.mmdb")
    cache_hash = jobs.cache_hash("/opt/bunkerweb/cache/country.mmdb")
    if file_hash == cache_hash:
        logger.log(
            "JOBS", "ℹ️",
            "New file is identical to cache file, reload is not needed")
        os._exit(0)

    # Move it to cache folder
    logger.log("JOBS", "ℹ️", "Moving mmdb file to cache ...")
    cached, err = jobs.cache_file("/opt/bunkerweb/tmp/country.mmdb",
                                  "/opt/bunkerweb/cache/country.mmdb",
                                  file_hash)
    if not cached:
        logger.log("JOBS", "❌", "Error while caching mmdb file : " + err)
        os._exit(2)
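
Once the file has been moved to the cache, it can be queried with the same maxminddb package that is used above for validation. For example (the IP address is only an illustration):

import maxminddb

with maxminddb.open_database("/opt/bunkerweb/cache/country.mmdb") as reader:
    # get() returns the record for the IP (a dict) or None if it is unknown
    print(reader.get("8.8.8.8"))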
Example #6
          continue
     i = 0
     with open("/opt/bunkerweb/tmp/blacklist/" + kind + ".list", "w") as f :
         for line in resp.iter_lines(decode_unicode=True) :
             line = line.strip()
             if kind != "USER_AGENT" :
                 line = line.strip().split(" ")[0]
             if line == "" or line.startswith("#") or line.startswith(";") :
                 continue
             ok, data = check_line(kind, line)
             if ok :
                 f.write(data + "\n")
                 i += 1
     logger.log("BLACKLIST", "ℹ️", "Downloaded " + str(i) + " bad " + kind)
     # Check if file has changed
     file_hash = jobs.file_hash("/opt/bunkerweb/tmp/blacklist/" + kind + ".list")
     cache_hash = jobs.cache_hash("/opt/bunkerweb/cache/blacklist/" + kind + ".list")
     if file_hash == cache_hash:
         logger.log("BLACKLIST", "ℹ️", "New file " + kind + ".list is identical to cache file, reload is not needed")
     else:
         logger.log("BLACKLIST", "ℹ️", "New file " + kind + ".list is different than cache file, reload is needed")
         # Put file in cache
         cached, err = jobs.cache_file("/opt/bunkerweb/tmp/blacklist/" + kind + ".list", "/opt/bunkerweb/cache/blacklist/" + kind + ".list", file_hash)
         if not cached:
             logger.log("BLACKLIST", "❌", "Error while caching blacklist : " + err)
             status = 2
         if status != 2:
             status = 1
 except:
     status = 2
     logger.log("BLACKLIST", "❌", "Exception while getting blacklist from " + url + " :")
     print(traceback.format_exc())