Example #1
#!/usr/bin/python3

import sys, os, traceback

sys.path.append("/opt/bunkerweb/deps/python")
sys.path.append("/opt/bunkerweb/utils")

import logger, jobs
import requests, datetime, gzip, maxminddb

status = 0

try:

    # Don't go further if the cache is fresh
    if jobs.is_cached_file("/opt/bunkerweb/cache/asn.mmdb", "month"):
        logger.log("JOBS", "ℹ️",
                   "asn.mmdb is already in cache, skipping download...")
        os._exit(0)

    # Compute the mmdb URL
    today = datetime.date.today()
    mmdb_url = "https://download.db-ip.com/free/dbip-asn-lite-{}-{}.mmdb.gz".format(
        today.strftime("%Y"), today.strftime("%m"))

    # Download the mmdb file
    logger.log("JOBS", "ℹ️",
               "Downloading mmdb file from url " + mmdb_url + " ...")
    resp = requests.get(mmdb_url)

    # Save it to temp
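
This job, like the others on this page, gates its download on jobs.is_cached_file() from /opt/bunkerweb/utils, which is not shown in the examples. Below is a minimal sketch of such a freshness check, assuming it only compares the cached file's age against the named window; the real BunkerWeb helper may track more metadata.

import os, time

# Hypothetical freshness windows in seconds for the names used in these jobs
_WINDOWS = {"hour": 3600, "day": 86400, "month": 30 * 86400}

def is_cached_file(path, window):
    # Fresh means the file exists and its mtime falls inside the window
    if not os.path.isfile(path):
        return False
    return (time.time() - os.path.getmtime(path)) < _WINDOWS.get(window, 3600)
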
Example #2
    os.makedirs("/opt/bunkerweb/tmp/whitelist", exist_ok=True)

    # Our urls data
    urls = {"IP": [], "RDNS": [], "ASN": [], "USER_AGENT": [], "URI": []}

    # Don't go further if the cache is fresh
    kinds_fresh = {
        "IP": True,
        "RDNS": True,
        "ASN": True,
        "USER_AGENT": True,
        "URI": True
    }
    all_fresh = True
    for kind in kinds_fresh:
        if not jobs.is_cached_file(
                "/opt/bunkerweb/cache/whitelist/" + kind + ".list", "hour"):
            kinds_fresh[kind] = False
            all_fresh = False
            logger.log(
                "WHITELIST", "ℹ️", "Whitelist for " + kind +
                " is not cached, processing downloads...")
        else:
            logger.log(
                "WHITELIST", "ℹ️", "Whitelist for " + kind +
                " is already in cache, skipping downloads...")
    if all_fresh:
        os._exit(0)

    # Get URLs
    urls = {"IP": [], "RDNS": [], "ASN": [], "USER_AGENT": [], "URI": []}
    for kind in urls:
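
The example breaks off inside the URL-gathering loop. The RealIP job below fills its list from a space-separated REALIP_FROM_URLS variable, and the whitelist job presumably does the same for each kind. A sketch of that continuation, assuming a hypothetical WHITELIST_<KIND>_URLS naming scheme (not taken from the snippet):

    for kind in urls:
        # Assumed env-var naming (WHITELIST_IP_URLS, WHITELIST_RDNS_URLS, ...)
        for url in os.getenv("WHITELIST_" + kind + "_URLS", "").split(" "):
            if url != "" and url not in urls[kind]:
                urls[kind].append(url)
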
Example #3
                         os.getenv("USE_REALIP")) == "yes":
                blacklist_activated = True
                break
    # Singlesite case
    elif os.getenv("USE_REALIP") == "yes":
        blacklist_activated = True
    if not blacklist_activated:
        logger.log("REALIP", "ℹ️",
                   "RealIP is not activated, skipping download...")
        os._exit(0)

    # Create directory if it doesn't exist
    os.makedirs("/opt/bunkerweb/cache/realip", exist_ok=True)

    # Don't go further if the cache is fresh
    if jobs.is_cached_file("/opt/bunkerweb/cache/realip/combined.list",
                           "hour"):
        logger.log("REALIP", "ℹ️",
                   "RealIP list is already in cache, skipping download...")
        os._exit(0)

    # Get URLs
    urls = []
    for url in os.getenv("REALIP_FROM_URLS", "").split(" "):
        if url != "" and url not in urls:
            urls.append(url)

    # Download and write data to temp file
    i = 0
    f = open("/opt/bunkerweb/tmp/realip-combined.list", "w")
    for url in urls:
        try:
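
The per-URL try block is cut off here. A plausible body, sketched under the assumption that each list is plain text with one entry per line and that a failing URL should only be logged and skipped rather than abort the job:

        try:
            resp = requests.get(url, stream=True)
            for line in resp.iter_lines(decode_unicode=True):
                # Append each non-empty entry to the combined temp file
                if line:
                    f.write(line + "\n")
                    i += 1
        except Exception:
            logger.log("REALIP", "⚠️",
                       "Can't download list from " + url + ", skipping it...")
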
Example #4
#!/usr/bin/python3

import sys, os, traceback

sys.path.append("/opt/bunkerweb/deps/python")
sys.path.append("/opt/bunkerweb/utils")

import logger, jobs
import requests, datetime, gzip, maxminddb

status = 0

try:

    # Don't go further if the cache is fresh
    if jobs.is_cached_file("/opt/bunkerweb/cache/country.mmdb", "month"):
        logger.log("JOBS", "ℹ️",
                   "country.mmdb is already in cache, skipping download...")
        os._exit(0)

    # Compute the mmdb URL
    today = datetime.date.today()
    mmdb_url = "https://download.db-ip.com/free/dbip-country-lite-{}-{}.mmdb.gz".format(
        today.strftime("%Y"), today.strftime("%m"))

    # Download the mmdb file
    logger.log("JOBS", "ℹ️",
               "Downloading mmdb file from url " + mmdb_url + " ...")
    resp = requests.get(mmdb_url)

    # Save it to temp
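
Both mmdb examples stop at the same point. Given the modules they import (gzip, maxminddb), the remaining steps are presumably to write the archive to /opt/bunkerweb/tmp, decompress it, and verify that it opens as a valid MaxMind DB before it is moved into the cache. A sketch under those assumptions (file names are illustrative):

    # Save the downloaded archive to temp
    logger.log("JOBS", "ℹ️", "Saving mmdb file to tmp ...")
    with open("/opt/bunkerweb/tmp/country.mmdb.gz", "wb") as fgz:
        fgz.write(resp.content)

    # Decompress it next to the archive
    with gzip.open("/opt/bunkerweb/tmp/country.mmdb.gz", "rb") as fgz, \
            open("/opt/bunkerweb/tmp/country.mmdb", "wb") as fdb:
        fdb.write(fgz.read())

    # Sanity check: the decompressed file must open as a valid MaxMind DB
    reader = maxminddb.open_database("/opt/bunkerweb/tmp/country.mmdb")
    reader.close()

How the verified file is then promoted from tmp into /opt/bunkerweb/cache is not shown in the example.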