Example #1
def sqlmap_parse_data_extracted(sqlmap_output):
    global data_to_extract
    # Let's parse the data extracted
    debugMsg = "sqlmap output {}".format(sqlmap_output)
    logger.debug(debugMsg)
    extracted_values = {}
    debugMsg = "data to extract {}".format(data_to_extract)
    logger.debug(debugMsg)
    for tblcol in data_to_extract:
        debugMsg = "tblcol: ".format(tblcol)
        logger.debug(debugMsg)
        tbl_list = tblcol.split(".")
        debugMsg = "tbl_list[1]: ".format(tbl_list[1])
        logger.debug(debugMsg)
        tmp_table = tbl_list[0]
        tmp_column = tbl_list[1]
        # make sure the table entry exists before adding columns
        extracted_values.setdefault(tmp_table, {})
        try:
            extracted_values[tmp_table][tmp_column] = sqlmap_output["data"][2]["value"][tmp_column]["values"]
        except Exception:
            logger.critical("error in the sqlmap output")
            sqlmap.kill()
            exit()
        logger.debug("Ending sqlmap extraction")
        sqlmap.kill()
    return extracted_values
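
For reference, the parser above indexes the sqlmap API response as sqlmap_output["data"][2]["value"][column]["values"]. A minimal sketch of the shape it assumes, with a hypothetical table users.username (the exact layout depends on the sqlmap version in use):

sqlmap_output = {
    "data": [
        {},  # entry 0: not used here
        {},  # entry 1: not used here
        {    # entry 2: the dump of the requested columns
            "value": {
                "username": {"values": ["admin", "guest"]}
            }
        }
    ]
}
# with data_to_extract = ["users.username"] the function returns
# {"users": {"username": ["admin", "guest"]}}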
Example #2
def run_wfuzz(self, url):
    """
    Executes Wfuzz and returns the list of URLs retrieved during the fuzzing.
    :param url: the URL of the target
    """
    self._wfuzz_cmd.append(url)
    debugMsg = "executing WFUZZ {}".format(self._wfuzz_cmd)
    logger.debug(debugMsg)
    p1 = subprocess.Popen(self._wfuzz_cmd,
                          cwd=self._wfuzz_path,
                          universal_newlines=True,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE)
    try:
        out, err = p1.communicate(timeout=10)
        debugMsg = "wfuzz out {}".format(out)
        logger.debug(debugMsg)
        # return a list containing the successful URLs
        urls = []
        json_out = json.loads(out)
        for req in json_out:
            urls.append(req["url"])
        return urls
    except subprocess.TimeoutExpired:
        p1.kill()
        logger.critical("Error: wfuzz timed out.")
        exit()
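
Note that run_wfuzz parses stdout with json.loads and reads a "url" key from each entry, so the preconfigured self._wfuzz_cmd must make Wfuzz emit JSON (for example via its -o json output option). A hypothetical call, assuming an owner class that wires up _wfuzz_cmd and _wfuzz_path:

fuzzer = WebFuzzer()  # hypothetical class setting _wfuzz_cmd and _wfuzz_path
for url in fuzzer.run_wfuzz("http://target.local/FUZZ"):
    logger.info("found {}".format(url))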
Example #3
def del_task(task_id):
    url = SQLMAP_BASE_URL + "/task/" + task_id + "/delete"
    r = requests.get(url)
    try:
        json_result = json.loads(r.text)
    except json.decoder.JSONDecodeError as e:
        logger.critical("JSON decoder error")
        exit()
    return json_result['success']
Example #4
def get_data(task_id):
    url = SQLMAP_BASE_URL + "/scan/" + task_id + "/data"
    r = requests.get(url)
    try:
        json_result = json.loads(r.text)
    except json.decoder.JSONDecodeError as e:
        logger.critical("JSON decoder error")
        exit()
    return json_result
Example #5
def kill_task(task_id):
    url = SQLMAP_BASE_URL + "/scan/" + task_id + "/kill"
    r = requests.get(url)
    try:
        json_result = json.loads(r.text)
    except json.decoder.JSONDecodeError as e:
        logger.critical("JSON decoder error")
        exit()
    if json_result['success']:
        return json_result
    else:
        return False
Example #6
File: fs.py Project: rhaidiz/wafex
def save_extracted_file(name, text):
    filepath = os.path.join(config.WFAST_EXTRACTED_FILES_DIR, name)
    try:
        # a with-statement guarantees the file is closed even if write fails
        with open(filepath, "w") as f:
            f.write(text)
    except Exception as e:
        criticalMsg = "Error {}\n in saving the file {}, aborting execution!".format(
            e, filepath)
        logger.critical(criticalMsg)
        exit(0)
    return filepath
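
A hypothetical call, relying on main() below to have created config.WFAST_EXTRACTED_FILES_DIR at startup (both argument values are made up):

path = save_extracted_file("etc_passwd.txt", extracted_text)
logger.debug("file saved in {}".format(path))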
Example #7
def start_scan(url_to_scan, task_id):
    url = SQLMAP_BASE_URL + "/scan/" + task_id + "/start"
    params = {"url": url_to_scan}
    r = requests.post(url, json=params)
    try:
        json_result = json.loads(r.text)
    except json.decoder.JSONDecodeError as e:
        logger.critical("JSON decoder error")
        exit()
    if json_result['success']:
        return json_result['engineid']
    else:
        return False
Example #8
def set_option(option, value, task_id):
    url = SQLMAP_BASE_URL + "/option/" + task_id + "/set"
    params = {option: value}
    debugMsg = "task {} setting {} to {}".format(task_id, option, value)
    logger.debug(debugMsg)
    r = requests.post(url, json=params)
    try:
        json_result = json.loads(r.text)
    except json.decoder.JSONDecodeError as e:
        logger.critical("JSON decoder error")
        exit()
    return json_result['success']
Example #9
def new_task():
    #time.sleep(1)
    url = SQLMAP_BASE_URL + "/task/new"
    try:
        r = requests.get(url)
        json_result = json.loads(r.text)
    except json.decoder.JSONDecodeError as e:
        logger.critical("JSON decoder error")
        exit()
    except Exception as e:
        criticalMsg = "Something bad happened {}".format(e)
        logger.critical(criticalMsg)
        # without exiting, json_result would be undefined below
        exit()
    if json_result['success']:
        return json_result['taskid']
    else:
        return False
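
Taken together, the REST helpers above implement the usual sqlmapapi task lifecycle. A minimal sketch of how they combine (hypothetical target URL, no error handling):

task_id = new_task()
set_option("url", "http://target.local/item.php?id=1", task_id)
start_scan("http://target.local/item.php?id=1", task_id)
# ... poll the scan until it terminates, then:
data = get_data(task_id)
kill_task(task_id)
del_task(task_id)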
Example #10
def get_list_extracted_files(attack_domain):
    files_extracted = []
    debugMsg = "domain {}".format(attack_domain)
    logger.debug(debugMsg)
    __sqlmap_files_path = expanduser(join("~",".sqlmap","output",attack_domain,"files"))

    try:
        files = [f for f in listdir(__sqlmap_files_path) if isfile(join(__sqlmap_files_path,f))]
    except FileNotFoundError:
        criticalMsg = "File not found {}".format(__sqlmap_files_path)
        logger.critical(criticalMsg)
        logger.critical("Aborting execution")
        exit(0)
    for f in files:
        tmp = join( __sqlmap_files_path, f)
        files_extracted.append(tmp)
    return files_extracted
Example #11
File: mc.py Project: rhaidiz/wafex
def local_cl_atse(file_aslan, options=[]):
    global CLATSE
    logger.info("Executing CL-Atse locally")
    atse_output = os.path.splitext(file_aslan)[0] + ".atse"
    atse_execution_array = [CLATSE] + options + [file_aslan]
    logger.debug(atse_execution_array)
    p1 = subprocess.Popen(atse_execution_array,
                          universal_newlines=True,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE)
    try:
        out, err = p1.communicate(timeout=config.mc_timeout)
    except subprocess.TimeoutExpired:
        p1.kill()
        logger.critical("Model checker timed out")
        exit()
    with open(atse_output, "w") as atse_output_descriptor:
        atse_output_descriptor.write(out)
    return atse_output
Example #12
File: mc.py Project: rhaidiz/wafex
def aslanpp2aslan(file_aslanpp):
    #connector = config.connector
    # get the filename without extension
    debugMsg = "file aslanpp: {}".format(file_aslanpp)
    logger.debug(debugMsg)
    basename = os.path.splitext(os.path.basename(file_aslanpp))[0]
    translator_output_file = "tmp_" + basename + ".aslan"

    logger.info("Generating ASlan model")
    debugMsg = "{} on {} out-file {}".format(connector, file_aslanpp,
                                             translator_output_file)
    logger.debug(debugMsg)

    p1 = subprocess.Popen([
        "java", "-jar", connector, file_aslanpp, "-o", translator_output_file
    ],
                          universal_newlines=True,
                          stderr=subprocess.PIPE)

    try:
        out, err = p1.communicate(timeout=30)
    except subprocess.TimeoutExpired:
        p1.kill()
        criticalMsg = "Error: {} timed out."
        logger.critical(criticalMsg)
        exit()

    # check if an error has been generated from the translator
    if "FATAL" in err or "ERROR" in err:
        # there was an error in executing the translator
        logger.critical("Translator generated an error")
        logger.critical(err)
        exit()

    if config.verbosity and "WARNING" in err:
        logger.debug(err.strip())
    logger.info("ASlan model generated")
    return translator_output_file, err
Example #13
def execute_sqlmap(sqlmap_details):
    global data_to_extract

    logger.info("run sqlmapapi.py")
    is_sqlmap_up = sqlmap.run_api_server()
    if not is_sqlmap_up:
        logger.critical("sqlmap server not running")
        exit()
    task = sqlmap.new_task()

    # configure proxy
    if config.proxy_ip and config.proxy_port and config.proxy_port.isdigit():
        sqlmap.set_option("proxy","http://{}:{}".format(config.proxy_ip, config.proxy_port), task)

    url = sqlmap_details["url"]
    method = sqlmap_details["method"]
    vuln_param = sqlmap_details["vuln_param"]

    url_params = ""
    if "get_params" in sqlmap_details:
        params = sqlmap_details["get_params"]
        url_params = ""
        for ab_k,real_c in params.items():
            # specify an injection point
            if ab_k == vuln_param:
                url_params = url_params+real_c[0]+"="+real_c[1]+"*&"
            else:
                url_params = url_params+real_c[0]+"="+real_c[1]+"&"
        url_params = url_params[:-1]

    body_params = ""
    if "post_params" in sqlmap_details:
        params = sqlmap_details["post_params"]
        body_params = ""
        for ab_k,real_c in params.items():
            # specify an injection point
            if ab_k == vuln_param:
                body_params = body_params+real_c[0]+"="+real_c[1]+"*&"
            else:
                body_params = body_params+real_c[0]+"="+real_c[1]+"&"
        body_params = body_params[:-1]

    url = url+"?"+url_params
    sqlmap.set_option("url",url,task)

    if method == "POST":
        sqlmap.set_option("data",body_params,task)

    # if "params" in sqlmap_details:
    #     params = sqlmap_details["params"]
    #     url_params = ""
    #     for k,v in params.items():
    #         url_params = url_params+k+"="+v+"&"
    #     url_params = url_params[:-1]
    #     if method == "GET":
    #         url = url+"?"+url_params
    #         sqlmap.set_option("url",url,task)
    #     elif method == "POST":
    #         sqlmap.set_option("url",url,task)
    #         sqlmap.set_option("data",url_params,task)

    # set cookie if present and should be considered
    if "cookies" in sqlmap_details:
        c = ""
        for k,v in sqlmap_details["cookies"].items():
            c = c + k + "=" + v + ";"
        debugMsg = "sqlmap with cookie {}".format(c)
        logger.debug(debugMsg)
        sqlmap.set_option("cookie",c,task)


    # BEGIN: set specific attack details
    # data extraction
    if "extract" in sqlmap_details:
        data_to_extract = sqlmap_details["extract"]
        sqlmap.set_option("dumpTable","true",task)
        # set data extraction only if we have data to extract
        col = ""
        tbl = ""
        for tblcol in data_to_extract:
            tbl_list = tblcol.split(".")
            # TODO: in here we're basically overwriting the table name
            # whenever we find a new one
            tbl = tbl_list[0]
            col = col + tbl_list[1]
        sqlmap.set_option("tbl",tbl,task)
        sqlmap.set_option("col",col,task)

    if "dumpall" in sqlmap_details:
        # dump the entire database
        sqlmap.set_option("dumpAll","true",task)

    # file read
    if "read" in sqlmap_details:
        file_to_extract = sqlmap_details["read"]
        sqlmap.set_option("rFile",file_to_extract,task)
    # file write
    if "write" in sqlmap_details:

        file_to_write = sqlmap_details["write"]
        remote_path = sqlmap_details["path"]
        if not isfile(file_to_write):
            criticalMsg = "Error: {} file not found".format(file_to_write)
            logger.critical(criticalMsg)
            exit()
        sqlmap.set_option("wFile",join(".",file_to_write),task)
        sqlmap.set_option("dFile",remote_path,task)
    # second order
    if "secondOrder" in sqlmap_details:
        secondOrder_url = sqlmap_details["secondOrder"]
        sqlmap.set_option("secondOrder",secondOrder_url,task)
    # END: set specific attack details

    logger.info("sqlmap analysis started")
    sqlmap.start_scan(url,task)

    stopFlag = threading.Event()
    sqlmap_data = None
    sqlmap_log = None
    while not stopFlag.wait(5):
        r = sqlmap.get_status(task)
        if "terminated" in r:
            logger.debug(sqlmap.get_log(task))
            sqlmap_data = sqlmap.get_data(task)
            sqlmap_log = sqlmap.get_log(task)
            stopFlag.set()
        else:
            logger.debug(sqlmap.get_log(task))
            logger.info("sqlmap analysis in progress ... ")

    # we check if the last message generated by sqlmap is critical
    # or an error
    level   = sqlmap_log[-1]["level"]
    message = sqlmap_log[-1]["message"]

    if level == "WARNING":
        logger.warning(message)

    if level == "INFO":
           logger.info(message)

    if level == "ERROR" or level == "CRITICAL":
        logger.critical("sqlmap generated an error")
        logger.critical(message)
        logger.critical("Aborting execution")
        exit()

    logger.info("sqlmap analysis terminated")

    return sqlmap_data, sqlmap_log
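
The trailing * that the two loops above append to the vulnerable parameter is sqlmap's explicit injection-point marker. A worked sketch with hypothetical values:

get_params = {"abs_id": ("id", "5"), "abs_q": ("q", "books")}
vuln_param = "abs_id"
# the GET loop produces: id=5*&q=books
# so the URL handed to sqlmap becomes e.g.
# http://target.local/search.php?id=5*&q=books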
Example #14
def main():
    # command line parsing
    cmd = argparse.ArgumentParser()
    cmd.add_argument("model",help="An ASLAn++ model")
    cmd.add_argument("--c",metavar="concre_file",help="The concretization file, needed for executing Abstract Attack Trace")
    cmd.add_argument("--debug",help="Print debug messages",action="store_true")
    cmd.add_argument("--mc-only",help="Run the model-checker only and exit",action="store_true")
    cmd.add_argument("--interactive", help="Ask input of every parameter", action="store_true")
    #cmd.add_argument("--merger",help="Use the specified file as a base file to merge with the given model", metavar="basefile")
    cmd.add_argument("--verbose", help="Increase the output verbosity",action="store_true")
    translator = cmd.add_argument_group('Translator')
    translator_versions = ["1.4.1","1.4.9","1.3"]
    translator.add_argument("--translator",help="Specify a jar translator to use. Allowed values are "+", ".join(translator_versions)+". Default (1.4.1)", metavar='',choices=translator_versions)

    requests = cmd.add_argument_group("HTTP(S) options")
    requests.add_argument("--proxy", metavar="ip:port", help="Use an HTTP proxy when executing requests")

    model_checker = cmd.add_argument_group("Cl-Atse options")
    model_checker.add_argument("--mc-options",help="String representing the options to pass to Cl-Atse. For more information on the available options check Cl-Atse manual")
    model_checker.add_argument("--mc-timeout", metavar="T", help="If Cl-Atse runs more than T seconds, abort (default: 600)", type=int)

    args = cmd.parse_args()
    load_model = args.model


    mc_options = args.mc_options.split(" ") if args.mc_options else []
    if args.mc_timeout:
        config.mc_timeout = args.mc_timeout

    if args.interactive:
        config.interactive = True

    # check if model file exists
    if not os.path.isfile(load_model):
        criticalMsg = "Error {} file not found".format(load_model)
        logger.critical(criticalMsg)
        exit()
    # check if concretization file exists only if --mc-only hasn't been specified
    if args.c is None and not args.mc_only:
        logger.critical("Concretization file not specified")
        exit()
    elif not args.mc_only and not os.path.isfile(args.c):
        criticalMsg = "Error: {} file not found".format(args.c)
        logger.critical(criticalMsg)
        exit()
    elif not args.mc_only and args.c is not None and os.path.isfile(args.c):
        config.concretization = args.c

    print(config.BANNER.format(config.VERSION,config.SITE))

    # register exiting cleanup function
    atexit.register(exitcleanup)

    # set global variables
    config.verbosity = args.verbose
    config.DEBUG = args.debug

    # create folders if they do not exist
    if not os.path.isdir(config.WFAST_HOME):
        logger.info("Creating {} home folder".format(config.TOOL_NAME))
        os.makedirs(config.WFAST_HOME)
    if not os.path.isdir(config.WFAST_EXTRACTED_FILES_DIR):
        logger.info("Creating {} extracted files folder".format(config.TOOL_NAME))
        os.makedirs(config.WFAST_EXTRACTED_FILES_DIR)

    if config.DEBUG:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if args.proxy:
        proxy = args.proxy.split(":")
        if len(proxy) == 2 and proxy[0] and proxy[1].isdigit():
            config.proxy_ip = proxy[0]
            config.proxy_port = proxy[1]
        else:
            criticalMsg = "Invalid proxy format {}".format(args.proxy)
            logger.error(criticalMsg)
            exit(-1)
    if args.translator == "1.4.9":
        mc.connector = config.CONNECTOR_1_4_9
    if args.translator == "1.3":
        mc.connector = config.CONNECTOR_1_3

    # if args.merger:
    #     base_model = args.merger
    #     webapp = load_model
    #     load_model = "out.aslan++"
    #     # merge the files
    #     merger(webapp,base_model,load_model)

    # the first step is to convert the ASLan++ model into ASLan
    file_aslan_model, err = mc.aslanpp2aslan(load_model)

    # we can now run the model checker, by default we use Cl-Atse locally
    file_attack_trace = mc.local_cl_atse(file_aslan_model,mc_options)

    # translate the attack trace in msc
    msc_output = mc.generate_msc(file_attack_trace,file_aslan_model)


    if not args.mc_only:

        # read the output and parse it
        msc = mc.parse_msc(msc_output)

        logger.debug("Parsed MSC")
        for msg in msc:
            debugMsg = "{} {}:{}:{} {} > tag{}".format(msg.sender , msg.receiver, msg.params, msg.action, msg.action_params, msg.tag)
            logger.debug(debugMsg)

        # execute the attack trace
        execute_attack(msc)
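
A hypothetical invocation, using the flags registered above (script and file names made up):

$ python wafex.py model.aslan++ --c concretization.txt --debug --mc-timeout 300 --proxy 127.0.0.1:8080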
Example #15
File: mc.py Project: rhaidiz/wafex
def generate_msc(file_attack_trace, file_aslan_model):

    r_time = re.compile("STATISTICS TIME (.*)")
    r_tested = re.compile("TESTED (.*)")
    r_reached = re.compile("REACHED (.*)")
    r_reading = re.compile("READING (.*)")
    r_analyze = re.compile("ANALYSE (.*)")
    r_unused = re.compile("UNUSED: { (.*)")
    r_goal = re.compile("GOAL: (.*)")
    r_end_unused = re.compile("(.*) }")
    unused_flag = 0

    tmp_attack_trace = ""
    p1 = subprocess.Popen([
        "java", "-jar", connector, "-ar", file_attack_trace, file_aslan_model
    ],
                          universal_newlines=True,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE)
    try:
        out, err = p1.communicate(timeout=10)
    except subprocess.TimeoutExpired:
        p1.kill()
        logger.critical("MSC creation timed out")
        exit()
    if config.verbosity:
        # print the generated output on a file
        msc_verbose = open("tmp_msc.txt", "w")
        msc_verbose.write(out)
        msc_verbose.close()
    f = open(file_attack_trace)
    msc = ""
    comments = False
    for line in f.readlines():
        line = line.strip()
        if "SUMMARY ATTACK_FOUND" in line:
            # we found an attack, so return the generated MSC
            i = out.find("MESSAGES:")
            msc = out[i + 9:]
            logger.info("Abstract Attack Trace found:")
            print(msc)
        elif "SUMMARY NO_ATTACK_FOUND" in line:
            # no attack found, we don't need the MSC
            logger.warning("NO ATTACK FOUND")
        else:
            goal = r_goal.search(line)
            if goal:
                infoMsg = "GOAL: {}".format(goal.group(1))
                logger.info(infoMsg)
                continue
            if "COMMENTS" in line:
                comments = True
                logger.info("COMMENTS")
                continue
            if "STATISTICS TIME" in line:
                comments = False
                continue
            if comments:
                print(line)
                continue
            tested = r_tested.search(line)
            if tested:
                infoMsg = "TESTED: {}".format(tested.group(1))
                logger.info(infoMsg)
                continue
            reached = r_reached.search(line)
            if reached:
                infoMsg = "REACHED: {}".format(reached.group(1))
                logger.info(infoMsg)
                continue
            analyze = r_analyze.search(line)
            if analyze:
                infoMsg = "ANALYZE: {}".format(analyze.group(1))
                logger.info(infoMsg)
                # I return here because once ANALYZE is reached, I don't need
                # to read the remaining part of the output
                return msc
            unused = r_unused.search(line)
            if unused:
                logger.debug("UNUSED:")
                logger.debug(unused.group(1))
                unused_flag = 1
                continue
            else:
                last_line_unused = r_end_unused.search(line)
                if unused_flag == 1 and last_line_unused:
                    # last line of the unused
                    logger.debug(last_line_unused.group(1))
                    unused_flag = 0
                    continue
                elif unused_flag == 1:
                    # keep reading next files
                    logger.debug(line)
                    continue
    # this return is for safety reasons; in theory, the function always
    # returns when ANALYZE is found
    return msc
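
For orientation, the regexes above walk CL-Atse's plain-text report. A hypothetical fragment of the file_attack_trace content this parser expects (labels taken from the code, values invented):

SUMMARY ATTACK_FOUND
GOAL: secrecy_goal_
COMMENTS
free-form comment lines printed verbatim
STATISTICS TIME 120
TESTED 42
REACHED 17
ANALYSE 5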