Example 1
def sqlmap_parse_data_extracted(sqlmap_output):
    global data_to_extract
    # Let's parse the data extracted
    debugMsg = "sqlmap output {}".format(sqlmap_output)
    logger.debug(debugMsg)
    extracted_values = {}
    debugMsg = "data to extract {}".format(data_to_extract)
    logger.debug(debugMsg)
    for tblcol in data_to_extract:
        debugMsg = "tblcol: ".format(tblcol)
        logger.debug(debugMsg)
        tbl_list = tblcol.split(".")
        debugMsg = "tbl_list[1]: ".format(tbl_list[1])
        logger.debug(debugMsg)
        tmp_table = tbl_list[0]
        tmp_column = tbl_list[1]
        # make sure a dict exists for this table
        extracted_values.setdefault(tmp_table, {})
        try:
            extracted_values[tmp_table][tmp_column] = sqlmap_output["data"][2]["value"][tmp_column]["values"]
        except Exception:
            logger.critical("error in the sqlmap output")
            sqlmap.kill()
            exit()
        logger.debug("Ending sqlmap extraction")
        sqlmap.kill()
    return extracted_values
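For orientation, here is a minimal sketch of the shapes this parser expects, inferred from the dictionary accesses above; the field names and values are illustrative, not taken from sqlmap's documented API.

# Illustrative input shapes for sqlmap_parse_data_extracted (assumed):
data_to_extract = ["users.username", "users.password"]   # "table.column" entries
sqlmap_output = {
    "data": [
        {},  # indices 0 and 1 are not used by the parser
        {},
        {"value": {
            "username": {"values": ["admin"]},
            "password": {"values": ["21232f297a57a5a743894a0e4a801fc3"]},
        }},
    ]
}
# sqlmap_parse_data_extracted(sqlmap_output)
# -> {"users": {"username": ["admin"], "password": ["2123..."]}}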
Example 2
    def run_wfuzz(self, url):
        """
        Executes Wfuzz and returns a list of files retrieved during the fuzzing.
        :param url: the url of the target
        """
        self._wfuzz_cmd.append(url)
        debugMsg = "executing WFUZZ {}".format(self._wfuzz_cmd)
        logger.debug(debugMsg)
        p1 = subprocess.Popen(self._wfuzz_cmd,
                              cwd=self._wfuzz_path,
                              universal_newlines=True,
                              stderr=subprocess.PIPE,
                              stdout=subprocess.PIPE)
        try:
            out, err = p1.communicate(timeout=10)
            debugMsg = "wfuzz out {}".format(out)
            logger.debug(debugMsg)
            # return a list containing the successful URLs
            urls = []
            json_out = json.loads(out)
            for req in json_out:
                urls.append(req["url"])
            return urls
        except subprocess.TimeoutExpired:
            p1.kill()
            logger.critical("Error: wfuzz timed out.")
            exit()
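As a rough usage sketch, the method assumes Wfuzz was configured for JSON output; the snippet below only illustrates how that JSON is consumed (the sample output is made up, and any field other than "url" is an assumption).

import json

# Illustrative wfuzz JSON output and how run_wfuzz consumes it:
sample_out = '[{"url": "http://testsite.example/?page=../../etc/passwd", "code": 200}]'
urls = [req["url"] for req in json.loads(sample_out)]
print(urls)   # ['http://testsite.example/?page=../../etc/passwd']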
Example 3
def setName():
    logger.info("post recieved")
    if request.method == 'POST':
        payload = request.form
        logger.debug(f'POST received: {payload}')
        data = payload['data']
        return {"Successfully stored ": str(data)}
    return None
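Assuming this view is registered on a Flask app (the route path and port below are assumptions), a client would exercise it roughly like this:

import requests

# Hypothetical client call for the view above; the handler reads the "data"
# field from the submitted form and echoes it back as JSON.
resp = requests.post("http://127.0.0.1:5000/setName", data={"data": "Alice"})
print(resp.json())   # -> {'Successfully stored ': 'Alice'}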
Example 4
def set_option(option, value, task_id):
    url = SQLMAP_BASE_URL + "/option/" + task_id + "/set"
    params = {option: value}
    debugMsg = "task {} setting {} to {}".format(task_id, option, value)
    logger.debug(debugMsg)
    r = requests.post(url, json=params)
    try:
        json_result = json.loads(r.text)
    except json.decoder.JSONDecodeError as e:
        logger.critical("JSON decoder error: {}".format(e))
        exit()
    return json_result['success'] is True
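For context, this talks to sqlmap's REST API (sqlmapapi.py). A minimal sketch of the same exchange done by hand, assuming the API server is running on its default address:

import requests

SQLMAP_BASE_URL = "http://127.0.0.1:8775"   # default address of `sqlmapapi.py -s`

# create a task, then set an option on it, exactly as set_option() does
task_id = requests.get(SQLMAP_BASE_URL + "/task/new").json()["taskid"]
r = requests.post(SQLMAP_BASE_URL + "/option/" + task_id + "/set", json={"level": 3})
print(r.json())   # e.g. {'success': True}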
Example 5
def get_list_extracted_files(attack_domain):
    files_extracted = []
    debugMsg = "domain {}".format(attack_domain)
    logger.debug(debugMsg)
    __sqlmap_files_path = expanduser(join("~", ".sqlmap", "output", attack_domain, "files"))

    try:
        files = [f for f in listdir(__sqlmap_files_path) if isfile(join(__sqlmap_files_path,f))]
    except FileNotFoundError:
        criticalMsg = "File not found {}".format(__sqlmap_files_path)
        logger.critical(criticalMsg)
        logger.critical("Aborting execution")
        exit(0)
    for f in files:
        tmp = join(__sqlmap_files_path, f)
        files_extracted.append(tmp)
    return files_extracted
Example 6
File: mc.py Project: rhaidiz/wafex
def local_cl_atse(file_aslan, options=[]):
    global CLATSE
    logger.info("Executing CL-Atse locally")
    atse_output = os.path.splitext(file_aslan)[0] + ".atse"
    atse_output_descriptor = open(atse_output, "w")
    atse_execution_array = [CLATSE] + options + [file_aslan]
    logger.debug(atse_execution_array)
    p1 = subprocess.Popen(atse_execution_array,
                          universal_newlines=True,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE)
    try:
        out, err = p1.communicate(timeout=config.mc_timeout)
    except subprocess.TimeoutExpired:
        p1.kill()
        logger.critical("Model checker timed out")
        exit()
    atse_output_descriptor.write(out)
    atse_output_descriptor.close()
    return atse_output
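The function relies on two module-level values; the sketch below shows what they might look like (the binary path is an assumption, while the 600-second timeout matches the default mentioned in the CLI help of Example 16 further below).

# Illustrative module-level values assumed by local_cl_atse:
CLATSE = "/opt/avantssar/cl-atse_x86_64-linux"   # path to the CL-AtSe binary (assumed)
# config.mc_timeout defaults to 600 seconds in the tool's CLI options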
Example 7
def execute_traversal(s, request, check=common_check, fname=common_files):
    logger.debug("Executing directory traversal attack")
    params = request["params"]
    payloads = __payloadgenerator(fname)
    for park, parv in params.items():
        if parv == "?":
            for idx, p in enumerate(payloads):
                params[park] = p
                logger.debug("trying: " + p)
                logger.debug("looking for: " + check[idx % len(check)])
                r = execute_request(s, request)
                if check[idx % len(check)] in r.text:
                    return True
    return False
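Here is a sketch of the request dictionary these helpers pass around, inferred from the key accesses in this function and in execute_request (Example 15 below); a "?" value marks the parameter to fuzz, and all concrete values are illustrative.

# Illustrative request dict consumed by execute_traversal / execute_request:
request = {
    "url": "http://testsite.example/view.php",
    "method": "GET",                          # defaults to GET when missing
    "params": {"page": "?", "lang": "en"},    # "?" marks the injectable parameter
    "cookies": {"PHPSESSID": "abc123"},       # optional
    "files": {},                              # optional, used for POST uploads
}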
Example 8
def _normal_request(http, s):
    """
        Perform a normal HTTP request. Returns a Response object 
        that contains the response.
    """
    logger.info("Perform normal request")
    url_params = dict()
    for abstract_k in http.get_params:
        _get = http.get_params[abstract_k]
        k, v = _instantiate_value(_get[0], _get[1], http.params[abstract_k])
        url_params[k] = v
    debugMsg = url_params
    logger.debug(debugMsg)

    post_params = dict()
    for abstract_k in http.post_params:
        _post = http.post_params[abstract_k]
        k, v = _instantiate_value(_post[0], _post[1], http.params[abstract_k])
        post_params[k] = v
    debugMsg = post_params
    logger.debug(debugMsg)

    # configure the proxy if it was set
    if config.proxy_ip and config.proxy_port:
        proxies = {
            "http": "http:{}:{}".format(config.proxy_ip, config.proxy_port),
            "https": "https{}:{}".format(config.proxy_ip, config.proxy_port)
        }
        resp = s.request(method=http.method,
                         url=http.url,
                         params=url_params,
                         data=post_params,
                         proxies=proxies)
    else:
        resp = s.request(method=http.method,
                         url=http.url,
                         params=url_params,
                         data=post_params)

    debugMsg = resp.headers
    logger.debug(debugMsg)
    return resp
Example 9
File: mc.py Project: rhaidiz/wafex
def aslanpp2aslan(file_aslanpp):
    #connector = config.connector
    # get the filename without extension
    debugMsg = "file aslanpp: {}".format(file_aslanpp)
    logger.debug(debugMsg)
    basename = os.path.splitext(os.path.basename(file_aslanpp))[0]
    translator_output_file = "tmp_" + basename + ".aslan"

    logger.info("Generating ASlan model")
    debugMsg = "{} on {} out-file {}".format(connector, file_aslanpp,
                                             translator_output_file)
    logger.debug(debugMsg)

    p1 = subprocess.Popen([
        "java", "-jar", connector, file_aslanpp, "-o", translator_output_file
    ],
                          universal_newlines=True,
                          stderr=subprocess.PIPE)

    try:
        out, err = p1.communicate(timeout=30)
    except subprocess.TimeoutExpired:
        p1.kill()
        criticalMsg = "Error: {} timed out."
        logger.critical(criticalMsg)
        exit()

    # check if an error has been generated from the translator
    if "FATAL" in err or "ERROR" in err:
        # there was an error in executing the translator
        logger.critical("Translator generated an error")
        logger.critical(err)
        exit()

    if config.verbosity and "WARNING" in err:
        logger.debug(err.strip())
    logger.info("ASlan model generated")
    return translator_output_file, err
Example 10
File: mc.py Project: rhaidiz/wafex
def generate_msc(file_attack_trace, file_aslan_model):

    r_time = re.compile("STATISTICS TIME (.*)")
    r_tested = re.compile("TESTED (.*)")
    r_reached = re.compile("REACHED (.*)")
    r_reading = re.compile("READING (.*)")
    r_analyze = re.compile("ANALYSE (.*)")
    r_unused = re.compile("UNUSED: { (.*)")
    r_goal = re.compile("GOAL: (.*)")
    r_end_unused = re.compile("(.*) }")
    unused_flag = 0

    tmp_attack_trace = ""
    p1 = subprocess.Popen([
        "java", "-jar", connector, "-ar", file_attack_trace, file_aslan_model
    ],
                          universal_newlines=True,
                          stderr=subprocess.PIPE,
                          stdout=subprocess.PIPE)
    try:
        out, err = p1.communicate(timeout=10)
    except subprocess.TimeoutExpired:
        p1.kill()
        logger.critical("MSC creation timed out")
        exit()
    if config.verbosity:
        # print the generated output on a file
        msc_verbose = open("tmp_msc.txt", "w")
        msc_verbose.write(out)
        msc_verbose.close()
    f = open(file_attack_trace)
    msc = ""
    comments = False
    for line in f.readlines():
        line = line.strip()
        if "SUMMARY ATTACK_FOUND" in line:
            # we found an attack, so return the generated MSC
            i = out.find("MESSAGES:")
            msc = out[i + 9:]
            logger.info("Abstract Attack Trace found:")
            print(msc)
        elif "SUMMARY NO_ATTACK_FOUND" in line:
            # no attack found, we don't need the MSC
            logger.warning("NO ATTACK FOUND")
        else:
            goal = r_goal.search(line)
            if goal:
                infoMsg = "GOAL: {}".format(goal.group(1))
                logger.info(infoMsg)
                continue
            if "COMMENTS" in line:
                comments = True
                logger.info("COMMENTS")
                continue
            if "STATISTICS TIME" in line:
                comments = False
                continue
            if comments:
                print(line)
                continue
            tested = r_tested.search(line)
            if tested:
                infoMsg = "TESTED: {}".format(tested.group(1))
                logger.info(infoMsg)
                continue
            reached = r_reached.search(line)
            if reached:
                infoMsg = "REACHED: {}".format(reached.group(1))
                logger.info(infoMsg)
                continue
            analyze = r_analyze.search(line)
            if analyze:
                infoMsg = "ANALYZE: {}".format(analyze.group(1))
                logger.info(infoMsg)
                # we can return here: once the ANALYSE section is reached we
                # don't need to read the remaining part of the output
                return msc
            unused = r_unused.search(line)
            if unused:
                logger.debug("UNUSED:")
                logger.debug(unused.group(1))
                unused_flag = 1
                continue
            else:
                last_line_unused = r_end_unused.search(line)
                if unused_flag == 1 and last_line_unused:
                    # last line of the unused
                    logger.debug(last_line_unused.group(1))
                    unused_flag = 0
                    continue
                elif unused_flag == 1:
                    # still inside the UNUSED block, keep reading the next lines
                    logger.debug(line)
                    continue
    # this return is here as a safety net; in theory the function always
    # returns once ANALYSE is reached
    return msc
Example 11
def sqli(msc_table,extended):
    logger.info("Looking for SQL injection attacks")
    sqli = []
    injection_point = ""

    # regexp
    r_sqli           = re.compile(r"(?:.*?)\.?sqli\.(?:.*)\.?")
    r_tuple_response = re.compile(r"(?:.*?)\.?tuple\(")
    r_tuple_request  = re.compile(r"([a-z]*?)\.s\.tuple\((?:.*?)\)(?:\.s)?")
    r_sqli_read      = re.compile(r"(?:.*?)e_file\((.*?)\)")
    r_sqli_write     = re.compile(r"(?:.*?)newFile\((.*?)\)")

    # data extraction
    tag_sqli = ""

    # second-order conditions
    so_cond1 = False # i -> webapp : <something>.sqli.<something>
    so_cond2 = False # i -> webapp : <something>
    so_cond3 = False # webapp -> i : tuple(<something>.sqli.<something>
    tag_so_cond1 = ""
    tag_so    = ""


    for idx, row in enumerate(msc_table):
        tag = row[0]
        step = row[1]
        sender = step[0]
        receiver = step[1]
        msg = step[2]
        entry = None

        if sender not in config.receiver_entities:
            # is a message from the intruder
            debugMsg = "Processing {}".format(msg)
            logger.debug(debugMsg)

            if r_sqli.search(msg) and "tuple(" not in msg:
                if so_cond1 == False:
                    so_cond1 = True
                    tag_so_cond1 = tag
                    logger.debug("so_cond1")
                # get the response
                response = msc_table[idx+1]
                response_msg = response[1][2]   # (tag , ( sender, receiver, msg))
                # and check whether the response contains e_file(...) (SQLi used for file reading)
                match_ftr = r_sqli_read.search(response_msg)
                if match_ftr:
                    logger.debug("SQLi file-read {}".format(tag))

                    file_to_read = match_ftr.group(1)
                    extended[tag]["read"] = file_to_read
                    extended[tag]["attack"] = 1
                else:
                    # or newFile(...) (SQLi used for file writing)
                    match_ftw = r_sqli_write.search(response_msg)
                    if match_ftw:
                        logger.debug("SQLi file-write {}".format(tag))
                        
                        file_to_write = match_ftw.group(1)
                        extended[tag]["write"] = file_to_write
                        extended[tag]["attack"] = 2
                    else:
                        # ... otherwise it is a SQL injection bypass
                        logger.debug("SQLi bypass {}".format(tag))
                        
                        params = utils.__get_parameters(msg)
                        entry = { "attack":10, "params" : params }
                        extended[tag]["attack"] = 10
                        tag_sqli = tag
            else:
                exploit_sqli = r_tuple_request.findall(msg)
                if exploit_sqli:
                    debugMsg = "Exploit SQLi {}".format(exploit_sqli)
                    logger.debug(debugMsg)
                    # the tuple() function is used again, which means the
                    # previous SQLi was actually a data-extraction attack
                    debugMsg = "Change SQLi of {} to data extraction".format(tag_sqli)
                    logger.debug(debugMsg)
                    extended[tag_sqli]["attack"] = 0
                    extended[tag_sqli]["extract"] = exploit_sqli
                    extended[tag_sqli]["tag_sqli"] = tag
                    params = utils.__get_parameters(msg)
                    entry = { "attack" : 6, "params" : params }

                    extended[tag]["attack"] = 6
                elif tag != "tag":
                    # this is a normal request ...
                    # we check if previous conditions for so are valid
                        if so_cond1 == True and so_cond2 == False:
                            logger.debug("so_cond2")
                            so_cond2 = True
                            tag_so = tag
                        logger.debug("Normal request {}".format(tag))
                        params = utils.__get_parameters(msg)
                        entry = {"attack":-1,"params":params}

                        extended[tag]["attack"] = -1
        else:
            debugMsg = "Processing {}".format(msg)
            logger.debug(debugMsg)
            if r_tuple_response.search(msg):
                # we are exploiting a sqli
                if so_cond1 == True and so_cond2 == True and so_cond3 == False:
                    logger.debug("so_cond3")
                    # we check if previous conditions for so are valid
                    extended[tag_so_cond1]["attack"] = 8
                    extended[tag_so_cond1]["tag_so"] = tag_so
                    so_cond3 = True
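For readability, here is a rough sketch of the structures this analysis walks over, as implied by the tuple unpacking at the top of the loop; the entity names and message strings only loosely follow the abstract attack-trace syntax and are purely illustrative.

# Illustrative msc_table rows: (tag, (sender, receiver, message))
msc_table = [
    ("tag1", ("i", "webapplication", "page.s.sqli.s")),            # intruder request
    ("tag1", ("webapplication", "i", "tuple(e_file(passwd))")),    # response
]

# Per-tag bookkeeping filled in by sqli()/filesystem(); "attack" holds the
# numeric attack code (-1 normal, 1 file read, 2 file write, 10 SQLi bypass, ...)
extended = {"tag1": {"attack": -1, "params": {}}}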
Example 12
File: fs.py Project: rhaidiz/wafex
def filesystem(msc_table, extended):
    logger.info("Looking for file-system attacks")
    fs = []

    # regexp
    r_write_no_sqli = re.compile(r"([a-zA-Z]*?)\.s\.evil_file(?:.*?)")
    r_path_injection = re.compile(r"([a-zA-Z]*?)\.s\.path_injection(?:.*?)")
    r_file = re.compile(r"([a-z]*?)\.s\.e_file\((.*?)\)")
    r_evil_file = re.compile(r"^evil_file")
    r_e_file = re.compile(r"e_file\((.*)\)")

    for idx, row in enumerate(msc_table):
        tag = row[0]
        step = row[1]
        sender = step[0]
        receiver = step[1]
        msg = step[2]
        entry = None

        if sender not in config.receiver_entities:
            # since, in the presence of a query, the attacker always performs
            # a SQLi, he might want to upload a file but still need a SQLi
            # bypass in order to proceed. We therefore give low priority to
            # the SQLi bypass and check the message again for other attacks.
            if extended[tag]["attack"] != 10 and extended[tag]["attack"] != -1:
                continue
            # is a message from the intruder
            debugMsg = "processing {}".format(msg)
            logger.debug(debugMsg)
            params = r_write_no_sqli.search(msg)
            if params:
                # is a malicious file-write (upload)
                #entry = {"attack":5,"params":{params.group(1):"evil_file"}}
                debugMsg = "Unrestricted file upload {}".format(tag)
                logger.debug(debugMsg)

                params = utils.__get_parameters(msg)
                entry = {"attack": 5, "params": params}

                extended[tag]["attack"] = 5
            else:
                if r_evil_file.match(msg):
                    debugMsg = "Exploiting evil_file {}".format(tag)
                    logger.debug(debugMsg)

                    params = utils.__get_parameters(msg)
                    entry = {"attack": 9, "params": params}
                    extended[tag]["attack"] = 9

                # inj_point = r_path_injection.search(msg)
                # if "evil_file" not in msg and inj_point:
                #     # is a file-include with payload path_injection
                #     #entry = { "attack" : 4, "params" : { params.group(1) : "?" } }
                #     params = utils.__get_parameters(msg)
                #     extended[tag]["attack"] = 4
                #     extended[tag]["inj_point"] = inj_point.group(1)
                # else:
                #     # The intruder is sending something
                #     # function of file(). Find where
                #     # the file-name was previously used and, if we
                #     # marked the action as normal request (-1), change
                #     # it as file inclusion (4)
                payload = r_file.search(msg)
                current_attack_number = extended[tag]["attack"]
                if payload and current_attack_number == -1:
                    for _tag in extended:
                        attack = extended[_tag]
                        for k, v in attack["params"].items():
                            if _tag != tag and payload.group(
                                    2) in v and extended[_tag]["attack"] == -1:
                                extended[_tag]["attack"] = 4

                                debugMsg = "File inclusion vulnerability {}".format(
                                    tag)
                                logger.debug(debugMsg)

                    params = utils.__get_parameters(msg)
                    extended[tag]["attack"] = 7
                    extended[tag]["inj_point"] = {
                        payload.group(1): payload.group(2)
                    }

                    debugMsg = "Exploit file extracted {}".format(tag)
                    logger.debug(debugMsg)
                else:
                    if tag not in extended and tag != "tag":
                        # this is a normal request
                        params = utils.__get_parameters(msg)
                        entry = {"attack": -1, "params": params}
                        extended[tag]["attack"] = -1

                        debugMsg = "Normal request: {} params {}".format(
                            tag, params)
                        logger.debug(debugMsg)
        else:
            # we are in the receiving part
            msg_result = msg.split(",")[1]
            payload = r_e_file.search(msg_result)
            # check if something function of file is sent back to the intruder
            if payload:
                for _tag in extended:
                    attack = extended[_tag]
                    for k, v in attack["params"].items():
                        if _tag != tag and payload.group(
                                1) in v and extended[_tag]["attack"] == -1:

                            debugMsg = "File inclusion vulnerability {}".format(
                                _tag)
                            logger.debug(debugMsg)

                            extended[_tag]["attack"] = 4
                            extended[_tag]["read"] = payload.group(1)
Example 13
def execute_sqlmap(sqlmap_details):
    global data_to_extract

    logger.info("run sqlmapapi.py")
    is_sqlmap_up = sqlmap.run_api_server()
    if not is_sqlmap_up:
        logger.critical("sqlmap server not running")
        exit()
    task = sqlmap.new_task()

    # configure proxy
    if config.proxy_ip and config.proxy_port and config.proxy_port.isdigit():
        sqlmap.set_option("proxy","http://{}:{}".format(config.proxy_ip, config.proxy_port), task)

    url = sqlmap_details["url"]
    method = sqlmap_details["method"]
    vuln_param = sqlmap_details["vuln_param"]

    url_params = ""
    if "get_params" in sqlmap_details:
        params = sqlmap_details["get_params"]
        url_params = ""
        for ab_k,real_c in params.items():
            # specify an injection point
            if ab_k == vuln_param:
                url_params = url_params+real_c[0]+"="+real_c[1]+"*&"
            else:
                url_params = url_params+real_c[0]+"="+real_c[1]+"&"
        url_params = url_params[:-1]

    body_params = ""
    if "post_params" in sqlmap_details:
        params = sqlmap_details["post_params"]
        body_params = ""
        for ab_k,real_c in params.items():
            # specify an injection point
            if ab_k == vuln_param:
                body_params = body_params+real_c[0]+"="+real_c[1]+"*&"
            else:
                body_params = body_params+real_c[0]+"="+real_c[1]+"&"
        body_params = body_params[:-1]

    url = url+"?"+url_params
    sqlmap.set_option("url",url,task)

    if method == "POST":
        sqlmap.set_option("data",body_params,task)

    # if "params" in sqlmap_details:
    #     params = sqlmap_details["params"]
    #     url_params = ""
    #     for k,v in params.items():
    #         url_params = url_params+k+"="+v+"&"
    #     url_params = url_params[:-1]
    #     if method == "GET":
    #         url = url+"?"+url_params
    #         sqlmap.set_option("url",url,task)
    #     elif method == "POST":
    #         sqlmap.set_option("url",url,task)
    #         sqlmap.set_option("data",url_params,task)

    # set cookie if present and should be considered
    if "cookies" in sqlmap_details:
        c = ""
        for k,v in sqlmap_details["cookies"].items():
            c = c + k + "=" + v + ";"
        debugMsg = "sqlmap with cookie {}".format(c)
        logger.debug(debugMsg)
        sqlmap.set_option("cookie",c,task)


    # BEGIN: set specific attack details
    # data extraction
    if "extract" in sqlmap_details:
        data_to_extract = sqlmap_details["extract"]
        sqlmap.set_option("dumpTable","true",task)
        # set data extraction only if we have data to extract
        col = ""
        tbl = ""
        for tblcol in data_to_extract:
            tbl_list = tblcol.split(".")
            # TODO: in here we're basically overwriting the table name
            # whenever we find a new one
            tbl = tbl_list[0]
            col = col + tbl_list[1]
        sqlmap.set_option("tbl",tbl,task)
        sqlmap.set_option("col",col,task)

    if "dumpall" in sqlmap_details:
        # dump the entire database
        sqlmap.set_option("dumpAll","true",task)

    # file read
    if "read" in sqlmap_details:
        file_to_extract = sqlmap_details["read"]
        sqlmap.set_option("rFile",file_to_extract,task)
    # file write
    if "write" in sqlmap_details:

        file_to_write = sqlmap_details["write"]
        remote_path = sqlmap_details["path"]
        if not isfile(file_to_write):
            criticalMsg = "Error: {} file not found".format(file_to_write)
            logger.critical(criticalMsg)
            exit()
        sqlmap.set_option("wFile",join(".",file_to_write),task)
        sqlmap.set_option("dFile",remote_path,task)
    # second order
    if "secondOrder" in sqlmap_details:
        secondOrder_url = sqlmap_details["secondOrder"]
        sqlmap.set_option("secondOrder",secondOrder_url,task)
    # END: set specific attack details

    logger.info("sqlmap analysis started")
    sqlmap.start_scan(url,task)

    stopFlag = threading.Event()
    sqlmap_data = None
    sqlmap_log = None
    while not stopFlag.wait(5):
        r = sqlmap.get_status(task)
        if "terminated" in r:
            logger.debug(sqlmap.get_log(task))
            sqlmap_data = sqlmap.get_data(task)
            sqlmap_log = sqlmap.get_log(task)
            stopFlag.set()
        else:
            logger.debug(sqlmap.get_log(task))
            logger.info("sqlmap analysis in progress ... ")

    # we check if the last message generated by sqlmap is critical
    # or an error
    level   = sqlmap_log[-1]["level"]
    message = sqlmap_log[-1]["message"]

    if level == "WARNING":
        logger.warning(message)

    if level == "INFO":
           logger.info(message)

    if level == "ERROR" or level == "CRITICAL":
        logger.critical("sqlmap generated an error")
        logger.critical(message)
        logger.critical("Aborting execution")
        exit()

    logger.info("sqlmap analysis terminated")

    return sqlmap_data, sqlmap_log
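Below is a sketch of the sqlmap_details dictionary this function consumes, reconstructed from the keys it reads; every concrete value is illustrative, and optional keys are shown commented out.

# Illustrative sqlmap_details as read by execute_sqlmap():
sqlmap_details = {
    "url": "http://testsite.example/view.php",
    "method": "GET",
    "vuln_param": "id",                                     # abstract name of the injection point
    "get_params": {"id": ("id", "1"), "page": ("page", "home")},   # abstract -> (name, value)
    # "post_params": {"user": ("user", "admin")},
    # "cookies": {"PHPSESSID": "abc123"},
    # "extract": ["users.username", "users.password"],      # data extraction ("table.column")
    # "dumpall": True,                                      # dump the whole database
    # "read": "/etc/passwd",                                # file read
    # "write": "shell.php", "path": "/var/www/shell.php",   # file write (local file + remote path)
    # "secondOrder": "http://testsite.example/profile.php",
}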
Example 14
def _file_inc(http, s):
    """ Performs a file inclusion attack using Wfuzz. """

    vuln_param = http.action_params[0]
    file_to_read = http.action_params[1]
    promptMsg = "What is the name of the file {} you want to read?\n".format(
        file_to_read)
    file_to_read = ""
    while file_to_read == "":
        file_to_read = input(promptMsg)
    # set wfuzzer output
    #wfuzz.set_param("-o","json")
    # we need to write a file with the payload to pass to wfuzz
    f = open("_tmp_wfuzz_payloads", "w")
    for p in wfuzz_payloads:
        f.write(p.format(file_to_read))
    f.close()
    payloads_path = os.path.join(os.getcwd(), "_tmp_wfuzz_payloads")
    #wfuzz.set_param("-w",payloads_path)

    url_params = ""
    for abstract_k in http.get_params:
        _get = http.get_params[abstract_k]
        vuln_param = http.action_params[0]
        k = _get[0]
        v = _get[1]
        if vuln_param == abstract_k:
            # this is the injection point
            # mark it with FUZZ so Wfuzz injects its payloads here
            infoMsg = "Injection point: {}".format(vuln_param)
            logger.info(infoMsg)
            v = "FUZZ"
        else:
            k, v = _instantiate_value(k, v, http.params[abstract_k])

        url_params += "{}={}&".format(k, v)
    debugMsg = url_params
    logger.debug(debugMsg)

    post_params = ""
    for abstract_k in http.post_params:
        _post = http.post_params[abstract_k]
        vuln_param = http.action_params[0]
        k = _post[0]
        v = _post[1]
        if vuln_param == abstract_k:
            # this is the injection point
            # mark it with FUZZ so Wfuzz injects its payloads here
            infoMsg = "Injection point: {}".format(vuln_param)
            logger.info(infoMsg)
            v = "FUZZ"
        else:
            k, v = _instantiate_value(k, v, http.params[abstract_k])

        post_params += "{}={}&".format(k, v)

    post_params = ""

    cookies = ""
    cookies_dict = s.cookies.get_dict()
    for key in cookies_dict:
        cookies += "{}:{},".format(key, cookies_dict[key])

    # get a Wfuzz object and run a scan
    wz = Wfuzz()
    # configure proxy
    if proxy_ip and proxy_port and proxy_port.isdigit():
        wz.set_param("-p", "{}:{}".format(proxy_ip, proxy_port))
    wz.set_param("-o", "json")
    wz.set_param("-w", payloads_path)
    if post_params:
        wz.set_param("-d", post_param)
    if cookies:
        wz.set_param("-b", cookies)
    wz.set_param("-b", "cookie1=valuecookie1")
    url = "{}?{}".format(http.url, url_params)

    out = wz.run_wfuzz(url)
    if out:
        for url in out:
            # Wfuzz was successful, display the valid url and continue
            infoMsg = "{}".format(url)
            logger.info(infoMsg)
Example 15
def execute_request(s, request):
    url = request["url"]
    if "method" in request:
        method = request["method"]
    else:
        method = "GET"
    try:
        logger.debug(request["params"])
        for k, v in request["params"].items():
            if v == "?":
                inputMsg = "Provide value for: {}\n".format(k)
                new_value = input(inputMsg)
                request["params"][k] = new_value
        params = request["params"]
    except KeyError:
        params = []
    try:
        cookies = request["cookies"]
    except KeyError:
        cookies = []
    #cookies = {'8c7a5a8dc980f43a35da380d188606dd': 'my-app/0.0.1'}
    try:
        files = request["files"]
    except KeyError:
        files = {}

    logger.debug("Execute request")
    debugMsg = "url: {}".format(url)
    logger.debug(debugMsg)
    debugMsg = "method: {}".format(method)
    logger.debug(debugMsg)
    debugMsg = "params: {}".format(params)
    logger.debug(debugMsg)
    debugMsg = "cookies: {}".format(cookies)
    logger.debug(debugMsg)
    debugMsg = "files: {}".format(files)
    logger.debug(debugMsg)
    #url = 'https://157.27.244.25/chained'
    if config.proxy != None:
        proxies = {
            "http": "http://" + config.proxy,
            "https": "https://" + config.proxy
        }
    r = None
    if method == "GET":
        if config.proxy != None:
            r = s.get(url,
                      proxies=proxies,
                      params=params,
                      cookies=cookies,
                      verify=False,
                      auth=('regis', 'password'))
        else:
            r = s.get(url,
                      params=params,
                      verify=False,
                      cookies=cookies,
                      auth=('regis', 'password'))
    else:
        if config.proxy != None:
            r = s.post(url,
                       proxies=proxies,
                       data=params,
                       files=files,
                       cookies=cookies,
                       verify=False,
                       auth=('regis', 'password'))
        else:
            r = s.post(url,
                       data=params,
                       verify=False,
                       files=files,
                       cookies=cookies,
                       auth=('regis', 'password'))

    logger.debug(r.text)
    return r
Example 16
def main():
    # command line parsing
    cmd = argparse.ArgumentParser()
    cmd.add_argument("model",help="An ASLAn++ model")
    cmd.add_argument("--c",metavar="concre_file",help="The concretization file, needed for executing Abstract Attack Trace")
    cmd.add_argument("--debug",help="Print debug messages",action="store_true")
    cmd.add_argument("--mc-only",help="Run the model-checker only and exit",action="store_true")
    cmd.add_argument("--interactive", help="Ask input of every parameter", action="store_true")
    #cmd.add_argument("--merger",help="Use the specified file as a base file to merge with the given model", metavar="basefile")
    cmd.add_argument("--verbose", help="Increase the output verbosity",action="store_true")
    translator = cmd.add_argument_group('Translator')
    translator_versions = ["1.4.1","1.4.9","1.3"]
    translator.add_argument("--translator",help="Specify a jar translator to use. Allowed values are "+", ".join(translator_versions)+". Default (1.4.1)", metavar='',choices=translator_versions)

    requests = cmd.add_argument_group("HTTP(S) options")
    requests.add_argument("--proxy", metavar="ip:port", help="Use an HTTP proxy when executing requests")

    model_checker = cmd.add_argument_group("Cl-Atse options")
    model_checker.add_argument("--mc-options",help="String representing the options to pass to Cl-Atse. For more information on the available options check Cl-Atse manual")
    model_checker.add_argument("--mc-timeout", metavar="T", help="If Cl-Atse runs more than T seconds, abort (default: 600)", type=int)

    args = cmd.parse_args()
    load_model = args.model


    mc_options = args.mc_options.split(" ") if args.mc_options else []
    if args.mc_timeout:
        config.mc_timeout = args.mc_timeout

    if args.interactive:
        config.interactive = True

    # check if model file exists
    if not os.path.isfile(load_model):
        criticalMsg = "Error {} file not found".format(load_model)
        logger.critical(criticalMsg)
        exit()
    # check if concretization file exists only if --mc-only hasn't been specified
    if args.c == None and not args.mc_only:
        logger.critical("Concretization file not specified")
        exit()
    elif not args.mc_only and not os.path.isfile(args.c):
        criticalMsg = "Error: {} file not found".format(args.c)
        logger.critical(criticalMsg)
        exit()
    elif not args.mc_only and args.c != None and  os.path.isfile(args.c):
        config.concretization = args.c

    print(config.BANNER.format(config.VERSION,config.SITE))

    # register exiting cleanup function
    atexit.register(exitcleanup)

    # set global variables
    config.verbosity = args.verbose
    config.DEBUG = args.debug

    # create folders if they do not exist
    if not os.path.isdir(config.WFAST_HOME):
        logger.info("Creating {} home folder".format(config.TOOL_NAME))
        os.makedirs(config.WFAST_HOME)
    if not os.path.isdir(config.WFAST_EXTRACTED_FILES_DIR):
        logger.info("Creating {} extracted files folder".format(config.TOOL_NAME))
        os.makedirs(config.WFAST_EXTRACTED_FILES_DIR)

    if config.DEBUG:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    if args.proxy:
        proxy = args.proxy.split(":")
        if len(proxy) == 2 and proxy[0] and proxy[1].isdigit():
            config.proxy_ip = proxy[0]
            config.proxy_port = proxy[1]
        else:
            criticalMsg = "Invalid proxy format {}".format(args.proxy)
            logger.error(criticalMsg)
            exit(-1)
    if args.translator == "1.4.9":
        mc.connector = config.CONNECTOR_1_4_9
    if args.translator == "1.3":
        mc.connector = config.CONNECTOR_1_3

    # if args.merger:
    #     base_model = args.merger
    #     webapp = load_model
    #     load_model = "out.aslan++"
    #     # merge the files
    #     merger(webapp,base_model,load_model)

    # the first thing to do is convert the ASLan++ model to ASLan
    file_aslan_model, err = mc.aslanpp2aslan(load_model)

    # we can now run the model checker; by default we use Cl-Atse locally
    file_attack_trace = mc.local_cl_atse(file_aslan_model,mc_options)

    # translate the attack trace in msc
    msc_output = mc.generate_msc(file_attack_trace,file_aslan_model)


    if not args.mc_only:

        # read the output and parse it
        msc = mc.parse_msc(msc_output)

        logger.debug("Parsed MSC")
        for msg in msc:
            debugMsg = "{} {}:{}:{} {} > tag{}".format(msg.sender , msg.receiver, msg.params, msg.action, msg.action_params, msg.tag)
            logger.debug(debugMsg)

        # execute the attack trace
        execute_attack(msc)
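Putting the flags together, a possible invocation could look like the line below; the script and file names are assumptions, only the flags come from the argument parser above.

python wafex.py model.aslan++ --c concretization.txt --mc-timeout 300 --proxy 127.0.0.1:8080 --debug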
Example 17
def exiting():
    kill()
    logger.debug("Done!")
Example 18
def exitcleanup(*args):
    """ Performs exiting and cleanup operations. """
    debugMsg = "exiting {}".format(__name__)
    logger.debug(debugMsg)
Example 19
def _run_payload_request(http, s, xss_payload):
    """ Performs a malicious injection request.
        :param http: an AbstractHttpRequest representing the request.
        :param s: the session to use for performing the request.
        :param xss_payload: the payload to use in the injection point.
    """
    # ask values for the GET params
    url_params = dict()
    for abstract_k in http.get_params:
        _get = http.get_params[abstract_k]
        vuln_param = http.action_params[0]
        k = _get[0]
        v = _get[1]
        if vuln_param == abstract_k:
            # this is the injection point
            # automatically take care of it by using xss_payload
            infoMsg = "Injection point: {}".format(vuln_param)
            logger.info(infoMsg)
            v = xss_payload
        else:
            k, v = _instantiate_value(k, v, http.params[abstract_k])

        url_params[k] = v
    debugMsg = url_params
    logger.debug(debugMsg)

    # ask values for the POST params
    post_params = dict()
    for abstract_k in http.post_params:
        _post = http.post_params[abstract_k]
        vuln_param = http.action_params[0]
        k = _post[0]
        v = _post[1]
        if vuln_param == abstract_k:
            # this is the injection point
            # automatically take care of it by using xss_payload
            infoMsg = "Injection point: {}".format(vuln_param)
            logger.info(infoMsg)
            v = "xss"
        else:
            k, v = _instantiate_value(k, v, http.params[abstract_k])

        post_params[k] = v
    debugMsg = post_params
    logger.debug(debugMsg)

    # configure the proxy if it was set
    if config.proxy_ip and config.proxy_port:
        proxies = {
            "http": "http:{}:{}".format(config.proxy_ip, config.proxy_port),
            "https": "https{}:{}".format(config.proxy_ip, config.proxy_port)
        }
        resp = s.request(method=http.method,
                         url=http.url,
                         params=url_params,
                         data=post_params,
                         proxies=proxies)
    else:
        resp = s.request(method=http.method,
                         url=http.url,
                         params=url_params,
                         data=post_params)