Example #1
import logging
import time

logger = logging.getLogger(__name__)


def backtrack(driver, fsm, node, formValues, tillEnd, path):
    logger.info("Doing backtrack")
    graph = fsm.graph
    # The path argument is ignored; the recorded click path for this state
    # is taken from the graph's node data instead.
    path = graph.node[node]['nodedata'].backtrackPath
    # backtrackPath[0] is the entry URL; the remaining entries are the
    # recorded clickables (each carrying an .xpath) used to reach this state.
    driver.get(path[0])
    # tillEnd == 1 replays the full path; tillEnd == 0 skips the last click.
    for i in range(1, len(path) - 1 + tillEnd):
        time.sleep(0.5)
        fillFormValues(formValues, driver)
        time.sleep(0.5)
        driver.find_element_by_xpath(path[i].xpath).click()

    clearContent()
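The node data this helper reads is not part of the listing. Below is a minimal sketch, assuming backtrackPath[0] is the entry URL and each later entry is a recorded clickable carrying the XPath that was clicked; the Clickable and NodeData names are hypothetical, inferred from how path is used above.

class Clickable:
    def __init__(self, xpath):
        self.xpath = xpath  # XPath of the element that was clicked

class NodeData:
    def __init__(self, entry_url, steps):
        # backtrackPath[0] feeds driver.get(); the rest feed .xpath clicks
        self.backtrackPath = [entry_url] + steps

# e.g. a state reached by one menu click from the entry page:
data = NodeData("http://localhost/app/", [Clickable("//a[@id='menu']")])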
Example #2
import logging
import time

logger = logging.getLogger(__name__)


def backtrack(driver, fsm, node, formValues, tillEnd):
    # Same replay logic as Example #1, without the unused path parameter.
    logger.info("Doing backtrack")
    graph = fsm.graph
    path = graph.node[node]['nodedata'].backtrackPath
    driver.get(path[0])
    for i in range(1, len(path) - 1 + tillEnd):
        time.sleep(0.5)
        fillFormValues(formValues, driver)
        time.sleep(0.5)
        driver.find_element_by_xpath(path[i].xpath).click()

    clearContent()
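Both variants also call fillFormValues and clearContent, which the listing does not include. A minimal sketch under the assumption that formValues is a dict keyed by input name; it uses the same legacy find_element* Selenium API as the examples, and clearContent is left as a stub.

from selenium.common.exceptions import WebDriverException

def fillFormValues(formValues, driver):
    # Type canned values into every text input currently on the page.
    for field in driver.find_elements_by_xpath("//input[@type='text']"):
        try:
            field.clear()
            name = field.get_attribute("name")
            field.send_keys(formValues.get(name, "test"))
        except WebDriverException:
            pass  # hidden or stale element; skip it

def clearContent():
    # Stub: the real body is not shown in these examples.
    pass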
Example #3
import logging
import time

from selenium.webdriver.support.ui import WebDriverWait

logger = logging.getLogger(__name__)


def Crawl(curNode, fsm, driver, globalVariables, depth):
    '''
    Crawls the application by doing a depth-first search over the state
    nodes, bounded by globalVariables.depth.
    '''
    if depth > globalVariables.depth:
        logger.info("depth exceeded")
        backtrack(driver, fsm, curNode, globalVariables.formFieldValues, 0)
        return
    logger.info("crawling in normal mode")
    graph = fsm.graph
    graph.node[curNode]['nodedata'].visited = 1
    clickables = graph.node[curNode]['nodedata'].clickables
    logger.info("Clicking All Clickables to get a New State")
    for entity in clickables:

        if entity.tag == "a" and 'href' in entity.attrs:
            # Skip banned links and links whose target state already exists.
            if checkForBannedUrls(
                    entity.attrs,
                    globalVariables,
                    graph.node[curNode]['nodedata'].link):
                continue

            if fsm.checkStateUrlExist(globalVariables.baseAddress + entity.attrs['href']):
                continue

        logger.info("Trying to click the element %s", entity.xpath)
        time.sleep(1.5)
        fillFormValues(globalVariables.formFieldValues, driver)
        time.sleep(1.5)
        try:
            driver.find_element_by_xpath(entity.xpath).click()
        except Exception as e:
            logger.warning("click failed: %s", e)

        AcceptAlert(driver)

        time.sleep(1)

        # Make a new node for the state we landed on; addGraphNode returns -1
        # when the state already exists in the graph.
        newNode = CreateNode(driver)
        nodeNumber = addGraphNode(newNode, curNode, driver, fsm, entity)
        if nodeNumber != -1:
            Crawl(nodeNumber, fsm, driver, globalVariables, depth + 1)
        else:
            logger.info("going back click")
            backtrack(driver, fsm, curNode, globalVariables.formFieldValues, 1)

    # NOTE: WebDriverWait only blocks when chained with .until(); on its own
    # this line constructs the waiter and discards it.
    WebDriverWait(driver, 2000)
    backtrack(driver, fsm, curNode, globalVariables.formFieldValues, 0)
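Crawl also relies on an AcceptAlert helper to dismiss JavaScript alerts triggered by a click. The real helper is not shown here; a minimal sketch using Selenium's standard alert API:

from selenium.common.exceptions import NoAlertPresentException

def AcceptAlert(driver):
    # Accept a JavaScript alert if one appeared after the last click;
    # otherwise do nothing.
    try:
        driver.switch_to.alert.accept()
    except NoAlertPresentException:
        pass

Accessing driver.switch_to.alert raises NoAlertPresentException when no alert is open, so the helper is safe to call unconditionally after every click.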