def start(self):
    """Run the API scan and return the collected API calls.

    Two modes, chosen by whether a URL was supplied:
      * ``self.url`` set   -> launch a proxied browser and crawl the site,
        recording HAR files as it goes.
      * ``self.url`` unset -> parse an existing directory of HAR files.

    Returns:
        The API calls found by either the crawl or the HAR parse.

    Raises:
        SystemExit: if a page count > 1 was given without a URL.
    """
    import sys  # local import: keeps this fix self-contained

    # A page count only makes sense when there is a site to crawl.
    if self.count > 1 and self.url is None:
        print("Cannot provide page count with no URL given")
        # sys.exit is the programmatic form; the bare `exit` builtin is
        # only guaranteed in interactive sessions (injected by `site`).
        sys.exit(1)
    if self.removeParams and self.url is None:
        print("WARNING: Must have Internet connection to remove unneeded parameters")
    try:
        # Scan for all APIs
        if self.url:
            os.makedirs(self.harDirectory, exist_ok=True)
            self.deleteExistingHars()
            # Browser drives chromedriver + browsermob-proxy subprocesses;
            # it must be closed even if the crawl fails (see finally).
            self.browser = Browser(
                "chromedriver/chromedriver",
                "browsermob-proxy-2.1.4/bin/browsermob-proxy",
                self.harDirectory,
                cookies=self.cookies,
            )
            if self.searchString is not None:
                print("Searching URL " + self.url + " for string " + self.searchString)
            # Move recursively through the site
            apiCalls = self.crawlingScan(self.url)
        # Scan directory of har files
        else:
            print("Parsing existing directory of har files")
            harParser = HarParser(self.harDirectory, self.searchString, self.removeParams)
            apiCalls = harParser.parseMultipleHars()
    finally:
        # NOTE(review): assumes __init__ initialises self.browser to None
        # (the original code performed the same unguarded check) — confirm.
        if self.browser is not None:
            self.browser.close()
    return apiCalls
def start(self):
    """Kick off an API scan.

    When a URL is configured, a proxied browser is launched and the site
    is crawled recursively, producing HAR files. Otherwise an existing
    directory of HAR files is parsed instead. Either way the discovered
    API calls are returned.
    """
    no_url = self.url is None
    # Requesting multiple pages without a URL is a configuration error.
    if no_url and self.count > 1:
        print("Cannot provide page count with no URL given")
        exit(1)
    if no_url and self.removeParams:
        print(
            "WARNING: Must have Internet connection to remove unneeded parameters"
        )
    if self.url:
        # Live crawl: prepare a clean HAR directory and a proxied browser.
        os.makedirs(self.harDirectory, exist_ok=True)
        self.deleteExistingHars()
        self.browser = Browser(
            "chromedriver/chromedriver",
            "browsermob-proxy-2.1.4/bin/browsermob-proxy",
            self.harDirectory,
            cookies=self.cookies,
        )
        if self.searchString is not None:
            print("Searching URL " + self.url + " for string " + self.searchString)
        # Recurse through the site, collecting API calls along the way.
        found = self.crawlingScan(self.url)
    else:
        # Offline mode: reuse HAR files already on disk.
        print("Parsing existing directory of har files")
        parser = HarParser(self.harDirectory, self.searchString, self.removeParams)
        found = parser.parseMultipleHars()
    if self.browser is not None:
        self.browser.close()
    return found