def url_handler():
    """Flask view: fetch, store, and return the content of the submitted URL.

    Fix: the original tested ``request.form['url'] is not None``, which can
    never be False — indexing a missing form key raises a KeyError (400 in
    Flask) before the check runs, so the "url parameter not given." branch
    was unreachable.  ``request.form.get('url')`` returns None instead.
    """
    url = request.form.get('url')
    if url is not None:
        w = Webpage(url)
        w.store_content(w.get_content())
        # NOTE(review): 'retreive_content' (sic) is the Webpage API's own
        # spelling; renaming it here would break the class contract.
        return w.retreive_content()
        # return render_template('url_result.html')
    else:
        return "url parameter not given."
def getWebpagesHerrmann(webpageIds, traceIndexStart, traceIndexEnd):
    """Return one Webpage per id, each populated with its Herrmann-dataset
    traces for indices in [traceIndexStart, traceIndexEnd)."""
    pages = []
    for pageId in webpageIds:
        page = Webpage(pageId)
        for idx in range(traceIndexStart, traceIndexEnd):
            page.addTrace(Datastore.getTraceHerrmann(pageId, idx))
        pages.append(page)
    return pages
def getWebpagesHerrmann(webpageIds, traceIndexStart, traceIndexEnd):
    """Load Herrmann-dataset traces for each webpage id.

    NOTE(review): this re-defines getWebpagesHerrmann declared earlier in
    this file with identical behavior; the later definition wins at import
    time — consider deleting one of the two.
    """
    result = []
    for wid in webpageIds:
        wp = Webpage(wid)
        for ti in range(traceIndexStart, traceIndexEnd):
            tr = Datastore.getTraceHerrmann(wid, ti)
            wp.addTrace(tr)
        result.append(wp)
    return result
def getWebpagesHoneyPatchSysdigTest(webpageIds, traceIndexStart, traceIndexEnd):
    """Build Webpage objects from HoneyPatch sysdig test captures.

    webpageId in {0, 1} selects the folder {benign, attack}; traceIndex = x
    maps to stream-x.pcap.
    """
    pages = []
    for pageId in webpageIds:
        page = Webpage(pageId)
        for idx in range(traceIndexStart, traceIndexEnd):
            page.addTrace(Datastore.getTraceHoneyPatchSysdig(pageId, idx))
        pages.append(page)
    return pages
def getWebpagesEsorics16Tor(webpageIds, traceIndexStart, traceIndexEnd):
    """Build Webpage objects from the ESORICS'16 Tor dataset.

    For monitored websites the loader reads file batch/webpageId-traceIndex.
    """
    pages = []
    for pageId in webpageIds:
        page = Webpage(pageId)
        for idx in range(traceIndexStart, traceIndexEnd):
            page.addTrace(Datastore.getTraceEsorics16Tor(pageId, idx))
        pages.append(page)
    return pages
def getWebpagesLL(webpageIds, traceIndexStart, traceIndexEnd):
    """Return one Webpage per id, populated with its LL traces for indices
    in [traceIndexStart, traceIndexEnd) (nested loop over ids x indices)."""
    pages = []
    for pageId in webpageIds:
        page = Webpage(pageId)
        for idx in range(traceIndexStart, traceIndexEnd):
            page.addTrace(Datastore.getTraceLL(pageId, idx))
        pages.append(page)
    return pages
def getDummyWebpages(webpageId):
    """Return a single-element list holding one Webpage that carries an
    empty trace (a placeholder, not real capture data)."""
    dummy = Webpage(webpageId)
    # add an empty trace
    # dummyTrace = Trace(webpageId)
    # dummy.addTrace(dummyTrace)
    dummy.addTrace([])
    return [dummy]
def getWebpagesLL(webpageIds, traceIndexStart, traceIndexEnd):
    """Build Webpage objects populated with LL-dataset traces.

    NOTE(review): this re-defines getWebpagesLL declared earlier in this
    file; the later definition wins at import time.

    Fix: removed the unused ``count``/``max_count`` locals — ``count`` was
    incremented every iteration but never read.
    """
    webpages = []
    for webpageId in webpageIds:
        webpage = Webpage(webpageId)
        for traceIndex in range(traceIndexStart, traceIndexEnd):
            webpage.addTrace(Datastore.getTraceLL(webpageId, traceIndex))
        webpages.append(webpage)
    return webpages
def getWebpagesHoneyPatchSomePackets(webpageIds, traceIndexStart, traceIndexEnd):
    """Build Webpage objects from HoneyPatch captures, skipping instances
    previously recorded in config.excludedInst.

    webpageId in {0, 1} selects the folder {benign, attack}; traceIndex = x
    maps to stream-x.pcap.

    Fixes: call print as a function so the module parses under both
    Python 2 and 3, and reuse the already-built ``checkKey`` string for the
    log line instead of rebuilding it.
    """
    webpages = []
    for webpageId in webpageIds:
        webpage = Webpage(webpageId)
        for traceIndex in range(traceIndexStart, traceIndexEnd):
            checkKey = str(webpageId) + '_' + str(traceIndex)
            # Skip instances flagged as excluded by earlier loading passes.
            if config.excludedInst.get(checkKey):
                print(checkKey + ' removed')
                continue
            trace = Datastore.getTraceHoneyPatchSomePackets(webpageId, traceIndex)
            webpage.addTrace(trace)
        webpages.append(webpage)
    return webpages
def get_url_contents(self, url):
    """Fetch *url*, store its body, and collect outgoing anchor URLs.

    Stores the fetched body via ``self.set_page`` (failures reported through
    ``self.error`` with code '0x1'), then appends the first element of each
    anchor returned by ``Webpage.get_anchors`` to ``self.found_urls`` so the
    crawl can continue from them.

    Fix: replaced the un-idiomatic ``not response == ''`` (used twice) with
    ``response != ''``; behavior is unchanged, including the case where
    ``get_page`` returns None (None != '' is True, as before).
    """
    page = Webpage(url)
    response = page.get_page()
    try:
        if response != '':
            self.set_page(url, response)
    except Exception as e:
        # NOTE(review): broad catch kept deliberately so a storage failure
        # does not abort the crawl; the error is reported, not swallowed.
        self.error(e, '0x1')
    if response != '':
        anchors = page.get_anchors(response)
        # Repeat the crawl function for every anchor found.
        for anchor in anchors:
            self.found_urls.append(anchor[0])
def getWebpagesHoneyPatchSysdig(webpageIds, traceIndexStart, traceIndexEnd):
    """Build Webpage objects from HoneyPatch sysdig captures, recording
    empty traces in config.excludedInst instead of adding them.

    webpageId in {0, 1} selects the folder {benign, attack}; traceIndex = x
    maps to stream-x.pcap.

    Fixes: call print as a function so the module parses under both
    Python 2 and 3, and reuse the already-built ``key`` string for the log
    line instead of rebuilding it.
    """
    webpages = []
    for webpageId in webpageIds:
        webpage = Webpage(webpageId)
        for traceIndex in range(traceIndexStart, traceIndexEnd):
            trace = Datastore.getTraceHoneyPatchSysdig(webpageId, traceIndex)
            # print webpageId, traceIndex
            if trace.getEventCount() != 0:
                webpage.addTrace(trace)
            else:
                # Remember the empty instance so later loaders can skip it.
                key = str(webpageId) + '_' + str(traceIndex)
                config.excludedInst[key] = 1
                print(key + ' is empty.')
        webpages.append(webpage)
    return webpages
def query():
    """Flask view: run a SELECT-only SQL query given in the query string.

    Fixes: cache ``request.args.get('query')`` instead of reading it twice;
    turn the stray bare-string "comment" into a real comment; flatten the
    nesting with guard clauses; and return an explicit error response when
    the parser yields no table name (the original could fall through and
    return None, which Flask rejects as a view response).
    """
    sql = request.args.get('query')
    if sql is None:
        return "query not present"
    sql_parser = SQLParser(sql)
    stmt = sql_parser.parse()
    # Only a valid SELECT query is accepted.
    if stmt is None:
        return "Invalid Query"
    table_name = sql_parser.understand()
    if table_name is not None:
        Webpage.get_star(table_name)
        return "cool"
    # NOTE(review): previously an implicit None fell out of this branch.
    return "Invalid Query"