def main(args):
    logger = log.getdebuglogger("dbmanager")
    graph = Graph(host=NEO4J_HOST, user=NEO4J_USERNAME, password=NEO4J_PASSWORD)

    args_obj = parse_args(args)
    if args_obj.verbose:
        logger.setLevel(log.LEVELS[-1])
    else:
        logger.setLevel(log.LEVELS[0])

    args_obj.func(args_obj, graph, logger)
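# A minimal entry-point sketch (assumption: this module is run as a script and
# NEO4J_HOST, NEO4J_USERNAME, NEO4J_PASSWORD and parse_args are defined
# elsewhere in the module, as the body of main() above suggests).
if __name__ == "__main__":
    import sys
    main(sys.argv[1:])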
def main():
    global LOG
    LOG = log.getdebuglogger("headershttpd")

    HOST_NAME = "127.0.0.1"
    PORT_NUMBER = 8080
    if len(sys.argv) == 3:
        HOST_NAME = sys.argv[1]
        PORT_NUMBER = int(sys.argv[2])

    server_class = ThreadedHTTPServer
    httpd = server_class((HOST_NAME, PORT_NUMBER), InfiniteHeadersServer)
    LOG.info("Server Starts - %s:%s" % (HOST_NAME, PORT_NUMBER))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass
    httpd.server_close()
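# ThreadedHTTPServer is not defined in this excerpt. A conventional definition,
# matching the ThreadingMixIn imports used by the other servers in this project,
# would look like the sketch below (an assumption, not the project's exact class).
from BaseHTTPServer import HTTPServer
from SocketServer import ThreadingMixIn


class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """Handle each request in its own thread."""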
import os
import signal
import threading

import utils.log as log

DEBUG = False
VERBOSITY = 2
SIM_DELAY = False
DELAY = 1

# Parsed arguments object
args_obj = None

if DEBUG:
    log.LEVEL = log.LEVELS[-1]
else:
    log.LEVEL = log.LEVELS[0]

# Installing the loggers
s_logger = log.getdebuglogger("selenese-runner")
v_logger = log.getdebuglogger("vilanoo2")
m_logger = log.getdebuglogger("mosgi")

# MOSGI connection
mosgi_connection = None
mosgi_start_command_byte = 0
mosgi_finish_response_byte = 2

# Database parameters
sqlitedb = None
sqlite_schema = os.path.join(os.getcwd(), "../../data/DBSchemaDump.sql")

# Lock to remove async requests toward upstream server
lock = threading.Lock()
Produces an HTTP 302 with location NEW_URL.

@author: gianko
'''
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import threading
import urlparse
import sys

import utils.log as log
import common

LOG = log.getdebuglogger("httpdred")

HOST_NAME = "127.0.0.1"
PORT_NUMBER = 80


class HttpdRed(BaseHTTPRequestHandler):

    def do_GET(self):
        tname = threading.currentThread().getName()
        LOG.info("<%s> URL %s" % (tname, self.path))
        LOG.info("<%s> client_address=%s" % (tname, self.client_address))
        LOG.debug("<%s> headers=%s" % (tname, self.headers))

        urlp = urlparse.urlparse(self.path)
        qs = urlparse.parse_qs(urlp.query)
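        # The remainder of do_GET is not shown in this excerpt. Given the module
        # docstring ("produces an HTTP 302 with location NEW_URL"), a minimal
        # continuation could look like this; NEW_URL is assumed to be a
        # module-level constant defined elsewhere.
        self.send_response(302)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-Length", 0)
        self.send_header("Location", NEW_URL)
        self.end_headers()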
'''
Created on Sep 10, 2014

@author: gianko
'''
from BaseHTTPServer import BaseHTTPRequestHandler
import threading
import urlparse

import utils.log as log

LOG = log.getdebuglogger("common")


def create_httpd(callback):

    class HTTPScanMonitor(BaseHTTPRequestHandler):

        def do_GET(self):
            tname = threading.currentThread().getName()
            LOG.info("<%s> GET %s" % (tname, self.path))
            LOG.info("<%s> client_address=%s" % (tname, self.client_address))
            LOG.info("<%s> headers=%s" % (tname, self.headers))
            callback(self)

    return HTTPScanMonitor


def serve_302(self, f_location):
    self.send_response(302)
    self.send_header("Content-type", "text/html")
    self.send_header("Content-Length", 0)
    self.send_header("Location", f_location())
    self.end_headers()
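# A hedged usage sketch: the class returned by create_httpd() is a regular
# BaseHTTPRequestHandler subclass, so it can be plugged into a standard
# HTTPServer. The callback name and port below are illustrative assumptions,
# not values taken from this module.
if __name__ == "__main__":
    from BaseHTTPServer import HTTPServer

    def on_scan_request(handler):
        # Called with the request handler for every GET received by the monitor.
        handler.send_response(200)
        handler.end_headers()

    httpd = HTTPServer(("127.0.0.1", 8080), create_httpd(on_scan_request))
    httpd.serve_forever()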
@TODO: to be completed.

@author: gianko
'''
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import threading
import urlparse
import sys

import utils.log as log
import common
import httplib2

LOG = log.getdebuglogger("webapp")

HOST_NAME = "127.0.0.1"
PORT_NUMBER = 8080
TIMEOUT = 30


def handle_ssrf(self):
    """@TODO: to be completed."""
    urlp = urlparse.urlparse(self.path)
    qs = urlparse.parse_qs(urlp.query)

    if "fetch" not in qs:
        self.serve_404()
        return
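    # handle_ssrf is marked "@TODO: to be completed". A minimal hedged
    # continuation, assuming the "fetch" parameter carries the target URL and
    # that serve_404() above is the handler's error helper:
    target = qs["fetch"][0]
    http = httplib2.Http(timeout=TIMEOUT)
    try:
        resp, content = http.request(target, "GET")
    except Exception as e:
        LOG.error("fetch of %s failed: %s" % (target, e))
        self.serve_404()
        return

    self.send_response(200)
    self.send_header("Content-Length", len(content))
    self.end_headers()
    self.wfile.write(content)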
import utils.log as log
from utils.common import LockedSet
from webscraping.webkit import WebPage, NetworkAccessManager

from PyQt4.QtCore import QTimer, QUrl, QThread
from PyQt4.QtGui import QApplication
from PyQt4.QtWebKit import QWebView, QWebElement, QWebSettings
from PyQt4.QtNetwork import QNetworkRequest

import redis
from collections import deque
import urlparse, sys, json

LOGGER = log.getdebuglogger("utils.web")

NUM_THREADS = 10  # how many threads to use
MAX_LEVEL = 1     # max depth

_URLs_SET_KEY = "crawler:data"


def normalize_url(base, url):
    return urlparse.urljoin(base, url)


def normalize_form(url, form):
    if "action" in form["attributes"]:
        form["attributes"]["action"] = normalize_url(url, form["attributes"]["action"])
    else:
        LOGGER.debug("No FORM action found: %s" % form["attributes"])
    return form
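# A short illustrative example (assumed values, not from the module): relative
# form actions are resolved against the page URL via urlparse.urljoin.
if __name__ == "__main__":
    page_url = "http://example.com/app/index.php"
    form = {"attributes": {"action": "submit.php", "method": "post"}}
    print normalize_url(page_url, "../login.php")                  # http://example.com/login.php
    print normalize_form(page_url, form)["attributes"]["action"]   # http://example.com/app/submit.php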
Created on Sep 10, 2014

@author: gianko
'''
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
import threading
import urlparse
import sys

import utils.log as log
import monitor.common as common

LOG = log.getdebuglogger("httpdmon")

HOST_NAME = "127.0.0.1"
MONITOR = "127.0.0.1"
PORT_NUMBER = 8080


def get_jssupp_xmlhttpreq(self):
    page = common.JSSUPP_XMLHTTPREQ % {"monitor": "http://%s:%s" % (MONITOR, PORT_NUMBER)}
    self.send_response(200)
    self.send_header("Content-type", "text/html")
    self.send_header("Content-Length", len(page))
    self.end_headers()
    self.wfile.write(page)


def get_jssupp_img(self):
    page = common.JSSUPP_IMG % {"monitor": "http://%s:%s" % (MONITOR, PORT_NUMBER)}
    self.send_response(200)
import requests

import utils.log as log
from requests.packages.urllib3.exceptions import InsecureRequestWarning
from utils.cookie import BetterCookie

requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

# from urlparse import urlunparse, urlparse

DEBUG = False
TIMEOUT = 120
MAX_RETRY = 3

if DEBUG:
    log.LEVEL = log.LEVELS[-1]
else:
    log.LEVEL = log.LEVELS[0]

logger = log.getdebuglogger("csrf-test-runner")

# Selenese runner
s_logger = log.getdebuglogger("selenese")
selrun_thr = None

# MOSGI STUFF
# MOSGI logger
m_logger = log.getdebuglogger("mosgi")
# MOSGI connection
mosgi_connection = None
mosgi_start_command_byte = 0
mosgi_finish_response_byte = 2