Code example #1
	def __init__(self, args):

		# call base class's init to finish setup
		unittest.TestCase.__init__(self, args)

		# disable SQL logging before the database has been created.
		# this prevents logging info on table schemas and other startup info.
		logging.getLogger('sqlalchemy.engine').setLevel(logging.CRITICAL)

		# set up the database once for all tests
		# TODO: refactor ndb so we can pass a blank set of args
		# or a simple dictionary like {'dbecho':True, 'dbname':self.TEST_DATABASE})
		# argparse objects don't like being compared to None.
		args = ndb.get_parser().parse_args()
		args.dbname = self.TEST_DATABASE
		self.ndb = ndb(args)

		# now turn on sqlalchemy logging.
		# do not use the 'dbecho' flag, so we can control the formatting ourselves
		# and avoid duplicate logs. see the sqlalchemy docs for details -
		# http://docs.sqlalchemy.org/en/rel_0_8/core/engines.html#dbengine-logging 

		# format logging to print only SQL with no other info
		# (e.g. remove the timestamp and logging level)
		logging.basicConfig(format="\n>>>>>>>\n%(message)s\n<<<<<<<\n",
			filename=self.LOG_FILE, filemode='w')
		logging.getLogger(self.SQL_LOG_CHANNEL).setLevel(logging.INFO)
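
The example above first silences the 'sqlalchemy.engine' logger while the database is created and only afterwards raises it to INFO, so startup DDL never reaches the log. A minimal self-contained sketch of the same pattern, assuming an in-memory SQLite database and a stand-in log file name (the original writes to self.LOG_FILE):

import logging
from sqlalchemy import create_engine, text

# route log output to a file, printing only the message itself
logging.basicConfig(format="\n>>>>>>>\n%(message)s\n<<<<<<<\n",
                    filename="sql.log", filemode='w')

# keep SQL logging quiet while the schema is created
logging.getLogger('sqlalchemy.engine').setLevel(logging.CRITICAL)
engine = create_engine('sqlite://')   # in-memory database, for illustration
with engine.begin() as conn:
    conn.execute(text("CREATE TABLE services (name TEXT)"))

# now log every statement; the echo/dbecho flag is avoided because it
# attaches its own handler and can produce duplicate log lines
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
with engine.begin() as conn:
    conn.execute(text("SELECT name FROM services"))
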
Code example #2
    def __init__(self, args):

        # call base class's init to finish setup
        unittest.TestCase.__init__(self, args)

        # disable SQL logging before the database has been created.
        # this prevents logging info on table schemas and other startup info.
        logging.getLogger('sqlalchemy.engine').setLevel(logging.CRITICAL)

        # set up the database once for all tests
        # TODO: refactor ndb so we can pass a blank set of args
        # or a simple dictionary like {'dbecho':True, 'dbname':self.TEST_DATABASE})
        # argparse objects don't like being compared to None.
        args = ndb.get_parser().parse_args()
        args.dbname = self.TEST_DATABASE
        self.ndb = ndb(args)

        # now turn on sqlalchemy logging.
        # do not use the 'dbecho' flag, so we can control the formatting ourselves
        # and avoid duplicate logs. see the sqlalchemy docs for details -
        # http://docs.sqlalchemy.org/en/rel_0_8/core/engines.html#dbengine-logging

        # format logging to print only SQL with no other info
        # (e.g. remove the timestamp and logging level)
        logging.basicConfig(format="\n>>>>>>>\n%(message)s\n<<<<<<<\n",
                            filename=self.LOG_FILE,
                            filemode='w')
        logging.getLogger(self.SQL_LOG_CHANNEL).setLevel(logging.INFO)
Code example #3
	def __init__(self):
		parser = argparse.ArgumentParser(parents=[keymanager.get_parser(), ndb.get_parser()],
			description=self.__doc__, version=self.VERSION,
			epilog="If the database schema does not exist it will be automatically created on launch.")
		portgroup = parser.add_mutually_exclusive_group()
		portgroup.add_argument('--webport', '-p', default=self.DEFAULT_WEB_PORT, type=int,
			help="Port to use for the web server. Ignored if --envport is specified. Default: \'%(default)s\'.")
		portgroup.add_argument('--envport', '-e', action='store_true', default=False,
			help="Read which port to use from the environment variable '" + self.ENV_PORT_KEY_NAME + "'. Using this will override --webport. Default: \'%(default)s\'")
		parser.add_argument('--echo-screen', '--echoscreen', '--screenecho', action='store_true', default=False,
			help='Send web server output to stdout rather than a log file.')

		cachegroup = parser.add_mutually_exclusive_group()
		cachegroup.add_argument('--memcache', '--memcached', action='store_true', default=False,
			help="Use memcache to cache observation data, to increase performance and reduce load on the notary database.\
				Cached info expires after " + str(self.CACHE_EXPIRY / 3600) + " hours. " + cache.Memcache.get_help())
		cachegroup.add_argument('--memcachier', action='store_true', default=False,
			help="Use memcachier to cache observation data. " + cache.Memcachier.get_help())
		cachegroup.add_argument('--redis', action='store_true', default=False,
			help="Use redis to cache observation data. " + cache.Redis.get_help())

		args = parser.parse_args()

		# pass ndb the args so it can use any relevant ones from its own parser
		try:
			self.ndb = ndb(args)
		except Exception as e:
			self.ndb = None
			print >> sys.stderr, "Database error: '%s'" % (str(e))

		# same for keymanager
		km = keymanager(args)
		(self.notary_public_key, self.notary_priv_key) = km.get_keys()
		if (self.notary_public_key == None or self.notary_priv_key == None):
			print >> sys.stderr, "Could not get public and private keys."
			exit(1)
		print "Using public key\n" + self.notary_public_key

		self.create_static_index()

		self.web_port = self.DEFAULT_WEB_PORT
		if (args.envport):
			if (self.ENV_PORT_KEY_NAME in os.environ):
				self.web_port = int(os.environ[self.ENV_PORT_KEY_NAME])
			else:
				raise ValueError("--envport option specified but no '%s' variable exists." % \
					(self.ENV_PORT_KEY_NAME))
		elif (args.webport):
			self.web_port = args.webport

		self.cache = None
		if (args.memcache):
			self.cache = cache.Memcache()
		elif (args.memcachier):
			self.cache = cache.Memcachier()
		elif (args.redis):
			self.cache = cache.Redis()

		self.active_threads = 0 
		self.args = args
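
Every server constructor in this listing builds its parser with parents=[keymanager.get_parser(), ndb.get_parser()], so each subsystem defines its own options once and the server inherits them. A minimal sketch of that composition, with invented option names standing in for the real keymanager/ndb arguments:

import argparse

def ndb_parser():
    # parent parsers must be created with add_help=False so they do not
    # clash with the child parser's own -h/--help
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--dbname', default='notary.db',
                        help="Database name (hypothetical option).")
    return parser

def keymanager_parser():
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--private-key', default='notary.priv',
                        help="Private key file (hypothetical option).")
    return parser

# the child parser inherits every argument its parents define
parser = argparse.ArgumentParser(
    parents=[keymanager_parser(), ndb_parser()],
    description="Example server that reuses its subsystems' options.")
parser.add_argument('--webport', '-p', default=8080, type=int)

args = parser.parse_args(['--dbname', 'test.db', '-p', '9000'])
print(args.dbname)      # test.db
print(args.webport)     # 9000

Note that the listed snippets are Python 2 code (print statements, print >> sys.stderr, and the version= keyword to ArgumentParser, which Python 3's argparse no longer accepts; there you would use add_argument('--version', action='version', version=...) instead).
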
Code example #4
    def __init__(self, args):

        # call base class's init to finish setup
        unittest.TestCase.__init__(self, args)

        # set up the database once for all tests
        # TODO: refactor ndb so we can pass a blank set of args
        # or a simple dictionary like {'dbecho':True, 'dbname':self.TEST_DATABASE})
        # argparse objects don't like being compared to None.
        args = ndb.get_parser().parse_args()
        args.dbname = self.TEST_DATABASE
        args.metricsdb = True
        self.ndb = ndb(args)
Code example #5
	def __init__(self, args):

		# call base class's init to finish setup
		unittest.TestCase.__init__(self, args)

		# set up the database once for all tests
		# TODO: refactor ndb so we can pass a blank set of args
		# or a simple dictionary like {'dbecho':True, 'dbname':self.TEST_DATABASE})
		# argparse objects don't like being compared to None.
		args = ndb.get_parser().parse_args()
		args.dbname = self.TEST_DATABASE
		args.metricsdb = True
		self.ndb = ndb(args)
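
The two test fixtures above (#4 and #5) build their args by re-running ndb.get_parser() and then overriding individual fields on the parsed namespace. One caveat: parse_args() with no argument list reads sys.argv, so under a test runner it can pick up the runner's own options; passing an explicit empty list avoids that. A hedged sketch of the same idea using a stand-in parser and unittest's setUpClass for the one-time setup (get_parser and the option names below are placeholders, not the real ndb API):

import argparse
import unittest

def get_parser():
    # stand-in for ndb.get_parser(); only the fields used below are defined
    parser = argparse.ArgumentParser(add_help=False)
    parser.add_argument('--dbname', default='notary.db')
    parser.add_argument('--metricsdb', action='store_true', default=False)
    return parser

class NdbTestCase(unittest.TestCase):
    TEST_DATABASE = 'test_notary.db'

    @classmethod
    def setUpClass(cls):
        # parse an explicit empty list so the test runner's own
        # command-line options are never consumed by accident
        args = get_parser().parse_args([])
        args.dbname = cls.TEST_DATABASE   # override fields as needed
        args.metricsdb = True
        cls.args = args

    def test_database_name_is_overridden(self):
        self.assertEqual(self.args.dbname, self.TEST_DATABASE)

if __name__ == '__main__':
    unittest.main()
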
Code example #6
    def __init__(self):
        parser = argparse.ArgumentParser(
            parents=[keymanager.get_parser(),
                     ndb.get_parser()],
            description=self.__doc__,
            version=self.VERSION,
            epilog=
            "If the database schema does not exist it will be automatically created on launch."
        )
        portgroup = parser.add_mutually_exclusive_group()
        portgroup.add_argument(
            '--webport',
            '-p',
            default=self.DEFAULT_WEB_PORT,
            type=int,
            help=
            "Port to use for the web server. Ignored if --envport is specified. Default: \'%(default)s\'."
        )
        portgroup.add_argument(
            '--envport',
            '-e',
            action='store_true',
            default=False,
            help="Read which port to use from the environment variable '" +
            self.ENV_PORT_KEY_NAME +
            "'. Using this will override --webport. Default: \'%(default)s\'")
        parser.add_argument('--echo-screen', '--echoscreen', '--screenecho', '--screen-echo',\
         action='store_true', default=False,
         help='Send web server output to stdout rather than a log file.')
        parser.add_argument(
            '--sni',
            action='store_true',
            default=False,
            help=
            "Use Server Name Indication when scanning sites. See section 3.1 of http://www.ietf.org/rfc/rfc4366.txt.\
			 Default: \'%(default)s\'")

        cachegroup = parser.add_mutually_exclusive_group()
        cachegroup.add_argument(
            '--memcache',
            '--memcached',
            action='store_true',
            default=False,
            help=
            "Use memcache to cache observation data, to increase performance and reduce load on the notary database.\
				Cached info expires after " + str(self.CACHE_EXPIRY / 3600) + " hours. " +
            cache.Memcache.get_help())
        cachegroup.add_argument(
            '--memcachier',
            action='store_true',
            default=False,
            help="Use memcachier to cache observation data. " +
            cache.Memcachier.get_help())
        cachegroup.add_argument('--redis',
                                action='store_true',
                                default=False,
                                help="Use redis to cache observation data. " +
                                cache.Redis.get_help())
        cachegroup.add_argument(
            '--pycache',
            default=False,
            const=cache.Pycache.CACHE_SIZE,
            nargs='?',
            metavar=cache.Pycache.get_metavar(),
            help="Use RAM to cache observation data on the local machine only.\
			If you don't use any other type of caching, use this! " +
            cache.Pycache.get_help())

        parser.add_argument(
            '--cache-only',
            action='store_true',
            default=False,
            help=
            "When retrieving data, *only* read from the cache - do not read any database records. Default: %(default)s"
        )

        parser.add_argument('--cache-expiry', '--cache-duration',\
         default=self.CACHE_EXPIRY, type=self.cache_duration,
         metavar="CACHE_EXPIRY[Ss|Mm|Hh]",
         help="Expire cache entries after this many seconds / minutes / hours. " +\
         "Hours is the default time unit if none is provided. " +\
         "The default client settings ignore notary results that have not been updated in the past 48 hours, " +\
         "so you may want your (scan frequency + scan duration + cache expiry) to be <= 48 hours. Default: " +\
         str(self.CACHE_EXPIRY / 3600) + " hours.")

        # socket_queue_size and thread_pool use the cherrypy defaults,
        # but we hardcode them here rather than refer to the cherrypy variables directly
        # just in case the cherrypy architecture changes.
        parser.add_argument('--socket-queue-size', '--socket-queue',\
         default=5, type=self.positive_integer,
         help="The maximum number of queued connections. Must be a positive integer. Default: %(default)s.")

        parser.add_argument('--thread-pool-size', '--thread-pool', '--threads',\
         default=10, type=self.positive_integer,
         help="The number of worker threads to start up in the pool. Must be a positive integer. Default: %(default)s.")

        args = parser.parse_args()

        # pass ndb the args so it can use any relevant ones from its own parser
        try:
            self.ndb = ndb(args)
        except Exception as e:
            self.ndb = None
            print >> sys.stderr, "Database error: '%s'" % (str(e))

        # same for keymanager
        km = keymanager(args)
        (self.notary_public_key, self.notary_priv_key) = km.get_keys()
        if (self.notary_public_key == None or self.notary_priv_key == None):
            print >> sys.stderr, "Could not get public and private keys."
            exit(1)

        self.web_port = self.DEFAULT_WEB_PORT
        if (args.envport):
            if (self.ENV_PORT_KEY_NAME in os.environ):
                self.web_port = int(os.environ[self.ENV_PORT_KEY_NAME])
            else:
                raise ValueError("--envport option specified but no '%s' variable exists." % \
                 (self.ENV_PORT_KEY_NAME))
        elif (args.webport):
            self.web_port = args.webport

        self.cache = None
        if (args.memcache):
            self.cache = cache.Memcache()
        elif (args.memcachier):
            self.cache = cache.Memcachier()
        elif (args.redis):
            self.cache = cache.Redis()
        elif (args.pycache):
            self.cache = cache.Pycache(args.pycache)

        self.create_folder(self.LOG_DIR)

        self.use_sni = args.sni
        self.create_static_index()
        self.args = args

        print "Using public key\n" + self.notary_public_key
Code example #7
	def get_parser(cls):
		"""Return an argparser for NotaryHTTPServer."""
		parser = argparse.ArgumentParser(parents=[keymanager.get_parser(), ndb.get_parser()],
			description=cls.__doc__, version=cls.VERSION,
			epilog="If the database schema does not exist it will be automatically created on launch.")
		portgroup = parser.add_mutually_exclusive_group()
		portgroup.add_argument('--webport', '-p', default=cls.DEFAULT_WEB_PORT, type=int,
			help="Port to use for the web server. Ignored if --envport is specified. Default: \'%(default)s\'.")
		portgroup.add_argument('--envport', '-e', action='store_true', default=False,
			help="Read which port to use from the environment variable '" + cls.ENV_PORT_KEY_NAME + "'. Using this will override --webport. Default: \'%(default)s\'")
		loggroup = parser.add_mutually_exclusive_group()
		loggroup.add_argument('--echo-screen', '--echoscreen', '--screenecho', '--screen-echo',\
			action='store_true', default=True,
			help='Send web server output to stdout rather than a log file.')
		loggroup.add_argument('--logfile', action='store_true', default=False,
			help="Log to a file on disk rather than standard out.\
			A rotating set of {0} logs will be used, each capturing up to {1} bytes.\
			Files will be written to {2}.\
			Default: \'%(default)s\'".format(
				notary_logs.LOGGING_BACKUP_COUNT + 1,
				notary_logs.LOGGING_MAXBYTES,
				notary_logs.get_log_file(cls.LOG_FILE)))

		cachegroup = parser.add_mutually_exclusive_group()
		cachegroup.add_argument('--memcache', '--memcached', action='store_true', default=False,
			help="Use memcache to cache observation data, to increase performance and reduce load on the notary database.\
				Cached info expires after " + str(cls.CACHE_EXPIRY / 3600) + " hours. " + cache.Memcache.get_help())
		cachegroup.add_argument('--memcachier', action='store_true', default=False,
			help="Use memcachier to cache observation data. " + cache.Memcachier.get_help())
		cachegroup.add_argument('--redis', action='store_true', default=False,
			help="Use redis to cache observation data. " + cache.Redis.get_help())
		cachegroup.add_argument('--pycache', default=False, const=cache.Pycache.CACHE_SIZE,
			nargs='?', metavar=cache.Pycache.get_metavar(),
			help="Use RAM to cache observation data on the local machine only.\
			If you don't use any other type of caching, use this! " + cache.Pycache.get_help())

		parser.add_argument('--sni', action='store_true', default=False,
			help="Use Server Name Indication when scanning sites. See section 3.1 of http://www.ietf.org/rfc/rfc4366.txt.\
			 Default: \'%(default)s\'")
		parser.add_argument('--cache-only', action='store_true', default=False,
			help="When retrieving data, *only* read from the cache - do not read any database records. Default: %(default)s")

		parser.add_argument('--cache-expiry', '--cache-duration',\
			default=cls.CACHE_EXPIRY, type=cls.cache_duration,
			metavar="CACHE_EXPIRY[Ss|Mm|Hh]",
			help="Expire cache entries after this many seconds / minutes / hours. " +\
			"Hours is the default time unit if none is provided. " +\
			"The default client settings ignore notary results that have not been updated in the past 48 hours, " +\
			"so you may want your (scan frequency + scan duration + cache expiry) to be <= 48 hours. Default: " +\
			str(cls.CACHE_EXPIRY / 3600) + " hours.")

		# socket_queue_size and thread_pool use the cherrypy defaults,
		# but we hardcode them here rather than refer to the cherrypy variables directly
		# just in case the cherrypy architecture changes.
		parser.add_argument('--socket-queue-size', '--socket-queue',\
			default=5, type=cls.positive_integer,
			help="The maximum number of queued connections. Must be a positive integer. Default: %(default)s.")

		parser.add_argument('--thread-pool-size', '--thread-pool', '--threads',\
			default=10, type=cls.positive_integer,
			help="The number of worker threads to start up in the pool. Must be a positive integer. Default: %(default)s.")

		return parser
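
The parsers above validate --cache-expiry and the sizing options through type=cls.cache_duration and type=cls.positive_integer, letting argparse itself reject bad values. Those helpers are not part of the listing; the following is only a plausible sketch of what such converters could look like, matching the CACHE_EXPIRY[Ss|Mm|Hh] metavar and the "hours is the default time unit" note in the help text (the real project code may differ):

import argparse

def positive_integer(value):
    """argparse type: accept only integers greater than zero."""
    number = int(value)
    if number <= 0:
        raise argparse.ArgumentTypeError("'%s' is not a positive integer" % value)
    return number

def cache_duration(value):
    """argparse type: convert '30s', '15m' or '2h' (or a bare number of hours) to seconds."""
    units = {'s': 1, 'm': 60, 'h': 3600}
    value = value.strip().lower()
    if value and value[-1] in units:
        return positive_integer(value[:-1]) * units[value[-1]]
    return positive_integer(value) * 3600   # hours are the default time unit

parser = argparse.ArgumentParser()
parser.add_argument('--cache-expiry', type=cache_duration,
                    metavar="CACHE_EXPIRY[Ss|Mm|Hh]", default=24 * 3600)
parser.add_argument('--thread-pool-size', type=positive_integer, default=10)
print(parser.parse_args(['--cache-expiry', '36h']).cache_expiry)   # 129600
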
Code example #8
	def __init__(self):
		parser = argparse.ArgumentParser(parents=[keymanager.get_parser(), ndb.get_parser()],
			description=self.__doc__, version=self.VERSION,
			epilog="If the database schema does not exist it will be automatically created on launch.")
		portgroup = parser.add_mutually_exclusive_group()
		portgroup.add_argument('--webport', '-p', default=self.DEFAULT_WEB_PORT, type=int,
			help="Port to use for the web server. Ignored if --envport is specified. Default: \'%(default)s\'.")
		portgroup.add_argument('--envport', '-e', action='store_true', default=False,
			help="Read which port to use from the environment variable '" + self.ENV_PORT_KEY_NAME + "'. Using this will override --webport. Default: \'%(default)s\'")
		parser.add_argument('--echo-screen', '--echoscreen', '--screenecho', '--screen-echo',\
			action='store_true', default=False,
			help='Send web server output to stdout rather than a log file.')
		parser.add_argument('--sni', action='store_true', default=False,
			help="Use Server Name Indication when scanning sites. See section 3.1 of http://www.ietf.org/rfc/rfc4366.txt.\
			 Default: \'%(default)s\'")

		cachegroup = parser.add_mutually_exclusive_group()
		cachegroup.add_argument('--memcache', '--memcached', action='store_true', default=False,
			help="Use memcache to cache observation data, to increase performance and reduce load on the notary database.\
				Cached info expires after " + str(self.CACHE_EXPIRY / 3600) + " hours. " + cache.Memcache.get_help())
		cachegroup.add_argument('--memcachier', action='store_true', default=False,
			help="Use memcachier to cache observation data. " + cache.Memcachier.get_help())
		cachegroup.add_argument('--redis', action='store_true', default=False,
			help="Use redis to cache observation data. " + cache.Redis.get_help())
		cachegroup.add_argument('--pycache', default=False, const=cache.Pycache.CACHE_SIZE,
			nargs='?', metavar=cache.Pycache.get_metavar(),
			help="Use RAM to cache observation data on the local machine only.\
			If you don't use any other type of caching, use this! " + cache.Pycache.get_help())

		parser.add_argument('--cache-only', action='store_true', default=False,
			help="When retrieving data, *only* read from the cache - do not read any database records. Default: %(default)s")

		parser.add_argument('--cache-expiry', '--cache-duration',\
			default=self.CACHE_EXPIRY, type=self.cache_duration,
			metavar="CACHE_EXPIRY[Ss|Mm|Hh]",
			help="Expire cache entries after this many seconds / minutes / hours. " +\
			"Hours is the default time unit if none is provided. " +\
			"The default client settings ignore notary results that have not been updated in the past 48 hours, " +\
			"so you may want your (scan frequency + scan duration + cache expiry) to be <= 48 hours. Default: " +\
			str(self.CACHE_EXPIRY / 3600) + " hours.")

		# socket_queue_size and thread_pool use the cherrypy defaults,
		# but we hardcode them here rather than refer to the cherrypy variables directly
		# just in case the cherrypy architecture changes.
		parser.add_argument('--socket-queue-size', '--socket-queue',\
			default=5, type=self.positive_integer,
			help="The maximum number of queued connections. Must be a positive integer. Default: %(default)s.")

		parser.add_argument('--thread-pool-size', '--thread-pool', '--threads',\
			default=10, type=self.positive_integer,
			help="The number of worker threads to start up in the pool. Must be a positive integer. Default: %(default)s.")

		args = parser.parse_args()

		# pass ndb the args so it can use any relevant ones from its own parser
		try:
			self.ndb = ndb(args)
		except Exception as e:
			self.ndb = None
			print >> sys.stderr, "Database error: '%s'" % (str(e))

		# same for keymanager
		km = keymanager(args)
		(self.notary_public_key, self.notary_priv_key) = km.get_keys()
		if (self.notary_public_key == None or self.notary_priv_key == None):
			print >> sys.stderr, "Could not get public and private keys."
			exit(1)

		self.web_port = self.DEFAULT_WEB_PORT
		if (args.envport):
			if (self.ENV_PORT_KEY_NAME in os.environ):
				self.web_port = int(os.environ[self.ENV_PORT_KEY_NAME])
			else:
				raise ValueError("--envport option specified but no '%s' variable exists." % \
					(self.ENV_PORT_KEY_NAME))
		elif (args.webport):
			self.web_port = args.webport

		self.cache = None
		if (args.memcache):
			self.cache = cache.Memcache()
		elif (args.memcachier):
			self.cache = cache.Memcachier()
		elif (args.redis):
			self.cache = cache.Redis()
		elif (args.pycache):
			self.cache = cache.Pycache(args.pycache)

		self.create_folder(self.LOG_DIR)

		self.use_sni = args.sni
		self.create_static_index()
		self.args = args

		print "Using public key\n" + self.notary_public_key
Code example #9
	def test_call_ndb_after_extending_argparser(self):
		parser = argparse.ArgumentParser(parents=[ndb.get_parser()])
		args = parser.parse_args()
		db = ndb(args)
		db.get_all_service_names()