def test_instantiated_codebase(self):
    codebase = Codebase.Instantiate({
        'test_module/__init__.py': '',
        'test_module/inner.py': 'f = lambda: 10',
    })

    self.assertEqual(codebase.getClassByName('test_module.inner.f')(), 10)

    codebase2 = Codebase._FromModule(codebase.getModuleByName("test_module"))

    self.assertEqual(codebase2.getClassByName('test_module.inner.f')(), 10)
    self.assertEqual(codebase.filesToContents, codebase2.filesToContents)
def __init__(self, channel, connectionMetadata=None):
    self._channel = channel
    self._transaction_callbacks = {}
    self._connectionMetadata = connectionMetadata or {}
    self._lock = threading.RLock()

    # transaction of what's in the KV store
    self._cur_transaction_num = 0

    self.serializationContext = TypedPythonCodebase.coreSerializationContext().withoutCompression()

    # a datastructure that keeps track of all the different versions of the objects
    # we have mapped in.
    self._connection_state = DatabaseConnectionState()
    self._connection_state.setSerializationContext(self.serializationContext)
    self._connection_state.setTriggerLazyLoad(self.loadLazyObject)

    self._lazy_object_read_blocks = {}

    self.initialized = threading.Event()
    self.disconnected = threading.Event()

    # for each schema name we've sent, an event that's triggered
    # when the server has acknowledged the schema and given us a definition
    self._schema_response_events = {}

    self._fields_to_field_ids = Dict(FieldDefinition, int)()
    self._field_id_to_schema_and_typename = {}
    self._field_id_to_field_def = Dict(int, FieldDefinition)()

    self.connectionObject = None

    # transaction handlers. These must be nonblocking since we call them under lock
    self._onTransactionHandlers = set()

    self._flushEvents = {}

    # set(schema)
    self._schemas = set()

    self._messages_received = 0

    self._pendingSubscriptions = {}

    # from (schema, typename, fieldname_and_val) -> {'values', 'index_values', 'identities'}
    # where (fieldname_and_val) is OneOf(None, (str, IndexValue))
    self._subscription_buildup = {}

    self._channel.setServerToClientHandler(self._onMessage)

    self._flushIx = 0

    self._largeSubscriptionHeartbeatDelay = 0

    self._logger = logging.getLogger(__name__)

    self._auth_token = None

    self._max_tid_by_schema = {}
    self._max_tid_by_schema_and_type = {}
def instantiate(self, module_name=None):
    """Instantiate a codebase on disk and load it."""
    with _codebase_lock:
        if self.hash not in _codebase_cache:
            try:
                if not os.path.exists(_codebase_instantiation_dir):
                    os.makedirs(_codebase_instantiation_dir)
            except Exception:
                logging.getLogger(__name__).warning(
                    "Exception trying to make directory %s",
                    _codebase_instantiation_dir
                )

            disk_path = os.path.join(_codebase_instantiation_dir, self.hash)

            # preload the files, since they're lazy.
            object_database.current_transaction().db().requestLazyObjects(
                set(self.files.values())
            )

            fileContents = {fpath: file.contents for fpath, file in self.files.items()}

            _codebase_cache[self.hash] = TypedPythonCodebase.Instantiate(fileContents, disk_path)

        if module_name is None:
            return _codebase_cache[self.hash]

        return _codebase_cache[self.hash].getModuleByName(module_name)
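# Hedged usage sketch (not part of the original source): how instantiate() above
# might be called. `db` and `stored_codebase` are placeholders for an existing
# connection and a service_schema.Codebase already present in the database. A view
# is held open because instantiate() reads object_database.current_transaction().
with db.view():
    # The first call writes the files under _codebase_instantiation_dir/<hash> and
    # caches the resulting TypedPythonCodebase keyed by the codebase hash.
    tp_codebase = stored_codebase.instantiate()

    # Passing a dotted module name returns just that module from the cached codebase.
    inner_module = stored_codebase.instantiate("test_module.inner")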
def doWork(self, shouldStop):
    self._logger.info("Configuring ActiveWebService")

    with self.db.view() as view:
        config = Configuration.lookupAny(service=self.serviceObject)
        assert config, "No configuration available."

        self._logger.setLevel(config.log_level)
        host, port = config.hostname, config.port

        login_config = config.login_plugin

        codebase = login_config.codebase
        if codebase is None:
            ser_ctx = TypedPythonCodebase.coreSerializationContext()
        else:
            ser_ctx = codebase.instantiate().serializationContext
        view.setSerializationContext(ser_ctx)

        self.login_plugin = login_config.login_plugin_factory(
            self.db, login_config.auth_plugins, login_config.config
        )

        # register `load_user` method with login_manager
        self.login_plugin.load_user = self.login_manager.user_loader(self.login_plugin.load_user)
        self.authorized_groups_text = self.login_plugin.authorized_groups_text

        self.login_plugin.init_app(self.app)

    self._logger.info("ActiveWebService listening on %s:%s", host, port)

    server = pywsgi.WSGIServer((host, port), self.app, handler_class=WebSocketHandler)

    server.serve_forever()
def __init__(self, port=None):
    self._logger = logging.getLogger(__name__)
    self._port_num = port
    self._auth_token = "fake_auth_token"
    self._tempDirObj = tempfile.TemporaryDirectory()
    self._cleanupFns = []

    self.startObjectDB()

    self.serviceManager = SubprocessServiceManager(
        "localhost",
        "localhost",
        self._port_num,
        os.path.join(self.tempDir, "code"),
        os.path.join(self.tempDir, "storage"),
        self._auth_token,
        isMaster=True,
        shutdownTimeout=.25,
        maxGbRam=128,  # our simulated services don't actually use this much, but we need to reserve
        maxCores=16    # enough to be able to do the simulation.
    )

    self.serializationContext = TypedPythonCodebase.FromRootlevelModule(research_app).serializationContext

    self.serviceManager.start()

    self.db = self.connect()
    self.db.subscribeToSchema(service_schema)
    self.db.setSerializationContext(self.serializationContext)

    self.researchFrontendHelper = ResearchFrontendTestHelper(self)
    self.webServiceHelper = WebServiceHelper(self)
def test_serialize_modules(self):
    codebase = Codebase.FromModule(dummy_test_module)
    sc = codebase.serializationContext

    self.assertIn('.modules.pytz', sc.nameToObject)

    pytz = dummy_test_module.pytz

    self.assertIs(pytz, sc.deserialize(sc.serialize(pytz)))
def main(argv):
    parser = argparse.ArgumentParser(description='Generate types')
    parser.add_argument('dest', nargs='?', default='DefaultGeneratedTestTypes.hpp')
    parser.add_argument('-t', '--testTypes', action='store_true')
    parser.add_argument('-c', '--testTypes2', action='store_true')
    parser.add_argument('-d', '--testTypes3', action='store_true')
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()

    ret = 0
    try:
        if args.testTypes:
            generate_some_types(args.dest)
        elif args.testTypes2:
            codebase = Codebase.FromRootlevelPath("object_database")
            generate_from_codebase(
                codebase,
                args.dest,
                ["object_database.messages.ClientToServer"],
                verbose=args.verbose
            )
            getOrSetTypeResolver(cb_resolver(codebase))
        elif args.testTypes3:
            codebase = Codebase.FromRootlevelPath("typed_python")
            getOrSetTypeResolver(cb_resolver(codebase))

            testTypes = [
                "typed_python.direct_types.generate_types.A",
                "typed_python.direct_types.generate_types.Overlap",
                "typed_python.direct_types.generate_types.NamedTupleTwoStrings",
                "typed_python.direct_types.generate_types.NamedTupleBoolIntStr",
                "typed_python.direct_types.generate_types.NamedTupleIntFloatDesc",
                "typed_python.direct_types.generate_types.NamedTupleBoolListOfInt",
                "typed_python.direct_types.generate_types.NamedTupleAttrAndValues",
                "typed_python.direct_types.generate_types.AnonTest",
                "typed_python.direct_types.generate_types.Bexpress"
            ]
            generate_from_codebase(codebase, args.dest, testTypes, verbose=args.verbose)

            ret = cpp_tests()
    except Exception:
        print("FAILED:\n", traceback.format_exc())
        return 1

    return ret
def test_create_current_codebase(self):
    cb = Codebase.FromRootlevelModule(typed_python)

    serializer = cb.serializationContext

    self.assertIs(
        serializer.deserialize(serializer.serialize(thisIsAFunction)),
        thisIsAFunction
    )
def __init__(self, channel):
    self._channel = channel
    self._transaction_callbacks = {}
    self._lock = threading.Lock()

    # transaction of what's in the KV store
    self._cur_transaction_num = 0

    # a datastructure that keeps track of all the different versions of the objects
    # we have mapped in.
    self._versioned_data = ManyVersionedObjects()

    # a map from lazy object id to (schema, typename)
    self._lazy_objects = {}
    self._lazy_object_read_blocks = {}

    self.initialized = threading.Event()
    self.disconnected = threading.Event()

    self.connectionObject = None

    # transaction handlers. These must be nonblocking since we call them under lock
    self._onTransactionHandlers = []

    self._flushEvents = {}

    # Map: schema.name -> schema
    self._schemas = {}

    self._messages_received = 0

    self._pendingSubscriptions = {}

    # if we have object-level subscriptions to a particular type (e.g. not everything)
    # then, this is from (schema, typename) -> {object_id -> transaction_id} so that
    # we can tell when the subscription should become valid. Subscriptions are permanent
    # otherwise, if we're subscribed, it's 'Everything'
    self._schema_and_typename_to_subscription_set = {}

    # from (schema, typename, field_val) -> {'values', 'index_values', 'identities'}
    self._subscription_buildup = {}

    self._channel.setServerToClientHandler(self._onMessage)

    self._flushIx = 0

    self._largeSubscriptionHeartbeatDelay = 0

    self.serializationContext = TypedPythonCodebase.coreSerializationContext()

    self._logger = logging.getLogger(__name__)
def test_serialize_lambdas_with_references_in_list_comprehensions(self):
    codebase = Codebase.FromModule(dummy_test_module)
    sc = codebase.serializationContext

    # note that it matters that the 'module_level_testfun' is at the module level,
    # because that induces a freevar in a list-comprehension code object
    def f():
        return [module_level_testfun() for _ in range(1)][0]

    self.assertEqual(f(), "testfunction")
    self.assertEqual(sc.deserialize(sc.serialize(f))(), "testfunction")
def test_basic(self):
    codebase = Codebase.Instantiate({
        "a.py": textwrap.dedent("""
            from typed_python import Function, ListOf, OneOf

            @Function
            def g(x):
                return x+x

            @Function
            def f(x: float):
                y = 0
                while x > 0:
                    x -= 1
                    y += g(x)
                return y
            """)
    })

    f = codebase.getClassByName("a.f")

    t0 = time.time()
    compiledCodebase = CodebaseCompiler.compile(codebase)
    compilation_time = time.time() - t0

    t0 = time.time()
    f(100000)
    f_time_first = time.time() - t0

    t0 = time.time()
    compiledCodebase.install()
    install_time = time.time() - t0

    t0 = time.time()
    f(100000)
    f_time_second = time.time() - t0

    self.assertTrue(f_time_second < f_time_first * .1)
    self.assertTrue(install_time < compilation_time * 0.1)
def createOrUpdateService(serviceClass, serviceName, target_count=None, placement=None,
                          isSingleton=None, coresUsed=None, gbRamUsed=None):
    service = service_schema.Service.lookupAny(name=serviceName)

    if not service:
        service = service_schema.Service(name=serviceName, placement=placement or "Any")
        service.service_module_name = serviceClass.__module__
        service.service_class_name = serviceClass.__qualname__

        if not service.service_module_name.startswith("object_database."):
            # find the root of the codebase
            module = sys.modules[serviceClass.__module__]
            root_path = TypedPythonCodebase.rootlevelPathFromModule(module)
            tpCodebase = service_schema.Codebase.createFromRootlevelPath(root_path)
            service.setCodebase(tpCodebase)

    if target_count is not None:
        service.target_count = target_count

    if placement is not None:
        service.placement = placement

    if coresUsed is not None:
        service.coresUsed = coresUsed
    else:
        service.coresUsed = serviceClass.coresUsed

    if gbRamUsed is not None:
        service.gbRamUsed = gbRamUsed
    else:
        service.gbRamUsed = serviceClass.gbRamUsed

    return service
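# Hedged usage sketch (not part of the original source), assuming this function is
# exposed as ServiceManager.createOrUpdateService as in the test setup later in this
# section. `db` and `MyService` are placeholders for an existing connection and a
# service class defined outside the object_database package.
with db.transaction():
    service = ServiceManager.createOrUpdateService(MyService, "MyService", target_count=1)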
def __init__(self, kvstore, auth_token):
    self._kvstore = kvstore
    self._auth_token = auth_token

    self.serializationContext = TypedPythonCodebase.coreSerializationContext().withoutCompression()

    self._lock = threading.RLock()

    self.verbose = False

    self._gc_interval = DEFAULT_GC_INTERVAL

    self._typeMap = None

    # InMemoryChannel or ServerToClientProtocol -> ConnectedChannel
    self._clientChannels = {}

    # id of the next transaction
    self._cur_transaction_num = 0

    # for each key, the last version number we committed
    self._version_numbers = {}
    self._version_numbers_timestamps = {}

    # _field_id to set(subscribed channel)
    self._field_id_to_channel = {}

    # index-stringname to set(subscribed channel)
    self._index_to_channel = Dict(IndexId, object)()

    # for each individually subscribed ID, a set of channels
    self._id_to_channel = {}

    self.longTransactionThreshold = 1.0
    self.logFrequency = 10.0

    self.MAX_NORMAL_TO_SEND_SYNCHRONOUSLY = 1000
    self.MAX_LAZY_TO_SEND_SYNCHRONOUSLY = 10000

    self._transactions = 0
    self._keys_set = 0
    self._index_values_updated = 0
    self._subscriptions_written = 0

    self._subscriptionResponseThread = None

    self._shouldStop = threading.Event()

    # a queue of queue-subscription messages. we have to handle
    # these on another thread because they can be quite large, and we don't want
    # to prevent message processing on the main thread.
    self._subscriptionQueue = queue.Queue()

    # if we're building a subscription up, all the objects that have changed while our
    # lock was released.
    self._pendingSubscriptionRecheck = None

    # fault injector to test this thing
    self._subscriptionBackgroundThreadCallback = None
    self._lazyLoadCallback = None

    self._last_garbage_collect_timestamp = None

    self.identityProducer = IdentityProducer(self.allocateNewIdentityRoot())

    self._logger = logging.getLogger(__name__)

    self._removeOldDeadConnections()
def getSerializationContext(self):
    if self.codebase is None:
        return TypedPythonCodebase.FromRootlevelModule(object_database).serializationContext
    else:
        return self.codebase.instantiate().serializationContext
def hashCurrentCodebase():
    return TypedPythonCodebase.FromRootlevelModule(research_app, ignoreCache=True).hash()
def check_module_name(mod_name):
    mod = sys.modules[mod_name]
    path = Codebase.rootlevelPathFromModule(mod)

    self.assertEqual(os.path.basename(path), 'typed_python')
def serializeFromModule(self, module):
    """Set our serialization context from the root-level project module we want to serialize from."""
    self.setSerializationContext(
        TypedPythonCodebase.FromRootlevelModule(module).serializationContext
    )
def createFromRootlevelPath(rootPath):
    return Codebase.createFromCodebase(
        TypedPythonCodebase.FromRootlevelPath(rootPath)
    )
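# Hedged usage sketch (not part of the original source), assuming this is the
# service_schema.Codebase.createFromRootlevelPath staticmethod used in
# createOrUpdateService above. `db` and the path are placeholders; the call is
# wrapped in a transaction on the assumption that createFromCodebase creates
# database objects for the codebase and its files.
with db.transaction():
    stored_codebase = service_schema.Codebase.createFromRootlevelPath("/path/to/my_project")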
def setUp(self):
    self.harness = ServiceTestHarness()
    self.harness.researchFrontendHelper.createResearchFrontend()

    self.ser_ctx = TypedPythonCodebase.FromRootlevelModule(research_app).serializationContext
def configurableSetUp(
    self,
    hostname='localhost',
    login_plugin_factory=None,  # default: LoginIpPlugin
    login_config=None,
    auth_plugins=(None,),
    module=None,
    db_init_fun=None
):
    self.base_url = "http://{host}:{port}".format(host=hostname, port=WEB_SERVER_PORT)
    login_plugin_factory = login_plugin_factory or LoginIpPlugin
    self.token = genToken()
    log_level = self._logger.getEffectiveLevel()
    loglevel_name = logging.getLevelName(log_level)

    self.server, self.cleanupFn = autoconfigure_and_start_service_manager(
        port=DATABASE_SERVER_PORT,
        auth_token=self.token,
        loglevel_name=loglevel_name,
        own_hostname=hostname,
        db_hostname=hostname
    )

    try:
        self.database = connect(hostname, DATABASE_SERVER_PORT, self.token, retry=True)
        self.database.subscribeToSchema(core_schema, service_schema, active_webservice_schema)

        if db_init_fun is not None:
            db_init_fun(self.database)

        codebase = None
        if module is not None and not module.__name__.startswith("object_database."):
            self.database.serializeFromModule(module)

            root_path = TypedPythonCodebase.rootlevelPathFromModule(module)
            tpcodebase = TypedPythonCodebase.FromRootlevelPath(root_path)

            with self.database.transaction():
                codebase = service_schema.Codebase.createFromCodebase(tpcodebase)

        with self.database.transaction():
            service = ServiceManager.createOrUpdateService(
                ActiveWebService, "ActiveWebService", target_count=0
            )

        ActiveWebService.configureFromCommandline(self.database, service, [
            '--port', str(WEB_SERVER_PORT),
            '--host', hostname,
            '--log-level', loglevel_name,
        ])

        if login_config is None:
            login_config = self.login_config

        ActiveWebService.setLoginPlugin(
            self.database,
            service,
            login_plugin_factory,
            auth_plugins,
            codebase=codebase,
            config=login_config
        )

        with self.database.transaction():
            ServiceManager.startService("ActiveWebService", 1)

        self.waitUntilUp()
    except Exception:
        self.cleanupFn(error=True)
        raise
def __init__(self, service_test_base):
    self._base = service_test_base
    self._db = None

    self.ser_ctx = TypedPythonCodebase.FromRootlevelModule(research_app).serializationContext