def __init__(self, cfg_manager, session_pool_id, timeout):
    # session_pool_id makes no sense in the in-memory gateway, but it is
    # kept so the constructor signature matches the other session gateways.
    object.__init__(self)

    # Serializing every session on each read/write costs performance, but it
    # isolates sessions from each other exactly as if they lived in a real
    # database backend.  (NOTE(review): the config key is named
    # SESSION_MEMORY_GATEWAY_SERIALIZE in configuration_doc.)
    self._serialize   = cfg_manager[configuration_doc.SESSION_MEMORY_GATEWAY_SERIALIZE]
    self._cfg_manager = cfg_manager
    self._generator   = SessionGenerator.SessionGenerator()
    self._serializer  = SessionSerializer.SessionSerializer()

    # Sessions are sharded by the first character of the session id:
    #   first_char -> (shard_lock, { session_id : session })
    self._sessions = {}
    #   first_char -> { session_id : session_lock }
    self._session_locks = {}

    self._timeout = timeout

    for shard_char in self._generator.alphabet:
        self._sessions[shard_char]      = (threading.RLock(), {})
        self._session_locks[shard_char] = {}
def do_send_async_file(self, session, file_content, file_info):
    """
    Runs the experiment server's send_file_to_device asynchronously, by
    running the call on its own thread and storing the result, to be
    returned through the check_async_command_status request.

    @param session: Session
    @param file_content: Content of the file to send (passed straight
        through to the experiment server).
    @param file_info: Additional information about the file being sent
        (passed straight through to the experiment server).
    @return The identifier of the new asynchronous request.  Its status and
        eventual result can later be polled through
        check_async_command_status.
    """
    # Call the async method which will run on its own thread. Store the object
    # it returns, so that we can know whether it has finished.
    threadobj = self._send_async_file_t(session, file_content, file_info)

    # Create a new identifier for the new request
    # TODO: Consider refactoring this
    gen = SessionGenerator.SessionGenerator()
    request_id = gen.generate_id(16)

    # Store the new request in our dictionary, creating the per-session
    # sub-dictionary on first use.
    session_id = session['session_id']
    self._async_requests.setdefault(session_id, {})[request_id] = threadobj

    return request_id
def do_send_async_command(self, session, command):
    """
    Runs the experiment server's send_command_to_device asynchronously, by
    running the call on its own thread and storing the result, to be
    returned through the check_async_command_status request.

    @param session: Session
    @param command: Command to execute asynchronously
    @return Identifier of the newly registered asynchronous request.
    """
    # Launch the command on its own thread; keep the returned thread object
    # around so its completion status can be polled later.
    pending = self._send_async_command_t(session, command)

    # Mint an identifier for this request.
    # TODO: Consider refactoring this
    generator = SessionGenerator.SessionGenerator()
    new_request_id = generator.generate_id(16)

    # Register the request under its session, lazily creating the
    # per-session request table.
    sid = session['session_id']
    if sid not in self._async_requests:
        self._async_requests[sid] = {}
    self._async_requests[sid][new_request_id] = pending

    return new_request_id
def __init__(self, cfg_manager, session_pool_id, timeout):
    """Build the SQLAlchemy-backed session gateway.

    Connection settings come from self._parse_config(); the engine itself
    is a class-level singleton, created only by the first instance.
    """
    super(SessionSqlalchemyGateway, self).__init__()

    self.session_pool_id = session_pool_id
    self.timeout         = timeout
    self.cfg_manager     = cfg_manager

    engine_name, host, port, dbname, username, password = self._parse_config()

    # Publish the connection parameters as class attributes so they are
    # shared across instances.
    SessionSqlalchemyGateway.username = username
    SessionSqlalchemyGateway.password = password
    SessionSqlalchemyGateway.host     = host
    SessionSqlalchemyGateway.port     = port
    SessionSqlalchemyGateway.dbname   = dbname

    self._generator  = SessionGenerator.SessionGenerator()
    self._serializer = SessionSerializer.SessionSerializer()
    self._lock       = DbLock.DbLock(cfg_manager, session_pool_id)

    # Lazily create the shared engine exactly once per process.
    if SessionSqlalchemyGateway.engine is None:
        getconn = generate_getconn(engine_name, username, password, host, port, dbname)

        if engine_name == 'sqlite':
            # sqlite needs no network pool; NullPool opens/closes per use.
            sqlalchemy_engine_str = 'sqlite:///%s' % get_sqlite_dbname(dbname)
            pool = sqlalchemy.pool.NullPool(getconn)
        else:
            port_str = '' if port is None else ':%s' % port
            sqlalchemy_engine_str = "%s://%s:%s@%s%s/%s" % (engine_name, username, password, host, port_str, dbname)
            # Recycle connections hourly to dodge server-side idle timeouts.
            pool = sqlalchemy.pool.QueuePool(getconn, pool_size=15, max_overflow=20, recycle=3600)

        SessionSqlalchemyGateway.engine = sqlalchemy.create_engine(sqlalchemy_engine_str, convert_unicode=True, echo=False, pool=pool)

    self._session_maker = sessionmaker(bind=self.engine, autoflush=True, autocommit=False)
def __init__(self, cfg_manager, session_pool_id, timeout):
    """Build the Redis-backed session gateway.

    The pool id must be integer-like (it selects a shared connection
    pool); a timeout of None means "never expire" (ten years).
    """
    # Validate that the pool id looks like an integer.
    try:
        int(session_pool_id)
    except ValueError:
        raise TypeError("Session pool id needs to be an integer. Got '%s'" % session_pool_id)

    self.session_pool_id = session_pool_id
    # 10 years stands in for "never expire".
    self.timeout = 10 * 365 * 24 * 3600 if timeout is None else timeout
    self.cfg_manager = cfg_manager

    # session_lock_key / session_key_prefix are class variables filled from
    # the configuration.
    (host, port, db_index, self.session_lock_key, self.session_key_prefix) = self._parse_config()

    self._generator  = SessionGenerator.SessionGenerator()
    self._serializer = SessionSerializer.SessionSerializer()

    # Reuse the connection pool for this pool id if one already exists;
    # otherwise create it and register it on the class.
    pool = self.redisPools.get(self.session_pool_id)
    if pool is None:
        pool = redis.ConnectionPool(host=host, port=port, db=db_index)
        SessionRedisGateway.redisPools[self.session_pool_id] = pool

    # Factory for redis clients bound to the shared pool.
    self._client_creator = lambda: redis.Redis(connection_pool=pool)