def reconnect(self):
    """
    Reconnect to the database and rebuild indices if necessary. Users
    should typically not have to call this method.
    """
    db_connection = getDbConnection()
    self.database = db_connection.get_default_database()
    self.collection = MongoProxy(self.database[self.name])

    for index in self._indices:
        if isinstance(index, (list, tuple)):
            # A (keySpec, kwargs) pair, e.g. ([('field', 1)], {'unique': True})
            self.collection.ensure_index(index[0], **index[1])
        else:
            # A plain field name or key specification
            self.collection.ensure_index(index)

    if isinstance(self._textIndex, dict):
        # Build a compound text index over all configured fields, using the
        # dict values as per-field weights.
        textIdx = [(k, 'text') for k in self._textIndex]
        try:
            self.collection.ensure_index(
                textIdx, weights=self._textIndex,
                default_language=self._textLanguage)
        except pymongo.errors.OperationFailure:
            # Text search requires server-side support; degrade gracefully.
            print(
                TerminalColor.warning('WARNING: Text search not enabled.'))
def getDbConnection(uri=None, replicaSet=None):
    """
    Get a MongoClient object that is connected to the configured database.
    We lazy-instantiate a module-level singleton, the MongoClient objects
    manage their own connection pools internally.

    :param uri: if specified, connect to this mongo db rather than the one
        in the config.
    :param replicaSet: if uri is specified, use this replica set.
    """
    global _dbClients

    origKey = (uri, replicaSet)
    if origKey in _dbClients:
        return _dbClients[origKey]

    if uri is None or uri == '':
        dbConf = getDbConfig()
        uri = dbConf.get('uri')
        replicaSet = dbConf.get('replica_set')

    clientOptions = {
        'connectTimeoutMS': 15000,
        # This is the maximum time between when we fetch data from a cursor.
        # If it times out, the cursor is lost and we can't reconnect. If it
        # isn't set, we have issues with replica sets when the primary goes
        # down. This value can be overridden in the mongodb uri connection
        # string with the socketTimeoutMS.
        'socketTimeoutMS': 60000,
    }

    if uri is None:
        # NOTE(review): this branch was garbled in the source; reconstructed
        # as "no configured URI -> log a placeholder and let MongoClient use
        # its default of localhost" -- confirm against upstream history.
        dbUriRedacted = 'mongodb://localhost:27017'
    else:
        # Strip any "user:password@" credentials from the URI before logging.
        parts = uri.split('@')
        if len(parts) == 2:
            dbUriRedacted = 'mongodb://' + parts[1]
        else:
            dbUriRedacted = uri

    if replicaSet:
        client = pymongo.MongoReplicaSetClient(
            uri, replicaSet=replicaSet,
            read_preference=ReadPreference.SECONDARY_PREFERRED,
            **clientOptions)
    else:
        client = pymongo.MongoClient(uri, **clientOptions)

    # Wrap the client so transient AutoReconnect errors are retried.
    client = MongoProxy(client, logger=logger)
    # Cache under both the original key and the resolved (uri, replicaSet)
    # so later calls with either form hit the singleton.
    _dbClients[origKey] = _dbClients[(uri, replicaSet)] = client

    desc = ''
    if replicaSet:
        desc += ', replica set: %s' % replicaSet
    print(
        TerminalColor.info('Connected to MongoDB: %s%s' % (dbUriRedacted,
                                                           desc)))
    return client
def __init__(self):
    """
    Initialize the model: set up default index/filter state, run the
    subclass's ``initialize`` hook, then connect to the database and
    ensure all declared indices exist.
    """
    self.name = None
    self._indices = []
    self._textIndex = None
    self._textLanguage = None

    # Per-access-level sets of document keys exposed by filtering.
    self._filterKeys = {
        AccessType.READ: set(),
        AccessType.WRITE: set(),
        AccessType.ADMIN: set(),
        AccessType.SITE_ADMIN: set()
    }

    # Subclass hook; expected to set self.name and register indices.
    self.initialize()

    db_connection = getDbConnection()
    self.database = db_connection.get_default_database()
    self.collection = MongoProxy(self.database[self.name])

    for index in self._indices:
        if isinstance(index, (list, tuple)):
            # A (keySpec, kwargs) pair, e.g. ([('field', 1)], {'unique': True})
            self.collection.ensure_index(index[0], **index[1])
        else:
            self.collection.ensure_index(index)

    if isinstance(self._textIndex, dict):
        # Build a compound text index over all configured fields, using the
        # dict values as per-field weights.
        textIdx = [(k, 'text') for k in self._textIndex]
        try:
            self.collection.ensure_index(
                textIdx, weights=self._textIndex,
                default_language=self._textLanguage)
        except pymongo.errors.OperationFailure:
            # Text search requires server-side support; degrade gracefully.
            print(
                TerminalColor.warning('WARNING: Text search not enabled.'))
def mongoCollection(self, connectionUri, collectionName):
    """
    Open a collection in the default database of the given mongo server.

    :param connectionUri: mongo connection URI to connect with.
    :param collectionName: name of the collection in the default database.
    :returns: the collection, wrapped in a MongoProxy.
    """
    # TODO not sure if this is a good idea to do this db stuff here
    # maybe this suggests a new model?
    from girder.models import getDbConnection
    from girder.external.mongodb_proxy import MongoProxy

    connection = getDbConnection(connectionUri)
    database = connection.get_default_database()
    return MongoProxy(database[collectionName])
def __init__(self, assetstore):
    """
    :param assetstore: The assetstore to act on.
    """
    super(GridFsAssetstoreAdapter, self).__init__(assetstore)
    recent = False
    try:
        # Guard in case the connectionArgs is unhashable
        key = (self.assetstore.get('mongohost'),
               self.assetstore.get('replicaset'),
               self.assetstore.get('shard'))
        if key in _recentConnections:
            # We connected with these args recently enough to skip the
            # index/shard setup below.
            recent = (time.time() - _recentConnections[key]['created'] <
                      RECENT_CONNECTION_CACHE_TIME)
    except TypeError:
        # Unhashable connection args: disable the recent-connection cache.
        key = None
    try:
        # MongoClient automatically reuses connections from a pool, but we
        # want to avoid redoing ensureChunkIndices each time we get such a
        # connection.
        client = getDbConnection(self.assetstore.get('mongohost'),
                                 self.assetstore.get('replicaset'),
                                 quiet=recent)
        self.chunkColl = MongoProxy(client[self.assetstore['db']].chunk)
        if not recent:
            _ensureChunkIndices(self.chunkColl)
            if self.assetstore.get('shard') == 'auto':
                _setupSharding(self.chunkColl)
            if key is not None:
                # Crude bound on cache growth: wipe it when full rather
                # than evicting individual entries.
                if len(_recentConnections) >= RECENT_CONNECTION_CACHE_MAX_SIZE:
                    _recentConnections.clear()
                _recentConnections[key] = {
                    'created': time.time()
                }
    except pymongo.errors.ConnectionFailure:
        # Mark the assetstore unusable instead of raising; chunkColl holds
        # a sentinel string describing the failure.
        logger.error('Failed to connect to GridFS assetstore %s',
                     self.assetstore['db'])
        self.chunkColl = 'Failed to connect'
        self.unavailable = True
    except pymongo.errors.ConfigurationError:
        logger.exception('Failed to configure GridFS assetstore %s',
                         self.assetstore['db'])
        self.chunkColl = 'Failed to configure'
        self.unavailable = True
def getDbConnection(uri=None, replicaSet=None, autoRetry=True, quiet=False,
                    **kwargs):
    """
    Get a MongoClient object that is connected to the configured database.
    We lazy-instantiate a module-level singleton, the MongoClient objects
    manage their own connection pools internally. Any extra kwargs you pass
    to this method will be passed through to the MongoClient.

    :param uri: if specified, connect to this mongo db rather than the one
        in the config.
    :param replicaSet: if uri is specified, use this replica set.
    :param autoRetry: if this connection should automatically retry
        operations in the event of an AutoReconnect exception. If you're
        testing the connection, set this to False. If disabled, this also
        will not cache the mongo client, so make sure to only disable if
        you're testing a connection.
    :type autoRetry: bool
    :param quiet: if true, don't logprint warnings and success.
    :type quiet: bool
    """
    global _dbClients

    origKey = (uri, replicaSet)
    if origKey in _dbClients:
        return _dbClients[origKey]

    dbConf = getDbConfig()

    if uri is None or uri == '':
        uri = dbConf.get('uri')
        replicaSet = dbConf.get('replica_set')

    clientOptions = {
        # This is the maximum time between when we fetch data from a cursor.
        # If it times out, the cursor is lost and we can't reconnect. If it
        # isn't set, we have issues with replica sets when the primary goes
        # down. This value can be overridden in the mongodb uri connection
        # string with the socketTimeoutMS.
        'socketTimeoutMS': 60000,
        'connectTimeoutMS': 20000,
        'serverSelectionTimeoutMS': 20000,
        'readPreference': 'secondaryPreferred',
        'replicaSet': replicaSet,
        'w': 'majority'
    }

    # All other options in the [database] section will be passed directly as
    # options to the mongo client
    for opt, val in six.viewitems(dict(dbConf)):
        if opt not in {'uri', 'replica_set'}:
            clientOptions[opt] = val

    # Finally, kwargs take precedence
    clientOptions.update(kwargs)

    if uri is None:
        # NOTE(review): this branch was garbled in the source; reconstructed
        # as "no configured URI -> fall back to a default local database" --
        # confirm against upstream history.
        dbUriRedacted = 'mongodb://localhost:27017/girder'
        uri = dbUriRedacted
    else:
        # Strip any "user:password@" credentials from the URI before logging.
        parts = uri.split('@')
        if len(parts) == 2:
            dbUriRedacted = 'mongodb://' + parts[1]
        else:
            dbUriRedacted = uri

    # If the connection URI overrides any option, honor it above our own
    # settings. This must run after the None fallback above, since
    # urlparse(None) would raise.
    uriParams = urllib.parse.parse_qs(urllib.parse.urlparse(uri).query)
    for key in uriParams:
        if key in clientOptions:
            del clientOptions[key]

    client = pymongo.MongoClient(uri, **clientOptions)

    if not quiet:
        desc = ''
        if replicaSet:
            desc += ', replica set: %s' % replicaSet
        logprint.info('Connecting to MongoDB: %s%s' % (dbUriRedacted, desc))

    # Make sure we can connect to the mongo server at startup
    client.server_info()

    if autoRetry:
        # Wrap the client so transient AutoReconnect errors are retried.
        client = MongoProxy(client, logger=logger)
        # Only cache retry-wrapped clients (see the autoRetry docstring).
        _dbClients[origKey] = _dbClients[(uri, replicaSet)] = client

    return client
def getDbConnection(uri=None, replicaSet=None, autoRetry=True, **kwargs):
    """
    Get a MongoClient object that is connected to the configured database.
    We lazy-instantiate a module-level singleton, the MongoClient objects
    manage their own connection pools internally. Any extra kwargs you pass
    to this method will be passed through to the MongoClient.

    :param uri: if specified, connect to this mongo db rather than the one
        in the config.
    :param replicaSet: if uri is specified, use this replica set.
    :param autoRetry: if this connection should automatically retry
        operations in the event of an AutoReconnect exception. If you're
        testing the connection, set this to False. If disabled, this also
        will not cache the mongo client, so make sure to only disable if
        you're testing a connection.
    :type autoRetry: bool
    """
    global _dbClients

    origKey = (uri, replicaSet)
    if origKey in _dbClients:
        return _dbClients[origKey]

    if uri is None or uri == '':
        dbConf = getDbConfig()
        uri = dbConf.get('uri')
        replicaSet = dbConf.get('replica_set')

    clientOptions = {
        # This is the maximum time between when we fetch data from a cursor.
        # If it times out, the cursor is lost and we can't reconnect. If it
        # isn't set, we have issues with replica sets when the primary goes
        # down. This value can be overridden in the mongodb uri connection
        # string with the socketTimeoutMS.
        'socketTimeoutMS': 60000,
        'connectTimeoutMS': 20000,
        'serverSelectionTimeoutMS': 20000,
        'read_preference': ReadPreference.SECONDARY_PREFERRED,
        'replicaSet': replicaSet
    }
    # kwargs take precedence over our defaults.
    clientOptions.update(kwargs)

    if uri is None:
        # NOTE(review): this branch was garbled in the source; reconstructed
        # as "no configured URI -> log a placeholder and let MongoClient use
        # its default of localhost" -- confirm against upstream history.
        dbUriRedacted = 'mongodb://localhost:27017'
    else:
        # Strip any "user:password@" credentials from the URI before logging.
        parts = uri.split('@')
        if len(parts) == 2:
            dbUriRedacted = 'mongodb://' + parts[1]
        else:
            dbUriRedacted = uri

    client = pymongo.MongoClient(uri, **clientOptions)

    # Make sure we can connect to the mongo server at startup
    client.server_info()

    if autoRetry:
        # Wrap the client so transient AutoReconnect errors are retried.
        client = MongoProxy(client, logger=logger)
        # Only cache retry-wrapped clients (see the autoRetry docstring).
        _dbClients[origKey] = _dbClients[(uri, replicaSet)] = client

    desc = ''
    if replicaSet:
        desc += ', replica set: %s' % replicaSet
    print(
        TerminalColor.info('Connected to MongoDB: %s%s' % (dbUriRedacted,
                                                           desc)))
    return client