def __init__(self, id: ObjectId, db: Database, nm: NotificationManager):
    """Bind the object's id, database handle, notifier and collection handles.

    Note: the `id` parameter name shadows the builtin but is part of the
    public signature and must stay.
    """
    self._id, self.db, self.nm = id, db, nm
    # Collection handles used by this object's queries.
    self.profiles = db.get_collection("profiles")
    self.projects = db.get_collection("projects")
    self.accounts = db.get_collection("users")
def __init__(self, app: Flask, db: Database, nm: NotificationManager):
    """Wire the service to its Flask app, notifier, and Mongo collections."""
    self.app = app
    self.nm = nm
    # Attribute name -> backing collection name (note `requests` maps to
    # the "join_requests" collection).
    for attr, coll_name in (
        ("projects", "projects"),
        ("users", "users"),
        ("invitations", "invitations"),
        ("requests", "join_requests"),
        ("favourites", "favourites"),
    ):
        setattr(self, attr, db.get_collection(coll_name))
async def get_roles(
    database: Database, *, force_refresh: bool = False
) -> list[models.DiscordRole]:
    """
    Get a list of all roles from the cache, or discord API if not available.

    If `force_refresh` is True, the cache is skipped and the roles are updated.
    """
    collection = database.get_collection("roles")

    if force_refresh:
        # Drop all values in the collection so they are re-fetched below.
        await collection.delete_many({})

    # `create_index` creates the index if it does not exist, or passes.
    # This handles TTL on role objects.
    await collection.create_index(
        "inserted_at",
        expireAfterSeconds=60 * 60 * 24,  # 1 day
        name="inserted_at",
    )

    roles = [models.DiscordRole(**json.loads(role["data"])) async for role in collection.find()]

    if not roles:
        # Cache miss: fetch roles from the API and insert into the database.
        roles = await _get_role_info()
        # BUG FIX: only insert when the API actually returned roles —
        # `insert_many` raises when given zero documents.
        if roles:
            await collection.insert_many(
                {
                    "name": role.name,
                    "id": role.id,
                    "data": role.json(),
                    "inserted_at": datetime.datetime.now(tz=datetime.timezone.utc),
                }
                for role in roles
            )
    return roles
class MongoopTrigger(BaseTrigger):
    """History trigger: persists slow operations into a MongoDB collection."""

    def __init__(self, *args, **kwargs):
        super(MongoopTrigger, self).__init__(*args, **kwargs)
        db_name = self.params.get('database', 'mongoop')
        coll_name = self.params.get('collection', 'history')
        self.db = Database(self.mongoop.conn, db_name)
        self.collection = self.db.get_collection(coll_name)
        # One history document per opid; build the index in the background.
        self.collection.create_index(
            [('opid', DESCENDING)], unique=True, background=True)

    def op_nok(self, operations):
        """Bulk-insert the given operations; return True on success, False on error."""
        try:
            if operations:
                self.collection.insert_many(operations)
        except Exception as e:
            logging.error('unable to bulk operations :: {} :: {}'.format(
                self.name, e))
            return False
        else:
            logging.info('run :: {} :: bulk insert {} operations'.format(
                self.name, len(operations)))
            return True
def __init__(self, mongo_serialization_factory: MongoSerializationFactory,
             db: Database):
    """Set up serialization and the user collection, then ensure indexes."""
    self.mongo_serialization_factory = mongo_serialization_factory
    # Resolve the serializer once up front.
    self.mongo_serialization = mongo_serialization_factory.get_instance()
    self.db = db
    self.coll = db.get_collection(UserDaoImpl.COLLECTION_NAME)
    self.create_index()
def __init__(self, app: Flask, db: Database, jwt: JWTManager, nm: NotificationManager):
    """Set up collection handles and register the JWT user/blacklist callbacks."""
    self.users = db.get_collection("users")
    self.profiles = db.get_collection("profiles")
    # In memory store of revoked tokens. WARNING: will allow logged out users to
    # log back in if app is restarted.
    self.revoked_tokens: Set[str] = set()
    self.nm = nm

    # Overrides the default function of jwt.current_user to return a User
    # object (closes over `db` and `nm` from this constructor).
    @jwt.user_loader_callback_loader
    def user_loader_callback(id: str):
        return User(ObjectId(id), db, nm)

    # Check for revoked tokens: a token is blacklisted when its jti is in the
    # in-memory revocation set above.
    @jwt.token_in_blacklist_loader
    def check_if_token_in_blacklist(decrypted_token: Dict[str, str]):
        jti = decrypted_token["jti"]
        return jti in self.revoked_tokens
def test_get_collection(self):
    """get_collection honours explicit options and inherits the rest from the db."""
    db = Database(self.client, "pymongo_test")
    opts = CodecOptions(tz_aware=True, uuid_representation=JAVA_LEGACY)
    wc = WriteConcern(w=2, j=True)

    # All options passed explicitly.
    coll = db.get_collection('foo', opts, ReadPreference.SECONDARY, wc)
    self.assertEqual('foo', coll.name)
    self.assertEqual(opts, coll.codec_options)
    self.assertEqual(JAVA_LEGACY, coll.uuid_subtype)
    self.assertEqual(ReadPreference.SECONDARY, coll.read_preference)
    self.assertEqual(wc.document, coll.write_concern)

    # Only read_preference given: everything else inherits from the database.
    pref = Secondary([{"dc": "sf"}])
    coll = db.get_collection('foo', read_preference=pref)
    self.assertEqual(pref.mode, coll.read_preference)
    self.assertEqual(pref.tag_sets, coll.tag_sets)
    self.assertEqual(db.codec_options, coll.codec_options)
    self.assertEqual(db.uuid_subtype, coll.uuid_subtype)
    self.assertEqual(db.write_concern, coll.write_concern)
def migrateOne(db: database.Database):
    """Migrate one db's "instances" collection from JAVA_LEGACY to STANDARD UUIDs.

    The source data is parked under "instances_old" during the copy so that a
    crashed run can be restored on the next invocation. Uses the legacy
    `count()` / `insert()` pymongo APIs.
    """
    old = db.get_collection("instances_old")
    new = db.get_collection("instances")
    # if already migrated return
    if old.count() == new.count():
        return
    # if previous migration failed restore state: drop the (empty) target and
    # move the parked source back to "instances" before retrying.
    if old.count() > 0 and new.count() == 0:
        new.drop()
        old = db.get_collection("instances_old")
        old.rename("instances")
    # migrate: park the current data as "instances_old", then re-read it with
    # JAVA_LEGACY uuids and write a fresh "instances" with STANDARD uuids.
    new.rename("instances_old")
    old = db.get_collection("instances_old", CodecOptions(uuid_representation=JAVA_LEGACY))
    new = db.get_collection("instances", CodecOptions(uuid_representation=STANDARD))
    # don't overwrite data
    if new.count() > 0:
        return
    # Copy every record; pymongo re-encodes the UUID fields per codec options.
    for record in old.find():
        new.insert(record)
def reset_device_id(db: database.Database, uid: str):
    """Blank out the deviceID of every user sharing the given uid's device."""
    users = db.get_collection("user")
    target = users.find_one({"userInfo.uid": uid})
    if target is None:
        print("user {} not found".format(uid))
        return
    device_id = target["userInfo"]["deviceID"]
    print("find user device id {}".format(device_id))
    # Clear the device id on every account that shares it, not just `uid`.
    outcome = users.update_many(
        {"userInfo.deviceID": device_id},
        {"$set": {"userInfo.deviceID": ""}},
    )
    print(outcome)
def __init__(
    self,
    model_class: Type[T],
    database: Database,
    col_name: str,
    indexes: Optional[list[Union[IndexModel, str]]] = None,
    wrap_object_str_id=True,
):
    """Generic repository bound to one collection, with Decimal codec support.

    String entries in `indexes` are parsed into IndexModel instances before
    the indexes are created.
    """
    registry = TypeRegistry([c() for c in [DecimalCodec]])
    self.collection = database.get_collection(
        col_name, CodecOptions(type_registry=registry))
    if indexes:
        index_models = [
            parse_str_index_model(i) if isinstance(i, str) else i
            for i in indexes
        ]
        self.collection.create_indexes(index_models)
    self.model_class = model_class
    # Wrap string ids as ObjectId only when the model declares ObjectIdStr.
    id_field = model_class.__fields__["id"]
    self.wrap_object_id = id_field.type_ == ObjectIdStr and wrap_object_str_id
def test_update_collection_schema(mongo_handler: MongoHandler, evo_db: Database,
                                  temp_coll_name: str,
                                  scheme_and_data: Tuple[Dict[str, Any]]):
    """Applying a schema must reject invalid docs and accept valid ones."""
    valid_scheme, valid_data, invalid_data = scheme_and_data
    collection = evo_db.get_collection(temp_coll_name)

    assert mongo_handler.update_collection_schema(temp_coll_name, valid_scheme)

    # The validator is now live: bad documents are refused...
    with pytest.raises(WriteError):
        collection.insert_one(invalid_data)
    # ...while conforming ones still insert.
    assert collection.insert_one(valid_data) is not None

    # Updating a collection that does not exist reports failure.
    assert not mongo_handler.update_collection_schema("imaginary", valid_scheme)
async def get_member(
    database: Database, user_id: str, *, force_refresh: bool = False
) -> typing.Optional[models.DiscordMember]:
    """
    Fetch a Discord member, preferring the Mongo cache over the API.

    With `force_refresh` the cached entry is discarded first. None is
    returned when the member object does not exist at all.
    """
    collection = database.get_collection("discord_members")

    if force_refresh:
        await collection.delete_one({"user": user_id})

    # `create_index` is idempotent; it gives cached members a one-hour TTL.
    await collection.create_index(
        "inserted_at",
        expireAfterSeconds=60 * 60,  # 1 hour
        name="inserted_at",
    )

    cached = await collection.find_one({"user": user_id})
    if cached is not None:
        return models.DiscordMember(**json.loads(cached["data"]))

    member = await _fetch_member_api(user_id)
    if not member:
        return None

    document = {
        "user": user_id,
        "data": member.json(),
        "inserted_at": datetime.datetime.now(tz=datetime.timezone.utc),
    }
    await collection.insert_one(document)
    return member
def read_raw_data(f_path: str, db: database.Database) -> None:
    """Load a JSON array from `f_path` and bulk-insert it into the collection
    named after the file's basename (extension included).

    Raises `json.JSONDecodeError` for malformed files; `insert_many` raises
    if the decoded array is empty.
    """
    # FIX: JSON is specified as UTF-8 — don't depend on the platform's
    # default text encoding.
    with open(f_path, encoding="utf-8") as f:
        data = json.load(f)
    db.get_collection(os.path.basename(f_path)).insert_many(data)
def __init__(self, database: Database):
    """Prepare the censored-channels collection, its unique index, and cache."""
    coll = database.get_collection('censoredChannels')
    # One document per channel id.
    coll.create_index('channelId', unique=True)
    self.censored_channels = coll
    self.censored_channels_cache = {}
def __init__(self, db: Database):
    """Bind the handle for the "notifications" collection."""
    # NOTE(review): despite its name, `self.db` holds the notifications
    # *collection*, not the database — renaming would break callers.
    self.db = db.get_collection("notifications")
class Mongoop(object):
    """Polls MongoDB for slow operations and runs the configured triggers.

    Connects to the ``admin`` database of the target mongod/mongos, derives a
    slow-operation filter from the smallest configured threshold, and builds
    one trigger callable per entry of ``op_triggers`` / ``balancer_triggers``.
    """

    def __init__(self, mongodb_host, mongodb_port, mongodb_credentials=None,
                 mongodb_options=None, frequency=0, op_triggers=None,
                 balancer_triggers=None, threshold_timeout=None, query=None):
        try:
            # mongodb connection settings
            self._mongodb_host = mongodb_host
            self._mongodb_port = mongodb_port
            self._mongodb_credentials = mongodb_credentials or {}
            self._mongodb_options = mongodb_options or {}
            # mongoop triggers
            self._frequency = frequency or 30  # seconds between polling cycles
            self.op_triggers = op_triggers or {}
            self.balancer_triggers = balancer_triggers or {}
            self._threshold_timeout = threshold_timeout or 60
            self._query = query or {}
            # NOTE: retrieve the minimum threshold among all op triggers so the
            # base query catches every op at least one trigger cares about.
            if self.op_triggers:
                self._threshold_timeout = min([v['threshold'] for v in self.op_triggers.values() if 'threshold' in v])
            # Base currentOp filter: long-running, real operations only.
            self._base_op_query = {
                'secs_running': {'$gte': self._threshold_timeout},
                'op': {'$ne': 'none'}
            }
            self._base_op_query.update(self._query)
            self.conn = MongoClient(
                host=self._mongodb_host,
                port=self._mongodb_port,
                read_preference=ReadPreference.PRIMARY,
                **self._mongodb_options
            )
            self.db = Database(self.conn, 'admin')
            if self._mongodb_credentials:
                # NOTE: avoid a breaking change since version 0.5 — accept
                # either the legacy 'name' key or the newer 'username' key.
                username = self._mongodb_credentials.get('name') or self._mongodb_credentials.get('username')
                self.db.authenticate(username, self._mongodb_credentials['password'])
            # NOTE: add the callable for each trigger
            self.cycle_op_triggers = []
            self.cycle_balancer_triggers = []
            for t_name, t_values in self.op_triggers.items():
                _callable = self._get_trigger_callable(t_name, t_values)
                if _callable:
                    self.cycle_op_triggers.append(_callable)
            for t_name, t_values in self.balancer_triggers.items():
                _callable = self._get_trigger_callable(t_name, t_values, category='balancer')
                if _callable:
                    self.cycle_balancer_triggers.append(_callable)
        except TypeError as e:
            # NOTE(review): TypeError is treated as an authentication/config
            # error and is the only fatal case (process exits).
            logging.error('unable to authenticate to admin database :: {}'.format(e))
            exit(1)
        except OperationFailure as e:
            logging.error('authentication failure :: {}'.format(e))
        except ConnectionFailure as e:
            logging.error('unable to connect to database :: {}'.format(e))
        else:
            logging.info('start mongoop :: {}'.format(self))

    def __str__(self):
        return u'{} :: frequency={} :: slow_query={} :: op_triggers={} :: balancer_triggers={}'.format(
            self.conn, self._frequency, self._base_op_query,
            len(self.cycle_op_triggers), len(self.cycle_balancer_triggers))

    def __call__(self):
        """ Main loop: poll and fire triggers once per `frequency` seconds. """
        while True:
            start = time()
            self.call_op_triggers()
            self.call_balancer_triggers()
            exec_time = time() - start
            if exec_time < self._frequency:
                # Sleep off the remainder of the polling interval.
                sleep(self._frequency - exec_time)

    def call_op_triggers(self):
        """ Run every op trigger against the currently running slow operations. """
        operations = self._current_op()
        for trigger in self.cycle_op_triggers:
            trigger.run(operations=operations)

    def call_balancer_triggers(self):
        """ Run every balancer trigger with the current balancer state. """
        if not self.balancer_triggers:
            return True
        balancer_state = self._get_balancer_state()
        for trigger in self.cycle_balancer_triggers:
            trigger.run(balancer_state=balancer_state)

    def _get_trigger_callable(self, trigger_name, trigger_params, category='op'):
        """ Instantiate the MongoopTrigger class named by the trigger's type.

        Returns the trigger instance, or None when the module import or the
        construction fails (the error is logged).
        """
        try:
            trigger_module = import_module('mongoop.triggers.{}'.format(trigger_params['type']))
            trigger_class = getattr(trigger_module, 'MongoopTrigger')
            trigger = trigger_class(name=trigger_name, params=trigger_params, mongoop=self, category=category)
        except Exception as e:
            logging.error('unable to retrieve the trigger callable :: {}'.format(e))
        else:
            return trigger

    def _current_op(self):
        """ Get information on operations currently running.

        Queries the virtual "$cmd.sys.inprog" collection with the base slow-op
        filter. Always returns the accumulated dict — note the
        ``finally: return`` swallows any exception after logging it.
        """
        try:
            op_inprog = {}
            coll = self.db.get_collection("$cmd.sys.inprog")
            result = coll.find_one(self._base_op_query)
            # NOTE(review): find_one may return None; the resulting
            # AttributeError is caught and logged below — confirm intended.
            op_inprog = result.get('inprog', {})
        except Exception as e:
            logging.error('unable to retrieve op :: {}'.format(e))
        else:
            if op_inprog:
                logging.info('found {} slow op'.format(len(op_inprog)))
                logging.debug('found {} slow op'.format(len(op_inprog)))
        finally:
            return op_inprog

    def _get_balancer_state(self):
        """ Return the balancer state.

        Returns:
            bool: True if it's running, False otherwise. None when the config
            query fails (the error is logged).
        """
        try:
            if self.conn.config.settings.find_one({'_id': 'balancer', 'stopped': True}):
                logging.info('balancer state :: stopped')
                return False
            logging.info('balancer state :: started')
            return True
        except Exception as e:
            logging.error('unable to get the balancer state :: {}'.format(e))
def __init__(self, mongo_serialization_factory: MongoSerializationFactory, db: Database):
    """Initialise the DAO: serializer, collection handle, and indexes."""
    # Keep both the factory and a ready serializer instance.
    self.mongo_serialization_factory = mongo_serialization_factory
    self.mongo_serialization = self.mongo_serialization_factory.get_instance()
    self.db = db
    self.coll = self.db.get_collection(UserDaoImpl.COLLECTION_NAME)
    # Ensure the DAO's indexes exist before first use.
    self.create_index()
def __init__(self, db: Database, name: str, **kwargs):
    """Grab a collection handle; on driver failure keep None and log the trace."""
    try:
        instance = db.get_collection(name)
    except PyMongoError as e:
        self._instance = None
        PrintException()
    else:
        self._instance = instance
def __init__(self, db: Database, collection_name: str):
    """Remember the database handle and resolve the working collection."""
    self.db: Database = db
    self.collection: Collection = self.db.get_collection(collection_name)
def __init__(self, db: Database):
    """Cache handles for the three collections this service touches."""
    get = db.get_collection
    self.favourites = get("favourites")
    self.users = get("users")
    self.projects = get("projects")
def collection_factory(collection_name: str, database: MongoDatabase):
    """Return a handle to `collection_name` inside `database`.

    MongoDB materialises the collection lazily on first write.
    """
    collection = database.get_collection(collection_name)
    return collection
class Mongoop(object):
    """Polls MongoDB for slow operations and runs the configured triggers.

    Connects to the ``admin`` database, derives a slow-operation filter from
    the smallest configured threshold, and builds one trigger callable per
    entry of ``op_triggers`` / ``balancer_triggers``.
    """

    def __init__(self, mongodb_host, mongodb_port, mongodb_credentials=None,
                 mongodb_options=None, frequency=0, op_triggers=None,
                 balancer_triggers=None, threshold_timeout=None, query=None):
        try:
            # mongodb connection settings
            self._mongodb_host = mongodb_host
            self._mongodb_port = mongodb_port
            self._mongodb_credentials = mongodb_credentials or {}
            self._mongodb_options = mongodb_options or {}
            # mongoop triggers
            self._frequency = frequency or 30  # seconds between polling cycles
            self.op_triggers = op_triggers or {}
            self.balancer_triggers = balancer_triggers or {}
            self._threshold_timeout = threshold_timeout or 60
            self._query = query or {}
            # NOTE: retrieve the minimum threshold among all op triggers, so
            # the base query matches every op at least one trigger cares about.
            if self.op_triggers:
                self._threshold_timeout = min([
                    v['threshold'] for v in self.op_triggers.values()
                    if 'threshold' in v
                ])
            # Base currentOp filter: long-running, real operations only.
            self._base_op_query = {
                'secs_running': {
                    '$gte': self._threshold_timeout
                },
                'op': {
                    '$ne': 'none'
                }
            }
            self._base_op_query.update(self._query)
            self.conn = MongoClient(host=self._mongodb_host,
                                    port=self._mongodb_port,
                                    read_preference=ReadPreference.PRIMARY,
                                    **self._mongodb_options)
            self.db = Database(self.conn, 'admin')
            if self._mongodb_credentials:
                # NOTE: avoid a breaking change since version 0.5 — accept
                # either the legacy 'name' key or the newer 'username' key.
                username = self._mongodb_credentials.get(
                    'name') or self._mongodb_credentials.get('username')
                self.db.authenticate(username,
                                     self._mongodb_credentials['password'])
            # NOTE: add the callable for each trigger
            self.cycle_op_triggers = []
            self.cycle_balancer_triggers = []
            for t_name, t_values in self.op_triggers.items():
                _callable = self._get_trigger_callable(t_name, t_values)
                if _callable:
                    self.cycle_op_triggers.append(_callable)
            for t_name, t_values in self.balancer_triggers.items():
                _callable = self._get_trigger_callable(t_name, t_values,
                                                       category='balancer')
                if _callable:
                    self.cycle_balancer_triggers.append(_callable)
        except TypeError as e:
            # NOTE(review): TypeError is treated as an authentication/config
            # error and is the only fatal case (process exits).
            logging.error(
                'unable to authenticate to admin database :: {}'.format(e))
            exit(1)
        except OperationFailure as e:
            logging.error('authentication failure :: {}'.format(e))
        except ConnectionFailure as e:
            logging.error('unable to connect to database :: {}'.format(e))
        else:
            logging.info('start mongoop :: {}'.format(self))

    def __str__(self):
        return u'{} :: frequency={} :: slow_query={} :: op_triggers={} :: balancer_triggers={}'.format(
            self.conn, self._frequency, self._base_op_query,
            len(self.cycle_op_triggers), len(self.cycle_balancer_triggers))

    def __call__(self):
        """ Main loop: poll and fire triggers once per `frequency` seconds. """
        while True:
            start = time()
            self.call_op_triggers()
            self.call_balancer_triggers()
            exec_time = time() - start
            if exec_time < self._frequency:
                # Sleep off the remainder of the polling interval.
                sleep(self._frequency - exec_time)

    def call_op_triggers(self):
        """ Run every op trigger against the current slow operations. """
        operations = self._current_op()
        for trigger in self.cycle_op_triggers:
            trigger.run(operations=operations)

    def call_balancer_triggers(self):
        """ Run every balancer trigger with the current balancer state. """
        if not self.balancer_triggers:
            return True
        balancer_state = self._get_balancer_state()
        for trigger in self.cycle_balancer_triggers:
            trigger.run(balancer_state=balancer_state)

    def _get_trigger_callable(self, trigger_name, trigger_params, category='op'):
        """ Instantiate the MongoopTrigger class named by the trigger's type.

        Returns the trigger instance, or None when the module import or the
        construction fails (the error is logged).
        """
        try:
            trigger_module = import_module('mongoop.triggers.{}'.format(
                trigger_params['type']))
            trigger_class = getattr(trigger_module, 'MongoopTrigger')
            trigger = trigger_class(name=trigger_name, params=trigger_params,
                                    mongoop=self, category=category)
        except Exception as e:
            logging.error(
                'unable to retrieve the trigger callable :: {}'.format(e))
        else:
            return trigger

    def _current_op(self):
        """ Get information on operations currently running.

        Queries the virtual "$cmd.sys.inprog" collection with the base slow-op
        filter. Always returns the accumulated dict — note the
        ``finally: return`` swallows any exception after logging it.
        """
        try:
            op_inprog = {}
            coll = self.db.get_collection("$cmd.sys.inprog")
            result = coll.find_one(self._base_op_query)
            # NOTE(review): find_one may return None; the resulting
            # AttributeError is caught and logged below — confirm intended.
            op_inprog = result.get('inprog', {})
        except Exception as e:
            logging.error('unable to retrieve op :: {}'.format(e))
        else:
            if op_inprog:
                logging.info('found {} slow op'.format(len(op_inprog)))
                logging.debug('found {} slow op'.format(len(op_inprog)))
        finally:
            return op_inprog

    def _get_balancer_state(self):
        """ Return the balancer state.

        Returns:
            bool: True if it's running, False otherwise. None when the config
            query fails (the error is logged).
        """
        try:
            if self.conn.config.settings.find_one({
                    '_id': 'balancer',
                    'stopped': True
            }):
                logging.info('balancer state :: stopped')
                return False
            logging.info('balancer state :: started')
            return True
        except Exception as e:
            logging.error('unable to get the balancer state :: {}'.format(e))
def __init__(self, database: Database):
    """Open the sticker cache collection and enforce unique tags."""
    cache = database.get_collection('stickersCache')
    cache.create_index('tag', unique=True)
    self.stickers_cache = cache
def __init__(self, db: Database):
    """Keep a handle to the "users" collection."""
    self.users = db.get_collection("users")
def getMongoCol(host: str = 'localhost:27017', db_name: str = 'test',
                col_name: str = 'images'):
    """Return a MongoDB collection handle.

    Generalized: host, database and collection are now parameters whose
    defaults reproduce the previous hard-coded values ('localhost:27017',
    'test', 'images'), so existing no-argument callers are unaffected.
    Note: MongoClient connects lazily; no I/O happens here.
    """
    uri = "mongodb://%s" % (host)
    client = MongoClient(uri)
    db = Database(client, db_name)
    col = db.get_collection(col_name)
    return col
def __init__(self, database: Database):
    """Load the permissions collection, enforce unique roleId, init the cache."""
    perms = database.get_collection('permissions')
    # One permissions document per role.
    perms.create_index('roleId', unique=True)
    self.permissions = perms
    self.permissions_cache = {}