def test_file_multidb(self):
    """FileField content stored through a non-default db alias and a custom
    GridFS collection name round-trips correctly."""
    register_connection('test_files', 'test_files')

    class TestFile(Document):
        name = StringField()
        the_file = FileField(db_alias="test_files", collection_name="macumba")

    TestFile.drop_collection()

    # delete old filesystem
    get_db("test_files").macumba.files.drop()
    get_db("test_files").macumba.chunks.drop()

    # First instance
    test_file = TestFile()
    test_file.name = "Hello, World!"
    test_file.the_file.put(six.b('Hello, World!'), name="hello.txt")
    test_file.save()

    # the raw GridFS files document must carry the name we set on put()
    data = get_db("test_files").macumba.files.find_one()
    self.assertEqual(data.get('name'), 'hello.txt')

    test_file = TestFile.objects.first()
    self.assertEqual(test_file.the_file.read(), six.b('Hello, World!'))

    # direct assignment replaces the stored GridFS content on save()
    test_file = TestFile.objects.first()
    test_file.the_file = six.b('HELLO, WORLD!')
    test_file.save()

    test_file = TestFile.objects.first()
    self.assertEqual(test_file.the_file.read(), six.b('HELLO, WORLD!'))
def init_db():
    """Seed the database by executing every script found in the configured
    init-data folder.

    Reads ``INIT_DATA_FOLDER_NAME`` from the app config, resolves it through
    the ResourceLoader, and runs each file's content via ``db.eval``.
    """
    with current_app.app_context():
        folder_name = app.config.get('INIT_DATA_FOLDER_NAME')
        folder_path = ResourceLoader.get().get_resoure(folder_name).path
        if folder_path and os.path.isdir(folder_path):
            for data_file in os.listdir(folder_path):
                # os.path.join instead of manual separator concatenation
                with open(os.path.join(folder_path, data_file), 'r') as mqls:
                    # NOTE(review): db.eval was removed in MongoDB 4.2 —
                    # confirm the target server version supports it.
                    get_db().eval(mqls.read())
def _dump_collections(collection_names=None):
    """Export each collection to ``output_<name>.json`` using mongoexport.

    :param collection_names: iterable of collection names; defaults to every
        collection in the default DB except ``system.indexes``.
    """
    if collection_names is None:
        collection_names = [coll for coll in get_db().collection_names()
                            if coll != 'system.indexes']
    # hoisted out of the loop; also drops the redundant '%s' % str wrapping
    db_name = get_db().name
    for coll in collection_names:
        subprocess.call([
            'mongoexport',
            '-d', db_name,
            '-c', coll,
            '-o', 'output_%s.json' % coll])
def _load_collections(collection_names=None, drop='--drop'):
    """Import ``output_<name>.json`` into each collection using mongoimport.

    :param collection_names: iterable of collection names; defaults to every
        collection in the default DB except ``system.indexes``.
    :param drop: extra flag passed to mongoimport (default ``--drop``); pass
        '' or None to keep existing documents.
    """
    if collection_names is None:
        collection_names = [coll for coll in get_db().collection_names()
                            if coll != 'system.indexes']
    db_name = get_db().name
    for coll in collection_names:
        args = [
            'mongoimport',
            '-d', db_name,
            '-c', coll,
            '--file', 'output_%s.json' % coll]
        # Previously a falsy ``drop`` still produced a bogus '' (or 'None')
        # argv entry via '%s' % drop; only append when a flag was given.
        if drop:
            args.append(drop)
        subprocess.call(args)
def default(self, obj):
    """Serialize mongoengine / BSON values to JSON-friendly types.

    Handles QuerySets, DBRefs, (Embedded)Documents, ObjectIds and date/time
    values; everything else falls through to the base encoder.
    """
    if isinstance(obj, QuerySet):
        return list(obj)
    if isinstance(obj, DBRef):
        # dereference() may return None for a dangling reference
        doc = get_db().dereference(obj)
        if doc is not None:
            # pop() with a default instead of the old bare ``except: pass``,
            # which silently swallowed every exception type
            doc.pop('_cls', None)
            doc.pop('_types', None)
        return doc
    if isinstance(obj, (Document, EmbeddedDocument)):
        doc = obj.to_mongo()
        doc.pop('_cls', None)
        doc.pop('_types', None)
        return doc
    if isinstance(obj, ObjectId):
        return str(obj)
    elif isinstance(obj, datetime):
        # keep date and time in one space-separated string
        return obj.isoformat().replace('T', ' ')
    elif isinstance(obj, (date, time)):
        return obj.isoformat()
    return JSONEncoder.default(self, obj)
def test_connect_uri_with_authsource(self): """Ensure that the connect() method works well with the option `authSource` in URI. This feature was introduced in MongoDB 2.4 and removed in 2.6 """ # Create users c = connect('mongoenginetest') c.admin.system.users.remove({}) c.admin.add_user('username2', 'password') # Authentication fails without "authSource" if IS_PYMONGO_3: test_conn = connect('mongoenginetest', alias='test1', host='mongodb://*****:*****@localhost/mongoenginetest') self.assertRaises(OperationFailure, test_conn.server_info) else: self.assertRaises( ConnectionError, connect, 'mongoenginetest', alias='test1', host='mongodb://*****:*****@localhost/mongoenginetest' ) self.assertRaises(ConnectionError, get_db, 'test1') # Authentication succeeds with "authSource" connect( 'mongoenginetest', alias='test2', host=('mongodb://*****:*****@localhost/' 'mongoenginetest?authSource=admin') ) # This will fail starting from MongoDB 2.6+ db = get_db('test2') self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest') # Clear all users c.admin.system.users.remove({})
def test_connect_uri_without_db(self):
    """Ensure that the connect() method works properly with uri's
    without database_name
    """
    # start from a clean slate, then create admin + db-level users
    c = connect(db='mongoenginetest', alias='admin')
    c.admin.system.users.remove({})
    c.mongoenginetest.system.users.remove({})
    c.admin.add_user("admin", "password")
    c.admin.authenticate("admin", "password")
    c.mongoenginetest.add_user("username", "password")

    # bad credentials in the URI must be rejected
    self.assertRaises(ConnectionError, connect, "testdb_uri_bad",
                      host='mongodb://*****:*****@localhost')

    connect("mongoenginetest", host='mongodb://localhost/')

    conn = get_connection()
    self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

    db = get_db()
    self.assertTrue(isinstance(db, pymongo.database.Database))
    self.assertEqual(db.name, 'mongoenginetest')

    # clean up created users
    c.admin.system.users.remove({})
    c.mongoenginetest.system.users.remove({})
def _delete_expired_gridfs_files(cls, expired_date, collection_name):
    """Delete GridFS files uploaded before ``expired_date`` — both the
    ``files`` documents and their ``chunks``.

    The previous version removed only the ``files`` documents (the inline
    comment claimed chunks are handled automatically, but a raw
    ``delete_many`` on the files collection does not cascade), leaving
    orphaned chunks behind. This mirrors ``_delete_gridfs_files``.
    """
    database = get_db()
    collection = database[collection_name]
    files = collection.files
    chunks = collection.chunks
    # collect the expired file ids first so both sides can be purged
    expired_ids = [
        doc['_id']
        for doc in files.find({"uploadDate": {'$lt': expired_date}},
                              {'_id': 1})
    ]
    if expired_ids:
        files.delete_many({"_id": {'$in': expired_ids}})
        chunks.delete_many({"files_id": {'$in': expired_ids}})
def tags(request):
    """Return keyword-index entries whose id matches the ``match`` request
    parameter (case-insensitive prefix), as a JSON(P) response."""
    error = ''
    data = None
    # /api/api/tags/?callback=jsonp1268179474512&match=exe
    match = request.REQUEST.get('match')
    callback = request.REQUEST.get('callback')
    if match:
        db = get_db()
        # refresh the keyword index before querying it
        update_keyword_index()
        # case-insensitive prefix match against keyword ids
        results = db.keyword.find(
            {"_id": re.compile('^%s*' % match, re.IGNORECASE)})
        data = [i['_id'] for i in results]
    else:
        error = 'no match parameter received'
    if error:
        return JsonResponse(error=error)
    return JsonResponse(data=data, callback=callback)
def __init__(self, ext):
    """
    Constructor.

    :param ext: instance of :class:`EveMongoengine`.
    :raises ConfigException: if only one of MONGO_USERNAME / MONGO_PASSWORD
        is configured.
    """
    # get authentication info; ``.get()`` so a missing key means "no auth"
    # instead of a KeyError (consistent with the sibling constructor that
    # already uses config.get()).
    username = ext.app.config.get('MONGO_USERNAME')
    password = ext.app.config.get('MONGO_PASSWORD')
    auth = (username, password)
    if any(auth) and not all(auth):
        raise ConfigException('Must set both USERNAME and PASSWORD '
                              'or neither')
    # try to connect to db
    self.conn = connect(ext.app.config['MONGO_DBNAME'],
                        host=ext.app.config['MONGO_HOST'],
                        port=ext.app.config['MONGO_PORT'])
    self.models = ext.models
    self.app = ext.app
    # create dummy driver instead of PyMongo, which causes errors
    # when instantiating after config was initialized
    self.driver = type('Driver', (), {})()
    self.driver.db = get_db()
    # authenticate
    if any(auth):
        self.driver.db.authenticate(username, password)
    # helper object for managing PATCHes, which are a bit dirty
    self.updater = MongoengineUpdater(self)
    # map resource -> Mongoengine class
    self.cls_map = ResourceClassMap(self)
def test_connect_uri(self):
    """Ensure that the connect() method works properly with uri's
    """
    c = connect(alias='admin')
    register_db('mongoenginetest', 'admin', 'admin')
    # reset users, then create admin + db-level credentials
    c.admin.system.users.remove({})
    c.mongoenginetest.system.users.remove({})
    c.admin.add_user("admin", "password")
    c.admin.authenticate("admin", "password")
    c.mongoenginetest.add_user("username", "password")

    # bad credentials in the URI must be rejected
    self.assertRaises(
        ConnectionError, connect, "testdb_uri_bad",
        host='mongodb://*****:*****@localhost')

    # Whilst database names can be specified in the URI, they are ignored
    # in mongoengine since the DB/connection split
    connect(host='mongodb://*****:*****@localhost/mongoenginetest')
    register_db('testdb_uri')

    conn = get_connection()
    self.assertTrue(isinstance(conn, pymongo.connection.Connection))

    db = get_db()
    self.assertTrue(isinstance(db, pymongo.database.Database))
    self.assertEqual(db.name, 'testdb_uri')
def _sync(cls, key, src):
    """Upsert a document from the AWS resource dict ``src`` (keyed on
    ``src[key]`` as ``awsid``), archiving the modified state into the
    ``versions`` collection, and return CREATED / MODIFIED / NOCHANGE."""
    tgt, created = cls.objects.get_or_create(awsid=src[key])
    modified = set()
    # copy changed attributes, tracking which keys actually differ
    for k, v in src.items():
        t_v = getattr(tgt, k, None)
        if t_v != v:
            setattr(tgt, k, v)
            modified.add(k)
    if created:
        tgt.ctime = datetime.datetime.utcnow()
        tgt.awsid = src[key]
    elif modified:
        print "Modified", src[key], modified
        # snapshot the updated state into the versions collection
        data = tgt.to_mongo()
        data['res_class'] = cls.__name__.lower()
        del data['_id']
        db = connection.get_db()
        db.versions.insert(data, w=1)
    if created or modified:
        # ltime records the last time anything changed
        tgt.ltime = datetime.datetime.utcnow()
        tgt.save()
    if created:
        return CREATED
    if modified:
        return MODIFIED
    return NOCHANGE
def test_connect_uri_with_authsource(self): """Ensure that the connect() method works well with the option `authSource` in URI. """ # Create users c = connect('mongoenginetest') c.admin.system.users.remove({}) c.admin.add_user('username', 'password') # Authentication fails without "authSource" self.assertRaises( ConnectionError, connect, 'mongoenginetest', alias='test1', host='mongodb://*****:*****@localhost/mongoenginetest' ) self.assertRaises(ConnectionError, get_db, 'test1') # Authentication succeeds with "authSource" connect( 'mongoenginetest', alias='test2', host=('mongodb://*****:*****@localhost/' 'mongoenginetest?authSource=admin') ) db = get_db('test2') self.assertTrue(isinstance(db, pymongo.database.Database)) self.assertEqual(db.name, 'mongoenginetest') # Clear all users c.admin.system.users.remove({})
def test_query_counter(self):
    """query_counter counts each issued insert/find/count operation
    exactly once."""
    connect('mongoenginetest')
    db = get_db()
    collection = db.query_counter
    collection.drop()

    def issue_1_count_query():
        collection.find({}).count()

    def issue_1_insert_query():
        collection.insert_one({'test': 'garbage'})

    def issue_1_find_query():
        collection.find_one()

    counter = 0
    with query_counter() as q:
        self.assertEqual(q, counter)
        # Ensures previous count query did not get counted
        self.assertEqual(q, counter)

        for _ in range(10):
            issue_1_insert_query()
            counter += 1
        self.assertEqual(q, counter)

        for _ in range(4):
            issue_1_find_query()
            counter += 1
        self.assertEqual(q, counter)

        for _ in range(3):
            issue_1_count_query()
            counter += 1
        self.assertEqual(q, counter)
def test_connect_uri(self):
    """Ensure that the connect() method works properly with URIs."""
    # create admin and db-level users to authenticate against
    c = connect(db='mongoenginetest', alias='admin')
    c.admin.system.users.remove({})
    c.mongoenginetest.system.users.remove({})
    c.admin.add_user("admin", "password")
    c.admin.authenticate("admin", "password")
    c.mongoenginetest.add_user("username", "password")

    # PyMongo 2.x raises immediately on bad URI credentials
    if not IS_PYMONGO_3:
        self.assertRaises(
            MongoEngineConnectionError, connect, 'testdb_uri_bad',
            host='mongodb://*****:*****@localhost'
        )

    connect("testdb_uri",
            host='mongodb://*****:*****@localhost/mongoenginetest')

    conn = get_connection()
    self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

    db = get_db()
    self.assertTrue(isinstance(db, pymongo.database.Database))
    self.assertEqual(db.name, 'mongoenginetest')

    # clean up created users
    c.admin.system.users.remove({})
    c.mongoenginetest.system.users.remove({})
def handle_noargs(self, **options):
    """Flush every non-system collection of the given MongoDB alias,
    prompting for confirmation unless running non-interactively."""
    database = options.get('database')
    verbosity = int(options.get('verbosity'))
    interactive = options.get('interactive')
    if not database:
        raise base.CommandError("No MongoDB database specified.")
    db = connection.get_db(database)
    if interactive:
        confirm = raw_input("""You have requested a flush of the database. This will IRREVERSIBLY DESTROY all data currently in the '%s' database (alias '%s'). Are you sure you want to do this? Type 'yes' to continue, or 'no' to cancel: """ % (db.name, database))
    else:
        confirm = 'yes'
    if confirm == 'yes':
        try:
            # keep system.indexes; drop every other collection
            for collection in db.collection_names():
                if collection == 'system.indexes':
                    continue
                db.drop_collection(collection)
        except Exception, e:
            raise base.CommandError("""Database '%s' couldn't be flushed. The full error: %s""" % (database, e))
        if verbosity > 1:
            self.stdout.write("Database '%s' flushed.\n" % database)
def fieldCount(self, key):
    """Group the current queryset by ``key`` (a field name or list of
    names) and count documents per group.

    NOTE(review): relies on the ``collection.group`` command, which was
    removed in MongoDB 4.2 — confirm the target server version.
    """
    collection_name = self._document._meta.get('collection')
    collection = get_db()[collection_name]
    # normalize a single key into a list
    if isinstance(key, str):
        keys = [key]
    else:
        keys = key
    init = { 'count' : 0 }
    # reduce: increment per matching document
    red = ''' function(doc, prev) { prev.count += 1; } '''
    # finalize: expose the count under <key>__count
    fin = ''' function(doc) { doc.%s__count = doc.count; } ''' % keys[0]
    return collection.group(keys, self._query, init, red, fin)
def fieldAverage(self, key, field):
    """Group the current queryset by ``key`` and compute the average of
    ``field`` per group.

    NOTE(review): relies on the ``collection.group`` command, which was
    removed in MongoDB 4.2 — confirm the target server version.
    """
    collection_name = self._document._meta.get('collection')
    collection = get_db()[collection_name]
    init = { 'total' : 0, 'count' : 0 }
    # reduce: accumulate the field's sum and the document count
    red = ''' function(doc, prev) { prev.total += doc.%s; prev.count += 1; } ''' % field
    # finalize: expose the mean under <field>__average
    fin = ''' function(doc) { doc.%s__average = (1.0 * doc.total) / (1.0 * doc.count); } ''' % field
    # normalize a single key into a list
    if isinstance(key, str):
        keys = [key]
    else:
        keys = key
    return collection.group(keys, self._query, init, red, fin)
def setUp(self):
    """Open a connection to the local 'test' database before each test."""
    host, port = '127.0.0.1', 27017
    connect('test', host=host, port=port)
    self.conn = get_connection()
    self.db = get_db()
def test_connect_uri_without_username_password(self):
    """Ensure that the connect() method works properly with a uri,
    when the username/password is specified outside the uri
    """
    # create admin and db-level users to authenticate against
    c = connect(db='mongoenginetest', alias='admin')
    c.admin.system.users.remove({})
    c.mongoenginetest.system.users.remove({})
    c.admin.add_user("admin", "password")
    c.admin.authenticate("admin", "password")
    c.mongoenginetest.add_user("username", "password")

    conn = connect(alias='test_uri_no_username',
                   host='mongodb://localhost/mongoenginetest',
                   username="******",
                   password="******")
    self.assertTrue(isinstance(conn, pymongo.mongo_client.MongoClient))

    # Since the mongodb instance used for testing doesn't require
    # authentication (and turning that on breaks some 85 tests), and there
    # doesn't appear to be any way to check to see if a connection has
    # authenticated, I instead expose some internals of mongoengine to
    # make sure the correct settings have been saved.
    # Without this, instead of the test failing everything would appear to
    # work fine, but there would be no username/password on the
    # connection.
    self.assertEqual(
        me_connection._connection_settings['test_uri_no_username']['username'],
        'username')
    self.assertEqual(
        me_connection._connection_settings['test_uri_no_username']['password'],
        'password')

    db = get_db(alias='test_uri_no_username')
    self.assertTrue(isinstance(db, pymongo.database.Database))
    self.assertEqual(db.name, 'mongoenginetest')

    # clean up created users
    c.admin.system.users.remove({})
    c.mongoenginetest.system.users.remove({})
def __init__(self, ext): """ Constructor. :param ext: instance of :class:`EveMongoengine`. """ # get authentication info username = ext.app.config.get('MONGO_USERNAME') password = ext.app.config.get('MONGO_PASSWORD') auth = (username, password) if any(auth) and not all(auth): raise ConfigException('Must set both USERNAME and PASSWORD ' 'or neither') # try to connect to db self.conn = connect(ext.app.config['MONGO_DBNAME'], host=ext.app.config['MONGO_HOST'], port=ext.app.config['MONGO_PORT']) self.models = ext.models self.app = ext.app # create dummy driver instead of PyMongo, which causes errors # when instantiating after config was initialized self.driver = type('Driver', (), {})() self.driver.db = get_db() # authenticate if any(auth): self.driver.db.authenticate(username, password)
def _delete_gridfs_files(cls, file_id_list, collection_name):
    """Remove the given GridFS files — both the ``files`` documents and
    their associated ``chunks`` — from ``collection_name``."""
    gridfs_coll = get_db()[collection_name]
    gridfs_coll.files.delete_many({"_id": {'$in': file_id_list}})
    gridfs_coll.chunks.delete_many({"files_id": {'$in': file_id_list}})
def create_text_indexes(request,service=None): url = '/create_indexes/' db = get_db() print 'INDEX' print service if service=='facebook' or service == None: print 'ensure facebook index' db.facebook_data.ensure_index([("$**","text")],name="FacebookTextIndex") if service=='twitter' or service == None: print 'ensure twitter index' db.twitter_data.ensure_index([("$**","text")],name="TwitterTextIndex") if service=='foursquare' or service == None: print 'ensure foursquare index' db.foursquare_data.ensure_index([("$**","text")],name="FoursquareTextIndex") if service=='dropbox' or service == None: print 'ensure dropbox index' db.dropbox_data.ensure_index([("$**","text")],name="DropboxTextIndex") if service=='linkedin' or service == None: print 'ensure linkedin index' db.linked_in_data.ensure_index([("$**","text")],name="LinkedInTextIndex") if service=='googledrive' or service == None: print 'ensure googledrive index' db.gdrive_data.ensure_index([("$**","text")],name="GDriveTextIndex") if service=='gcal' or service == None: print 'ensure gcal index' db.gcal_data.ensure_index([("$**","text")],name="GCalendarTextIndex") if service=='googlelattitde' or service == None: print 'ensure latitude index' db.g_latitude_data.ensure_index([("$**","text")],name="GoogleLatitudeTextIndex") return HttpResponseRedirect(url)
def write_file(self, collection_name, data, content_type='', hash_id=False):
    """Store base64-encoded ``data`` in GridFS and return the file _id
    (None for empty input). With ``hash_id`` enabled, deduplicate by md5
    and maintain a reference count in ``ref_<collection_name>``."""
    if data is None or len(data) <= 0:
        return None
    db = get_db()
    fs = gridfs.GridFS(db, collection_name)
    # data coming from redis was base64-encoded; decode it back here
    data = b64decode(data)
    content = BytesIO(utf8(data))
    if not hash_id:
        _id = fs.put(content, content_type=content_type)
        logger.debug(_id)
    else:
        md5 = hashlib.md5(content.getvalue()).hexdigest()
        # TODO: under concurrency this check-then-put races, so duplicate
        # entries with the same md5 can still be stored
        grid_out = fs.find_one({'md5': md5})
        if not grid_out:
            _id = fs.put(content, content_type=content_type)
        else:
            _id = grid_out._id
        # reuse the file's _id directly as the reference-count doc's _id
        logger.debug(_id)
        logger.debug(collection_name)
        db['ref_%s' % collection_name].update({'_id': _id},
                                              {'$inc': {'count': 1}},
                                              upsert=True)
    return _id
def test_delete_version_with_mod_file(self):
    """ Ensure delete version actually deletes the mod_file. """
    from gridfs import GridFS
    from mongoengine.connection import get_db

    # Create a dummy mod
    mod = create_mod(self.contributor).save()

    # Create a modversion
    self.makeOne(matchrequest(
        params=MultiDict(mock_version_data(mod_file=self.mock_file)),
        id=mod.id)).addversion()

    # Reload the mod
    mod.reload()

    # Get the new modversion
    v = mod.versions[0]

    # Create a variable for the mod_file
    mf_id = v.mod_file.grid_id

    # Create request to delete the new modversion
    request = matchrequest(id=v.id)

    # Run
    self.makeOne(request).deleteversion()

    # Reload the mod again
    mod.reload()

    # Check if the version's gone
    self.assertFalse(mod.versions)

    # Check if the mod file has been deleted
    self.assertFalse(GridFS(get_db(), collection='modfs').exists(mf_id))
def test_connect_uri(self):
    """Ensure that the connect() method works properly with URIs."""
    # create admin and db-owner users via the createUser command
    c = connect(db='mongoenginetest', alias='admin')
    c.admin.system.users.delete_many({})
    c.mongoenginetest.system.users.delete_many({})
    c.admin.command("createUser", "admin", pwd="password", roles=["root"])
    c.admin.authenticate("admin", "password")
    c.admin.command("createUser", "username", pwd="password",
                    roles=["dbOwner"])

    # PyMongo 2.x raises immediately on bad URI credentials
    if not IS_PYMONGO_3:
        self.assertRaises(
            MongoEngineConnectionError, connect, 'testdb_uri_bad',
            host='mongodb://*****:*****@localhost'
        )

    connect("testdb_uri",
            host='mongodb://*****:*****@localhost/mongoenginetest')

    conn = get_connection()
    self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

    db = get_db()
    self.assertIsInstance(db, pymongo.database.Database)
    self.assertEqual(db.name, 'mongoenginetest')

    # clean up created users
    c.admin.system.users.delete_many({})
    c.mongoenginetest.system.users.delete_many({})
def _get_unique_filename(name, db_alias=DEFAULT_CONNECTION_NAME,
                         collection_name='fs'):
    """Return a filename not yet present in the GridFS collection, appending
    ``_1``, ``_2``, ... to the stem until the name is free.

    :param name: desired filename (extension preserved).
    :param db_alias: mongoengine connection alias to use.
    :param collection_name: GridFS collection to check for collisions.
    """
    fs = GridFS(get_db(db_alias), collection_name)
    file_root, file_ext = os.path.splitext(name)
    count = itertools.count(1)
    while fs.exists(filename=name):
        # file_ext includes the dot. The old single-argument os.path.join()
        # wrapper around the formatted string was a no-op and was dropped.
        name = "%s_%s%s" % (file_root, next(count), file_ext)
    return name
def setup_mongoengine(cls):
    """Connect to the test database and drop it; on any failure leave
    ``cls.db`` and ``cls._connection`` as None so tests can skip."""
    try:
        cls._connection = connect(db=TESTDB_NAME)
        cls._connection.drop_database(TESTDB_NAME)
        cls.db = get_db()
    except Exception:
        # narrowed from a bare ``except:``, which also swallowed
        # KeyboardInterrupt and SystemExit
        cls.db = None
        cls._connection = None
def _get_fs(request):
    """Return a GridFS handle over the 'images' collection of the
    default database. ``request`` is accepted for the view-helper
    signature but not used."""
    return GridFS(get_db(), collection='images')
def setup_databases(self, **kwargs):
    """Resolve the mongo database behind the 'test' alias, keep its
    connection on ``self.conn``, announce it, and return the alias."""
    alias = 'test'
    name = get_db(alias).name
    self.conn = get_connection(alias)
    print("Creating mongo '{0}' database for alias '{1}'...".format(name, alias))
    return alias
def _fetch_objects(self, doc_type=None):
    """Fetch all references and convert to their document objects
    """
    object_map = {}
    for collection, dbrefs in self.reference_map.iteritems():
        if hasattr(collection, 'objects'):
            # We have a document class for the refs
            col_name = collection._get_collection_name()
            # only fetch refs not already resolved into object_map
            refs = [
                dbref for dbref in dbrefs
                if (col_name, dbref) not in object_map
            ]
            references = collection.objects.in_bulk(refs)
            for key, doc in references.iteritems():
                object_map[(col_name, key)] = doc
        else:
            # Generic reference: use the refs data to convert to document
            # container fields are resolved elsewhere; skip them here
            if isinstance(doc_type, (
                    ListField,
                    DictField,
                    MapField,
            )):
                continue
            refs = [
                dbref for dbref in dbrefs
                if (collection, dbref) not in object_map
            ]
            if doc_type:
                references = doc_type._get_db()[collection].find(
                    {'_id': {
                        '$in': refs
                    }})
                for ref in references:
                    doc = doc_type._from_son(ref)
                    object_map[(collection, doc.id)] = doc
            else:
                references = get_db()[collection].find(
                    {'_id': {
                        '$in': refs
                    }})
                for ref in references:
                    if '_cls' in ref:
                        doc = get_document(ref['_cls'])._from_son(ref)
                    elif doc_type is None:
                        # derive a CamelCase class name from the snake_case
                        # collection name
                        doc = get_document(''.join(
                            x.capitalize()
                            for x in collection.split('_')))._from_son(ref)
                    else:
                        doc = doc_type._from_son(ref)
                    object_map[(collection, doc.id)] = doc
    return object_map
def seed_user_db():
    """Seed the database with some basic users.

    Reads optional JSON seed files from BASE_DIR and prints a summary table
    of the users created.
    """
    db = get_db('default')
    print('Seeding users to <{}> database:'.format(db.name), file=sys.stderr)
    user_data_path = Path(BASE_DIR) / 'seed_user_data.json'
    alloy_data_path = Path(BASE_DIR) / 'seed_alloy_data.json'
    # Default both seeds so a missing file no longer leaves the names
    # undefined (previously a NameError when a seed file was absent).
    user_data = []
    alloy_data = {'alloys': []}
    if os.path.isfile(user_data_path):
        with open(user_data_path) as f:
            user_data = json.load(f)
    if os.path.isfile(alloy_data_path):
        with open(alloy_data_path) as f:
            alloy_data = json.load(f)
    tbl = PrettyTable(['No.', 'Email', 'Name', 'Admin', 'Alloys'])
    for i, u in enumerate(user_data):
        new_user = User(email=u['email'],
                        first_name=u['first_name'],
                        last_name=u['last_name'])
        new_user.set_password(u['password'])
        # a couple of hand-picked users get the full alloy list attached
        if u['first_name'] == 'Tony' or u['first_name'] == 'Natasha':
            for alloy in alloy_data['alloys']:
                new_user.saved_alloys.create(**alloy)
        if u.get("profile", None):
            profile = UserProfile(
                **{
                    'aim': u['profile']['aim'],
                    'highest_education': u['profile']['highest_education'],
                    'sci_tech_exp': u['profile']['sci_tech_exp'],
                    'phase_transform_exp':
                    u['profile']['phase_transform_exp'],
                })
            new_user.profile = profile
        if u.get('is_admin', False):
            profile = AdminProfile(
                position=u['admin_profile']['position'],
                mobile_number=u['admin_profile']['mobile_number'])
            profile.verified = True
            new_user.disable_admin = not u.get('is_admin', False)
            new_user.admin_profile = profile
        new_user.verified = True
        new_user.save()
        tbl.add_row(
            (str(i + 1), u['email'],
             '{} {}'.format(u['first_name'], u['last_name']),
             new_user.is_admin, new_user.saved_alloys.count()))
    tbl.align['Name'] = 'l'
    tbl.align['Email'] = 'l'
    print(tbl)
def rehash(timestamp):
    """
    A script to recalculate all observable data hashes according to CERT
    requirements (can safely be run multiple times)
    """
    PAGE_SIZE = 5000
    cert_builder.apply_customizations()
    db = get_db()
    # non-composition observables only; optionally limited by timestamp
    base_query = {
        'type': 'obs',
        'data.summary.type': {
            '$ne': 'ObservableComposition'
        }
    }
    if timestamp:
        base_query.update({'created_on': {'$gte': timestamp}})
    cursor = db.stix.find(base_query, {'_id': 1})
    bulk = db.stix.initialize_unordered_bulk_op()
    update_count = 0

    def bulk_execute(bulk):
        # best-effort flush: a failed batch is ignored and a fresh op returned
        try:
            bulk.execute()
        except Exception:
            pass
        return db.stix.initialize_unordered_bulk_op()

    for row in cursor:
        update_count += 1
        stix_id = row['_id']
        eo = EdgeObject.load(stix_id)
        ao = eo.to_ApiObject()
        new_hash = ao.localhash()
        # only touch documents whose stored hash actually differs
        bulk.find({
            '_id': stix_id,
            'data.hash': {
                '$ne': new_hash
            }
        }).update({'$set': {
            'data.hash': new_hash
        }})
        # flush every PAGE_SIZE updates
        if not update_count % PAGE_SIZE:
            bulk = bulk_execute(bulk)
    # flush the final partial batch, if any
    if update_count % PAGE_SIZE:
        bulk_execute(bulk)
def _set_site(site_id):
    """Mark the peer site with ``site_id`` as the publish site and clear
    the flag on every other peer site."""
    target_id = ObjectId(site_id)
    db = get_db()
    db.peer_sites.update(
        {'_id': target_id},
        {'$set': {'is_publish_site': True}})
    db.peer_sites.update(
        {'_id': {'$ne': target_id}},
        {'$set': {'is_publish_site': False}},
        multi=True)
def __init__(self):
    """Construct the query_counter
    """
    self.db = get_db()
    # profiling level to restore on exit; set when the context is entered
    self.initial_profiling_level = None
    self._ctx_query_counter = 0  # number of queries issued by the context
    # profiler entries excluded from the count: index housekeeping on
    # system.indexes and killcursors operations
    self._ignored_query = {
        'ns': {'$ne': '%s.system.indexes' % self.db.name},
        'op': {'$ne': 'killcursors'}
    }
def find_duplicates(edge_object):
    """Return the ids of other stix documents sharing ``edge_object``'s
    type and data hash (the object itself is excluded)."""
    query = {
        'data.hash': edge_object.doc['data']['hash'],
        'type': edge_object.ty,
        '_id': {'$ne': edge_object.id_},
    }
    cursor = get_db().stix.find(query, {'_id': 1})
    return [match['_id'] for match in cursor]
def migrate():
    """Drop the legacy ``value_1`` index from the observable collection,
    if present."""
    connect(
        yeti_config.mongodb.database,
        host=yeti_config.mongodb.host,
        port=yeti_config.mongodb.port,
        username=yeti_config.mongodb.username,
        password=yeti_config.mongodb.password,
        connect=True,
    )
    db = get_db()
    for i in list(db.observable.list_indexes()):
        if i.to_dict()["name"] == "value_1":
            db.observable.drop_index("value_1")
def test_connect_uri_with_replicaset(self):
    """Ensure connect() works when specifying a replicaSet."""
    if IS_PYMONGO_3:
        # PyMongo 3 connects lazily, so this succeeds locally
        c = connect(host='mongodb://localhost/test?replicaSet=local-rs')
        db = get_db()
        self.assertTrue(isinstance(db, pymongo.database.Database))
        self.assertEqual(db.name, 'test')
    else:
        # PyMongo < v3.x raises an exception:
        # "localhost:27017 is not a member of replica set local-rs"
        with self.assertRaises(MongoEngineConnectionError):
            c = connect(
                host='mongodb://localhost/test?replicaSet=local-rs')
def setUp(self):
    """Connect to the test database and define the Person test document."""
    connect(db="mongoenginetest")
    self.db = get_db()

    class Person(Document):
        name = StringField()
        age = IntField()

        # plain class attribute, deliberately not a mongoengine field
        non_field = True

        meta = {"allow_inheritance": True}

    self.Person = Person
def remap_backlinks_for_original(original, duplicates):
    """Merge the duplicates' backlinks into the original's backlink
    document and remove the duplicates' own backlink entries."""
    parents_of_original, parents_of_duplicate = STIXDedup.calculate_backlinks(
        original, duplicates)
    parents_of_original.update(parents_of_duplicate)
    for dup in duplicates:
        # Strip out references to duplicates in updated backlinks
        if dup in parents_of_original:
            del parents_of_original[dup]
    try:
        get_db().stix_backlinks.update(
            {'_id': original}, {'$set': {
                'value': parents_of_original
            }},
            upsert=True)
    except PyMongoError as pme:
        log_error(pme, 'adapters/dedup/dedup', 'Updating backlinks failed')
    # the duplicates' own backlink documents are no longer needed
    if parents_of_duplicate:
        try:
            get_db().stix_backlinks.remove({'_id': {'$in': duplicates}})
        except PyMongoError as pme:
            log_error(pme, 'adapters/dedup/dedup',
                      'Removing parent backlinks failed')
def test_register_connection(self):
    """Ensure that connections with different aliases may be registered.
    """
    register_connection("testdb", "mongoenginetest2")

    # the default alias was never registered, so it must fail
    with pytest.raises(ConnectionFailure):
        get_connection()

    conn = get_connection("testdb")
    assert isinstance(conn, pymongo.mongo_client.MongoClient)

    db = get_db("testdb")
    assert isinstance(db, pymongo.database.Database)
    assert db.name == "mongoenginetest2"
def migrate(db):
    """Rename OAuth2Client 'scopes' to 'scope' and collapse list values
    into a single space-joined string.

    NOTE(review): the ``db`` parameter is immediately shadowed by
    ``get_db()`` below — confirm whether callers expect the passed-in
    handle to be used instead.
    """
    log.info('Processing OAuth2Client objects.')
    db = get_db()
    oauth_clients = db.oauth2_client
    # field rename first, then normalize any remaining list values
    oauth_clients.update_many({}, {'$rename': {'scopes': 'scope'}})
    for client in oauth_clients.find():
        # isinstance instead of the previous type() equality check
        if isinstance(client['scope'], list):
            client['scope'] = ' '.join(client['scope'])
            # NOTE(review): Collection.save is deprecated in modern
            # PyMongo — consider replace_one when upgrading the driver.
            oauth_clients.save(client)
    log.info('Completed.')
def test_register_connection(self):
    """Ensure that connections with different aliases may be registered.
    """
    register_connection('testdb')
    register_db('mongoenginetest2', 'testdb', 'testdb')

    # the default alias was never registered, so it must fail
    self.assertRaises(ConnectionError, get_connection)

    conn = get_connection('testdb')
    self.assertTrue(isinstance(conn, pymongo.connection.Connection))

    db = get_db('testdb')
    self.assertTrue(isinstance(db, pymongo.database.Database))
    self.assertEqual(db.name, 'mongoenginetest2')
def ensure_owner_collection_migration():
    """We ran into an issue with 3.0.5 where Requests and Jobs got migrated
    over to the Owner collection. This is in place to resolve that."""
    database = get_db()
    if database["owner"].count():
        logger.warning(
            "Found owner collection, migrating documents to appropriate collections. "
            "This could take a while :)")

        # move Requests back out of the owner collection
        for doc in database["owner"].find({"_cls": "Owner.Request"}):
            try:
                del doc["_cls"]
                if doc.get("has_parent"):
                    doc["parent"] = DBRef("request", doc["parent"].id)
                database["request"].insert_one(doc)
            except Exception:
                logger.error(f"Error migrating request {doc['_id']}")

        # move Jobs back out of the owner collection
        for doc in database["owner"].find({"_cls": "Owner.Job"}):
            try:
                del doc["_cls"]
                database["job"].insert_one(doc)
            except Exception:
                logger.error(f"Error migrating job {doc['_id']}")

        # rewrite file ownership pointers into explicit request/job fields;
        # files with an unknown owner type are dropped
        for doc in database["file"].find():
            try:
                if doc["owner_type"] == "REQUEST":
                    doc["request"] = doc["owner"]
                elif doc["owner_type"] == "JOB":
                    doc["job"] = doc["owner"]
                else:
                    logger.error(
                        f"Unable to migrate file {doc['_id']}: bad owner type")
                    database["file"].delete_one({"_id": doc["_id"]})
                    continue
                doc["owner"] = None
                database["file"].replace_one({"_id": doc["_id"]}, doc)
            except Exception:
                logger.error(f"Error migrating file {doc['_id']}, removing")
                database["file"].delete_one({"_id": doc["_id"]})

        logger.info("Dropping owner collection (this is intended!)")
        database.drop_collection("owner")
def segments_with_both_kinds(self, kind_a, kind_b):
    """Return TextSegments that contain entities of both kinds; when
    kind_a == kind_b, return segments with at least two entities of
    that kind."""
    if kind_a != kind_b:
        return list(
            TextSegment.objects(entities__kind=kind_a)(
                entities__kind=kind_b))
    else:
        # Need a different query here, we need to check that the type
        # appears twice
        db = get_db()
        pipeline = [
            {'$match': {"entities.kind": kind_a}},
            {'$unwind': "$entities"},
            # count entities per (segment, kind) pair
            {'$group': {
                '_id': {'_id': "$_id", 'k': "$entities.kind"},
                'count': {'$sum': 1}
            }},
            # keep segments with >= 2 entities of the requested kind
            {'$match': {'_id.k': kind_a, 'count': {'$gte': 2}}},
            {'$project': {'_id': 0, 'id': "$_id._id"}},
        ]
        objects = db.text_segment.aggregate(pipeline)
        # resolve the matched ids back into TextSegment documents
        segments = list(
            TextSegment.objects.in_bulk(
                [c['id'] for c in objects[u'result']]).values())
        return segments
def test_long_field_is_considered_as_int64(self):
    """
    Tests that long fields are stored as long in mongo, even if long value
    is small enough to be an int.
    """
    class TestLongFieldConsideredAsInt64(Document):
        some_long = LongField()

    doc = TestLongFieldConsideredAsInt64(some_long=42).save()
    db = get_db()
    # raw driver read: the stored value must be a BSON Int64
    assert isinstance(
        db.test_long_field_considered_as_int64.find()[0]["some_long"],
        Int64)
    assert isinstance(doc.some_long, six.integer_types)
def test_connect(self):
    """Ensure that the connect() method works properly."""
    connect('mongoenginetest')
    default_conn = get_connection()
    self.assertIsInstance(default_conn, pymongo.mongo_client.MongoClient)

    default_db = get_db()
    self.assertIsInstance(default_db, pymongo.database.Database)
    self.assertEqual(default_db.name, 'mongoenginetest')

    connect('mongoenginetest2', alias='testdb')
    aliased_conn = get_connection('testdb')
    self.assertIsInstance(aliased_conn, pymongo.mongo_client.MongoClient)
def _get_orphaned_external_observable_compositions(as_at_timestamp):
    """Return the ids of ObservableComposition documents created before
    ``as_at_timestamp`` that have no backlinks (nothing references them)."""
    composition_ids = get_db().stix.find(
        {
            'data.summary.type': 'ObservableComposition',
            'created_on': {
                '$lt': as_at_timestamp
            }
        }, {'_id': 1})
    composition_ids = [doc['_id'] for doc in composition_ids]
    orphaned_ids = []
    # page through the ids so each $in query stays bounded
    for page_index in range(0, len(composition_ids), STIXPurge.PAGE_SIZE):
        chunk_ids = composition_ids[page_index:page_index +
                                    STIXPurge.PAGE_SIZE]
        with_back_links_chunk = get_db().stix_backlinks.find(
            {'_id': {
                '$in': chunk_ids
            }}, {'_id': 1})
        # ids in this chunk without a backlink entry are orphans
        orphaned_ids += list(
            set(chunk_ids) -
            set([doc['_id'] for doc in with_back_links_chunk]))
    return orphaned_ids
def test_query_counter_counts_getmore_queries(self):
    """Iterating past the first batch issues a getmore, which the
    query_counter must include."""
    connect('mongoenginetest')
    db = get_db()
    collection = db.query_counter
    collection.drop()

    # 150 docs > the 101-document first batch, forcing a getmore
    many_docs = [{'test': 'garbage %s' % i} for i in range(150)]
    collection.insert_many(many_docs)

    # first batch of documents contains 101 documents
    with query_counter() as q:
        self.assertEqual(q, 0)
        list(collection.find())
        self.assertEqual(q, 2)  # 1st select + 1 getmore
def get_application_guides_files(filename=None, belongsto=None):
    """Serve an application-guide file from GridFS, looked up by filename
    (and optionally belongsTo) taken from the route args or a POSTed JSON
    body. Returns the file content or a JSON error with an HTTP status."""
    if request.method == 'POST':
        # a POSTed JSON body overrides the route parameters
        query = request.get_json()
        if 'filename' in query:
            filename = query['filename']
            belongsto = query['belongsTo']
    if filename is not None:
        try:
            if belongsto is not None:
                appguide = ApplicationGuides.objects.get(
                    filename=filename, belongsTo=belongsto)
            else:
                appguide = ApplicationGuides.objects.get(filename=filename)
        except Exception as e:
            error = {
                "message": str(e),
                "userMessage":
                "filename - '{}' or belongsTo - '{}' criteria doesn't exists".
                format(filename, belongsto)
            }
            return error, 404
        if appguide:
            dbConn = get_db()
            FS = GridFS(dbConn)
            try:
                # the guide document's _id doubles as its GridFS file id
                file = FS.get(appguide._id)
                response = make_response(file.read())
                response.mimetype = file.content_type
                return response
            except NoFile:
                return {
                    "message":
                    "file can't find in the database. please check the name"
                }, 404
            except Exception as e:
                return {"message": str(e)}, 500
        else:
            return {
                "Error": "filename - {} doesn't exists.".format(filename)
            }, 403
    return {"Error": "filename and belongsTo are required"}, 403
def setUp(self):
    """Connect to a uniquely-named test database and declare the sample
    Person document used by the index tests."""
    suffix = str(randint(0, self._MAX_RAND))
    self.db_name = 'mongoenginetest_IndexesTest_' + suffix
    self.connection = connect(db=self.db_name)
    self.db = get_db()

    class Person(Document):
        name = StringField()
        age = IntField()
        non_field = True

        meta = {"allow_inheritance": True}

    self.Person = Person
def test_connect(self):
    """Ensure that the connect() method works properly."""
    connect("mongoenginetest")

    default_conn = get_connection()
    assert isinstance(default_conn, pymongo.mongo_client.MongoClient)

    default_db = get_db()
    assert isinstance(default_db, pymongo.database.Database)
    assert default_db.name == "mongoenginetest"

    connect("mongoenginetest2", alias="testdb")
    aliased_conn = get_connection("testdb")
    assert isinstance(aliased_conn, pymongo.mongo_client.MongoClient)
def setUp(self):
    """Connect to the test database, record the server version, and
    declare the Person document used throughout these tests."""
    self.connection = connect(db='mongoenginetest')
    self.db = get_db()
    self.mongodb_version = get_mongodb_version()

    class Person(Document):
        name = StringField()
        age = IntField()
        non_field = True

        meta = {"allow_inheritance": True}

    self.Person = Person
def _clear_db(self, db_name: str):
    """Dangerous, make sure you are deleting the right DB"""
    # Guard: only wipe when the connected DB matches the requested name.
    if get_db().name == db_name:
        logging.info('Clearing database: {}'.format(db_name))
        for document_cls in (Result, Molecule, Options, Collection,
                             TaskQueue, Procedure, User):
            document_cls.drop_collection()
        self.client.drop_database(db_name)
def test_connect_with_db_name_external(self):
    """Ensure that connect() works if db name is $external."""
    # NOTE: the original body had a second, duplicate docstring; only the
    # first string literal is the docstring — the second was a dead no-op
    # expression statement, so it has been removed.
    connect("$external")
    conn = get_connection()
    assert isinstance(conn, pymongo.mongo_client.MongoClient)

    db = get_db()
    assert isinstance(db, pymongo.database.Database)
    assert db.name == "$external"

    connect("$external", alias="testdb")
    conn = get_connection("testdb")
    assert isinstance(conn, pymongo.mongo_client.MongoClient)
def test_connect(self):
    """Ensure that the connect() method works properly."""
    connect('mongoenginetest')

    default_conn = get_connection()
    self.assertTrue(isinstance(default_conn, pymongo.connection.Connection))

    default_db = get_db()
    self.assertTrue(isinstance(default_db, pymongo.database.Database))
    self.assertEqual(default_db.name, 'mongoenginetest')

    connect('mongoenginetest2', alias='testdb')
    aliased_conn = get_connection('testdb')
    self.assertTrue(isinstance(aliased_conn, pymongo.connection.Connection))
def test_query_counter_counts_getmore_queries(self):
    """The counter must include the getmore triggered when a find result
    exceeds a single wire batch."""
    connect("mongoenginetest")
    db = get_db()
    coll = db.query_counter
    coll.drop()

    # 150 docs: the initial find batch holds 101, so a getmore follows.
    coll.insert_many([{"test": "garbage %s" % i} for i in range(150)])

    with query_counter() as q:
        assert q == 0
        list(coll.find())
        assert q == 2  # 1st select + 1 getmore
def test_connect_with_replicaset_via_kwargs(self):
    """Ensure connect() works when specifying a replicaSet via the
    connection kwargs
    """
    if not IS_PYMONGO_3:
        # PyMongo < v3.x raises an exception:
        # "localhost:27017 is not a member of replica set local-rs"
        with self.assertRaises(MongoEngineConnectionError):
            connect(replicaset='local-rs')
        return

    client = connect(replicaset='local-rs')
    self.assertEqual(client._MongoClient__options.replica_set_name,
                     'local-rs')

    db = get_db()
    self.assertIsInstance(db, pymongo.database.Database)
    self.assertEqual(db.name, 'test')
def test_connect_with_db_name_external(self):
    """Ensure that connect() works if db name is $external."""
    # NOTE: the original body had a second, duplicate docstring; only the
    # first string literal is the docstring — the second was a dead no-op
    # expression statement, so it has been removed.
    connect('$external')
    conn = get_connection()
    self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)

    db = get_db()
    self.assertIsInstance(db, pymongo.database.Database)
    self.assertEqual(db.name, '$external')

    connect('$external', alias='testdb')
    conn = get_connection('testdb')
    self.assertIsInstance(conn, pymongo.mongo_client.MongoClient)
def __init__(self, alias=DEFAULT_CONNECTION_NAME):
    """Bind the counter to the database behind ``alias`` and build the
    profiler filter that excludes bookkeeping operations from the count."""
    self.db = get_db(alias=alias)
    self.initial_profiling_level = None
    self._ctx_query_counter = 0  # number of queries issued by the context

    index_ns = "%s.system.indexes" % self.db.name
    self._ignored_query = {
        "ns": {"$ne": index_ns},
        "op": {"$ne": "killcursors"},  # MONGODB < 3.2
        "command.killCursors": {"$exists": False},  # MONGODB >= 3.2
    }