def loadTestDbDataFromText(text):
    """Load a JSON fixture blob into the configured test database.

    The text is a JSON object mapping collection names to lists of documents
    (BSON-extended types decoded via json_util). Documents are inserted in
    batches of 20 to keep individual insert calls small.
    """
    cfg = MongoDBConfig.getInstance()
    db = getattr(cfg.connection, cfg.database_name)
    fixtures = json.loads(text, object_hook=json_util.object_hook)
    batchSize = 20
    for tableName, documents in fixtures.items():
        if not documents:
            continue
        collection = getattr(db, tableName)
        for start in range(0, len(documents), batchSize):
            collection.insert(documents[start:start + batchSize])
def decoratorFn(userFunction):
    """Decorator body: count invocations of userFunction under a qualified name.

    Uses the enclosing scope's `className` (optional prefix) and
    `usageCounters` (call counter). Also clears any cached results for this
    function up front so the test starts from a clean state.
    """
    # __name__ instead of the Python-2-only func_name alias; identical value on
    # Python 2 and keeps the code portable.
    funcName = userFunction.__name__
    if className is not None:
        funcName = '%s.%s' % (className, funcName)
    # At the very beginning we clear out any prior cached results for this function so that we're starting the test
    # from a clean state.
    MongoDBConfig.getInstance().connection.stamped.cache.remove({'func_name':funcName})

    @functools.wraps(userFunction)
    def wrappedFn(*args, **kwargs):
        usageCounters.count(funcName)
        return userFunction(*args, **kwargs)
    return wrappedFn
def issueQueries(queries):
    """Copy query results from the dev database into the local fixtures DB.

    `queries` is an iterable of (collectionName, query) pairs; each query is
    run against the dev 'stamped' DB and the matching documents are inserted
    into the same-named collection of the local 'stamped_fixtures' DB.
    """
    # TODO: Pull this out to a named constant somewhere, or be more intelligent about this, or something.
    devDbConfig = MongoDBConfig()
    devDbConfig.config = AttributeDict({
        'mongodb': {
            'hosts': [get_db_config('peach.db3')]
        }
    })
    devDb = devDbConfig.connection.stamped
    # TODO: This whole "stamped_fixtures" thing really needs to be a constant.
    localDb = MongoDBConfig.getInstance().connection.stamped_fixtures
    for collectionName, query in queries:
        documents = list(getattr(devDb, collectionName).find(query))
        getattr(localDb, collectionName).insert(documents)
def wrappedFn(*args, **kwargs):
    """Cache-aware wrapper: return a cached result when a fresh one exists,
    otherwise call userFunction and upsert its result into the cache table.

    Recognizes a reserved 'force_recalculate' keyword argument (consumed here,
    never forwarded to userFunction) that bypasses a cache hit and refreshes
    the entry. All other names (userFunction, memberFn, userFnName,
    disableStaleness, maxStaleness, exceptionOnCacheMiss, cacheTableName,
    schemaClassesMap) come from the enclosing decorator scope.
    """
    global cacheTableError
    # BUGFIX: pop the reserved kwarg up front so that (a) it is never forwarded
    # into userFunction on the early-return path below, and (b) it is not folded
    # into the call hash -- previously a forced call was hashed with
    # force_recalculate=True still in kwargs, so its refreshed result was stored
    # under a different key and could never replace the stale normal entry.
    force_recalculate = kwargs.pop('force_recalculate', False)
    if cacheTableError is not None:
        # We haven't been able to connect to the cache. MongoDB may not be running. Just issue the call.
        return userFunction(*args, **kwargs)
    now = datetime.datetime.now()
    fullArgs = args
    if memberFn == True:
        # Bound-method case: drop self from the hashed args but keep fullArgs
        # intact for the actual invocation.
        self = args[0]
        fnName = '%s.%s' % (self.__class__.__name__, userFnName)
        args = args[1:]
    else:
        fnName = userFnName
    assertCallIsSerializable(args, kwargs)
    callHash = hashFunctionCall(fnName, args, kwargs)
    try:
        connection = MongoDBConfig.getInstance().connection
        dbname = MongoDBConfig.getInstance().database_name
        table = getattr(getattr(connection, dbname), cacheTableName)
        result = table.find_one({'_id':callHash})
    except AutoReconnect as exc:
        # Cache DB unreachable -- disable the cache for the rest of the process
        # and fall back to a direct call.
        cacheTableError = exc
        logs.warning("Couldn't connect to Mongo cache table; disabling Mongo cache.")
        return userFunction(*fullArgs, **kwargs)
    if result and result['expiration'] is None and not disableStaleness:
        raise ValueError('We should never be using non-expiring cache entries outside of test fixtures!')
    if result and result['expiration'] is not None and disableStaleness:
        raise ValueError('We should never be using expiring cache entries inside of test fixtures!')
    if result and (disableStaleness or (result['expiration'] > now)) and not force_recalculate:
        # We hit the cache and the result isn't stale! Woo!
        return deserializeValue(result['value'], schemaClassesMap)
    elif exceptionOnCacheMiss:
        raise CacheMissException(fnName)
    expiration = None if disableStaleness else now + maxStaleness
    result = userFunction(*fullArgs, **kwargs)
    cacheEntry = {'_id':callHash, 'func_name': fnName, 'value': serializeValue(result, schemaClassesMap), 'expiration':expiration}
    table.update({'_id':callHash}, cacheEntry, upsert=True)
    return result
def main(): import argparse parser = argparse.ArgumentParser() parser.add_argument('-d', '--drop', action="store_true", default=False, help="drop existing collections before importing") parser.add_argument("-s", "--source", default=None, type=str, help="db to import from") parser.add_argument("-t", "--target", default=None, type=str, help="db to import to") args = parser.parse_args() host, port = utils.get_db_config(args.source) utils.log("SOURCE: %s:%s" % (host, port)) old_host = host old_connection = pymongo.Connection(host, port) old_database = old_connection['stamped'] collections = old_database.collection_names() new_host = args.target if new_host is None: dest = MongoDBConfig.getInstance() new_host = dest.host utils.log("DEST: %s:%s" % (new_host, port)) if not os.path.isdir('/stamped/tmp/stamped/'): os.makedirs('/stamped/tmp/stamped') ignore = set([ 'tempentities', 'logs', 'logstats', ]) for collection in collections: print 'RUN %s' % collection if collection in ignore: print 'PASS' else: ret = mongoExportImport(collection, old_host, new_host) if 0 == ret: print 'COMPLETE' else: print "ERROR restoring collection '%s'" % collection print try: utils.runMongoCommand('db.runCommand( {createCollection:"logs", capped:true, size:500000} )') except: utils.printException()
def init_db_config(config_desc):
    """ initializes MongoDB with proper host configuration """
    host, port = get_db_config(config_desc)
    config = {
        'mongodb': {
            'hosts': [(host, port)],
        }
    }
    # TODO: there is a Python oddity that needs some investigation, where, depending on
    # where and when the MongoDBConfig Singleton is imported, it'll register as the same
    # instance that AMongoCollection knows about or not. For now, as a workaround, just
    # import it multiple ways and initialize the config with both possible import paths.
    from api.db.mongodb.AMongoCollection import MongoDBConfig
    MongoDBConfig.getInstance().config = AttributeDict(config)
    from api.db.mongodb.AMongoCollection import MongoDBConfig as MongoDBConfig2
    MongoDBConfig2.getInstance().config = AttributeDict(config)
def init_db_config(conf):
    """ initializes MongoDB with proper host configuration """
    # Default to the standard mongod port; a 'host:port' spec overrides it.
    host, port = conf, 27017
    if ':' in conf:
        host, port = conf.split(':')
        port = int(port)

    if '.' in conf and not conf.endswith('.com'):
        # attempt to resolve the (possible) semantic EC2 instance name to
        # a valid DNS name or associated IP address
        instance = getInstance(conf)
        if instance:
            host = instance.private_dns_name if is_ec2() else instance.public_dns_name

    config = {
        'mongodb': {
            'host': host,
            'port': port,
        }
    }
    # TODO: there is a Python oddity that needs some investigation, where, depending on
    # where and when the MongoDBConfig Singleton is imported, it'll register as the same
    # instance that AMongoCollection knows about or not. For now, as a workaround, just
    # import it multiple ways and initialize the config with both possible import paths.
    from api.db.mongodb.AMongoCollection import MongoDBConfig
    MongoDBConfig.getInstance().config = AttributeDict(config)
    from db.mongodb.AMongoCollection import MongoDBConfig as MongoDBConfig2
    MongoDBConfig2.getInstance().config = AttributeDict(config)
    return config
def runMongoCommand(mongo_cmd, db='stamped', verbose=False):
    """Run a JS command against the configured mongod via the mongo shell.

    Writes the command line to a temp script next to this module, marks it
    executable, runs it through bash, and returns the parsed JSON output (or
    the raw output string if it isn't valid JSON).

    NOTE(review): mongo_cmd is interpolated directly into a shell command line;
    callers must never pass untrusted input here.
    """
    from api.db.mongodb.AMongoCollection import MongoDBConfig
    cmd_template = "mongo --quiet %s:%s/%s --eval 'printjson(%s);'"
    cfg = MongoDBConfig.getInstance()
    cmd = cmd_template % (cfg.host, cfg.port, db, mongo_cmd)
    if verbose:
        log(cmd)
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '.temp.sh')
    # Context manager guarantees the handle is closed even if the write fails.
    with open(path, 'w') as f:
        f.write(cmd)
    # Set the execute bit directly instead of shelling out to chmod.
    os.chmod(path, 0o755)
    cmd = '/bin/bash -c %s' % path
    ret = shell(cmd)
    try:
        return json.loads(ret[0])
    except ValueError:
        # Not JSON (e.g. shell error text) -- hand back the raw output.
        return ret[0]
# NOTE(review): this chunk is a duplicated/overlapping tail of the wrappedFn cache
# wrapper seen earlier in this file (same hit/stale/miss logic and upsert), followed
# by the `return wrappedFn` / `return decoratingFn` closers of enclosing definitions
# whose headers start outside this view, and finally a module-level maintenance
# entry point: invoked with the argument 'purge' it drops the whole cache table,
# otherwise it removes only entries whose 'expiration' is in the past. Because the
# enclosing definitions are cut off here, the code is deliberately left untouched.
if result and (disableStaleness or (result['expiration'] > now)) and not force_recalculate: # We hit the cache and the result isn't stale! Woo! return deserializeValue(result['value'], schemaClassesMap) elif exceptionOnCacheMiss: raise CacheMissException(fnName) expiration = None if disableStaleness else now + maxStaleness result = userFunction(*fullArgs, **kwargs) cacheEntry = {'_id':callHash, 'func_name': fnName, 'value': serializeValue(result, schemaClassesMap), 'expiration':expiration} table.update({'_id':callHash}, cacheEntry, upsert=True) return result return wrappedFn return decoratingFn import sys if __name__ == '__main__': connection = MongoDBConfig.getInstance().connection table = getattr(connection.stamped, cacheTableName) if len(sys.argv) > 1 and sys.argv[1] == 'purge': table.drop() else: table.remove({'expiration':{'$lt':datetime.datetime.now()}})
def runTest(self, *args, **kwargs):
    """Fixture-driven test runner wrapped around testFn.

    Depending on module-level __fixture_test_flags this loads DB and/or cache
    fixtures before running testFn, regenerates fixture data when requested,
    and snapshots the test DB back out to fixture files afterwards. Closure
    variables (testFn, useLocalDb, fixtureText, generateLocalDbFn,
    generateLocalDbQueries) come from the enclosing decorator.

    NOTE(review): the indentation below was reconstructed from a
    whitespace-mangled source; the nesting of the post-load steps is inferred
    from their explicit `not useLocalDb` re-checks -- confirm against the
    original file.
    """
    useDbFixture = __fixture_test_flags.use_db_fixture
    useCacheFixture = __fixture_test_flags.use_cache_fixture
    writeFixtureFiles = __fixture_test_flags.write_fixture_files
    dbFixtureFilename = defaultFixtureFilename(self, testFn, 'dbfixture')
    cacheFixtureFilename = defaultFixtureFilename(self, testFn, 'cachefixture')
    if not useLocalDb:
        # Redirect all DB traffic to the fixtures database and the fixture
        # cache table for the duration of the test.
        MongoDBConfig.getInstance().database_name = 'stamped_fixtures'
        from api.db.mongodb.AMongoCollection import MongoDBConfig as MongoDBConfig2
        MongoDBConfig2.getInstance().database_name = 'stamped_fixtures'
        MongoCache.disableStaleness = True
        MongoCache.cacheTableName = 'cache_fixture'
    db = getattr(MongoDBConfig.getInstance().connection, MongoDBConfig.getInstance().database_name)
    dbDict = {}
    if not useLocalDb:
        # Some functions may want a fixture literally so simple that they can specify it as inline text -- it doesn't come
        # from the database, it doesn't need to be updated, it's just something quick and hand-written. In that case, even
        # when we're doing runs of tests that regenerate the fixtures of the test suite, for these tests we still need to
        # load fixtures as normal.
        useDbFixture = useDbFixture or (generateLocalDbFn is None and generateLocalDbQueries is None)
        # Clear out the whole test DB before running the test.
        [getattr(db, tableName).drop() for tableName in db.collection_names() if tableName != 'system.indexes']
        if useDbFixture or useCacheFixture:
            if fixtureText is not None:
                loadTestDbDataFromText(fixtureText)
            else:
                try:
                    loadTestDbDataFromFilename(dbFixtureFilename)
                except IOError:
                    # We wanted to use a fixture, but we didn't find one. In this case we just decide to fall back
                    # to generating data if either a function or query to do so is provided. If neither is provided,
                    # we assume that this function doesn't touch the database at all.
                    useDbFixture = False
    if useCacheFixture:
        # Cache fixture is loaded independently of the DB fixture; a missing
        # file simply means we fall back to a live (empty) cache.
        try:
            loadTestDbDataFromFilename(cacheFixtureFilename)
        except IOError:
            useCacheFixture = False
    # Take anything out of the database that we don't want.
    if not useDbFixture and not useLocalDb:
        [getattr(db, tableName).drop() for tableName in db.collection_names() if tableName not in [MongoCache.cacheTableName, 'system.indexes']]
        pass
    if not useCacheFixture:
        getattr(db, MongoCache.cacheTableName).drop()
    # Generate the DB objects anew if we're not loading them from file.
    if not useDbFixture and not useLocalDb:
        if generateLocalDbFn is not None:
            generateLocalDbFn()
        elif generateLocalDbQueries is not None:
            issueQueries(generateLocalDbQueries)
    if useCacheFixture:
        MongoCache.exceptionOnCacheMiss = not __fixture_test_flags.live_calls_on_cache_miss
    # The actual DB fixtures we want to snapshot before the function runs, because we don't want to incorporate
    # anything written during the function. But the third-party calls cache we want to snapshot after the
    # function runs.
    if writeFixtureFiles and not useLocalDb:
        for tableName in db.collection_names():
            if tableName not in ['cache', 'system.indexes']:
                dbDict[tableName] = list(getattr(db, tableName).find())
    try:
        testResult = testFn(self, *args, **kwargs)
    finally:
        # Clean up after ourselves.
        MongoCache.exceptionOnCacheMiss = False
        if writeFixtureFiles:
            dumpDbDictToFilename(dbDict, dbFixtureFilename)
            if getattr(db, MongoCache.cacheTableName).count():
                dumpDbDictToFilename({MongoCache.cacheTableName: list(getattr(db, MongoCache.cacheTableName).find())}, cacheFixtureFilename)
        # Restore the global config/cache settings that were overridden above.
        MongoDBConfig.getInstance().database_name = 'stamped'
        from api.db.mongodb.AMongoCollection import MongoDBConfig as MongoDBConfig2
        MongoDBConfig2.getInstance().database_name = 'stamped'
        MongoCache.disableStaleness = False
        if MongoCache.cacheTableName == 'cache_fixture':
            MongoCache.cacheTableName = 'cache'
            db.cache_fixture.drop()
    return testResult