def __sync_workspace():
    """
    Synchronize the workspace
    """
    if webapp_models.Workspace.objects.count() > 0:
        print('[environment]: NO need to sync workspace')
        return

    # Clear MongoDB
    print('************* clear MONGODB *************')
    print(settings.PAGE_STORE_SERVER_HOST)
    connection = Connection(settings.PAGE_STORE_SERVER_HOST,
                            settings.PAGE_STORE_SERVER_PORT)
    connection.drop_database(settings.PAGE_STORE_DB)

    # Clear the relational database
    weapp_product_models.Product.objects.all().delete()
    webapp_models.Workspace.objects.all().delete()
    webapp_models.Project.objects.all().delete()

    client = Client()
    client.login(username='******', password='******')
    data = {
        'modules_info': '{"modules":["cms","mall","user_center","viper_workspace_home_page"],"allow_update":false}'
    }
    response = client.post('/webapp/api/workspace/sync/', data)
def test_ProviderProductListView(self):
    """
    Page must return HTTP 200 status, products with largest items_sold must come first.
    """
    copy_service_and_config_to_default_db()
    service = Service.objects.using(UMBRELLA).get(pk='56eb6d04b37b3379b531b102')
    add_database_to_settings(service.database)
    Product.objects.using(service.database).all().delete()
    call_command('loaddata', 'kc_setup_data.yaml', database=service.database)
    call_command('loaddata', 'products.yaml', database=service.database)
    provider = Service.objects.using(service.database).get(pk='56eb6d04b37b3379b531b102')
    Product.objects.using(service.database).exclude(provider=provider).delete()
    response = self.client.get(
        reverse('kako:provider_product_list', args=('56922874b37b33706b51f002', )))
    self.assertEqual(response.status_code, 200)
    products = response.context['products']
    self.assertEqual(products.count(), 3)
    self.assertEqual(products[0].name, 'Breaded fish')
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database(service.database)
def test_sync_changes(self):
    """
    sync_changes() should copy media in ContentUpdate from the provider's database
    to the buyer's database, and copy poster files from the provider's media folder
    to the buyer's media folder.
    :return:
    """
    member = Member.objects.get(pk='56eb6d04b37b3379b531e012')
    update = ContentUpdate.objects.all()[0]
    test_home_folder = getattr(settings, 'STATIC_ROOT') + 'test_home_kombi'
    member.customer.home_folder = test_home_folder
    member.customer.save()
    test_media_folder = test_home_folder + '/media'
    test_movies_folder = test_home_folder + '/media/movies'
    test_series_folder = test_home_folder + '/media/series'
    if not os.path.exists(test_movies_folder):
        os.makedirs(test_movies_folder)
    if not os.path.exists(test_series_folder):
        os.mkdir(test_series_folder)
    service_id = getattr(settings, 'IKWEN_SERVICE_ID')
    database = Service.objects.get(pk=service_id).database
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database(database)
    # Create actual poster files
    for movie in update.movies_add_list:
        fh = open(movie.poster.path, 'w')
        fh_small = open(movie.poster.small_path, 'w')
        fh_thumb = open(movie.poster.thumb_path, 'w')
        fh.write('somedata')
        fh_small.write('somedata')
        fh_thumb.write('somedata')
        fh.close()
        fh_small.close()
        fh_thumb.close()
    for series in get_series_from_episodes(update.series_episodes_add_list):
        fh = open(series.poster.path, 'w')
        fh_small = open(series.poster.small_path, 'w')
        fh_thumb = open(series.poster.thumb_path, 'w')
        fh.write('somedata')
        fh_small.write('somedata')
        fh_thumb.write('somedata')
        fh.close()
        fh_small.close()
        fh_thumb.close()
    sync_changes(update)
    ContentUpdate.objects.using(database).get(
        member=member, status=ContentUpdate.DELIVERED)  # Should be found in the database
    for movie in update.movies_add_list:
        os.path.exists(test_media_folder + '/' + movie.poster.name)
    for series in get_series_from_episodes(update.series_episodes_add_list):
        os.path.exists(test_media_folder + '/' + series.poster.name)
    shutil.rmtree(test_home_folder)
    cnx.drop_database(database)
def drop_mongo_data(self, db_name, collection_name, host="127.0.0.1", port=27017):
    connection = Connection(host, port)
    result_value = connection[db_name].drop_collection(collection_name)
    connection.drop_database(db_name)
def testModEvent(self):
    app = Flask(__name__)
    conn = Connection()
    with app.test_client() as c:
        testRequest = c.get('/event.json?_username=tester&_session=1234&'
                            '_method=PUT&activity=testing&variables=score&'
                            'types=int&officials=tester&oldid=testing122&'
                            'descr=testing%20stuff&id=testing123')
        event = {'id': 'testing122',
                 'activity': 'testing',
                 'officials': ['tester'],
                 'descr': 'testing stuff',
                 'fields': ['participants', 'score'],
                 'rends': [],
                 'types': ['varchar', 'int'],
                 'rstarts': [],
                 'checks': []}
        statserv.server.sessions['1234'] = ('tester', None, True)
        statserv.server.database = conn.test_db
        db = conn.test_db
        db.stattrtbls.insert(event)
        del event['_id']
        event['id'] = 'testing123'
        statserv.server.mod_event(request)
        actual = db.stattrtbls.find_one({})
        del actual['_id']
        self.assertEquals(event, actual,
                          'The mod_event method did something we didn\'t '
                          'expect. Expected: `%s`, actual: `%s`.'
                          % (event, actual))
    conn.drop_database('test_db')
def test_update_info_with_correct_parameters_and_gender_previously_unset(self):
    """
    All user information should be updated and correct information message returned
    """
    self.client.login(username='******', password='******')
    response = self.client.get(
        reverse('ikwen:update_info'), {
            'email': '*****@*****.**',
            'phone': '655000014',
            'gender': Member.MALE,
            'name': 'Sah Fogaing'
        })
    self.assertEqual(
        response.content,
        json.dumps({'message': _('Your information were successfully updated.')}))
    m = Member.objects.get(email='*****@*****.**')
    m1 = Member.objects.using(UMBRELLA).get(email='*****@*****.**')
    self.assertEqual(m.phone, '655000014')
    self.assertEqual(m.gender, Member.MALE)
    self.assertEqual(m.first_name, 'Sah')
    self.assertEqual(m.last_name, 'Fogaing')
    self.assertEqual(m1.phone, '655000014')
    self.assertEqual(m1.gender, Member.MALE)
    self.assertEqual(m1.first_name, 'Sah')
    self.assertEqual(m1.last_name, 'Fogaing')
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database('test_registered_member')
def drop(user, password):
    """ Drop every database and re-add net user account (CAREFUL BEFORE RUNNING) """
    connection = Connection(get_config('database', 'master_host'),
                            int(get_config('database', 'port')))
    db = database.Database(connection, 'admin')
    db.authenticate(user, password)
    for log_type in get_log_types():
        try:
            db = database.Database(connection, log_type)
            print "dropping " + log_type
            connection.drop_database(log_type)
            # re-add net user account
            db.add_user(get_config('database', 'user'),
                        get_config('database', 'password'))
        except Exception as e:
            print str(e)
            continue
def testGetUsers(self):
    app = Flask(__name__)
    conn = Connection()
    with app.test_client() as c:
        testRequest = c.get('/users.json?_username=test&_session=1234'
                            '&callback=blah')
        statserv.server.sessions['1234'] = ('test', request.remote_addr, True)
        statserv.server.config['adminuser'] = '******'
        db = conn['test_db']
        adminuser = {u'username': u'admin', u'password': '******', u'profile': ''}
        otherusers = [{u'username': u'other', u'password': '******',
                       u'profile': u'The first other user'},
                      {u'username': u'other_two', u'password': '******',
                       u'profile': u'The other other user'}]
        db.stattrusers.insert(adminuser)
        db.stattrusers.insert(otherusers[0])
        db.stattrusers.insert(otherusers[1])
        statserv.server.database = conn['test_db']
        del otherusers[0]['_id']
        del otherusers[1]['_id']
        del otherusers[0]['password']
        del otherusers[1]['password']
        expected = statserv.server.make_response('blah', {'users': otherusers})
        actual = statserv.server.get_users()
        conn.drop_database('test_db')
        self.assertEquals(actual, expected,
                          'The get_users method returned something other '
                          'than what we expected. Expected: `%s`, Actual: '
                          '`%s`' % (expected, actual))
def Calling_Utility(Enableread, iteration, ifile1, lno):
    uout_file_name = 'utilityout.txt'
    uout = open(uout_file_name, 'a')
    wordsin = []
    itera = int(iteration)
    count = 0
    for i in range(itera):
        cmd = './CMDFinal1'
        return_code = subprocess.call(cmd, stdout=uout)
        print(return_code)
        print("executed driver test for %s" % count)
        os.system('echo 3 > /proc/sys/vm/drop_caches')
        print("echo command")
        if (not (int(Enableread)) and not (i == itera - 1)):
            print("inside if condition of writeclean")
            os.system('rm -rf ./chunkfile/*')
            print("removed chunkfiles")
            c = Connection('localhost', 27017)
            c.drop_database('makdatabase')
            print("dropped db")
        else:
            print("writeElseeeeeeeeee")
        count = count + 1
    uout.close()
    if int(Enableread):
        print("inside if condition of read write")
        os.system('rm -rf ./chunkfile/*')
        print("removed chunkfiles")
        c = Connection('localhost', 27017)
        c.drop_database('makdatabase')
        print("dropped db")
    else:
        print("Elseeeeeeeeee")
    read_output_file_of_utility(uout_file_name, ifile1, lno)
    print(count)
def test_do_import_items(self):
    """
    Importing items merely copies a list of items from the provider database to the retailer database.
    Retailer profile and Member object are copied to the provider's database and vice versa.
    Every imported item must have the field is_retailed=True.
    """
    call_command('loaddata', 'wn_members.yaml', database=UMBRELLA)
    copy_service_and_config_to_default_db()
    service = Service.objects.using(UMBRELLA).get(pk='56eb6d04b37b3379b531b102')
    add_database_to_settings(service.database)
    Item.objects.using(service.database).all().delete()
    call_command('loaddata', 'wn_profiles.yaml', database=service.database)
    call_command('loaddata', 'items.yaml', database=service.database)
    self.client.login(username='******', password='******')
    response = self.client.get(reverse('items:do_import_items'),
                               {'provider_slug': 'les-brasseries-du-cameroun',
                                'provider_id': '56922874b37b33706b51f002',
                                'item_ids': '55d1fa8feb60008099bd4152,55d1fa8feb60008099bd4153'})
    self.assertEqual(response.status_code, 200)
    response = json.loads(response.content)
    self.assertTrue(response['success'])
    self.assertEqual(Item.objects.using('default').all().count(), 2)
    Member.objects.using(service.database).get(username='******')  # Member must be in Provider's database
    OperatorProfile.objects.using(service.database).get(pk='56922874b37b33706b51f003')  # Retailer must be in Provider's database
    Service.objects.using(service.database).get(pk='56eb6d04b37b3379b531b103')  # Service must be in Provider's database
    for item in Item.objects.using(service.database).filter(pk__in=['55d1fa8feb60008099bd4152',
                                                                    '55d1fa8feb60008099bd4153']):
        self.assertTrue(item.is_retailed)
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database(service.database)
def test_ProviderProductListView_query_by_category_json_format(self):
    """
    Requesting the products list with GET parameters format=json&category_slug=slug&q=searchTerm
    should return the result as a list of JSON objects.
    """
    copy_service_and_config_to_default_db()
    service = Service.objects.using(UMBRELLA).get(pk='56eb6d04b37b3379b531b102')
    add_database_to_settings(service.database)
    Product.objects.using(service.database).all().delete()
    call_command('loaddata', 'kc_setup_data.yaml', database=service.database)
    call_command('loaddata', 'products.yaml', database=service.database)
    provider = Service.objects.using(service.database).get(pk='56eb6d04b37b3379b531b102')
    Product.objects.using(service.database).exclude(provider=provider).delete()
    response = self.client.get(
        reverse('kako:provider_product_list', args=('56922874b37b33706b51f002', )), {
            'format': 'json',
            'category_slug': 'food',
            'q': 'col',
            'start': 0,
            'length': 24
        })
    self.assertEqual(response.status_code, 200)
    products = json.loads(response.content)
    self.assertEqual(len(products), 1)
    self.assertEqual(products[0]['name'], 'Coca-Cola')
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database(service.database)
def drop_database(self):
    '''
    Useful method, drop test database
    '''
    c = Connection()
    db_name = self.config.application.registry.settings['mongodb'].database_name
    c.drop_database(db_name)
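Most of these snippets use pymongo's Connection class, which newer pymongo releases replace with MongoClient (Connection was removed in pymongo 3.x). A minimal sketch of the same test-database cleanup against the modern API; it keeps the settings lookup from the snippet above, and the host/port values are assumptions:

from pymongo import MongoClient  # pymongo 3.x+ replacement for Connection


def drop_database(self):
    '''
    Drop the test database (modern pymongo sketch)
    '''
    client = MongoClient('localhost', 27017)  # assumed default host/port
    db_name = self.config.application.registry.settings['mongodb'].database_name
    client.drop_database(db_name)
    client.close()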
def removeService(name):
    try:
        obj = getServiceIdByName(name)
        db[COLLECTION].remove({ID: obj['_id']})
        connection = Connection()
        connection.drop_database(name)
    except ServiceNotExistException:
        raise
def _drop_database(self, database_name):
    c = Connection()
    try:
        if database_name in c.database_names():
            self.log("Dropping database: %s" % database_name)
            c.drop_database(database_name)
    finally:
        c.close()
def handle(self, **options):
    # Clear MongoDB
    print '************* start clear MONGODB(%s:%s) *************' % (
        settings.PAGE_STORE_SERVER_HOST, settings.PAGE_STORE_SERVER_PORT)
    connection = Connection(settings.PAGE_STORE_SERVER_HOST,
                            settings.PAGE_STORE_SERVER_PORT)
    connection.drop_database(settings.PAGE_STORE_DB)
    print '************* finish clear MONGODB *************'
class MongoStore(DocStore):
    #: the name of the collection that stores log entries
    cLOG = u'logentries'
    #: the name of the collection that stores api keys documents
    cKEYS = u'keys'
    #: the name of the collection that stores the dif documents
    cDIFS = u'difs'

    def __init__(self, url):
        host = url.hostname
        port = url.port
        self.conn = Connection(host=host, port=port)
        self.dbid = url.path.split('/')[1]
        self.db = self.conn[self.dbid]
        self._bootstrap()

    def _bootstrap(self):
        self.logc = self.db[self.cLOG]
        self.logc.ensure_index(self.LOG_INDEX)
        self.keyc = self.db[self.cKEYS]
        self.difc = self.db[self.cDIFS]

    @staticmethod
    def _hash(key, difs):
        return Binary(DocStore._hash(key, difs))

    def _add_difs_records(self, records):
        # mongodb does not yet support bulk insert of docs with potentially
        # duplicate keys: http://jira.mongodb.org/browse/SERVER-509
        uniq = True
        for r in records:
            try:
                self.difc.insert(r, check_keys=False, safe=True)
            except:
                uniq = False
        return uniq

    def register_key(self, newkey):
        try:
            self.keyc.insert({self.kID: newkey}, check_keys=False, safe=True)
            return True
        except:
            return False

    def ensure_keys(self, keys):
        for key in keys:
            self.register_key(key)
        return imap(itemgetter(self.kID), self.keyc.find())

    def clear(self):
        self.conn.drop_database(self.db)
        self._bootstrap()

    def _add_log_record(self, record):
        self.logc.insert(record)

    def iterlog(self):
        return self.logc.find()
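MongoStore's constructor expects an already-parsed URL object: it reads hostname, port, and the first path segment as the database name. A minimal usage sketch, assuming a made-up connection string; it is shown with Python 3's urllib.parse, although the module above targets Python 2 (where urlparse.urlparse would be used):

from urllib.parse import urlparse

# hypothetical connection string; MongoStore reads url.hostname, url.port
# and the first path component ('auditlog') as the database name
url = urlparse('mongodb://localhost:27017/auditlog')
store = MongoStore(url)
store.clear()  # drops the database, then re-creates the log/keys/difs collections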
def __clear_all_openapi_data():
    '''
    Clear the openapi MongoDB data
    '''
    print('************* clear OPENAPI MONGODB *************')
    connection = Connection(settings.OPENAPI_MONGO_HOST,
                            settings.OPENAPI_MONGO_PORT)
    connection.drop_database(settings.OPENAPI_MONGO_DB)
class MongoDBContext(Vows.Context):
    def setup(self):
        self.fixtures_folder = join(abspath(dirname(__file__)), 'fixtures')
        self.connection = Connection('localhost', 7777)
        self.collection = self.connection['thumbor']['images']

    def teardown(self):
        self.connection.drop_database('thumbor')
class MongoDBContext(Vows.Context):
    def setup(self):
        self.fixtures_folder = join(abspath(dirname(__file__)), 'fixtures')
        self.connection = Connection('localhost', 7777)
        self.collection = self.connection['thumbor']['images']
        self.collection.save({'_id': ObjectId(IMAGE_URL),
                              "content": base64.b64encode(IMAGE_BYTES)})

    def teardown(self):
        self.connection.drop_database('thumbor')
def teardown(total):
    # drop the mongo db
    mongo_config = app.config['MONGO_CONFIG']
    mongodb_uri = 'mongodb://'
    if mongo_config['dbuser'] and mongo_config['dbpassword']:
        mongodb_uri += '%s:%s@' % (mongo_config['dbuser'],
                                   mongo_config['dbpassword'])
    mongodb_uri += '%s/%s' % (mongo_config['hosts'], mongo_config['db_name'])
    connection = Connection(host=mongodb_uri)
    connection.drop_database(app.config['MONGO_CONFIG']['db_name'])
def importTaccData():
    # Pass1: build appname table
    for jobpart in forTaccPart():
        buildAppnameTable(jobpart)
    fiveOnly()
    print "--------------------------------------------"
    nLabels = appExecs.keys()
    n = len(nLabels)
    #lookup = { nLabels[i]: i for i in range(0,n) }
    #dsm = numpy.zeros((n,n), dtype=numpy.int)
    c = Connection()
    c.drop_database("snm-tacc")
    jobsizeHist = defaultdict(int)
    for job in forTaccLongJob():
        jobsizeHist[len(job)] += 1
        #logdeps = getLogicalDeps([guess1App(j) for j in job])
        prevapp = ""
        for j in job:
            app = guess1App(j)
            if (app != ''):
                rec = copy.copy(j)
                rec["endEpoch"] = rec["startEpoch"] + rec["runTime"]
                rec["startTime"] = ""
                rec["dynDeps"] = []
                rec["exec"] = app
                rec["dynPackDeps"] = []
                rec["weakDeps"] = []
                if (prevapp != "" and app != "" and app != prevapp):
                    rec["weakPackDeps"] = {app: [prevapp]}
                else:
                    rec["weakPackDeps"] = {}
                rec["weakPackDeps"] = scrub_dots(rec["weakPackDeps"])
                if (isinstance(rec["pkgT"], list)):
                    rec["pkgT"] = {}
                else:
                    rec["pkgT"] = {k: v["libA"] for (k, v) in rec["pkgT"].items()}
                rec["pkgT"][app] = rec["pkgT"].keys()
                rec["pkgT"] = scrub_dots(rec["pkgT"])
                data = json.dumps(rec)
                registerParsed(c, rec, "0.0.0.0", dbraw="snm-tacc-raw",
                               dbcooked="snm-tacc", postponeCalc=True)
                prevapp = app
    recalcApps(c, c["snm-tacc"])
def drop_databae(server_oid, database_name):
    server = g.db['mangoadmin']['servers'].find_one({'_id': ObjectId(server_oid)})
    if not server:
        flash('Server %s not found' % server_oid, 'error')
        return redirect('/servers')
    connection = Connection(host=server['address'], port=int(server['port']))
    connection.drop_database(database_name)
    flash('%s dropped.' % database_name, 'success')
    return redirect('/servers/view/%s' % server_oid)
def generate_db(**kwargs):
    import functs, utils
    global global_vals
    #t = lambda _list: reduce(lambda x,y: str(x)+'@'+str(y),_list) if _list != [] else ''
    t = lambda _list: '@'.join(_list) if _list != [] else ''
    db, col, gran = None, None, None
    if 'db' in kwargs:
        db = kwargs['db']
        col = 'examples'
    else:
        db = global_vals.db_name
        col_name = global_vals.db_collection
    if 'gran' not in kwargs:
        print('Please provide time granularity for the database to be generated')
        sys.exit()
    else:
        gran = kwargs['gran']
    print('Generating a new db from mother db %s with time granularity = %s' % (db, gran))
    connection = Connection()
    dbs = connection.database_names()
    if db not in dbs:
        print('ERROR: No such mother database exists: %s' % db)
        sys.exit()
    else:
        db_ = connection[db]
        size = db_.examples.count()
        print('DB contains %s examples' % str(size))
        db = db + '_granularity_' + str(gran)
        print('Generating new db: %s' % db)
        connection.drop_database(db)  ## clear if exists
        db = connection[db]
        step = int(gran)
        for i in range(0, size, step):
            exists = True
            anot_i, nar_i, innert_i = [], [], []
            j, k = i - step, i + 1
            cursor = db_.examples.find({'example': {"$gt": j, "$lt": k}})
            (pos, nar) = utils.get_from_cursor(cursor)
            if j >= 0:
                nar.append('starttime(%s)' % str(j))
            if True:
                try:
                    post = {'example': i, 'pos': pos, 'nar': nar}
                    print('#Example,IntrvStart,IntrvEnd:', i, i - step, i)
                except TypeError:
                    print('TypeError at')
                    print(anot_i)
                    print(nar_i)
                    print(innert_i)
                    sys.exit()
                db.examples.insert(post)
def setUp(self):
    self.source_name = 'Oracle'
    self.db_name = 'test_oracle_mongo'
    self.collection_name = 'test_oracle_mongo'
    self.program_mode = ''
    self.schema_name = 'BT_DW_SVC'
    self.table_name = 'DW_SVC_ID'
    self.row_limit = 10
    conn = Connection()  # this is really the server object that we comm with
    if self.db_name in conn.database_names():
        conn.drop_database(self.db_name)
def test_register_with_correct_values_next_url_and_other_get_parameters(self):
    """
    Correct parameters save user in default and foundation databases. Prior GET parameters remain
    """
    import ikwen.conf.settings as ikwen_settings
    ikwen_settings.IKWEN_SERVICE_ID = getattr(settings, 'IKWEN_SERVICE_ID')
    service = Service.objects.get(pk=getattr(settings, 'IKWEN_SERVICE_ID'))
    Config.objects.create(service=service, company_name='Project',
                          contact_email='*****@*****.**', signature='')
    contact_url = reverse('ikwen:forgotten_password')
    origin = reverse('ikwen:register') + '?next=' + urlencode(contact_url + '?p1=v1&p2=v2')
    response = self.client.post(origin, {
        'username': '******',
        'password': '******',
        'password2': 'secret',
        'phone': '655000001',
        'first_name': 'Sah',
        'last_name': 'Fogaing'
    }, follow=True)
    m1 = Member.objects.using(UMBRELLA).get(username='******')
    m2 = Member.objects.get(email='*****@*****.**')
    self.assertEqual(self.client.session['_auth_user_id'], m1.id)  # Test whether user is actually logged in
    self.assertEqual(m1.id, m2.id)
    self.assertEqual(m1.full_name, 'Sah Fogaing')
    final = urlparse(response.redirect_chain[-1][0])
    location = final.path.strip('/').split('/')[-1]
    self.assertEqual(location, 'forgottenPassword')
    params = unquote(final.query).split('&')
    self.assertGreaterEqual(params.index('p1=v1'), 0)
    self.assertGreaterEqual(params.index('p2=v2'), 0)
    response = self.client.post(reverse('ikwen:do_sign_in'), {
        'username': '******',
        'password': '******'
    }, follow=True)
    final = response.redirect_chain[-1]
    location = final[0].strip('?').strip('/').split('/')[-1]
    self.assertEqual(location, 'console')
    perm_list = UserPermissionList.objects.get(user=m2)
    group = Group.objects.get(name=COMMUNITY)
    self.assertIn(group.id, perm_list.group_fk_list)
    self.assertIn(group.id, m1.group_fk_list)
    self.assertIn(group.id, m2.group_fk_list)
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database('test_registered_member')
def handle(self, *args, **options):
    if 'verbosity' in options:
        verbosity = options['verbosity']
    else:
        verbosity = 1

    #Get DB connection
    #conn = connections["default"]
    db_name = settings.DATABASES['default']['NAME']
    conn = Connection()

    #Flush database
    #management.call_command('flush', interactive=False)
    conn.drop_database(db_name)

    #Sync database
    management.call_command('syncdb', interactive=False, verbosity=verbosity)

    #Ensure indexes
    #This is right to ensure an index *using* a subelement
    #conn[db_name]['cvm_batch'].ensure_index('profile.index', unique=True)
    #Ensuring indexes *within* subelements is not possible

    if len(args) > 0:
        subdirectory = args[0] + "/"
        if verbosity > 0:
            print 'Selected fixtures subdirectory', args[0]
    else:
        subdirectory = ""
        if verbosity > 0:
            print 'Using default fixtures directory'

    #Get all fixtures
    fixture_path = settings.PROJECT_PATH + '/fixtures/' + subdirectory
    if verbosity > 0:
        print 'Loading fixtures from', fixture_path + '*'
    fixtures = glob.glob(fixture_path + '*.json')
    for f in fixtures:
        name = f.split('/')[-1].split('.')[0]
        if verbosity > 0:
            print '\tLoading fixtures for collection', name, '...'
        contents = json.loads(file(f).read(), object_hook=json_util.object_hook)
        #conn.get_collection(name).insert(contents)
        try:
            conn[db_name][name].insert(contents)
        except pymongo.errors.InvalidOperation:
            if verbosity > 0:
                print "\t\tThe json file for", name, "appears to be empty.\n"
class TestMailr(object):
    def setUp(self):
        self.mongo = Connection('localhost')
        self.databasename = 'test'
        self.collectioname = 'mails_test'
        self.mail_dir = 'test_data'
        self.part = mock()

    def tearDown(self):
        self.mongo.drop_database(self.databasename)

    def test_load_data(self):
        mailbox = MailBox(self.mail_dir)
        load_data(self.mongo, mailbox, self.databasename, self.collectioname)

    def test_process_charset(self):
        expected = 'utf-8'
        when(self.part).get(CONTENT_TYPE, '').thenReturn('text/plain; charset="utf-8"')
        actual = process_charset(self.part)
        assert expected == actual

    def test_process_charset_ascii(self):
        expected = 'ascii'
        when(self.part).get(CONTENT_TYPE, '').thenReturn('text/plain; charset="ascii"')
        actual = process_charset(self.part)
        assert expected == actual

    def test_process_text(self):
        data = 'my simple milde text'

    def test_decode(self):
        encode_type = 'base64'
        expected = 'this message should not be read by humans'
        message = base64.encodestring(expected)
        actual = decode(encode_type, message)
        assert expected == actual

    def test_main(self):
        sys.argv.append(['-f', '/var/tmp/', '-d', 'bitiching', '-c', 'junk'])
        main()
def initiate_testing():
    app.config['TESTING'] = True
    # setup test db by modifying db name from config
    app.config['MONGO_CONFIG']['db_name'] += '_testing'
    # make sure it doesn't already exist
    # teardown's not cleaning up properly, it seems..
    connection = Connection(app.config['MONGO_CONFIG']['host'],
                            app.config['MONGO_CONFIG']['port'])
    connection.drop_database(app.config['MONGO_CONFIG']['db_name'])
    # populate the db
    views.seed()
def main():
    with open('Input_for_Read_Write.txt', 'r') as f:  # Opening the Input_Data_file in read mode
        data = f.readlines()  # Read line by line
        count = 0
        for line in data:
            words = line.strip().split(",")  # Splitting the data values
            print (words[0], words[1], words[2])  # For Debugging
            Data_size = words[0]  # Assigning the values
            Block_Size = words[1]
            Def_Chunk_Size = words[2]
            Enable_read = words[3]
            Enable_write = words[4]
            document = ElementTree.parse('conf.xml')  # Open the input_XML_file
            membership = document.getroot()  # Get the root of the XML
            users = membership.find('ChunkUtiliy')  # Find the tag
            for user in document.findall('ChunkUtiliy/Write'):  # Set the values of data file to XML attributes
                user.set('BlockSize', Block_Size)
                user.set('DataToWriteMB', Data_size)
                user.set('EnableWrite', Enable_write)
            for user in document.findall('ChunkUtiliy/Read'):  # Set the values of data file to XML attributes
                user.set('DataToReadMB', Data_size)
                user.set('EnableRead', Enable_read)
            for user in document.findall('ChunkUtiliy/Chunk'):  # Set the values of data file to XML attributes
                user.set('ChunkSize', Def_Chunk_Size)
            document.write('conf.xml')  # Write into the XML file
            print ("Updated Conf file")
            cmd = 'time ./BulkInsert'  # Run the Utility with the given input
            os.system(cmd)
            print("executed driver test")
            os.system('echo 3 > /proc/sys/vm/drop_caches')
            print("echo command")
            count = count + 1
            print ('%s experiment done' % (count))
            if int(Enable_read):
                print ("inside if condition of read write")
                os.system('rm -rf ./chunkfile/*')
                print ("removed chunkfiles")
                c = Connection('localhost', 27017)
                c.drop_database('makdatabase')
                print("dropped db")
            else:
                print ("Elseeeeeeeeee")
    f.close()
def testGetProfile(self):
    app = Flask(__name__)
    conn = Connection()
    with app.test_client() as c:
        testRequest = c.get('/profile.json?username=test&callback=blah')
        db = conn.test_db
        tablecol = db.stattrtbls
        usercol = db.stattrusers
        event = {'id': 'testing123',
                 'fields': ['participants', 'score', 'victory'],
                 'descr': 'Testing a whole bunch of stuff.',
                 'types': ['varchar', 'int', 'bool'],
                 'activity': 'testing'}
        results = [{'participants': ['test1', 'test2'],
                    'score': [1, 2],
                    'victory': [False, True]},
                   {'participants': ['test1', 'test'],
                    'score': [3, 1],
                    'victory': [True, False]}]
        user = {'username': '******',
                'profile': 'tests a lot',
                'password': '******'}
        db.stattrusers.insert(user)
        db.stattrtbls.insert(event)
        db.testing123.insert(results[0])
        db.testing123.insert(results[1])
        del user['_id']
        del user['password']
        del event['_id']
        del results[0]
        del results[0]['_id']
        del event['id']
        # Gods of Python, I apologize for the below travesty. For whatever
        # reason, the way the dictionaries are being converted to strings
        # is not consistent. This is necessary to my test suite.
        expected = 'blah({"events": {"testing123": {"fields": '\
                   '["participants", "score", "victory"], "descr": '\
                   '"Testing a whole bunch of stuff.", "types": ["varchar",'\
                   ' "int", "bool"], "activity": "testing"}}, "user": '******'{"username": "******", "profile": "tests a lot"}, '\
                   '"results": {"testing123": [{"participants": ["test1", '\
                   '"test"], "score": [3, 1], "victory": [true, false]}]}});'
        statserv.server.database = conn.test_db
        actual = statserv.server.get_profile()
        self.assertEquals(expected, actual,
                          'The get_profile method returned something we '
                          'didn\'t expect. Expected: `%s`, '
                          'actual: `%s`.' % (expected, actual))
    conn.drop_database('test_db')
class MongoObject(object):
    def __init__(self, app=None):
        if app is not None:
            self.app = app
            self.init_app(app)
        self.Model = self.make_model()
        self.mapper = {}

    def init_app(self, app):
        app.config.setdefault('MONGODB_HOST', "mongodb://localhost:27017")
        app.config.setdefault('MONGODB_DATABASE', "")
        app.config.setdefault('MONGODB_AUTOREF', True)
        # initialize connection and Model properties
        self.app = app
        self.connect()
        self.app.after_request(self.close_connection)

    def connect(self):
        self.connection = Connection(self.app.config['MONGODB_HOST'])

    def init_connection(self):
        self.connection = Connection(self.app.config['MONGODB_HOST'])

    def make_model(self):
        model = Model
        model.query = _QueryProperty(self)
        return model

    @property
    def session(self):
        if not getattr(self, "db", None):
            self.db = self.connection[self.app.config['MONGODB_DATABASE']]
            if self.app.config['MONGODB_AUTOREF']:
                self.db.add_son_manipulator(NamespaceInjector())
                self.db.add_son_manipulator(AutoReferenceObject(self))
        return self.db

    def set_mapper(self, model):
        # Set up a mapper for the model, so when we retrieve documents from the
        # database we will know how to map them to a model object based on the `_ns` field
        self.mapper[model.__collection__] = model

    def close_connection(self, response):
        self.connection.end_request()
        return response

    def clear(self):
        self.connection.drop_database(self.app.config['MONGODB_DATABASE'])
        self.connection.end_request()
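MongoObject follows the usual Flask extension pattern: init_app() seeds the MONGODB_HOST, MONGODB_DATABASE and MONGODB_AUTOREF config keys, session lazily opens the configured database, and clear() drops it, which makes it handy in test teardown. A minimal usage sketch, assuming the class above is importable; the database name and collection are made up:

from flask import Flask

app = Flask(__name__)
app.config['MONGODB_HOST'] = 'mongodb://localhost:27017'
app.config['MONGODB_DATABASE'] = 'demo_db'   # hypothetical database name

mongo = MongoObject(app)

mongo.session.things.insert({'answer': 42})  # `session` is the pymongo database handle
mongo.clear()                                # drop the whole MONGODB_DATABASE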
def recreatedb(uri, database_name):
    connection = Connection(uri)
    connection.drop_database(database_name)
    db = connection[database_name]
    for cc in COLLECTION_CLASSES:
        collection = db[cc.name]
        try:
            cc.initialize(collection)
        except AttributeError:
            pass
        try:
            cc.prepopulate(collection)
        except AttributeError:
            pass
    return db
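recreatedb() expects each entry in COLLECTION_CLASSES to expose a `name` attribute plus optional `initialize()` and `prepopulate()` hooks; both calls are wrapped in try/except AttributeError so either hook may be omitted. A hypothetical collection class illustrating that contract (the `users` name, the index field and the seed document are made up):

class Users(object):
    # collection name looked up on the freshly re-created database
    name = 'users'

    @classmethod
    def initialize(cls, collection):
        # optional: set up indexes right after the database is dropped and re-created
        collection.ensure_index('email', unique=True)

    @classmethod
    def prepopulate(cls, collection):
        # optional: seed documents for the empty database
        collection.insert({'email': 'admin@example.com', 'is_admin': True})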
def mongodb_drop_database(dbname):
    """Drop the named database; return '' on success or the exception info as a string on failure."""
    l = []
    response_dict = {}
    try:
        c = Connection(settings.MONGO_HOST, settings.MONGO_PORT)
        c.drop_database(dbname)
        #print "success"
        return ""
    except:
        # error connecting to mongodb
        #print str(sys.exc_info())
        return str(sys.exc_info())
def removedb(request):
    c = Connection()
    c.drop_database('twitter')
    c.drop_database('weibo')
    return HttpResponse('success!')
def write_xml(Data_size, Block_Size, Def_Chunk_Size, Enable_read, Enable_write,
              Iteration, ifile1, lno):
    print("inside if condition of read_xml")
    os.system('rm -rf ./chunkfile/*')
    print("removed read_xmlchunkfiles")
    c = Connection('localhost', 27017)
    c.drop_database('makdatabase')
    print("dropped read_xmldb")
    print(Iteration)
    print('inside write_xmlwrite_xml ')
    Iteration = '1'
    print('after assingin iteration00')
    print(Iteration)
    document = ElementTree.parse('conf.xml')  # Open the input_XML_file
    membership = document.getroot()  # Get the root of the XML
    users = membership.find('ChunkUtiliy')  # Find the tag
    for user in document.findall('ChunkUtiliy/Write'):
        # Set the values of data file to XML attributes
        user.set('BlockSize', Block_Size)
        user.set('DataToWriteMB', Data_size)
        user.set('EnableWrite', Enable_write)
    for user in document.findall('ChunkUtiliy/Read'):
        # Set the values of data file to XML attributes
        user.set('DataToReadMB', Data_size)
        user.set('EnableRead', Enable_read)
    for user in document.findall('ChunkUtiliy/Chunk'):
        # Set the values of data file to XML attributes
        user.set('ChunkSize', Def_Chunk_Size)
    for user in document.findall('ChunkUtiliy/Iteration'):
        user.set('Count', Iteration)
    document.write('conf.xml')  # Write into the XML file
    print("Updated Conf file")
    print(lno)
    uout_file_name = 'utilityout.txt'
    open_file = open(uout_file_name, 'w')
    open_file.write("Write write_xmlin Conf_xml_file inside")
    open_file.close()
    print('b4 Calling_Utility in write_xml')
    cal.Calling_Utility(Enable_read, Iteration, ifile1, lno)
def test_00_init_ca(self):
    from pymongo import Connection
    c = Connection()
    c.drop_database('utest_fabnet_ca')
    c.close()
    self.clear_files()
    dbconn = 'mongodb://localhost/utest_fabnet_ca'
    create_ca_ks(FILES[0], PWD, 'root', None, db_conn_str=dbconn)
    self.assertTrue(os.path.exists(FILES[0]))
    root_ks = KeyStorage(FILES[0], PWD)
    with self.assertRaises(Exception):
        KeyStorage('/some/file/name', PWD).load()
    with self.assertRaises(Exception):
        KeyStorage(FILES[0], 'fake')
    with self.assertRaises(Exception):
        create_ca_ks(FILES[1], PWD, 'node', root_ks, 'FirstDataCenter',
                     db_conn_str='test_host')
    create_ca_ks(FILES[1], PWD, 'node', root_ks, 'FirstDataCenter', db_conn_str=dbconn)
    self.assertTrue(os.path.exists(FILES[1]))
    with self.assertRaises(Exception):
        create_ca_ks(FILES[1], PWD, 'node', root_ks, 'FirstDataCenter', db_conn_str=dbconn)
    node_ks = KeyStorage(FILES[1], PWD)
    create_ca_ks(FILES[2], PWD, 'client', root_ks, 'Base clients certificate',
                 db_conn_str=dbconn)
    self.assertTrue(os.path.exists(FILES[2]))
    clients_ks = KeyStorage(FILES[2], PWD)
    create_ca_ks(FILES[3], PWD, 'crm.fabnet.com', node_ks, 'CRM', db_conn_str=dbconn)
    self.assertTrue(os.path.exists(FILES[3]))
    crm_ks = KeyStorage(FILES[3], PWD)
    crm_cert = crm_ks.cert_obj()
    node_cert = node_ks.cert_obj()
    self.assertEqual(crm_cert.get_serial_number(), 4)
    #FIXME VALIDATE CERTS
    self.assertTrue(sub_clients_cert.verify(cliens_cert))
    create_ca_ks(FILES[4], PWD, 'test', node_ks, 'Test', db_conn_str='test_host',
                 serial_num=55)
    ks = KeyStorage(FILES[4], PWD)
    add_ca_cert(ks.cert(), dbconn)
    with self.assertRaises(Exception):
        add_ca_cert(ks.cert(), dbconn)
def cprofile_main():
    from pymongo import Connection
    connection = Connection()
    connection.drop_database('timeit_test')
    connection.disconnect()

    from mongoengine import Document, DictField, connect
    connect("timeit_test")

    class Noddy(Document):
        fields = DictField()

    for i in xrange(1):
        noddy = Noddy()
        for j in range(20):
            noddy.fields["key" + str(j)] = "value " + str(j)
        noddy.save()
class TestMongoDBBasic(unittest.TestCase):
    def setUp(self):
        pass
        # TODO: It would be better to have a configuration file
        # that would be loaded at this place. J.A.S

    def test_config_settings(self):
        self.assertEqual(obp_config.MONGODB_SERVER, 'obp_mongod')
        self.assertEqual(obp_config.MONGODB_SERVER_PORT, 27017)

    def test_host_entry(self):
        result = gethostbyname("obp_mongod")
        # check for host entry
        self.assertIsNot(result, None)

    def test_mongodb_connection(self):
        result = Connection('obp_mongod', 27017)
        self.assertIsNot(result, None)
        result.disconnect()

    def test_mongodb_database_collections(self):
        """
        Basic function checks for MongoDB, so we can ensure that all the
        functions we use still exist and have not gone away.
        """
        self.connection = Connection('obp_mongod', 27017)
        # The line below ensures that we don't start with a duplicated db
        self.connection.drop_database('test_obp_import_db')
        self.mongo_db = self.connection.test_obp_import_db
        result = self.mongo_db.collection_names()
        # Nothing was inserted, so nothing is created; should return a length of 0
        self.assertEqual(len(result), 0)
        should_result = [u'test_obp_import_db', u'system.indexes']
        self.mongo_db.test_obp_import_db.insert({'test': 123})
        result = self.mongo_db.collection_names()
        self.assertEqual(result, should_result)
        self.connection.drop_database('test_obp_import_db')
        # Check for no collection in test_obp_import_db
        self.mongo_db = self.connection.test_obp_import_db
        result = self.mongo_db.collection_names()
        # Should return a length of 0
        self.assertEqual(len(result), 0)
def delete_database(db_id):
    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except:
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    try:
        connection.drop_database(db_id)
    except:
        return json.dumps("Unable to delete the database")
    return json.dumps("Database successfully deleted!")
def delete_database(db_id):
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    try:
        connection.drop_database(db_id)
    except:
        return json.dumps("Unable to delete the database")
    return json.dumps('Database successfully deleted!')
def initiate_testing():
    app.config['TESTING'] = True
    # setup test db by modifying db name from config
    app.config['MONGO_CONFIG']['db_name'] += '_testing'
    # make sure it doesn't already exist
    # teardown's not cleaning up properly, it seems..
    mongo_config = app.config['MONGO_CONFIG']
    mongodb_uri = 'mongodb://'
    if mongo_config['dbuser'] and mongo_config['dbpassword']:
        mongodb_uri += '%s:%s@' % (mongo_config['dbuser'],
                                   mongo_config['dbpassword'])
    mongodb_uri += '%s/%s' % (mongo_config['hosts'], mongo_config['db_name'])
    connection = Connection(host=mongodb_uri)
    connection.drop_database(app.config['MONGO_CONFIG']['db_name'])
    # populate the db
    controllers.seed()
def test_simple_ops(self):
    if not have_ssl:
        raise SkipTest()
    try:
        conn = Connection(connectTimeoutMS=100, ssl=True)
    # MongoDB not configured for SSL?
    except ConnectionFailure:
        raise SkipTest()
    response = conn.admin.command('ismaster')
    if 'setName' in response:
        conn = ReplicaSetConnection(replicaSet=response['setName'],
                                    w=len(response['hosts']),
                                    ssl=True)
    db = conn.pymongo_ssl_test
    self.assert_(db.test.insert({'ssl': True}, safe=True))
    self.assertTrue(db.test.find_one()['ssl'])
    conn.drop_database(db)
def testGetConf(self):
    conn = Connection()
    dbname = 'test_db'
    db = conn[dbname]
    expected = dict({u'sitename': u'test',
                     u'logo': u'http://imgur.com/123456.png',
                     u'location': u'redlands, ca'})
    db.stattrconf.insert(expected)
    del expected['_id']
    statserv.server.database = conn[dbname]
    app = Flask(__name__)
    with app.test_client() as c:
        testRequest = c.get('/conf.json?callback=blah')
        result = statserv.server.get_conf()
        conn.drop_database(dbname)
        self.assertEquals(statserv.server.make_response('blah', expected),
                          result,
                          'The get_conf method is broken, it did not '
                          'return what we put in.')
def test_ProviderItemListView_json_format(self):
    """
    Requesting the items list with GET parameters format=json&q=searchTerm
    should return the result as a list of JSON objects.
    """
    copy_service_and_config_to_default_db()
    service = Service.objects.using(UMBRELLA).get(pk='56eb6d04b37b3379b531b102')
    add_database_to_settings(service.database)
    Item.objects.using(service.database).all().delete()
    call_command('loaddata', 'items.yaml', database=service.database)
    response = self.client.get(reverse('items:provider_item_list',
                                       args=('56922874b37b33706b51f002', )),
                               {'format': 'json', 'q': 'col', 'start': 0, 'length': 24})
    self.assertEqual(response.status_code, 200)
    items = json.loads(response.content)
    self.assertEqual(len(items), 1)
    self.assertEqual(items[0]['name'], 'Coca-Cola')
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database(service.database)
def test_update_password_with_correct_values(self):
    self.assertTrue(self.client.login(username='******', password='******'))
    response = self.client.get(reverse('ikwen:update_password'), {
        'password': '******',
        'password1': 'value',
        'password2': 'value'
    })
    self.assertEqual(
        response.content,
        json.dumps({'message': _('Your password was successfully updated.')}))
    self.assertTrue(self.client.login(username='******', password='******'))
    m2 = Member.objects.using(UMBRELLA).get(username='******')
    self.assertTrue(m2.check_password('value'))
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database('test_registered_member')
def mongo_cleaner(request):
    """
    Warning - don't use this in production! :)
    """
    mongo_server = request.getfuncargvalue('mongo_server')
    conn = Connection(mongo_server.hostname, mongo_server.port)
    print
    print "=" * 80
    print "MongoCleaner dropping databases {}".format(conn.database_names())
    print "=" * 80
    print
    [conn.drop_database(i) for i in conn.database_names()]
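request.getfuncargvalue() used above was later deprecated by pytest in favour of request.getfixturevalue(). A sketch of the same cleaner on a current pytest, with the drop loop written as a plain for statement instead of a throw-away list comprehension; the mongo_server fixture is assumed to exist as in the original:

from pymongo import Connection


def mongo_cleaner(request):
    """
    Warning - don't use this in production! :)
    """
    mongo_server = request.getfixturevalue('mongo_server')  # current pytest API
    conn = Connection(mongo_server.hostname, mongo_server.port)
    print("=" * 80)
    print("MongoCleaner dropping databases {}".format(conn.database_names()))
    print("=" * 80)
    for name in conn.database_names():
        conn.drop_database(name)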
def Calling_Utility(Enableread, iteration, ifile1, lno):
    print('inside Calling_Utility')
    uout_file_name = 'utilityout.txt'
    uout = open(uout_file_name, 'a')
    wordsin = []
    itera = int(iteration)
    count = 0
    print('Iteration')
    print(itera)
    for i in range(itera):
        cmd = './VHFSUtility'
        return_code = subprocess.call(cmd, stdout=uout)
        print('return_code: %s' % return_code)
        print("executed driver test for %s" % count)
        os.system('sync; echo 3 > /proc/sys/vm/drop_caches')
        print("echo command")
        if (not (int(Enableread)) and not (i == itera - 1)):
            print("inside if condition of writeclean")
            os.system('find . -name "*.sgcf" -print0 | xargs -0 rm')
            os.system('rm -rf ./chunkfile/*')
            print("removed chunkfiles")
            c = Connection('localhost', 27017)
            c.drop_database('makdatabase')
            print("dropped db")
        else:
            print("writeElseeeeeeeeee")
        count = count + 1
    uout.close()
    if int(Enableread):
        print("inside if condition of Enableread")
        os.system('find . -name "*.sgcf" -print0 | xargs -0 rm')
        os.system('rm -rf ./chunkfile/*')
        print("removed chunkfiles")
        c = Connection('localhost', 27017)
        c.drop_database('makdatabase')
        print("dropped db")
    else:
        print("EnablereadElseeeeeeeeee")
    print('b4 read_output_file_of_utility')
    read_output_file_of_utility(uout_file_name, ifile1, lno)
    print(count)
def test_join(self):
    """
    Joining adds the requesting member to the service's local database and creates
    the corresponding UserPermissionList object with the right group.
    """
    service = Service.objects.get(pk='56eb6d04b37b3379b531b102')
    et = ConsoleEventType.objects.get(pk='56eb6db3379b531a0104b371')  # Collaboration Access Request event type
    group_id = '5804b37b3379b531e01eb6d2'
    add_database_to_settings(service.database)
    Group.objects.using(service.database).create(pk=group_id, name=COMMUNITY)
    UserPermissionList.objects.using(service.database).all().delete()
    self.client.login(username='******', password='******')
    response = self.client.get(reverse('ikwen:join'), {
        'service_id': service.id,
        'format': 'json'
    })
    json_response = json.loads(response.content)
    self.assertTrue(json_response['success'])
    member3 = Member.objects.get(pk='56eb6d04b37b3379b531e013')
    self.assertIn(group_id, member3.group_fk_list)
    self.assertEqual(
        ConsoleEvent.objects.filter(member=service.member, event_type=et).count(), 0)
    perm_obj = UserPermissionList.objects.using(service.database).get(user=member3)
    self.assertListEqual(perm_obj.group_fk_list, [group_id])
    self.client.logout()
    self.client.login(username='******', password='******')
    response = self.client.get(reverse('ikwen:console'))
    self.assertEqual(len(response.context['event_list']), 1)
    self.client.logout()
    self.client.login(username='******', password='******')
    response = self.client.get(reverse('ikwen:console'))
    self.assertIn(service.id, member3.customer_on_fk_list)
    self.assertEqual(len(response.context['event_list']), 1)
    from pymongo import Connection
    cnx = Connection()
    cnx.drop_database(service.database)
class MongoDBTests(unittest.TestCase):
    def setUp(self):
        self.connection = Connection()
        self.db = self.connection.test_database

    def tearDown(self):
        self.connection.drop_database(self.db)

    def test_inserting_locations(self):
        locations = self.db.locations
        for l in LOCATIONS:
            location_hash = grid_id(l[0], l[1], 400, 600)
            locations.insert({'latitude': l[0],
                              'longitude': l[1],
                              'hash': location_hash})
        self.assertEqual(len(LOCATIONS), locations.count())
        expected_latitude = 37.58
        spot = locations.find_one({"hash": (382, 203)})
        self.assertEqual(expected_latitude, spot["latitude"])
def handle(self, *args, **options):
    #Get DB connection
    #conn = connections["default"]
    db_name = settings.DATABASES['default']['NAME']
    conn = Connection()

    #Flush database
    #management.call_command('flush', interactive=False)
    conn.drop_database(db_name)

    #Sync database
    management.call_command('syncdb', interactive=False)

    #Ensure indexes
    #This is right to ensure an index *using* a subelement
    conn[db_name]['tb_app_batch'].ensure_index('profile.index', unique=True)
    #Ensuring indexes *within* subelements is not possible

    if len(args) > 0:
        subdirectory = args[0] + "/"
        print 'Selected fixtures subdirectory', args[0]
    else:
        subdirectory = ""
        print 'Using default fixtures directory'

    #Get all fixtures
    fixture_path = settings.PROJECT_PATH + '/fixtures/' + subdirectory
    print 'Loading fixtures from', fixture_path + '*'
    fixtures = glob.glob(fixture_path + '*.json')
    for f in fixtures:
        name = f.split('/')[-1].split('.')[0]
        print '\tLoading fixtures for collection', name, '...'
        contents = json.loads(file(f).read(), object_hook=json_util.object_hook)
        #conn.get_collection(name).insert(contents)
        conn[db_name][name].insert(contents)