def start():
    """Start mongod for the project and bind the module-global handles.

    Idempotent: returns immediately when already started.  Creates the
    project database on first run.
    """
    global connection, db, dbname
    global tagscollection, t, started
    global mongod_process
    if started:
        return
    args.source_dir = os.path.expanduser(args.source_dir)
    # Start mongodb
    dbpath = os.path.join(args.source_dir, "mongo_db")
    mongod_process = start_mongod(dbpath=dbpath)
    # Poll until mongod accepts connections.  Catch Exception rather than
    # using a bare except so Ctrl-C can still abort the retry loop.
    while True:
        try:
            log("trying to connect to mongod...")
            connection = Connection()
            break
        except Exception:
            sleep(1)
            continue
    # Database name is derived from the source dir path ('/' and '.' are
    # not legal in mongo database names).
    dbname = args.source_dir.replace('/', '|').replace('.', '')
    db = connection[dbname]
    t = db[tagscollection]
    log("Found databases: %s" % (', '.join(connection.database_names())))
    if dbname not in connection.database_names():
        log("Database for this project does not exist. Creating one...")
        create_project(args.source_dir)
    started = True
def start():
    """Start mongod for the project and bind the module-global handles.

    Idempotent: returns immediately when already started.  Creates the
    project database on first run.
    """
    global connection, db, dbname
    global tagscollection, t, started
    global mongod_process
    if started:
        return
    args.source_dir = os.path.expanduser(args.source_dir)
    # Start mongodb
    dbpath = os.path.join(args.source_dir, "mongo_db")
    mongod_process = start_mongod(dbpath=dbpath)
    # Poll until mongod accepts connections.  Catch Exception rather than
    # using a bare except so Ctrl-C can still abort the retry loop.
    while True:
        try:
            log("trying to connect to mongod...")
            connection = Connection()
            break
        except Exception:
            sleep(1)
            continue
    # Database name is derived from the source dir path ('/' and '.' are
    # not legal in mongo database names).
    dbname = args.source_dir.replace('/', '|').replace('.', '')
    db = connection[dbname]
    t = db[tagscollection]
    log("Found databases: %s" % (', '.join(connection.database_names())))
    if dbname not in connection.database_names():
        log("Database for this project does not exist. Creating one...")
        create_project(args.source_dir)
    started = True
def find_loc(self, db=None, col=None, x='lon', y='lat', idcol='_id',
             properties=False, query=None, callback=None):
    """
    For a specific lat/lon column pair return GeoJSON representation
    of the coordinates.

    :param db: Optional, mongodb database, if not specified a list of
        dbs is returned
    :param col: Optional, mongodb collection, if not specified a list of
        collections is returned
    :param x: x-coordinate (longitude)
    :param y: y-coordinate (latitude)
    :param idcol: document field used as the GeoJSON feature id
    :param query: Optional, query provided as a python dictionary
        (currently unused by this method)
    :param callback: Optional, used for returning output as JSONP

    Example:
    >>> get.find_loc('flora', 'data', x='midlon', y='midlat',
    ...              idcol='REF_NO', properties=True)
    """
    # Make connection
    con = Connection(self.MONGOHOST, self.MONGOPORT)
    # Browse databases: an unknown/missing db name now returns the list
    # of databases immediately.  The original fell through holding a
    # string (and assigned the listing to a 'serialzed' typo on the
    # collection branch), then crashed on db.collection_names().
    if db in con.database_names():
        db = con[db]
    else:
        return json.dumps(con.database_names())
    # Browse collections, same early-return treatment.
    if col in db.collection_names():
        col = db[col]
    else:
        return json.dumps(db.collection_names())
    # Two types of output, with and without properties
    if properties:
        # Return GeoJSON with all properties
        cur = col.find()
        serialized = geojson.dumps(geojson.FeatureCollection([
            geojson.Feature(
                geometry=geojson.Point((item[x], item[y])),
                properties={'id': item[idcol], 'attributes': item}
            )
            for item in cur if x in item.keys() and y in item.keys()
        ]), indent=2, default=handler)
    else:
        # Return GeoJSON with only lat/lon and id column.
        cur = col.find(fields=[x, y, idcol])
        serialized = geojson.dumps(geojson.FeatureCollection([
            geojson.Feature(
                geometry=geojson.Point((item[x], item[y])),
                properties={'id': item[idcol]}
            )
            for item in cur if x in item.keys() and y in item.keys()
        ]), indent=2, default=handler)
    # JSONP wrapping when a callback name was supplied.
    if callback:
        return str(callback) + '(' + serialized + ')'
    else:
        return serialized
def mongo_cleaner(request): """ Warning - don't use this in production! :) """ mongo_server = request.getfuncargvalue('mongo_server') conn = Connection(mongo_server.hostname, mongo_server.port) print print "=" * 80 print "MongoCleaner dropping databases {}".format(conn.database_names()) print "=" * 80 print [conn.drop_database(i) for i in conn.database_names()]
def change_collection(db_id, coll_id):
    """Rename experiment (collection) coll_id in db_id to the request body.

    Returns a JSON-encoded status string in every case.
    """
    new_name = request.data
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except Exception:  # narrowed from bare except: don't trap SystemExit etc.
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Experiment doesn't exist!")
    if new_name in coll_names:
        return json.dumps("New name already exist!")
    collection = db[coll_id]
    try:
        collection.rename(new_name)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to change the name of the experiment!")
    return json.dumps("Experiment's name changed!")
def create_collection(db_id):
    """Create the collection named by the request body inside db_id.

    Returns a JSON-encoded status string in every case.
    """
    coll_id = request.data
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")
    coll_names = db.collection_names()
    if coll_id in coll_names:
        return json.dumps("Collection already exists!")
    try:
        db.create_collection(coll_id)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to create a collection")
    return json.dumps('Collection successfully created!')
def store_message(db_id, coll_id):
    """Parse a RawRFReadingCollection protobuf from the request body and
    insert it (as JSON) into db_id/coll_id.

    Returns a JSON-encoded status string in every case.
    """
    try:
        raw_data_collection = raw_data_pb2.RawRFReadingCollection()
        raw_data_collection.ParseFromString(request.data)
    except Exception:  # narrowed from bare except
        return json.dumps('Message is not well formated!')
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")
    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such collection in the database!")
    try:
        collection.insert(protobuf_json.pb2json(raw_data_collection))
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to store data into the database!")
    return json.dumps('Data stored!')
def change_collection(db_id, coll_id):
    """Rename collection coll_id in db_id to the name in the request body.

    Returns a JSON-encoded status string in every case.
    """
    new_name = request.data
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Collection doesn't exist!")
    if new_name in coll_names:
        return json.dumps("New name already exist!")
    collection = db[coll_id]
    try:
        collection.rename(new_name)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to change the name of the collection!")
    return json.dumps("Collection's name changed!")
def distinct(self, db=None, col=None, distinct_key=None, query=None,
             callback=None):
    """Return the distinct values of distinct_key for documents matching
    query, as JSON (or JSONP when callback is given).

    With no db/col the available databases/collections are listed instead.
    """
    con = Connection(self.MONGOHOST, self.MONGOPORT)
    if db:
        db = con[db]
    else:
        return json.dumps(con.database_names())
    if col:
        col = db[col]
    else:
        return json.dumps(db.collection_names())
    dump_out = []
    if query and distinct_key:
        query = ast.literal_eval(query)
        # Pass the filter positionally: the original find(**query)
        # exploded the filter's keys into keyword arguments, raising
        # TypeError for any ordinary query document (cf. group()).
        cur = col.find(query).distinct(distinct_key)
        for item in cur:
            dump_out.append(item)
        serialized = json.dumps(dump_out, default=handler,
                                sort_keys=True, indent=4)
        if callback:
            return str(callback) + '(' + serialized + ')'
        else:
            return serialized
    else:
        return json.dumps({
            "error": "You must supply a distinct_key and query specification"})
def store_message(db_id, coll_id):
    """Parse an Experiment protobuf from the request body and insert it
    (as JSON) into db_id/coll_id.

    Returns a JSON-encoded status string in every case.
    """
    experiment_collection = experiment_results_pb2.Experiment()
    try:
        experiment_collection.ParseFromString(request.data)
    except Exception:  # narrowed from bare except
        return json.dumps('Experiment is not well defined!')
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")
    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such experiment in the database!")
    try:
        collection.insert(protobuf_json.pb2json(experiment_collection))
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to store data into the database!")
    return json.dumps('Data stored!')
def main(): connection = Connection() def compute_signature(index): signature = index["ns"] for key in index["key"]: try: signature += "%s_%s" % (key, int(index["key"][key])) except ValueError: signature += "%s_%s" % (key, index["key"][key]) return signature def report_redundant_indexes(current_db): print "Checking DB: %s" % current_db.name indexes = current_db.system.indexes.find() index_map = {} for index in indexes: signature = compute_signature(index) index_map[signature] = index for signature in index_map.keys(): for other_sig in index_map.keys(): if signature == other_sig: continue if other_sig.startswith(signature): print "Index %s[%s] may be redundant with %s[%s]" % ( index_map[signature]["ns"], index_map[signature]["name"], index_map[other_sig]["ns"], index_map[other_sig]["name"], ) for db in connection.database_names(): report_redundant_indexes(connection[db])
def connect(uri, database_name):
    """Return a handle to database_name at uri.

    When the database does not exist yet it is (re)created via
    recreatedb(); otherwise the existing database is returned.
    """
    connection = Connection(uri, safe=True)
    if database_name in connection.database_names():
        return connection[database_name]
    return recreatedb(uri, database_name)
def main(): connection = Connection() def compute_signature(index): signature = index["ns"] for key in index["key"]: try: signature += "%s_%s" % (key, int(index["key"][key])) except ValueError: signature += "%s_%s" % (key, index["key"][key]) return signature def report_redundant_indexes(current_db): print "Checking DB: %s" % current_db.name indexes = current_db.system.indexes.find() index_map = {} for index in indexes: signature = compute_signature(index) index_map[signature] = index for signature in index_map.keys(): for other_sig in index_map.keys(): if signature == other_sig: continue if other_sig.startswith(signature): print "Index %s[%s] may be redundant with %s[%s]" % ( index_map[signature]["ns"], index_map[signature]["name"], index_map[other_sig]["ns"], index_map[other_sig]["name"]) for db in connection.database_names(): report_redundant_indexes(connection[db])
def show_dbs(): """return a list of all dbs and related collections. Return an empty list on error. """ #print "skip and limit", skip, limit l=[] response_dict={} try: c= Connection(settings.MONGO_HOST, settings.MONGO_PORT) dbs = c.database_names() #print "Databases", dbs dbs.remove("local") for d in dbs: dbc = c[d] collections = dbc.collection_names() collections = remove_values_from_list(collections, "system.indexes") l.append({"name":d, "collections":collections}) return tuple(l) except: #error connecting to mongodb print str(sys.exc_info()) return ()
def create_collection(db_id):
    """Create the experiment (collection) named by the request body
    inside db_id.

    Returns a JSON-encoded status string in every case.
    """
    coll_id = request.data
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")
    coll_names = db.collection_names()
    if coll_id in coll_names:
        return json.dumps("Collection already exists!")
    try:
        db.create_collection(coll_id)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to create an experiment")
    return json.dumps('Experiment successfully created!')
def generate_virutal_training_fingerprints(db_id_original, coll_id_original,
                                           db_id_enriched, coll_id_enriched):
    """Generate virtual training fingerprints from the measurements in
    db_id_original/coll_id_original and store them in
    db_id_enriched/coll_id_enriched.

    Request body (JSON) must carry 'transmitters',
    'define_virtual_points' ('User' | 'Voronoi') and
    'propagation_model' ('IDWI' | 'Multiwall').
    Returns a JSON-encoded status string.
    """
    parameters = json.loads(request.data)
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id_original in db_names:
        db1 = connection[db_id_original]
    else:
        return json.dumps("Database " + db_id_original + " doesn't exist!")
    if db_id_enriched in db_names:
        db2 = connection[db_id_enriched]
    else:
        return json.dumps("Database " + db_id_enriched + " doesn't exist!")
    coll_names = db1.collection_names()
    if coll_id_original not in coll_names:
        # Fixed NameError: original referenced 'Coll_id_original'.
        return json.dumps("Collection " + coll_id_original +
                          " doesn't exist!")
    coll_names = db2.collection_names()
    if coll_id_enriched not in coll_names:
        return json.dumps("Collection " + coll_id_enriched +
                          " doesn't exist!")
    coordinates, rssis = get_coordinates_rssi(db_id_original,
                                              coll_id_original,
                                              parameters['transmitters'])
    if parameters['define_virtual_points'] == 'User':
        points = EF.virtual_point_user()
    elif parameters['define_virtual_points'] == 'Voronoi':
        points = EF.virtual_point_modified_voronoi(coordinates)
    else:
        return json.dumps(
            'Unknown method for the definition of virtual training points')
    if parameters['propagation_model'] == 'IDWI':
        virtual_fingerprints = EF.generate_virtual_fingerprints_idwi(
            coordinates, rssis, points, parameters['transmitters'])
        reply = store_virtual_fingerprints(db_id_original, coll_id_original,
                                           db_id_enriched, coll_id_enriched,
                                           points, virtual_fingerprints)
        return json.dumps(reply)
    elif parameters['propagation_model'] == 'Multiwall':
        virtual_fingerprints = EF.generate_virtual_fingerprints_multiwall(
            points, parameters['transmitters'])
        reply = store_virtual_fingerprints(db_id_original, coll_id_original,
                                           db_id_enriched, coll_id_enriched,
                                           points, virtual_fingerprints)
        return json.dumps(reply)
    else:
        # JSON-encode for consistency with every other return path; the
        # original returned a raw (and syntactically broken) string here.
        return json.dumps('Unknown method for the generation of virtual '
                          'training fingerprints')
def _drop_database(self, database_name):
    """Drop database_name from the local mongod, if present.

    The connection is always closed, even when the drop fails.
    """
    conn = Connection()
    try:
        existing = conn.database_names()
        if database_name in existing:
            self.log("Dropping database: %s" % database_name)
            conn.drop_database(database_name)
    finally:
        conn.close()
def group(self, db=None, col=None, key=None, variable=None, query=None,
          callback=None):
    """Perform group by aggregations from a specific mongoDB db and collection

    :param db: Optional, mongodb database, if not specified a list of dbs is returned
    :param col: Optional, mongodb collection, if not specified a list of collections is returned
    :param key: Optional, List for keys to Group By(['key1','key2']), if not specified list available keys
    :param variable: variable to sum, average, and count, if not specified list available variables
    :param query: Optional, query provided as a python dictionary (see pymongo and mongodb docs for query syntax)
    :param callback: Optional, used for returning output as JSONP

    At the moment this method assumes you want output as JSON, should
    probably refactor to default to dict and allow options for JSON/JSONP
    """
    con = Connection(self.MONGOHOST, self.MONGOPORT, replicaset='cybercom',
                     read_preference=ReadPreference.SECONDARY)
    # if db is set create db object, else show db names
    if db:
        db = con[db]
    else:
        return json.dumps(con.database_names())
    # If collection is set return records, else show collection names
    if col:
        col = db[col]
    else:
        return json.dumps(db.collection_names())
    # Peek at one matching record so key/variable can be validated
    # against the fields actually available.
    if query:
        query = ast.literal_eval(query)
        cur = col.find(query).limit(1)[0]
    else:
        query = {}
        cur = col.find().limit(1)[0]
    if not key:
        return json.dumps("Key is a list of key(s) you want to Group By: "
                          + str(cur.keys()))
    if variable:
        if not variable in cur.keys():
            return json.dumps("Variable is a string of the key you want to aggregate: " + str(cur.keys()))
    else:
        return json.dumps("Variable is a string of the key you want to aggregate: " + str(cur.keys()))
    # Renamed from 'reduce' to avoid shadowing the builtin.
    reducer = Code(" function(obj,prev) {prev.Sum += obj.%s;prev.count+=1; prev.Avg = prev.Sum/prev.count;}" % (variable))
    results = col.group(ast.literal_eval(key), query,
                        {'Sum': 0, 'Avg': 0, 'count': 0,
                         'Variable': variable}, reducer)
    # Sort by the group-by keys when possible; fall back to raw results.
    try:
        sortlist = ast.literal_eval(key)
        dump_out = self.multikeysort(results, sortlist)
    except Exception:
        dump_out = results
    # serialize and return JSON or JSONP
    serialized = json.dumps(dump_out, default=handler, sort_keys=True,
                            indent=4)
    if callback is not None:
        return str(callback) + '(' + serialized + ')'
    else:
        return serialized
def __init__(self, Database, create=False):
    """Open a GridFS handle on the named database.

    Raises NameError when the database is missing and create is False;
    with create=True simply accessing the database creates it.
    """
    conn = Connection()
    known = conn.database_names()
    if Database not in known and not create:
        raise NameError('Database does not exist. \nCall get_FS with create=True if you want to create it.')
    self.fs = gridfs.GridFS(conn[Database])
def generate_db(**kwargs):
    """Build a coarser-granularity database from a 'mother' db.

    kwargs: 'db' (mother database name, defaults to global_vals.db_name)
    and 'gran' (required time granularity / step size).  Writes examples
    into '<db>_granularity_<gran>'.
    """
    import functs, utils
    global global_vals
    t = lambda _list: '@'.join(_list) if _list != [] else ''
    db, col, gran = None, None, None
    if 'db' in kwargs:
        db = kwargs['db']
        col = 'examples'
    else:
        db = global_vals.db_name
        col_name = global_vals.db_collection
    if 'gran' not in kwargs:
        print('Please provide time granularity for the database to be generated')
        sys.exit()
    else:
        gran = kwargs['gran']
    # Formatting moved inside the call: the original wrote
    # print('...') % (db, gran), which applies % to None (print's return
    # value) and raises TypeError at runtime.
    print('Generating a new db from mother db %s with time granularity = %s'
          % (db, gran))
    connection = Connection()
    dbs = connection.database_names()
    if db not in dbs:
        print('ERROR: No such mother database exists: %s' % (db))
        sys.exit()
    else:
        db_ = connection[db]
    size = db_.examples.count()
    print('DB contains %s examples' % (str(size)))
    db = db + '_granularity_' + str(gran)
    print('Generating new db: %s' % (db))
    connection.drop_database(db)  # clear if exists
    db = connection[db]
    step = int(gran)
    for i in range(0, size, step):
        exists = True
        anot_i, nar_i, innert_i = [], [], []
        j, k = i - step, i + 1
        cursor = db_.examples.find({'example': {"$gt": j, "$lt": k}})
        (pos, nar) = utils.get_from_cursor(cursor)
        if j >= 0:
            nar.append('starttime(%s)' % (str(j)))
        try:
            post = {'example': i, 'pos': pos, 'nar': nar}
            print('#Example,IntrvStart,IntrvEnd:', i, i - step, i)
        except TypeError:
            print('TypeError at')
            print(anot_i)
            print(nar_i)
            print(innert_i)
            sys.exit()
        db.examples.insert(post)
def setUp(self):
    """Configure test fixture parameters and drop any leftover test db."""
    self.source_name = 'Oracle'
    self.db_name = 'test_oracle_mongo'
    self.collection_name = 'test_oracle_mongo'
    self.program_mode = ''
    self.schema_name = 'BT_DW_SVC'
    self.table_name = 'DW_SVC_ID'
    self.row_limit = 10
    # this is really the server object that we comm with
    conn = Connection()
    # Ensure a clean slate: remove the database if a previous run left it.
    if self.db_name in conn.database_names():
        conn.drop_database(self.db_name)
def main(): description = 'Print out potentially redundant indexes in a MongoDB instance.' global args parser = ArgumentParser(description=description) parser.add_argument( '-H', '--host', default='localhost', help= "mongodb host, e.g. 'api.foo.com' default to 'localhost' if not specified" ) parser.add_argument('-P', '--port', type=int, default=27017, help="mongodb port if not the default 27017") args = parser.parse_args() connection = Connection(host=args.host, port=args.port, read_preference=ReadPreference.SECONDARY) def compute_signature(index): signature = index["ns"] for key in index["key"]: try: signature += "%s_%s" % (key, int(index["key"][key])) except ValueError: signature += "%s_%s" % (key, index["key"][key]) return signature def report_redundant_indexes(current_db): print "Checking DB: %s" % current_db.name indexes = current_db.system.indexes.find() index_map = {} for index in indexes: signature = compute_signature(index) index_map[signature] = index for signature in index_map.keys(): for other_sig in index_map.keys(): if signature == other_sig: continue if other_sig.startswith(signature): print "Index %s[%s] may be redundant with %s[%s]" % ( index_map[signature]["ns"], index_map[signature]["name"], index_map[other_sig]["ns"], index_map[other_sig]["name"]) for db in connection.database_names(): report_redundant_indexes(connection[db])
def __init__(self, host='localhost'):
    """Connect to MongoDB database, get logaar collections

    Create and populate them if needed"""
    if not has_c():
        log.warning("Pymongo C module not available. Consider installing it to increase performances.")
    c = Connection()
    self._connection = c
    if 'logaar' not in c.database_names():
        log.info("Creating logaar database")
    db = c.logaar
    if 'logs' not in db.collection_names():
        log.info("Creating collection 'logs'")
    self.logs = db.logs
    # Create index if needed
    db.logs.ensure_index(
        [
            ('date', 1),
            ('level', 1),
            ('message', 1),
            ('program', 1),
            ('pid', 1),
        ],
        unique=True,
        dropDups=True,
    )
    if 'incoming' not in db.collection_names():
        log.info("Creating collection 'incoming'")
        # Capped-collection size must be a numeric byte count: the
        # original passed the string "100000", which mongod rejects.
        db.create_collection("incoming", capped=True, size=100000)
    self.incoming = db.incoming
    # Create index if needed
    db.incoming.ensure_index([("date", DESCENDING)])
    if 'rules' not in db.collection_names():
        log.info("Creating collection 'rules'")
        # TODO: rewrite this using proper collection dump/restore
        import setup_rules
        for rulevals in setup_rules.rules_tuple:
            d = dict(zip(setup_rules.keys, rulevals))
            db.rules.insert(d)
    self.rules = db.rules
    log.info('connected')
def __init__(self, Database, create=False):
    """Open a GridFS handle on the named database.

    Raises NameError when the database is missing and create is False;
    with create=True simply accessing the database creates it.
    """
    conn = Connection()
    known = conn.database_names()
    if Database not in known and not create:
        raise NameError(
            'Database does not exist. \nCall get_FS with create=True if you want to create it.'
        )
    self.fs = gridfs.GridFS(conn[Database])
def replace_message(db_id, coll_id, data_id):
    """Replace the message with the given data_id by the protobuf message
    in the request body, restoring the original on failure.

    Returns a JSON-encoded status string in every case.
    """
    raw_data_collection = raw_data_pb2.RawRFReadingCollection()
    raw_metadata = raw_metadata_pb2.Metadata()
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    try:
        raw_data_collection.ParseFromString(request.data)
    except Exception:  # narrowed from bare except
        return json.dumps('Message is not well defined!')
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Collection doesn't exist!")
    collection = db[coll_id]
    try:
        message_collection = collection.find_one({'data_id': data_id})
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to read data from the collection!")
    if message_collection is None:
        return json.dumps("No data with this ID in the collection!")
    # Stringify the ObjectId so the backup can be re-inserted/serialized.
    message_collection['_id'] = str(message_collection['_id'])
    message_backup = message_collection
    try:
        collection.remove({'data_id': data_id})
    except Exception:  # narrowed from bare except
        collection.insert(message_backup)
        return json.dumps("Unable to read data from the database!")
    try:
        collection.insert(protobuf_json.pb2json(raw_data_collection))
    except Exception:  # narrowed from bare except; restore the backup
        collection.insert(message_backup)
        return json.dumps("Unable to store data into the collection!")
    return json.dumps('Message successfully replaced!')
def generate_db(**kwargs):
    """Build a coarser-granularity database from a 'mother' db.

    kwargs: 'db' (mother database name, defaults to global_vals.db_name)
    and 'gran' (required time granularity / step size).  Writes examples
    into '<db>_granularity_<gran>'.
    """
    import functs, utils
    global global_vals
    t = lambda _list: '@'.join(_list) if _list != [] else ''
    db, col, gran = None, None, None
    if 'db' in kwargs:
        db = kwargs['db']
        col = 'examples'
    else:
        db = global_vals.db_name
        col_name = global_vals.db_collection
    if 'gran' not in kwargs:
        print('Please provide time granularity for the database to be generated')
        sys.exit()
    else:
        gran = kwargs['gran']
    # Formatting moved inside the call: the original wrote
    # print('...') % (db, gran), which applies % to None (print's return
    # value) and raises TypeError at runtime.
    print('Generating a new db from mother db %s with time granularity = %s'
          % (db, gran))
    connection = Connection()
    dbs = connection.database_names()
    if db not in dbs:
        print('ERROR: No such mother database exists: %s' % (db))
        sys.exit()
    else:
        db_ = connection[db]
    size = db_.examples.count()
    print('DB contains %s examples' % (str(size)))
    db = db + '_granularity_' + str(gran)
    print('Generating new db: %s' % (db))
    connection.drop_database(db)  # clear if exists
    db = connection[db]
    step = int(gran)
    for i in range(0, size, step):
        exists = True
        anot_i, nar_i, innert_i = [], [], []
        j, k = i - step, i + 1
        cursor = db_.examples.find({'example': {"$gt": j, "$lt": k}})
        (pos, nar) = utils.get_from_cursor(cursor)
        if j >= 0:
            nar.append('starttime(%s)' % (str(j)))
        try:
            post = {'example': i, 'pos': pos, 'nar': nar}
            print('#Example,IntrvStart,IntrvEnd:', i, i - step, i)
        except TypeError:
            print('TypeError at')
            print(anot_i)
            print(nar_i)
            print(innert_i)
            sys.exit()
        db.examples.insert(post)
def databases():
    """List every user database as {name: URL}, excluding mongo's 'local'.

    Returns a JSON-encoded mapping, or an error string on connect failure.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    db_list = {}
    for iter_id in db_names:
        if iter_id != 'local':
            db_list[iter_id] = url_for("database", db_id=iter_id,
                                       _external=True)
    return json.dumps(db_list)
def get_coordinates_rssi(db_id, coll_id, transmitters):
    """
    Given the database and collection IDs, function returns a list of
    measurement locations (x,y) coordinates and the related RSSI
    measurements.

    Note: on error a JSON string is returned instead of the
    (coordinates, rssis) tuple - callers must handle both shapes.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")
    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such collection in the database!")
    try:
        message_collection = collection.find({})
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to read data from the collection!")
    documents = list(message_collection)
    coordinates = []
    rssis = []
    for doc in documents:
        rssi_transmitter = {}
        # The first raw_measurement carries the receiver's location.
        location = doc['raw_measurement'][0]['receiver_location']
        coordinates.append((location['coordinate_x'],
                            location['coordinate_y']))
        # Group RSSI readings by transmitter; setdefault replaces the
        # original bare try/except-KeyError append dance.
        for meas in doc['raw_measurement']:
            if meas['sender_bssid'] in transmitters:
                rssi_transmitter.setdefault(meas['sender_bssid'],
                                            []).append(meas['rssi'])
        rssis.append(rssi_transmitter)
    return coordinates, rssis
def before_building(self, application):
    """Provision a MongoDB database and credentials for *application*.

    When the database does not exist yet it is materialised (mongo
    creates databases lazily, so a throwaway collection is touched) and
    a dbAdmin user is added.
    """
    name = self.generate_name(application)
    login, password = self.generate_auth(application)
    connection = Connection(self.settings.uri)
    try:
        if name not in connection.database_names():
            log.info(_("Creating MongoDB database: {name}").format(name=name))
            db = connection[name]
            # Insert + drop a probe collection so the database actually
            # exists before users are added.
            collection_name = 'upaas_test'
            collection = db[collection_name]
            collection.insert({})
            db.drop_collection(collection_name)
            db.add_user(login, password, roles=['dbAdmin'])
            db.authenticate(login, password)
            db.logout()
    finally:
        # Always close: the original only closed on the creation path,
        # leaking the connection when the database already existed.
        connection.close()
def databases():
    """List every user database as {name: URL}, excluding mongo's 'local'.

    Returns a JSON-encoded mapping, or an error string on connect failure.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    db_list = {}
    for iter_id in db_names:
        if iter_id != 'local':
            db_list[iter_id] = url_for("database", db_id=iter_id,
                                       _external=True)
    return json.dumps(db_list)
def main(): description = 'Print out potentially redundant indexes in a MongoDB instance.' global args parser = ArgumentParser(description=description) parser.add_argument('-H', '--host', default='localhost', help="mongodb host, e.g. 'api.foo.com' default to 'localhost' if not specified") parser.add_argument('-P', '--port', type=int, default=27017, help="mongodb port if not the default 27017") args = parser.parse_args() connection = Connection(host=args.host, port=args.port, read_preference=ReadPreference.SECONDARY) def compute_signature(index): signature = index["ns"] for key in index["key"]: try: signature += "%s_%s" % (key, int(index["key"][key])) except ValueError: signature += "%s_%s" % (key, index["key"][key]) return signature def report_redundant_indexes(current_db): print "Checking DB: %s" % current_db.name indexes = current_db.system.indexes.find() index_map = {} for index in indexes: signature = compute_signature(index) index_map[signature] = index for signature in index_map.keys(): for other_sig in index_map.keys(): if signature == other_sig: continue if other_sig.startswith(signature): print "Index %s[%s] may be redundant with %s[%s]" % ( index_map[signature]["ns"], index_map[signature]["name"], index_map[other_sig]["ns"], index_map[other_sig]["name"]) for db in connection.database_names(): report_redundant_indexes(connection[db])
def store_message(db_id, coll_id):
    """Parse the request body as either a RawRFReadingCollection or a
    Metadata protobuf and insert it (as JSON) into db_id/coll_id.

    Returns a JSON-encoded status string in every case.
    """
    detect_message = 0
    # Try the two supported protobuf message types in turn.
    try:
        raw_data_collection = raw_data_pb2.RawRFReadingCollection()
        raw_data_collection.ParseFromString(request.data)
        detect_message = 1
    except Exception:  # narrowed from bare except
        try:
            raw_metadata = raw_metadata_pb2.Metadata()
            raw_metadata.ParseFromString(request.data)
            detect_message = 2
        except Exception:  # narrowed from bare except
            return json.dumps("Message is not well formated!")
    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")
    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such collection in the database!")
    if detect_message == 1:
        try:
            collection.insert(protobuf_json.pb2json(raw_data_collection))
        except Exception:  # narrowed from bare except
            return json.dumps("Unable to store data into the database!")
    else:
        try:
            collection.insert(protobuf_json.pb2json(raw_metadata))
        except Exception:  # narrowed from bare except
            return json.dumps("Unable to store data into the database!")
    return json.dumps("Data stored!")
def delete_database(db_id):
    """Drop the database db_id.

    Returns a JSON-encoded status string in every case.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    try:
        connection.drop_database(db_id)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to delete the database")
    return json.dumps("Database successfully deleted!")
def delete_database(db_id):
    """Drop the database db_id.

    Returns a JSON-encoded status string in every case.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    try:
        connection.drop_database(db_id)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to delete the database")
    return json.dumps('Database successfully deleted!')
def check_data(host, port): status = "Normal" try: content = {"hostname": hostname, "stat": "unupdate", "datetime": d} cnx = Connection(host=host, port=port) dbs = cnx.database_names() exclude = ["admin", "local"] for e in exclude: if e in dbs: dbs.remove(e) for db in dbs: database = cnx[db] #进行删除测试 database.yunwei_check.remove({"hostname": hostname}) #检查删除测试 result = database.yunwei_check.find_one({"hostname": hostname}) if result: info = "host: %s, port: %s, database: %s delete operation failed" % ( hostname, port, db) inotify(info, email) #进行插入测试 database.yunwei_check.insert(content) #检查插入测试 result = database.yunwei_check.find_one(content) if not result: info = "host: %s, port: %s, database: %s insert operation failed" % ( hostname, port, db) inotify(info, email) #进行更改测试 database.yunwei_check.update({"hostname": hostname}, {"$set": { "stat": "update" }}) #检查更改测试 result = database.yunwei_check.find_one({"stat": "update"}) if not result: info = "host: %s, port: %s, database: %s update operation failed" % ( hostname, port, db) inotify(info, email) except Exception, err: info = '"fail","host: %s,port %s,mongodb check error, error info:%s"' % ( hostname, port, err) inotify(info, email)
def change_message(db_id, coll_id, data_id):
    """Update fields of the message with the given data_id using the JSON
    object in the request body, restoring the original on failure.

    Returns a JSON-encoded status string in every case.
    """
    new_message_parameters = json.loads(request.data)
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Collection doesn't exist!")
    collection = db[coll_id]
    try:
        message_collection = collection.find_one({'data_id': data_id})
    except Exception:  # narrowed from bare except
        return json.dumps("Unable to read data from the collection!")
    if message_collection is None:
        return json.dumps("No data with this ID in the collection!")
    # Stringify the ObjectId so the document can be re-inserted.
    message_collection['_id'] = str(message_collection['_id'])
    message_backup = message_collection
    # Apply the requested field changes on top of the stored document.
    for key in new_message_parameters.keys():
        message_collection[key] = new_message_parameters[key]
    try:
        collection.remove({'data_id': data_id})
        collection.insert(message_collection)
    except Exception:  # narrowed from bare except; restore the backup
        collection.insert(message_backup)
        return json.dumps("Unable to store data into the database!")
    return json.dumps('Message successfully replaced!')
def collection(db_id, coll_id):
    """List every message in a collection.

    Returns a JSON object keyed by running index; each entry carries the
    stringified ``_id``, the ``data_id``, and an absolute URI for the
    corresponding ``message`` endpoint.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except:
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")
    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such collection in the database!")
    try:
        message_collection = collection.find({})
    except:
        return json.dumps("Unable to read data from the collection!")
    # IDIOM FIX: iterate the cursor with enumerate instead of
    # materialising a list and indexing it with range(len(...)).
    message_collection_list = {}
    for i, doc in enumerate(message_collection):
        message_collection_list[i] = {
            '_id': str(doc['_id']),
            'data_id': doc['data_id'],
            'URI': url_for(
                "message",
                db_id=db_id,
                coll_id=coll_id,
                data_id=doc['data_id'],
                _external=True),
        }
    return json.dumps(message_collection_list)
def change_message(db_id, coll_id, data_id):
    """Update fields of the message identified by ``data_id``.

    The JSON request body supplies the key/value pairs to merge into the
    stored document; the old document is removed and the merged one
    re-inserted.  Returns a JSON-encoded status string in every case.
    """
    new_message_parameters = json.loads(request.data)
    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except:
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Collection doesn't exist!")
    collection = db[coll_id]
    try:
        message_collection = collection.find_one({"data_id": data_id})
    except:
        return json.dumps("Unable to read data from the collection!")
    if message_collection is None:
        return json.dumps("No data with this ID in the collection!")
    message_collection["_id"] = str(message_collection["_id"])
    # BUG FIX: take a shallow copy for the backup.  The original bound
    # message_backup to the SAME dict, so after the merge below the
    # "restore" path re-inserted the already-mutated document.
    message_backup = dict(message_collection)
    for key in new_message_parameters.keys():
        message_collection[key] = new_message_parameters[key]
    try:
        collection.remove({"data_id": data_id})
        collection.insert(message_collection)
    except:
        # Restore the pre-merge document on failure.
        collection.insert(message_backup)
        return json.dumps("Unable to store data into the database!")
    return json.dumps("Message successfully replaced!")
def database(db_id):
    """Return a JSON map of the database's collections to experiment URIs."""
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")
    if db_id not in connection.database_names():
        return json.dumps("No such database!")
    db = connection[db_id]
    # system.indexes is MongoDB's internal bookkeeping collection — skip it.
    coll_list = {
        name: url_for("experiment", db_id=db_id, coll_id=name, _external=True)
        for name in db.collection_names()
        if name != 'system.indexes'
    }
    return json.dumps(coll_list)
def create_database():
    """Create a new MongoDB database named by the raw request body.

    Returns a JSON-encoded status string in every case.
    """
    db_id = request.data
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        return json.dumps("Database already exists!")
    try:
        db = connection[db_id]
        # MongoDB only materialises a database once it holds a collection,
        # so create a temporary one (dropped below).
        # CLEANUP: the original kept the returned handle in an unused
        # local `coll`.
        db.create_collection('test_tmp')
    except:
        return json.dumps("Unable to create new database")
    db.test_tmp.drop()
    return json.dumps('Database successfully created!')
def replace_location(db_id, coll_id):
    """Replace a collection's contents with the protobuf Experiment
    message carried in the request body.

    Returns a JSON-encoded status string in every case.
    """
    experiment_collection = experiment_results_pb2.Experiment()
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")
    try:
        experiment_collection.ParseFromString(request.data)
    except:
        return json.dumps('Message is not well defined!')
    db_names = connection.database_names()
    if db_id not in db_names:
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    coll_names = db.collection_names()
    if coll_id not in coll_names:
        return json.dumps("Collection doesn't exist!")
    collection = db[coll_id]
    try:
        # BUG FIX: remove() deletes EVERY document in the collection, but
        # the original backed up only find_one({}) — a failed restore lost
        # all other documents.  Snapshot the full contents instead.
        collection_backup = list(collection.find())
        collection.remove()
    except:
        return json.dumps("Unable to read data from the database!")
    try:
        collection.insert(protobuf_json.pb2json(experiment_collection))
    except:
        # Restore the previous contents; skip when the collection was
        # empty (inserting an empty payload would itself raise).
        if collection_backup:
            collection.insert(collection_backup)
        return json.dumps("Unable to store data into the database!")
    return json.dumps('Message successfully replaced!')
def collection(db_id, coll_id):
    """List every message in a collection.

    Returns a JSON object keyed by running index; each entry carries the
    stringified ``_id``, the ``data_id``, the ``metadata_id``, and an
    absolute URI for the corresponding ``message`` endpoint.
    """
    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except:
        return json.dumps("Unable to connect to the database!")
    db_names = connection.database_names()
    if db_id in db_names:
        db = connection[db_id]
    else:
        return json.dumps("No such database!")
    coll_names = db.collection_names()
    if coll_id in coll_names:
        collection = db[coll_id]
    else:
        return json.dumps("No such collection in the database!")
    try:
        message_collection = collection.find({})
    except:
        return json.dumps("Unable to read data from the collection!")
    # IDIOM FIX: iterate the cursor with enumerate instead of
    # materialising a list and indexing it with range(len(...)).
    message_collection_list = {}
    for i, doc in enumerate(message_collection):
        message_collection_list[i] = {
            "_id": str(doc["_id"]),
            "data_id": doc["data_id"],
            "metadata_id": doc["metadata_id"],
            "URI": url_for(
                "message",
                db_id=db_id,
                coll_id=coll_id,
                data_id=doc["data_id"],
                _external=True
            ),
        }
    return json.dumps(message_collection_list)
def __init__(self, saveName):
    """Open the trace database named ``saveName`` and build the access
    helpers around it.

    Raises ValueError when no database with that name exists.
    NOTE(review): Python 2 syntax (print statement, ``raise E, msg``).
    """
    client = MongoClient()
    if saveName in client.database_names():
        self.db = client[saveName]
    else:
        print saveName
        raise ValueError, "File not found"
    # Single metadata document describing this trace.
    self.meta = self.db.meta.find_one()
    self.cycleFactory = CycleFactory(self.db, self)
    self.saveName = saveName
    self.memDumpAddr = 0
    try:
        self.memDumpAddr = int(self.meta['memDumpAddr'])
    except:
        # Missing or non-numeric memDumpAddr: keep the 0 default.
        pass
    self.memory = MemoryHistory(self)
    # We aren't thread-safe, must use target.getLock()
    # with all DB access for each request :(
    self.lock = Lock()
def delete_collection(db_id, coll_id):
    """Drop an experiment (collection) from the given database."""
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")
    if db_id not in connection.database_names():
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    if coll_id not in db.collection_names():
        return json.dumps("Experiment doesn't exist!")
    try:
        db.drop_collection(coll_id)
    except:
        return json.dumps("Unable to delete the experiment")
    return json.dumps('Experiment successfully deleted!')
def delete_collection(db_id, coll_id):
    """Drop the collection ``coll_id`` from database ``db_id``."""
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except:
        return json.dumps("Unable to connect to the database!")
    if db_id not in connection.database_names():
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    if coll_id not in db.collection_names():
        return json.dumps("Collection doesn't exist!")
    try:
        db.drop_collection(coll_id)
    except:
        return json.dumps("Unable to delete the collection")
    return json.dumps('Collection successfully deleted!')
def delete_collection(db_id, coll_id):
    """Drop the collection ``coll_id`` from database ``db_id``."""
    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except:
        return json.dumps("Unable to connect to the database!")
    if db_id not in connection.database_names():
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    if coll_id not in db.collection_names():
        return json.dumps("Collection doesn't exist!")
    try:
        db.drop_collection(coll_id)
    except:
        return json.dumps("Unable to delete the collection")
    return json.dumps("Collection successfully deleted!")
def database(db_id):
    """Return a JSON map of the database's collections to experiment URIs."""
    # Connect to the database MongoDB
    try:
        connection = Connection('localhost', 12345)
    except:
        return json.dumps("Unable to connect to the database!")
    if db_id not in connection.database_names():
        return json.dumps("No such database!")
    db = connection[db_id]
    coll_list = {}
    for name in db.collection_names():
        # system.indexes is MongoDB's internal bookkeeping — skip it.
        if name == 'system.indexes':
            continue
        coll_list[name] = url_for("experiment",
                                  db_id=db_id,
                                  coll_id=name,
                                  _external=True)
    return json.dumps(coll_list)
def delete_message(db_id, coll_id, data_id):
    """Delete the message with ``data_id`` from the given collection."""
    # Connect to the database MongoDB
    try:
        connection = Connection(hostname, port_number)
    except:
        return json.dumps("Unable to connect to the database!")
    if db_id not in connection.database_names():
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    if coll_id not in db.collection_names():
        return json.dumps("Collection doesn't exist!")
    try:
        db[coll_id].remove({"data_id": data_id})
    except:
        return json.dumps("Unable to delete the message")
    return json.dumps('Message successfully deleted!')
def view(server_oid):
    """Render the detail page for one registered MongoDB server."""
    server = g.db['mangoadmin']['servers'].find_one({
        '_id': ObjectId(server_oid)
    })
    if not server:
        flash('Server %s not found' % server_oid, 'error')
        return redirect('/servers')
    connection = Connection(host=server['address'], port=int(server['port']))
    server_info = connection.server_info()
    # Per-database, per-collection document and index counts
    # (system.indexes is internal bookkeeping and is skipped).
    databases = {}
    for database in connection.database_names():
        stats = {}
        for coll in connection[database].collection_names():
            if coll == 'system.indexes':
                continue
            handle = connection[database][coll]
            stats[coll] = {
                'count': handle.count(),
                'index_count': len(handle.index_information().keys()),
            }
        databases[database] = stats
    return render_template(
        'server_view.html',
        server=server,
        server_info=server_info,
        databases=databases
    )
def delete_message(db_id, coll_id, data_id):
    """Delete the message with ``data_id`` from the given collection."""
    # Connect to the database MongoDB
    try:
        connection = Connection("localhost", 27017)
    except:
        return json.dumps("Unable to connect to the database!")
    if db_id not in connection.database_names():
        return json.dumps("Database doesn't exist!")
    db = connection[db_id]
    if coll_id not in db.collection_names():
        return json.dumps("Collection doesn't exist!")
    try:
        db[coll_id].remove({"data_id": data_id})
    except:
        return json.dumps("Unable to delete the message")
    return json.dumps("Message successfully deleted!")
import cherrypy import os import json from threading import Lock from time import time as systemtime from TargetTrace import TargetTrace from taint import * from pymongo import Connection as MongoClient client = MongoClient() traces = {} for name in client.database_names(): if name == "local" or name == "test": continue print name traces[name] = TargetTrace(name) if len(traces) == 0: print >> sys.stderr, "No traces found in database, no point in starting the GUI" print >> sys.stderr, "Create traces using the preprocess tools (see wiki for details)" os.sys.exit(1) defaultTrace = traces.keys()[0] def parseExpr(x): return int(x, 16) def getTrace(): try: return cherrypy.session['trace']
def do_server_status(self): host = self.mongo_host port = self.mongo_port user = self.mongo_user passwd = self.mongo_password perf_data = False con = Connection(host=self.mongo_host, port=self.mongo_port, slave_okay=True) if not self.mongo_db: self.mongo_db = con.database_names() db = con[self.mongo_db[0]] if self.mongo_user and self.mongo_password: db.authenticate(self.mongo_user, self.mongo_password) server_status = db.command('serverStatus') version = server_status['version'] at_least_2_4 = V(version) >= V('2.4.0') # operations for k, v in server_status['opcounters'].items(): self.submit('total_operations', k, v) # memory for t in ['resident', 'virtual', 'mapped']: self.submit('memory', t, server_status['mem'][t]) # connections self.submit('connections', 'connections', server_status['connections']['current']) # locks if self.lockTotalTime is not None and self.lockTime is not None: if self.lockTime == server_status['globalLock']['lockTime']: value = 0.0 else: value = float(server_status['globalLock']['lockTime'] - self.lockTime) * 100.0 / float( server_status['globalLock']['totalTime'] - self.lockTotalTime) self.submit('percent', 'lock_ratio', value) self.lockTotalTime = server_status['globalLock']['totalTime'] self.lockTime = server_status['globalLock']['lockTime'] # indexes accesses = None misses = None index_counters = server_status[ 'indexCounters'] if at_least_2_4 else server_status[ 'indexCounters']['btree'] if self.accesses is not None: accesses = index_counters['accesses'] - self.accesses if accesses < 0: accesses = None misses = (index_counters['misses'] or 0) - (self.misses or 0) if misses < 0: misses = None if accesses and misses is not None: self.submit('cache_ratio', 'cache_misses', int(misses * 100 / float(accesses))) else: self.submit('cache_ratio', 'cache_misses', 0) self.accesses = index_counters['accesses'] self.misses = index_counters['misses'] for mongo_db in self.mongo_db: db = con[mongo_db] if self.mongo_user and self.mongo_password: 
db.authenticate(self.mongo_user, self.mongo_password) db_stats = db.command('dbstats') # stats counts self.submit('counter', 'object_count', db_stats['objects'], mongo_db) self.submit('counter', 'collections', db_stats['collections'], mongo_db) self.submit('counter', 'num_extents', db_stats['numExtents'], mongo_db) self.submit('counter', 'indexes', db_stats['indexes'], mongo_db) # stats sizes self.submit('file_size', 'storage', db_stats['storageSize'], mongo_db) self.submit('file_size', 'index', db_stats['indexSize'], mongo_db) self.submit('file_size', 'data', db_stats['dataSize'], mongo_db) # Replica check rs_status = {} slaveDelays = {} try: # Get replica set status try: rs_status = con.admin.command("replSetGetStatus") except pymongo.errors.OperationFailure, e: if e.code == None and str(e).find( 'failed: not running with --replSet"'): print "OK - Not running with replSet" con.disconnect() return 0 rs_conf = con.local.system.replset.find_one() for member in rs_conf['members']: if member.get('slaveDelay') is not None: slaveDelays[member['host']] = member.get('slaveDelay') else: slaveDelays[member['host']] = 0 # Find the primary and/or the current node primary_node = None host_node = None for member in rs_status["members"]: if member["stateStr"] == "PRIMARY": primary_node = member if member["name"].split(':')[0] == host and int( member["name"].split(':')[1]) == port: host_node = member # Check if we're in the middle of an election and don't have a primary if primary_node is None: print "WARNING - No primary defined. In an election?" con.disconnect() return 1 # Check if we failed to find the current host # below should never happen if host_node is None: print "CRITICAL - Unable to find host '" + host + "' in replica set." con.disconnect() return 2 # Is the specified host the primary? if host_node["stateStr"] == "PRIMARY": if max_lag == False: print "OK - This is the primary." 
con.disconnect() return 0 else: #get the maximal replication lag data = "" maximal_lag = 0 for member in rs_status['members']: if not member['stateStr'] == "ARBITER": lastSlaveOpTime = member['optimeDate'] replicationLag = abs( primary_node["optimeDate"] - lastSlaveOpTime ).seconds - slaveDelays[member['name']] data = data + member[ 'name'] + " lag=%d;" % replicationLag maximal_lag = max(maximal_lag, replicationLag) # send message with maximal lag message = "Maximal lag is " + str(maximal_lag) + " seconds" print message self.submit('replication', 'maximal-lag-seconds', str(maximal_lag)) # send message with maximal lag in percentage err, con = mongo_connect( primary_node['name'].split(':')[0], int(primary_node['name'].split(':')[1]), False, user, passwd) if err != 0: con.disconnect() return err primary_timediff = replication_get_time_diff(con) maximal_lag = int( float(maximal_lag) / float(primary_timediff) * 100) message = "Maximal lag is " + str( maximal_lag) + " percents" print message self.submit('replication', 'maximal-lag-percentage', str(maximal_lag)) con.disconnect() return str(maximal_lag) elif host_node["stateStr"] == "ARBITER": print "OK - This is an arbiter" con.disconnect() return 0 # Find the difference in optime between current node and PRIMARY optime_lag = abs(primary_node["optimeDate"] - host_node["optimeDate"]) if host_node['name'] in slaveDelays: slave_delay = slaveDelays[host_node['name']] elif host_node['name'].endswith(':27017') and host_node[ 'name'][:-len(":27017")] in slaveDelays: slave_delay = slaveDelays[host_node['name'][:-len(":27017")]] else: raise Exception( "Unable to determine slave delay for {0}".format( host_node['name'])) try: # work starting from python2.7 lag = optime_lag.total_seconds() except: lag = float(optime_lag.seconds + optime_lag.days * 24 * 3600) # send message with lag message = "Lag is " + str(lag) + " seconds" print message self.submit('replication', 'lag-seconds', str(lag)) # send message with lag in percentage err, 
con = mongo_connect(primary_node['name'].split(':')[0], int(primary_node['name'].split(':')[1]), False, user, passwd) if err != 0: con.disconnect() return err primary_timediff = replication_get_time_diff(con) if primary_timediff != 0: lag = int(float(lag) / float(primary_timediff) * 100) else: lag = 0 message = "Lag is " + str(lag) + " percents" print message self.submit('replication', 'lag-percentage', str(lag)) con.disconnect() return str(lag)