示例#1
0
def entityProperties(entity):
	# Conexion a Mongo
	conn = Connection() 		
	db = conn.grupo10_taller4
	colTypeQueryCache = db.typeQueryCache
	colDescribeQueryCache = db.describeQueryCache
	
	# Revisar si la consulta existe en el cache
	describeCache = colDescribeQueryCache.find({"term":entity})
	if (describeCache.count() > 0):
		print "Describe cache hit for %s" % entity
		for cacheResult in describeCache:
			return cacheResult["results"]
			
	sparql = SPARQLWrapper("http://dbpedia.org/sparql")
	sparql.setQuery("""
	PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>

	DESCRIBE ?object WHERE {
	?object rdfs:label '%s'@en .
	}
	""" % entity
	)
	sparql.setReturnFormat(JSON)
	results = sparql.query().convert()
	queryRes = [{"term":entity,"results":results}]
	colDescribeQueryCache.insert(queryRes)
	conn.disconnect()
	return results
示例#2
0
class DB(object):
    """Lazily-connected MongoDB wrapper addressed by a mongodb:// URI.

    The constructor only parses the URI; the actual connection is opened on
    first collection access and closed when the object is garbage-collected.
    """

    def __init__(self, uri):
        # Anything that is not a mongodb:// URI is treated as the name of an
        # environment variable that holds one.
        if not uri.startswith('mongodb://'):
            uri = environ[uri]
        parsed = urlparse(uri)
        self.URI = {
            'host': parsed.hostname,
            'port': parsed.port,
            'username': parsed.username,
            'password': parsed.password,
            'database_name': parsed.path[1:],
        }
        self._uri = uri
        self._conn = None

    def __collection(self, collection):
        # Open the connection on first use only.
        if not self._conn:
            self._conn = Connection(self._uri)
        db = self._conn[self.URI['database_name']]
        return db[collection]

    def __getattr__(self, collection):
        # db.users -> collection "users"
        return self.__collection(collection)

    def __getitem__(self, collection):
        # db['users'] -> collection "users"
        return self.__collection(collection)

    def __del__(self):
        if self._conn:
            self._conn.disconnect()
示例#3
0
def setup_database():
    """Drop and re-import test collections into the checkin_service database.

    Relies on module-level names: `drop` (bool flag), `DUMP_DIR`, `call`
    (subprocess), and `os`. Shells out to `mongoimport` for each collection.
    """
    # Disconnect from the default mongo db, and use a test db instead.
    
    conn = Connection()
    checkin_service = conn["checkin_service"]
    results = conn["results"]
      
    # Re-import each fixture collection; optionally drop it first.
    for collection in ['splice_server', 'marketing_product_usage']:
        # NOTE(review): `drop` is not defined in this function — presumably a
        # module-level flag; confirm it exists at call time.
        if drop:
          print('DROPPING')
          checkin_service.drop_collection(collection)
        print 'importing %s collection' % collection
        call(['mongoimport', '--db', 'checkin_service', '-c', collection, '--file', 
              '%s.json' % os.path.join(DUMP_DIR, collection)]
              )
    # The block below is intentionally disabled (kept as a no-op string).
    """    
    for collection in ['marketing_report_data']:
            results.drop_collection(collection)
            print 'importing %s collection' % collection
            call(['mongoimport', '--db', 'results', '-c', collection, '--file', 
                  '%s.json' % os.path.join(DUMP_DIR, collection)]
                  ) 
    """

            
    conn.disconnect()
示例#4
0
def repair(conf, options):
    '''fixes issues created by myself.  Currently, only orphan files and
    item links are detected and automatically removed.

    Compares GridFS file ids against metadata ids in db.items, deletes
    metadata with no backing file, drops orphan file ids from the working
    list, then rewrites each account's item list to reference only
    still-existing files. (Python 2: filter() returns lists here.)
    '''

    con = Connection(conf['MONGODB_HOST'], conf['MONGODB_PORT'])
    db = con[conf['MONGODB_NAME']]
    fs = GridFS(db)

    # All GridFS file ids vs. all metadata ids (order-sensitive comparison).
    objs = [obj['_id'] for obj in fs._GridFS__files.find()]
    meta = [cur['_id'] for cur in db.items.find()]

    if objs != meta:
        # 1. metadata has some files missing, no repair possible
        diff1 = filter(lambda i: not i in objs, meta)
        diff2 = filter(lambda i: not i in meta, objs)
        for item in diff1:
            print 'removing metadata for `%s`' % item
            # NOTE(review): passes the raw id to remove() — relies on
            # pymongo's remove(spec_or_id) accepting a bare _id.
            db.items.remove(item)

        # 2. metadata is missing, but file is there. Recover possible, but not implemented #win
        for item in diff2:
            print 'removing GridFS-File `%s`' % item
            # Only dropped from the working list; the GridFS file itself
            # is not deleted here.
            objs.remove(item)

    # rebuild accounts items, when something changed
    for cur in db.accounts.find():
        # Skip internal documents whose _id starts with '_'.
        if str(cur['_id']).startswith('_'):
            continue
        _id, items = cur['_id'], cur['items']
        # Keep only items whose file still exists.
        items = filter(lambda i: i in objs, items)
        db.accounts.update({'_id': _id}, {'$set': {'items': items}})

    con.disconnect()
示例#5
0
 def dropCollectionInDB(self, DBName, collection):
     """Drop *collection* from database *DBName* on the local mongod."""
     client = Connection('localhost', 27017)
     database = client[DBName]
     # Sanity check: the database handle belongs to this connection.
     assert database.connection == client
     database[collection].drop()
     client.disconnect()
     return
 def dropCollectionInDB(self, DBName, collection):
     """Remove an entire collection from the given local database."""
     conn = Connection('localhost', 27017)
     target_db = conn[DBName]
     assert target_db.connection == conn  # handle must come from this conn
     target_db[collection].drop()
     conn.disconnect()
     return
示例#7
0
def activate(conf, options, args):
    '''When PUBLIC_REGISTRATION is set to false, you have to activate
    registered accounts manually by invoking "activate $email".

    With an email argument: activates that account (sets activated_at to
    the current UTC timestamp). Without one: lists pending accounts.
    '''

    con = Connection(conf['MONGODB_HOST'], conf['MONGODB_PORT'])
    accounts = con[conf['MONGODB_NAME']].accounts

    if len(args) == 2:
        # args[1] is the email address to activate.
        acc = accounts.find_one({'email': args[1]})
        if not acc:
            print '`%s` does not exist' % args[1]
            sys.exit(1)
        elif acc['activated_at'] != None:
            print '`%s` already activated' % args[1]
        else:
            # Stamp with an ISO-8601 UTC timestamp.
            act = {'activated_at': strftime('%Y-%m-%dT%H:%M:%SZ', gmtime())}
            accounts.update({'_id': acc['_id']}, {'$set': act})
            print '`%s` activated' % args[1]
    else:
        # Accounts with an explicit falsy activated_at are pending; accounts
        # missing the field entirely default to True and are skipped.
        inactivated = [acc for acc in accounts.find() if not acc.get('activated_at', True)]
        if not inactivated:
            print 'no pending non-activated accounts'
        for acc in inactivated:
            print '%s [%s]' % (acc['email'].ljust(16), acc['created_at'])

    con.disconnect()
示例#8
0
 def __init__(self, tweet, host = 'localhost', database = 'twitter_grafico', displayed = False):
     """Build a Tweet wrapper from an API tweet object and persist it.

     Saves the author as a User, then either loads cached image/text for an
     already-stored tweet or scrapes and saves a new one.
     """
     self.id = tweet.id
     self.user = tweet.user.id
     self.message = tweet.text
     # The created_at string arrives in GMT (+0000) and is parsed naive.
     self.created_at = datetime.strptime(tweet.created_at,"%a %b %d %H:%M:%S +0000 %Y")
     self.url = self.get_url(self.message)
     # Persist the author before the tweet itself.
     User(tweet.user.id, tweet.user.screen_name, tweet.user.followers_count).save()
     connection = Connection(host = host)
     result = connection[database].tweets.find_one({"id":self.id})
     self.image = None
     self.keywords = None
     self.text = None
     self.massive = False
     # NOTE(review): `displayed` is only set when True — otherwise the
     # attribute does not exist on the instance; confirm callers expect that.
     if displayed:
         self.displayed = True
     self.check_massive_users(tweet.user)
     if result:
         # Tweet already stored: reuse the previously extracted content.
         self.image = result.get('image')
         self.text = result.get('text')
     else:
         if self.url:
             # Scrape the linked page for a representative image/keywords.
             params = self.get_representative_image_keywords(self.url)
             self.image = params.get('image')
             self.keywords = params.get('keywords')
             self.text = params.get('text')
         self.save(host = host, database = database)
     connection.disconnect()
	def registerUser(self,params):
		"""Register params['email']/params['password'] in 'invitations'.

		Returns True on any error (invalid input, duplicate email, DB
		failure) and False on success.
		"""
		# Connect to the database.
		conn_ = Connection()
		db_ = conn_['blogomatic']
		err = False
		
		try:
			# Authenticate against the database.
			# NOTE(review): credentials are hard-coded in source — move to config.
			db_.authenticate('blogomatic','bnBN98cv.mongo')
			inv_ = db_['invitations']

			# Require a minimally plausible email and a password of length >= 6.
			if len(params['email']) < 3 or not('@' in params['email'] ) or len(params['password']) < 6 :
				err = True
			
			else:
				uDoc = inv_.find_one({'email': params['email']})
			
				if uDoc == None:
					# New user: store email and password and cap the account
					# at 20 blog instances.
					# NOTE(review): password is stored in plain text.
					uDoc = {'email': params['email'],'password': params['password'],'instances': 20}
					inv_.save(uDoc)

				else:
					# Email already registered.
					err = True
					
		except TypeError:
			err = True
		except:
			# Any other failure is also reported as a generic error.
			err = True
		finally:
			db_.logout()
			conn_.disconnect()
			# NOTE(review): returning from finally swallows any in-flight
			# exception, including ones raised by logout()/disconnect().
			return err
示例#10
0
class MongoPersistence():
    """Persist to-do lists in the ``td.tdlist`` Mongo collection.

    Call setup() before use and teardown() when finished; relies on the
    module-level db_host/db_port settings and the tdlist_todict serializer.
    """

    def __init__(self):
        pass

    def setup(self):
        # The connection is opened here, not in __init__.
        self.__connection = Connection(db_host, db_port)
        self.__tdlist_collection = self.__connection.td.tdlist

    def teardown(self):
        self.__connection.disconnect()

    def persist(self, tdlist):
        # Example document shape:
        #   {"date": "2012-FEB-06",
        #    "tds": [{"status": "DONE", "number": 1, "text": "elso feladat"},
        #            {"status": "TODO", "number": 2, "text": "masodik feladat"}]}
        self.__tdlist_collection.insert(tdlist_todict(tdlist))

    def search(self, date_str):
        """Return the stored document for *date_str*, or None."""
        return self.__tdlist_collection.find_one({"date": date_str})

    def remove(self, date_str):
        """Delete every stored list matching *date_str*."""
        self.__tdlist_collection.remove({"date": date_str})
示例#11
0
def uploadStudy(studyid):
    """Parse every data file under Studies/<studyid>/ and load it into Mongo.

    The header line of each file defines the GSM sample order (stored in the
    ``<studyid>order`` collection); each later line becomes one gene document
    in the ``<studyid>`` collection. Marks the request queued and indexes
    canonical_id when a file completes.
    """
    # Path for study files.
    path = 'Studies/' + studyid + '/'
    listing = os.listdir(path)

    connection = Connection(HOST, PORT)
    db = connection.MINE

    # Process all study files (the log files are excluded).
    for file in listing:
        if file == 'log.txt' or file == 'logx.txt':
            continue
        try:
            logger.info("trying to open " + file)
            f = open(path + file)
            count = 0

            logger.info("trying to read lines")
            for line in f:
                count = count + 1

                if count > 1:
                    logger.debug("reading line " + str(count))
                    try:
                        # Tab-separated row: gene id followed by values.
                        cells = line.split("\t")
                        cells[-1] = cells[-1].rstrip()
                        doc = {"id": cells[0], "canonical_id": cells[0].upper(), "data": cells[1:]}
                        db[studyid].insert(doc)
                    except:
                        # BUG FIX: the original concatenated the int `count`
                        # into the message, which raised TypeError itself.
                        logger.error("line " + str(count) + " could not be read")
                else:
                    # Header line: sample ids (';'-separated groups allowed)
                    # define the column order.
                    logger.debug("header line read")
                    gsm_ids = line.rstrip().split("\t")[1:]
                    n = 1
                    for id in gsm_ids:
                        for x in id.split(";"):
                            db[studyid + "order"].insert({"no": n, "GSM": x})
                            n += 1

            logger.info("lines successfully read")
            logger.info("closing " + file)
            f.close()

            # Mark request queued and index the lookup key.
            markRequestQueued(studyid)
            db[studyid].ensure_index("canonical_id", 1)
        except:
            logger.error("Error parsing file  " + file + "See last line to see where program failed")

    Connection.disconnect(connection)
示例#12
0
def retrieveAttrData(studyid):
    """Return, per time point, the varying GSM attributes of its sample."""
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE

    # Keep only attributes whose value actually varies between samples;
    # the value 2 marks an included field, _id is explicitly excluded.
    wantedAttrs = {}
    for attribute in list(db[studyid + 'GSM'].find_one().keys()):
        values = db[studyid + 'GSM'].find().distinct(attribute)
        if len(values) > 1 and not attribute == '_id':
            wantedAttrs[attribute] = 2
    wantedAttrs["_id"] = 0

    retlist = []
    n = 1
    # One iteration per stored time point.
    for _ in db[studyid].find_one()['data']:
        # Map position n to its GSM accession, then fetch that sample's
        # selected attributes.
        sample_id = db[studyid + "order"].find({"no": n}).distinct("GSM")[0]
        matches = db[studyid + "GSM"].find(spec={"geo_accession": sample_id},
                                           fields=wantedAttrs)
        for match in matches:
            retlist.append({"sample": match})
        n += 1

    # Disconnect.
    Connection.disconnect(connection)

    return retlist
示例#13
0
	def searchBlog(self,params):
		"""Search blogs whose title matches any space-separated keyword in
		params['search'] (case-insensitive). Returns {url: title}; empty
		dict on any error.
		"""
		# Connect to the database.
		conn_ = Connection()
		db_ = conn_['blogomatic']
		ret = {}
		
		try:
			# Authenticate against the database.
			# NOTE(review): credentials are hard-coded in source.
			db_.authenticate('blogomatic','bnBN98cv.mongo')					
			blogs_ = db_['blogs']
			keywords = params['search'].split(' ')

			for keyword in keywords:
				# Case-insensitive regex search on the title for each keyword.
				# NOTE(review): the keyword is used as a raw regex pattern —
				# special characters are not escaped.
				for blog in blogs_.find({'title': {'$regex': keyword, '$options':'i'}}):
					ret[blog['url']] = blog['title']
					
		except TypeError:
			ret = {}
		except:
			ret = {}
		finally:
			db_.logout()
			conn_.disconnect()
			# NOTE(review): returning from finally swallows any in-flight
			# exception raised above.
			return ret			
示例#14
0
def activate(conf, options, args):
    '''When PUBLIC_REGISTRATION is set to false, you have to activate
    registered accounts manually by invoking "activate $email".

    With an email argument: marks that account activated (UTC timestamp).
    Without one: prints the accounts still waiting for activation.
    '''

    con = Connection(conf['MONGODB_HOST'], conf['MONGODB_PORT'])
    accounts = con[conf['MONGODB_NAME']].accounts

    if len(args) == 2:
        # args[1] is the email address to activate.
        acc = accounts.find_one({'email': args[1]})
        if not acc:
            print '`%s` does not exist' % args[1]
            sys.exit(1)
        elif acc['activated_at'] != None:
            print '`%s` already activated' % args[1]
        else:
            # ISO-8601 UTC timestamp.
            act = {'activated_at': strftime('%Y-%m-%dT%H:%M:%SZ', gmtime())}
            accounts.update({'_id': acc['_id']}, {'$set': act})
            print '`%s` activated' % args[1]
    else:
        # Pending accounts have an explicit falsy activated_at; documents
        # missing the field default to True and are treated as activated.
        inactivated = [
            acc for acc in accounts.find()
            if not acc.get('activated_at', True)
        ]
        if not inactivated:
            print 'no pending non-activated accounts'
        for acc in inactivated:
            print '%s [%s]' % (acc['email'].ljust(16), acc['created_at'])

    con.disconnect()
def index():
    """Render the index page, optionally pre-filling the edit form.

    When the request carries a valid `_id`, the matching document's values
    become the defaults of the form fields.
    """
    connection = Connection(MONGODB_HOST, MONGODB_PORT)
    collection = connection[DBS_NAME][COLLECTION_NAME]
    _id = request.args.get('_id','')
    selected = None
    try:
        selected = collection.find({'_id':ObjectId(_id)}).next()
    except:
        # Missing/invalid _id simply means "nothing selected".
        pass
    # BUG FIX: materialise the cursor before disconnecting — the original
    # handed a live cursor to the template after closing the connection.
    items = list(collection.find({'visible':'True'}).sort('name'))
    connection.disconnect()
    fields = deepcopy(FIELDS)
    editform = False
    if selected is not None:
        editform = True
        # Copy the selected document's values into the form defaults.
        for field in fields:
            try:
                field['default_value'] = selected[field['name']]
            except:
                field['default_value'] = ''
    return render_template("index.html",
                        items=items,
                        selected=selected,
                        fields=fields,
                        editform=editform,
                        )
def retrieveChartData(studyid, gene_x, gene_y):
    """Return x/y expression pairs for two genes plus per-sample metadata.

    For each time point n: x/y values for the genes (matched by upper-cased
    canonical_id) and the sample's parsed characteristics.
    (Rewritten with consistent indentation — the original mixed tabs and
    spaces, a TabError under Python 3 — and without shadowing `next`.)
    """
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE

    # Expression vectors for both genes.
    x = list( db[studyid].find({"canonical_id":gene_x.upper()}) )[0]['data']
    y = list( db[studyid].find({"canonical_id":gene_y.upper()}) )[0]['data']

    n = 1
    ret = []
    for t in x:
        entry = {}
        # Map position n back to its GSM accession, then fetch the sample's
        # "key: value" characteristic strings.
        sample_id = db[studyid + "order"].find({"no": n}).distinct("GSM")[0]
        data = db[studyid + "GSM"].find({"geo_accession": sample_id}).distinct("characteristics_ch1")

        # Change the list of characteristics into a dict.
        sample = {}
        for item in data:
            pair = item.split(": ")
            sample[pair[0]] = getCorrectType(pair[1])
        sample['id'] = sample_id
        entry["y"] = getCorrectType(y[n-1])
        entry["x"] = getCorrectType(x[n-1])
        entry["sample"] = sample
        ret.append(entry)
        n += 1

    # Disconnect.
    Connection.disconnect(connection)

    return ret
示例#17
0
def writeAttrFile(f, studyid):
    """Write one 'attribute_<name>\\t<v1>\\t<v2>...' line per GSM attribute
    to the already-open file object *f*, one column per sample.
    """
    #connection to the database                                                
    connection = Connection(HOST, PORT)
    db = connection.MINE

    # Attribute names come from an arbitrary sample document.
    attributes = db[studyid+'GSM'].find_one().keys()

    samples = db[studyid+'order'].find().distinct('GSM')
    #write attributes to file in form attribute_(attr) # # # #  where #'s are the sample's attributes and attr is the attribute name


    for attr in attributes:

        # Skip bookkeeping fields.
        if not attr == '_id' and not attr == 'GSM':

            f.write('attribute_'+attr)
            
            for sample in samples:
                sample = sample.rstrip()
                samp = db[studyid+'GSM'].find({"geo_accession":sample}).distinct(attr)
                # Only emit a column when the sample has a truthy value.
                if samp[0]:
                    
                    #if integer or float cast to string
                    try:
                        text = str(samp[0])
                    #else encode into unicode
                    # (deliberate best-effort: Python 2 str() fails on
                    # non-ASCII unicode, so fall back to utf-8 encoding)
                    except:
                        text = samp[0].encode("utf8")
                    f.write('\t' + text)
            f.write('\n')

    Connection.disconnect(connection)
示例#18
0
def repair(conf, options):
    '''fixes issues created by myself.  Currently, only orphan files and
    item links are detected and automatically removed.

    Duplicate of the earlier repair() in this file: removes metadata without
    a backing GridFS file, drops orphan file ids from the working list, and
    rewrites each account's item list. (Python 2: filter() returns lists.)
    '''

    con = Connection(conf['MONGODB_HOST'], conf['MONGODB_PORT'])
    db = con[conf['MONGODB_NAME']]
    fs = GridFS(db)

    # All GridFS file ids vs. all metadata ids (order-sensitive comparison).
    objs = [obj['_id'] for obj in fs._GridFS__files.find()]
    meta = [cur['_id'] for cur in db.items.find()]

    if objs != meta:
        # 1. metadata has some files missing, no repair possible
        diff1 = filter(lambda i: not i in objs, meta)
        diff2 = filter(lambda i: not i in meta, objs)
        for item in diff1:
            print 'removing metadata for `%s`' % item
            db.items.remove(item)

        # 2. metadata is missing, but file is there. Recover possible, but not implemented #win
        for item in diff2:
            print 'removing GridFS-File `%s`' % item
            # Only dropped from the working list; the GridFS file survives.
            objs.remove(item)

    # rebuild accounts items, when something changed
    for cur in db.accounts.find():
        # Internal documents (ids starting with '_') are left alone.
        if str(cur['_id']).startswith('_'):
            continue
        _id, items = cur['_id'], cur['items']
        items = filter(lambda i: i in objs, items)
        db.accounts.update({'_id': _id}, {'$set': {'items': items}})

    con.disconnect()
示例#19
0
    def test_05_save_entity(self):
        """save_entity: rejects non-dict input, inserts records with and
        without a preset _id, and updates existing records in place.
        Verified against the raw collection via a second connection.
        """
        connection = MongoDatabaseAPI("localhost", DB_NAME)
        connection.connect()
        #   Given wrong record type to save
        self.assertRaises(BIValueError, connection.save_entity, MongoDatabaseAPI.ET_RESOURCE, "not a dict value")

        #   Insert new record("_id" exist in incoming dictionary but not in DB)
        new_record = {"_id" : objectid.ObjectId("5010cb8137adc7191b000000"), "res_type" : "printer", "res_status" : "active", "owner" : "Priocom", "external_system" : "", "description" : "Printer in the Brannigan Team workroom", "additional_parameters" : {"Vendor" : "Canon", "model" : "iR1018"}}
        
        # `id` shadows the builtin; kept for byte-compatibility with callers.
        id = connection.save_entity(MongoDatabaseAPI.ET_RESOURCE, new_record)
        conn = Connection()
        result = conn[DB_NAME][MongoDatabaseAPI.ET_RESOURCE].find_one({'_id': objectid.ObjectId(id)})
        self.assertEqual(new_record, result)

        #   Insert another new record(no "_id" key in incoming dictionary)
        new_record = {"res_type" : "printer", "res_status" : "active", "owner" : "Priocom", "external_system" : "", "description" : "Printer in the Brannigan Team workroom", "additional_parameters" : {"Vendor" : "Canon", "model" : "iR1018"}}
        id = connection.save_entity(MongoDatabaseAPI.ET_RESOURCE, new_record)
        conn = Connection()
        result = conn[DB_NAME][MongoDatabaseAPI.ET_RESOURCE].find_one({'_id': objectid.ObjectId(id)})
        self.assertEqual(new_record, result)

        #   Update existing record(change res_status and additional_parameters:model)
        new_record = {"_id": id, "res_type" : "printer", "res_status" : "down", "owner" : "Priocom", "external_system" : "", "description" : "Printer in the Brannigan Team workroom", "additional_parameters" : {"Vendor" : "Canon", "model" : "iR1022"}}
        id = connection.save_entity(MongoDatabaseAPI.ET_RESOURCE, new_record)
        result = conn[DB_NAME][MongoDatabaseAPI.ET_RESOURCE].find_one({'_id': objectid.ObjectId(id)})
        self.assertEqual(new_record, result)

        conn.disconnect()
        connection.close()
示例#20
0
文件: db.py 项目: nmercer/raceall_api
class Database():
    """Thin CRUD wrapper around a single configured MongoDB database."""

    def __init__(self):
        # Connection settings come from the [database] section of the config.
        config = Config()
        self.host = config.parser['database']['host']
        self.port = int(config.parser['database']['port'])
        self.db_name = config.parser['database']['name']
        self.conn = Connection(self.host, self.port)
        self.db = self.conn[self.db_name]

    def insert(self, collection=None, *args):
        """Insert args[0] into *collection*; returns None when no collection."""
        if not collection:
            return
        return self.db[collection].insert(args[0])

    def select(self, collection=None, *args):
        """Find documents matching args[0] in *collection*."""
        if not collection:
            return
        return self.db[collection].find(args[0])

    def select_one(self, collection=None, *args):
        """Find a single document matching args[0] in *collection*."""
        if not collection:
            return
        return self.db[collection].find_one(args[0])

    def delete(self, collection=None, *args):
        """Remove documents matching args[0] from *collection*."""
        if not collection:
            return
        return self.db[collection].remove(args[0])

    def update(self, collection=None, *args):
        # NOTE(review): only args[0] is forwarded, but pymongo's update()
        # needs (spec, document) — presumably callers never hit this path
        # or pass a pre-bundled argument; confirm before relying on it.
        if not collection:
            return
        return self.db[collection].update(args[0])

    def close(self):
        """Disconnect the shared connection."""
        self.conn.disconnect()

    # Return database cursor
    def connect(self):
        """Open and return a fresh connection to the same host/port."""
        return Connection(self.host, self.port)
示例#21
0
    def do_host_status(self,mongo):
        """Collect and submit health metrics for one mongod instance.

        *mongo* is a mutable dict carrying connection info ('host', 'port',
        'user', 'password', 'db' list) plus the previous lock counters
        ('lockTime'/'lockTotalTime'), which this method updates in place so
        the next call can compute a lock ratio delta.
        """
        con = Connection(host=mongo['host'], port=mongo['port'], slave_okay=True)
        db = con['admin']
        if mongo['user'] and mongo['password']:
            db.authenticate(mongo['user'], mongo['password'])

        server_status = db.command('serverStatus')

        # operations
        for k, v in server_status['opcounters'].items():
            self.submit('mongo_total_operations', k, v, host=mongo['host'], port=mongo['port'])

        # memory
        for t in ['resident', 'virtual', 'mapped']:
            self.submit('mongo_memory', t, server_status['mem'][t], host=mongo['host'], port=mongo['port'])

        # connections
        self.submit('mongo_connections', 'connections', server_status['connections']['current'],host=mongo['host'], port=mongo['port'])

        # locks: ratio of lock-time delta to total-time delta since last call
        # (skipped on the very first call, when the counters are still None)
        if mongo['lockTotalTime'] is not None and mongo['lockTime'] is not None:
            #collectd.warning( "total %d - %d / %d - %d " % (server_status['globalLock']['totalTime'], self.lockTotalTime, server_status['globalLock']['totalTime'], self.lockTime))
            if mongo['lockTime']==server_status['globalLock']['lockTime']:
                value=0.0
            else:
                value=float (server_status['globalLock']['lockTime'] - mongo['lockTime']) *100.0 / float(server_status['globalLock']['totalTime'] - mongo['lockTotalTime'] ) 
            #collectd.warning( "Submitting value %d " % value)
            self.submit('mongo_percent', 'lock_ratio', value, host=mongo['host'], port=mongo['port'])


        # Remember the counters for the next delta computation.
        mongo['lockTotalTime']=server_status['globalLock']['totalTime']
        mongo['lockTime']=server_status['globalLock']['lockTime']


        # indexes
        # NOTE(review): 'indexCounters.btree' only exists on older mongod
        # versions — confirm against the deployed server.
        accesses = server_status['indexCounters']['btree']['accesses']
        misses = server_status['indexCounters']['btree']['misses']
        if misses:
            self.submit('mongo_cache_ratio', 'cache_misses', accesses / float(misses),host=mongo['host'], port=mongo['port'])
        else:
            self.submit('mongo_cache_ratio', 'cache_misses', 0, host=mongo['host'], port=mongo['port'])

        # per-database stats
        for mongo_db in mongo['db']:
            db = con[mongo_db]
            if mongo['user'] and mongo['password']:
                db.authenticate(mongo['user'], mongo['password'])
            db_stats = db.command('dbstats')

            # stats counts
            self.submit('mongo_counter', 'object_count', db_stats['objects'], mongo_db, host=mongo['host'], port=mongo['port'])
            self.submit('mongo_counter', 'collections', db_stats['collections'], mongo_db, host=mongo['host'], port=mongo['port'])
            self.submit('mongo_counter', 'num_extents', db_stats['numExtents'], mongo_db, host=mongo['host'], port=mongo['port'])
            self.submit('mongo_counter', 'indexes', db_stats['indexes'], mongo_db, host=mongo['host'], port=mongo['port'])

            # stats sizes
            self.submit('mongo_file_size', 'storage', db_stats['storageSize'], mongo_db, host=mongo['host'], port=mongo['port'])
            self.submit('mongo_file_size', 'index', db_stats['indexSize'], mongo_db, host=mongo['host'], port=mongo['port'])
            self.submit('mongo_file_size', 'data', db_stats['dataSize'], mongo_db, host=mongo['host'], port=mongo['port'])

        con.disconnect()
示例#22
0
 def save(self, host = 'localhost', database = 'twitter_grafico'):
     """Insert this user document, or update the stored copy keyed by user_id."""
     connection = Connection(host = host)
     users = connection[database].users
     existing = users.find_one({"user_id": self.user_id})
     if existing:
         users.update({'user_id': self.user_id}, self.__dict__)
     else:
         users.save(self.__dict__)
     connection.disconnect()
示例#23
0
def getNumberOfColumns(studyid):
    """Return the number of data columns (samples) stored for *studyid*.

    (Re-indented: the original mixed tabs and spaces, a TabError on Python 3.)
    """
    connection = Connection(HOST, PORT)
    db = connection.MINE

    # Any one gene document carries the full row of sample values.
    x = len(db[studyid].find_one()['data'])
    Connection.disconnect(connection)
    return x
示例#24
0
def removeByNumber(studyid):
    """Delete the processing request recorded for *studyid*.

    (Re-indented: the original mixed tabs and spaces, a TabError on Python 3.)
    """
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE
    # Remove the studyid request.
    db.request.remove({"gse": studyid})
    # Disconnect.
    Connection.disconnect(connection)
示例#25
0
def getDateRequested(studyid):
    """Return the request date for *studyid* formatted like 'March 01, 2012'.

    (Re-indented: the original mixed tabs and spaces, a TabError on Python 3.)
    """
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE
    ret = db.request.find({"gse": studyid}).distinct("datetime")[0]
    ret = ret.strftime("%B %d, %Y")
    Connection.disconnect(connection)
    return ret
def markRequestQueued(studyid):
    """Set the 'queued' flag on the request for *studyid*.

    (Re-indented: the original mixed tabs and spaces, a TabError on Python 3.)
    """
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE
    # Update the request.
    db.request.update({"gse": studyid}, {"$set": {"queued": True}})
    # Disconnect.
    Connection.disconnect(connection)
示例#27
0
	def createBlog(self,params):
		"""Create a new blog for a registered invitation user.

		Validates the input, checks credentials and remaining quota, ensures
		title/url uniqueness, shells out to the create_blog.sh script, records
		the blog, and decrements the user's instance quota. Returns True on
		any error, False on success.
		"""
		# Connect to the database.
		conn_ = Connection()
		db_ = conn_['blogomatic']
		err = False
		
		try:
			# Authenticate against the database.
			# NOTE(review): credentials are hard-coded in source.
			db_.authenticate('blogomatic','bnBN98cv.mongo')
			inv_ = db_['invitations']
			blogs_ = db_['blogs']
			
			
			# Make sure no field is empty and the url has no spaces.
			if params['title'] == "" or params['url'] == ""  or params['keywords'] == ""  or (' ' in params['url']) :
				err = True
			
			else:
				# Look the user up.
				uDoc = inv_.find_one({'email': params['email']})
				
				if uDoc != None and uDoc['instances'] > 0:
					# Check the password (stored in plain text).
					password = uDoc['password']
					if password == params['password']:
						# Make sure no blog already uses this title or url.
						bDocTitle = blogs_.find_one({'title': params['title']})
						bDocUrl = blogs_.find_one({'url': params['url']})

						# Both empty means the requested blog can be created.
						if bDocTitle == None and bDocUrl == None:
							# Call the system script that creates the blog.
							# SECURITY(review): user-supplied title/url/keywords are
							# interpolated into a shell=True command line — shell
							# injection risk; should be a list argv with the log
							# redirect handled via stdout/stderr file handles.
							proc = Popen('/opt/blog-o-matic/bin/create_blog.sh "%s" "%s" "%s" >> /opt/blog-o-matic/var/log/create_blog.log 2>&1' % (params['title'], params['url'], params['keywords']), shell=True, stdout=PIPE)						

							# Record the new blog.
							bDoc = {'email': params['email'],'url': params['url'],'title': params['title']}
							blogs_.save(bDoc)
				
							# Decrement the user's remaining blog quota.
							inst = uDoc['instances']
							uDoc['instances'] = inst - 1

							inv_.save(uDoc)
						else:
							# Title or url already taken.
							err = True
					else:
						# Wrong password.
						err = True

				else:
					# Unknown user or no quota left.
					err = True
					
		except TypeError:
			err = True
		except:
			err = True
		finally:
			db_.logout()
			conn_.disconnect()
			# NOTE(review): returning from finally swallows any in-flight
			# exception raised above.
			return err
示例#28
0
def markRequestSent(studyid):
    """Flag the request for *studyid* as queued and log the transition.

    NOTE(review): despite the name, this sets the same "queued" flag as
    markRequestQueued — confirm whether a separate "sent" flag was intended.
    """
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE
    # Update the request.
    db.request.update({"gse": studyid}, {"$set": {"queued": True}})
    # Disconnect.
    Connection.disconnect(connection)
    # BUG FIX: the original message read "...<id>has been..." (missing space).
    logging.info("study " + studyid + " has been successfully queued")
def getWaitingStudyList():
    """Return the distinct GSE ids that are neither processed nor queued.

    (Re-indented: the original mixed tabs and spaces, a TabError on Python 3.)
    """
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE
    # Distinct unprocessed, unqueued GSE ids.
    x = db.request.find({"processed": False, "queued": False}).distinct("gse")
    # Disconnect.
    Connection.disconnect(connection)
    return x
示例#30
0
def uploadLine(studyid, varname, floats):
    """Insert one gene row (id, upper-cased canonical id, values) for *studyid*.

    (Re-indented: the original mixed tabs and spaces, a TabError on Python 3.)
    """
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE
    # Create and upload the line.
    line = {"id": varname, "canonical_id": varname.upper(), "data": floats}
    db[studyid].insert(line)
    # Disconnect.
    Connection.disconnect(connection)
示例#31
0
 def save(self, host = 'localhost', database = 'twitter_grafico'):
     """Insert this tweet, or update the stored copy matching its tweet id."""
     connection = Connection(host = host)
     tweets = connection[database].tweets
     existing = tweets.find_one({"id": self.id})
     if existing:
         # Remember the Mongo _id so the update targets the stored document.
         self._id = existing.get("_id")
         tweets.update({'_id': existing.get("_id")}, self.__dict__)
     else:
         tweets.save(self.__dict__)
     connection.disconnect()
示例#32
0
def uploadProcessedData(studyid, var1, var2, mic, pcc):
    """Insert one MIC/PCC result pair for two variables of *studyid*.

    (Re-indented: the original mixed tabs and spaces, a TabError on Python 3.)
    """
    # Connect.
    connection = Connection(HOST, PORT)
    db = connection.MINE
    # Upload the data.
    data = {"var1": var1, "var2": var2, "mic": mic, "pcc": pcc}
    db[studyid].insert(data)
    # Disconnect.
    Connection.disconnect(connection)
示例#33
0
 def save(self, host = 'localhost', database = 'twitter_grafico'):
     """Insert this account document, or update the stored copy by username."""
     connection = Connection(host = host)
     accounts = connection[database].accounts
     existing = accounts.find_one({"username": self.username})
     if existing:
         # Keep the stored username spelling before updating in place.
         self.__dict__['username'] = existing.get("username")
         accounts.update({'username': existing.get("username")}, self.__dict__)
     else:
         accounts.save(self.__dict__)
     connection.disconnect()
    def insKeywordCountinDB(self, indict, DBName, collection):
        """Store the top-40 (keyword, count) pairs into *collection*.

        NOTE(review): despite the name, *indict* must be a sliceable sequence
        of (keyword, count) pairs — a plain dict would fail on [0:40].
        """
        c = Connection('localhost', 27017)
        dbh = c[DBName]
        # BUG FIX: the original wrote a bare `dbh.connection == c` whose result
        # was discarded; make the sanity check effective (matches the sibling
        # dropCollectionInDB method).
        assert dbh.connection == c
        for keyword, count in indict[0:40]:
            dbh[collection].save({"keyword": keyword, "count": count})

        c.disconnect()
        return
示例#35
0
def getNumberOfRows(studyid):
    """Return how many gene documents are stored for *studyid*.

    (Re-indented: the original mixed tabs and spaces, a TabError on Python 3.)
    """
    connection = Connection(HOST, PORT)
    db = connection.MINE

    # Count the documents in the study collection.
    ret = db[studyid].find().count()
    Connection.disconnect(connection)
    return ret
示例#36
0
def donorschoose_projects():
    """Return every project (restricted to FIELDS) as a JSON string."""
    connection = Connection(MONGODB_HOST, MONGODB_PORT)
    collection = connection[DBS_NAME][COLLECTION_NAME]
    # Materialise the cursor in one step (replaces the manual append loop),
    # then serialise with bson's json_util for ObjectId/date support.
    projects = list(collection.find(fields=FIELDS))
    json_projects = json.dumps(projects, default=json_util.default)
    connection.disconnect()
    return json_projects
示例#37
0
def test_projects():
    """Return up to 100k recently harvested projects as a JSON string.

    Only documents with ``harvested_at`` >= the module-level ``start`` are
    included, restricted to FIELDS.
    """
    connection = Connection(MONGODB_HOST, MONGODB_PORT)
    collection = connection[DBS_NAME][COLLECTION_NAME]
    cursor = collection.find({"harvested_at": {"$gte": start}},
                             fields=FIELDS).limit(100000)
    documents = list(cursor)
    payload = json.dumps(documents, default=json_util.default)
    connection.disconnect()
    return payload
示例#38
0
    def execute(self):
        """Return the latest documents for ``self.group_name``.

        For every host in the group, the most recent ``execute_at``
        timestamp is located, then every document from that run is
        collected (sorted by command name).  When ``self.command_name``
        is set, all queries are additionally restricted to that command.
        """
        # Mongo endpoint/collection come from the [server] config section.
        mongos = {
            'mongo_host': self.config.get('server', 'mongo_host'),
            'mongo_port': self.config.get('server', 'mongo_port'),
            'mongo_dbs': self.config.get('server', 'mongo_dbs'),
            'mongo_collection': self.config.get('server', 'mongo_collection'),
        }
        connection = Connection(
            mongos['mongo_host'],
            int(mongos['mongo_port']),
        )
        collection = connection[mongos['mongo_dbs']][
            mongos['mongo_collection']]

        # Base filter shared (via deepcopy) by all queries below.
        base_find_condition = {
            'group_name': self.group_name,
            'visible': 'True',
        }
        if self.command_name is not None:
            base_find_condition.update({
                'command_name': self.command_name,
            })

        # find hosts in group
        condition = deepcopy(base_find_condition)
        host_names = collection.find(condition).distinct('host_name')
        host_names.sort()

        # find latest result on each host
        results = []
        for host_name in host_names:
            condition = deepcopy(base_find_condition)
            condition.update({'host_name': host_name})
            latests = collection.find(condition).sort('execute_at',
                                                      DESCENDING).limit(1)
            # Newest execute_at for this host ('' when no document exists).
            latest_execute_at = ''
            for latest in latests:
                latest_execute_at = latest['execute_at']
                break

            # Fetch every document from that newest run, ordered by command.
            condition = deepcopy(base_find_condition)
            condition.update({'host_name': host_name})
            condition.update({'execute_at': latest_execute_at})
            documents = collection.find(condition).sort(
                'command_name', ASCENDING)
            for document in documents:
                results.append(document)

        # finalize
        connection.disconnect()

        return results
    def retKeywordCountFromDB(self, DBName, collection):
        """Load every stored (keyword, count) pair into a dict and return it."""
        c = Connection('localhost', 27017)
        dbh = c[DBName]
        assert dbh.connection == c
        counts = {doc["keyword"]: doc["count"]
                  for doc in dbh[collection].find()}
        c.disconnect()
        return counts
示例#40
0
def getMetroEvents(metro_id):
    conn = Connection()
    db = conn.ts_db
    events = db.events
    one_week = str(date.today() + relativedelta(days=+9))
    print "THIS IS ONE WEEK: " + one_week
    all_events = []
    for e in events.find({"date": {"$lt": one_week}}, {'_id': 0}):
        all_events.append(e)

    conn.disconnect()
    return json.dumps(all_events)
示例#41
0
def entityType(entity):
	# Conexion a Mongo
	conn = Connection() 		
	db = conn.grupo10_taller4
	colTypeQueryCache = db.typeQueryCache
	colDescribeQueryCache = db.describeQueryCache
	
	# Revisar si la consulta existe en el cache
	typeCache = colTypeQueryCache.find({"term":entity})
	if (typeCache.count() > 0):
		print "Type cache hit for %s" % entity
		for cacheResult in typeCache:
			return cacheResult["mainType"]
	
	sparql = SPARQLWrapper("http://dbpedia.org/sparql")
	sparql.setQuery("""
	PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
	PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
	SELECT DISTINCT ?s ?o ?class WHERE { 
		?someobj  ?p ?s . 
			?s  rdfs:label '%s' @en .
		?s  rdfs:label ?o . 
			?s  rdfs:comment ?comment .
			?s rdf:type ?class .
		FILTER (!regex(str(?s), '^http://dbpedia.org/resource/Category:')). 
		FILTER (!regex(str(?s), '^http://dbpedia.org/resource/List')).
		FILTER (!regex(str(?s), '^http://sw.opencyc.org/')). 
		FILTER (lang(?o) = 'en').  
		FILTER (!isLiteral(?someobj)).
		}
	Limit 20
	""" % entity)
	sparql.setReturnFormat(JSON)
	results = sparql.query().convert()
	for result in results["results"]["bindings"]:
		currentValue = result["class"]["value"]
		if "Person" in currentValue:
			mainType = "Person"
			break
		elif "Place" in currentValue:
			mainType = "Place"
			break
		elif "Organization" in currentValue:
			mainType = "Organization"
			break
			
	if mainType == None:
		mainType = "Thing"

	queryRes = [{"term":entity,"mainType":mainType,"results":results}]
	colTypeQueryCache.insert(queryRes)
	conn.disconnect()
	return mainType
 def testExistInDB(self, DBName, article):
     c = Connection('localhost', 27017)
     dbh = c[DBName]
     assert dbh.connection == c
     cursor = dbh[article].find()
     cnt = cursor.count()
     if cnt == 0:
         print article, "does not exist!!!"
         c.disconnect()
         return 0
     else:
         print article, "exist"
         c.disconnect()
         return 1
示例#43
0
class MongoHandler(logging.Handler):
    """Logging handler that writes formatted records into a Mongo collection.

    Connection parameters default to the application ``settings`` but can be
    overridden through keyword arguments.  With ``fail_silently`` set, Mongo
    connectivity errors are swallowed instead of raised and emit() becomes a
    no-op.
    """

    def __init__(self, level=logging.NOTSET, **kwargs):
        super(MongoHandler, self).__init__(level)
        self.host = kwargs.get('host', settings.database.host)
        self.port = kwargs.get('port', settings.database.port)
        self.database_name = kwargs.get('database', settings.database.name)
        self.collection_name = kwargs.get('collection', 'logs')
        self.fail_silently = kwargs.get('fail_silently', False)
        self.formatter = kwargs.get('formatter', MongoFormatter())

        self.connection = None
        self.database = None
        self.collection = None
        self.connect()

    def connect(self):
        """
        Connect to the Mongo database.
        """
        try:
            self.connection = Connection(host=self.host, port=self.port)
        except PyMongoError:
            if self.fail_silently:
                # Leave connection/collection as None; emit() will no-op.
                return
            else:
                raise

        self.database = self.connection[self.database_name]
        self.collection = self.database[self.collection_name]

    def close(self):
        """
        Close the connection to the Mongo database.
        """
        if self.connection is not None:
            self.connection.disconnect()
            self.connection = None
        # Bug fix: run the base class cleanup as well, so the handler is
        # deregistered from logging's internal shutdown bookkeeping.
        logging.Handler.close(self)

    def emit(self, record):
        """
        Insert log record into Mongo database
        """
        if self.collection is not None:
            try:
                self.collection.insert(self.format(record))
            except Exception:
                if not self.fail_silently:
                    self.handleError(record)
示例#44
0
def api_values(group_name, field_name):
    """Return the distinct, sorted, non-empty values of ``field_name``.

    Restricted to visible records, and to ``group_name`` unless it is '*'.
    """
    connection = Connection(MONGODB_HOST, MONGODB_PORT)
    collection = connection[DBS_NAME][COLLECTION_NAME]
    query = {'visible': 'True'}
    if group_name != '*':
        query['group_name'] = group_name
    distinct_values = collection.find(query).sort(field_name).distinct(
        field_name)
    values = [value for value in distinct_values if value != '']
    values.sort()
    connection.disconnect()
    return jsonify({'values': values})
示例#45
0
    def load_config(self):
        """Rebuild self._config from the config collection.

        Also refreshes self._master_checkpoint by reading the
        'master_checkpoint' document from each master's local database.
        """
        from pymongo import Connection
        self._config = {}
        # Map a server id back to its name in the topology.
        name_by_id = {sconf['id']: name
                      for name, sconf in self._topology.items()}
        for master in self._coll.find():
            if master['_id'] == 'master_checkpoint':
                continue
            name = name_by_id[master['_id']]
            self._config[name] = master

            conn = Connection(self._topology[name]['uri'])
            checkpoints = conn.local[MMM_DB_NAME].find({'_id': 'master_checkpoint'})
            for cp in checkpoints:
                self._master_checkpoint = cp['ts']
            conn.disconnect()
示例#46
0
def info(conf):
    '''A basic, incomplete short info.  Displays overall file size and
    account counts.'''

    con = Connection(conf['MONGODB_HOST'], conf['MONGODB_PORT'])
    db = con[conf['MONGODB_NAME']]
    fs = GridFS(db)

    overall_file_size = sum([f['length'] for f in fs._GridFS__files.find()])
    inactivated = [
        acc for acc in db.accounts.find() if not acc.get('activated_at', True)
    ]
    print fs._GridFS__files.count(), 'files [%s]' % ppsize(overall_file_size)
    print db.accounts.count() - 1, 'accounts total,', len(
        inactivated), 'not activated'

    con.disconnect()
示例#47
0
def cprofile_main():
    """Profiling target: reset the timeit_test DB, then save one mongoengine
    document carrying twenty dictionary entries."""
    from pymongo import Connection
    connection = Connection()
    connection.drop_database('timeit_test')
    connection.disconnect()

    from mongoengine import Document, DictField, connect
    connect("timeit_test")

    class Noddy(Document):
        fields = DictField()

    for _ in xrange(1):
        doc = Noddy()
        for idx in range(20):
            doc.fields["key" + str(idx)] = "value " + str(idx)
        doc.save()
示例#48
0
def account(conf, options, args):
    '''View details or summary of all accounts.

    With options.all set, every account is listed; otherwise args[1] must
    be a numeric _id or an email address selecting a single account.
    Prints each account's fields and the aggregate size of its GridFS items.
    '''

    con = Connection(conf['MONGODB_HOST'], conf['MONGODB_PORT'])
    db = con[conf['MONGODB_NAME']]
    fs = GridFS(db)

    if options.all:
        query = None
    elif len(args) == 2:
        # Numeric argument selects by _id, anything else by email.
        query = {
            '_id': int(args[1])
        } if args[1].isdigit() else {
            'email': args[1]
        }
    else:
        log.error('account <email or _id> requires a valid email or _id')
        sys.exit(1)

    for acc in db.accounts.find(query):
        # Skip internal documents such as the _autoinc counter.
        if str(acc['_id']).startswith('_'):
            continue
        print '%s [id:%s]' % (acc['email'], acc['id'])
        for key in acc:
            if key in ['email', '_id', 'id']:
                continue
            if key == 'items':
                try:
                    size = sum([fs.get(_id).length for _id in acc['items']])
                except NoFile:
                    # NOTE(review): `_id` here relies on the Python 2
                    # behavior of list-comprehension variables leaking into
                    # the enclosing scope; under Python 3 this would be a
                    # NameError, and it names a file id, not the account.
                    log.warn('Account `%s` has some files missing:', _id)
                    # fail safe counting
                    size = 0
                    missing = []
                    for i in acc['items']:
                        if not fs.exists(i):
                            missing.append(i)
                        else:
                            size += fs.get(i).length
                print '    size: %s' % ppsize(size)
            print '    %s: %s' % (key, acc[key])
    if options.all:
        print db.accounts.count() - 1, 'accounts total'  # -1 for _autoinc

    con.disconnect()
示例#49
0
    def get_opcounters(self):
        """Aggregate ``opcounters`` from serverStatus for every node.

        doctest convenience function: collects the raw counters per host
        under 'hosts' and two derived counters under 'custom', proving that
        ``write`` operations are dispatched to the PRIMARY while ``read``
        operations go to the SECONDARIES.
        """
        from pymongo import Connection
        stats = {
            'custom': {},
            'hosts': {},
        }
        for port in self.storage_ports:
            address = '{0}:{1}'.format(self.hostname, port)
            conn = Connection(address, safe=True)
            server_status = conn.admin.command('serverStatus')
            conn.disconnect()

            # raw opcounters for this node
            opcounters = server_status.get('opcounters')

            # whether this node currently is the PRIMARY
            is_primary = server_status.get('repl', {}).get('ismaster', False)

            stats['hosts'][address] = opcounters

            # custom counters:
            # - inserts performed on the PRIMARY
            # - queries routed to the SECONDARIES
            stats['custom'].setdefault('primary.insert', 0)
            stats['custom'].setdefault('secondary.query', 0)
            if is_primary:
                stats['custom']['primary.insert'] += opcounters['insert']
            else:
                stats['custom']['secondary.query'] += opcounters['query']

        return stats
示例#50
0
def api_results(group_name, field_name):
    """Return, as JSON, the latest record per host of a group.

    ``group_name`` must name a concrete group ('*' is rejected with 500).
    When ``field_name`` looks like a command name (prefix 'command_') the
    results are restricted to that command.  Each returned record has its
    output HTML-escaped, its ObjectId stringified and its timestamp
    converted to a Unix epoch.
    """
    from calendar import timegm  # hoisted: was re-imported per record

    if group_name == '*':
        # Bug fix: reject wildcards before opening the connection -- the
        # original connected first and leaked the connection on this path.
        return jsonify({'values': ''}, 500)
    connection = Connection(MONGODB_HOST, MONGODB_PORT)
    collection = connection[DBS_NAME][COLLECTION_NAME]
    filter_condition = {'visible': 'True'}
    filter_condition.update({'group_name': group_name})
    if field_name != '*' and re.match(r'^command_', field_name):
        filter_condition.update({'command_name': field_name})
    host_names = collection.find(filter_condition).distinct('host_name')
    host_names.sort()
    jsondata = list()
    for host_name in host_names:
        # Bug fix: reuse the same filter that produced host_names.  The old
        # code always forced command_name=field_name here, which matched no
        # documents (and raised StopIteration from .next()) whenever
        # field_name was '*' or not a command name.
        host_filter = dict(filter_condition)
        host_filter.update({'host_name': host_name})
        cursor = collection.find(host_filter).sort('time',
                                                   DESCENDING).limit(1)
        for item in cursor:
            ## output
            item['output'] = escape(item['output'])
            ## _id
            item['_id'] = str(item['_id'])
            ## time
            item['time'] = timegm(item['time'].timetuple())
            jsondata.append(item)
    connection.disconnect()
    return jsonify({'results': jsondata})
示例#51
0
    def persistSimulation(self):
        """Persist the accumulated per-simulation totals to local MongoDB.

        TODO: include the result from getEnvironmentInformation.
        """
        totals = {
            "SimulationId": self.simulationId.encode('utf-8'),
            "CO2": self.total_CO2_mg,
            "CO": self.total_CO_mg,
            "HC": self.total_HC_mg,
            "PMx": self.total_PMx_mg,
            "NOx": self.total_NOx_mg,
            "Fuel": self.total_Fuel_ml,
            "Noise": self.total_Noise_dBA,
            "Halting": self.total_Halting_vehicles,
            "MeanSpeed": self.total_MeanSpeed_m_per_s,
            "SimulationSteps": self.simulation_steps,
            "NetworkConfiguration": self.visible_network_agents,
        }
        self.verbose_display("Simulation Totals: %s", totals, 1)

        # One document per simulation run in macts.metrics.
        mongo = Connection('localhost')
        mongo.macts.metrics.insert(totals)
        mongo.disconnect()
示例#52
0
class Corpus(object):
    """Thin wrapper around a MongoDB collection of corpus records."""

    def __init__(self, address = 'localhost', database = '', collection = ''):
        """Open a connection to ``collection`` inside ``database``.

        Both names are required; when either is missing the instance is
        left unconnected (connection is None) and a message is printed.
        """
        self.connection = None
        if len(database) == 0 or len(collection) == 0:
            print('Need specified name database or collection.')
            return

        # Bug fix: honor the ``address`` argument -- it used to be ignored
        # in favor of a hard-coded 'localhost'.
        self.connection = Connection(address, 27017)
        self.db = self.connection[database]

        self.collection = self.db[collection]

    def __del__(self):
        # Bug fix: __init__ may have bailed out before connecting; guard so
        # garbage collection never raises AttributeError.
        if getattr(self, 'connection', None) is not None:
            self.connection.disconnect()

    def append(self, record):
        """Insert ``record`` (a dict) unless an identical document exists."""
        try:
            if self.collection.find(record).count() > 0:
                pass
            else:
                self.collection.insert(record)
        except bson.errors.InvalidStringData as e:
            print(e.message)

    def find(self, queries = None):
        """Return the matching documents as a tuple."""
        return tuple(self.collection.find(queries))

    def update(self, record, new_record):
        """Replace ``record`` with ``new_record``; report TypeErrors."""
        try:
            self.collection.update(record, new_record)
        except TypeError as e:
            print('%s #%s\n\tMessage: %s'
                    % (str(self.__class__.__name__), traceback.extract_stack()[-1][2], e.message))

    def exists(self, queries):
        """True when any single attribute/value pair in ``queries`` matches."""
        find_count = 0
        for attr, value in queries.items():
            result = self.find({ attr: value })
            find_count += len(result)

        return find_count > 0

    def remove(self, item):
        """Delete documents matching ``item``."""
        self.collection.remove(item)
示例#53
0
    def do_server_status(self):
        """Collect serverStatus / dbstats metrics and submit them.

        Reads operation counters, memory usage, connection counts, the
        global-lock ratio and index cache statistics from ``serverStatus``,
        then per-database counters and sizes from ``dbstats``.  Lock and
        index figures are reported as deltas against the values remembered
        from the previous run (self.lockTime / self.accesses / ...).
        """
        con = Connection(host=self.mongo_host,
                         port=self.mongo_port,
                         slave_okay=True)
        db = con[self.mongo_db[0]]
        if self.mongo_user and self.mongo_password:
            db.authenticate(self.mongo_user, self.mongo_password)
        server_status = db.command('serverStatus')

        version = server_status['version']
        # indexCounters moved to the top level in MongoDB 2.4
        at_least_2_4 = V(version) >= V('2.4.0')

        # operations
        for k, v in server_status['opcounters'].items():
            self.submit('total_operations', k, v)

        # memory
        for t in ['resident', 'virtual', 'mapped']:
            self.submit('memory', t, server_status['mem'][t])

        # connections
        self.submit('connections', 'connections',
                    server_status['connections']['current'])

        # locks: percentage of time holding the global lock since the last
        # sample (needs state from a previous run)
        if self.lockTotalTime is not None and self.lockTime is not None:
            if self.lockTime == server_status['globalLock']['lockTime']:
                value = 0.0
            else:
                value = float(server_status['globalLock']['lockTime'] -
                              self.lockTime) * 100.0 / float(
                                  server_status['globalLock']['totalTime'] -
                                  self.lockTotalTime)
            self.submit('percent', 'lock_ratio', value)

        self.lockTotalTime = server_status['globalLock']['totalTime']
        self.lockTime = server_status['globalLock']['lockTime']

        # indexes
        accesses = None
        misses = None
        index_counters = server_status[
            'indexCounters'] if at_least_2_4 else server_status[
                'indexCounters']['btree']

        if self.accesses is not None:
            accesses = index_counters['accesses'] - self.accesses
            if accesses < 0:
                accesses = None
        misses = (index_counters['misses'] or 0) - (self.misses or 0)
        if misses < 0:
            misses = None
        if accesses and misses is not None:
            self.submit('cache_ratio', 'cache_misses',
                        int(misses * 100 / float(accesses)))
        else:
            self.submit('cache_ratio', 'cache_misses', 0)
        self.accesses = index_counters['accesses']
        self.misses = index_counters['misses']

        for mongo_db in self.mongo_db:
            db = con[mongo_db]
            if self.mongo_user and self.mongo_password:
                # Bug fix: authenticate against the database being queried;
                # the old code re-authenticated con[self.mongo_db[0]] on
                # every iteration (cf. the sibling implementation of this
                # method elsewhere in this file).
                db.authenticate(self.mongo_user, self.mongo_password)
            db_stats = db.command('dbstats')

            # stats counts
            self.submit('counter', 'object_count', db_stats['objects'],
                        mongo_db)
            self.submit('counter', 'collections', db_stats['collections'],
                        mongo_db)
            self.submit('counter', 'num_extents', db_stats['numExtents'],
                        mongo_db)
            self.submit('counter', 'indexes', db_stats['indexes'], mongo_db)

            # stats sizes
            self.submit('file_size', 'storage', db_stats['storageSize'],
                        mongo_db)
            self.submit('file_size', 'index', db_stats['indexSize'], mongo_db)
            self.submit('file_size', 'data', db_stats['dataSize'], mongo_db)

        con.disconnect()
示例#54
0
class MonitorCronJob(object):
    """Cron job that reads active crawl tasks from MongoDB and feeds the
    corresponding URLs into Redis for the scrapy-redis spider.

    Construction connects to both stores and wipes the spider's Redis
    hash maps and dupefilter.
    """

    def __init__(self):
        # Connection settings come from module-level constants.
        self.mongo_host = MONGO_HOST
        self.mongo_port = MONGO_PORT
        self.redis_host = REDIS_HOST
        self.redis_port = REDIS_PORT
        self.rmap = REDIS_MAP            # url -> sku hash map key
        self.emap = EXTRA_MAP            # extra-url -> sku hash map key
        # Redis keys used by the spider: start-url list and dupefilter.
        self.ulist = '%s:start_urls' % SPIDER
        self.dupefilter_key = '%s:dupefilter' % SPIDER

        self._connectDB()
        self._connectRedis()
        self._cleanRedis()

    def __del__(self):
        self._disconnectDB()

    def _connectDB(self):
        # Open the MongoDB connection and cache the task collection handle.
        self.c = Connection(self.mongo_host, self.mongo_port)
        self.db = self.c[TASK_DB]
        self.collection = self.db[TASK_COLLECTION]
        print "[log] Connect to MongoDB %s:%s" % (self.mongo_host,
                                                  self.mongo_port)

    def _disconnectDB(self):
        self.c.disconnect()
        del self.c, self.db, self.collection
        print "[log] Disconnect from MongoDB %s:%s" % (self.mongo_host,
                                                       self.mongo_port)

    def _alive(self):
        # True when the MongoDB connection exists and still responds.
        return True if self.c and self.c.alive() else False

    def _connectRedis(self):
        self.r = redis.Redis(host=self.redis_host, port=self.redis_port)
        print "[log] Connect to Redis %s:%s" % (self.redis_host,
                                                self.redis_port)

    def _cleanRedis(self):
        # Drop stale url/sku maps and the dupefilter so the next crawl
        # starts from a clean slate.
        self.r.delete(self.rmap)
        print "[log] Clean url hash map of %s" % self.rmap
        self.r.delete(self.emap)
        print "[log] Clean extra url hash map of %s" % self.emap
        self.r.delete(self.dupefilter_key)
        print "[log] Clean dupefilter_key of %s" % self.dupefilter_key

    def read_task(self, rto_type='cursor'):
        """Fetch active tasks (state == 1) from MongoDB.

        Returns a cursor when ``rto_type`` is 'cursor', a list when it is
        'list', and (implicitly) None when the DB connection is down or
        the type is unrecognized.
        """
        if self._alive():

            cursor = self.collection.find({'state': 1})

            if rto_type == 'cursor':
                print "[log] Read %d tasks from DB" % cursor.count()
                return cursor

            elif rto_type == 'list':
                tasks = []
                for i in cursor:
                    tasks.append(i)
                print "[log] Read %d tasks from DB" % len(tasks)
                return tasks

        else:
            pass

    def map_tasks(self):
        """Map each task's urls (and extras) to its SKU in Redis.

        Task document structure::

            {
                'sku'   : string, product sku of feifei,
                'urls'  : list of this task's urls,
                'extras': optional list of extra urls
            }

        Every url is recorded in the url->sku hash map and pushed onto
        the spider's start_urls list.
        """
        def to_redis(url):
            self.r.lpush(self.ulist, url)

        tasks = self.read_task('cursor')

        for i in tasks:
            sku = i.get('sku', None)
            urls = i.get('urls', None)
            extras = i.get('extras', None)
            if sku:
                if urls:
                    for url in urls:
                        self.r.hset(self.rmap, url,
                                    sku)  # hset(hash, key, value)
                        to_redis(url)
                        print "[log] List %s to %s" % (url, self.ulist)
                if extras:
                    for extra in extras:
                        self.r.hset(self.emap, extra, sku)
                        to_redis(extra)
                        print "[log] List %s to %s" % (extra, self.ulist)

    def test(self):
        # Smoke-test hook: append a line to a local file.
        with open('test.txt', 'ab') as f:
            f.write('1\n')

    def __getattribute__(self, name):
        # NOTE(review): swallows *every* attribute lookup failure and
        # returns None instead of raising AttributeError.  This silences
        # typos and missing attributes; verify it is intentional.
        try:
            rt = object.__getattribute__(self, name)
        except:
            rt = None
        return rt
示例#55
0
    # NOTE(review): fragment -- the enclosing function/script header is not
    # visible here; db, base_url, common, Weather, get_prec_block and
    # connect are defined elsewhere.
    # Walks backwards day by day from yesterday toward 2013-01-01, scraping
    # the hourly observation page for every (prefecture, block) pair.
    start = datetime.date.today() - datetime.timedelta(1)
    end = datetime.date(2013, 1, 1)
    if False:
        # Disabled one-off bootstrap step.
        get_prec_block()
        sys.exit()
    for pair in db.weather_place.find():
        prec = pair['prec_id']
        block = pair['block_id']
        date = start
        while date != end:
            # Block numbers are zero-padded to four digits; blocks above
            # 40000 use the hourly_s1 endpoint instead of hourly_a1.
            if block < 10:
                url = u'{0}hourly_a1.php?prec_no={1}&block_no=000{2}&year={3}&month={4}&day={5}&view='.format(
                    base_url, prec, block, date.year, date.month, date.day)
            elif block < 100:
                url = u'{0}hourly_a1.php?prec_no={1}&block_no=00{2}&year={3}&month={4}&day={5}&view='.format(
                    base_url, prec, block, date.year, date.month, date.day)
            elif block < 1000:
                url = u'{0}hourly_a1.php?prec_no={1}&block_no=0{2}&year={3}&month={4}&day={5}&view='.format(
                    base_url, prec, block, date.year, date.month, date.day)
            elif block > 40000:
                url = u'{0}hourly_s1.php?prec_no={1}&block_no={2}&year={3}&month={4}&day={5}&view='.format(
                    base_url, prec, block, date.year, date.month, date.day)
            else:
                url = u'{0}hourly_a1.php?prec_no={1}&block_no={2}&year={3}&month={4}&day={5}&view='.format(
                    base_url, prec, block, date.year, date.month, date.day)
            source = common.get_source_content(url)
            # NOTE(review): `continue` skips the date decrement below, so a
            # short (data-less) page makes this loop re-fetch the same day
            # forever -- the decrement should presumably happen first.
            if len(source) < 6500: continue
            if Weather.get_weather(url, prec, block, date, source): break
            date = date - datetime.timedelta(1)
    connect.disconnect()
示例#56
0
class HttpApiTest(TestCase):
    """Integration tests for the v1 HTTP API using Django's test client
    with HTTP Basic authentication.

    You must define settings.HTTP_TEST_USERNAME / HTTP_TEST_PASSWORD as
    valid credentials (checked in __init__).

    Django client cheat sheet:
    
    Client methods:
    
        get(path, data={}, follow=False) 
        post(path, data={}, follow=False) 
        head(path, data={}, follow=False)
        options(path, data={}, follow=False) 
        put(path, data={}, follow=False) 
        delete(path, data={}, follow=False) 
        login(options) 
        logout() 
    
    Reponse attributes:
       
        client 
        content 
        context 
        request 
        status_code
        template  
    
    """

    fixtures = ['features_test.json']

    def __init__(self, *args, **kwargs):

        # settings up HTTP authentification credentials

        try:
            username = settings.HTTP_TEST_USERNAME
            password = settings.HTTP_TEST_PASSWORD
        except AttributeError:
            # NOTE(review): the message names HTTP_TEST_USERNAME twice; the
            # second occurrence should presumably read HTTP_TEST_PASSWORD.
            raise Exception('You must define settings.HTTP_TEST_USERNAME '\
                            'and settings.HTTP_TEST_USERNAME to be able to '\
                            'test HTTP authentification')
        if not authenticate(username=username, password=password):
            raise Exception('settings.HTTP_TEST_USERNAME and '\
                            'settings.HTTP_TEST_PASSWORD are not valid '\
                            'credentials. Could not login.')

        # Pre-compute the Basic auth header passed with every request.
        auth = 'Basic %s' % base64.encodestring('%s:%s' % (username, password))
        self.auth = {'HTTP_AUTHORIZATION': auth.strip()}
        TestCase.__init__(self, *args, **kwargs)

    def assertJsonMatchDict(self, url, d, code):
        """GET ``url``; assert JSON body equals ``d`` and status equals ``code``."""
        response = self.client.get(url, **self.auth)
        self.assertEqual(d, json.loads(response.content))
        self.failUnlessEqual(str(response.status_code), str(code))

    def assertPageMatch(self, url, content, code='200'):
        """GET ``url``; assert ``content`` appears in the body with status ``code``."""
        response = self.client.get(url, **self.auth)
        self.assertTrue(content in response.content)
        self.assertEqual(str(response.status_code), str(code))

    def create_test_feature(self, name='test'):
        """POST a random point feature through the API; return the response."""
        url = '/api/v1/createfeature/'
        coord = "[%s, %s]" % (random.random() * 10, random.random() * 10)
        response = self.client.post(
            url, {
                "subdivision_code": "FR",
                "country_code": "FR",
                "feature_type": "school",
                "geometry_type": "Point",
                "geometry_coordinates": coord,
                "name": name
            }, **self.auth)
        return response

    def setUp(self):

        self.client = Client()

        # set up mongo
        self.con = Connection(settings.MONGO_HOST, settings.MONGO_PORT)
        self.db = self.con[settings.MONGO_DB_NAME]
        self.transactions = self.db[settings.MONGO_DB_NAME]
        self.history = self.db[settings.MONGO_HISTORYDB_NAME]

        # set up a test user
        self.user, created = User.objects.get_or_create(
            username=settings.HTTP_TEST_USERNAME)
        self.user.set_password(settings.HTTP_TEST_PASSWORD)
        self.user.save()

    def tearDown(self):
        # Drop the per-test database and release the connection.
        self.con.drop_database(settings.MONGO_DB_NAME)
        self.con.disconnect()

    def test_view_unknown_page(self):
        """Unknown URLs return 404."""
        response = self.client.get('/nothing/', {}, **self.auth)
        self.failUnlessEqual(response.status_code, 404)

    def test_auth(self):
        """Requests without credentials are rejected with 401."""
        response = self.client.get(
            '/api/v1/feature/111111111111111111111.json')
        self.assertEqual(
            json.loads(response.content), {
                u'message':
                u'Unauthorized - Your account credentials were invalid.',
                u'code': u'401'
            })
        self.failUnlessEqual(str(response.status_code), '401')

    def test_view_unknown_feature(self):
        """Fetching a nonexistent feature yields an empty collection and 404."""
        self.assertJsonMatchDict('/api/v1/feature/111111111111111111111.json',
                                 {
                                     'status': 404,
                                     'total': 0,
                                     'type': "FeatureCollection",
                                     'features': []
                                 }, 404)

    def test_home_page(self):
        """The landing page renders."""
        self.assertPageMatch('/', 'Welcome to georegistry')

    def test_create_feature_form(self):
        """The upload form renders, and posting it creates a feature."""
        url = '/api/v1/createfeature/'
        self.assertPageMatch(url, 'Upload Feature Form')
        response = self.create_test_feature()
        self.assertEqual(str(response.status_code), "200")
        self.assertTrue("test" in response.content)

    def test_get_feature(self):
        """A created feature is retrievable by id and by id@epoch."""
        response = self.create_test_feature()
        data = json.loads(response.content)

        feature_id = data['features'][0]['properties']['id']
        epoch = data['features'][0]['properties']['epoch']

        response = self.client.get('/api/v1/feature/%s.json' % feature_id,
                                   **self.auth)
        self.assertTrue("test" in response.content)
        self.assertTrue(feature_id in response.content)

        response = self.client.get(
            '/api/v1/feature/%s@%s.json' % (feature_id, epoch), **self.auth)
        self.assertTrue("test" in response.content)
        self.assertTrue(feature_id in response.content)
        self.assertTrue(epoch in response.content)

    def test_search_feature(self):
        """Search by feature_type finds the created feature; misses give 404."""
        response = self.create_test_feature()
        data = json.loads(response.content)

        feature_id = data['features'][0]['properties']['id']
        epoch = data['features'][0]['properties']['epoch']

        response = self.client.get('/api/v1/features/search',
                                   {'feature_type': 'school'}, **self.auth)
        self.assertTrue("test" in response.content)
        self.assertTrue("school" in response.content)

        response = self.client.get('/api/v1/search',
                                   {'feature_type': 'church'}, **self.auth)
        self.assertEqual(str(response.status_code), "404")

    def test_update_feature(self):
        pass
示例#57
0
    def do_server_status(self):
        """Collect MongoDB health metrics and report them via self.submit().

        Gathers serverStatus counters (operations, memory, connections,
        global-lock ratio, index cache miss ratio), per-database dbstats
        counts/sizes, and replica-set replication lag — the latter reported
        both in seconds and as a percentage of the primary's time window.

        Returns a nagios-style int (0 OK / 1 warning / 2 critical) when a
        replica-set check short-circuits, otherwise the computed lag as a
        string.

        NOTE(review): Python 2 code built on the legacy pymongo
        ``Connection`` API; relies on module-level ``max_lag``,
        ``mongo_connect``, ``replication_get_time_diff``, ``V`` and
        ``pymongo``.  The outer ``try`` opened below has no matching
        ``except``/``finally`` visible in this excerpt — the block appears
        truncated.
        """
        # Local aliases; host/port/user/passwd are reused by the
        # replica-set checks further down.  perf_data is assigned but never
        # read in the visible code.
        host = self.mongo_host
        port = self.mongo_port
        user = self.mongo_user
        passwd = self.mongo_password
        perf_data = False
        # slave_okay permits reads when connected to a secondary.
        con = Connection(host=self.mongo_host,
                         port=self.mongo_port,
                         slave_okay=True)
        # Default to monitoring every database on the server.
        if not self.mongo_db:
            self.mongo_db = con.database_names()
        db = con[self.mongo_db[0]]
        if self.mongo_user and self.mongo_password:
            db.authenticate(self.mongo_user, self.mongo_password)
        server_status = db.command('serverStatus')

        version = server_status['version']
        at_least_2_4 = V(version) >= V('2.4.0')

        # operations
        for k, v in server_status['opcounters'].items():
            self.submit('total_operations', k, v)

        # memory
        for t in ['resident', 'virtual', 'mapped']:
            self.submit('memory', t, server_status['mem'][t])

        # connections
        self.submit('connections', 'connections',
                    server_status['connections']['current'])

        # locks: percentage of time spent holding the global lock since the
        # previous sample (delta of lockTime over delta of totalTime).
        # Skipped on the first run, when no previous sample exists.
        if self.lockTotalTime is not None and self.lockTime is not None:
            if self.lockTime == server_status['globalLock']['lockTime']:
                value = 0.0
            else:
                value = float(server_status['globalLock']['lockTime'] -
                              self.lockTime) * 100.0 / float(
                                  server_status['globalLock']['totalTime'] -
                                  self.lockTotalTime)
            self.submit('percent', 'lock_ratio', value)

        # Remember the raw counters for the next sample.
        self.lockTotalTime = server_status['globalLock']['totalTime']
        self.lockTime = server_status['globalLock']['lockTime']

        # indexes: on 2.4+ the access/miss counters live at the top level of
        # indexCounters; older servers nest them under 'btree'.
        accesses = None
        misses = None
        index_counters = server_status[
            'indexCounters'] if at_least_2_4 else server_status[
                'indexCounters']['btree']

        # Deltas since the previous sample; negative deltas (counter reset)
        # are discarded.
        if self.accesses is not None:
            accesses = index_counters['accesses'] - self.accesses
            if accesses < 0:
                accesses = None
        misses = (index_counters['misses'] or 0) - (self.misses or 0)
        if misses < 0:
            misses = None
        # Avoid division by zero when there were no accesses this interval.
        if accesses and misses is not None:
            self.submit('cache_ratio', 'cache_misses',
                        int(misses * 100 / float(accesses)))
        else:
            self.submit('cache_ratio', 'cache_misses', 0)
        self.accesses = index_counters['accesses']
        self.misses = index_counters['misses']

        # Per-database statistics: object/collection/extent/index counts
        # and on-disk sizes.
        for mongo_db in self.mongo_db:
            db = con[mongo_db]
            if self.mongo_user and self.mongo_password:
                db.authenticate(self.mongo_user, self.mongo_password)
            db_stats = db.command('dbstats')

            # stats counts
            self.submit('counter', 'object_count', db_stats['objects'],
                        mongo_db)
            self.submit('counter', 'collections', db_stats['collections'],
                        mongo_db)
            self.submit('counter', 'num_extents', db_stats['numExtents'],
                        mongo_db)
            self.submit('counter', 'indexes', db_stats['indexes'], mongo_db)

            # stats sizes
            self.submit('file_size', 'storage', db_stats['storageSize'],
                        mongo_db)
            self.submit('file_size', 'index', db_stats['indexSize'], mongo_db)
            self.submit('file_size', 'data', db_stats['dataSize'], mongo_db)

        # Replica check

        rs_status = {}
        slaveDelays = {}
        try:
            # Get replica set status
            try:
                rs_status = con.admin.command("replSetGetStatus")
            # Standalone mongod: the command fails when the server was not
            # started with --replSet, in which case there is nothing to do.
            except pymongo.errors.OperationFailure, e:
                # NOTE(review): str.find returns -1 (truthy) when the text
                # is absent, so this condition fires for ANY OperationFailure
                # whose code is None — probably meant `.find(...) >= 0`.
                if e.code == None and str(e).find(
                        'failed: not running with --replSet"'):
                    print "OK - Not running with replSet"
                    con.disconnect()
                    return 0

            # Map each member to its configured slaveDelay (0 if unset).
            rs_conf = con.local.system.replset.find_one()
            for member in rs_conf['members']:
                if member.get('slaveDelay') is not None:
                    slaveDelays[member['host']] = member.get('slaveDelay')
                else:
                    slaveDelays[member['host']] = 0

            # Find the primary and/or the current node
            primary_node = None
            host_node = None

            for member in rs_status["members"]:
                if member["stateStr"] == "PRIMARY":
                    primary_node = member
                if member["name"].split(':')[0] == host and int(
                        member["name"].split(':')[1]) == port:
                    host_node = member

            # Check if we're in the middle of an election and don't have a primary
            if primary_node is None:
                print "WARNING - No primary defined. In an election?"
                con.disconnect()
                return 1

            # Check if we failed to find the current host
            # below should never happen
            if host_node is None:
                print "CRITICAL - Unable to find host '" + host + "' in replica set."
                con.disconnect()
                return 2
            # Is the specified host the primary?
            # NOTE(review): max_lag is not defined in this method —
            # presumably a module-level setting; verify.
            if host_node["stateStr"] == "PRIMARY":
                if max_lag == False:
                    print "OK - This is the primary."
                    con.disconnect()
                    return 0
                else:
                    # get the maximal replication lag across all non-arbiter
                    # members, net of each member's configured slaveDelay
                    data = ""
                    maximal_lag = 0
                    for member in rs_status['members']:
                        if not member['stateStr'] == "ARBITER":
                            lastSlaveOpTime = member['optimeDate']
                            replicationLag = abs(
                                primary_node["optimeDate"] - lastSlaveOpTime
                            ).seconds - slaveDelays[member['name']]
                            data = data + member[
                                'name'] + " lag=%d;" % replicationLag
                            maximal_lag = max(maximal_lag, replicationLag)

                    # send message with maximal lag
                    message = "Maximal lag is " + str(maximal_lag) + " seconds"
                    print message
                    self.submit('replication', 'maximal-lag-seconds',
                                str(maximal_lag))

                    # send message with maximal lag in percentage
                    # (reconnect to the primary to measure its time window)
                    err, con = mongo_connect(
                        primary_node['name'].split(':')[0],
                        int(primary_node['name'].split(':')[1]), False, user,
                        passwd)
                    if err != 0:
                        con.disconnect()
                        return err
                    primary_timediff = replication_get_time_diff(con)
                    maximal_lag = int(
                        float(maximal_lag) / float(primary_timediff) * 100)
                    message = "Maximal lag is " + str(
                        maximal_lag) + " percents"
                    print message
                    self.submit('replication', 'maximal-lag-percentage',
                                str(maximal_lag))
                    con.disconnect()
                    return str(maximal_lag)
            elif host_node["stateStr"] == "ARBITER":
                print "OK - This is an arbiter"
                con.disconnect()
                return 0

            # Find the difference in optime between current node and PRIMARY

            optime_lag = abs(primary_node["optimeDate"] -
                             host_node["optimeDate"])
            # Look up this node's configured slaveDelay; tolerate the
            # default-port suffix being present in one map but not the other.
            # NOTE(review): slave_delay is computed but never applied to the
            # lag below.
            if host_node['name'] in slaveDelays:
                slave_delay = slaveDelays[host_node['name']]
            elif host_node['name'].endswith(':27017') and host_node[
                    'name'][:-len(":27017")] in slaveDelays:
                slave_delay = slaveDelays[host_node['name'][:-len(":27017")]]
            else:
                raise Exception(
                    "Unable to determine slave delay for {0}".format(
                        host_node['name']))

            try:  # work starting from python2.7
                lag = optime_lag.total_seconds()
            except:
                # total_seconds() is missing before 2.7; compute it manually
                # (microseconds are dropped).
                lag = float(optime_lag.seconds + optime_lag.days * 24 * 3600)

            # send message with lag
            message = "Lag is " + str(lag) + " seconds"
            print message
            self.submit('replication', 'lag-seconds', str(lag))

            # send message with lag in percentage
            err, con = mongo_connect(primary_node['name'].split(':')[0],
                                     int(primary_node['name'].split(':')[1]),
                                     False, user, passwd)
            if err != 0:
                con.disconnect()
                return err
            primary_timediff = replication_get_time_diff(con)
            if primary_timediff != 0:
                lag = int(float(lag) / float(primary_timediff) * 100)
            else:
                lag = 0
            message = "Lag is " + str(lag) + " percents"
            print message
            self.submit('replication', 'lag-percentage', str(lag))
            con.disconnect()
            return str(lag)
#!/usr/bin/env python

from pymongo import Connection

conn = Connection("localhost:27017", slave_okay=True)

config = {
    '_id':
    'foo',
    'members': [{
        '_id': 0,
        'host': 'localhost:27017'
    }, {
        '_id': 1,
        'host': 'localhost:27018'
    }, {
        '_id': 2,
        'host': 'localhost:27019'
    }]
}

resp = conn.admin.command("replSetInitiate", config)

print resp

conn.disconnect()