Exemple #1
0
def getCampaignWordsPerDay(campaignID):
 #   print "Get Campaign Words Per Day"
      
    connection = Connection(DB.getDBHost(), DB.getDBPort())
    
    try:
        
        db = connection.meteor
         
        campaign = db.campaigns.find_one({"_id":campaignID})
        
        connection.close()
        
        if (campaign == ""):
            return "1"
        
        wordsPerDay = campaign['sendcount']
        
        if (wordsPerDay == ""):
            return "1"
    
        return wordsPerDay
    
    except:
        print "**** getCampaignWordsPerDay Exception ****"
        return "1"
Exemple #2
0
def getActiveWordList(campaignID):
    print "ACTIVE Word List Order"
      
    connection = Connection(DB.getDBHost(), DB.getDBPort())
#    print "Connecting to Campaigns"
    
    db = connection.meteor
#    print "Connected to Words"    
    
    activeWordList = []
#    print "Active Word List Order"
    
    campaign = db.campaigns.find_one({"_id":campaignID})
    
    connection.close()
    
    if (campaign == ""):
        return ""
    
    wordOrder = campaign['cwordorder']
    
    if (wordOrder == ""):
        return ""
    
    wordOrderArray = wordOrder.split(",")
    
    for word in wordOrderArray:
        if (len(word) > 0):
            activeWordList.append(word)             
 #           print "activeWordList  add- > " + word
    
    return activeWordList    
Exemple #3
0
def getCampaignName(campaignID):
 #   print "Get Campaign Name"
      
    connection = Connection(DB.getDBHost(), DB.getDBPort())
    
    try:
        
        db = connection.meteor
         
        campaign = db.campaigns.find_one({"_id":campaignID})
        
        connection.close()
        
        if (campaign == ""):
            return "N/A"
        
        name  = campaign['campaign']
        
        if (name  == ""):
            return "N/A"
    
        return name
    
    except:
        print "**** getCampaignName Exception ****"
        return "N/A"    
Exemple #4
0
def init_apns():
    """Build APNClient connection pools, one list per enabled application.

    Blocks until MongoDB is reachable (retrying every 5 seconds), reads all
    applications with enableapns == 1 from the master db, and opens between
    1 and 5 APNClient instances per application (clamped from the app's
    'connections' field; missing 'environment' defaults to 'sandbox').

    :return: dict mapping app shortname -> list of APNClient instances.
    """
    mongodb = None
    while not mongodb:
        try:
            mongodb = Connection(options.mongohost, options.mongoport)
        except Exception:
            # wait 5 secs to reconnect -- only when the attempt failed;
            # the original also slept once after a successful connect.
            time.sleep(5)
    masterdb = mongodb[options.masterdb]
    apps = masterdb.applications.find({'enableapns': 1})
    apnsconns = {}
    for app in apps:
        apnsconns[app['shortname']] = []
        # Clamp the per-app connection count to the range [1, 5].
        conns = min(max(int(app['connections']), 1), 5)
        # dict.has_key() is gone in Python 3; 'in' works everywhere.
        if 'environment' not in app:
            app['environment'] = 'sandbox'
        for instanceid in range(0, conns):
            apn = APNClient(app['environment'], app['certfile'],
                            app['keyfile'], app['shortname'], instanceid)
            apnsconns[app['shortname']].append(apn)
    mongodb.close()
    return apnsconns
Exemple #5
0
class BaseMongoDB(object):
    """Lazy, optionally-authenticated handle to a single MongoDB database.

    The connection is opened on first access of the ``db`` property and
    cached thereafter; ``close()`` tears it down.  When a password is
    supplied, authentication happens against ``admin_db`` if given,
    otherwise against the target database itself.
    """

    def __init__(self, db, host=DEFAULT_HOST, user=None, password=None,
                 admin_db=None, ssl=DEFAULT_SSL, port=DEFAULT_PORT,
                 timeout=DEFAULT_TIMEOUT,
                 write_concern=DEFAULT_WRITE_CONCERN):
        # db: name of the database to expose.
        # admin_db: optional database name to authenticate against
        #           instead of ``db`` (used when the user is an admin).
        self._host = host
        self._password = password
        self._user = user
        self._db = db
        self._admin_db = admin_db

        self._ssl = ssl
        self._port = port
        self._timeout = timeout
        # NOTE(review): _write_concern is stored but never applied to the
        # connection in this block -- confirm it is consumed elsewhere.
        self._write_concern = write_concern

    @property
    def db(self):
        """Connect on first access and return the authenticated database."""
        # FIXME: memoize
        # _db_proxy doubles as the "already connected" flag; _proxy holds
        # the raw Connection so close() can find it.
        if not hasattr(self, '_db_proxy'):
            try:
                self._proxy = Connection(self._host, self._port,
                                         ssl=self._ssl,
                                         tz_aware=True,
                                         network_timeout=self._timeout)
            except errors.ConnectionFailure as e:
                # Re-raise with the host baked into the message for context.
                raise errors.ConnectionFailure("MongoDB Failed to connect (%s): %s" % (self._host, e))

            self._db_proxy = self._auth_db()

        # return the connected, authenticated database object
        return self._db_proxy

    def close(self):
        """Close the underlying connection, if one was ever opened."""
        if hasattr(self, '_proxy'):
            self._proxy.close()

    def _auth_db(self):
        '''
        Authenticate (when credentials are configured) and return the
        database object.

        by default the default user only has read-only access to db
        '''
        # No password configured: skip authentication entirely.
        if not self._password:
            pass
        elif self._admin_db:
            # Authenticate against the admin database when one is named.
            admin_db = self._proxy[self._admin_db]
            if not admin_db.authenticate(self._user, self._password):
                raise RuntimeError(
                    "MongoDB failed to authenticate with user (%s)" % self._user)
        else:
            # Otherwise authenticate directly against the target database.
            if not self._proxy[self._db].authenticate(self._user, self._password):
                raise RuntimeError(
                    "MongoDB failed to authenticate with user (%s)" % self._user)

        return self._proxy[self._db]

    def set_collection(self, collection):
        """Remember a default collection name on the instance."""
        self.collection = collection

    def __getitem__(self, collection):
        """Dict-style access: ``mongo['things']`` -> collection object."""
        return self.db[collection]
Exemple #6
0
def getActiveStudents(campaignID):
#    print "ACTIVE Students"
    
    
    connection = Connection(DB.getDBHost(), DB.getDBPort())
#    print "Connecting to Students"

    try: 
        db = connection.meteor
        print "Connected to Students"
    
        activeStudentList = []
    
        print "Active Student List for Campaign " + campaignID
    
        for students in db.students.find({'campaign':campaignID}):         
#            print students['cell']  + "   "  + students['campaign']              
#            print " IS Active"
            activeStudentList.append(students['cell'])
             
        connection.close()
        
#        print "ACTIVE Students List EXIT"

        return activeStudentList
    
    except:
        print "GET ACTIVE STUDENT EXCEPTION"
def run(observationNumber=None):
    """Return all interaction documents for one observation as JSON.

    :param observationNumber: value matched against the 'observation' field
        of FeederWatch.interactions.
    :return: bson.json_util-encoded dict with 'data' (list of documents)
        and 'result' == 'OK', or an 'error' message when the query fails.
    """
    global nodeCount
    response = {}

    # open a link to the backing database
    connection = Connection('localhost', 27017)
    db = connection["FeederWatch"]
    data_coll = db['interactions']

    try:
        query = {'observation': observationNumber}
        # Materialize the cursor BEFORE closing the connection; the
        # original iterated it after close, which fails once results
        # span more than one network batch.
        documents = list(data_coll.find(query))
    except Exception:
        response['error'] = "Could not find this observation"
        connection.close()
        return bson.json_util.dumps(response)

    connection.close()

    response['data'] = documents

    # Pack the results into the response object, and return it.
    response['result'] = 'OK'

    # Return the response object.
    #tangelo.log(str(response))
    return bson.json_util.dumps(response)
Exemple #8
0
def getActiveWordAnswerTF(thisWord, ansNumber):

     connection = Connection(getDBHost(), getDBPort())     
     question = ""
     print "Find This Word Answer TF " + thisWord  +  "    Answer Number  " + str(ansNumber)
        
     try: 
        db = connection.meteor    
        word = db.words.find_one({'seqnum':thisWord})
        connection.close()
        
        print "FOUND ANSWER"
        
        answer = "F"
        
        if ansNumber > 5:
            answer = "Answer Number Error 2"
            
        if ansNumber < 1:
            answer = "Answer Number Error 1"
            
        ansIndex = "active" + str(ansNumber)
        
        
        if (word[ansIndex] == True):
            answer = "T"
                   
        
     except:
        print "EXCEPTION -  TF ANSWER QUESTION"
        
        
     return answer
Exemple #9
0
def getActiveWordRemediation(thisWord, rightWrong):

     connection = Connection(getDBHost(), getDBPort())     
     question = ""
     print "Find Correct Remediation for word " + thisWord  +  "   Right/Wrong  " + rightWrong
        
     try: 
        db = connection.meteor    
        word = db.words.find_one({'seqnum':thisWord})
        connection.close()
        
        print "Found Word"
        
        if (rightWrong == "Correct"):
            remediation = word['remedifcorrect']
        else:
            remediation = word['remedifwrong']
            
        return remediation    
        
                   
        
     except:
        print "EXCEPTION -  In getActiveWordRemediation"
        
        
     return ""
Exemple #10
0
def ec2():
    """Fetch EC2 instance data and insert a hard-coded public DNS name list
    into the local 'mongodb-test' MongoDB database.

    NOTE(review): the AWS credentials are blank and ``res``/``conn`` are
    unused beyond the get_all_instances() call; the commented-out block
    below shows the originally intended per-instance filtering -- confirm
    before reviving it.
    """
    aws_access_key_id = ''
    aws_secret_access_key = ''
    kw_params = {}
    kw_params.update({'aws_access_key_id':aws_access_key_id})
    kw_params.update({'aws_secret_access_key':aws_secret_access_key})
    conn = boto.ec2.connect_to_region('ap-northeast-1', **kw_params)
    #res = conn.get_all_instances(filters={'tag-value': 'mongodb-query-test'})
    res = conn.get_all_instances()
    con = Connection(host='localhost')
    db = con['mongodb-test']

    # Placeholder DNS name; presumably replaced by real instance data once
    # the commented-out filtering below is restored.
    a = ['ec2-xxxxxx.ap-northeast-1.compute.amazonaws.com']

    for x in a:
        print x
        db.public_dns_name.insert({
            'public_dns_name': x
            })
#if res:
#    for inst in res:
#        name = inst.instances[0].tags['Name']
#        logging.warning(name)
#        if name == 'ClientMaster2':
#            if inst.instances[0].public_dns_name != None:
#                logging.warning(name)
#                print inst.instances[0].public_dns_name
#                db.public_dns_name.insert({
#                       'public_dns_name': inst.instances[0].public_dns_name
#                       })
    con.close()
Exemple #11
0
def getActiveWordAnswer(thisWord, ansNumber):

     connection = Connection(getDBHost(), getDBPort())     
     question = ""
     print "Find This Word Answer  " + thisWord  +  "    Answer Number  " + str(ansNumber)
        
     try: 
        db = connection.meteor    
        collection = db.words
        word = db.words.find_one({'seqnum':thisWord})
        connection.close()
        
        print "FOUND ANSWER"
        answer = ""
        
        if ansNumber > 5:
            answer = "Answer Number Error 2"
            
        if ansNumber < 1:
            answer = "Answer Number Error 1"
            
        ansIndex = "ans" + str(ansNumber)
        
        print "  Answer index  " + ansIndex
        answer = word[ansIndex]
        
        
     except:
        print "EXCEPTION -  ANSWER QUESTION"
        answer = ""
        
        
     return answer
Exemple #12
0
def ec2():
    """Fetch EC2 instance data and insert a hard-coded public DNS name list
    into the local 'mongodb-test' MongoDB database.

    NOTE(review): the AWS credentials are blank and ``res`` is unused; the
    commented-out block below shows the originally intended per-instance
    filtering -- confirm before reviving it.
    """
    aws_access_key_id = ''
    aws_secret_access_key = ''
    kw_params = {}
    kw_params.update({'aws_access_key_id': aws_access_key_id})
    kw_params.update({'aws_secret_access_key': aws_secret_access_key})
    conn = boto.ec2.connect_to_region('ap-northeast-1', **kw_params)
    #res = conn.get_all_instances(filters={'tag-value': 'mongodb-query-test'})
    res = conn.get_all_instances()
    con = Connection(host='localhost')
    db = con['mongodb-test']

    # Placeholder DNS name; presumably replaced by real instance data once
    # the commented-out filtering below is restored.
    a = ['ec2-xxxxxx.ap-northeast-1.compute.amazonaws.com']

    for x in a:
        print x
        db.public_dns_name.insert({'public_dns_name': x})


#if res:
#    for inst in res:
#        name = inst.instances[0].tags['Name']
#        logging.warning(name)
#        if name == 'ClientMaster2':
#            if inst.instances[0].public_dns_name != None:
#                logging.warning(name)
#                print inst.instances[0].public_dns_name
#                db.public_dns_name.insert({
#                       'public_dns_name': inst.instances[0].public_dns_name
#                       })
    con.close()
Exemple #13
0
def load_json(filename='', db_name='', collection_name='', clean=False):
    """
    -------------------------
    load data into Mongo db
    -------------------------
    """
    connection = Connection() ##('localhost', 27017) #! get connection
    db = connection[db_name] #! get database
    collection = db[collection_name] #! get collection
    if clean:
    	collection.remove() # 

    count = 0
    with open(filename, 'rb') as input_file:
        for idx, line in enumerate(input_file):
            line = line.strip('\r\n').strip('\n')
            json_obj = json.loads(line, encoding="utf-8")
            try:
                collection.insert(json_obj)
                count += 1
            except:
                print traceback.format_exc()
                break

    connection.close()
    print '------------- total json objects found = %s --- ' % (idx+1)
    print '------------- total json objects loaded = %s --- ' % (count)
Exemple #14
0
class Mongo(Output):
    """Output sink that routes Twitter stream packets into MongoDB
    collections keyed by the packet's event type."""

    def init(self):
        # Connection settings are hard-coded to the local 'twitter' db.
        addr = "localhost"
        port = 27017
        db_name = "twitter"
        self.connection = Connection(addr, port)
        self.db = self.connection[db_name]

    def throw(self, packet):
        """Insert packet['data'] into a collection chosen by its 'event'
        field; plain tweets (no event) go to the 'tweet' collection.

        NOTE(review): the favorite/list branches use substring matching,
        so e.g. 'unfavorite' also lands in 'favorite' -- presumably
        intentional; confirm before changing to equality.
        """
        data = packet["data"]
        if data.get("event") is None:
            self.db.tweet.insert(data)
        elif "favorite" in data["event"]:
            self.db.favorite.insert(data)
        elif "list" in data["event"]:
            self.db.list.insert(data)
        elif "retweet" == data["event"]:
            self.db.retweet.insert(data)
        elif "follow" == data["event"]:
            self.db.follow.insert(data)
        elif "dm" == data["event"]:
            self.db.dm.insert(data)
        elif "delete" == data["event"]:
            self.db.delete.insert(data)

    def cleanup(self):
        # Release the MongoDB connection when the pipeline shuts down.
        self.connection.close()
 def execute(self, arborapi=None):
     """Copy every document from each input collection into this filter's
     output collection, then stamp the output's modified time.

     :param arborapi: unused here; accepted for interface compatibility.
     """
     print self.name + " executing"
     # setup mongo connection and look through all input collections, copying
     connection = Connection('localhost', 27017)
     if len(self.databaseName) > 0:
         db = connection[self.databaseName]
     else:
         # fall back to the default 'arbor' database
         db = connection['arbor']
     outputcoll = db[self.outputInformation.collectionName]
     # clear out the output to prepare for running an algorithm
     outputcoll.drop()
     # loop through all inputs and process all objects in the inputs
     for thisInput in self.inputs:
         inputcoll = db[thisInput.collectionName]
         # find all documents in this input (repeated for each input)
         queryResults = inputcoll.find()
         print "found that ", thisInput.collectionName, " has ", queryResults.count(
         ), " records"
         # write the documents into the output collection and indicate the output time changed
         for result in queryResults:
             outputcoll.insert(result)
         #  pause the writer enough to allow the write to complete? Found example code here:
         #  http://sourceforge.net/u/rick446/random/ci/master/tree/lib.py
         db.last_status()
     # reset the filter's modified time and assign it to the output object
     self.outputInformation.modifiedTime = self.modifiedTime = time.time()
     connection.close()
def query_log():
    """Print every document stored in the local mydb.logdata collection."""
    conn = Connection('localhost', 27017)
    log_collection = conn.mydb.logdata
    for document in log_collection.find():
        print(document)
    conn.close()
def displaydb():
    """
	displays all data inside db called file name 
	something similar to read db
	"""
    global host
    connection = Connection(host, 27017)
    db = connection[dbname]
    collection = db[coll]
    posts = db.coll
    if posts.count():
        for post in posts.find():
            for key in post:
                if key != '_id':
                    print '%22s' % (str(key)),
            print
            break
        for post in posts.find():
            for key, val in post.iteritems():
                if key != '_id':
                    print '%22s' % (str(val)),
            print
    else:
        return 'no data'
    connection.close()
Exemple #18
0
class MongoHandler():
  """Singleton handle to the local 'intellafeed' MongoDB database."""
  __metaclass__ = Singleton

  def __init__(self):
    # Run configuration then connection; surface any failure with a
    # class-specific message so callers know where setup broke.
    self.db_connection = None
    try:
      self.init_config()
      self.init_db()
    except Exception as e:
      raise Exception('In MongoHandler init: ' + str(e))

  def init_config(self):
    """Populate the connection string and target database name."""
    self.db_connection_str = 'localhost:27017'
    self.db_name = 'intellafeed'

  def init_db(self):
    """Open the MongoDB connection and bind the configured database."""
    self.db_connection = Connection(self.db_connection_str)
    self.db = self.db_connection[self.db_name]

  def close_connection(self):
    """Close the MongoDB connection when one was opened."""
    if self.db_connection is not None:
      self.db_connection.close()
 def execute(self):
     """Write this filter's parameters into its output collection as one
     {'key': ..., 'value': ...} document per parameter, then stamp the
     output's modified time.
     """
     print self.name + " executing"
     # setup mongo connection and look through all input collections, copying
     connection = Connection('localhost', 27017)
     if len(self.databaseName) > 0:
         db = connection[self.databaseName]
     else:
         # fall back to the default 'arbor' database
         db = connection['arbor']
     outputcoll = db[self.outputInformation.collectionName]
     # clear out the output to prepare for running an algorithm
     outputcoll.drop()
     # loop through all parameters on this filter and output a line in the collection
     # for each parameter as a key/value pair
     for thisparam in self.parameters:
         outdict = dict()
         outdict['key'] = thisparam
         outdict['value'] = self.parameters[thisparam]
         # find all documents in this input (repeated for each input)
         outputcoll.insert(outdict)
         #  pause the writer enough to allow the write to complete? Found example code here:
         #  http://sourceforge.net/u/rick446/random/ci/master/tree/lib.py
         db.last_status()
     # reset the filter's modified time and assign it to the output object
     self.outputInformation.modifiedTime = self.modifiedTime = time.time()
     connection.close()
Exemple #20
0
class MongoDataSource(DataSource):  # define parent class

    connection = None
    database = None

    def open_connection(self, options):
        print 'Opening mongo connection'

        if not options["host"] or not options["port"] or not options[
                "database"]:
            return False

        # Create a connection to mongodb
        self.connection = Connection(options["host"], options["port"])
        self.database = self.connection[options["database"]]

    def get_collection(self, collection):
        if collection in self.database.collection_names():
            return self.database[collection]
        else:
            return None

    def database():
        return self.database

    def index(self, object, collection):
        print "Indexing into MongoDB"
        self.database[collection].save(object)

    def close_connection(self):
        self.connection.close()
Exemple #21
0
    class DB(BaseDB):
        """MongoDB-backed key/object store for pickled model classes."""

        def __init__(self):
            self.connection = Connection()
            self.db = self.connection['oauth']
            # Register the Transform SONManipulator so stored objects are
            # (de)serialized transparently on insert/find.
            self.db.add_son_manipulator(Transform())
            self.models = self.db.models

        def get(self, key):
            """Return the stored object for *key*."""
            return self.models.find_one({'key': key})['class']

        def put(self, key, data):
            """Store *data* under *key*.

            Updates an existing entry instead of inserting, so the same
            key never ends up with multiple copies of the same data.
            """
            if self.contains(key):
                self.update(key, data)
            else:
                self.models.insert({'key': key, 'class': data})

        def update(self, key, data):
            # SONManipulators are not applied on updates, and
            # manipulate=True would add a fresh _id, causing
            # 'OperationFailure: Modifiers and non-modifiers cannot be
            # mixed'.  So run the transform by hand and $set the result.
            encoded = json.loads(jsonpickle.encode(data))
            self.models.update({'key': key}, {"$set": {'class': encoded}},
                               multi=False, safe=True)

        def delete(self, key):
            """Remove the entry stored under *key*."""
            self.models.remove({'key': key})

        def commit(self):
            # MongoDB writes take effect immediately; nothing to commit.
            pass

        def abort(self):
            # Rollback is intentionally unimplemented for this backend.
            pass

        def contains(self, key):
            """Return True when an entry with *key* exists.

            find_one() yields None when nothing matches, so bool() gives
            False for a miss and True for any hit.
            """
            return bool(self.models.find_one({'key': key}))

        def close(self):
            """Close the underlying MongoDB connection."""
            self.connection.close()
Exemple #22
0
def run():
    # Create an empty response object.
    response = {}
    collectionNames = []

    # look through the collections in the ivaan database and return the name of all collections
    # that match the naming profile for tables.  This is matching to see if the collection name
    # begins with "table_cardiac" since it is only returning cardiac studies from the IVAaN database

    connection = Connection('localhost', 27017)
    db = connection['ivaan']
    # get a list of all collections (excluding system collections)
    collection_list = db.collection_names(False)
    for coll in collection_list:
        # if it is a table, then add it to the response
        if (str(coll[:14]) == 'table_cardiac_'):
            print "found table:", coll
            # don't return the prefix in the project name. Users don't have to know the
            # cardiac project collection names are prepended
            collectionNames.append(coll[14:])

    connection.close()

    # if no projects found at all, return a default name
    if len(collectionNames) == 0:
        collectionNames.append("default")

    # Pack the results into the response object, and return it.
    response['result'] = collectionNames

    # Return the response object.
    tangelo.log(str(response))
    return bson.json_util.dumps(response)
Exemple #23
0
def CalculateSpeciationBySeparateConnection(system, database, port,
                                            tree_collection_name,
                                            matrix_collection_name, character,
                                            verbose):
    """Run the R-based speciation calculation over a dedicated connection.

    :param system: MongoDB host name.
    :param database: database holding both collections.
    :param port: MongoDB port.
    :param tree_collection_name / matrix_collection_name: collection names.
    :param character: character argument forwarded to InvokeSpeciation.
    :param verbose: verbosity flag forwarded to InvokeSpeciation.
    :return: whatever InvokeSpeciation returns.
    """
    connection = Connection(system, port)
    try:
        db = connection[database]
        tree_coll = db[str(tree_collection_name)]
        matrix_coll = db[str(matrix_collection_name)]

        # start up an R interpreter to do the processing.  We will be
        # converting a tree, so create a tree handler
        robjects.r("library('geiger')")
        r = robjects.r
        r('source("/Users/clisle/Projects/Arbor/code/python-R-integration/arbor2apeTreeHandler.R")'
          )
        r('treeHandler = new("arbor2apeTreeHandler")')

        # load function definitions used for speciation
        r('source("speciationDefines.R")')

        return InvokeSpeciation(tree_collection_name, tree_coll,
                                matrix_collection_name, matrix_coll, character,
                                verbose)
    finally:
        # The original's 'if (connection)' guard was always true; close
        # unconditionally, and also when InvokeSpeciation raises (the
        # original leaked the connection in that case).
        connection.close()
def run(obsid,source,target,interaction,name,lat,lng,datetime):
    """Record a single observation interaction in FeederWatch.interactions.

    The location is stored as a [longitude, latitude] pair to be compatible
    with standard mongoDB geo practices; the current time is recorded in
    the 'recorded' field.

    :return: bson.json_util-encoded dict with 'result' == 'OK', or an
        'error' message when the write fails.
    """
    global nodeCount
    response = {}

    # open a link to the backing database
    connection = Connection('localhost', 27017)
    db = connection["FeederWatch"]
    data_coll = db['interactions']

    currenttime = arrow.now().timestamp

    try:
        location_point = [float(lng),float(lat)]
        record = {'observation':obsid,'source':source,'target':target,'interaction':interaction,'location_name':name,'location': location_point,'date':datetime,'recorded':currenttime}
        interactionCursor = data_coll.insert(record)
    except Exception:
        response['error'] = "Could not write"
        return bson.json_util.dumps(response)
    finally:
        # The original leaked the connection on the write-failure path.
        connection.close()

    response['data'] = []

    # Pack the results into the response object, and return it.
    response['result'] = 'OK'

    # Return the response object.
    #tangelo.log(str(response))
    return bson.json_util.dumps(response)
Exemple #25
0
    def connection(self, kind=0, q=None):
        '''
         Access MongoDB and load topology or events data.

         :param kind: 0 runs the instance query ``self.qr`` against the
             ``objectv1`` (topology) collection; any other value runs *q*
             against ``eventsv1`` (events).
         :param q: query string (single-quoted JSON) used when kind != 0.

         :return: a cursor of topology or events.
         :rtype: Cursor of elements dictionnary or NoneType.

         NOTE(review): the connection is closed before the returned cursor
         is consumed; fetching batches after the first may fail -- confirm
         with callers / pymongo version in use.
        '''
        if self.username is None:
            # No credentials configured: local default connection.
            connection = Connection()
            db = connection['canopsis']
        else:
            connection = MongoClient(self.MONGO_URL, self.MONGO_PORT)
            db = connection["canopsis"]
            # Do the authentication
            db.authenticate(self.username, self.password)
        if kind == 0:
            query = self.qr
            # Queries arrive single-quoted; JSON requires double quotes.
            json_acceptable = query.replace("'", "\"")
            query = json.loads(json_acceptable)
            cursor = db.objectv1.find(query)
        else:
            query = q
            # Queries arrive single-quoted; JSON requires double quotes.
            json_acceptable = query.replace("'", "\"")
            query = json.loads(json_acceptable)
            cursor = db.eventsv1.find(query)
        connection.close()
        return cursor
def init_apns():
    """Build APNClient connection pools, one list per enabled application.

    Blocks until MongoDB is reachable (retrying every 5 seconds), then
    opens between 1 and 5 APNClient instances (clamped from the app's
    'connections' field) for every application with enableapns == 1 that
    carries certfile/keyfile/shortname.  Individual APNClient failures are
    logged and skipped rather than aborting the whole setup.

    :return: dict mapping app shortname -> list of APNClient instances.
    """
    mongodb = None
    while not mongodb:
        try:
            mongodb = Connection(options.mongohost, options.mongoport)
        except Exception:
            # wait 5 secs to reconnect -- only when the attempt failed;
            # the original also slept once after a successful connect.
            time.sleep(5)
    masterdb = mongodb[options.masterdb]
    apps = masterdb.applications.find({'enableapns': 1})
    apnsconns = {}
    for app in apps:
        apnsconns[app['shortname']] = []
        # Clamp the per-app connection count to the range [1, 5].
        conns = min(max(int(app['connections']), 1), 5)
        if 'environment' not in app:
            app['environment'] = 'sandbox'

        if 'certfile' in app and 'keyfile' in app and 'shortname' in app:
            for instanceid in range(0, conns):
                try:
                    apn = APNClient(app['environment'], app['certfile'], app['keyfile'], app['shortname'], instanceid)
                except Exception as ex:
                    # Log and skip a bad cert/key rather than aborting all.
                    logging.error(ex)
                    continue
                apnsconns[app['shortname']].append(apn)
    mongodb.close()
    return apnsconns
    def setData(self, serialData):
        """Decode one JSON-encoded sensor packet and persist it into the
        local 'inoshare' MongoDB database.

        :param serialData: JSON string carrying id_no, sensor, porta,
            valor, datahora and lab fields (BSON extensions handled via
            json_util.object_hook).
        """
        try:
            print serialData
            decoded = json.loads(serialData, object_hook=json_util.object_hook)
            id_no = decoded['id_no']            
            sensor = decoded['sensor']            
            porta = decoded['porta']
            valor = str(decoded['valor'])
            # Sensor type "2" reports integers; normalize to two decimals.
            if sensor == "2":
                valor = valor + '.00'
            datahora = decoded['datahora']
            lab = decoded['lab']
            # save the packet to the database
            con = Connection('127.0.0.1', 27017, safe=True)
            db = con['inoshare']
            pacote = {'id_no':id_no, 'sensor':sensor, 'porta':porta, 'valor':valor, 'datahora':datahora, 'lab': lab}
            inoshare = db.inoshare
            inoshare.insert(pacote)
            print pacote
            con.close()
            
        except  Exception as e:
            print "Erro no filter data: ", e.message, e.args
def displaydb():
    """
	displays all data inside db called file name 
	something similar to read db
	"""
    global host
    connection = Connection(host, 27017)
    db = connection[dbname]
    collection = db[coll]
    posts = db.coll
    if posts.count():
        for post in posts.find():
            for key in post:
                if key != "_id":
                    print "%22s" % (str(key)),
            print
            break
        for post in posts.find():
            for key, val in post.iteritems():
                if key != "_id":
                    print "%22s" % (str(val)),
            print
    else:
        return "no data"
    connection.close()
Exemple #29
0
class Deleted(Filter):
    """Filter that announces deleted tweets, looked up from the local
    MongoDB tweet archive."""

    def init(self):
        self.watch = []  # screen names to watch; when empty, report everyone
        self.exclude = []  # screen names to never report

        addr = "localhost"
        port = 27017
        db_name = "twitter"
        self.connection = Connection(addr, port)
        self.col = self.connection[db_name].tweet

    def filter(self, packet):
        """On a 'delete' event, look up the archived tweet and broadcast
        who deleted what; tweets missing from the archive are ignored."""
        data = packet["data"]
        if not isinstance(data, dict):
            return None
        if data.get("event") == "delete":
            try:
                tweet = self.col.find({u"id": data["id"]})[0]
            except IndexError:
                # tweet was never archived; nothing to report
                return None
            detail = {"user": tweet[u"user"][u"screen_name"], "post": tweet[u"text"]}
            # Suppress excluded users, and (when a watch list is set)
            # anyone not on it.
            if str(detail["user"]) in self.exclude or (self.watch != [] and str(detail["user"]) not in self.watch):
                return None
            self.send(u"%(user)s deleted: %(post)s" % detail, exclude=["favbot"])

    def cleanup(self):
        # drop the MongoDB connection on shutdown
        self.connection.close()
 def execute(self, arborapi=None):
     """Copy every document from each input collection into this filter's
     output collection, then stamp the output's modified time.

     :param arborapi: unused here; accepted for interface compatibility.
     """
     print self.name+" executing"
     # setup mongo connection and look through all input collections, copying
     connection = Connection('localhost', 27017)
     if len(self.databaseName)>0:
         db = connection[self.databaseName]
     else:
         # fall back to the default 'arbor' database
         db = connection['arbor']
     outputcoll = db[self.outputInformation.collectionName]
     # clear out the output to prepare for running an algorithm
     outputcoll.drop()
     # loop through all inputs and process all objects in the inputs
     for thisInput in self.inputs:
         inputcoll = db[thisInput.collectionName]
         # find all documents in this input (repeated for each input)
         queryResults = inputcoll.find()
         print "found that ", thisInput.collectionName," has ",queryResults.count(), " records"
         # write the documents into the output collection and indicate the output time changed
         for result in queryResults:
             outputcoll.insert(result)
         #  pause the writer enough to allow the write to complete? Found example code here:
         #  http://sourceforge.net/u/rick446/random/ci/master/tree/lib.py
         db.last_status()
     # reset the filter's modified time and assign it to the output object
     self.outputInformation.modifiedTime = self.modifiedTime = time.time()
     connection.close()
 def execute(self):
     """Write this filter's parameters into its output collection as one
     {'key': ..., 'value': ...} document per parameter, then stamp the
     output's modified time.
     """
     print self.name+" executing"
     # setup mongo connection and look through all input collections, copying
     connection = Connection('localhost', 27017)
     if len(self.databaseName)>0:
         db = connection[self.databaseName]
     else:
         # fall back to the default 'arbor' database
         db = connection['arbor']
     outputcoll = db[self.outputInformation.collectionName]
     # clear out the output to prepare for running an algorithm
     outputcoll.drop()
     # loop through all parameters on this filter and output a line in the collection
     # for each parameter as a key/value pair
     for thisparam in self.parameters:
         outdict = dict()
         outdict['key'] = thisparam
         outdict['value'] = self.parameters[thisparam]
         # find all documents in this input (repeated for each input)
         outputcoll.insert(outdict)
         #  pause the writer enough to allow the write to complete? Found example code here:
         #  http://sourceforge.net/u/rick446/random/ci/master/tree/lib.py
         db.last_status()
     # reset the filter's modified time and assign it to the output object
     self.outputInformation.modifiedTime = self.modifiedTime = time.time()
     connection.close()
Exemple #32
0
def run(tablename):
    # Create an empty response object.
    response = {}
    print "using collection:",tablename

    # first find out the types of the attributes in this collection. Create a dictionary with the names and types
    connection = Connection('localhost', 27017)
    db = connection['ivaan']
    dataset_collection = db[tablename]
    #tablerecord = dataset_collection.find()[0]

    # Now that we have the proper types in the table collection stored in a handy local dictionary "attributes", lets
    # build a query for mongoDB depending on how many filters are enabled.

    querystring = {}
    print "query to perform:", querystring

    # now that we have the query build, execute it and return the matching records from the collection

    connection = Connection('localhost', 27017)
    db = connection['polar']
    dataset_collection = db[tablename]

    # Do a find operation with the passed arguments.
    it = dataset_collection.find(querystring)
    results = [x for x in it]
   
    connection.close()

    print results

    # convert from the local json to a geojson multipoint result

    #latitudes = [30, 30, 30]
    #longitudes = [10, 20, 30]

    #define multipoint geometry
    multipoint = ogr.Geometry(ogr.wkbMultiPoint)

    #create point geometry and add to multipoint geometry
    for i in range(len(results)):
        point = ogr.Geometry(ogr.wkbPoint)
        fixedlocation = convertStringToFloatPoint(results[i]['stationLng'], results[i]['stationLat'])
        point.AddPoint(fixedlocation['lng'],fixedlocation['lat'])
        multipoint.AddGeometry(point)

    #convert geometry to GeoJSON format
    geojson_multipoint = multipoint.ExportToJson()


    # Pack the results into the response object, and return it.
    response['count'] = it.count()
    response['data'] = geojson_multipoint

    response['result'] = 'OK'

    # Return the response object.
    tangelo.log(str(response))
    return bson.json_util.dumps(response)
 def _drop_database(self, database_name):
     """Drop *database_name* from the local mongo server when it exists."""
     conn = Connection()
     try:
         existing = conn.database_names()
         if database_name in existing:
             self.log("Dropping database: %s" % database_name)
             conn.drop_database(database_name)
     finally:
         # always release the connection, even if the drop fails
         conn.close()
def to_db(infile, db_name):
    """Insert every location decoded from *infile* into db_name.location_info.

    Each document's mongo '_id' is set to its location key.
    """
    conn = Connection('localhost', 27017)
    try:
        db = conn[db_name]
        locations_info = decode_locations(infile)
        for loc_id in locations_info:
            # use the location key as the document id
            locations_info[loc_id]['_id'] = loc_id
            db['location_info'].insert(locations_info[loc_id])
    finally:
        # previously the connection leaked if decoding or an insert raised
        conn.close()
def AssignHierarchicalNamesSeparateConnection(system, database, port, tree_collection_name, verbose):
    """Open a dedicated connection and run AssignHierarchicalNames on the tree collection."""
    conn = Connection(system, port)
    tree_collection = conn[database][str(tree_collection_name)]
    AssignHierarchicalNames(tree_collection)
    if conn:
        conn.close()
Exemple #36
0
def get_full_code(object_id):
    """Get the full code document from the database.

    Returns the whole document, or None when no code with that id exists.
    """
    connection = Connection()
    try:
        # codes live in the 'codingbooth' database, 'codes' collection
        result = connection.codingbooth.codes.find_one({'_id': ObjectId(object_id)})
    finally:
        # previously the connection leaked if find_one raised
        connection.close()
    return result
Exemple #37
0
def load_user(object_id):
    """Load the user document with the given id.

    Returns None when no such user exists.
    """
    connection = Connection()
    try:
        result = connection.codingbooth.users.find_one({'_id': ObjectId(object_id)})
    finally:
        # previously the connection leaked if find_one raised
        connection.close()
    return result
Exemple #38
0
def get_code_from_name(name):
    """Retrieve the code document with the given *name*.

    Returns None when no code with that name exists.
    """
    connection = Connection()
    try:
        result = connection.codingbooth.codes.find_one({'name': name})
    finally:
        # previously the connection leaked if find_one raised
        connection.close()
    return result
Exemple #39
0
def get_code(object_id):
    """Retrieve just the 'code' field of the stored document."""
    connection = Connection()
    try:
        result = connection.codingbooth.codes.find_one({'_id': ObjectId(object_id)})
    finally:
        # previously the connection leaked if find_one raised
        connection.close()
    # NOTE(review): result is None when the id is unknown, so this raises
    # TypeError -- confirm callers never pass a missing id before guarding.
    return result['code']
Exemple #40
0
def load_user_by_email(email):
    """Load the user document with the given *email*.

    Returns None when no such user exists.
    """
    connection = Connection()
    try:
        result = connection.codingbooth.users.find_one({'email': email})
    finally:
        # previously the connection leaked if find_one raised
        connection.close()
    return result
 def _drop_database(self, database_name):
     """Remove *database_name* from the server when it is present."""
     connection = Connection()
     try:
         if database_name not in connection.database_names():
             return
         self.log("Dropping database: %s" % database_name)
         connection.drop_database(database_name)
     finally:
         # finally still runs on the early return above
         connection.close()
Exemple #42
0
def set_compile_results(object_id, results):
    """Store compilation *results* on the code document.

    Returns the update status returned by pymongo.
    """
    connection = Connection()
    try:
        db_id = connection.codingbooth.codes.update(
            {'_id': ObjectId(object_id)},
            {'$set': {'compilation': results}})
    finally:
        # previously the connection leaked if the update raised
        connection.close()
    return db_id
def AssignHierarchicalNamesSeparateConnection(system, database, port,
                                              tree_collection_name, verbose):
    """Connect to *system*:*port* and assign hierarchical names to the tree collection."""
    connection = Connection(system, port)
    database_handle = connection[database]
    tree_collection = database_handle[str(tree_collection_name)]
    AssignHierarchicalNames(tree_collection)
    if connection:
        connection.close()
Exemple #44
0
def set_run_results(object_id, results):
    """Store the results of running the compiled executable on the code document.

    Returns the update status returned by pymongo.
    """
    connection = Connection()
    try:
        db_id = connection.codingbooth.codes.update(
            {'_id': ObjectId(object_id)},
            {'$set': {'run': results}})
    finally:
        # previously the connection leaked if the update raised
        connection.close()
    return db_id
    def execute(self, arborapi=None):
        print self.name + " executing"
        # setup mongo connection and look through all input collections, copying
        connection = Connection('localhost', 27017)
        if len(self.databaseName) > 0:
            db = connection[self.databaseName]
        else:
            db = connection['arbor']
        outputcoll = db[self.outputInformation.collectionName]
        # clear out the output to prepare for running an algorithm
        outputcoll.drop()
        # loop through all inputs and process all objects in the inputs
        for thisInput in self.inputs:
            inputcoll = db[thisInput.collectionName]

            # find all documents in this input (repeated for each input) that match the
            # test criteria.  If no criteria is specified, pass records through

            for case in switch(self.parameters['filterOperation']):
                if case('GreaterThan'):
                    query = {
                        self.parameters['filterAttribute']: {
                            '$gt': self.parameters['filterValue']
                        }
                    }
                    break
                if case('LessThan'):
                    query = {
                        self.parameters['filterAttribute']: {
                            '$lt': self.parameters['filterValue']
                        }
                    }
                    break
                if case('NotEqual'):
                    query = {
                        self.parameters['filterAttribute']: {
                            '$ne': self.parameters['filterValue']
                        }
                    }
                    break
                if case('Equal') or case('EqualTo'):
                    query = {
                        self.parameters['filterAttribute']:
                        self.parameters['filterValue']
                    }
                    break
            print "query used was: ", query
            queryResults = inputcoll.find(query)

            # write the documents into the output collection and indicate the output time changed
            for result in queryResults:
                outputcoll.insert(result)
        # rest the filter's modified time and assign it to the output object
        self.outputInformation.modifiedTime = self.modifiedTime = time.time()
        print self.name, " passed ", outputcoll.count(), " records"
        connection.close()
Exemple #46
0
def create_user(email, password):
    """Create a user document with the given email and password.

    Returns the new document's id.
    NOTE(review): the password is stored exactly as given -- confirm it is
    hashed by the caller before it reaches this function.
    """
    connection = Connection()
    try:
        db_id = connection.codingbooth.users.insert({
            'email': email,
            'password': password})
    finally:
        # previously the connection leaked if the insert raised
        connection.close()
    return db_id
Exemple #47
0
def save_user(object_id, email, password):
    """Update the stored email and password of the user with *object_id*."""
    connection = Connection()
    try:
        connection.codingbooth.users.update(
            {'_id': ObjectId(object_id)},
            {'$set': {
                'email': email,
                'password': password}})
    finally:
        # previously the connection leaked if the update raised
        connection.close()
Exemple #48
0
def mongodb_parser(db, collection, args=None):
    """Query *collection* in database *db* and return matches as a JSON string.

    The mongo '_id' field is stripped from every document because ObjectId is
    not JSON serializable.  *args* is passed straight to find().
    """
    conn = Connection()
    try:
        coll_handle = conn[db][collection]
        # materialize before closing so the cursor is fully consumed
        documents = list(coll_handle.find(args))
    finally:
        # previously the connection leaked if the query raised
        conn.close()
    for doc in documents:
        doc.pop('_id')
    return json.dumps({'result': documents})
Exemple #49
0
def mongodb_write(new_dict):
    """Insert *new_dict* into the db2ins collection of the configured mongo database.

    Exits the process when the server is unreachable (legacy behavior kept).
    """
    try:
        mongo_conn = Connection(MONGODB_SERVER, 27017)
    except Exception:
        sys.exit("Failure to connect to the mongo database")
    try:
        db = mongo_conn[MONGODB_DB]
        db.db2ins.insert(new_dict)
    finally:
        # previously the connection leaked if the insert raised
        mongo_conn.close()
Exemple #50
0
def set_code(object_id=None, code=""):
    """Store *code*: update the existing document when object_id is given,
    otherwise insert a new one.

    Returns the document id as a string.
    """
    connection = Connection()
    try:
        collection = connection.codingbooth.codes
        if object_id:
            collection.update({'_id': ObjectId(object_id)},
                {'$set': {'code': code}})
            db_id = object_id
        else:
            db_id = collection.insert({'code': code})
    finally:
        # previously the connection leaked if the update/insert raised
        connection.close()
    return str(db_id)
Exemple #51
0
def run(obsid, source, target, interaction):
    """Delete interaction record(s) for observation *obsid* and return the
    remaining records as a JSON response.

    When source and target are both 'null', every record of the observation
    is removed; otherwise only the single matching record is removed.
    """
    global nodeCount
    response = {}

    # open a link to the backing database
    connection = Connection('localhost', 27017)
    try:
        data_coll = connection["FeederWatch"]['interactions']

        if source == 'null' and target == 'null':
            # the last record of this observation is being deleted
            try:
                data_coll.remove({'observation': obsid})
                # materialize the remaining records: a live cursor is not
                # serializable by bson.json_util.dumps
                response['data'] = list(data_coll.find({'observation': obsid}))
            except Exception:
                response['error'] = "Could not delete"
            return bson.json_util.dumps(response)
        else:
            # ordinary case: one record out of many is being deleted
            try:
                record = {
                    'observation': obsid,
                    'source': source,
                    'target': target,
                    'interaction': interaction
                }
                data_coll.remove(record)
                response['data'] = list(data_coll.find({'observation': obsid}))
            except Exception:
                response['error'] = "Could not delete"
                return bson.json_util.dumps(response)
    finally:
        # previously the connection leaked on the early returns above
        connection.close()

    # Pack the results into the response object, and return it.
    response['result'] = 'OK'
    return bson.json_util.dumps(response)
Exemple #52
0
def process(id):
    stuff = majdoor.fetch_koji_build(id)
    if stuff and len(stuff) == 3:
        package, nvr, urls = stuff
    else:
        print "??? majdoor skipped / failed", id
        return "OK"

    if not urls:
        return
    for arch, url in urls:
        basename = url.split('/')[-1]
        path = os.path.join("cache", nvr, basename)

        if path.endswith(".rpm") and not \
                path.endswith(".src.rpm") and \
                not "-debuginfo-" in path:
            output = scanner.analyze(path)
            print output

            connection = Connection()
            db = connection.test_database
            analysis = db.analysis
            analysis.insert(json.loads(output))
            connection.close()

    # do rpmgrill stuff, spawn as we don't know how rpmgrill affets our env.
    basepath = os.path.join(os.path.realpath("cache"), nvr)
    print "Running rpmgrill on", basepath
    p = subprocess.Popen("./invoke_rpmgrill.sh %s" % basepath,
                         stderr=subprocess.PIPE,
                         shell=True)
    _, err = p.communicate()

    output = os.path.join(os.path.realpath("cache"), nvr, "rpmgrill.json")
    if not os.path.exists(output):
        print "!!! rpmgrill failed for", basepath
        print err
    else:
        with open(output) as f:
            data = f.read()
        # we store rpmgrill.json in a database
        connection = Connection()
        db = connection.test_database
        rpmgrill = db.rpmgrill
        entry = {"nvr": nvr, "output": data, "package": package}
        rpmgrill.insert(entry)
        print "!!!", err

    return "OK"
def run(study=None, projectname='first'):
    # Create an empty response object.
    global nodeCount
    response = {}

    # first decode the argument from being passed through a URL
    jsonObj = bson.json_util.loads(study)

    # Append the project name to the "table_" prefix, so this dataset
    # will be read as a IVAaN tabular dataset.   Then any dashes are replaced with
    # underscores to avoid dashes in mongo collection names

    print "using project name: ", projectname
    # build name from "table" + date + dropped filename
    collectionName = 'table_cardiac_' + projectname
    collectionNameNoDashes = string.replace(collectionName, '-', '_')
    print "modified collection name: ", collectionNameNoDashes

    # open a database connection
    connection = Connection('localhost', 27017)
    db = connection["ivaan"]
    data_coll = db[collectionNameNoDashes]

    # we have a single JSON object here with all the fields to store, but some of them are floating point numbers which
    # are coming across as strings  (e.g.  "4.555") instead of a number 4.555.  To fix this, we will iterate through the fields
    # and convert values into numbers if they are convertable

    storageDict = {}
    for attrib in jsonObj.keys():
        if isinstance(jsonObj[attrib], list):
            storageDict[attrib] = jsonObj[attrib]
        else:
            storageDict[attrib] = convertIfNumber(jsonObj[attrib])
    try:
        data_coll.insert(storageDict)

    except ValueError:
        response['error'] = "Could not convert to JSON"
        return bson.json_util.dumps(response)

    connection.close()

    # Pack the results into the response object, and return it.
    #response['result'] = jsonoutput
    response['result'] = 'OK'

    # Return the response object.
    tangelo.log(str(response))
    return bson.json_util.dumps(response)
def insertdb(data):
    """
	data is a list of dict's which ll be inserted into db called dbname and a collection called dbname
	"""
    global host
    connection = Connection(host, 27017)
    db = connection[dbname]
    collection = db[coll]
    post_id = []
    posts = db.coll
    post_id.append(posts.insert(data))
    if verbose:
        print 'inserted:', data
    connection.close()
    return post_id
class BooleanSearch:
    '''
    Accepts a word, queries MongoDB and returns its postings lists as a
    Python set.  Given two sets of postings lists, performs the boolean
    operations AND, OR and NOT using set intersection, union and difference.
    Given a set of postings lists, queries the chunk files to print the
    corresponding log entries.

    (The docstring was previously unindented at column 0, which is an
    IndentationError inside a class body.)
    '''
    def __init__(self):
        # connection to the 'indexer' database; postings live in its 'idf' collection
        self.con = Connection()
        self.db = self.con.indexer
        self.idf = self.db.idf
        self.outputPath = "Processed_Data/"

    def __del__(self):
        self.con.close()

    def getPostingsListSet(self, word):
        """Return the set of postings-list strings stored for *word*.

        Each entry is the values of one 'postingsList' document joined with
        '.' -- presumably 'chunkfile.offset.length' (see printLogs).
        """
        mySet = set()
        for row in self.idf.find({'Term': word}):
            pList = row['postingsList']
            mySet.add('.'.join([str(x) for x in pList.values()]))
        return mySet

    def printLogs(self, pListSet):
        """Given a set of postings lists, print the referenced log entries.

        Each entry is split into chunk file name, byte offset and length,
        relative to outputPath.
        """
        for oneList in pListSet:
            items = oneList.split('.')
            fn = self.outputPath + items[0]
            # 'with' guarantees the chunk file is closed even if a read fails
            with open(fn, 'r') as fhndl:
                fhndl.seek(int(items[1]))
                print(fhndl.read(int(items[2])).strip())

    def termAND(self, set1, set2):
        """Boolean AND of two postings-list sets (intersection)."""
        return set1.intersection(set2)

    def termOR(self, set1, set2):
        """Boolean OR of two postings-list sets (union)."""
        return set1.union(set2)

    def termNOT(self, set1, set2):
        """Boolean NOT: members of set2 that are not in set1 (difference)."""
        return set2.difference(set1)

    def singleTerm(self, word):
        """Postings-list set for a single term when no boolean op is required."""
        return self.getPostingsListSet(word)