Example #1
File: utils_t.py Project: ktf/DAS
 def test_sorting(self):
     """Test sorting routines"""
     data = [
         {'id': 6, 'dataset': 'bla6', 'run': 200},
         {'id': 1, 'dataset': 'bla1', 'run': 100},
         {'id': 2, 'dataset': 'bla2', 'run': 700},
         {'id': 3, 'dataset': 'bla3', 'run': 400},
         {'id': 4, 'dataset': 'bla4', 'run': 300},
         {'id': 5, 'dataset': 'bla5', 'run': 800},
     ]
     sorted_data = list(sort_data(data, 'dataset'))
     expect = [
         {'run': 100, 'id': 1, 'dataset': 'bla1'},
         {'run': 700, 'id': 2, 'dataset': 'bla2'},
         {'run': 400, 'id': 3, 'dataset': 'bla3'},
         {'run': 300, 'id': 4, 'dataset': 'bla4'},
         {'run': 800, 'id': 5, 'dataset': 'bla5'},
         {'run': 200, 'id': 6, 'dataset': 'bla6'},
     ]
     self.assertEqual(expect, sorted_data)
     sorted_data = list(sort_data(data, 'run'))
     expect = [
         {'run': 100, 'id': 1, 'dataset': 'bla1'},
         {'run': 200, 'id': 6, 'dataset': 'bla6'},
         {'run': 300, 'id': 4, 'dataset': 'bla4'},
         {'run': 400, 'id': 3, 'dataset': 'bla3'},
         {'run': 700, 'id': 2, 'dataset': 'bla2'},
         {'run': 800, 'id': 5, 'dataset': 'bla5'},
     ]
     self.assertEqual(expect, sorted_data)
     sorted_data = list(sort_data(data, 'run', 'desc'))
     expect = [
         {'run': 800, 'id': 5, 'dataset': 'bla5'},
         {'run': 700, 'id': 2, 'dataset': 'bla2'},
         {'run': 400, 'id': 3, 'dataset': 'bla3'},
         {'run': 300, 'id': 4, 'dataset': 'bla4'},
         {'run': 200, 'id': 6, 'dataset': 'bla6'},
         {'run': 100, 'id': 1, 'dataset': 'bla1'},
     ]
     self.assertEqual(expect, sorted_data)
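The helper under test, sort_data, is not shown on this page. A minimal sketch consistent with the assertions above (sort dicts by the given key, ascending by default, descending when 'desc' is passed) could look like this; the actual DAS implementation may differ:

def sort_data(data, skey, order='asc'):
    """Yield rows sorted by the value of skey; order is 'asc' or 'desc'."""
    # sorted() accepts any comparable values, so this covers both the
    # string 'dataset' key and the integer 'run' key used in the test.
    for row in sorted(data, key=lambda row: row[skey], reverse=(order == 'desc')):
        yield row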
Example #2
    def get_from_cache(self, query, idx=0, limit=0, skey=None, order='asc'):
        """
        Retrieve results from the cache; otherwise yield nothing.
        """
        idx = int(idx)
        limit = int(limit)  # long() existed only in Python 2
        stop = idx + limit
        key = genkey(query)
        res = self.memcache.get(key)  # number of cached rows for this query
        id = idx
        if res and isinstance(res, int):
            self.logger.info("DASMemcache::result(%s) using cache" % query)
            if skey:
                rowlist = list(range(res))
                rowdict = self.memcache.get_multi(rowlist, key_prefix=key)
                data = rowdict.values()
                gendata = (i for i in sort_data(data, skey, order))

                def subgroup(gen, idx, stop):
                    """Extract sub-group of results from generator"""
                    id = 0
                    for item in gen:
                        if stop:
                            if id >= idx and id < stop:
                                yield item
                        else:
                            if id >= idx:
                                yield item
                        id += 1

                items = subgroup(gendata, idx, stop)
            else:
                if limit:
                    if stop > res:  # clamp the slice to the number of cached rows
                        stop = res
                    rowlist = list(range(idx, stop))
                else:
                    rowlist = list(range(res))
                rowdict = self.memcache.get_multi(rowlist, key_prefix=key)
                items = rowdict.values()
            for item in items:
                # item['id'] = id
                yield item
                id += 1
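The subgroup() helper above is equivalent to itertools.islice: with a truthy stop it yields items idx..stop-1, otherwise everything from idx on. A small standalone check of that equivalence (the numeric data here is made up for illustration):

import itertools

def subgroup(gen, idx, stop):
    """Extract sub-group of results from generator (same logic as above)."""
    for id, item in enumerate(gen):
        if stop:
            if idx <= id < stop:
                yield item
        elif id >= idx:
            yield item

rows = list(range(10))
assert list(subgroup(iter(rows), 2, 5)) == list(itertools.islice(rows, 2, 5))
assert list(subgroup(iter(rows), 2, 0)) == list(itertools.islice(rows, 2, None))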
Example #3
    def get_from_cache(self, query, idx=0, limit=0, skey=None, order="asc"):
        """
        Retrieve results from the cache; otherwise yield nothing.
        """
        idx = int(idx)
        limit = int(limit)  # long() existed only in Python 2
        stop = idx + limit
        key = genkey(query)
        res = self.memcache.get(key)  # number of cached rows for this query
        id = idx
        if res and isinstance(res, int):  # types.IntType is gone in Python 3
            self.logger.info("DASMemcache::result(%s) using cache" % query)
            if skey:
                rowlist = list(range(res))
                rowdict = self.memcache.get_multi(rowlist, key_prefix=key)
                data = rowdict.values()
                gendata = (i for i in sort_data(data, skey, order))

                def subgroup(gen, idx, stop):
                    """Extract sub-group of results from generator"""
                    id = 0
                    for item in gen:
                        if stop:
                            if id >= idx and id < stop:
                                yield item
                        else:
                            if id >= idx:
                                yield item
                        id += 1

                items = subgroup(gendata, idx, stop)
            else:
                if limit:
                    if stop > res:  # clamp the slice to the number of cached rows
                        stop = res
                    rowlist = list(range(idx, stop))
                else:
                    rowlist = list(range(res))
                rowdict = self.memcache.get_multi(rowlist, key_prefix=key)
                items = rowdict.values()
            for item in items:
                # item['id'] = id
                yield item
                id += 1
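Both memcache examples assume the same storage convention: the value stored under genkey(query) is the total row count, and each row sits under a numbered key sharing that hash as a prefix, so get_multi can fetch a slice in one round trip. A hypothetical writer following that convention (put_in_cache is not part of this page; set and set_multi are standard python-memcached calls):

def put_in_cache(memcache, query, rows):
    """Store the row count under the query hash and each row under hash+index."""
    key = genkey(query)  # genkey is the DAS query-hashing helper used above
    memcache.set(key, len(rows))
    # set_multi stores each mapping entry under '<key_prefix><index>'
    memcache.set_multi(dict(enumerate(rows)), key_prefix=key)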
Example #4
 def get_from_cache(self, query, idx=0, limit=0, skey=None, order='asc'):
     """
     Retreieve results from cache, otherwise return null.
     """
     #        id      = int(idx)
     idx = int(idx)
     stop = idx + long(limit)
     key = genkey(query)
     sysdir = os.path.join(self.dir, self.get_system(query))
     session = self.session()
     res = []  # fall back to an empty result set if the DB lookup fails
     try:  # transactions
         res = session.query(Query, Location).\
                     filter(Query.dir_id==Location.id).\
                     filter(Query.hash==key)
         session.commit()
     except Exception:
         session.rollback()
         self.logger.debug(traceback.format_exc())
     for qobj, dobj in res:
         valid = eval(qobj.expire) - time.time()  # expire/create are stored as stringified timestamps
         timestring = eval(qobj.create)
         idir = dobj.dir
         filename = os.path.join(idir, key)
         self.logger.info("DASFilecache::get_from_cache %s" % filename)
         if valid > 0:
             msg = "found valid query in cache, key=%s" % key
             self.logger.debug("DASFilecache::get_from_cache %s" % msg)
             if os.path.isfile(filename):
                 fdr = open(filename, 'rb')
                 if skey:
                     # first retrieve full list of results and sort it
                     data = []
                     id = 0
                     while True:
                         try:
                             res = marshal.load(fdr)
                             if isinstance(res, dict):
                                 res['id'] = id
                             data.append(res)
                             id += 1
                         except EOFError:
                             break
                     fdr.close()
                     sorted_data = sort_data(data, skey, order)
                     index = 0
                     for row in sorted_data:
                         if limit:
                             if idx <= index < stop:
                                 yield row
                         else:
                             if index >= idx:
                                 yield row
                         index += 1
                 else:
                     id = 0
                     while True:
                         try:
                             res = marshal.load(fdr)
                             if isinstance(res, dict):
                                 res['id'] = id
                             if limit:
                                 if idx <= id < stop:
                                     yield res
                                 if id == stop:
                                     break
                             else:
                                 yield res
                             id += 1
                         except EOFError:
                             break
                     fdr.close()
         else:
             msg = "found expired query in cache, key=%s" % key
             self.logger.debug("DASFilecache::get_from_cache %s" % msg)
             fdir = os.path.split(filename)[0]
             if os.path.isfile(filename):
                 os.remove(filename)
             clean_dirs(fdir)
             try:  # session transactions
                 session.delete(qobj)
                 session.commit()
             except Exception:
                 session.rollback()
                 self.logger.debug(traceback.format_exc())
                 msg = "Unable to delete object from DAS filecache DB"
                 raise Exception(msg)
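For context on the read loops above: the cache file is a plain sequence of back-to-back marshal records, and marshal.load() raises EOFError once the stream is exhausted, which is what terminates the while loops. A hypothetical writer that produces such a file (the path below is illustrative):

import marshal

def write_cache(filename, rows):
    """Append each row as a separate marshal record; readers load() until EOFError."""
    with open(filename, 'wb') as fdw:
        for row in rows:
            marshal.dump(row, fdw)

write_cache('/tmp/das_cache_example', [{'id': 0, 'run': 100}, {'id': 1, 'run': 200}])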
Example #5
 def test_sorting(self):
     """Test sorting routines"""
     data = [
         {'id': 6, 'dataset': 'bla6', 'run': 200},
         {'id': 1, 'dataset': 'bla1', 'run': 100},
         {'id': 2, 'dataset': 'bla2', 'run': 700},
         {'id': 3, 'dataset': 'bla3', 'run': 400},
         {'id': 4, 'dataset': 'bla4', 'run': 300},
         {'id': 5, 'dataset': 'bla5', 'run': 800},
     ]
     sorted_data = list(sort_data(data, 'dataset'))
     expect = [
         {'run': 100, 'id': 1, 'dataset': 'bla1'},
         {'run': 700, 'id': 2, 'dataset': 'bla2'},
         {'run': 400, 'id': 3, 'dataset': 'bla3'},
         {'run': 300, 'id': 4, 'dataset': 'bla4'},
         {'run': 800, 'id': 5, 'dataset': 'bla5'},
         {'run': 200, 'id': 6, 'dataset': 'bla6'},
     ]
     self.assertEqual(expect, sorted_data)
     sorted_data = list(sort_data(data, 'run'))
     expect = [
         {'run': 100, 'id': 1, 'dataset': 'bla1'},
         {'run': 200, 'id': 6, 'dataset': 'bla6'},
         {'run': 300, 'id': 4, 'dataset': 'bla4'},
         {'run': 400, 'id': 3, 'dataset': 'bla3'},
         {'run': 700, 'id': 2, 'dataset': 'bla2'},
         {'run': 800, 'id': 5, 'dataset': 'bla5'},
     ]
     self.assertEqual(expect, sorted_data)
     sorted_data = list(sort_data(data, 'run', 'desc'))
     expect = [
         {'run': 800, 'id': 5, 'dataset': 'bla5'},
         {'run': 700, 'id': 2, 'dataset': 'bla2'},
         {'run': 400, 'id': 3, 'dataset': 'bla3'},
         {'run': 300, 'id': 4, 'dataset': 'bla4'},
         {'run': 200, 'id': 6, 'dataset': 'bla6'},
         {'run': 100, 'id': 1, 'dataset': 'bla1'},
     ]
     self.assertEqual(expect, sorted_data)
Example #6
 def get_from_cache(self, query, idx=0, limit=0, skey=None, order="asc"):
     """
     Retreieve results from cache, otherwise return null.
     """
     #        id      = int(idx)
     idx = int(idx)
     stop = idx + long(limit)
     key = genkey(query)
     sysdir = os.path.join(self.dir, self.get_system(query))
     session = self.session()
     res = []  # fall back to an empty result set if the DB lookup fails
     try:  # transactions
         res = session.query(Query, Location).filter(Query.dir_id == Location.id).filter(Query.hash == key)
         session.commit()
     except Exception:
         session.rollback()
         self.logger.debug(traceback.format_exc())
     for qobj, dobj in res:
         valid = eval(qobj.expire) - time.time()  # expire/create are stored as stringified timestamps
         timestring = eval(qobj.create)
         idir = dobj.dir
         filename = os.path.join(idir, key)
         self.logger.info("DASFilecache::get_from_cache %s" % filename)
         if valid > 0:
             msg = "found valid query in cache, key=%s" % key
             self.logger.debug("DASFilecache::get_from_cache %s" % msg)
             if os.path.isfile(filename):
                 fdr = open(filename, "rb")
                 if skey:
                     # first retrieve full list of results and sort it
                     data = []
                     id = 0
                     while True:
                         try:
                             res = marshal.load(fdr)
                             if isinstance(res, dict):  # types.DictType is gone in Python 3
                                 res["id"] = id
                             data.append(res)
                             id += 1
                         except EOFError:
                             break
                     fdr.close()
                     sorted_data = sort_data(data, skey, order)
                     index = 0
                     for row in sorted_data:
                         if limit:
                             if idx <= index < stop:
                                 yield row
                         else:
                             if index >= idx:
                                 yield row
                         index += 1
                 else:
                     id = 0
                     while True:
                         try:
                             res = marshal.load(fdr)
                             if isinstance(res, dict):
                                 res["id"] = id
                             if limit:
                                 if idx <= id < stop:
                                     yield res
                                 if id == stop:
                                     break
                             else:
                                 yield res
                             id += 1
                         except EOFError:
                             break
                     fdr.close()