Example #1
File: jadmin.py  Project: yhinoz/jbims2
class Dao:
    def __init__(self, dbname):
        self.db = Base(dbname).open()
        self.db.set_string_format(unicode, 'utf-8')

    def regist_admin(self, data):
        return self.db.insert(
            generation  = data.get('generation'),
            dept        = data.get('dept'),
            name_sei    = data.get('name_sei'),
            name_mei    = data.get('name_mei'),
            mail        = data.get('mail'),
            account     = data.get('account'),
            passwd      = data.get('crypt_passwd'),
            add_dt      = datetime.now()
        )

    def get_admins(self):
        result = self.db.select()
        return result

    def close(self):
        self.db.close()

    def __del__(self):
        self.db.close()
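A minimal usage sketch for the Dao class above; it assumes jadmin.py also imports buzhug's Base and datetime.datetime, that a matching base already exists on disk, and that the sample payload below is purely illustrative.

# hypothetical driver code, not part of jadmin.py
dao = Dao('admins')                      # opens the existing buzhug base 'admins'
dao.regist_admin({
    'generation': 12,
    'dept': 'engineering',
    'name_sei': 'Yamada',
    'name_mei': 'Taro',
    'mail': 'taro@example.com',
    'account': 'taro',
    'crypt_passwd': 'hashed-password',
})
for admin in dao.get_admins():           # select() with no arguments returns every record
    print admin.account, admin.mail
dao.close()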
Example #2
class DataBase():
    def __init__(self):
        self.db=Base(mypath)
        self.sfield = {'शब्द': 'Words', 
                        'पद': 'Speech', 
                        'अर्थ': 'Meaning', 
                        'पर्यायवाचि': 'Synonym', 
                        'विपरीतार्थक': 'Antonym', 
                        'अंग्रेजी': 'English'}
        self.db.open()

    def _get_attribute(self, args):
        ret_val = {}
        for k, f in self.sfield.items():
            value = getattr(args, f)
            if value is not None:
                ret_val[k] = value
        return ret_val

    def select(self, value):
        self.record = self.db.select([f for f in self.sfield.itervalues()], Words=value)
        for v in self.record:
            # only the first matching record is returned
            self.g = self._get_attribute(v)
            return self.g

    def fields(self):
        return self.db.field_names

    def get_field_length(self, value):
        t = self.select(value)
        return len(t) if t else 0
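A short, hedged sketch of how the wrapper above might be driven; it assumes `mypath` already points at a buzhug base whose field names match the English values in `sfield`, and the lookup word is just an example.

# illustrative only: assumes the base behind `mypath` exists with Words/Speech/... fields
db = DataBase()
entry = db.select(u'शब्द')         # returns a dict keyed by the Hindi labels, or None
if entry:
    for label, value in entry.items():
        print label, ':', value
print db.fields()                  # the raw buzhug column names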
Example #3
class RepDB:
    def __init__(self, path):
        self.path = path
        self.dbh_stored_blocks = Base(self.path)
        try:
            self.dbh_stored_blocks.create(('key', str), ('version', str))
        except IOError:
            pass

    def open(self):
        self.dbh_stored_blocks.open()

    def add(self, oid, block_id, version):

        key = str((oid, block_id))

        # lets see if we already have a key stored
        set = self.dbh_stored_blocks.select_for_update(['key', 'version'],
                                                       key=key)
        if set == []:
            self.dbh_stored_blocks.insert(key, str(version))
        else:
            set[0].update(version=str(version))

    def get(self, oid, block_id):
        key = str((oid, block_id))
        result = self.dbh_stored_blocks.select(['key', 'version'], key=key)

        return result[0].version

    def update(self, oid, block_id, version):
        self.add(oid, block_id, version)

    def delete(self, oid, block_id):
        key = str((oid, block_id))

        set = self.dbh_stored_blocks.select_for_update(['key', 'version'],
                                                       key=key)
        self.dbh_stored_blocks.delete(set[0])

    def close(self):
        self.dbh_stored_blocks.close()

    def getIterator(self):
        return RepDBIterator([record for record in self.dbh_stored_blocks])
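A hedged usage sketch for RepDB; the path is a placeholder, and versions come back as strings because that is how add() stores them.

# illustrative only
db = RepDB('/tmp/stored_blocks')
db.open()
db.add(42, 0, 1)          # (oid, block_id, version)
print db.get(42, 0)       # -> '1'
db.update(42, 0, 2)       # re-runs add(), which updates the existing record
db.delete(42, 0)
db.close()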
Example #4
class RepDB:
    def __init__(self, path):
        self.path = path        
        self.dbh_stored_blocks = Base(self.path)
        try:
            self.dbh_stored_blocks.create(('key', str), ('version', str))
        except IOError:    
            pass            
        
    def open(self):
        self.dbh_stored_blocks.open()
        
    def add(self, oid, block_id, version):
        
        key = str((oid, block_id))        
        
        # lets see if we already have a key stored
        set = self.dbh_stored_blocks.select_for_update(['key','version'],key=key)
        if set == []:                    
            self.dbh_stored_blocks.insert(key, str(version))
        else:
            set[0].update(version=str(version))
        
    def get(self, oid, block_id):
        key = str((oid, block_id))        
        result = self.dbh_stored_blocks.select(['key','version'],key=key)        
                
        return result[0].version
        
    def update(self, oid, block_id, version):        
        self.add(oid, block_id, version)
        
    def delete(self, oid, block_id):
        key = str((oid, block_id))
                
        set = self.dbh_stored_blocks.select_for_update(['key','version'],key=key)                
        self.dbh_stored_blocks.delete(set[0])
        
    def close(self):
        self.dbh_stored_blocks.close()

    def getIterator(self):        
        return RepDBIterator([record for record in self.dbh_stored_blocks])
Example #5
class DB:
    def __init__(self, storage_path):

        self.dbh_objects = Base(os.path.join(storage_path, 'objects'))
        self.dbh_blocks = Base(os.path.join(storage_path, 'blocks'))
        self.dbh_replicas = Base(os.path.join(storage_path, 'replicas'))
        self.dbh_tree = Base(os.path.join(storage_path, 'tree'))
        self.dbh_paths = Base(os.path.join(storage_path, 'paths'))
        self.dbh_id = Base(os.path.join(storage_path, 'id'))
        self.dbh_tags = Base(os.path.join(storage_path, 'tags'))
        self.storage_path = storage_path

    def __create_root(self):
        """ Check if the filesystem has a / and if not create it"""

        print "Initializing filesystem..."

        if self.get_file(path='/'):
            return

        print "Creating root..."

        f = FSObject(1, 1, '/', 0, 0, 0, 0)

        # lets see if we already have a key stored
        set = self.dbh_objects.select(['oid'], oid=str(f.oid))
        if set == []:
            # we have to create tree and paths first
            self.dbh_tree.insert(str(f.oid), str(f.parent))
            self.dbh_paths.insert(str((f.parent, f.path)))

            self.dbh_objects.insert(str(f.oid), dumps(f),
                                    self.dbh_tree[len(self.dbh_tree) - 1],
                                    self.dbh_paths[len(self.dbh_paths) - 1])

        #set the current oid for the id increment sequence
        set = self.dbh_id.select(['curr_oid'])
        if set == []:
            self.dbh_id.insert('1')

    def setup_fs_db(self):

        try:
            self.dbh_blocks.create(('key', str), ('blocks', str))
        except IOError:
            self.dbh_blocks.open()

        try:
            self.dbh_replicas.create(('key', str), ('replicas', str))
        except IOError:
            self.dbh_replicas.open()

        try:
            self.dbh_tree.create(('oid', str), ('parent', str))
        except IOError:
            self.dbh_tree.open()

        try:
            self.dbh_tags.create(('oid', str), ('tag', str))
        except IOError:
            self.dbh_tags.open()

        try:
            self.dbh_paths.create(('key', str))
        except IOError:
            self.dbh_paths.open()

        try:
            self.dbh_id.create(('curr_oid', str))
        except IOError:
            self.dbh_id.open()

        try:
            self.dbh_objects.create(('oid', str), ('fsobj', str),
                                    ('tree', self.dbh_tree),
                                    ('paths', self.dbh_paths))
        except IOError:
            self.dbh_objects.open()

        self.__create_root()

    def get_path_oid(self, path):
        """Gets the parent filenode for path"""

        nodes = []
        parent_path = path
        while 1:
            (parent_path, node) = os.path.split(parent_path)
            if node == '':
                nodes.insert(0, '/')
                break
            nodes.insert(0, node)

        parent_oid = 0
        for node_name in nodes:
            key = str((parent_oid, node_name))
            print "looking up: %s" % key

            # search for a match
            f = None
            for record in [record for record in self.dbh_objects]:
                if record.paths.key == key:
                    f = loads(record.fsobj)
                    break

            print "found it!"
            if not f:
                return 0

            parent_oid = f.oid

        return parent_oid

    def insert_file(self, path, fsobj):
        #check first if there is a parent directory to store this file

        f = self.get_file(path=path)

        print "inserting file with path: " + path
        print fsobj

        if not f:
            print "ERR: [%s]" % os.path.split(fsobj.path)[0]
            raise FileSystemError('No parent directory to store: %s' %
                                  fsobj.path)

        #the parent of this object is the path
        fsobj.parent = f.oid

        set = self.dbh_id.select_for_update(['curr_oid'])

        curr_oid = int(set[0].curr_oid) + 1
        fsobj.oid = curr_oid
        print "Inserting OID: %s" % fsobj

        # lets see if we already have a key stored
        result = self.dbh_objects.select(['oid', 'fsobj'], oid=str(fsobj.oid))
        if result != []:
            raise FileSystemError('File already exists')
        else:

            # we have to create tree and paths first
            self.dbh_tree.insert(str(fsobj.oid), str(fsobj.parent))
            self.dbh_paths.insert(str((fsobj.parent, fsobj.path)))

            self.dbh_objects.insert(str(fsobj.oid), dumps(fsobj),
                                    self.dbh_tree[len(self.dbh_tree) - 1],
                                    self.dbh_paths[len(self.dbh_paths) - 1])

            set[0].update(curr_oid=str(curr_oid))

        return curr_oid

    def get_file(self, oid='', path=''):
        if oid:
            set = self.dbh_objects.select(['oid', 'fsobj'], oid=str(oid))
            if set == []:
                f = None
            else:
                f = set[0].fsobj

        elif path:
            if path == '/':
                key = str((0, '/'))
            else:
                parent_oid = self.get_path_oid(os.path.split(path)[0])
                node_name = os.path.split(path)[1]
                key = str((parent_oid, node_name))

            # search for a match
            f = None
            for record in [record for record in self.dbh_objects]:
                print record.paths.key
                if record.paths.key == key:
                    f = record.fsobj
                    break

        else:
            f = None

        if f:
            f = loads(f)

        return f

    def get_children(self, oid):

        # lookup FSOBJECT with given oid
        set = self.dbh_objects.select(['oid', 'fsobj'], oid=str(oid))
        if set == []:
            return []

        file_array = []

        # lookup objects with parent oid
        set = self.dbh_tree.select(['oid', 'parent'], parent=str(oid))
        for i in set:
            obj = self.dbh_objects.select(['oid', 'fsobj'], oid=str(i.oid))
            if obj != []:
                file_array.append(loads(obj[0].fsobj))

        return file_array

    def debug_print_db(self, db):
        pass

    def print_object_db(self):
        self.debug_print_db(self.dbh_objects)

    def delete_dir(self, oid):
        pass

    def delete_file(self, oid):
        pass

    def rename_file(self, src, dest):
        pass

    def update_file(self, fsobj):

        set = self.dbh_objects.select_for_update(['oid', 'fsobj'],
                                                 oid=str(fsobj.oid))
        if set != []:
            set[0].update(fsobj=dumps(fsobj))

    def add_block(self, block, serverid):

        f = self.get_file(oid=str(block.oid))
        if not f:
            raise FileSystemError('add_block: Object %s does not exist' %
                                  block.oid)

        key = str(
            (long(block.oid),
             long(block.block_id)))  #the key is both the oid and the block_id

        set1 = self.dbh_replicas.select_for_update(['key', 'replicas'],
                                                   key=key)
        if set1 == []:
            replicas = FSReplicas(block.oid, block.block_id)
        else:
            replicas = loads(set1[0].replicas)

        f.blocks[block.block_id] = block.version

        set2 = self.dbh_blocks.select_for_update(['key', 'blocks'], key=key)
        if set2 == []:
            b = None
        else:
            b = set2[0].blocks

        if b:
            b = loads(b)
            diff = block.size - b.size
        else:
            diff = block.size

        f.size += diff

        # update or insert the block itself?
        if set2 == []:
            self.dbh_blocks.insert(key, dumps(block))
        else:
            set2[0].update(blocks=dumps(block))

        self.update_file(f)
        replicas.add(serverid, block.version)

        # update or insert the replica info?
        if set1 == []:
            self.dbh_replicas.insert(key, dumps(replicas))
        else:
            set1[0].update(replicas=dumps(replicas))

    def add_block_replica(self, block, serverid):
        f = self.get_file(str(block.oid))

        if not f:
            raise FileSystemError(
                'add_block_replica: Object %s does not exist' % block.oid)

        key = str((block.oid, block.block_id))

        set = self.dbh_replicas.select_for_update(['key', 'replicas'], key=key)
        if set == []:
            replicas = FSReplicas(block.oid, block.block_id)
        else:
            replicas = loads(set[0].replicas)

        replicas.add(serverid, block.version)

        # update or insert?
        if set == []:
            self.dbh_replicas.insert(key, dumps(replicas))
        else:
            set[0].update(replicas=dumps(replicas))

    def get_block_replicas(self, oid, block_id):
        key = str((long(oid), long(block_id)))

        set = self.dbh_replicas.select(['key', 'replicas'], key=key)
        if set == []:
            return None

        return loads(set[0].replicas)

    def get_block(self, oid, block_id):
        key = str((long(oid), long(block_id)))

        set = self.dbh_blocks.select(['key', 'blocks'], key=key)
        if set == []:
            return None

        return loads(set[0].blocks)

    def print_replicas_db(self):
        self.debug_print_db(self.dbh_replicas)

    def close_fs_db(self):
        self.dbh_blocks.close()
        self.dbh_replicas.close()
        self.dbh_tree.close()
        self.dbh_id.close()
        self.dbh_paths.close()
        self.dbh_objects.close()
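The objects table above uses buzhug's linked-base columns: `tree` and `paths` are typed as other Base instances, so each object row points directly at a row in those bases. Below is a minimal, self-contained sketch of that pattern; the base names and fields are made up for illustration.

from buzhug import Base

authors = Base('authors_demo')
authors.create(('name', str), mode='override')
authors.insert(name='alice')

# a column whose type is another Base stores a reference to one of its records;
# the third element of the tuple is the default record
posts = Base('posts_demo')
posts.create(('title', str), ('author', authors, authors[0]), mode='override')
posts.insert(title='hello world', author=authors[0])

for p in posts:
    print p.title, '->', p.author.name    # the link is followed like an attribute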
Example #6
class TransactionsDB(object):

    BASE = 'banan/transactions'

    def __init__(self, conf):
        self.config = conf
        self._sessions = {}
        self.open()

    # Management
    def open(self):
        self.db = Base(TransactionsDB.BASE)
        try:
            self.db.open()
        except IOError:
            self.db.create(('amount'      , float), 
                           ('amount_local', float),
                           ('date'        , date),
                           ('account'     , str),
                           ('label'       , str),
                           ('currency'    , str))

    def close(self):
        self.db.close()
            
    def clearall(self):
        self.db.destroy()
        self.open()
        
    def insert(self, entry):
        self.db.insert(amount       = entry['amount'],
                       amount_local = entry['amount_local'],
                       date         = entry['date'],
                       account      = entry['account'],
                       label        = entry['label'],
                       currency     = entry['currency'])

    def feed(self, fpath, parser, skip_duplicates=True, overwrite=False, delete=False, dry_run=False):

        deleted = added = 0
        for entry in parser.parse(fpath):
            if dry_run:
                print('%s %-40s\t%12.2f %s' % (entry['date'].isoformat(),
                                               entry['account'][:40],
                                               entry['amount'],
                                               entry['currency']))
                continue

            if skip_duplicates or overwrite or delete:
                _dup = self.db(date=entry['date'], account=entry['account'], amount=entry['amount'])
                if _dup:
                    if overwrite or delete:
                        deleted += len(_dup)
                        self.db.delete(_dup)
                    else:
                        continue
                if delete:
                    continue

            entry['label'] = self.config.assign_label(entry)
            self.insert(entry)
            added += 1
        
        if not dry_run:
            INFO('  added %i transactions' % added)
            INFO('  deleted %i transactions' % deleted)
            parser.post_process(self.db, added)

    def update_labels(self):
        
        # Load all records into memory first; the file gets corrupted if we update while iterating over the db directly.
        records = [rec for rec in self.db]
        for record in records:
            as_dict = dict((field, getattr(record, field)) for field in record.fields)
            label = self.config.assign_label(as_dict)
            if label != record.label:
                self.db.update(record, label=label)

        self.db.cleanup()


    # Queries
    get_amount = lambda rec: rec.amount_local    

    def results_as_text(self, results):

        results.sort_by('date')
        idx = 0
        record = results[idx]
        text_list = []
        while True:
            text_list.append('%s   %-40s\t%12.2f %s' %
                             (record.date.isoformat(),
                              unicode(record.account[:40], 'utf-8'),
                              record.amount,
                              record.currency))
            try:
                idx += 1
                record = results[idx]
            except IndexError:
                return text_list

    
    def assemble_data(self, sid, datatype, foreach, show, select):

        try:

            session = self._sessions.get(sid, {})
            if session:
                if session['raw_query'] == (foreach, show, select):
                    # Same query, return cached result
                    return True, \
                        self._sessions[sid]['flot_' + show] if datatype == 'plot' else \
                        self._sessions[sid]['text']

            # Helpers
            get_amount = lambda rec: rec.amount_local
            M = range(1,13)
            total = strlen = 0

            data = {}
            text = {}
            query = 'date1 <= date < date2 and label == l'

            if foreach == 'label':
                
                if session:
                    if session['raw_query'][0] == 'label' and session['raw_query'][2] == select:
                        # Same query, but different presentation (sum or average)
                        return True, \
                            self._sessions[sid]['flot_' + show] if datatype == 'plot' else \
                            self._sessions[sid]['text']

                # New query
                dates = re.findall('[0-9]{6}', unquote_plus(select))
                date1 = date2 = date(int(dates[0][2:]), int(dates[0][:2]), 1)
                if len(dates) == 2:
                    date2 = date(int(dates[1][2:]), int(dates[1][:2]), 1)
                date2 = date(date2.year + (date2.month == 12), M[date2.month - 12], 1)

                for label in self.config.labels.iterkeys():

                    results = self.db.select(None, query, l = label, date1 = date1, date2 = date2)
                    value = sum(map(get_amount, results))

                    if abs(value) > 1:
                        data[label] = value

                        if label not in self.config.cash_flow_ignore:
                            total += value
                        else:
                            label += '*'

                        text[label] = self.results_as_text(results)
                        strlen = len(text[label][-1])
                        sumstr = '%12.2f %s' % (value, self.config.local_currency)
                        text[label].append('-' * strlen)
                        text[label].append(' ' * (strlen - len(sumstr)) + sumstr)

                ydelta = date2.year - date1.year
                mdelta = date2.month - date1.month
                delta = 12 * ydelta + mdelta

                session['flot_average'] = {}
                for key, val in data.iteritems():
                    session['flot_average'][key] = val/delta


            elif foreach in ('month', 'year'):
                
                # New query
                date1 = date2 = first = datetime.now()
                if foreach == 'month':
                    first = date(date1.year - 1, date1.month, 1)
                    date1 = date(date1.year - (date1.month == 1), M[date1.month - 2], 1)
                    date2 = date(date2.year, date2.month, 1)
                else:
                    first = date(date1.year - 9, 1, 1)
                    date1 = date(date1.year, 1, 1)
                    date2 = date(date2.year + 1, 1, 1)

                select = unquote_plus(select)

                while date1 >= first:

                    results = self.db.select(None, query, l = select, date1 = date1, date2 = date2)
                    value = sum(map(get_amount, results))

                    date2 = date1 
                    if foreach == 'month':
                        key = date1.strftime('%Y.%m') 
                        date1 = date(date2.year - (date2.month == 1), M[date2.month - 2], 1)
                    else:
                        key = str(date1.year)
                        date1 = date(date2.year - 1, 1, 1)

                    data[key] = value
                    total += value
                    if results:
                        text[key] = self.results_as_text(results)
                        strlen = len(text[key][-1])
                        sumstr = '%12.2f %s' % (value, self.config.local_currency)
                        text[key].append('-' * strlen)
                        text[key].append(' ' * (strlen - len(sumstr)) + sumstr)


            # All good, set new session attributes
            session['raw_query'] = (foreach, show, select)
            session['flot_sum'] = data
            session['text'] = text

            if session['text']:
                session['text']['***'] = ['-' * strlen,
                                          'SUM: %12.2f %s' % (total, self.config.local_currency),
                                          '-' * strlen]

            self._sessions[sid] = session
            return True, session['flot_' + show] if datatype == 'plot' else session['text']

        except Exception as e:
            return False, str(e)
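assemble_data leans on buzhug's select-by-formula: the first argument is the list of fields to return (None for all of them) and the free names in the query string are bound through keyword arguments. Below is a small, hedged sketch of that call shape using a throwaway base, not the real banan/transactions one.

from datetime import date
from buzhug import Base

tx = Base('tx_demo')          # illustrative base
tx.create(('amount_local', float), ('label', str), ('date', date), mode='override')
tx.insert(amount_local=-42.5, label='groceries', date=date(2012, 3, 14))

# free names in the formula (l, date1, date2) are supplied as keywords
rows = tx.select(None, 'date1 <= date < date2 and label == l',
                 l='groceries', date1=date(2012, 1, 1), date2=date(2013, 1, 1))
print sum(r.amount_local for r in rows)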
Example #7
            10**random.randint(-307,307)))
    db.insert(name=random.choice(names)) # missing fields

# insert as string
db.set_string_format(unicode,'latin-1')
db.set_string_format(date,'%d-%m-%y')
db.insert_as_strings(name="testname",fr_name=random.choice(fr_names),
    age=10,size=123,birth="07-10-94")
print db[len(db)-1].birth
db.insert_as_strings("testname",random.choice(fr_names),
    11,134,"09-10-94",1.0)
print db[len(db)-1].birth

# search between 2 dates
print '\nBirth between 1960 and 1964'
for r in db.select(None,birth=[date(1960,1,1),date(1964,12,13)]):
    print r.name,r.birth

f = buzhug_files.FloatFile().to_block
def all(v):
    print v,[ord(c) for c in f(v)]
# search between 2 floats
print '\nFloat between 0 and 1e50'
print len(db.select(None,afloat=[0.0,1e50]))
all(0.0)
all(1e50)
print 'lc'
for r in [ r for r in db if 0.0 <= r.afloat <= 1e50 ]:
    all(r.afloat)
print '\n select'
for r in db.select(None,'x<=afloat<=y',x=0.0,y=1e50):
Example #8
#!/bin/python2.7

from buzhug import Base
import os

db = Base(os.getcwd() + '/db/bookDB').open()

res = db.select(reviewed=True) #select all reviewed

for r in res:
    print r.text.encode('utf-8')
Example #9
print "\n**************************"
print "Welcome to Tar Heel Ranker v1.0. The goal of this project is to actively assist in the reviewing process of books that have no home. For more information contact Jesse Osiecki at [email protected]"
print "standard usage: python tarheelreview.py -FLAGS"
print "Flags are: r for redisbayes checker (Bayesian spam filter. Make sure Redis is running for this"
print "g for gibberish detector using markov chains on letter transitions"
print "m for markover -- markov chain filter using word transitions"
print "a for author check. Outputs number of books a given author has in the Database that are already reviewed"
print "s for rating check. Tests books current rating against a set value (4) and returns boolean if higher"
print "recommended usage is -masg"
print "NOTE: to end the program, simply press CTRL-D and the reviewed books will be noted in a file in the CWD"
print "\n\n**************************"

db = Base(os.getcwd() + '/db/bookDB').open()
#open redis
rb = redisbayes.RedisBayes(redis.StrictRedis(host='localhost', port=6379, db=0))
books = db.select(reviewed=False) #select all unreviewed
markov = markover.Markov()


def author_checker(aut):
    authored_books = db.select(author_id=aut, reviewed=True)
    return len(authored_books)
def rating_checker(rat, lim):
    return rat >= lim
redis_bayes = False
gibb = False
markover = False
author_check = False
rating_check = False

l = len(sys.argv)
Example #10
File: preprocess.py  Project: Perchik/RoD2
def main():
	AestheticScorer.LoadModel()
	
	#Make the database. We need tables for events, locations, photos, people, and faces
	events = Base(os.getcwd()+"\\Databases\\events.db")
	events.create(('name',str),('firsttime',str),('lasttime',str),mode="override")

	locations = Base(os.getcwd()+"\\Databases\\locations.db")
	locations.create(('lat',float),('lon',float),('name',str),mode="override")

	photos = Base(os.getcwd()+"\\Databases\\photos.db")
	photos.create(('path',str),('timestamp',str),('aestheticscore',int),('locationid',int), ('eventid',int), ("width", int), ("height", int),mode="override")

	people = Base(os.getcwd()+"\\Databases\\people.db")
	people.create(('name',str),('age',int),mode="override")

	faces = Base(os.getcwd()+"\\Databases\\faces.db")
	faces.create(('photoid',int),('thumbnail',str),('personid',int),('x',int),('y',int),('w',int),('h',int),mode="override")

	# Walk through all the directories, making a list of photos, geotagging the lowest level subdirectories, and making
	# a master list of the photos and geotags

	photolist = []
	geotaglist = []
	print "geocoding directories"
	for dirname, dirnames, filenames in os.walk(os.getcwd()+"\\Images\\photos"):
		#geocode all the subdirectory names
		for subdirname in dirnames:
			n,lat,long =  geotag.findlocation(subdirname)

			#if we have a problem geotagging, prompt the user for a different location name
			while n == "NONE":
				newname = raw_input("We couldn't find the location '" + subdirname + "'. Type in another place name to try.")
				n, lat, long = geotag.findlocation(newname)

			#once we have a valid location, insert it if it's not already in the database
			if not locations(name=n):
				locations.insert(float(lat), float(long), n)

		#add all the files to a file list, and build a parallel list of geotags
		for filename in filenames:
			#print "filename is ",filename
			if filename[-3:] == "jpg" or filename[-3:] == "JPG":
				#find the id for that subdirname in the database so we can geotag it
				locname = dirname[dirname.rfind('\\') + 1:]
				#location = locations(name=dirname)
				photolist.append(os.path.join(dirname,filename))
				geotaglist.append((os.path.join(dirname,filename),locname))

	#make a list to identify which event each photo is in

	#print photolist
	print "getting events"
	eventLabels, uniqueEvents = timecluster.eventCluster(photolist)
	#print "events: "
	#print eventLabels
	print uniqueEvents

	#insert the events into the event database
	for label in uniqueEvents:
		events.insert(label[1],"","")

	#the events are already sorted according to photo names
	
	
	
	#now sort the geotags and photolist according to photo names as well, so we'll have parallel lists
	geotaglist.sort()
	photolist.sort()
	
	#now we can finally insert each photo, with a name, event, and geotag
	for i in range(len(photolist)):
		print i, photolist[i]
		width, height = Image.open(photolist[i]).size
		photos.insert(photolist[i],eventLabels[i][1],
		 AestheticScorer.getScore(photolist[i]),
		 locations(name=geotaglist[i][1])[0].__id__,
		 eventLabels[i][0], int(width), int(height))
	print "finding faces"
	#for all the images we just gathered, find the people and faces, and insert them into the database
	facelist = []
	for file in photolist:
		facelist.append(detectfaces.detectFaceInImage(file))

	faceimages, projections, imgs, minFaces = detectfaces.faceLBP(facelist)#detectfaces.facePCA(facelist)
	labels, nclusters = detectfaces.gMeansCluster(projections, minFaces)
	#detectfaces.visualizeResults(faceimages, labels, nclusters)

	#add the individuals we found in the photos into the people database
	i = 0
	for i in range(0,nclusters):
		people.insert(str(i),0)

	#add the faces, linking them to the individuals
	faceindex = 0
	photoindex = 0
	for listing in facelist:
		facerects = listing[1:]
		for entry in facerects:
			faces.insert(photoindex,imgs[faceindex],people[labels[faceindex]].__id__,entry[0],entry[1],entry[2],entry[3])
			faceindex = faceindex + 1
		photoindex = photoindex + 1
	
	print "updating events"
	#update the events table to include the start and end times
	eventtimes=[]
	for event in events.select(None).sort_by("+__id__"):
		orderedPhotos = photos.select(eventid=event.__id__).sort_by("+timestamp")
		events.update(event,firsttime=orderedPhotos[0].timestamp)
		events.update(event,lasttime=orderedPhotos[-1].timestamp)
	
		
		
	print "Done"
Example #11
def run_test(thread_safe=False):

    if not thread_safe:
        db = Base(r'dummy')
    else:
        db = TS_Base('dummy')

    db.create(('name', str), ('fr_name', unicode), ('age', int),
              ('size', int, 300), ('birth', date, date(1994, 1, 14)),
              ('afloat', float, 1.0), ('birth_hour', dtime, dtime(10, 10, 10)),
              mode='override')

    # test float conversions
    if thread_safe is False:
        f = db._file["afloat"]

        def all(v):
            return [ord(c) for c in v]

        for i in range(10):
            afloat = random.uniform(-10**random.randint(-307, 307),
                                    10**random.randint(-307, 307))
            try:
                assert cmp(afloat, 0.0) == cmp(f.to_block(afloat),
                                               f.to_block(0.0))
            except:
                print afloat
                print "afloat > 0.0 ?", afloat > 0.0
                print "blocks ?", f.to_block(afloat) > f.to_block(0.0)
                print all(f.to_block(afloat)), all(f.to_block(0.0))
                raise

    assert db.defaults["age"] == None
    assert db.defaults["size"] == 300
    assert db.defaults["afloat"] == 1.0
    assert db.defaults["birth_hour"] == dtime(10, 10, 10)
    assert db.defaults["birth"] == date(1994, 1, 14)

    for i in range(100):
        db.insert(name=random.choice(names),
                  fr_name=unicode(random.choice(fr_names), 'latin-1'),
                  age=random.randint(7, 47),
                  size=random.randint(110, 175),
                  birth=date(random.randint(1858, 1999), random.randint(1, 12),
                             10),
                  afloat=random.uniform(-10**random.randint(-307, 307),
                                        10**random.randint(-307, 307)),
                  birth_hour=dtime(random.randint(0,
                                                  23), random.randint(0, 59),
                                   random.randint(0, 59)))

    assert len(db) == 100
    assert isinstance(db[50].fr_name, unicode)
    print db[50].fr_name.encode('latin-1')

    db.open()
    # test if default values have not been modified after open()
    assert db.defaults["age"] == None
    assert db.defaults["size"] == 300
    assert db.defaults["afloat"] == 1.0
    assert db.defaults["birth_hour"] == dtime(10, 10, 10)
    assert db.defaults["birth"] == date(1994, 1, 14)

    for i in range(5):
        # insert a list
        db.insert(
            random.choice(names), unicode(random.choice(fr_names), 'latin-1'),
            random.randint(7, 47), random.randint(110, 175),
            date(random.randint(1958, 1999), random.randint(1, 12), 10),
            random.uniform(-10**random.randint(-307, 307),
                           10**random.randint(-307, 307)),
            dtime(random.randint(0, 23), random.randint(0, 59),
                  random.randint(0, 59)))
        db.insert(name=random.choice(names))  # missing fields
        for field in db.field_names[2:]:
            if field == "name":
                continue
            try:
                assert getattr(db[-1], field) == db.defaults[field]
            except:
                print "attribute %s not set to default value %s" % (field,
                                                                    db[-1])
                raise

    # insert as string
    db.set_string_format(unicode, 'latin-1')
    db.set_string_format(date, '%d-%m-%y')
    db.set_string_format(dtime, '%H-%M-%S')
    db.insert_as_strings(name="testname",
                         fr_name=random.choice(fr_names),
                         age=10,
                         size=123,
                         birth="07-10-95",
                         birth_hour="20-53-3")

    assert db[-1].birth == date(1995, 10, 7)
    assert db[-1].name == "testname"
    assert db[-1].age == 10
    assert db[-1].afloat == db.defaults["afloat"]

    db.insert_as_strings("testname", random.choice(fr_names), 11, 134,
                         "09-12-94", 1.0, "5-6-13")

    assert db[len(db) - 1].birth == date(1994, 12, 9)
    assert db[-1].name == "testname"
    assert db[-1].age == 11
    assert db[-1].size == 134
    assert db[-1].afloat == 1.0

    # search between 2 dates
    print '\nBirth between 1960 and 1970'
    for r in db.select(None, birth=[date(1960, 1, 1), date(1970, 12, 13)]):
        print r.name, r.birth

    print "sorted"
    for r in db.select(None, birth=[date(1960, 1, 1),
                                    date(1970, 12,
                                         13)]).sort_by('+name-birth'):
        print r.name, r.birth

    f = buzhug_files.FloatFile().to_block

    def all(v):
        return [ord(c) for c in f(v)]

    # search between 2 floats

    # selection by list comprehension
    s1 = [r for r in db if 0.0 <= r.afloat <= 1e50]
    # selection by select
    s2 = db.select(None, 'x<=afloat<=y', x=0.0, y=1e50)
    # selection by select with interval
    s3 = db.select(None, afloat=[0.0, 1e50])

    try:
        assert len(s1) == len(s2) == len(s3)
    except:
        print "%s records by list comprehension, " % len(s1)
        print "%s by select by formula," % len(s2)
        print "%s by select by interval" % len(s3)

    for r in s1:
        try:
            assert r in s2
        except:
            print all(r.afloat)

    for r in s2:
        try:
            assert r in s1
        except:
            print "in select but not in list comprehension", r
            raise
    r = db[0]
    assert r.__class__.db is db

    fr = random.choice(fr_names)
    s1 = [r for r in db if r.age == 30 and r.fr_name == unicode(fr, 'latin-1')]
    s2 = db.select(['name', 'fr_name'], age=30, fr_name=unicode(fr, 'latin-1'))

    assert len(s1) == len(s2)

    # different ways to count the number of items
    assert len(db) == sum([1 for r in db]) == len(db.select(['name']))

    # check if version number is correctly incremented
    for i in range(5):
        recs = db.select_for_update(['name'], 'True')
        version = recs[0].__version__
        recs[0].update()
        assert db[0].__version__ == version + 1

    # check if cleanup doesn't change db length
    length_before = len(db)
    db.cleanup()
    assert len(db) == length_before

    # check if selection by select on __id__ returns the same as direct
    # access by id
    recs = db.select([], '__id__ == c', c=20)
    assert recs[0] == db[20]

    # check that has_key returns False for an invalid key
    assert not db.has_key(1000)

    # drop field
    db.drop_field('name')
    # check if field was actually removed from base definition and rows
    assert not "name" in db.fields
    assert not hasattr(db[20], "name")

    # add field
    db.add_field('name', str, default="marcel")
    # check if field was added with the correct default value
    assert "name" in db.fields
    assert hasattr(db[20], "name")
    assert db[20].name == "marcel"

    # change default value
    db.set_default("name", "julie")
    db.insert(age=20)
    assert db[-1].name == "julie"

    # delete a record

    db.delete([db[10]])
    # check if record has been deleted
    try:
        print db[10]
        raise Exception, "Row 10 should have been deleted"
    except IndexError:
        pass

    assert 10 not in db
    assert len(db) == length_before

    # selections

    # selection by generator expression
    # age between 30 and 32
    d_ids = []
    for r in [r for r in db if 33 > r.age >= 30]:
        d_ids.append(r.__id__)

    length = len(db)
    # remove these items
    db.delete([r for r in db if 33 > r.age >= 30])
    # check if correct number of records removed
    assert len(db) == length - len(d_ids)
    # check if all records have been removed
    assert not [r for r in db if 33 > r.age >= 30]

    # updates
    # select name = pierre
    s1 = db.select(['__id__', 'name', 'age', 'birth'], name='pierre')
    # make 'pierre' uppercase
    for record in db.select_for_update(None, 'name == x', x='pierre'):
        db.update(record, name=record.name.upper())
    # check if attribute was correctly updated
    for rec in s1:
        assert db[rec.__id__].name == "PIERRE"

    # increment ages
    for record in db.select_for_update([], 'True'):
        age = record.age
        if not record.age is None:
            db.update(record, age=record.age + 1)
            # check
            assert db[record.__id__].age == age + 1

    for record in [r for r in db]:
        age = record.age
        if not record.age is None:
            db.update(record, age=record.age + 1)
            # check
            assert db[record.__id__].age == age + 1

    # change dates
    for record in db.select_for_update([], 'age>v', v=35):
        db.update(record,
                  birth=date(random.randint(1958, 1999), random.randint(1, 12),
                             10))

    db.commit()

    # check length after commit
    assert sum([1 for r in db]) == len(db)

    # insert new records
    for i in range(50):
        db.insert(name=random.choice(names),
                  age=random.randint(7, 47),
                  size=random.randint(110, 175))

    # check that record 10 is still deleted
    try:
        print db[10]
        raise Exception, "Row 10 should have been deleted"
    except IndexError:
        pass

    print db.keys()
    print "has key 10 ?", db.has_key(10)
    assert 10 not in db
    #raw_input()

    # check that deleted_lines was cleared by commit()
    assert not db._pos.deleted_lines
    print db._del_rows.deleted_rows

    length = len(db)  # before cleanup

    # physically remove the deleted items
    db.cleanup()
    # check that deleted_lines and deleted_rows are clean
    assert not db._pos.deleted_lines
    assert not db._del_rows.deleted_rows

    # check that record 10 is still deleted
    try:
        print db[10]
        raise Exception, "Row 10 should have been deleted"
    except IndexError:
        pass

    assert 10 not in db

    # check that length was not changed by cleanup
    assert len(db) == length
    assert len([r for r in db]) == length

    # age > 30
    for r in db.select(['__id__', 'name', 'age'],
                       'name == c1 and age > c2',
                       c1='pierre',
                       c2=30):
        assert r.name == "pierre"
        assert r.age > 30

    # name =="PIERRE" and age > 30
    for r in db.select(['__id__', 'name', 'age', 'birth'],
                       'name == c1 and age > c2',
                       c1='PIERRE',
                       c2=30):
        assert r.name == 'PIERRE'
        assert r.age > 30

    # test with !=
    for r in db.select(['__id__'], 'name != c1', c1='claire'):
        assert r.name != 'claire'

    # age > id
    # with select
    s1 = db.select(['name', '__id__', 'age'], 'age > __id__')
    for r in s1:
        assert r.age > r.__id__
    # with iter
    s2 = [r for r in db if r.age > r.__id__]
    for r in s2:
        assert r.age > r.__id__

    assert len(s1) == len(s2)

    # birth > date(1978,1,1)
    # with select
    s1 = db.select(['name', '__id__', 'age'], 'birth > v', v=date(1978, 1, 1))
    for r in s1:
        assert r.birth > date(1978, 1, 1)
    # with iter

    s2 = [r for r in db if r.birth and r.birth > date(1978, 1, 1)]
    for r in s2:
        assert r.birth > date(1978, 1, 1)

    assert len(s1) == len(s2)

    # test with floats
    for i in range(10):
        x = random.uniform(-10**random.randint(-307, 307),
                           10**random.randint(-307, 307))
        s1 = [r for r in db if r.afloat > x]
        s2 = db.select(['name'], 'afloat > v', v=x)
        assert len(s1) == len(s2)

    # base with external link
    houses = Base('houses')
    houses.create(('address', str), ('flag', bool), ('resident', db, db[0]),
                  mode="override")

    addresses = [
        'Giono', 'Proust', 'Mauriac', 'Gide', 'Bernanos', 'Racine',
        'La Fontaine'
    ]
    ks = db.keys()
    for i in range(50):
        x = random.choice(ks)
        address = random.choice(addresses)
        houses.insert(address=address, flag=address[0] > "H", resident=db[x])

    # houses with jean
    s1 = []
    for h in houses:
        if h.resident.name == 'jean':
            s1.append(h)

    # by select : ???
    #s2 = houses.select([],'resident.name == v',v='jean')
    # assert len(s1) == len(s2)

    h1 = Base('houses')
    h1.open()

    l1 = len(h1.select([], flag=True))
    l2 = len(h1.select([], flag=False))
    assert l1 + l2 == len(h1)

    class DictRecord(Record):
        def __getitem__(self, k):
            item = self
            names = k.split('.')
            for name in names:
                item = getattr(item, name)
            return item

    h1.set_record_class(DictRecord)
    print '\nrecord_class = DictRecord, h1[0]'
    print h1[0]
    print "\nResident name: %(resident.name)s\nAddress: %(address)s" % h1[0]
Example #12
assert db[-1].name == "testname"
assert db[-1].age == 10
assert db[-1].afloat == db.defaults["afloat"]

db.insert_as_strings("testname",random.choice(fr_names),
    11,134,"09-12-94",1.0, "5-6-13")

assert db[len(db)-1].birth == date(1994,12,9)
assert db[-1].name == "testname"
assert db[-1].age == 11
assert db[-1].size == 134
assert db[-1].afloat == 1.0

# search between 2 dates
print '\nBirth between 1960 and 1970'
for r in db.select(None,birth=[date(1960,1,1),date(1970,12,13)]):
    print r.name,r.birth

print "sorted"
for r in db.select(None,birth=[date(1960,1,1),date(1970,12,13)]).sort_by('+name-birth'):
    print r.name,r.birth

f = buzhug_files.FloatFile().to_block
def all(v):
    return [ord(c) for c in f(v)]

# search between 2 floats

# selection by list comprehension
s1 = [ r for r in db if 0.0 <= r.afloat <= 1e50 ]
# selection by select
Example #13
# insert as string
db.set_string_format(unicode, 'latin-1')
db.set_string_format(date, '%d-%m-%y')
db.insert_as_strings(name="testname",
                     fr_name=random.choice(fr_names),
                     age=10,
                     size=123,
                     birth="07-10-94")
print db[len(db) - 1].birth
db.insert_as_strings("testname", random.choice(fr_names), 11, 134, "09-10-94",
                     1.0)
print db[len(db) - 1].birth

# search between 2 dates
print '\nBirth between 1960 and 1964'
for r in db.select(None, birth=[date(1960, 1, 1), date(1964, 12, 13)]):
    print r.name, r.birth

f = buzhug_files.FloatFile().to_block


def all(v):
    print v, [ord(c) for c in f(v)]


# search between 2 floats
print '\nFloat between 0 and 1e50'
print len(db.select(None, afloat=[0.0, 1e50]))
all(0.0)
all(1e50)
print 'lc'
Example #14
from buzhug import Base
import os

def main():
	events = Base(os.getcwd()+"\\Databases\\events.db")
	locations = Base(os.getcwd()+"\\Databases\\locations.db")
	photos = Base(os.getcwd()+"\\Databases\\photos.db")
	people = Base(os.getcwd()+"\\Databases\\people.db")
	faces = Base(os.getcwd()+"\\Databases\\faces.db")
	training = Base(os.getcwd()+"\\Databases\\training_images.db")
	features = Base(os.getcwd()+"\\Databases\\features.db")
	try:
		print "============ events ================"
		events.open()
		for field in events.fields:
			print field,events.fields[field]
		print "len",len(events),"\n\n"
		for record in events.select().sort_by("+firsttime"):
			print record
		
		elist =[ None for i in range(len(events))]
		print elist
			
		
		print "============ locations ================"
		locations.open()
		for field in locations.fields:
			print field,locations.fields[field]
		print "len",len(locations),"\n\n"
		#for record in locations:
		#	print record

		print "============ photos ================"
		photos.open()
		for field in photos.fields:
			print field,photos.fields[field]
		print "len",len(photos),"\n\n"
		for record in photos:
			print record
		
		print "============ people ================"
		people.open()
		for field in people.fields:
			print field,people.fields[field]
		print "len",len(people),"\n\n"
		#for record in people:
		#	print record
		
		print "=========== faces =============="
		faces.open()
		for field in faces.fields:
			print field,faces.fields[field]
		print "len",len(faces),"\n\n"
		#for record in faces:
		#	print record
		
		print "============ training ========"
		training.open()
		for field in training.fields:
			print field, training.fields[field]
		print "len",len(training),"\n\n"
	
		print "============ features ==========="
		features.open()
		for field in features.fields:
			print field, features.fields[field]
		print "len",len(features),"\n\n"
		
		
	except IOError as err:
		print "no database there:",err
Example #15
File: pinguin.py  Project: apie/scripts
time = m.group(1)
yamlfile = yamlfile+time
artist = m.group(2).strip()
title = m.group(3).strip()

dateformat="%Y-%m-%d"

date = datetime.datetime.strftime(datetime.date.today(),dateformat)
timestamp=datetime.datetime.strptime(date+' '+time,dateformat+" %H:%M:%S")
#lastfmsubmitd doesn't accept timezones, so convert to UTC
zone = pytz.timezone('Europe/Amsterdam')
localtimestamp = zone.localize(timestamp)
utc=pytz.UTC
timestamp = localtimestamp.astimezone(utc)

all={'artist': artist,'title': title, 'length': 120,'time': '!timestamp '+timestamp.strftime('%Y-%m-%d %H:%M:%S')}

recordsfromthisstamp = db.select(['date'],date=timestamp)

#only insert if there is no existing entry for this timestamp
if len(recordsfromthisstamp)==0:
  record_id = db.insert(timestamp,artist,title)
  all={'artist': artist,'title': title, 'length': 120,'time': '!timestamp '+timestamp.strftime('%Y-%m-%d %H:%M:%S')}
  stream = file(yamlfile,'w')
  result=yaml.dump(all,default_flow_style=False)
  result=result.replace("'","")
  stream.write(result)
  stream.close()

db.close()
Example #16
#this is a commandline utility to quickly get book text for testing purposes
from buzhug import Base
import os
import sys
import json


db = Base(os.getcwd() + '/db/bookDB')
db.open()
c=0
l = len(sys.argv)
while c < l-1:
    c+=1
    inp = sys.argv[c]
    dupProtect = True
    bookID = int(inp)
    records = db.select(ID = bookID)
    bookText = ''
    slug = ''
    for r in records:
        j = json.loads(r.json)
        if j.get('ID') == bookID:
            text = j.get('pages')
            for p in text:
                bookText += p['text'] + '\n'
            slug = r.slug
            if dupProtect:
                break
    print bookText
db.close()
Example #17
def run_test(thread_safe=False):

    if not thread_safe:
        db = Base(r'dummy') 
    else:
        db = TS_Base('dummy')

    db.create(('name',str), ('fr_name',unicode),
        ('age',int),
        ('size',int,300),
        ('birth',date,date(1994,1,14)),
        ('afloat',float,1.0),
        ('birth_hour', dtime,dtime(10,10,10)),
        mode='override')

    # test float conversions
    if thread_safe is False:
        f = db._file["afloat"]
        def all(v):
            return [ord(c) for c in v]

        for i in range(10):
            afloat = random.uniform(-10**random.randint(-307,307),
                    10**random.randint(-307,307))
            try:
                assert cmp(afloat,0.0) == cmp(f.to_block(afloat),f.to_block(0.0))
            except:
                print afloat
                print "afloat > 0.0 ?",afloat>0.0
                print "blocks ?",f.to_block(afloat)>f.to_block(0.0)
                print all(f.to_block(afloat)),all(f.to_block(0.0))
                raise

    assert db.defaults["age"] == None
    assert db.defaults["size"] == 300
    assert db.defaults["afloat"] == 1.0
    assert db.defaults["birth_hour"] == dtime(10,10,10)
    assert db.defaults["birth"] == date(1994,1,14)

    for i in range(100):
        db.insert(name=random.choice(names),
             fr_name = unicode(random.choice(fr_names),'latin-1'),
             age=random.randint(7,47),size=random.randint(110,175),
             birth=date(random.randint(1858,1999),random.randint(1,12),10),
             afloat = random.uniform(-10**random.randint(-307,307),
                10**random.randint(-307,307)),
             birth_hour = dtime(random.randint(0, 23), random.randint(0, 59), random.randint(0, 59)))

    assert len(db)==100
    assert isinstance(db[50].fr_name,unicode)
    print db[50].fr_name.encode('latin-1')

    db.open()
    # test if default values have not been modified after open()
    assert db.defaults["age"] == None
    assert db.defaults["size"] == 300
    assert db.defaults["afloat"] == 1.0
    assert db.defaults["birth_hour"] == dtime(10,10,10)
    assert db.defaults["birth"] == date(1994,1,14)

    for i in range(5):
        # insert a list
        db.insert(random.choice(names),
             unicode(random.choice(fr_names),'latin-1'),
             random.randint(7,47),random.randint(110,175),
             date(random.randint(1958,1999),random.randint(1,12),10),
             random.uniform(-10**random.randint(-307,307),
                10**random.randint(-307,307)),
             dtime(random.randint(0, 23), random.randint(0, 59), random.randint(0, 59)))
        db.insert(name=random.choice(names)) # missing fields
        for field in db.field_names[2:]:
            if field == "name":
                continue
            try:
                assert getattr(db[-1],field) == db.defaults[field]
            except:
                print "attribute %s not set to default value %s" %(field,db[-1])
                raise

    # insert as string
    db.set_string_format(unicode,'latin-1')
    db.set_string_format(date,'%d-%m-%y')
    db.set_string_format(dtime,'%H-%M-%S')
    db.insert_as_strings(name="testname",fr_name=random.choice(fr_names),
        age=10,size=123,birth="07-10-95", birth_hour="20-53-3")

    assert db[-1].birth == date(1995,10,7)
    assert db[-1].name == "testname"
    assert db[-1].age == 10
    assert db[-1].afloat == db.defaults["afloat"]

    db.insert_as_strings("testname",random.choice(fr_names),
        11,134,"09-12-94",1.0, "5-6-13")

    assert db[len(db)-1].birth == date(1994,12,9)
    assert db[-1].name == "testname"
    assert db[-1].age == 11
    assert db[-1].size == 134
    assert db[-1].afloat == 1.0

    # search between 2 dates
    print '\nBirth between 1960 and 1970'
    for r in db.select(None,birth=[date(1960,1,1),date(1970,12,13)]):
        print r.name,r.birth

    print "sorted"
    for r in db.select(None,birth=[date(1960,1,1),date(1970,12,13)]).sort_by('+name-birth'):
        print r.name,r.birth

    f = buzhug_files.FloatFile().to_block
    def all(v):
        return [ord(c) for c in f(v)]

    # search between 2 floats

    # selection by list comprehension
    s1 = [ r for r in db if 0.0 <= r.afloat <= 1e50 ]
    # selection by select
    s2 = db.select(None,'x<=afloat<=y',x=0.0,y=1e50)
    # selection by select with interval
    s3 = db.select(None,afloat=[0.0,1e50])

    try:
        assert len(s1) == len(s2) == len(s3)
    except:
        print "%s records by list comprehension, " %len(s1)
        print "%s by select by formula," %len(s2)
        print "%s by select by interval" %len(s3)

    for r in s1:
        try:
            assert r in s2
        except:
            print all(r.afloat)

    for r in s2:
        try:
            assert r in s1
        except:
            print "in select but not in list comprehension",r
            raise
    r = db[0]
    assert r.__class__.db is db

    fr=random.choice(fr_names)
    s1 = [ r for r in db if r.age == 30 and r.fr_name == unicode(fr,'latin-1')]
    s2 = db.select(['name','fr_name'],age=30,fr_name = unicode(fr,'latin-1'))

    assert len(s1)==len(s2)

    # different ways to count the number of items
    assert len(db)  == sum([1 for r in db]) == len(db.select(['name']))

    # check if version number is correctly incremented
    for i in range(5):
        recs = db.select_for_update(['name'],'True')
        version = recs[0].__version__
        recs[0].update()
        assert db[0].__version__ == version + 1

    # check if cleanup doesn't change db length
    length_before = len(db)
    db.cleanup()
    assert len(db) == length_before

    # check if selection by select on __id__ returns the same as direct
    # access by id
    recs = db.select([],'__id__ == c',c=20)
    assert recs[0] == db[20]

    # check that has_key returns False for an invalid key
    assert not db.has_key(1000)

    # drop field
    db.drop_field('name')
    # check if field was actually removed from base definition and rows
    assert not "name" in db.fields
    assert not hasattr(db[20],"name")

    # add field
    db.add_field('name',str,default="marcel")
    # check if field was added with the correct default value
    assert "name" in db.fields
    assert hasattr(db[20],"name")
    assert db[20].name == "marcel"

    # change default value
    db.set_default("name","julie")
    db.insert(age=20)
    assert db[-1].name == "julie"

    # delete a record

    db.delete([db[10]])
    # check if record has been deleted
    try:
        print db[10]
        raise Exception,"Row 10 should have been deleted"
    except IndexError:
        pass

    assert 10 not in db
    assert len(db) == length_before

    # selections    

    # selection by generator expression
    # age between 30 and 32
    d_ids = []
    for r in [r for r in db if 33> r.age >= 30]:
        d_ids.append(r.__id__)

    length = len(db)
    # remove these items
    db.delete([r for r in db if 33> r.age >= 30])
    # check if correct number of records removed
    assert len(db) == length - len(d_ids)
    # check if all records have been removed
    assert not [r for r in db if 33> r.age >= 30]

    # updates
    # select name = pierre
    s1 = db.select(['__id__','name','age','birth'],name='pierre')
    # make 'pierre' uppercase
    for record in db.select_for_update(None,'name == x',x='pierre'):
        db.update(record,name = record.name.upper())
    # check if attribute was correctly updated
    for rec in s1:
        assert db[rec.__id__].name == "PIERRE"

    # increment ages
    for record in db.select_for_update([],'True'):
        age = record.age
        if not record.age is None:
            db.update(record,age = record.age+1)
            # check
            assert db[record.__id__].age == age + 1

    for record in [r for r in db]:
        age = record.age
        if not record.age is None:
            db.update(record,age = record.age+1)
            # check
            assert db[record.__id__].age == age + 1

    # change dates
    for record in db.select_for_update([],'age>v',v=35):
        db.update(record,birth = date(random.randint(1958,1999),
                                random.randint(1,12),10))

    db.commit()

    # check length after commit
    assert sum([1 for r in db]) == len(db)

    # insert new records
    for i in range(50):
        db.insert(name=random.choice(names),
             age=random.randint(7,47),size=random.randint(110,175))

    # check that record 10 is still deleted
    try:
        print db[10]
        raise Exception,"Row 10 should have been deleted"
    except IndexError:
        pass

    print db.keys()
    print "has key 10 ?",db.has_key(10)
    assert 10 not in db
    #raw_input()

    # check that deleted_lines was cleared by commit()
    assert not db._pos.deleted_lines
    print db._del_rows.deleted_rows

    length = len(db) # before cleanup

    # physically remove the deleted items    
    db.cleanup()
    # check that deleted_lines and deleted_rows are clean
    assert not db._pos.deleted_lines
    assert not db._del_rows.deleted_rows

    # check that record 10 is still deleted
    try:
        print db[10]
        raise Exception,"Row 10 should have been deleted"
    except IndexError:
        pass

    assert 10 not in db

    # check that length was not changed by cleanup
    assert len(db) == length
    assert len([ r for r in db]) == length

    # name == 'pierre' and age > 30
    for r in db.select(['__id__','name','age'],
        'name == c1 and age > c2',
        c1 = 'pierre',c2 = 30):
        assert r.name == "pierre"
        assert r.age > 30

    # name =="PIERRE" and age > 30
    for r in db.select(['__id__','name','age','birth'],
                'name == c1 and age > c2',
                c1 = 'PIERRE',c2 = 30):
        assert r.name == 'PIERRE'
        assert r.age > 30

    # test with !=
    for r in db.select(['__id__'],'name != c1',c1='claire'):
        assert r.name != 'claire'

    # age > id
    # with select
    s1 = db.select(['name','__id__','age'],'age > __id__')
    for r in s1:
        assert r.age > r.__id__
    # with iter
    s2 = [ r for r in db if r.age > r.__id__ ]
    for r in s2:
        assert r.age > r.__id__

    assert len(s1) == len(s2)

    # birth > date(1978,1,1)
    # with select
    s1 = db.select(['name','__id__','age'],'birth > v',v=date(1978,1,1))
    for r in s1:
        assert r.birth > date(1978,1,1)
    # with iter

    s2 = [ r for r in db if r.birth and r.birth > date(1978,1,1) ]
    for r in s2:
        assert r.birth > date(1978,1,1)

    assert len(s1) == len(s2)

    # test with floats
    for i in range(10):
        x = random.uniform(-10**random.randint(-307,307),
                10**random.randint(-307,307))
        s1 = [ r for r in db if r.afloat > x ]
        s2 = db.select(['name'],'afloat > v',v=x)
        assert len(s1)==len(s2)

    # base with external link
    houses = Base('houses')
    houses.create(('address',str),('flag',bool),('resident',db,db[0]),mode="override")

    addresses = ['Giono','Proust','Mauriac','Gide','Bernanos','Racine',
        'La Fontaine']
    ks = db.keys()
    for i in range(50):
        x = random.choice(ks)
        address = random.choice(addresses)
        houses.insert(address=address,flag = address[0]>"H",resident=db[x])

    # houses with jean
    s1 = []
    for h in houses:
        if h.resident.name == 'jean':
            s1.append(h)

    # by select : ???
    #s2 = houses.select([],'resident.name == v',v='jean')
    # assert len(s1) == len(s2)

    h1 = Base('houses')
    h1.open()

    l1 = len(h1.select([],flag=True))
    l2 = len(h1.select([],flag=False))
    assert l1 + l2 == len(h1)

    class DictRecord(Record):
        def __getitem__(self, k):
            item = self
            names = k.split('.')
            for name in names:
                item = getattr(item, name)
            return item

    h1.set_record_class(DictRecord)
    print '\nrecord_class = DictRecord, h1[0]'
    print h1[0]
    print "\nResident name: %(resident.name)s\nAddress: %(address)s" % h1[0]
Example #18
0
class DB:
    def __init__(self, storage_path):
                        
        self.dbh_objects = Base(os.path.join(storage_path, 'objects'))
        self.dbh_blocks = Base(os.path.join(storage_path, 'blocks'))
        self.dbh_replicas = Base(os.path.join(storage_path, 'replicas'))
        self.dbh_tree = Base(os.path.join(storage_path, 'tree'))
        self.dbh_paths = Base(os.path.join(storage_path, 'paths'))
        self.dbh_id = Base(os.path.join(storage_path, 'id'))
        self.dbh_tags = Base(os.path.join(storage_path, 'tags'))
        self.storage_path = storage_path        
    
    def __create_root(self):
        """ Check if the filesystem has a / and if not create it"""
        
        print "Initializing filesystem..."

        if self.get_file(path='/'):
           return
                
        print "Creating root..."
        
        f = FSObject(1,1,'/',0,0,0,0)
        
        # let's see if we already have a key stored
        set = self.dbh_objects.select(['oid'],oid=str(f.oid))
        if set == []:
            # we have to create the tree and paths entries first
            self.dbh_tree.insert(str(f.oid), str(f.parent))
            self.dbh_paths.insert(str((f.parent, f.path)))
            
            self.dbh_objects.insert(str(f.oid), dumps(f), self.dbh_tree[len(self.dbh_tree)-1], self.dbh_paths[len(self.dbh_paths)-1])    
        
        
        #set the current oid for the id increment sequence
        set = self.dbh_id.select(['curr_oid'])
        if set == []:
            self.dbh_id.insert('1')
         
        
    def setup_fs_db(self):                
                
        try:
            self.dbh_blocks.create(('key', str), ('blocks', str))
        except IOError:    
            self.dbh_blocks.open()
        
        try:
            self.dbh_replicas.create(('key', str), ('replicas', str))
        except IOError:    
            self.dbh_replicas.open()
        
        try:
            self.dbh_tree.create(('oid', str), ('parent', str))
        except IOError:    
            self.dbh_tree.open()
        
        try:
            self.dbh_tags.create(('oid', str), ('tag', str))
        except IOError:    
            self.dbh_tags.open()
            
        try:
            self.dbh_paths.create(('key', str))
        except IOError:    
            self.dbh_paths.open()
        
        try:
            self.dbh_id.create(('curr_oid', str))
        except IOError:    
            self.dbh_id.open()
                
        try:
            self.dbh_objects.create(('oid', str), ('fsobj', str), ('tree', self.dbh_tree), ('paths', self.dbh_paths))
        except IOError:    
            self.dbh_objects.open()
        
        self.__create_root()                

    
    def get_path_oid(self, path):
        """Gets the parent filenode for path"""
        
        nodes = []
        parent_path = path
        while 1:
            (parent_path,node) = os.path.split(parent_path)
            if node == '':
                nodes.insert(0,'/')
                break
            nodes.insert(0,node)
   
        parent_oid = 0
        for node_name in nodes:
            key = str((parent_oid, node_name))
            print "looking up: %s" % key
                      
            # search for a match
            f = None
            for record in self.dbh_objects:
                if record.paths.key == key:
                    f = loads(record.fsobj)
                    break
            
            print "found it!"      
            if not f:
                return 0
                                    
            parent_oid = f.oid
                
        return parent_oid
        
    
    def insert_file(self, path, fsobj):
        #check first if there is a parent directory to store this file        
        
        f = self.get_file(path=path)
        
        print "inserting file with path: "+path
        print fsobj
        
        if not f:
            print "ERR: [%s]" % os.path.split(fsobj.path)[0]
            raise FileSystemError('No parent directory to store: %s' % fsobj.path)
        
        # the parent of this object is the directory found at path
        fsobj.parent = f.oid
            
        set = self.dbh_id.select_for_update(['curr_oid'])        
        
        curr_oid = int(set[0].curr_oid) + 1                
        fsobj.oid = curr_oid
        print "Inserting OID: %s" % fsobj        
            
        # let's see if we already have a key stored
        result = self.dbh_objects.select(['oid','fsobj'],oid=str(fsobj.oid))
        if result != []:
            raise FileSystemError('File already exists')
        else:
            
            # we have to create the tree and paths entries first
            self.dbh_tree.insert(str(fsobj.oid), str(fsobj.parent))
            self.dbh_paths.insert(str((fsobj.parent, fsobj.path)))
            
            self.dbh_objects.insert(str(fsobj.oid), dumps(fsobj), self.dbh_tree[len(self.dbh_tree)-1], self.dbh_paths[len(self.dbh_paths)-1])
            
            set[0].update(curr_oid=str(curr_oid))
                    

        return curr_oid
    
    def get_file(self, oid='', path=''):
        if oid:            
            set = self.dbh_objects.select(['oid', 'fsobj'], oid=str(oid))
            if set == []:
                f = None
            else:
                f = set[0].fsobj
            
        elif path:
            if path == '/':
                key = str((0,'/'))
            else:
                parent_oid = self.get_path_oid(os.path.split(path)[0])
                node_name = os.path.split(path)[1]
                key = str((parent_oid, node_name))
            
            # search for a match
            f = None                      
            for record in self.dbh_objects:
                print record.paths.key
                if record.paths.key == key:
                    f = record.fsobj
                    break
            
        else:
            f = None

        if f:
            f = loads(f)
                    
        return f
            
    def get_children(self, oid):            
        
        # lookup FSOBJECT with given oid
        set = self.dbh_objects.select(['oid', 'fsobj'], oid=str(oid))
        if set == []:
            return []
        
        file_array = []                
                        
        # lookup objects with parent oid
        set = self.dbh_tree.select(['oid', 'parent'], parent=str(oid))        
        for i in set:
            obj = self.dbh_objects.select(['oid', 'fsobj'], oid=str(i.oid))            
            if obj != []:                
                file_array.append(loads(obj[0].fsobj))
        
                            
        return file_array
                        

    def debug_print_db(self, db):
        pass
        
    def print_object_db(self):
        self.debug_print_db(self.dbh_objects)
        
    def delete_dir(self,oid):
        pass
                              
    def delete_file(self, oid):
        pass
        
    def rename_file(self,src,dest):
        pass     
        
    def update_file(self, fsobj):
        
        set = self.dbh_objects.select_for_update(['oid', 'fsobj'], oid=str(fsobj.oid))
        if set != []:
            set[0].update(fsobj=dumps(fsobj))                
        
    
    def add_block(self, block, serverid):
    
        f = self.get_file(oid=str(block.oid))
        if not f:
            raise FileSystemError('add_block: Object %s does not exist' % block.oid)
        
        key = str((long(block.oid),long(block.block_id))) #the key is both the oid and the block_id
        
        set1 = self.dbh_replicas.select_for_update(['key', 'replicas'], key=key)
        if set1 == []:            
            replicas = FSReplicas(block.oid, block.block_id)
        else:
            replicas = loads(set1[0].replicas)
        
        f.blocks[block.block_id] = block.version
        
        set2 = self.dbh_blocks.select_for_update(['key', 'blocks'], key=key)
        if set2 == []:
            b = None
        else:
            b = set2[0].blocks
        
        if b:
            b = loads(b)
            diff = block.size - b.size
        else:
            diff = block.size
        
        f.size += diff
                
        # update or insert the block record? (set2 came from dbh_blocks)
        if set2 == []:
            self.dbh_blocks.insert(key, dumps(block))
        else:
            set2[0].update(blocks=dumps(block))
        
        self.update_file(f)
        replicas.add(serverid, block.version)
        
        # update or insert the replica record? (set1 came from dbh_replicas)
        if set1 == []:
            self.dbh_replicas.insert(key,dumps(replicas))
        else:
            set1[0].update(replicas=dumps(replicas))


    def add_block_replica(self, block, serverid):
        f = self.get_file(str(block.oid))
        
        if not f:
            raise FileSystemError('add_block_replica: Object %s does not exist' % block.oid)
            
        key = str((long(block.oid), long(block.block_id)))  # same key format as add_block
        
        set = self.dbh_replicas.select_for_update(['key', 'replicas'], key=key)
        if set == []:        
            replicas = FSReplicas(block.oid, block.block_id)
        else:
            replicas = loads(set[0].replicas)
        
        replicas.add(serverid, block.version)
        
        # update or insert?
        if set == []:
            self.dbh_replicas.insert(key,dumps(replicas))
        else:
            set[0].update(replicas=dumps(replicas))

        
    def get_block_replicas(self, oid, block_id):
        key = str((long(oid), long(block_id)))
        
        set = self.dbh_replicas.select(['key', 'replicas'], key=key)
        if set == []:
            return None
        
        return loads(set[0].replicas)

      
    def get_block(self, oid, block_id):
        key = str((long(oid), long(block_id)))
        
        set = self.dbh_blocks.select(['key', 'blocks'], key=key)
        if set == []:
            return None
        
        return loads(set[0].blocks)        
        
    def print_replicas_db(self):
        self.debug_print_db(self.dbh_replicas)
        
    def close_fs_db(self):
        self.dbh_blocks.close()
        self.dbh_replicas.close()
        self.dbh_tree.close()
        self.dbh_id.close()
        self.dbh_paths.close()
        self.dbh_objects.close()
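
Example #18 keys its block and replica rows with the string form of an (oid, block_id) tuple, pickles the payload into a str field, and then either inserts a new row or updates the one returned by select_for_update. A condensed sketch of that key convention and upsert pattern, assuming dumps/loads come from pickle (the original import is not shown) and using a throwaway base name:

# Hedged sketch only: the pickle import, base name and payload are assumptions.
from pickle import dumps, loads
from buzhug import Base

replicas_demo = Base('replicas_demo')
replicas_demo.create(('key', str), ('replicas', str), mode='override')

def upsert_replica(oid, block_id, payload):
    # the key is the string form of the (oid, block_id) tuple, as in add_block
    key = str((long(oid), long(block_id)))
    rows = replicas_demo.select_for_update(['key', 'replicas'], key=key)
    if rows == []:
        replicas_demo.insert(key, dumps(payload))
    else:
        rows[0].update(replicas=dumps(payload))
    return key

k = upsert_replica(1, 0, {'servers': ['nodeA']})
stored = replicas_demo.select(['key', 'replicas'], key=k)
assert loads(stored[0].replicas) == {'servers': ['nodeA']}
replicas_demo.close()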
Example #19
0
class bookDBHandler:

    def __init__(self):

        self.userdir = os.getcwd()
        self.bookDB = Base(self.userdir + '/db/bookDB').open()
        #this selects the whole db, for now
        self.resultset = self.bookDB.select()

        #for loading the db in memory
        self.membooks = []

    def loadDBIn(self):
        for r in self.resultset:
            #load specific book's json from bookDB
            jsonObj = json.loads(r.json)
            dbID = r.__id__
            script_review_status = r.script_review_status
            #put all relevant info into convenient fields.
            #I'm sure there is a quicker way involving commas
            #with Python, will look at later
            status = jsonObj.get('status')
            rating_count = jsonObj.get('rating_count')
            #slug = jsonObj.get('slug')
            #language = jsonObj.get('language')
            author = jsonObj.get('author')
            rating_value = jsonObj.get('rating_value')
            title = jsonObj.get('title')
            modified = jsonObj.get('modified')
            #bust = jsonObj.get('bust')
            reviewed = jsonObj.get('reviewed')
            audience = jsonObj.get('audience')
            tags = jsonObj.get('tags')
            created = jsonObj.get('created')
            link = jsonObj.get('link')
            author_id = jsonObj.get('author_id')
            iD = jsonObj.get('ID')
            typex = jsonObj.get('type')
            pages = jsonObj.get('pages')
            categories = jsonObj.get('categories')
            rating_total = jsonObj.get('rating_total')
            slug = r.slug
            text = []
            #do the same for the pages
            for p in pages:
                text.append(p['text'])
            #nice little book object
            #print dbID
            #break
            elBook = book(dbID, status, rating_count,
                          author, rating_value, title, modified,
                          reviewed, audience, tags, created, link,
                          author_id, iD, typex, pages, categories,
                          rating_total, text, script_review_status, slug)
            self.membooks.append(elBook)

    def saveDB(self):
        #for now the only thing that
        #should be saved is the script_review_status
        for b in self.membooks:
            self.resultset[b.dbID].script_review_status = b.script_review_status

    def closeDB(self):
        self.bookDB.cleanup()
        self.bookDB.close()
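
saveDB above assigns the new script_review_status directly onto records returned by a plain select(); with buzhug-style bases, changes are normally written back through update() or select_for_update(), so a hedged sketch of an explicit write-back pass is shown below. The base name, field layout and helper function are illustrative only, not taken from the project.

# Hedged sketch only: base name, fields and helper are invented for illustration.
from buzhug import Base

books_demo = Base('books_demo')
books_demo.create(('json', str), ('script_review_status', str), mode='override')
books_demo.insert(json='{"title": "demo"}', script_review_status='pending')

def save_status(db, record_id, new_status):
    # fetch the row for update by its __id__, then write the field back
    rows = db.select_for_update(['script_review_status'],
                                '__id__ == rid', rid=record_id)
    if rows:
        db.update(rows[0], script_review_status=new_status)

save_status(books_demo, books_demo[0].__id__, 'reviewed')
assert books_demo[0].script_review_status == 'reviewed'
books_demo.cleanup()
books_demo.close()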