Beispiel #1
0
class Queue(object):
    """ Simple queue with PyDbLite backend.

    Records are stored as (id, item) rows where ids are dense integers
    starting at 0, so the highest id is always the most recently pushed
    item.
    """
    def __init__(self, queue_type=0, config=None):
        """Create the queue.

        queue_type -- currently unused; kept for interface compatibility.
        config     -- optional configuration source; when given, settings
                      should be copied from it (not implemented yet).
        """
        if config is not None:
            # copy config
            pass

        else:
            # Bug fix: this flag used to be stored as ``self.commit``, which
            # shadowed the commit() method below and made it uncallable
            # ('bool' object is not callable).
            self.autocommit = False
            self.db_filename = "/tmp/queue.pydb"
            self.mode = "override"
            self.queue = Base(self.db_filename)

    def create_queue(self):
        """Create (or, with mode='override', re-create) the backing table."""
        self.queue.create('id', 'item', mode=self.mode)
        self.queue.create_index('id')

        return None

    def push(self, item):
        """Store item under the next dense id (current length)."""
        self.queue.insert(self.length(), item)

        return None

    def pop(self):
        """Remove and return the record with the highest id, or None if empty.

        NOTE(review): this removes the most recently pushed record, i.e.
        LIFO (stack) order despite the class name -- confirm intent.
        """
        if not self.is_empty():
            last_id = self.length() - 1  # avoid shadowing builtin id()
            record = self.queue._id[last_id]
            self.queue.delete(record)

            return record
        else:
            return None

    def list(self):
        """Return the raw list of backing records."""
        return self.queue.records

    def length(self):
        """Return the number of records currently stored."""
        return len(self.queue)

    def is_empty(self):
        """Return True when no records are stored."""
        return self.length() == 0

    def commit(self):
        """Flush pending changes to disk, but only when autocommit is set."""
        if self.autocommit is True:
            self.queue.commit()

        return None
class ArticleDB:
    """class for persistent storage of articles.
    what is stored from each Article object is defined in Article.TO_SAVE
    """
    def __init__(self, dbfile, mode='open', autocommit=False):
        """Open/create the database file and index it on the INDEX_ON fields."""
        self.db = Base(dbfile)
        self.db.create(*Article.TO_SAVE, **{'mode': mode})
        self.db.create_index(*INDEX_ON)
        self.autocommit = autocommit

    def insertArticle(self, art):
        """insert article into database, with id consistency check"""
        present = []
        # 'is not None' instead of '!= None' (PEP 8 E711)
        if art.id_int is not None:
            present.extend(self.db._id_int[art.id_int])
        # unique record ids -- matches on several ids may hit the same record
        ids = list(set(rec['__id__'] for rec in present))
        # remove duplicate identical entries (coming from matches on more
        # than one id on the same article)
        present = [self.db[rec_id] for rec_id in ids]
        new = art
        for old in present: # FIXME HACK turns off consistency checking
            try:
                # article already present in database -- check if ids are
                # consistent, update it with new info from art
                new.mergeWith(Article.Article(record = old))
            except Exception:  # was py2-only 'except Exception, e'; e was unused
                logging.warning('inconsistency between %s and %s' % (new, Article.Article(old)))
        # replace the old (consistent) record(s) with the merged article
        self.db.delete(present)
        self.db.insert(**new.__dict__)

        if self.autocommit:
            # NOTE(review): self.commit is not defined in this class as shown;
            # presumably provided elsewhere -- confirm.
            self.commit()
        return True
Beispiel #3
0
"""Create or open the page database

Save this file as wikiBase.py to use a PyDBLite database
"""
import os
from PyDbLite import Base

db = Base(os.path.join(CONFIG.data_dir,'pages.pdl'))
db.create('name','content','admin','nbvisits','created',
    'version','lastmodif',mode="open")
db.create_index('name')
Beispiel #4
0
"""Create or open the page database
"""

from PyDbLite import Base

db = Base('pages.pdl').create('name',
                              'content',
                              'admin',
                              'nbvisits',
                              'created',
                              'version',
                              'lastmodif',
                              mode="open")
db.create_index('name')
Beispiel #5
0
    for i in range(ln):
        res += random.choice(string.letters)
    return res


def sentence(n, m):
    """Return a random sentence of 1..n words, each up to m letters long."""
    word_count = random.randint(1, n)
    return " ".join(word(m) for _ in range(word_count))


os.remove("blog")
db = Base("blog").create("parent", "title", "author", "text", "date")
db.create_index("parent")

nbthreads = 200
for i in range(nbthreads):
    # generate thread
    author = "pierre"
    title = sentence(10, 10)
    text = sentence(100, 10)
    date = datetime.datetime(
        random.randint(2004, 2006),
        random.randint(1, 12),
        random.randint(1, 28),
        random.randint(0, 23),
        random.randint(0, 59),
        random.randint(0, 59),
    )
Beispiel #6
0
def word(m):
    """Return a random word of 1..m ASCII letters."""
    # string.ascii_letters replaces string.letters, which was
    # locale-dependent and removed in Python 3.
    length = random.randint(1, m)
    return ''.join(random.choice(string.ascii_letters) for _ in range(length))

def sentence(n, m):
    """Build a random sentence of 1..n words, each up to m letters long."""
    word_count = random.randint(1, n)
    words = [word(m) for _ in range(word_count)]
    return ' '.join(words)
    
# Create the demo blog database; mode="override" wipes any existing data.
db = Base('blog.pdl').create('parent','title','author','text','date',mode="override")
db.create_index('parent')

# Number of discussion threads to generate.
nbthreads = 200
for i in range(nbthreads):
    # generate thread
    author = 'pierre'
    title = sentence(10,10)
    text = sentence(100,10)
    date = datetime.datetime(random.randint(2006,2008),random.randint(1,12),
        random.randint(1,28),random.randint(0,23),random.randint(0,59),
        random.randint(0,59))
    thread_id = db.insert(parent=-1,author=author,title=title,text=text,date=date)

    # generate comments
    nbcomments = random.randint(0,5)
    for i in range(nbcomments):
Beispiel #7
0
    for i in range(ln):
        res += random.choice(string.letters)
    return res


def sentence(n, m):
    """Return a sentence of 1..n random words, each at most m letters."""
    word_count = random.randint(1, n)
    return ' '.join(word(m) for _ in range(word_count))


# Start from a clean database file. The original unconditional
# os.remove('blog') raised OSError/FileNotFoundError on a fresh checkout.
if os.path.exists('blog'):
    os.remove('blog')
db = Base('blog').create('parent', 'title', 'author', 'text', 'date')
db.create_index('parent')

# Generate a fixed number of random demo threads.
nbthreads = 200
for i in range(nbthreads):
    # generate thread
    author = 'pierre'
    title = sentence(10, 10)
    text = sentence(100, 10)
    # Random timestamp; day capped at 28 so it is valid in every month.
    date = datetime.datetime(random.randint(2004, 2006), random.randint(1, 12),
                             random.randint(1, 28), random.randint(0, 23),
                             random.randint(0, 59), random.randint(0, 59))
    # parent=-1 marks a top-level thread (comments would point at thread_id)
    thread_id = db.insert(parent=-1,
                          author=author,
                          title=title,
                          text=text,
                          date=date)
Beispiel #8
0
class TagOrganizer(Organizer):
    """Organizer that exposes files under virtual per-tag directories."""

    def __init__(self, cache, category=None):
        self.tags = None
        self.category = category
        # Non-recursive: tagged files appear flat under their tag directory.
        Organizer.__init__(self, cache, False)

    def reset(self):
        """Recreate the tag table (mode='override' drops previous rows)."""
        if not self.tags:
            self.tags = Base(DB_FILE_TAGS)
        self.tags.create('realpath', 'category', 'tag', mode='override')
        self.tags.create_index('realpath')
        self.tags.create_index('category')
        Organizer.reset(self)

    def updatecache(self):
        self._generatetags()
        Organizer.updatecache(self)

    def _deletefromcache(self, path):
        """Drop every tag record attached to the file behind path."""
        realpath = self.realpath(path)
        logger.debug("_deletefromcache(%s)" % realpath)
        for tag in self.tags.get_index('realpath')[realpath]:
            self.tags.delete(tag)

    def deletefromcache(self, path):
        self._deletefromcache(path)
        Organizer.deletefromcache(self, path)

    def addtocache(self, path):
        # Re-tag from scratch so stale tags do not accumulate.
        self._deletefromcache(path)
        self.generatetags(self.realpath(path))
        Organizer.addtocache(self, path)

    def generatepaths(self, realpath):
        """Yield one virtual path per tag: /<tag>/<basename>."""
        for record in self.tags.get_index('realpath')[realpath]:
            yield os.path.join(os.sep, record['tag'],
                               os.path.basename(realpath))

    def dirlist(self, path):
        """The root directory lists the known tags; others list nothing."""
        if path == '/':
            return self.taglist(self.category)
        else:
            return []

    ############################################
    # Tag functions

    def _generatetags(self):
        for filename in filter(util.ignoretag, #IGNORE:W0141
                               self.cache.filelist()):
            self.generatetags(filename)

    def generatetags(self, filename):
        """Hook for subclasses: derive and store tags for filename."""
        pass

    def tag(self, realpath, category, tag):
        """Record a tag for realpath, ignoring None/empty tags."""
        logger.debug('tag(%s, %s, %s)' % (realpath, category, tag))
        # 'is not None' / '!=' instead of 'not ... == None' (PEP 8 E711)
        if tag is not None and tag != '':
            self.tags.insert(realpath, category, tag)

    def filelistbytags(self, category, tags):
        """Yield basenames of files in category carrying any of the tags."""
        self.refreshcache()
        for record in self.tags.get_index('category')[category]:
            if record['tag'] in tags:
                yield os.path.basename(record['realpath'])

    def taglist(self, category):
        """Return the unique tags recorded for category."""
        self.refreshcache()
        return util.unique([record['tag'] for record in
                            self.tags.get_index('category')[category]])
Beispiel #9
0
class Organizer(Cacheable):
    """
    This is the base class for organizers
    """

    def __init__(self, cache, recursive=True):
        Cacheable.__init__(self)
        self.cache = cache
        self.recursive = recursive
        self.transformed = None
        # Do not call reset here, it is called from fs.py when the fs is
        # already started

    def reset(self):
        """Recreate the path translation table and reset the cache."""
        if not self.transformed:
            self.transformed = Base(DB_TRANSFORMED)
        # mode='override' wipes any previous contents on every reset
        self.transformed.create('realpath', 'path', 'dirname', mode='override')
        self.transformed.create_index('realpath')
        self.transformed.create_index('path')
        self.transformed.create_index('dirname')
        self.cache.reset()
        Cacheable.reset(self)

    def updatecache(self):
        self.generateallpaths()

    def deletefromcache(self, path):
        """Remove a file and all of its transformed paths from the caches."""
        realpath = self.realpath(path)
        logger.debug("deletefromcache(%s)" % realpath)
        self.cache.deletefromcache(realpath)
        for item in self.transformed.get_index('realpath')[realpath]:
            self.transformed.delete(item)

    def addtocache(self, path):
        """Add a file to the caches unless its path is already known."""
        if not self.transformed.get_index('path')[path]:
            realpath = self.realpath(path)
            self.cache.addtocache(realpath)
            self.addfile(realpath)

    ############################################
    # Overwritable functions

    def dirlist(self, path): #IGNORE:W0613
        """
        Returns a list of (non-existent, generated, virtual) directories for a
        given path. Default implementation.
        """
        return []

    def generatepaths(self, realpath):
        """
        Generates paths for a given real path. A file can have more than one
        transformed path. Default implementation.
        """
        yield util.addtrailingslash(util.removeroot(realpath,
                                                    self.cache.filter.root))

    def generaterealpath(self, path):
        """
        Generates a real path for a inexistent path. Default implementation.
        """
        return os.path.join(self.cache.filter.root, path[1:])

    ############################################
    # General functions

    def generateallpaths(self):
        """
        Generates paths for all the files given by the cache and stores them
        in self.transformed
        """
        for realpath in self.cache.filelist():
            if self.recursive:
                # Add all sub-directories first
                currentpath = self.cache.filter.root

                for pathpart in util.pathparts(util.removeroot(realpath,
                                                  self.cache.filter.root)):
                    currentpath = os.path.join(currentpath, pathpart)
                    self.addfile(currentpath)
            else:
                self.addfile(realpath)

    def addfile(self, realpath):
        """
        Stores a file in self.transformed if not there already and returns the
        paths for that file in the proxy file system
        """
        logger.debug('addfile(%s)' % realpath)
        if not util.ignoretag(util.removeroot(realpath,
                                              self.cache.filter.root)):
            return []

        self.refreshcache()
        transformed = self.transformed.get_index('realpath')[realpath]

        if transformed:
            return (record['path'] for record in transformed)
        else:
            paths = []

            for path in self.paths(realpath):
                # Bump the "(n)" suffix until the path is unique.
                while self.transformed.get_index('path')[path]:
                    path = self.increasefilename(path)

                dirname = os.path.dirname(path)
                logger.debug('addfile(%s, %s, %s)' % (realpath, path, dirname))
                self.transformed.insert(realpath=realpath, path=path,
                                        dirname=dirname)
                paths.append(path)

            return paths

    def increasefilename(self, filename):
        """
        Returns a new filename in sequence. Called if the current filename
        already exists. This default implementation adds a "(1)" to the end if
        not present or increases that number by one.
        """
        root, ext = os.path.splitext(filename)

        num = 1
        matches = _INCREASE_REGEX.match(root)

        if matches is not None:
            num = int(matches.group(2)) + 1
            # Bug fix: use the stem stripped of its old "(n)" suffix.
            # Formatting with the unstripped root produced names like
            # "file(1)(2).txt" instead of "file(2).txt".
            root = matches.group(1)

        return '%s(%i)%s' % (root, num, ext)

    ############################################
    # General functions that read the cache

    def filelist(self, path):
        """
        Returns a list of directories and filenames in a list from cache
        """
        logger.debug('filelist(%s)' % path)
        self.refreshcache()

        for dirname in self.dirlist(path):
            yield dirname

        for record in self.transformed.get_index('dirname')[path]:
            yield os.path.basename(record['path'])

    def paths(self, realpath):
        """
        Generates or returns paths from cache for a given real path
        """
        self.refreshcache()
        paths = self.transformed.get_index('realpath')[realpath]

        if paths:
            return (path['path'] for path in paths)
        else:
            return (path for path in self.generatepaths(realpath))

    def realpath(self, path):
        """
        Returns the real path for a file given the path in the file system.
        """
        logger.debug('realpath(%s)' % path)
        self.refreshcache()
        realpaths = [r['realpath']
                     for r in self.transformed.get_index('path')[path]]

        realpath = None

        if realpaths:
            realpath = realpaths[0]
        elif path == '/':
            realpath = self.cache.filter.root
        elif path == util.addtrailingslash(util.ORIGINAL_DIR):
            realpath = '.'
        elif util.isspecial(path, 'original', True):
            realpath = os.path.join('.', os.sep.join(util.pathparts(path)[2:]))
        elif util.isspecial(path, 'root', True):
            realpath = os.path.join(self.cache.filter.root,
                                    os.sep.join(util.pathparts(path)[2:]))
        elif util.isspecial(path, 'commands'):
            realpath = '.'
        elif util.iscommand(path):
            realpath = getserver().tempfile.name
        else:
            realpath = self.generaterealpath(path)

        logger.debug('realpath(%s) = %s' % (path, realpath))
        return realpath

    ############################################
    # File system functions

    def getattr(self, path):
        """Return attributes from the real file behind path (or its dir)."""
        dirname = os.path.dirname(path)
        if util.removeroot(path, os.sep) in self.dirlist(dirname):
            return self.cache.getattr(self.realpath(dirname))
        else:
            return self.cache.getattr(self.realpath(path))

    def readdir(self, path, offset): #IGNORE:W0613
        """Yield fuse.Direntry objects: base entries plus path's files."""
        for filename in util.getbasefilelist():
            yield fuse.Direntry(filename)

        for filename in self._filelist(path):
            yield fuse.Direntry(filename)

    def _filelist(self, path):
        """Dispatch the special directories; fall back to the cached list."""
        filelist = []
        if path == util.addtrailingslash(util.ORIGINAL_DIR):
            filelist = ['original', 'root', 'commands']
        elif util.isspecial(path, 'root', True):
            filelist = os.listdir(self.realpath(path))
        elif util.isspecial(path, 'original', True):
            filelist = os.listdir(self.realpath(path))
        elif util.isspecial(path, 'commands'):
            filelist = CommandHandler.COMMANDS
        else:
            filelist = self.filelist(path)

        for filename in filelist:
            yield filename
Beispiel #10
0
class TagOrganizer(Organizer):
    """Organizer that exposes files under virtual per-tag directories."""

    def __init__(self, cache, category=None):
        self.tags = None
        self.category = category
        # Non-recursive: tagged files appear flat under their tag directory.
        Organizer.__init__(self, cache, False)

    def reset(self):
        """Recreate the tag table (mode='override' drops previous rows)."""
        if not self.tags:
            self.tags = Base(DB_FILE_TAGS)
        self.tags.create('realpath', 'category', 'tag', mode='override')
        self.tags.create_index('realpath')
        self.tags.create_index('category')
        Organizer.reset(self)

    def updatecache(self):
        self._generatetags()
        Organizer.updatecache(self)

    def _deletefromcache(self, path):
        """Drop every tag record attached to the file behind path."""
        realpath = self.realpath(path)
        logger.debug("_deletefromcache(%s)" % realpath)
        for tag in self.tags.get_index('realpath')[realpath]:
            self.tags.delete(tag)

    def deletefromcache(self, path):
        self._deletefromcache(path)
        Organizer.deletefromcache(self, path)

    def addtocache(self, path):
        # Re-tag from scratch so stale tags do not accumulate.
        self._deletefromcache(path)
        self.generatetags(self.realpath(path))
        Organizer.addtocache(self, path)

    def generatepaths(self, realpath):
        """Yield one virtual path per tag: /<tag>/<basename>."""
        for record in self.tags.get_index('realpath')[realpath]:
            yield os.path.join(os.sep, record['tag'],
                               os.path.basename(realpath))

    def dirlist(self, path):
        """The root directory lists the known tags; others list nothing."""
        if path == '/':
            return self.taglist(self.category)
        else:
            return []

    ############################################
    # Tag functions

    def _generatetags(self):
        for filename in filter(
                util.ignoretag,  #IGNORE:W0141
                self.cache.filelist()):
            self.generatetags(filename)

    def generatetags(self, filename):
        """Hook for subclasses: derive and store tags for filename."""
        pass

    def tag(self, realpath, category, tag):
        """Record a tag for realpath, ignoring None/empty tags."""
        logger.debug('tag(%s, %s, %s)' % (realpath, category, tag))
        # 'is not None' / '!=' instead of 'not ... == None' (PEP 8 E711)
        if tag is not None and tag != '':
            self.tags.insert(realpath, category, tag)

    def filelistbytags(self, category, tags):
        """Yield basenames of files in category carrying any of the tags."""
        self.refreshcache()
        for record in self.tags.get_index('category')[category]:
            if record['tag'] in tags:
                yield os.path.basename(record['realpath'])

    def taglist(self, category):
        """Return the unique tags recorded for category."""
        self.refreshcache()
        return util.unique([
            record['tag']
            for record in self.tags.get_index('category')[category]
        ])
Beispiel #11
0
class Organizer(Cacheable):
    """
    This is the base class for organizers
    """
    def __init__(self, cache, recursive=True):
        Cacheable.__init__(self)
        self.cache = cache
        self.recursive = recursive
        self.transformed = None
        # Do not call reset here, it is called from fs.py when the fs is
        # already started

    def reset(self):
        """Recreate the path translation table and reset the cache."""
        if not self.transformed:
            self.transformed = Base(DB_TRANSFORMED)
        # mode='override' wipes any previous contents on every reset
        self.transformed.create('realpath', 'path', 'dirname', mode='override')
        self.transformed.create_index('realpath')
        self.transformed.create_index('path')
        self.transformed.create_index('dirname')
        self.cache.reset()
        Cacheable.reset(self)

    def updatecache(self):
        self.generateallpaths()

    def deletefromcache(self, path):
        """Remove a file and all of its transformed paths from the caches."""
        realpath = self.realpath(path)
        logger.debug("deletefromcache(%s)" % realpath)
        self.cache.deletefromcache(realpath)
        for item in self.transformed.get_index('realpath')[realpath]:
            self.transformed.delete(item)

    def addtocache(self, path):
        """Add a file to the caches unless its path is already known."""
        if not self.transformed.get_index('path')[path]:
            realpath = self.realpath(path)
            self.cache.addtocache(realpath)
            self.addfile(realpath)

    ############################################
    # Overwritable functions

    def dirlist(self, path):  #IGNORE:W0613
        """
        Returns a list of (non-existent, generated, virtual) directories for a
        given path. Default implementation.
        """
        return []

    def generatepaths(self, realpath):
        """
        Generates paths for a given real path. A file can have more than one
        transformed path. Default implementation.
        """
        yield util.addtrailingslash(
            util.removeroot(realpath, self.cache.filter.root))

    def generaterealpath(self, path):
        """
        Generates a real path for a inexistent path. Default implementation.
        """
        return os.path.join(self.cache.filter.root, path[1:])

    ############################################
    # General functions

    def generateallpaths(self):
        """
        Generates paths for all the files given by the cache and stores them
        in self.transformed
        """
        for realpath in self.cache.filelist():
            if self.recursive:
                # Add all sub-directories first
                currentpath = self.cache.filter.root

                for pathpart in util.pathparts(
                        util.removeroot(realpath, self.cache.filter.root)):
                    currentpath = os.path.join(currentpath, pathpart)
                    self.addfile(currentpath)
            else:
                self.addfile(realpath)

    def addfile(self, realpath):
        """
        Stores a file in self.transformed if not there already and returns the
        paths for that file in the proxy file system
        """
        logger.debug('addfile(%s)' % realpath)
        if not util.ignoretag(util.removeroot(realpath,
                                              self.cache.filter.root)):
            return []

        self.refreshcache()
        transformed = self.transformed.get_index('realpath')[realpath]

        if transformed:
            return (record['path'] for record in transformed)
        else:
            paths = []

            for path in self.paths(realpath):
                # Bump the "(n)" suffix until the path is unique.
                while self.transformed.get_index('path')[path]:
                    path = self.increasefilename(path)

                dirname = os.path.dirname(path)
                logger.debug('addfile(%s, %s, %s)' % (realpath, path, dirname))
                self.transformed.insert(realpath=realpath,
                                        path=path,
                                        dirname=dirname)
                paths.append(path)

            return paths

    def increasefilename(self, filename):
        """
        Returns a new filename in sequence. Called if the current filename
        already exists. This default implementation adds a "(1)" to the end if
        not present or increases that number by one.
        """
        root, ext = os.path.splitext(filename)

        num = 1
        matches = _INCREASE_REGEX.match(root)

        if matches is not None:
            num = int(matches.group(2)) + 1
            # Bug fix: use the stem stripped of its old "(n)" suffix.
            # Formatting with the unstripped root produced names like
            # "file(1)(2).txt" instead of "file(2).txt".
            root = matches.group(1)

        return '%s(%i)%s' % (root, num, ext)

    ############################################
    # General functions that read the cache

    def filelist(self, path):
        """
        Returns a list of directories and filenames in a list from cache
        """
        logger.debug('filelist(%s)' % path)
        self.refreshcache()

        for dirname in self.dirlist(path):
            yield dirname

        for record in self.transformed.get_index('dirname')[path]:
            yield os.path.basename(record['path'])

    def paths(self, realpath):
        """
        Generates or returns paths from cache for a given real path
        """
        self.refreshcache()
        paths = self.transformed.get_index('realpath')[realpath]

        if paths:
            return (path['path'] for path in paths)
        else:
            return (path for path in self.generatepaths(realpath))

    def realpath(self, path):
        """
        Returns the real path for a file given the path in the file system.
        """
        logger.debug('realpath(%s)' % path)
        self.refreshcache()
        realpaths = [
            r['realpath'] for r in self.transformed.get_index('path')[path]
        ]

        realpath = None

        if realpaths:
            realpath = realpaths[0]
        elif path == '/':
            realpath = self.cache.filter.root
        elif path == util.addtrailingslash(util.ORIGINAL_DIR):
            realpath = '.'
        elif util.isspecial(path, 'original', True):
            realpath = os.path.join('.', os.sep.join(util.pathparts(path)[2:]))
        elif util.isspecial(path, 'root', True):
            realpath = os.path.join(self.cache.filter.root,
                                    os.sep.join(util.pathparts(path)[2:]))
        elif util.isspecial(path, 'commands'):
            realpath = '.'
        elif util.iscommand(path):
            realpath = getserver().tempfile.name
        else:
            realpath = self.generaterealpath(path)

        logger.debug('realpath(%s) = %s' % (path, realpath))
        return realpath

    ############################################
    # File system functions

    def getattr(self, path):
        """Return attributes from the real file behind path (or its dir)."""
        dirname = os.path.dirname(path)
        if util.removeroot(path, os.sep) in self.dirlist(dirname):
            return self.cache.getattr(self.realpath(dirname))
        else:
            return self.cache.getattr(self.realpath(path))

    def readdir(self, path, offset):  #IGNORE:W0613
        """Yield fuse.Direntry objects: base entries plus path's files."""
        for filename in util.getbasefilelist():
            yield fuse.Direntry(filename)

        for filename in self._filelist(path):
            yield fuse.Direntry(filename)

    def _filelist(self, path):
        """Dispatch the special directories; fall back to the cached list."""
        filelist = []
        if path == util.addtrailingslash(util.ORIGINAL_DIR):
            filelist = ['original', 'root', 'commands']
        elif util.isspecial(path, 'root', True):
            filelist = os.listdir(self.realpath(path))
        elif util.isspecial(path, 'original', True):
            filelist = os.listdir(self.realpath(path))
        elif util.isspecial(path, 'commands'):
            filelist = CommandHandler.COMMANDS
        else:
            filelist = self.filelist(path)

        for filename in filelist:
            yield filename