示例#1
0
 def reset(self):
     """Recreate the tags table and its indexes, then reset the base Organizer."""
     if not self.tags:
         self.tags = Base(DB_FILE_TAGS)
     # mode='override' discards any previous contents of the table
     self.tags.create('realpath', 'category', 'tag', mode='override')
     self.tags.create_index('realpath')
     self.tags.create_index('category')
     Organizer.reset(self)
示例#2
0
def host():
    """Render the overview page with every stock record in overall.db."""
    db = Base('overall.db')
    db.open()
    all_stocks = list(db)
    return render_template('all.html', stocks=all_stocks)
示例#3
0
def users():
    """Render the user list page with (name, pwd) pairs from alf.db."""
    db = Base('alf.db')
    db.open()
    credentials = [(record['name'], record['pwd']) for record in db]
    return render_template('user.html', users=credentials)
示例#4
0
 def reset(self):
     """Recreate the transformed-paths table and its indexes, then reset caches."""
     if not self.transformed:
         self.transformed = Base(DB_TRANSFORMED)
     # mode='override' discards any previous contents of the table
     self.transformed.create('realpath', 'path', 'dirname', mode='override')
     self.transformed.create_index('realpath')
     self.transformed.create_index('path')
     self.transformed.create_index('dirname')
     self.cache.reset()
     Cacheable.reset(self)
示例#5
0
def sell():
    """Render stocks flagged to sell whose date matches the current result."""
    manager = StockManager()  # constructed as in the original; result unused
    profile = Stock_Profile('2002')
    current = profile.get_result(0)[0]
    db = Base('overall.db')
    db.open()
    matches = [rec for rec in db if rec['sell'] == 1 and rec['date'] == current]
    return render_template('buy.html', stocks=matches)
示例#6
0
class KopeteLog():
    """Index Kopete XML chat logs into a PyDbLite database (Python 2 code)."""

    def __init__(self, directory=None):
        # Default to the standard KDE/Kopete log location under the home dir.
        if not directory:
                directory=os.path.join(os.path.expanduser("~"), 
                        ".kde/share/apps/kopete/logs")

        self.messages = Base('kopete.db')  # Database stuff - Initializing...
        self.messages.create('protocol', 'date', 'time', 'msgfrom', 'msgto',  'sender',  'inbound',  'nick',  'message',  mode='override')
        
        # Parse every discovered log file and load its messages.
        for file in self.searchLogs(directory):
            self.feedDatabase(file)
        
    def searchLogs(self,  dir):
        """Return paths of all .xml log files below *dir* (recursive walk)."""
        logfiles = []
        for (basepath,  dirnames,  filenames) in os.walk(dir):
            for child in filenames:
                if child.endswith (".xml"):
                    logfiles.append(os.path.join(basepath, child))
        return logfiles
        
    def feedDatabase(self,  filepath):
        """Parse one Kopete XML log and insert each message into the DB.

        The protocol is inferred from the file path; month/year come from
        the <head> element and are combined with each message's timestamp.
        """
        if 'WlmProtocol' in filepath:
            protocol = 'wlm'
        elif 'ICQProtocol' in filepath:
            protocol = 'icq'
        elif 'JabberProtocol' in filepath:
            protocol = 'jabber'
        else:
            protocol = 'unknown'
        xmllog = parse(filepath)
        for head in xmllog.getiterator('head'):
            for date in head.getiterator('date'):
                month=date.attrib['month']
                year=date.attrib['year']
            # contact marked type='myself' is the local account; any
            # contact without a 'type' attribute is treated as the peer
            for contact in head.getiterator('contact'):
                if contact.attrib.has_key('type'):
                    if contact.attrib['type'] == 'myself':
                        contactfrom = contact.attrib['contactId']
                else:
                    contactto = contact.attrib['contactId']
        for msg in xmllog.getiterator('msg'):
            nick = msg.attrib['nick']
            time = msg.attrib['time']
            inbound = msg.attrib['in']
            message = msg.text
            sender = msg.attrib['from']
            # 'time' holds "day HH:MM:SS" (note the %d in the format string),
            # so year/month from the header plus 'time' gives a full datetime
            date = datetime.strptime("%s;%s;%s" % 
                                     (year,  month,  msg.attrib['time']) , 
                                     "%Y;%m;%d %H:%M:%S")
            self.messages.insert(
                                 protocol=protocol, 
                                 date=date.strftime("%Y%m%d"), 
                                 time=date.strftime("%H:%M:%S"),  
                                 msgfrom=contactfrom,  msgto=contactto,
                                 sender=sender,  inbound=inbound,  nick=nick,
                                 message=message)
示例#7
0
def alf():
    """Log a user in against alf.db.

    Returns a greeting string on success and an error string otherwise.
    """
    db = Base('alf.db')
    db.open()
    user = request.form['Username']
    pwd = request.form['password']
    # alf.db records are created with fields 'name' and 'pwd' (see welcome()),
    # so the stored password lives under 'pwd', not 'password'.
    user_verf = [r['pwd'] for r in db if r['name'] == user]
    if len(user_verf) > 0:
        if user_verf[0] == pwd:
            return 'Hello, ' + user
        # known user but wrong password: report failure instead of falling
        # through and implicitly returning None
        return 'Who are you!!!'
    else:
        return 'Who are you!!!'
示例#8
0
class alf123:
    def __init__(self, sID, onePiece):
        today = datetime.date.today()
        self.sID = sID
        self.opp = onePiece
        self.historyDB = Base("F://alfStock//"+"alf123"+'.history')
        self.currentDB = Base("F://alfStock//"+"alf123"+'.current')
        self.historyDB.open()
        self.currentDB.open()
        db = Base("F://alfStock//"+str(today)+'.db')
        impactDB = Base("F://alfStock//"+str(today)+'.yv')
        if db.exists():
            db.open()

            recs = [ r for r in db if r['sid'] == self.sID ]
            if len(recs) > 0:

                self.history = recs[0]['history']
                self.sCurrent = recs[0]['current']
            else:
                print "already existed:  ", len(db)
                self.insertHistory(db)
        else:
            db.create('sid','history', 'current')
            self.insertHistory(db)

        if impactDB.exists():
            self.idb = impactDB
        else:
            impactDB.create('sid','UpOrDown')# U:up; D:down
            impactDB.open()
            impactDB.commit()
            self.idb = impactDB

    def insertHistory(self, db):
            db.open()
            self.history = self.stockHistoryGet()
            self.sCurrent = self.stockCurrent()
            db.insert(sid = self.sID, history = self.history, current = self.sCurrent)
            db.commit()

    def TTLUrlOpen(self, url, rc):
        retryCount = rc
        if retryCount > 2:
            pass
        else:
            try:
                response = urllib2.urlopen(url, None, 2.5)
            except URLError, e:
                    print "url"
            except socket.timeout:
                    print "Timed out!"
                    response = TTLUrlOpen(self, url, retryCount+1)
示例#9
0
class Queue(object):
    """Simple stack-like queue with a PyDbLite backend."""

    def __init__(self, queue_type=0, config=None):
        if config is not None:
            # copy config
            pass

        else:
            # bug fix: this flag was stored as 'self.commit', which shadowed
            # the commit() method below and made it uncallable on instances
            self.do_commit = False
            self.db_filename = "/tmp/queue.pydb"
            self.mode = "override"
            self.queue = Base(self.db_filename)

    def create_queue(self):
        """Create the backing table and its 'id' index."""
        self.queue.create('id', 'item', mode=self.mode)
        self.queue.create_index('id')

        return None

    def push(self, item):
        """Insert *item* under the next sequential id."""
        self.queue.insert(self.length(), item)

        return None

    def pop(self):
        """Remove and return the most recently pushed record, or None if empty."""
        if self.is_empty():
            return None
        last_id = self.length() - 1  # avoid shadowing the builtin 'id'
        record = self.queue._id[last_id]
        self.queue.delete(record)

        return record

    def list(self):
        """Return the backend's raw records mapping."""
        return self.queue.records

    def length(self):
        """Number of records currently stored."""
        return len(self.queue)

    def is_empty(self):
        return self.length() == 0

    def commit(self):
        """Flush the backing database to disk when committing is enabled."""
        if self.do_commit is True:
            self.queue.commit()

        return None
示例#10
0
 def reset(self):
     """Recreate the tags table and its indexes, then reset the base Organizer."""
     if not self.tags:
         self.tags = Base(DB_FILE_TAGS)
     # mode='override' discards any previous contents of the table
     self.tags.create('realpath', 'category', 'tag', mode = 'override')
     self.tags.create_index('realpath')
     self.tags.create_index('category')
     Organizer.reset(self)
示例#11
0
    def initDB(self):
        """Open the main and search databases and seed defaults when needed.

        Falls back to self.default() whenever the main DB is unreadable,
        holds fewer than 3 records, or carries an older schema version.
        """
        self.db = Base("maindb")
        self.db.create("id", "data", mode="open")

        self.searchdb = Base("searchdb")
        self.searchdb.create("module", "id", "timestamp", "data", mode="open")

        self.cache = tools.storage()

        try:
            if len(self.db) < 3:
                self.default()
        except Exception:
            # narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
            # are no longer swallowed
            self.default()

        # guard against a missing 'version' record instead of raising
        # IndexError on records[0]
        records = self.db(id="version")
        if not records or records[0]["data"] < self.db_version:
            self.default()
示例#12
0
 def reset(self):
     """Recreate the transformed-paths table and its indexes, then reset caches."""
     if not self.transformed:
         self.transformed = Base(DB_TRANSFORMED)
     # mode='override' discards any previous contents of the table
     self.transformed.create('realpath', 'path', 'dirname', mode='override')
     self.transformed.create_index('realpath')
     self.transformed.create_index('path')
     self.transformed.create_index('dirname')
     self.cache.reset()
     Cacheable.reset(self)
示例#13
0
def welcome():
    """Register the submitted user in alf.db and return a greeting."""
    db = Base('alf.db')
    db.create('name','pwd',mode="open")
    form = request.form
    user = form['Username']
    passwd = form['password1']
    db.insert(name=user,pwd=passwd)
    db.commit()
    return 'welcome ' + user
示例#14
0
 def __init__(self):
     """Open the event-passport database, creating it on first use."""
     self.EventPassport = Base('EventPassport/EventPassport.pdl')
     #check if the DB exists. If Yes, open, if not
     #create it:
     if not self.EventPassport.exists():
         self.genPDL()
     else:
         self.EventPassport.open()
     
     self.CleanEvents = CleanEvents.CleanData()
示例#15
0
    def __init__(self, queue_type=0, config=None):
        """Set up the queue backend; the *config* branch is not implemented yet."""
        if config is not None:
            # copy config
            pass

        else:
            # NOTE(review): an attribute named 'commit' will shadow any
            # commit() method defined on the class -- verify against the
            # full class definition.
            self.commit = False
            self.db_filename = "/tmp/queue.pydb"
            self.mode = "override"
            self.queue = Base(self.db_filename)
class ArticleDB:
    """class for persistent storage of articles.
    what is stored from each Article object is defined in Article.TO_SAVE
    """
    def __init__(self, dbfile, mode = 'open', autocommit = False):
        """Open *dbfile*, ensure the article schema and indexes exist."""
        self.db = Base(dbfile)
        self.db.create(*Article.TO_SAVE, **{'mode': mode})
        self.db.create_index(*INDEX_ON)
        self.autocommit = autocommit

    def insertArticle(self, art):
        """insert article into database, with id consistency check"""
        # collect records already stored under the same internal id
        present = []
        if art.id_int != None:
            present.extend(self.db._id_int[art.id_int])
#        if art.idZBL != None:
#            present.extend(self.db._idZBL[art.idZBL])
#        if art.idMR != None:        
#            present.extend(self.db._idMR[art.idMR])
        ids = list(set([rec['__id__'] for rec in present])) # unique ids
        present = [self.db[id] for id in ids] # remove duplicate identical entries (coming from matches on more than one id on the same article)
        new = art
        for old in present: # FIXME HACK turns off consistency checking
            try:
                new.mergeWith(Article.Article(record = old)) # article already present in database -- check if ids are consistent, update it with new info from art
            except Exception, e:
#                logging.error('inconsistent database contents (%i overlapping records); leaving database unchanged' % (len(present)))
                #logging.info('inconsistency between \n%s\n%s' % (new, Article.Article(old)))
                logging.warning('inconsistency between %s and %s' % (new, Article.Article(old)))
#                return False
        if len(present) == 0:
#            logging.debug('inserting a new article')
            pass
        else:
#            logging.debug('replacing %i old (consistent) record(s) for %s' % (len(present), new))
            pass
        # replace all overlapping records with the single merged article
        self.db.delete(present)
        id = self.db.insert(**new.__dict__)

        if self.autocommit:
            self.commit()
        return True
示例#17
0
    def __init__(self, directory=None):
        """Index all Kopete XML logs under *directory*.

        Defaults to the standard KDE/Kopete log location in the home dir.
        """
        if not directory:
            directory = os.path.join(os.path.expanduser("~"),
                                     ".kde/share/apps/kopete/logs")

        self.messages = Base('kopete.db')  # Database stuff - Initializing...
        # mode='override' rebuilds the message table from scratch each run
        self.messages.create('protocol',
                             'date',
                             'time',
                             'msgfrom',
                             'msgto',
                             'sender',
                             'inbound',
                             'nick',
                             'message',
                             mode='override')

        for file in self.searchLogs(directory):
            self.feedDatabase(file)
示例#18
0
    def __init__(self, directory=None):
        """Index all Kopete XML logs under *directory*.

        Defaults to the standard KDE/Kopete log location in the home dir.
        """
        if not directory:
                directory=os.path.join(os.path.expanduser("~"), 
                        ".kde/share/apps/kopete/logs")

        self.messages = Base('kopete.db')  # Database stuff - Initializing...
        # mode='override' rebuilds the message table from scratch each run
        self.messages.create('protocol', 'date', 'time', 'msgfrom', 'msgto',  'sender',  'inbound',  'nick',  'message',  mode='override')
        
        for file in self.searchLogs(directory):
            self.feedDatabase(file)
示例#19
0
 def getStock(self, sid):
     """Return an opened PyDbLite table for stock *sid*, creating it on first use."""
     sname = self.dbname(sid)
     # the Base object is constructed the same way in both branches, so
     # build it once; the '' placeholder variable was unnecessary
     db = Base(sname)
     if not os.path.isfile(sname):
         # first access for this stock: lay down the schema
         db.create('date','buySig','sellSig', 'state', 'buy', 'sell' ,mode="override")
     db.open()
     return db
示例#20
0
def get_db(config):
    """Return the users table using whichever DB engine *config* selects.

    Supports MySQL, SQLite and pure-Python PyDbLite backends; each table is
    created with mode="open" (reuse an existing table when present).
    """
    engine = get_db_engine(config)
    if engine == 'MySQL':
        # put host, user and password in local namespace
        # NOTE(review): exec-into-locals only works reliably on Python 2;
        # on Python 3 these names would not become local variables -- verify.
        db_settings = os.path.join(config.data_dir,'users_db.py')
        exec(open(db_settings).read())

        from PyDbLite import MySQL
        connection = MySQL.Connection(host,user,password)
        database = connection.create("karrigell_users",mode="open")
        table = MySQL.Table("users",database)
        table.create(("__id__","INTEGER PRIMARY KEY AUTO_INCREMENT"),
            ("host","TEXT"),
            ("login","TEXT"),("email","TEXT"),("password","TEXT"),
            ("role","TEXT"),("session_key","BLOB"),
            ("nb_visits","INTEGER"),
            ("last_visit","TIMESTAMP"),
            mode="open")
        return table
    elif engine == 'SQLite':
        from PyDbLite import SQLite
        conn = SQLite.Database(os.path.join(config.data_dir,
            "users.sqlite"))
        table = SQLite.Table("users",conn)
        table.create(("host","TEXT"),
            ("login","TEXT"),("email","TEXT"),("password","TEXT"),
            ("role","TEXT"),("session_key","BLOB"),
            ("nb_visits","INTEGER"),
            ("last_visit","BLOB"),
            mode="open")
        # SQLite stores the timestamp as a BLOB; mark it for conversion
        table.is_datetime('last_visit')
        return table
    elif engine == 'PyDbLite':
        # if nothing else works, use PyDbLite
        from PyDbLite import Base
        db = Base(os.path.join(config.data_dir,"users.pdl"))
        db.create("host","login","email","password","role","session_key",
            "nb_visits","last_visit",mode="open")
        return db
示例#21
0
 def __init__(self):
     """Open the analysis-results database (creating it on first use) and helpers."""
     self.AnalysisResults = Base('AnalysisResults/AnalysisResults.pdl')
     
     #check if the DB exists. If Yes, open, if not
     #create it:
     if not self.AnalysisResults.exists():
         self.genPDL()
     else:
         self.AnalysisResults.open()
     
     self.PassportOffice = EventPassport.EventPassportOffice()
     self.LoadWaveform = WaveformLoader.LoadWave()
     self.AcousticAnalysis = AnalysisTools.AcousticsAnalysis()
示例#22
0
def makeDB(read, write, startTime = "2010-01-01 00:00:00", \
            endTime = "2010-01-01 00:10:00"):
    """Index sensor event days into a PyDbLite database (Python 2 code).

    Reads 'sensorN.txt' files from directory *read*, keeps timestamps
    between *startTime* and *endTime*, and inserts one row per
    (sensor, day) into database file *write*.
    """
    db = Base(write)

    startTime = calc.datetonumber(startTime)
    endTime = calc.datetonumber(endTime)

    #Day comes from day of the week.  It is a number from 0 to 6.
    #0 = Monday 6 = Sunday.
    db.create('sensor', 'date', 'weekday', 'index', mode="override")
    db.open()
    allData = {}

    for i in range(len(bbdata.allSensors)):
        s = bbdata.allSensors[i]
        data = []
        print "Parsing sensor " + str(s)
        try:
            sString = read + "sensor" + str(s) + ".txt"

            f = open(sString).readlines()
            oldD = None
            for timeLine in f:
                tmp = timeLine.split()
                # columns 1 and 2 hold the date and time strings
                tmp = tmp[1] + " " + tmp[2]
                #tmp = tmp[0] + " " + tmp[1]
                d = datetime.datetime.strptime(tmp, "%Y-%m-%d %H:%M:%S")
                foo = calc.datetonumber(d)

                if foo >= startTime and foo <= endTime:
                    data.append(calc.datetonumber(d))

                    # first event of a new day: record an index row
                    if d.toordinal() != oldD:
                        #Add to database
                        db.insert(s, d.toordinal(), d.weekday(), len(data) - 1)
                        oldD = d.toordinal()
                        print "   " + str(d)
        except Exception, e:
            print "Except:" + str(e)
            pass

        # NOTE(review): allData is built but never returned, and db is never
        # committed here -- verify this is handled by the caller.
        allData[s] = data
示例#23
0
 def __init__(self, RunNumber):
     """Open (or create) the pressures database for run *RunNumber*."""
     #property self.RunNumber assigned.
     #This is typecasted to string for manipulation
     self.RunNumber = str(RunNumber)
     #property self.PyDB -> Database for pressures
     self.PyDB = Base('pressures/'+self.RunNumber+'.dbl')
     #check if the DB exists. If Yes, open, if not
     #create it:
     if not self.PyDB.exists():
         self.genPDL()
     else:
         self.PyDB.open()
         
     #Define the time iteration between bubbles minimum threshold
     #Remember, each iteration is 1/10th second!
     #Iter must be integer!
     minSecondsBetweenBubbles = 4
     self.minIterBetweenBubbles = int(minSecondsBetweenBubbles*10)
示例#24
0
def makeDB(read, write, startTime = "2010-01-01 00:00:00", \
            endTime = "2010-01-01 00:10:00"):
    """Index sensor event days into a PyDbLite database (Python 2 code).

    Reads 'sensorN.txt' files from directory *read*, keeps timestamps
    between *startTime* and *endTime*, and inserts one row per
    (sensor, day) into database file *write*.
    """
    db = Base(write)

    startTime = calc.datetonumber(startTime)
    endTime = calc.datetonumber(endTime)
    
    #Day comes from day of the week.  It is a number from 0 to 6.
    #0 = Monday 6 = Sunday.
    db.create('sensor', 'date', 'weekday', 'index', mode="override")
    db.open()
    allData = {}
    
    for i in range(len(bbdata.allSensors)):
        s = bbdata.allSensors[i]
        data = []
        print "Parsing sensor " + str(s)
        try:
            sString = read + "sensor" + str(s) + ".txt"
        
            f = open(sString).readlines()
            oldD = None
            for timeLine in f:
                tmp = timeLine.split()
                # columns 1 and 2 hold the date and time strings
                tmp = tmp[1] + " " + tmp[2]
                #tmp = tmp[0] + " " + tmp[1]
                d = datetime.datetime.strptime(tmp, "%Y-%m-%d %H:%M:%S")
                foo = calc.datetonumber(d)
                
                if foo >= startTime and foo <= endTime:
                    data.append(calc.datetonumber(d))
                
                    # first event of a new day: record an index row
                    if d.toordinal() != oldD:
                        #Add to database
                        db.insert(s, d.toordinal(), d.weekday(), len(data) - 1)
                        oldD = d.toordinal()
                        print "   " + str(d)
        except Exception, e:
            print "Except:" + str(e)
            pass
        
        # NOTE(review): allData is built but never returned, and db is never
        # committed here -- verify this is handled by the caller.
        allData[s] = data
示例#25
0
from operator import itemgetter
import enchant
from PyDbLite import Base
import jellyfish
import subprocess

testfile = str(sys.argv[1])  # input file to process, taken from the CLI
jarfile = 'ark-tweet-nlp-0.3.2.jar'  # Owoputi et al. tweet tagger jar

logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
service_url = 'https://www.googleapis.com/freebase/v1/search'
# tokens to drop: weekday names, month names, and 'it'
unnecessary = ['sunday','monday','tuesday','wednesday','thursday','friday','saturday','january','february','march','april','may','june','july','august','september','october','november','december','it']
freebase_link = 'http://www.freebase.com'
# pre-trained Freebase skip-gram vectors; loaded eagerly at import time
model1 = word2vec.Word2Vec.load_word2vec_format('freebase-vectors-skipgram1000-en.bin.gz', binary=True)
chant = enchant.Dict("en_US")
# presumably a word-cluster lookup table (PyDbLite) -- TODO confirm
bcluster = Base('bcluster.pdl')
bcluster.open()
api_key = 'AIzaSyAW9RPEnSFbJfGsuVXSiTV_xbMySmJfGMw'
mslink = 'http://weblm.research.microsoft.com/rest.svc/bing-body/2013-12/3/jp?u=4e9af3bb-4cd3-4e29-a10b-e15754d454cb'

#Tokenize and Tag individual tokens using Owoputi et al. tagger
#Tokenize and Tag individual tokens using Owoputi et al. tagger
def tokenize():
    """Run the tagger jar over *testfile* and return an iterator of raw stdout lines."""
    # Pass argv as a list and drop shell=True: testfile comes from
    # sys.argv, and interpolating it into a shell string allows command
    # injection (and breaks on quotes/spaces in the path).
    cmd = ['java', '-XX:ParallelGCThreads=2', '-Xmx500m', '-jar', jarfile, testfile]
    process = subprocess.Popen(cmd,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT)
    return  iter(process.stdout.readline, b'')


#Collect ngrams from the segments
def ngrams(input, n):
示例#26
0
from PyDbLite import Base

# NOTE(review): Base() is called without a filename here; PyDbLite's Base
# normally takes a path argument -- confirm this snippet runs as intended.
db = Base()
db.create("Acc", "Gyro")

# insert one sample row (values stored as strings)
db.insert(Acc="136", Gyro="162")

print(db)
示例#27
0
File: smallfu.py  Project: alf123/alf123
def resetHisDB():
    """Rebuild the shared '.history' and '.current' stock tables."""
    for suffix in ('.history', '.current'):
        table = Base("F://alfStock//"+"alf123"+suffix)
        # Edate := started day not end day
        table.create('sid','Edate', 'length')
        table.open()
        table.commit()
示例#28
0
testfile = str(sys.argv[1])  # input file to process, taken from the CLI
jarfile = 'ark-tweet-nlp-0.3.2.jar'  # Owoputi et al. tweet tagger jar

logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s',
                    level=logging.INFO)
service_url = 'https://www.googleapis.com/freebase/v1/search'
# tokens to drop: weekday names, month names, and 'it'
unnecessary = [
    'sunday', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday',
    'saturday', 'january', 'february', 'march', 'april', 'may', 'june', 'july',
    'august', 'september', 'october', 'november', 'december', 'it'
]
freebase_link = 'http://www.freebase.com'
# pre-trained Freebase skip-gram vectors; loaded eagerly at import time
model1 = word2vec.Word2Vec.load_word2vec_format(
    'freebase-vectors-skipgram1000-en.bin.gz', binary=True)
chant = enchant.Dict("en_US")
# presumably a word-cluster lookup table (PyDbLite) -- TODO confirm
bcluster = Base('bcluster.pdl')
bcluster.open()
api_key = 'AIzaSyAW9RPEnSFbJfGsuVXSiTV_xbMySmJfGMw'
mslink = 'http://weblm.research.microsoft.com/rest.svc/bing-body/2013-12/3/jp?u=4e9af3bb-4cd3-4e29-a10b-e15754d454cb'


#Tokenize and Tag individual tokens using Owoputi et al. tagger
def tokenize():
    """Run the tagger jar over *testfile* and return an iterator of raw stdout lines."""
    # Pass argv as a list and drop shell=True: testfile comes from
    # sys.argv, and interpolating it into a shell string allows command
    # injection (and breaks on quotes/spaces in the path).
    cmd = ['java', '-XX:ParallelGCThreads=2', '-Xmx500m', '-jar', jarfile, testfile]
    process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    return iter(process.stdout.readline, b'')

示例#29
0
    def __init__(self, sID, onePiece):
        """Load or build today's cached record for stock *sID* (Python 2 code)."""
        today = datetime.date.today()
        self.sID = sID
        self.opp = onePiece
        self.historyDB = Base("F://alfStock//"+"alf123"+'.history')
        self.currentDB = Base("F://alfStock//"+"alf123"+'.current')
        self.historyDB.open()
        self.currentDB.open()
        db = Base("F://alfStock//"+str(today)+'.db')
        impactDB = Base("F://alfStock//"+str(today)+'.yv')
        if db.exists():
            db.open()

            # reuse today's cached record for this stock when present
            recs = [ r for r in db if r['sid'] == self.sID ]
            if len(recs) > 0:

                self.history = recs[0]['history']
                self.sCurrent = recs[0]['current']
            else:
                # NOTE(review): this message prints when the record was NOT
                # found -- the wording looks inverted; confirm intent
                print "already existed:  ", len(db)
                self.insertHistory(db)
        else:
            db.create('sid','history', 'current')
            self.insertHistory(db)

        if impactDB.exists():
            self.idb = impactDB
        else:
            impactDB.create('sid','UpOrDown')# U:up; D:down
            impactDB.open()
            impactDB.commit()
            self.idb = impactDB
示例#30
0
File: smallfu.py  Project: alf123/alf123
def resetHisDB():
    """Recreate the shared '.history' and '.current' stock tables."""
    historyDB = Base("F://alfStock//" + "alf123" + '.history')
    historyDB.create('sid', 'Edate',
                     'length')  #Edate := started day not end day
    historyDB.open()
    historyDB.commit()
    currentDB = Base("F://alfStock//" + "alf123" + '.current')
    currentDB.create('sid', 'Edate', 'length')
    currentDB.open()
    currentDB.commit()
示例#31
0
File: songDb.py  Project: jhjguxin/PyCDC
from PyDbLite import Base
db = {}
# Open the songs table; build the whole song base first if the .skl
# file does not exist yet.
try:
    db['chansons'] = Base('chansons.skl').open()
except IOError:
    import createSongBase
    createSongBase.createBase()
    db['chansons'] = Base('chansons.skl').open()

# The remaining tables are assumed to already exist on disk.
db['recueils'] = Base('recueils.skl').open()
db['dialectes'] = Base('dialectes.skl').open()
db['genres'] = Base('genres.skl').open()
db['chansons_par_recueil'] = Base('chansons_par_recueil.skl').open()
db['chansons_par_dialecte'] = Base('chansons_par_dialecte.skl').open()
    
    
示例#32
0
    ln = random.randint(1, m)
    for i in range(ln):
        res += random.choice(string.letters)
    return res


def sentence(n, m):
    """Return 1..n random words (each of max length m) joined by spaces."""
    count = random.randint(1, n)
    return " ".join(word(m) for _ in range(count))


os.remove("blog")
db = Base("blog").create("parent", "title", "author", "text", "date")
db.create_index("parent")

nbthreads = 200
for i in range(nbthreads):
    # generate thread
    author = "pierre"
    title = sentence(10, 10)
    text = sentence(100, 10)
    date = datetime.datetime(
        random.randint(2004, 2006),
        random.randint(1, 12),
        random.randint(1, 28),
        random.randint(0, 23),
        random.randint(0, 59),
        random.randint(0, 59),
示例#33
0
import os
from PyDbLite import Base

# Users and news tables live under ./data relative to the working directory.
_data_dir = os.path.join(os.getcwd(), 'data')
db = {
    'users': Base(os.path.join(_data_dir, 'users')),
    'news': Base(os.path.join(_data_dir, 'news')),
}
db['users'].create('login', 'password', 'bgcolor', 'fontfamily', mode="open")
db['news'].create('login', 'title', 'body', 'date', mode="open")
示例#34
0
class TagOrganizer(Organizer):
    """Organizer that maps real files to virtual tag directories via PyDbLite."""

    def __init__(self, cache, category=None):
        # tags DB is created lazily in reset()
        self.tags = None
        self.category = category
        Organizer.__init__(self, cache, False)

    def reset(self):
        """Recreate the tags table and its indexes, then reset the base Organizer."""
        if not self.tags:
            self.tags = Base(DB_FILE_TAGS)
        self.tags.create('realpath', 'category', 'tag', mode='override')
        self.tags.create_index('realpath')
        self.tags.create_index('category')
        Organizer.reset(self)

    def updatecache(self):
        """Regenerate all tags before refreshing the base cache."""
        self._generatetags()
        Organizer.updatecache(self)

    def _deletefromcache(self, path):
        """Drop every tag record associated with *path*'s real file."""
        realpath = self.realpath(path)
        logger.debug("_deletefromcache(%s)" % realpath)
        for tag in self.tags.get_index('realpath')[realpath]:
            self.tags.delete(tag)

    def deletefromcache(self, path):
        self._deletefromcache(path)
        Organizer.deletefromcache(self, path)

    def addtocache(self, path):
        # re-tag from scratch: remove stale records, then regenerate
        self._deletefromcache(path)
        self.generatetags(self.realpath(path))
        Organizer.addtocache(self, path)

    def generatepaths(self, realpath):
        """Yield one virtual path per tag attached to *realpath*."""
        for record in self.tags.get_index('realpath')[realpath]:
            yield os.path.join(os.sep, record['tag'],
                               os.path.basename(realpath))

    def dirlist(self, path):
        """Top level lists the tags of this organizer's category; no nesting."""
        if path == '/':
            return self.taglist(self.category)
        else:
            return []

    ############################################
    # Tag functions

    def _generatetags(self):
        # tag every cached file except those filtered out by util.ignoretag
        for filename in filter(
                util.ignoretag,  #IGNORE:W0141
                self.cache.filelist()):
            self.generatetags(filename)

    def generatetags(self, filename):
        """Hook for subclasses: derive and insert tags for *filename*."""
        pass

    def tag(self, realpath, category, tag):
        """Attach *tag* (skipping None/empty) to *realpath* under *category*."""
        logger.debug('tag(%s, %s, %s)' % (realpath, category, tag))
        if not tag == None and not tag == '':
            self.tags.insert(realpath, category, tag)

    def filelistbytags(self, category, tags):
        """Yield base names of files carrying any of *tags* in *category*."""
        self.refreshcache()
        for record in self.tags.get_index('category')[category]:
            if record['tag'] in tags:
                yield os.path.basename(record['realpath'])

    def taglist(self, category):
        """Return the unique tags recorded under *category*."""
        self.refreshcache()
        return util.unique([
            record['tag']
            for record in self.tags.get_index('category')[category]
        ])
 def __init__(self, dbfile, mode = 'open', autocommit = False):
     """Open *dbfile*, ensure the article schema and indexes exist."""
     self.db = Base(dbfile)
     # fields come from Article.TO_SAVE; mode is passed through to PyDbLite
     self.db.create(*Article.TO_SAVE, **{'mode': mode})
     self.db.create_index(*INDEX_ON)
     self.autocommit = autocommit
示例#36
0
class SignalManip:
    """Average raw waveforms and FFTs across stored acoustic events.

    Results are cached as binary files under AnalysisResults/.
    Python 2 code.
    """
    
    #usual stuff in init
    def __init__(self):
        self.AnalysisResults = Base('AnalysisResults/AnalysisResults.pdl')
        
        #check if the DB exists. If Yes, open, if not
        #create it:
        if not self.AnalysisResults.exists():
            self.genPDL()
        else:
            self.AnalysisResults.open()
        
        self.PassportOffice = EventPassport.EventPassportOffice()
        self.LoadWaveform = WaveformLoader.LoadWave()
        self.AcousticAnalysis = AnalysisTools.AcousticsAnalysis()
        
        
    #If DB doesnt exist, make it!
    def genPDL(self):
        """Create the results table on first use."""
        #Create the PDL file for database
        self.AnalysisResults.create('EventID','PVar', mode = "open")
    
    #Function to generate signal average
    def genSignalAverage(self, EventType = "Neutron"):
        """Average the raw waveforms of all events of *EventType* and cache to disk."""
        #get all Events of type EventType
        EventList = []
        EventList = self.PassportOffice.CheckPassport_Runtype(EventType)
        
        SignalAvgMem = numpy.zeros((50000))
        
        for Event in EventList:
            #Load Raw data
            raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
            SignalAvgMem += raw_data[0]
        
        # NOTE(review): divides by len(EventList); raises ZeroDivisionError-like
        # behavior if no events of this type exist -- verify callers.
        SignalAvgMem /= len(EventList)
        
        ####Storage#####
        Storage = open("AnalysisResults/signalAvg."+EventType+".binary", "wb")
        SignalAvgMem.tofile(Storage, format="%f")
        Storage.close()
        
        return SignalAvgMem
    
    #function to generate FFT avergae
    def genFFTAverage(self, EventType="Neutron", doWin = False, winStart=10000, winEnd=30000, Fs = 1250000.0):
        """Average the FFTs of all events of *EventType*.

        When *doWin* is set, only samples winStart:winEnd are kept before
        the FFT. Returns (average FFT, frequency bins) and caches both.
        """
        #get all Events of type EventType
        EventList = []
        EventList = self.PassportOffice.CheckPassport_Runtype(EventType)
        
        
        
        
        FFTAvgMem = numpy.zeros((50000))
        FFTAvgBins = numpy.fft.fftfreq(len(FFTAvgMem), 1.0/Fs)
        
        
        for Event in EventList:
            #Load Raw data
            raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
            
            
             
            
            ####SignalWindow####
            if doWin:
                print "is it"
                TempSigMem = numpy.zeros((50000))
                TempSigMem[winStart:winEnd] = raw_data[0][winStart:winEnd]
                R_data = TempSigMem
            else:
                R_data = raw_data[0]
            
            #
            
            FFTs = numpy.fft.fft(R_data)
            
            #for i in range(5000,6000):
            #pwrspec = abs(numpy.mean(FFTs[5000:6000]))
            #if pwrspec>10:
            #    print pwrspec, Event
            
            # NOTE(review): FFTs is complex while FFTAvgMem is a float
            # array; this accumulation may drop/raise on the imaginary
            # part depending on the numpy version -- verify.
            FFTAvgMem += FFTs
        
        
        
        FFTAvgMem /= len(EventList)
        
        
        
        ####Storage#####
        #FFT#
        Storage = open("AnalysisResults/FFTAvg."+EventType+"win"+str(doWin)+".binary", "wb")
        FFTAvgMem.tofile(Storage, format="%f")
        Storage.close()
        #FFT FREQS#
        Storage = open("AnalysisResults/FFTAvgBins."+EventType+"win"+str(doWin)+".binary", "wb")
        FFTAvgBins.tofile(Storage, format="%f")
        Storage.close()
        
        ####Plotting#####
        
        return FFTAvgMem, FFTAvgBins
    
    #Functions to show the Average values (load from cache)
    def getSignalAverage(self, EventType = "Neutron"):
        """Return the cached signal average for *EventType*, generating it if absent."""
        Storage = "AnalysisResults/signalAvg."+EventType+".binary"
        
        if not os.path.exists(Storage):
            data = self.genSignalAverage(EventType)
        else:
            data = numpy.fromfile(Storage)
            
        
        return data
    
    #function to show average FFT
    def getFFTAverage(self, EventType = "Neutron", doWin = False,):
        """Return (FFT average, frequency bins) for *EventType*; always regenerated."""
        Storage_FFT = "AnalysisResults/FFTAvg."+EventType+"win"+str(doWin)+".binary"
        Storage_FFTfreq = "AnalysisResults/FFTAvgBins."+EventType+"win"+str(doWin)+".binary"
        
        #Broken. Needs param check and hassles.
        #if os.path.exists(Storage_FFT) and os.path.exists(Storage_FFTfreq) :
        #    data_FFT = numpy.fromfile(Storage_FFT)
        #    data_FFTFreq = numpy.fromfile(Storage_FFTfreq)
        #else:
        #    data_FFT, data_FFTFreq = self.genFFTAverage(EventType, doWin)
        
        # cache loading above is disabled, so recompute every call
        data_FFT, data_FFTFreq = self.genFFTAverage(EventType, doWin)
        
        return data_FFT, data_FFTFreq
示例#37
0
class KopeteLog():
    """Imports Kopete XML chat logs into a PyDbLite database.

    Walks the Kopete log directory, parses every .xml log file found and
    stores one record per message in 'kopete.db'.
    """

    def __init__(self, directory=None):
        # Fall back to the standard KDE Kopete log location.
        if not directory:
            directory = os.path.join(os.path.expanduser("~"),
                                     ".kde/share/apps/kopete/logs")

        # Database stuff - Initializing...
        self.messages = Base('kopete.db')
        self.messages.create('protocol',
                             'date',
                             'time',
                             'msgfrom',
                             'msgto',
                             'sender',
                             'inbound',
                             'nick',
                             'message',
                             mode='override')

        for logfile in self.searchLogs(directory):
            self.feedDatabase(logfile)

    def searchLogs(self, dir):
        """Return the paths of every .xml log file below *dir* (recursive)."""
        return [os.path.join(base, name)
                for (base, _subdirs, names) in os.walk(dir)
                for name in names
                if name.endswith(".xml")]

    def feedDatabase(self, filepath):
        """Parse one Kopete XML log file and insert its messages."""
        # Guess the IM protocol from markers in the log file's path.
        protocol = 'unknown'
        for marker, proto in (('WlmProtocol', 'wlm'),
                              ('ICQProtocol', 'icq'),
                              ('JabberProtocol', 'jabber')):
            if marker in filepath:
                protocol = proto
                break

        xmllog = parse(filepath)

        # The <head> element carries the log's year/month and the two
        # contacts taking part in the conversation.
        for head in xmllog.getiterator('head'):
            for date in head.getiterator('date'):
                month = date.attrib['month']
                year = date.attrib['year']
            for contact in head.getiterator('contact'):
                if 'type' in contact.attrib:
                    if contact.attrib['type'] == 'myself':
                        contactfrom = contact.attrib['contactId']
                else:
                    contactto = contact.attrib['contactId']

        # One <msg> element per message: combine the head's year/month with
        # the per-message "day hh:mm:ss" to build a full timestamp.
        for msg in xmllog.getiterator('msg'):
            stamp = datetime.strptime(
                "%s;%s;%s" % (year, month, msg.attrib['time']),
                "%Y;%m;%d %H:%M:%S")
            self.messages.insert(protocol=protocol,
                                 date=stamp.strftime("%Y%m%d"),
                                 time=stamp.strftime("%H:%M:%S"),
                                 msgfrom=contactfrom,
                                 msgto=contactto,
                                 sender=msg.attrib['from'],
                                 inbound=msg.attrib['in'],
                                 nick=msg.attrib['nick'],
                                 message=msg.text)
示例#38
0
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from PyDbLite import Base
from facepy import *
import time
import winsound
import getpass

db = Base('grades.db')
db.create('grade', 'course', mode='open')

graph = GraphAPI("my_facebook_api_key")

username = raw_input("Username: "******"Password: "******"http://ent.unr-runn.fr/uPortal/")
		select = Select(driver.find_element_by_name('user_idp'))
		select.select_by_visible_text('ENSICAEN')
		driver.find_element_by_id('IdPList').submit()

		driver.find_element_by_id('username').send_keys(username)
		driver.find_element_by_id('password').send_keys(password)
示例#39
0
class doAnalysis:
    """Acoustic-signal analysis driver.

    Computes PVar histograms and averaged signal / FFT plots for the
    Neutron and Alpha event populations.  Per-event results are cached in
    a PyDbLite database under AnalysisResults/.
    """

    #What do we need in init now? Ah, the analysis cache DB
    def __init__(self):

        self.AnalysisResults = Base('AnalysisResults/AnalysisResults.pdl')

        #check if the DB exists. If Yes, open, if not
        #create it:
        if not self.AnalysisResults.exists():
            self.genPDL()
        else:
            self.AnalysisResults.open()

        # Collaborators: event metadata, raw waveform loading and DSP.
        self.PassportOffice = EventPassport.EventPassportOffice()
        self.LoadWaveform = WaveformLoader.LoadWave()
        self.AcousticAnalysis = AnalysisTools.AcousticsAnalysis()

        self.SignalManip = SignalManip.SignalManip()

    #If DB doesnt exist, make it!
    def genPDL(self):
        """Create the PDL cache database: one record per analysed event."""
        self.AnalysisResults.create('EventID', 'PVar', mode="open")

    #Gen PVAr of the Signals!
    def genPVAR(self):
        '''
        Compute PVar for every Neutron and Alpha event and overlay
        normalised histograms of the two populations.

        Filter Params.

        doFilter -> Filter on or OFF
        lowfreq_HP -> Low frequency High Pass
        highFreq_LP -> High Frequency low pass

        Set both for a band pass filter.

        Filter Types:
        ApplyFiltersWall -> Boxcar window
        ApplyFiltersFIR -> Kaiser Window
        '''

        doFilter = True
        lowFreq_HP = 3000
        highFreq_LP = None

        ####Neutron Data#####
        #get the list of events
        PVar_Neutron_List = []
        EventList = self.PassportOffice.CheckPassport_Runtype("Neutron")

        #For every Event
        for Event in EventList:

            #Load Raw data ('Path' carries a 3-char suffix that is stripped)
            raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])

            #Apply filter. See the docstring
            #for options
            if doFilter:
                filtered_data = self.AcousticAnalysis.ApplyFiltersWall(raw_data[0], lowFreq=lowFreq_HP, highFreq=highFreq_LP)
            else:
                filtered_data = raw_data[0]

            #Calculate PVAR
            PVar = self.AcousticAnalysis.calculatePVar(filtered_data)

            #PVAr > 25 were observed for events from the wall from 1 specific run!
            #We dont know what to do with those yet.
            PVar_Neutron_List.append(PVar)

        ##########Plotting#########
        hist_bins = numpy.arange(10, 13.0, 0.1)
        # NOTE(review): 'normed' was removed in newer matplotlib releases;
        # switch to density=True when upgrading.
        plt.hist(PVar_Neutron_List, bins=hist_bins, normed=True, facecolor='green', alpha=0.75)
        plt.grid(True)
        plt.xlabel("PVar")
        plt.ylabel("Count")
        plt.title("PVar of Entire Dataset")

        #### ALPHA DATA ####
        PVar_Alpha_List = []
        EventList = self.PassportOffice.CheckPassport_Runtype("Alpha")
        for Event in EventList:
            #get raw data
            raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
            #Apply filter. See the docstring
            #for options
            if doFilter:
                filtered_data = self.AcousticAnalysis.ApplyFiltersWall(raw_data[0], lowFreq=lowFreq_HP, highFreq=highFreq_LP)
            else:
                filtered_data = raw_data[0]
            PVar = self.AcousticAnalysis.calculatePVar(filtered_data)
            PVar_Alpha_List.append(PVar)

        ########Plotting#######
        plt.hist(PVar_Alpha_List, bins=hist_bins, normed=True, facecolor='red', alpha=0.40)

        plt.show()

    #Functions to show the Average values (load from cache)
    def PlotSignalAverage(self):
        """Overlay the average Neutron (green) and Alpha (red) signals."""
        #Run 2X to get data for alpha and for neutron
        data_neutron = self.SignalManip.getSignalAverage(EventType="Neutron")
        data_alpha = self.SignalManip.getSignalAverage(EventType="Alpha")

        ###Plotting###
        plt.plot(data_neutron, 'g-')
        plt.plot(data_alpha, 'r-')
        plt.xlabel("Timestep")
        plt.ylabel("Signal (mv)")
        plt.grid(True)
        plt.show()

    #function to show average FFT
    def PlotFFTAverage(self):
        """Overlay the average Neutron (green) and Alpha (red) FFT spectra
        (positive-frequency half only)."""
        #Run 2X to get data for alpha and for neutron
        FFTs_neutron, FFTfreqs = self.SignalManip.getFFTAverage(EventType="Neutron", doWin=False)
        FFTs_alpha, FFTfreqs_alpha = self.SignalManip.getFFTAverage(EventType="Alpha", doWin=False)

        #get half length of FFT for plotting ('//' keeps this an int on
        #both Python 2 and Python 3)
        length = len(FFTs_neutron)
        halflength = length // 2
        FFTAvgBins_kHz_HL = FFTfreqs[:halflength] / 1000.0

        #PLOTTING#
        plt.plot(FFTAvgBins_kHz_HL, abs(FFTs_neutron[:halflength]), 'g-')
        plt.plot(FFTAvgBins_kHz_HL, abs(FFTs_alpha[:halflength]), 'r-')

        plt.xlabel("Frequency")
        plt.ylabel("Count")
        plt.title("Average FFT of all signals")
        plt.grid(True)
        plt.show()

    #################
    ################
    ################
    ###THIS FUNCTION IS MY TEST BED AND HAS NO COMMENTS
    #Nor do I plan on putting some!!
    def _ApplyFilter(self):
        PVar_Neutron_List = []
        EventList = self.PassportOffice.CheckPassport_Runtype("Neutron")
        Loc = EventList[12]['Path'][:-3]

        EventList2 = self.PassportOffice.CheckPassport_Runtype("Alpha")
        Loc2 = EventList2[12]['Path'][:-3]

        raw_dataNeutron = self.LoadWaveform.LoadData('Piezo/triggers.Nov23/trigger_2012.11.23_12.56.15_run_196_110_85')
        raw_dataN = raw_dataNeutron[0]

        raw_dataAlpha = self.LoadWaveform.LoadData(Loc2)
        raw_dataA = raw_dataAlpha[0]
        SampleTime = raw_dataN[1]

        n = len(raw_dataN)
        #'//' keeps the slice bounds ints under Python 3 as well
        fftsN = numpy.fft.rfft(raw_dataN)
        fftsN = fftsN[:n // 2]
        fftfreqsN = numpy.fft.fftfreq(len(raw_dataN), 1.0 / 1250000.0)
        fftfreqsN = fftfreqsN[:n // 2]

        fftsA = numpy.fft.rfft(raw_dataA)
        fftsA = fftsA[:n // 2]
        fftfreqsA = numpy.fft.fftfreq(len(raw_dataA), 1.0 / 1250000.0)
        fftfreqsA = fftfreqsA[:n // 2]

        #############PLotting##############

        plt.title('Data and FFT of Signal')
        ax1 = plt.subplot2grid((4, 3), (0, 0), colspan=3)
        ax2 = plt.subplot2grid((4, 3), (1, 0), colspan=3)

        #data
        ax1.plot(raw_dataN, 'g-')
        ax1.set_xlabel('Sample (S.Time = 8e-7s)')
        ax1.set_ylabel('Amplitude (mV)')

        ax2.plot(raw_dataA, 'r-')
        ax2.set_xlabel('Frequency (kHz)')

        #########Plot 2########
        ax3 = plt.subplot2grid((4, 3), (2, 0), colspan=3)
        ax4 = plt.subplot2grid((4, 3), (3, 0), colspan=3)
        #BUG FIX: was abs(fftN) -- an undefined name that raised NameError;
        #the neutron FFT computed above is fftsN.
        ax3.plot(abs(fftsN), 'g-')
        ax4.plot(abs(fftsA), 'r-')
        ax4.locator_params(axis='x', nbins=50)
        ax4.set_xlabel('Frequency (kHz)')

        plt.show()
示例#40
0
class EventPassportOffice:
    """Registry ("passport office") of detector events.

    One record per event (ID, temperature, pressure, timestamp, run
    number, data path, run type), persisted in a PyDbLite database.
    """

    #what do we need in init?
    #pressure run ID number
    #acoustic ID number
    #(btw marking those separate is a bad idea on the operators part)
    def __init__(self):
        self.EventPassport = Base('EventPassport/EventPassport.pdl')
        #check if the DB exists. If Yes, open, if not
        #create it:
        if not self.EventPassport.exists():
            self.genPDL()
        else:
            self.EventPassport.open()

        self.CleanEvents = CleanEvents.CleanData()

    def genPDL(self):
        """Create the passport database file."""
        #RunNumber is defined as RunNumberAcoustic
        #Runtype can be neutron or alpha
        self.EventPassport.create('EventID', 'Temperature', 'Pressure',
                                  'Time', 'RunNumber', 'Path', 'RunType',
                                  mode="open")

    def genPassport(self, Path, RunNumberAcoustic, RunNumberPressure, RunType_WS):
        """Match pressure/piezo data for a run and insert one passport per
        event, skipping events already on record (within a +/-2 s window)."""
        FilteredData = self.CleanEvents.MatchEvent_PressurePiezo(
            Path, str(RunNumberAcoustic), str(RunNumberPressure))

        #Get the last EventID used for this run; new IDs continue the sequence.
        recs = [Record['EventID'] for Record in self.EventPassport
                if Record['RunNumber'] == RunNumberAcoustic]
        if len(recs) == 0:
            # First event of the run: IDs look like "<run number>0001".
            EID = int(str(RunNumberAcoustic) + "0001")
        else:
            EID = max(recs) + 1

        #check if we have a duplicate!
        for DataPoint in FilteredData:
            timestamp = DataPoint[1]
            #Check if we have a dupe/conflict: any event within 2 seconds
            x = [Event for Event in self.EventPassport
                 if Event['Time'] - timedelta(seconds=2) <= timestamp <= Event['Time'] + timedelta(seconds=2)]
            if len(x) == 0:
                self.EventPassport.insert(EventID=EID,
                                          Temperature=DataPoint[3],
                                          Pressure=DataPoint[2],
                                          Time=DataPoint[1],
                                          RunNumber=RunNumberAcoustic,
                                          Path=DataPoint[0],
                                          RunType=RunType_WS)
                EID += 1
                print("Inserting Entry ...")
            else:
                # BUG FIX: this was a Python 2 print *statement* (a syntax
                # error under Python 3); the call form works on both.
                print("Duplicate entry found at: " + str(DataPoint[1]) +
                      " Event ID: " + str(x[0]['EventID']))

        self.EventPassport.commit()

    def CheckPassport_RunNumber(self, RunNumberQry):
        """All events recorded for the given acoustic run number."""
        return self.EventPassport(RunNumber=RunNumberQry)

    def CheckPassport_Temperature(self, HighTemp, LowTemp):
        """Events with LowTemp < Temperature < HighTemp.

        BUG FIX: the original evaluated ``HighTemp>Temperature>LowTemp``
        with the undefined name ``Temperature`` (NameError at call time);
        filter on each record's stored 'Temperature' field instead.
        """
        return [r for r in self.EventPassport
                if LowTemp < r['Temperature'] < HighTemp]

    def CheckPassport_Time(self, fromTime, toTime):
        """Events strictly between the two timestamps."""
        return [r for r in self.EventPassport if fromTime < r['Time'] < toTime]

    def SizeofPassportDB(self):
        """Number of events on record."""
        return len(self.EventPassport)

    def CheckPassport_Runtype(self, runtype_WS):
        """All events of the given run type ('Neutron' or 'Alpha')."""
        return self.EventPassport(RunType=runtype_WS)

    def CheckPassport_eventID(self, EventID_WS):
        """Events carrying the given event ID."""
        return self.EventPassport(EventID=EventID_WS)

    def _deleteEvent(self, RecID_WS):
        """Delete one record by database record ID and persist immediately."""
        del self.EventPassport[RecID_WS]
        self.EventPassport.commit()
示例#41
0
class Organizer(Cacheable):
    """
    This is the base class for organizers
    """
    def __init__(self, cache, recursive=True):
        Cacheable.__init__(self)
        self.cache = cache
        self.recursive = recursive
        self.transformed = None
        # Do not call reset here, it is called from fs.py when the fs is
        # already started

    def reset(self):
        """Recreate the transformed-paths table and reset cache state."""
        if not self.transformed:
            self.transformed = Base(DB_TRANSFORMED)
        self.transformed.create('realpath', 'path', 'dirname', mode='override')
        self.transformed.create_index('realpath')
        self.transformed.create_index('path')
        self.transformed.create_index('dirname')
        self.cache.reset()
        Cacheable.reset(self)

    def updatecache(self):
        self.generateallpaths()

    def deletefromcache(self, path):
        realpath = self.realpath(path)
        logger.debug("deletefromcache(%s)" % realpath)
        self.cache.deletefromcache(realpath)
        for item in self.transformed.get_index('realpath')[realpath]:
            self.transformed.delete(item)

    def addtocache(self, path):
        if not self.transformed.get_index('path')[path]:
            realpath = self.realpath(path)
            self.cache.addtocache(realpath)
            self.addfile(realpath)

    ############################################
    # Overwritable functions

    def dirlist(self, path):  #IGNORE:W0613
        """
        Returns a list of (non-existent, generated, virtual) directories for a
        given path. Default implementation.
        """
        return []

    def generatepaths(self, realpath):
        """
        Generates paths for a given real path. A file can have more than one
        transformed path. Default implementation.
        """
        yield util.addtrailingslash(
            util.removeroot(realpath, self.cache.filter.root))

    def generaterealpath(self, path):
        """
        Generates a real path for a inexistent path. Default implementation.
        """
        return os.path.join(self.cache.filter.root, path[1:])

    ############################################
    # General functions

    def generateallpaths(self):
        """
        Generates paths for all the files given by the cache and stores them
        in self.transformed
        """
        for realpath in self.cache.filelist():
            if self.recursive:
                # Add all sub-directories first
                currentpath = self.cache.filter.root

                for pathpart in util.pathparts(
                        util.removeroot(realpath, self.cache.filter.root)):
                    currentpath = os.path.join(currentpath, pathpart)
                    self.addfile(currentpath)
            else:
                self.addfile(realpath)

    def addfile(self, realpath):
        """
        Stores a file in self.transformed if not there already and returns the
        paths for that file in the proxy file system
        """
        logger.debug('addfile(%s)' % realpath)
        if not util.ignoretag(util.removeroot(realpath,
                                              self.cache.filter.root)):
            return []

        self.refreshcache()
        transformed = self.transformed.get_index('realpath')[realpath]

        if transformed:
            return (record['path'] for record in transformed)
        else:
            paths = []

            for path in self.paths(realpath):
                # Resolve name collisions by appending/incrementing "(n)".
                while self.transformed.get_index('path')[path]:
                    path = self.increasefilename(path)

                dirname = os.path.dirname(path)
                logger.debug('addfile(%s, %s, %s)' % (realpath, path, dirname))
                self.transformed.insert(realpath=realpath,
                                        path=path,
                                        dirname=dirname)
                paths.append(path)

            return paths

    def increasefilename(self, filename):
        """
        Returns a new filename in sequence. Called if the current filename
        already exists. This default implementation adds a "(1)" to the end if
        not present or increases that number by one.
        """
        root, ext = os.path.splitext(filename)

        num = 1
        matches = _INCREASE_REGEX.match(root)

        if matches is not None:
            # BUG FIX: strip the existing "(n)" suffix before appending the
            # incremented one. Previously the stripped base name was assigned
            # to the unused local 'filename' while the return value still used
            # the unstripped 'root', producing "name(1)(2)" instead of
            # "name(2)".
            num = int(matches.group(2)) + 1
            root = matches.group(1)

        return '%s(%i)%s' % (root, num, ext)

    ############################################
    # General functions that read the cache

    def filelist(self, path):
        """
        Returns a list of directories and filenames in a list from cache
        """
        logger.debug('filelist(%s)' % path)
        self.refreshcache()

        for dirname in self.dirlist(path):
            yield dirname

        for record in self.transformed.get_index('dirname')[path]:
            yield os.path.basename(record['path'])

    def paths(self, realpath):
        """
        Generates or returns paths from cache for a given real path
        """
        self.refreshcache()
        paths = self.transformed.get_index('realpath')[realpath]

        if paths:
            return (path['path'] for path in paths)
        else:
            return (path for path in self.generatepaths(realpath))

    def realpath(self, path):
        """
        Returns the real path for a file given the path in the file system.
        """
        logger.debug('realpath(%s)' % path)
        self.refreshcache()
        realpaths = [
            r['realpath'] for r in self.transformed.get_index('path')[path]
        ]

        realpath = None

        if realpaths:
            realpath = realpaths[0]
        elif path == '/':
            realpath = self.cache.filter.root
        elif path == util.addtrailingslash(util.ORIGINAL_DIR):
            realpath = '.'
        elif util.isspecial(path, 'original', True):
            realpath = os.path.join('.', os.sep.join(util.pathparts(path)[2:]))
        elif util.isspecial(path, 'root', True):
            realpath = os.path.join(self.cache.filter.root,
                                    os.sep.join(util.pathparts(path)[2:]))
        elif util.isspecial(path, 'commands'):
            realpath = '.'
        elif util.iscommand(path):
            realpath = getserver().tempfile.name
        else:
            realpath = self.generaterealpath(path)

        logger.debug('realpath(%s) = %s' % (path, realpath))
        return realpath

    ############################################
    # File system functions

    def getattr(self, path):
        dirname = os.path.dirname(path)
        if util.removeroot(path, os.sep) in self.dirlist(dirname):
            return self.cache.getattr(self.realpath(dirname))
        else:
            return self.cache.getattr(self.realpath(path))

    def readdir(self, path, offset):  #IGNORE:W0613
        for filename in util.getbasefilelist():
            yield fuse.Direntry(filename)

        for filename in self._filelist(path):
            yield fuse.Direntry(filename)

    def _filelist(self, path):
        filelist = []
        if path == util.addtrailingslash(util.ORIGINAL_DIR):
            filelist = ['original', 'root', 'commands']
        elif util.isspecial(path, 'root', True):
            filelist = os.listdir(self.realpath(path))
        elif util.isspecial(path, 'original', True):
            filelist = os.listdir(self.realpath(path))
        elif util.isspecial(path, 'commands'):
            filelist = CommandHandler.COMMANDS
        else:
            filelist = self.filelist(path)

        for filename in filelist:
            yield filename
示例#42
0
"""Create or open the page database

Save this file as wikiBase.py to use a PyDBLite database
"""
import os
from PyDbLite import Base

db = Base(os.path.join(CONFIG.data_dir,'pages.pdl'))
db.create('name','content','admin','nbvisits','created',
    'version','lastmodif',mode="open")
db.create_index('name')
示例#43
0
    ln = random.randint(1, m)
    for i in range(ln):
        res += random.choice(string.letters)
    return res


def sentence(n, m):
    """Return a random 'sentence': between 1 and n random words (each
    produced by word(m)), joined by single spaces."""
    return ' '.join(word(m) for _ in range(random.randint(1, n)))


os.remove('blog')
db = Base('blog').create('parent', 'title', 'author', 'text', 'date')
db.create_index('parent')

nbthreads = 200
for i in range(nbthreads):
    # generate thread
    author = 'pierre'
    title = sentence(10, 10)
    text = sentence(100, 10)
    date = datetime.datetime(random.randint(2004, 2006), random.randint(1, 12),
                             random.randint(1, 28), random.randint(0, 23),
                             random.randint(0, 59), random.randint(0, 59))
    thread_id = db.insert(parent=-1,
                          author=author,
                          title=title,
                          text=text,
示例#44
0
 def reset(self):
     """Rebuild the file table from scratch and reset cacheable state."""
     if not self.files:
         self.files = Base(DB_FILES)
     self.files.create('realpath', mode='override')
     self.files.create_index('realpath')
     Cacheable.reset(self)
示例#45
0
"""Create or open the page database
"""

from PyDbLite import Base

db = Base('pages.pdl').create('name','content','admin','nbvisits','created',
    'version','lastmodif',mode="open")
db.create_index('name')
示例#46
0
"""
If the page database doesn't exist, create it
Initialize the variable db
"""

from PyDbLite import Base

db = Base('pages.pdl').create('name','content','admin',
    'nbvisits','created','version','lastmodif',mode="open")
示例#47
0
def createBase():
    """(Re)build the song databases from base.txt.

    Removes any existing .skl files, creates the genre/song/recueil/dialecte
    tables plus the two many-to-many link tables, and fills them from the
    '#'-separated lines of base.txt.  Prices are randomised.
    """
    for path in ('chansons', 'recueils', 'dialectes',
                 'genres', 'chansons_par_dialecte',
                 'chansons_par_recueil'):
        try:
            os.remove(path + '.skl')
        except OSError:
            # Missing file on first run -- nothing to remove.
            pass

    db_genres = Base('genres.skl').create('nom')
    db_chansons = Base('chansons.skl').create('url', 'breton',
                                              'francais', 'prix', 'genre')
    db_recueils = Base('recueils.skl').create('nom')
    db_dialectes = Base('dialectes.skl').create('nom')
    db_ch_par_dial = Base('chansons_par_dialecte.skl').create(
        'chanson', 'dialecte')
    db_ch_par_rec = Base('chansons_par_recueil.skl').create(
        'chanson', 'recueil')

    # BUG FIX: close the source file deterministically instead of leaking
    # the handle from a bare open().readlines().
    with open("base.txt") as source:
        chansons = source.readlines()

    l_chansons = []
    id_chanson = 0
    l_recueils = []
    l_dialectes = []
    l_genres = []
    chansonsParRecueil = []
    chansonsParDialecte = []
    # (the unused 'chansonsParGenre' accumulator was removed)

    for line in chansons:
        [url, breton, francais, recueils, genre, dialectes, enreg] = line.strip().split("#")
        if genre not in l_genres:
            l_genres.append(genre)
        id_genre = l_genres.index(genre)
        prix = random.randrange(200, 400)
        l_chansons.append([url, breton, francais, prix, id_genre])

        recs = recueils.split(";")
        for rec in recs:
            if rec not in l_recueils:
                l_recueils.append(rec)
            id_recueil = l_recueils.index(rec)
            chansonsParRecueil.append([id_chanson, id_recueil])
        dials = dialectes.split(";")
        for dial in dials:
            if dial not in l_dialectes:
                l_dialectes.append(dial)
            id_dialecte = l_dialectes.index(dial)
            chansonsParDialecte.append([id_chanson, id_dialecte])

        id_chanson += 1

    for g in l_genres:
        db_genres.insert(nom=g)
    for d in l_dialectes:
        db_dialectes.insert(nom=d)
    for r in l_recueils:
        db_recueils.insert(nom=r)

    for ch in l_chansons:
        # BUG FIX: "print ch" was a Python 2 print statement (a syntax error
        # under Python 3); the call form behaves identically on both.
        print(ch)
        db_chansons.insert(**dict(zip(db_chansons.fields, ch)))
    for ch_d in chansonsParDialecte:
        db_ch_par_dial.insert(chanson=ch_d[0],
                              dialecte=ch_d[1])
    for ch_r in chansonsParRecueil:
        db_ch_par_rec.insert(chanson=ch_r[0],
                             recueil=ch_r[1])
    for db in (db_genres, db_dialectes, db_recueils, db_chansons,
               db_ch_par_dial, db_ch_par_rec):
        db.commit()