def _openDBHash(filename, flag, mode=0666):
    """Open a Berkeley DB hash database.  Equivalent to dbhash.open, but
       when possible, reaches into bsddb.db and uses the DB_RECOVER* flag(s)
       to handle possible corruption from crashing without closing the
       database.
    """
    if 1: #XXXX008 This function is borked. Fix it or remove it.
        return anydbm.open(filename, flag, mode)
    try:
        import bsddb
    except ImportError:
        # Fallback to anydbm, which delegates to dbhash
        return anydbm.open(filename, flag, mode)
    # Adapted from bsddb.hashopen
    e = bsddb.db.DBEnv(bsddb.db.DB_PRIVATE |
                       bsddb.db.DB_CREATE |
                       bsddb.db.DB_THREAD |
                       bsddb.db.DB_INIT_LOCK |
                       bsddb.db.DB_INIT_MPOOL |
                       bsddb.db.DB_RECOVER)
    flags = bsddb.db.DB_CREATE | bsddb.db.DB_THREAD
    flags |= getattr(bsddb.db, "DB_AUTO_COMMIT", 0)
    #flags |= getattr(bsddb.db, "DB_RECOVER", 0)
    #flags |= getattr(bsddb.db, "DB_RECOVER_FATAL", 0)
    d = bsddb.db.DB(e)
    d.open(filename, bsddb.db.DB_HASH, flags, mode)
    return bsddb._DBWithCursor(d)

def readDataFromFile(self):
    if self.wantAnyDbm:
        try:
            if os.path.exists(self.filepath):
                self.data = anydbm.open(self.filepath, 'w')
                self.notify.debug('Opening existing anydbm database at: %s.' % (self.filepath,))
            else:
                self.data = anydbm.open(self.filepath, 'c')
                self.notify.debug('Creating new anydbm database at: %s.' % (self.filepath,))
        except anydbm.error:
            self.notify.warning('Cannot open anydbm database at: %s.' % (self.filepath,))
    else:
        try:
            file = open(self.filepath + '.bu', 'r')
            self.notify.debug('Opening backup pickle data file at %s.' % (self.filepath + '.bu',))
            if os.path.exists(self.filepath):
                os.remove(self.filepath)
        except IOError:
            try:
                file = open(self.filepath, 'r')
                self.notify.debug('Opening old pickle data file at %s..' % (self.filepath,))
            except IOError:
                file = None
                self.notify.debug('New pickle data file will be written to %s.' % (self.filepath,))
        if file:
            data = cPickle.load(file)
            file.close()
            self.data = data
        else:
            self.data = {}
    return

def __init__(self, dbfile, mode='c'):
    try:
        self.db = anydbm.open(dbfile, mode)
        self.dbfile = dbfile
    except anydbm.error:
        # The existing file is unreadable; remove it and start fresh.
        os.remove(dbfile)
        self.db = anydbm.open(dbfile, mode)

def startup(self):
    path = self.lib
    global dbs, libraries
    try:
        # 'r' and 'w' fail if the database doesn't exist.
        # 'c' creates it only if it doesn't exist.
        # 'n' always creates a new database.
        if dbs.has_key(path):
            self.db = dbs[path]
            self.trace('Library reusing: %s' % path)
        elif g.os_path_exists(path):
            # 'w' opens the existing database for reading and writing.
            self.db = anydbm.open(path, "w")
            self.trace('Library reopening: %s' % path)
            dbs[path] = self.db
        else:
            self.trace('Library creating: %s' % path)
            self.db = anydbm.open(path, "c")
        self.path = path
    except Exception as err:
        g.es('Library: Exception creating database: %s' % (err,))
    ok = (self.path and self.db and
          hasattr(self.db, 'isOpen') and self.db.isOpen() and
          hasattr(self.db, 'sync'))
    if ok:
        dbs[path] = self.db
    else:
        g.es('problem starting Library: %s' % (path))
    return ok

def __init__(self, filespec, tpath, dbpath, drives):
    """
    filespec - Regex of which files to back up.
    tpath    - Temp path for working files.
    dbpath   - Path to where databases are stored.
    drives   - Which drives to go through.
    """
    # TODO: clean up the init section and how files are opened; move to the
    # _backupfiles method?
    # error checking
    if not isinstance(drives, tuple):
        raise Exception("drives parameter needs to be a tuple")

    self.filespec = filespec     # file regular expression of what files to back up.
    self.workingpath = tpath     # where we should store our files.
    self.dbpath = dbpath         # location where we should keep our DB files.
    self.drives = drives         # what drives to try; can be directories.

    # Generate a file and name in the temporary space, and use that as the
    # starting point for the zip and diff db names.
    self.tempfile = tempfile.TemporaryFile(dir=self.workingpath)
    self.zipfilename = self.tempfile.name + ".zip"
    self.diffdbname = os.path.join(self.dbpath, self.tempfile.name + ".db")
    self.fulldbname = os.path.join(self.dbpath, "dbfull.db")

    # our actual storage.
    self.zipfile = zipfile.ZipFile(self.zipfilename, "w",
                                   compression=zipfile.ZIP_DEFLATED)
    self.dbfull = anydbm.open(self.fulldbname, "c")
    self.dbdiff = anydbm.open(self.diffdbname, "n")
    self.regex = re.compile(self.filespec)
    self.backupcount = 0

def test_beaker_cache():
    func = BeakerCacheController.index
    cache_name = '%s.%s' % (func.__module__, func.__name__)
    path = encoded_path(data_dir + "/container_dbm", [cache_name],
                        extension=".dbm", digest_filenames=True)
    cache_key = '[][]'

    wsgiapp = make_app()
    app = paste.fixture.TestApp(wsgiapp)

    response = app.get('/', extra_environ={'resource': "XYZ"})
    assert response.status == 200
    assert response.body == "200 OK"

    with closing(dbm.open(path)) as db:
        assert cache_key in db
        storedtime, expiretime, value = pickle.loads(db[cache_key])
        assert value["status"] == 200
        assert value["content"] == "200 OK"

    response = app.get('/', status=404)
    assert response.status == 404
    with closing(dbm.open(path)) as db:
        assert cache_key not in db

def calculate_idf():
    N = 272700
    dfs = {}
    print "calculating idf"
    f = anydbm.open(path + '/dataset.anydbm', 'c')
    for i in range(N):
        if i % 10000 == 0:
            print i
        doc = f[str(i)]
        doc = doc.decode('utf-8')
        words = doc.split(" ")
        words = list(set(words))
        for w in words:
            if w not in dfs:
                dfs[w] = 0
            dfs[w] = dfs[w] + 1
    f.close()
    idf = {}
    i = 0
    for w in dfs:
        idf[w] = log10(float(N) / float(dfs[w]))
        if i % 5000 == 0:
            print "calculating ", i
        i += 1
    idf_f = anydbm.open(path + "/idfs.anydbm", 'c')
    for w, c in idf.items():
        idf_f[str(w.encode("utf-8"))] = str(c)
    idf_f.close()

def calculate_tf_idf():
    N = 272700
    print "calculating tf-idf"
    tf = anydbm.open(path + "/TFs.anydbm", 'c')
    idf = anydbm.open(path + "/idfs.anydbm", 'c')
    idf_dic = {}
    for w in idf:
        idf_dic[w.decode("utf-8")] = float(idf[w])
    print len(idf_dic)
    tf_idf = anydbm.open(path + "/TF_IDF.anydbm", 'c')
    for i in range(N):
        if i % 5000 == 0:
            print i
        tf_dic = cPickle.loads(tf[str(i)])
        res = {}
        vec_module = 0
        for w, sc in tf_dic.items():
            res[w] = sc * idf_dic[w]
            vec_module += sc ** 2
        vec_module = sqrt(float(vec_module))
        for w, sc in res.items():
            res[w] = float(sc) / float(vec_module)
        tf_idf[str(i)] = cPickle.dumps(res, 2)

def DbAddComment(mapid, user, comment):
    mapfile = 'maps/%s.db' % mapid
    if not os.access(mapfile, os.F_OK):
        raise Exception('Invalid map id %s' % mapid)
    d = getCurrentDate()
    lock = FileLock(mapfile, 5)
    lock.acquire()
    #Log('DbAddComment open db r %s\n' % mapfile)
    db = anydbm.open(mapfile, 'r')
    if db.has_key('last_comment_id'):
        last_comment_id = int(db['last_comment_id'])
    else:
        last_comment_id = 0
    db.close()
    #Log('DbAddComment close db r %s\n' % mapfile)
    last_comment_id += 1
    if last_comment_id > 99999:
        lock.release()
        raise Exception('Max comments reached')
    #Log('DbAddComment open db c %s\n' % mapfile)
    db = anydbm.open(mapfile, 'c')
    db['last_comment_id'] = str(last_comment_id)
    db['comment%.5d' % last_comment_id] = '%s,%s,%s' % (d, user, comment)
    db.close()
    #Log('DbAddComment close db c %s\n' % mapfile)
    lock.release()

def DbBuildInvert(dbtype, ele, invfunc):
    if dbtype not in DBTYPES:
        raise Exception('Invalid database type')
    # Check ele
    if ele not in ELELIST[dbtype]:
        raise Exception('Invalid element')
    #print '<!-- DbBuildInvert -->\n'
    # Target inv db
    dbfileinv = ele.upper() + '_INV.db'
    # Lock and open inv db
    lock = FileLock(dbfileinv, 5)
    lock.acquire()
    #Log('DbBuildInvert open db c %s\n' % dbfileinv)
    dbinv = anydbm.open(dbfileinv, 'c')
    # Clear inv db
    dbinv.clear()
    # List dir
    for dbfile in os.listdir(dbtype):
        id = dbfile[:-3]
        #Log('DbBuildInvert open db r %s/%s\n' % (dbtype, dbfile))
        db = anydbm.open('%s/%s' % (dbtype, dbfile), 'r')
        if db.has_key(ele):
            value = db[ele]
            for word in invfunc(value):
                if dbinv.has_key(word):
                    dbinv[word] = dbinv[word] + (',%s' % id)
                else:
                    dbinv[word] = '%s' % id
        db.close()
        #Log('DbBuildInvert close db r %s/%s\n' % (dbtype, dbfile))
    dbinv.close()
    #Log('DbBuildInvert close db c %s\n' % dbfileinv)
    lock.release()
    # Rebuild is no longer needed
    RearmRebuild(ele)

def evaluate_weights():
    N = 272000
    print "loading weights"
    weights = Unpickler(open(path + "/estimated_weights.txt", 'rb')).load()
    docCat = anydbm.open(path + "/docIDvsCategory.anydbm")
    docIdvsCat = {}
    categories = set()
    print " reading id vs cat"
    for i in range(N):
        docIdvsCat[i] = docCat[str(i)]
        categories.add(docIdvsCat[i])
    tf_idf = anydbm.open(path + "/TF_IDF.anydbm")
    print " reading id vs doc"
    idvsdoc = {}
    for i in range(1, N):
        # Sample roughly 10% of the documents for evaluation.
        if random.random() <= 0.1:
            idvsdoc[i - 1] = cPickle.loads(tf_idf[str(i)])
        if i % 10000 == 0:
            print i
    print "starting"
    start = time.clock()
    numErrors = 0
    for i in idvsdoc:
        doc = idvsdoc[i]
        cat = docIdvsCat[i]
        resCat = {}
        for c in categories:
            res = 0
            for w in doc:
                if w in weights[c]:
                    res += doc[w] * weights[c][w]
            resCat[c] = res
        exp_vals = toExponent(resCat)
        sumOfExp = sumExp(exp_vals)
        lik = exp_vals[cat] / sumOfExp
        for c in categories:
            if c != cat:
                if (exp_vals[c] / sumOfExp) > lik:
                    numErrors += 1
                    break
    print str(numErrors), str(len(idvsdoc)), str(float(numErrors) / float(len(idvsdoc)))
    end = time.clock()
    print "time : ", str(end - start)

def __load_md_db(self, md_file_path):
    self.db = anydbm.open(md_file_path, 'c')
    self.__last_item_id = long(self.__get_db_val('last_item_id', 0))
    self.__last_journal_rec_id = long(self.__get_db_val('last_journal_rec_id', 0))
    if self.__journal:
        j_key = self.__get_db_val('journal_key', None)
        if j_key != self.__journal.get_journal_key():
            logger.info('Invalid journal key in metadata database! Recreating it...')
            self.db.close()
            self.__remove_md_file(md_file_path)
            self.db = anydbm.open(md_file_path, 'c')
            self.db['journal_key'] = self.__journal.get_journal_key()
            self.__last_item_id = 0
            self.__last_journal_rec_id = 0
        try:
            self.__init_from_journal(self.__last_journal_rec_id)
        except NimbusException, err:
            logger.error('Metadata was not restored from journal! Details: %s' % err)
            logger.info('Trying to restore from the full journal records...')
            self.db.close()
            self.__remove_md_file(md_file_path)
            self.db = anydbm.open(md_file_path, 'c')
            self.__init_from_journal(0)

def DbBuildInvertOld(ele, invfunc):
    raise Exception('Deprecated')
    # Check ele
    if ele not in ELELIST['maps']:
        raise Exception('Invalid element')
    # Target inv db
    dbfileinv = ele.upper() + '_INV.db'
    # Lock and open inv db
    lock = FileLock(dbfileinv, 5)
    lock.acquire()
    dbinv = anydbm.open(dbfileinv, 'c')
    # Clear inv db
    dbinv.clear()
    # List dir
    for mapdbfile in os.listdir('maps'):
        mapid = mapdbfile[:-3]
        dbmap = anydbm.open('maps/%s' % mapdbfile, 'r')
        if dbmap.has_key(ele):
            value = dbmap[ele]
            for word in invfunc(value):
                if dbinv.has_key(word):
                    dbinv[word] = dbinv[word] + (',%s' % mapid)
                else:
                    dbinv[word] = '%s' % mapid
        dbmap.close()
    dbinv.close()
    lock.release()
    # Rebuild is no longer needed
    RearmRebuild(ele)

def main_song(name):
    #name = name.strip('\n|\r').encode('utf-8')
    PR = anydbm.open('E:\\sam_work\\pagerank\\qqmusic\\db\\uriToPR.db', 'r')
    name2uri = anydbm.open('E:\\sam_work\\pagerank\\qqmusic\\db\\NameToUri.db', 'r')
    uri2name = anydbm.open('E:\\sam_work\\pagerank\\qqmusic\\db\\uriToName.db', 'r')
    song2artist = anydbm.open('E:\\sam_work\\pagerank\\qqmusic\\db\\song2artist.db', 'r')
    song2album = anydbm.open('E:\\sam_work\\pagerank\\qqmusic\\db\\song2album.db', 'r')
    outmat = []
    if not name2uri.has_key(name):
        return outmat
    uris = re.compile(r'\|').split(name2uri[name])
    myhash = {}
    for i in uris:
        if re.search('song', i):
            try:
                p = PR[i]
            except KeyError:
                p = 0
            myhash[i] = p
    keys = sorted(myhash.iteritems(), key=lambda item: item[1], reverse=True)
    #print 'search query:', name
    for i in keys:
        try:
            singer = uri2name[song2artist[i[0]]]
        except KeyError:
            singer = ''
        try:
            album = uri2name[song2album[i[0]]]
        except KeyError:
            album = ''
        #print 'singer:', singer, ' album:', album, ' score:', i[1]
        outstr = 'singer:' + singer + ' album:' + album + ' score:' + str(i[1])
        outmat.append(outstr)
    return outmat

def _loadIndex(self, compress=False):
    """Load the complete index into memory.

    If compress is set to True, the index will not be loaded,
    but a compressed index will be created instead.
    """
    if self.mMethod == "uncompressed":
        self.mDatabaseFile = open(self.mDbname, "r")
    elif self.mMethod == "dictzip":
        import dictzip
        self.mDatabaseFile = dictzip.GzipFile(self.mDbname)
    elif self.mMethod == "lzo":
        import lzo
        self.mDatabaseFile = Uncompressor(self.mDbname, lzo.decompress)
    elif self.mMethod == "gzip":
        self.mDatabaseFile = Uncompressor(self.mDbname, gzip_demangler)
    elif self.mMethod == "zlib":
        self.mDatabaseFile = Uncompressor(self.mDbname, zlib.decompress)
    elif self.mMethod == "bzip2":
        import bz2
        self.mDatabaseFile = Uncompressor(self.mDbname, bz2.decompress)
    elif self.mMethod == "debug":
        self.mDatabaseFile = Uncompressor(self.mDbname + ".debug", lambda x: x)

    filename_index = self.mNameIndex + ".dbm"
    if compress:
        if os.path.exists(filename_index):
            raise OSError("file %s already exists" % filename_index)
        self.mIndex = anydbm.open(filename_index, "n")
    elif os.path.exists(filename_index):
        self.mIndex = anydbm.open(filename_index, "r")
        self.mIsLoaded = True
        return
    else:
        self.mIndex = {}

    for line in open(self.mNameIndex, "r"):
        data = line[:-1].split("\t")
        if len(data) == 2:
            # ignore synonyms of non-existent contigs
            identifier = data[1]
            if data[0] not in self.mIndex:
                continue
            self.mSynonyms[identifier] = data[0]
        else:
            ## index with random access points
            if len(data) > 4:
                (identifier, pos_id, block_size, lsequence) = \
                    data[0], int(data[1]), int(data[2]), int(data[-1])
                points = map(int, data[3:-1])
                self.mIndex[identifier] = (pos_id, block_size, lsequence, points)
            else:
                (identifier, pos_id, pos_seq, lsequence) = \
                    data[0], int(data[1]), int(data[2]), int(data[-1])
                self.mIndex[identifier] = struct.pack("QQi", pos_id, pos_seq, lsequence)

    self._addSynonyms()
    self.mIsLoaded = True

def main():
    db = anydbm.open("mk_html_new.anydbm", "r")
    db_content = anydbm.open("mk_html_content_multitest.anydbm", "c")
    domains = []
    i = 1
    for domain_name in db:
        print i, "domain", domain_name
        i += 1
        domain_name = domain_name.strip()
        dont_visit = ["http://www.alfalab.mk", "http://zulu.com.mk"]
        if domain_name in dont_visit or domain_name in db_content:
            print "already exists: ", domain_name
            continue
        if domain_name not in domains:
            domains.append({domain_name: cPickle.loads(db[domain_name])})
        if i % 100 == 0:
            content = pool_test(domains)
            for domain in content:
                for name in domain:
                    db_content[name] = cPickle.dumps(domain[name], 2)
            # Reopen periodically so progress is flushed to disk.
            db_content.close()
            db_content = anydbm.open("mk_html_content_multitest.anydbm", "c")
            domains = []
    db_content.close()
    db.close()

def doUserTags(bunch, text, env):
    """[[user_tags]] - FIXME"""
    prenote = ""
    if env["context"]["email_validation"] == "(validated)":
        usertags = get_formItem(env, "tags", default=None)
        if usertags != None:
            f = anydbm.open(env["context"]["userstatefile"], "c")
            f[str(env["context"]["userid"]) + ":tags:" + env["context"]["pagename"]] = usertags
            tags = usertags
            prenote = "<b>User tags saved</b> <br>"
            f.close()
        f = anydbm.open(env["context"]["userstatefile"], "c")
        try:
            tags = f[str(env["context"]["userid"]) + ":tags:" + env["context"]["pagename"]]
        except KeyError:
            tags = "( no tags defined - tags are comma separated 1-3 word phrases)"
        f.close()
        tagbox = """%(prenote)s
<form method="post" action="%(cgipath)s%(pagename)s" enctype="application/x-www-form-urlencoded">
    <input type="hidden" name="usertagsmode" value="update" />
    <input type="text" name="tags" value="%(tags)s" style="width: 85%%">
    <input type="submit" value="save"></td></tr>
    </table>
</form>""" % {
            "prenote": prenote,
            "cgipath": "/cgi-bin/Wiki/edit/",  # env["context"]["cgipath"],
            "pagename": env["context"]["pagename"],
            "tags": tags,
        }
        return tagbox
    else:
        return ("<font size='-1'>If you had set <a href='%s'>UserPreferences</a> "
                "(name & email) and validated them (simple single click in your "
                "email), you would be able to define personal tags</font>"
                % (env["context"]["cgipath"] + "UserPreferences",))

def main():
    """Main function"""

    # WRITE #######
    db = anydbm.open('foo_anydbm', 'c')
    db['one'] = 'un'
    db['two'] = 'dos'
    db['three'] = 'tres'
    db.close()

    # WHICH DBM ###
    print "whichdb:", whichdb.whichdb('foo_anydbm')
    print

    # READ ########
    db = anydbm.open('foo_anydbm', 'r')

    # Iterate loop: first method (common to any dbm module)
    for k in db.keys():
        print k, ':', db[k]

    # Iterate loop: second method (only dbhash and dumbdbm support db.items())
    for k, v in db.items():
        print k, ':', v

    db.close()

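# The same write/whichdb/read flow on Python 3, where the anydbm and whichdb
# modules were folded into the dbm package.  A sketch, not part of the
# original demo; note that keys and values come back as bytes:
import dbm

db = dbm.open('foo_anydbm', 'c')
db['one'] = 'un'
db.close()

print("whichdb:", dbm.whichdb('foo_anydbm'))

db = dbm.open('foo_anydbm', 'r')
for k in db.keys():
    print(k, ':', db[k])
db.close()
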
def main():
    db = anydbm.open("mk_html_new.anydbm", "r")
    db_content = anydbm.open("mk_html_content.anydbm", "c")
    i = 0
    num_urls = 0
    for domain_name in db:
        domain_name = domain_name.strip()
        if i % 10 == 0:
            print "domains compared:", i
        dont_visit = ["http://www.alfalab.mk", "http://zulu.com.mk"]
        if domain_name in dont_visit or domain_name in db_content:
            print "already exists: ", domain_name
            continue
        if i % 30 == 0:
            # Reopen periodically so progress is flushed to disk.
            db_content.close()
            db_content = anydbm.open("mk_html_content.anydbm", "c")
        i += 1
        content = {}
        domain = cPickle.loads(db[domain_name])
        for url in domain:
            num_urls += 1
            if num_urls % 10 == 0:
                print "%s urls compared: %s" % (domain_name, str(num_urls))
            content[url] = site_content(domain, domain_name, url)
        db_content[domain_name] = cPickle.dumps(content, 2)
    db_content.close()
    db.close()

def main(args):
    genomes = ['PO', 'ZO', 'At', 'Os', 'Mt', 'Pt', 'Lj']  # database names
    aa_dbs = []
    for g in genomes:
        aa_dbs.append(args.get('datasets') + g + '.aa')
    nt_dbs = []
    for g in genomes:
        nt_dbs.append(args.get('datasets') + g + '.nt')
    #dbs.append( args.get('swissprot') + 'uniprot_sprot.fasta' )

    # index databases
    aa_dbmname = index_databases(aa_dbs, args)
    nt_dbmname = index_databases(nt_dbs, args)

    clusterhash = get_cluster_ids(args.get('in'))
    aa_db = anydbm.open(aa_dbmname, "r")
    nt_db = anydbm.open(nt_dbmname, "r")
    for i, idlist in clusterhash.iteritems():
        fwaa = open(args.get('out') + 'cluster' + add_leading_zeroes(i, 3) + '.aa', 'w')
        fwnt = open(args.get('out') + 'cluster' + add_leading_zeroes(i, 3) + '.nt', 'w')
        for id in idlist[1:]:
            if not aa_db.has_key(id) or not nt_db.has_key(id):
                stderr("cluster %s | id %s not in both datasets | skipped." % (i, id))
                continue
            fwaa.write(">" + id + "\n" + aa_db[id] + "\n")
            fwnt.write(">" + id + "\n" + nt_db[id] + "\n")
        fwaa.flush()
        fwaa.close()
        fwnt.flush()
        fwnt.close()

def __init__(self, filename, mode, serializer=None):
    """Constructor.

    The database stores its Serializer, so none needs to be supplied
    when opening an existing database."""

    # pybsddb3 has a bug which prevents it from working with
    # Berkeley DB 4.2 if you open the db with 'n' ("new").  This
    # causes the DB_TRUNCATE flag to be passed, which is disallowed
    # for databases protected by lock and transaction support
    # (bsddb databases use locking from bsddb version 4.2.4 onwards).
    #
    # Therefore, manually perform the removal (we can do this, because
    # we know that for bsddb - but *not* anydbm in general - the database
    # consists of one file with the name we specify, rather than several
    # based on that name).
    if mode == DB_OPEN_NEW and anydbm._defaultmod.__name__ == 'dbhash':
        if os.path.isfile(filename):
            os.unlink(filename)
        self.db = anydbm.open(filename, 'c')
    else:
        self.db = anydbm.open(filename, mode)

    # Import implementations for many mapping interface methods.
    for meth_name in ('__delitem__', '__iter__', 'has_key',
                      '__contains__', 'iterkeys', 'clear'):
        meth_ref = getattr(self.db, meth_name, None)
        if meth_ref:
            setattr(self, meth_name, meth_ref)

    if mode == DB_OPEN_NEW:
        self.serializer = serializer
        self.db[self.serializer_key] = cPickle.dumps(self.serializer)
    else:
        self.serializer = cPickle.loads(self.db[self.serializer_key])

def main():
    try:
        opts, args = getopt.getopt(sys.argv[1:], "RNr:mlfcCiIp:")
    except getopt.GetoptError:
        usage()

    if len(args) > 1 or len(opts) != 1:
        usage()

    if len(args) == 1:
        Ctx().tmpdir = args[0]

    for o, a in opts:
        if o == "-R":
            show_int2str_db(config.SVN_MIRROR_REVISIONS_TABLE)
        elif o == "-N":
            show_str2marshal_db(
                config.SVN_MIRROR_NODES_STORE,
                config.SVN_MIRROR_NODES_INDEX_TABLE
            )
        elif o == "-r":
            try:
                revnum = int(a)
            except ValueError:
                sys.stderr.write('Option -r requires a valid revision number\n')
                sys.exit(1)
            db = anydbm.open(config.SVN_MIRROR_REVISIONS_TABLE, 'r')
            key = db[str(revnum)]
            db.close()
            db = anydbm.open(config.SVN_MIRROR_NODES_STORE, 'r')
            print_node_tree(db, key, "Revision %d" % revnum)
        elif o == "-m":
            show_str2marshal_db(config.METADATA_DB)
        elif o == "-f":
            prime_ctx()
            cvs_files = list(Ctx()._cvs_path_db.itervalues())
            cvs_files.sort()
            for cvs_file in cvs_files:
                print '%6x: %s' % (cvs_file.id, cvs_file,)
        elif o == "-c":
            prime_ctx()
            show_str2ppickle_db(
                config.SVN_COMMITS_INDEX_TABLE, config.SVN_COMMITS_STORE
            )
        elif o == "-C":
            show_str2marshal_db(config.CVS_REVS_TO_SVN_REVNUMS)
        elif o == "-i":
            prime_ctx()
            show_cvsitemstore()
        elif o == "-I":
            prime_ctx()
            show_filtered_cvs_item_store()
        elif o == "-p":
            obj = pickle.load(open(a))
            print repr(obj)
            print obj
        else:
            usage()
            sys.exit(2)

def eliminar():
    datos = anydbm.open("nombres", "c")
    del datos[str(contac.get())]
    datos.close()
    m = anydbm.open("registro", "c")
    del m[str(contac.get())]
    m.close()
    tkMessageBox.showinfo("Delete", "Your contact has been deleted successfully")

def _inittrash(mounts=None):
    # initialize the trash
    if not mounts:
        mounts = _getmounts()
    for mount in mounts:
        trashpath = os.path.join(mount, '.deleted_files')
        if not os.path.exists(trashpath):
            os.mkdir(trashpath)
            metafile = os.path.join(trashpath, '.fileinfo')
            anydbm.open(metafile, 'c').close()

def _open(self, mode='r'):
    try:
        return anydbm.open(self.filepath, mode)
    except anydbm.error:
        # Might not be created yet: create it, close it, then
        # open it again with whatever mode was requested.
        db = anydbm.open(self.filepath, 'n')
        db.close()
        return self._open(mode)

def POST(self):
    reg = True
    login = formuLogin()
    s = web.ctx.session
    user = s.usuario
    edit = True
    re = regis()
    db = anydbm.open('./registro.txt', 'c')
    d = None
    i = web.input(categoria='Categoria-0')
    try:
        print (user)
        d = db[str(user)]
    except:
        print ("Does not exist!! POST")
    if d != None:
        Datos_usuario = d.split('|')
        db.close()
        print (Datos_usuario)
        re.nombre.value = user
        re.apellidos.value = Datos_usuario[1]
        re.email.value = Datos_usuario[2]
        re.visa.value = Datos_usuario[3]
        re.dia.value = Datos_usuario[4]
        re.mes.value = Datos_usuario[5]
        re.anio.value = Datos_usuario[6]
        re.direccion.value = Datos_usuario[7]
        re.formaPago.value = Datos_usuario[8]
        re.condiciones.type = "hidden"
    if not re.validates():
        # Review
        return render.plantilla(Titulo='Infraestructuras Virtuales',
                                Subtitulo='Servidor Enjaulado', login=login,
                                cate=categorias, Registro=re,
                                autor='Jose Miguel Lopez', reg=reg,
                                user="******", edit=edit,
                                cateA=web.websafe(i.categoria))
    else:
        # Save the data to the database.
        db = anydbm.open('./registro.txt', 'c')
        d = None
        try:
            d = db[str(re.nombre.value)]
        except:
            print ("Username available")
        if d == None or re.nombre.value == user:
            db[str(re.nombre.value)] = (str(re.password.value) + '|' +
                str(re.apellidos.value) + '|' + str(re.email.value) + '|' +
                str(re.visa.value) + '|' + str(re.dia.value) + '|' +
                str(re.mes.value) + '|' + str(re.anio.value) + '|' +
                str(re.direccion.value) + '|' + str(re.formaPago.value))
            s.usuario = str(re.nombre.value)
            db.close()
            return """<script type="text/javascript">alert("Data modified successfully"); window.location="/principal";</script>"""
        else:
            #print ("Already exists")
            db.close()
            return """<script type="text/javascript">alert("User already exists, choose another name"); window.location="/registro";</script>"""

def isGreylisted(self, recipient):
    max_grey = 3000000
    too_soon = 180
    min_defer_time = 3600
    max_defer_time = 25000

    IP = self.peer
    sender = self.sender

    def _isGreylisted(greylist, seen, IP, sender, recipient):
        # If greylisted, and not been there too long, allow through
        if greylist.get(triplet, None) is not None:
            greytime = float(greylist[triplet])
            if (time.time() - greytime) > max_grey:
                del greylist[triplet]
                try:
                    del seen[triplet]
                except KeyError:
                    # We don't care if it's already gone
                    pass
                # REFUSED: grey too long
            else:
                # ACCEPTED: already grey (have reset greytime)
                greylist[triplet] = str(time.time())
                return True

        # If not seen this triplet before, defer and note triplet
        if seen.get(triplet, None) is None:
            seen[triplet] = str(time.time())
            return False

        # If triplet retrying waaay too soon, reset their timer & defer
        last_tried = float(seen[triplet])
        if (time.time() - last_tried) < too_soon:
            seen[triplet] = str(time.time())
            return False

        # If triplet retrying too soon generally speaking, just defer
        if (time.time() - last_tried) < min_defer_time:
            return False

        # If triplet hasn't been seen in aaaages, defer
        if (time.time() - last_tried) > max_defer_time:
            seen[triplet] = str(time.time())
            return False

        # Otherwise, allow through & greylist them
        greylist[triplet] = str(time.time())
        return True

    greylist = anydbm.open("greylisted.dbm", "c")
    seen = anydbm.open("attempters.dbm", "c")
    triplet = repr((IP, sender, recipient))
    result = _isGreylisted(greylist, seen, IP, sender, recipient)
    seen.close()
    greylist.close()
    return result

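# A hedged restatement of the deferral thresholds used by isGreylisted()
# above, with plain values instead of the dbm-backed timestamps.  The
# constants are copied from the function; decide() is illustrative only:
too_soon = 180          # retrying faster than this resets the timer
min_defer_time = 3600   # must wait at least this long to be accepted
max_defer_time = 25000  # waiting longer than this resets the timer

def decide(first_seen, now):
    """Return the action for a triplet first seen at time first_seen."""
    waited = now - first_seen
    if waited < too_soon:
        return 'defer (too soon; timer reset)'
    if waited < min_defer_time:
        return 'defer'
    if waited > max_defer_time:
        return 'defer (stale; timer reset)'
    return 'accept (triplet is now greylisted)'

print(decide(0, 60))    # defer: retried within three minutes
print(decide(0, 7200))  # accept: retried after two hours
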
def test_anydbm_create(self):
    # Verify that anydbm.open does *not* create a bsddb185 file
    tmpdir = tempfile.mkdtemp()
    try:
        dbfile = os.path.join(tmpdir, "foo.db")
        anydbm.open(dbfile, "c").close()
        ftype = whichdb.whichdb(dbfile)
        self.assertNotEqual(ftype, "bsddb185")
    finally:
        shutil.rmtree(tmpdir)

def __init__(self, topdir, log):
    msg_dir = "%s/messages" % topdir
    dbs = os.listdir(msg_dir)
    for db in dbs:
        db_file = "%s/%s" % (msg_dir, db)
        self.msg_dbs[db] = anydbm.open(db_file, "r", 0644)
    log_dir = "%s/services" % topdir
    for t in ("category", "event", "parameter"):
        db_file = "%s/%s/%s.db" % (log_dir, log, t)
        self.svc_dbs[t] = anydbm.open(db_file, "r")

def denovo(self):
    self.toplevel.destroy()
    arquivo = anydbm.open('misc', 'r')
    if arquivo['nivel'] in ('100', '101', '102', '103', '104', '105'):
        raiz = Tk(className='Show Do Milhão')
        jogo(raiz, '100')
        raiz.mainloop()
        arquivo.close()
    else:
        raiz = Tk(className='Show Do Milhão')
        jogo(raiz, '0')
        raiz.mainloop()

def anydbm_store(limit):
    path = "/tmp/test_py_benchmark_%s.dbm" % limit
    db = anydbm.open(path, 'n')
    for i in range(0, limit):
        k = str(i)
        v = str(random.randrange(0, 65535))
        try:
            db[k] = v
        except:
            return False
    db.close()
    return True

def vc(self):
    self.toplevel.destroy()
    current = self.valorRadio.get()
    arquivo = anydbm.open('misc', 'c')
    chaves = arquivo.keys()  # Holds the database keys, always in order
    usuario = chaves[current]
    arquivo['current'] = usuario
    arquivo.close()
    raiz = Tk(className='Show Do Milhão')
    jogo(raiz, '0')
    raiz.mainloop()

@contextmanager
def dbm_cache():
    """
    Context manager for accessing the simple dbm cache
    located at ~/.cumulusci/cache.dbm
    """
    config_dir = os.path.join(
        os.path.expanduser("~"), YamlGlobalConfig.config_local_dir
    )
    if not os.path.exists(config_dir):
        os.mkdir(config_dir)
    db = dbm.open(os.path.join(config_dir, "cache.dbm"), "c")
    yield db
    db.close()

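# Usage sketch for dbm_cache() above (an illustration, not original code).
# dbm handles map str/bytes keys to bytes values, so structured data should
# be serialized before being stored:
import json

def remember(key, value):
    with dbm_cache() as db:
        db[key] = json.dumps(value)

def recall(key):
    with dbm_cache() as db:
        return json.loads(db[key])
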
def getdb(dbpath):
    while True:
        try:
            handle = dbm.open(dbpath, 'c')
            break
        except dbm.error as exc:
            # errno 11 (EAGAIN): the database is locked by another
            # process; keep retrying until it becomes available.
            if exc.args[0] == 11:
                continue
            else:
                raise
    try:
        yield handle
    finally:
        handle.close()

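# getdb() above is a generator that yields one open handle and closes it in
# its finally clause, so it can be driven as a context manager.  A hedged
# usage sketch (the contextmanager wrapping is an assumption; the original
# module may already decorate getdb):
from contextlib import contextmanager

managed_getdb = contextmanager(getdb)

def store_event(dbpath, key, timestamp):
    with managed_getdb(dbpath) as db:
        db[key] = str(timestamp)
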
def stor_torr(self, info_hash, torr_name):
    """
    Store torrents we're serving in the database.
    If it's already in our database, just ignore it.
    Pairs are stored as key => info_hash, value => torr_name.

    Parameters
        torr_name: file name of torrent (string)
        info_hash: info hash of torrent (string)
    """
    self.db = anydbm.open(self.torr_db, 'c')
    if not self.db.has_key(info_hash):
        self.db[info_hash] = torr_name
        log.debug("info_hash %s, torr_name: %s" % (info_hash, torr_name,))
    self.db.close()

def test_071_anydbm():  # issue #71 {{{1
    import os
    if sys.version_info[0] == 2:
        import anydbm
    else:
        import dbm as anydbm
    # FIXME: determine path to sdcard. like: path = os.environ[""]
    del os.chmod
    for fname in (
            # failed: this is not SL4A application folder...
            # os.path.join("/data/data/com.googlecode.pythonforandroid",
            #              "files", "test_anydbm.dbm"),
            # OK: _chmod work well.
            # os.path.join("/data/local/abc", "test_anydbm.dbm"),
            # failed: _chmod not worked in FAT (SD card)
            os.path.join("/sdcard", "sl4a", "test_anydbm.dbm"),
    ):
        try:
            os.remove(fname + ".dat")
        except:
            pass
        anydbm.open(fname, "n")
        os.remove(fname + ".dat")
    return True

def open(self):
    """Open a pre-existing on-disk database.

    @raise anydbm.error: If there's a problem opening the database.
    @raise ValueError: If the database is not of the right type.
    """
    if not self.filename:
        raise ValueError("Can only open on-disk databases")
    self.db = anydbm.open(self.filename, "w")  # raises anydbm.error
    try:
        if self.db["--Reserved--type"] != self.type:
            raise ValueError("Not a %s database" % self.type)
    except KeyError:
        raise ValueError("Not a recognized database")

def readDataFromFile(self):
    if self.wantAnyDbm:
        try:
            if os.path.exists(self.filepath):
                self.data = anydbm.open(self.filepath, 'w')
                self.notify.debug('Opening existing anydbm database at: %s.' % (self.filepath,))
            else:
                self.data = anydbm.open(self.filepath, 'c')
                self.notify.debug('Creating new anydbm database at: %s.' % (self.filepath,))
        except anydbm.error:
            self.notify.warning('Cannot open anydbm database at: %s.' % (self.filepath,))
    else:
        try:
            file = open(self.filepath + '.bu', 'r')
            self.notify.debug('Opening backup pickle data file at %s.' % (self.filepath + '.bu',))
            if os.path.exists(self.filepath):
                os.remove(self.filepath)
        except IOError:
            try:
                file = open(self.filepath, 'r')
                self.notify.debug('Opening old pickle data file at %s..' % (self.filepath,))
            except IOError:
                file = None
                self.notify.debug('New pickle data file will be written to %s.' % (self.filepath,))
        if file:
            data = cPickle.load(file)
            file.close()
            self.data = data
        else:
            self.data = {}

def crawl(self, depth=2, timeout=3):
    """Crawl the web!"""
    seen = set()
    while len(self._url_queue):
        url, depth_ = self._url_queue.pop()

        # skip this url; it's too deep
        if depth_ > depth:
            continue

        doc_id = self.document_id(url)

        # we've already seen this document
        if doc_id in seen:
            continue

        db = anydbm.open('url_title', 'c')
        seen.add(doc_id)  # mark this document as visited

        socket = None
        try:
            socket = urllib2.urlopen(url, timeout=timeout)
            soup = BeautifulSoup(socket.read())
            title = soup.find('title').text
            title = title.replace('&#39;', "'")
            title = title.replace('&amp;', '&')
            db[str(url)] = title

            # add soup, which is the document's information, to the document_info list
            self.document_info.insert(doc_id, soup)
            self.r_server.sadd('doc_index', soup)

            self._curr_depth = depth_ + 1
            self._curr_url = url
            self._curr_doc_id = doc_id
            self._font_size = 0
            self._curr_words = []
            self._index_document(soup)
            self._add_words_to_document()
            #print " url=" + repr(self._curr_url)
        except Exception as e:
            #print e
            pass
        finally:
            if socket:
                socket.close()
            db.close()

def loadCard(self, filename):
    """Load a card from disk"""
    if self.password is None:
        self.password = getpass.getpass("Please enter your password:")
    db = anydbm.open(filename, "r")
    try:
        serializedMF = read_protected_string(db["mf"], self.password)
        serializedSAM = read_protected_string(db["sam"], self.password)
        self.type = db["type"]
    finally:
        db.close()
    self.sam = loads(serializedSAM)
    self.mf = loads(serializedMF)

def start(startid=17000):
    # Set the search starting point to the specified value
    try:
        if request.method == 'POST':
            startid = int(request.form['startid'])
        db = dbm.open('datafile', 'w')
        db['startId'] = str(startid)
        db.close()
        msg = u'Search start ID set to: %d' % startid
        menulog.info(msg)
        return msg
    except (IOError, KeyError):
        msg = u'Failed to read cache/POST parameters'
        menulog.info(msg)
        return msg

class AnyDBStorage(object):

    def __init__(self, path, mode='rw'):
        if not os.path.exists(path):
            assert 'w' in mode
            try:
                anydbm.open(path, 'n').close()
            except Exception, e:
                raise Exception(
                    "Unable to create new resource DB at <%s>: %s" % (path, e))
        try:
            Params.log("Opening %s mode=%s" % (path, mode))
            self.__be = anydbm.open(path, mode)
        except anydbm.error, e:
            raise Exception("Unable to access resource DB at <%s>: %s" % (path, e))

def expire_db(db_path, lock):
    remove = []
    with lock:
        db = anydbm.open(os.path.join(self.root, db_path), 'c')
        # The database returned by anydbm is guaranteed to have a
        # .keys() method, but not necessarily .(iter)items().
        for key in db.keys():
            if int(time.time() - float(db[key])) >= expiry_time:
                # Rounding to the nearest int avoids an issue when we
                # call prune(0) *immediately* after an insertion and
                # might get hit by floating point weirdness.
                remove.append(key)
        log.info("Expiring %d events from %s" % (len(remove), db_path))
        for key in remove:
            del db[key]
        db.close()

def makedbm(name, replaceCommas=0):
    dbm = anydbm.open('tmp/configfiles/%s.dat' % name, 'c')
    file = open('tmp/configfiles/%s' % name)
    line = file.readline()
    while line:
        parts = line.split(':', 1)
        if len(parts) == 1:
            key = parts[0].strip()
            value = '1'
        else:
            key, value = [x.strip() for x in parts]
        if replaceCommas:
            value = value.replace(',', '\n') + '\n'
        dbm[key] = value
        line = file.readline()

def restoreModifications(rootPath, moduleName):
    #printlC('white', '', '[log] Start restoring modifications')
    changeDict = {}
    tempFolder = TEMP_FOLDER + moduleName
    flDB = anydbm.open(tempFolder + '\\filelist.pdb', 'r')
    for item in flDB:
        changeDict[item] = flDB[item]
    flDB.close()
    # restore modifications
    for (root, dirs, files) in os.walk(tempFolder, topdown=False):
        for name in files:
            if changeDict.has_key(name):
                runCmd('cp ' + tempFolder + '\\' + name + ' ' + changeDict[name])

def UnEncryptUserCredentials(FileName, Key):
    UserDetails = None
    try:
        f = anydbm.open(FileName + '.db', 'c')
        UserName = Key.decrypt(f.get('UserName', None))
        Password = Key.decrypt(f.get('Password', None))
        f.close()
        UserDetails = (UserName, Password)
    except:
        print "Error occurred while decrypting data"
    return UserDetails

def everything(self):
    with self.lock:
        try:
            with closing(dbm.open(self.dbmfile)) as db:
                result = {}
                if hasattr(db, "items"):
                    for key, value in db.items():
                        result[key.decode("utf-8")] = value.decode("utf-8")
                else:
                    for key in db.keys():
                        result[key.decode("utf-8")] = db[key].decode("utf-8")
                return result
        except dbm.error as e:
            raise NamingError("dbm error in everything: " + str(e))

def __init__(self, use_dbm=False):
    self.use_dbm = use_dbm
    if use_dbm:
        self.temp_dir = tempfile.mkdtemp()
        self.OIDMAP = dbm.open(os.path.join(self.temp_dir, 'oidmap.db'), 'nf')
        self.USEDMAP = dbm.open(os.path.join(self.temp_dir, 'usedmap.db'), 'nf')
    else:
        self.OIDMAP = {}
        self.USEDMAP = {}
    self.TYPEMAP = {}
    self.TYPESIZE = {}
    self.TIDS = 0
    self.OIDS = 0
    self.DBYTES = 0
    self.COIDS = 0
    self.CBYTES = 0
    self.FOIDS = 0
    self.FBYTES = 0
    self.COIDSMAP = {}
    self.CBYTESMAP = {}
    self.FOIDSMAP = {}
    self.FBYTESMAP = {}

def lose():
    # LOSE
    game_window.unbind("<Button 1>")
    game_window.config(cursor='arrow')
    timer_box.delete(0, END)
    timer_box.insert(0, 'BOOM!')
    timer_box.config(bg='red', fg='black')
    status_box.delete(0, END)
    status_box.insert(0, 'BOOM!')
    status_box.config(bg='red')
    hint_box.delete(0, END)
    hint_box.insert(0, 'BOOM!')
    hint_box.config(bg='red')
    hints_remaining_box.delete(0, END)
    hints_remaining_box.insert(0, 'BOOM!')
    hints_remaining_box.config(bg='red')
    pause_button.config(state=DISABLED)
    hint_button.config(state=DISABLED)
    game_window.create_image(350, 225, image=explode, anchor=CENTER)

    losers_screen = Toplevel()
    losers_screen.title("Loser's Screen")
    losers_or_winners_screen_canvas = Canvas(losers_screen, width=600, height=300,
                                             bg="black", cursor='pirate')
    losers_or_winners_screen_canvas.grid(row=0, columnspan=5)
    losers_or_winners_screen_canvas.create_image(20, 15, image=game_over, anchor=NW)
    home2_button = Button(losers_screen, text="RETURN HOME", bg='blue',
                          cursor='X_cursor', command=return_home)
    home2_button.grid(column=2)
    home2_button.config(width=50)

    # Get the name of the current player
    db = anydbm.open("data/current_player.db", "c")
    current_player_name = db["name"]
    db.close()

    # Change his/her stat
    player_database = pickle.load(open("data/player_database.p", "rb"))
    player_database[current_player_name][6] = "L"
    pickle.dump(player_database, open("data/player_database.p", "wb"))

def index_databases(dbs, args):
    ext = os.path.splitext(dbs[0])[1]
    DBM_name = '/data/l_wiss01/database/all-fasta-records.dbm' + ext
    if file_exists(DBM_name) and not args.has_key('indexdb'):
        return DBM_name
    print "creating DBM:", DBM_name
    DBM = anydbm.open(DBM_name, 'c')
    for db in dbs:
        print "-> adding db:", db
        handle = open(db)
        for seq_record in SeqIO.parse(handle, "fasta"):
            DBM[seq_record.id] = seq_record.seq.tostring()
        handle.close()
    DBM.close()
    print "DONE. indexed database:", DBM_name
    return DBM_name

def anydbm_fetch(limit):
    path = "/tmp/test_py_benchmark_%s.dbm" % limit
    db = anydbm.open(path, 'r')
    for i in range(0, limit):
        k = str(random.randrange(0, limit - 1))
        try:
            v = db[k]
            if len(v) < 1:
                return False
        except:
            return False
    db.close()
    return True

def get_rating_dict_func(self):
    """
    Import Rating Database button command.
    Opens file dialogue and calls db loading functions.
    :return: None
    """
    fname = tkFileDialog.askopenfilename(filetypes=(("Template files", "*.db"),
                                                    ("All files", "*")))
    try:
        self.users_rating_dict = {}
        self.cafe_rating_db = anydbm.open(fname, "c")
        for i, j in self.cafe_rating_db.items():
            self.users_rating_dict[i] = pickle.loads(j)
    except:
        raise Exception('Please select a file.')

def check_event(self, event):
    """
    Returns True if event is unseen (and hence good to forward),
    False otherwise.
    """
    try:
        db_path, key = self._get_event_details(event)
    except Exception as e:
        log.warn("Unparseable IVOID; failing eventdb lookup")
    else:
        with self.databases[db_path]:  # Acquire lock
            with closing(anydbm.open(os.path.join(self.root, db_path), 'c')) as db:
                if key not in db:
                    db[key] = str(time.time())
                    return True
    return False

def main():
    db = anydbm.open("bikes.db", 'r')
    sortedKeys = db.keys()
    sortedKeys.sort()
    oldTime = None
    cumulative = {}
    print time.ctime(int(sortedKeys[0]))
    print time.ctime(int(sortedKeys[len(sortedKeys) - 1]))
    count = 0
    for timeKey in sortedKeys:
        count += 1
        print float(count) / len(sortedKeys)
        data = pickle.loads(db[timeKey])
        cumulative = checkForLowStation(data, 1, cumulative)
    printWarnings(cumulative)

def cache_genomes(file, recreate=0):
    outdbm = file + ".dbm"
    if os.path.exists(outdbm) and os.path.getsize(outdbm) > 0 and not recreate:
        return outdbm
    DBM = anydbm.open(outdbm, 'c')
    fo = open(file)
    key = ""
    for line in fo:
        line = line.strip()
        if line.startswith(">"):
            key = re.match(FASTAID_REGEX, line).group(1)
            DBM[key] = ""
        else:
            DBM[key] += line
    DBM.close()
    return outdbm

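# FASTAID_REGEX is defined elsewhere in the original module.  A plausible
# stand-in for headers like ">seq1 description" would be the following
# (an assumption, shown only to make the snippet above testable):
import re

FASTAID_REGEX = r'>(\S+)'
print(re.match(FASTAID_REGEX, ">seq1 some description").group(1))  # seq1
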
def main(session):
    done = anydbm.open('.bills', 'c')
    markdone = makemarkdone(done)
    for fn in sorted(glob.glob(GOVTRACK_CRAWL + '/us/%s/bills/*.xml' % session)):
        print >> sys.stderr, '\r %-25s' % fn,
        sys.stderr.flush()
        markdone(loadbill)(fn)
    for fn in sorted(glob.glob(GOVTRACK_CRAWL + '/us/%s/rolls/*.xml' % session)):
        print >> sys.stderr, '\r %-25s' % fn,
        sys.stderr.flush()
        markdone(loadroll)(fn)
    print >> sys.stderr, '\r' + ' ' * 72

def create():
    from planet import logger as log

    cache = config.cache_directory()
    index = os.path.join(cache, 'index')
    if not os.path.exists(index):
        os.makedirs(index)
    import anydbm
    index = anydbm.open(filename(index, 'id'), 'c')

    try:
        import libxml2
    except:
        libxml2 = False
        from xml.dom import minidom

    for file in glob(cache + "/*"):
        if os.path.isdir(file):
            continue
        elif libxml2:
            try:
                doc = libxml2.parseFile(file)
                ctxt = doc.xpathNewContext()
                ctxt.xpathRegisterNs('atom', 'http://www.w3.org/2005/Atom')
                entry = ctxt.xpathEval('/atom:entry/atom:id')
                source = ctxt.xpathEval('/atom:entry/atom:source/atom:id')
                if entry and source:
                    index[filename('', entry[0].content)] = source[0].content
                doc.freeDoc()
            except:
                log.error(file)
        else:
            try:
                doc = minidom.parse(file)
                doc.normalize()
                ids = doc.getElementsByTagName('id')
                entry = [e for e in ids if e.parentNode.nodeName == 'entry']
                source = [e for e in ids if e.parentNode.nodeName == 'source']
                if entry and source:
                    index[filename('', entry[0].childNodes[0].nodeValue)] = \
                        source[0].childNodes[0].nodeValue
                doc.freeDoc()
            except:
                log.error(file)

    log.info(str(len(index.keys())) + " entries indexed")
    index.close()
    return open()