def loadmodel(self, string):  # {{{
    #Get model path
    if not isinstance(string, (str, unicode)):
        raise TypeError("argument provided is not a string")
    path1 = os.path.join(self.repository, self.prefix + string + '.python')
    path2 = os.path.join(self.repository, string)

    #figure out if the model is there, otherwise, we have to use the default path supplied by user.
    if whichdb(path1):
        md = loadmodel(path1)
        return md
    elif whichdb(path2):
        md = loadmodel(path2)
        return md

    #If we are here, the model has not been found. Try trunk prefix if provided
    if self.trunkprefix:
        path2 = os.path.join(self.repository, self.trunkprefix + string)
        if not os.path.exists(path2):
            raise IOError("Could find neither '%s' nor '%s'" % (path1, path2))
        else:
            print "--> Branching '%s' from trunk '%s'" % (self.prefix, self.trunkprefix)
            md = loadmodel(path2)
            return md
    else:
        raise IOError("Could not find '%s'" % path1)

def main(): print "Pickle is available." db = dumbdbm.open("dumbdb", "c") db["1"] = "1" db.close() dbstr = whichdb.whichdb("dumbdb") if dbstr: print "Dumbdbm is available." else: print "Dumbdbm is not available." db = dbhash.open("dbhash", "c") db["1"] = "1" db.close() dbstr = whichdb.whichdb("dbhash") if dbstr == "dbhash": print "Dbhash is available." else: print "Dbhash is not available." if bsddb is None: dbstr = "" else: db = bsddb.hashopen("bsddb3", "c") db["1"] = "1" db.close() dbstr = whichdb.whichdb("bsddb3") if dbstr == "dbhash": print "Bsddb[3] is available." else: print "Bsddb[3] is not available." print hammie = get_pathname_option("Storage", "persistent_storage_file") use_dbm = options["Storage", "persistent_use_database"] if not use_dbm: print "Your storage %s is a: pickle" % (hammie,) return if not os.path.exists(hammie): print "Your storage file does not exist yet." return db_type = whichdb.whichdb(hammie) if db_type == "dbhash": # could be dbhash or bsddb3 # only bsddb3 has a __version__ attribute - old bsddb module does not if hasattr(bsddb, '__version__'): try: db = bsddb.hashopen(hammie, "r") except bsddb.error: pass else: db.close() print "Your storage", hammie, "is a: bsddb[3]" return elif db_type is None: print "Your storage %s is unreadable." % (hammie,) print "Your storage %s is a: %s" % (hammie, db_type)
def _set_vhost_db(self, vhost_db):
    """ validate and set the vhost db"""
    from whichdb import whichdb
    if vhost_db == '':
        PropertyManager._setPropValue(self, 'vhost_db', '')
    elif whichdb(vhost_db) is None or whichdb(vhost_db) == '':
        raise CheezeError, "vhost_db must point to a valid dbm file"
    else:
        clean_path = self._scrub_path(vhost_db)
        PropertyManager._setPropValue(self, 'vhost_db', clean_path)

def test_whichdb_name(self, name=name, mod=mod):
    # Check whether whichdb correctly guesses module name
    # for databases opened with module mod.
    # Try with empty files first
    f = mod.open(_fname, 'c')
    f.close()
    self.assertEqual(name, whichdb.whichdb(_fname))
    # Now add a key
    f = mod.open(_fname, 'w')
    f["1"] = "1"
    f.close()
    self.assertEqual(name, whichdb.whichdb(_fname))

def loadmodel(path):
    """
    LOADMODEL - load a model using the built-in load module.

    Check that the model prototype has not changed; if it has, adapt to the
    new model prototype.

    Usage:
        md = loadmodel(path)
    """
    #check existence of database (independent of file extension!)
    if whichdb(path):
        #do nothing
        pass
    else:
        try:
            NCFile = Dataset(path, mode='r')
            NCFile.close()
        except RuntimeError:
            raise IOError("loadmodel error message: file '%s' does not exist" % path)

    #recover model on file and name it md
    struc = loadvars(path)
    name = [key for key in struc.iterkeys()]
    if len(name) > 1:
        raise IOError("loadmodel error message: file '%s' contains several variables. Only one model should be present." % path)
    md = struc[name[0]]
    return md

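# A minimal usage sketch (an assumption, not part of the original sources):
# the function above is typically imported from an ISSM-style 'loadmodel'
# module and called with the path of a previously saved model file. The
# module layout and the file name 'mymodel.python' are hypothetical.
from loadmodel import loadmodel

md = loadmodel('mymodel.python')
print md
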
def main(): """Main function""" # WRITE ####### db = dbm.open('foo_dbm', 'c') db['one'] = 'un' db['two'] = 'dos' db['three'] = 'tres' db.close() # WHICH DBM ### print "whichdb:", whichdb.whichdb('foo_dbm') print # READ ######## db = dbm.open('foo_dbm', 'r') for k in db.keys(): print k, ':', db[k] db.close()
def main(): """Main function""" # WRITE ####### db = gdbm.open('foo_gdbm', 'c') db['one'] = 'un' db['two'] = 'dos' db['three'] = 'tres' db.close() # WHICH DBM ### print "whichdb:", whichdb.whichdb('foo_gdbm') print # READ ######## db = gdbm.open('foo_gdbm', 'r') # Iterate loop: first method (common to any dbm module) for k in db.keys(): print k, ':', db[k] # Iterate loop: second method (more efficient) # The following code prints every key in the database db, without having to create a list in memory that contains them all. k = db.firstkey() while k != None: print k, ':', db[k] k = db.nextkey(k) db.close()
def get_comment_db(songbook_path):
    # check if this is the All songs songbook, if so don't do anything
    if songbook_path == c.ALL_SONGS_PATH:
        return dict()

    comment_path = posixpath.splitext(songbook_path)[0] + '.comment'

    # check if this is an old comment file -- we now use dumbdbm for portability
    # upgrade done automatically TODO: this could be removed in the future
    if whichdb.whichdb(comment_path) != 'dumbdbm':
        # get a copy of the comments
        old_shelf = shelve.open(comment_path)
        comments = dict(old_shelf)
        old_shelf.close()

        # remove the old database file
        files = glob.glob(comment_path + '*')
        for f in files:
            os.remove(f)

        # write comments into dumbdbm shelf
        new_shelf = shelve.Shelf(dumbdbm.open(comment_path))
        for k in comments.keys():
            new_shelf[k] = comments[k]
        new_shelf.close()  # close to make sure .comment file saved

    # now assured of a dumbdbm shelf
    return shelve.Shelf(dumbdbm.open(comment_path))

def open(file, flag='r', mode=0666):
    # guess the type of an existing database
    from whichdb import whichdb
    result = whichdb(file)
    if result == "":
        # Check if we have a 0-length file
        statinfo = os.stat(file)
        if statinfo.st_size == 0:
            os.remove(file)
            result = None
    if result is None:
        # db doesn't exist
        if 'c' in flag or 'n' in flag:
            # file doesn't exist and the new
            # flag was used so use default type
            mod = _defaultmod
        else:
            raise error, "need 'c' or 'n' flag to open new db"
    elif result == "":
        # db type cannot be determined
        raise error, "db type could not be determined: %s" % file
    else:
        mod = __import__(result)
    return mod.open(file, flag, mode)

def isDbUpToDate(databaseFile):
    """ Check if index database is up to date """
    upToDate = False
    for dbFile in (databaseFile + os.path.extsep + 'db', databaseFile):
        try:
            if not os.path.exists(dbFile):
                # Database doesn't exist, try next one
                continue
            if not whichdb(dbFile):
                # Not in a readable format
                logger.debug('Database not in a readable format: %s', dbFile)
                break
            # From here, database exists and is readable
            db = dbm.open(dbFile)
            sourceFile = db["__source_path__"]
            if not os.path.exists(sourceFile):
                # Source file doesn't exist any more
                break
            textFileSum = md5sum(sourceFile)
            if textFileSum != db["__source_md5__"]:
                logger.debug('Source file checksum differs from the one used to build the database: %s', sourceFile)
                db.close()
                break
            if not db["__version__"] == Database.version:
                logger.debug('Database version "%s" doesn\'t match this version "%s"', db["__version__"], Database.version)
                db.close()
                break
            db.close()
            # Everything is ok with the existing database
            upToDate = True
            break
        except Exception:
            pass
    return upToDate

def main(): """Main function""" # WRITE ####### db = anydbm.open('foo_anydbm', 'c') db['one'] = 'un' db['two'] = 'dos' db['three'] = 'tres' db.close() # WHICH DBM ### print "whichdb:", whichdb.whichdb('foo_anydbm') print # READ ######## db = anydbm.open('foo_anydbm', 'r') # Iterate loop: first method (common to any dbm module) for k in db.keys(): print k, ':', db[k] # Iterate loop: second method (only dbhash and dumbdbm supports db.items()) for k, v in db.items(): print k, ':', v db.close()
def open(file, flag='r', mode=0666):
    """Open or create database at path given by *file*.

    Optional argument *flag* can be 'r' (default) for read-only access, 'w'
    for read-write access of an existing database, 'c' for read-write access
    to a new or existing database, and 'n' for read-write access to a new
    database.

    Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it
    only if it doesn't exist; and 'n' always creates a new database.
    """
    # guess the type of an existing database
    from whichdb import whichdb
    result = whichdb(file)
    if result is None:
        # db doesn't exist
        if 'c' in flag or 'n' in flag:
            # file doesn't exist and the new
            # flag was used so use default type
            mod = _defaultmod
        else:
            raise error, "need 'c' or 'n' flag to open new db"
    elif result == "":
        # db type cannot be determined
        raise error, "db type could not be determined"
    else:
        mod = __import__(result)
    return mod.open(file, flag, mode)

def open(file, flag='r', mode=0666):
    """Open or create database at path given by *file*.

    Optional argument *flag* can be 'r' (default) for read-only access, 'w'
    for read-write access of an existing database, 'c' for read-write access
    to a new or existing database, and 'n' for read-write access to a new
    database.

    Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it
    only if it doesn't exist; and 'n' always creates a new database.
    """
    # guess the type of an existing database
    from whichdb import whichdb
    result = whichdb(file)
    if result is None:
        # db doesn't exist
        if 'c' in flag or 'n' in flag:
            # file doesn't exist and the new
            # flag was used so use default type
            mod = _defaultmod
        else:
            raise error, "need 'c' or 'n' flag to open new db"
    elif result == "":
        # db type cannot be determined
        raise error, "db type could not be determined"
    else:
        mod = __import__(result)
    return mod.open(file, flag, mode)

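# A minimal usage sketch (not from the original sources) for the anydbm-style
# open() above, illustrating the flag semantics described in its docstring:
# 'c' creates the database if it is missing, while 'r' requires it to exist.
# The file name 'example.db' is hypothetical.
import anydbm
from whichdb import whichdb

db = anydbm.open('example.db', 'c')   # created with the default dbm module
db['key'] = 'value'
db.close()

print whichdb('example.db')           # reports which module wrote the file

db = anydbm.open('example.db', 'r')   # 'r' succeeds now that the file exists
print db['key']
db.close()
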
def main(): """Main function""" # WRITE ####### db = dumbdbm.open('foo_dumbdbm', 'c') db['one'] = 'un' db['two'] = 'dos' db['three'] = 'tres' db.close() # WHICH DBM ### print "whichdb:", whichdb.whichdb('foo_dumbdbm') print # READ ######## db = dumbdbm.open('foo_dumbdbm', 'r') # Iterate loop: first method (common to any dbm module) for k in db.keys(): print k, ':', db[k] # Iterate loop: second method (dumbdbm supports db.items()) for k, v in db.items(): print k, ':', v db.close()
def showStatus(self):
    try:
        w = whichdb.whichdb(self.path)
    except Exception:
        w = None
    g.es('whichdb is %s at %s' % (w, self.path))

def showStatus(self):
    try:
        w = whichdb.whichdb(self.path)
    except Exception:
        w = None
    g.es('whichdb is %s at %s' % (w, self.path))

def _find_shelves(shelve_dir):
    """
    Return the location of CMIP5 shelve files as a dictionary.
    """
    _check_shelve_version(shelve_dir)

    # Locations of shelve files
    template = os.path.join(shelve_dir, TEMPLATE_SHELVE)
    stdo = os.path.join(shelve_dir, STDO_SHELVE)
    stdo_mip = os.path.join(shelve_dir, STDO_MIP_SHELVE)

    assert whichdb(template)
    assert whichdb(stdo)
    assert whichdb(stdo_mip)

    return dict(template=template, stdo=stdo, stdo_mip=stdo_mip)

def test_anydbm_create(self):
    # Verify that anydbm.open does *not* create a bsddb185 file
    tmpdir = tempfile.mkdtemp()
    try:
        dbfile = os.path.join(tmpdir, "foo.db")
        anydbm.open(dbfile, "c").close()
        ftype = whichdb.whichdb(dbfile)
        self.assertNotEqual(ftype, "bsddb185")
    finally:
        shutil.rmtree(tmpdir)

def validate_arguments(args):
    """
    Validates passed arguments, throws an exception if something is wrong
    """
    if not path.exists(args.config):
        raise ValueError("Can not find application config: %s" % args.config)
    if not whichdb(args.session):
        raise ValueError("Can not find session storage file (%s), please, run \"install.py\" first" % args.session)
    if not path.exists(args.file):
        raise ValueError("Can not find file \"%s\" for backup" % args.file)

def open_db(self, fname, flag):
    """Open the database, return a dictionary like object"""
    shelf = None
    # Some shelve implementations cannot be reopened
    if not os.path.isfile(fname):
        shelf = shelve.open(filename=fname, flag=flag, protocol=2)
        shelf.close()
    dbtype = whichdb.whichdb(fname)
    #log.info('database type: %s', dbtype)
    shelf = shelve.open(filename=fname, flag=flag, protocol=2)
    return shelf

def loadGiTaxBdb(self, inpFile):
    import anydbm, whichdb
    print whichdb.whichdb('/export/atovtchi/taxa.db')
    gi2taxa = anydbm.open('/export/atovtchi/taxa.db', 'c')
    inp = file(inpFile, 'r')
    iRow = 0
    buff = {}
    for line in inp:
        (gi, taxid) = line.split()
        buff[gi] = taxid
        if iRow % 100000 == 0:
            print gi, taxid, iRow
            gi2taxa.update(buff)
            buff = {}
        iRow += 1
    inp.close()
    taxaCnt = {}
    for (gi, taxid) in gi2taxa.iteritems():
        taxaCnt[taxid] = taxaCnt.get(taxid, 0) + 1
    print sorted(((cnt, taxid) for (taxid, cnt) in taxaCnt.iteritems()))

class PoolFile(object):
    """
    A simple class to retrieve information about the content of a POOL file.
    It should be abstracted from the underlying technology used to create
    this POOL file (Db, ROOT,...).
    Right now, we are using the easy and lossy solution: going straight to
    the ROOT 'API'.
    """

    def __init__(self, fileName, verbose=True):
        object.__init__(self)
        self._fileInfos = None
        self.keys = None
        self.dataHeader = PoolRecord("DataHeader", 0, 0, 0,
                                     nEntries=0,
                                     dirType="T")
        self.data = []
        self.verbose = verbose

        # get the "final" file name (handles all kind of protocols)
        try:
            import PyUtils.AthFile as af
            protocol, fileName = af.server.fname(fileName)
        except Exception, err:
            print "## warning: problem opening PoolFileCatalog:\n%s" % err
            import traceback
            traceback.print_exc(err)
            pass

        self.poolFile = None
        dbFileName = whichdb.whichdb(fileName)
        if not dbFileName in (None, ''):
            if self.verbose == True:
                print "## opening file [%s]..." % str(fileName)
            db = shelve.open(fileName, 'r')
            if self.verbose == True:
                print "## opening file [OK]"
            report = db['report']
            self._fileInfos = report['fileInfos']
            self.dataHeader = report['dataHeader']
            self.data = report['data']
        else:
            import PyUtils.Helpers as _H
            projects = 'AtlasCore' if PoolOpts.FAST_MODE else None
            with _H.restricted_ldenviron(projects=projects):
                if self.verbose == True:
                    print "## opening file [%s]..." % str(fileName)
                self.__openPoolFile(fileName)
                if self.verbose == True:
                    print "## opening file [OK]"
                self.__processFile()
        return

def init_cache(self):
    if self.new_style_cache:
        subliminal.region.configure('subzero.cache.file',
                                    expiration_time=datetime.timedelta(days=180),
                                    arguments={'appname': "sz_cache",
                                               'app_cache_dir': self.data_path},
                                    replace_existing_backend=True)
        Log.Info("Using new style file based cache!")
        return

    names = ['dbhash', 'gdbm', 'dbm']
    dbfn = None
    self.dbm_supported = False

    # try importing dbm modules
    if Core.runtime.os != "Windows":
        impawrt = None
        try:
            impawrt = getattr(sys.modules['__main__'], "__builtins__").get("__import__")
        except:
            pass

        if impawrt:
            for name in names:
                try:
                    impawrt(name)
                except:
                    continue
                if not self.dbm_supported:
                    self.dbm_supported = name
                    break

            if self.dbm_supported:
                # anydbm checks; try guessing the format and importing the correct module
                dbfn = os.path.join(config.data_items_path, 'subzero.dbm')
                db_which = whichdb(dbfn)
                if db_which is not None and db_which != "":
                    try:
                        impawrt(db_which)
                    except ImportError:
                        self.dbm_supported = False

    if self.dbm_supported:
        try:
            subliminal.region.configure('dogpile.cache.dbm',
                                        expiration_time=datetime.timedelta(days=30),
                                        arguments={'filename': dbfn,
                                                   'lock_factory': MutexLock},
                                        replace_existing_backend=True)
            Log.Info("Using file based cache!")
            return
        except:
            self.dbm_supported = False

    Log.Warn("Not using file based cache!")
    subliminal.region.configure('dogpile.cache.memory', replace_existing_backend=True)

def execute(self, opt_values, pos_args):
    dep_file = opt_values['dep_file']
    db_type = whichdb(dep_file)
    print("DBM type is '%s'" % db_type)
    if db_type in ('dbm', 'dbm.ndbm'):  # pragma: no cover
        raise InvalidCommand('ndbm does not support iteration of elements')
    data = dbm.open(dep_file)
    for key, value_str in dbm_iter(data):
        value_dict = json.loads(value_str.decode('utf-8'))
        value_fmt = pprint.pformat(value_dict, indent=4, width=100)
        print("{key} -> {value}".format(key=key, value=value_fmt))

def open(db_name, mode):
    if os.path.exists(db_name):
        # let the file tell us what db to use
        dbm_type = whichdb.whichdb(db_name)
    else:
        # fresh file - open with what the user specified
        dbm_type = options["globals", "dbm_type"].lower()
    f = open_funcs.get(dbm_type)
    if f is None:
        raise error("Unknown dbm type: %s" % dbm_type)
    return f(db_name, mode)

def main():
    if len(sys.argv) != 2:
        sys.stderr.write("usage: {} DBM_FILE\n".format(sys.argv[0]))
        exit(1)
    filename = sys.argv[1]
    which = whichdb(filename)
    # if whichdb returns None and the file does not exist, print usage line
    if which is None and not os.path.exists(sys.argv[1]):
        sys.stderr.write('No such file {}\n\n'.format(sys.argv[1]))
        sys.stderr.write("usage: {} DBM_FILE\n".format(sys.argv[0]))
        exit(1)
    # covers case where an ndbm is checked with its extension & identified incorrectly
    elif 'bsd' in which:
        correct_file = filename.split(".db")[0]
        correct_which = whichdb(correct_file)
        if correct_which in ('dbm', 'dbm.ndbm'):
            filename = correct_file
            which = correct_which
    elif which == '':
        sys.stderr.write("{} is an unrecognized database type\n".format(sys.argv[1]))
        sys.stderr.write("Try the file again by removing the extension\n")
        exit(1)
    try:
        out = sys.stdout.buffer
    except AttributeError:
        out = sys.stdout
    out.write(filename.encode('UTF-8') + b' is a ' + which.encode('UTF-8') + b' db\n')
    db = dbm.open(filename, 'r')
    for key in db.keys():
        out.write(key + b":" + db[key] + b"\n")

def info(ctx, data):
    ''' Display summary information about the DB '''
    print('DB Type:', whichdb(ctx.parent.params['db']))
    print('Version:', data.get('_dbversion'))
    print('Name   :', data.get('name'))
    print('Key    :', data.get('_key'))
    print('Count  :', len(data))
    print()
    print('Location(s):')
    pprint.pprint(data.get('_serial_libraries'))

def open(file, flag='r', mode=438):
    from whichdb import whichdb
    result = whichdb(file)
    if result is None:
        if 'c' in flag or 'n' in flag:
            mod = _defaultmod
        else:
            raise error, "need 'c' or 'n' flag to open new db"
    elif result == '':
        raise error, 'db type could not be determined'
    else:
        mod = __import__(result)
    return mod.open(file, flag, mode)

def open_db(filename):
    """given a filename string, returns UndoDB or OldUndoDB
    if the file doesn't exist, this uses UndoDB by default
    otherwise, detect OldUndoDB if xdelta is installed"""

    if BIN.can_execute(BIN["xdelta"]):
        db = whichdb.whichdb(filename)
        if (db is not None) and (db != ''):
            return OldUndoDB(filename)
        else:
            return UndoDB(filename)
    else:
        return UndoDB(filename)

def open(db_name, mode):
    if os.path.exists(db_name) and \
       options.default("globals", "dbm_type") != \
       options["globals", "dbm_type"]:
        dbm_type = whichdb.whichdb(db_name)
        if sys.platform == "win32" and sys.version_info < (2, 3) and \
           dbm_type == "dbhash":
            dbm_type = "db3hash"
    else:
        dbm_type = options["globals", "dbm_type"].lower()
    f = open_funcs.get(dbm_type)
    if f is None:
        raise error("Unknown dbm type: %s" % dbm_type)
    return f(db_name, mode)

def dbm_cache_type(dbmfile):
    global dbm_types
    if dbm_types.has_key(dbmfile):
        return dbm_types[dbmfile]
    module = whichdb.whichdb(dbmfile)
    if module:
        dbm_type = __import__(module)
        dbm_types[dbmfile] = dbm_type
        return dbm_type
    else:
        # this is a new file
        return anydbm

def perform_maintenance(self):
    import whichdb
    try:
        print("performing maintenance on %d database chunks" % len(self._data))
        for db in self._data:
            path = db[_PATHKEY]
            db_type = whichdb.whichdb(path)
            print("checking %s, type: %s" % (path, db_type))
            try:
                db.reorganize()
            except gdbm.error:
                print("error: failed to reorganize db chunk")
                continue
    finally:
        self.close()

def open(db_name, mode):
    if os.path.exists(db_name):
        # let the file tell us what db to use
        dbm_type = whichdb.whichdb(db_name)
        # if we are using Windows and Python < 2.3, then we need to use
        # db3hash, not dbhash.
        if sys.platform == "win32" and sys.version_info < (2, 3) and \
           dbm_type == "dbhash":
            dbm_type = "db3hash"
    else:
        # fresh file - open with what the user specified
        dbm_type = options["globals", "dbm_type"].lower()
    f = open_funcs.get(dbm_type)
    if f is None:
        raise error("Unknown dbm type: %s" % dbm_type)
    return f(db_name, mode)

def __init__(self, *name):
    filename = get_settings_path(*name)
    # Dirty. Try to avoid locking of gdbm files
    result = whichdb.whichdb(filename)
    if result is None:
        result = anydbm._defaultmod.__name__
    elif result == "":
        raise Exception("db type of %s could not be determined" % filename)
    if result == "gdbm":
        flags = "cu"
    else:
        flags = "c"
    self.db = anydbm.open(filename, flags)

def cache_db_type(self, path):
    ''' determine which DB wrote the class file, and cache it as an
        attribute of __class__ (to allow for subclassed DBs to be
        different sorts)
    '''
    db_type = ''
    if os.path.exists(path):
        db_type = whichdb.whichdb(path)
        if not db_type:
            raise hyperdb.DatabaseError, \
                _("Couldn't identify database type")
    elif os.path.exists(path + '.db'):
        # if the path ends in '.db', it's a dbm database, whether
        # anydbm says it's dbhash or not!
        db_type = 'dbm'
    self.__class__._db_type = db_type

def cache_db_type(self, path):
    ''' determine which DB wrote the class file, and cache it as an
        attribute of __class__ (to allow for subclassed DBs to be
        different sorts)
    '''
    db_type = ''
    if os.path.exists(path):
        db_type = whichdb.whichdb(path)
        if not db_type:
            raise hyperdb.DatabaseError, \
                _("Couldn't identify database type")
    elif os.path.exists(path + '.db'):
        # if the path ends in '.db', it's a dbm database, whether
        # anydbm says it's dbhash or not!
        db_type = 'dbm'
    self.__class__._db_type = db_type

def open(file, flag='r', mode=0666):
    # guess the type of an existing database
    from whichdb import whichdb
    result = whichdb(file)
    if result is None:
        # db doesn't exist
        if 'c' in flag or 'n' in flag:
            # file doesn't exist and the new
            # flag was used so use default type
            mod = _defaultmod
        else:
            raise error, "need 'c' or 'n' flag to open new db"
    elif result == "":
        # db type cannot be determined
        raise error, "db type could not be determined"
    else:
        mod = __import__(result)
    return mod.open(file, flag, mode)

def open(db_name, mode):
    if os.path.exists(db_name) and \
       options.default("globals", "dbm_type") != \
       options["globals", "dbm_type"]:
        # let the file tell us what db to use
        dbm_type = whichdb.whichdb(db_name)
        # if we are using Windows and Python < 2.3, then we need to use
        # db3hash, not dbhash.
        if (sys.platform == "win32" and sys.version_info < (2, 3) and
                dbm_type == "dbhash"):
            dbm_type = "db3hash"
    else:
        # fresh file or overridden - open with what the user specified
        dbm_type = options["globals", "dbm_type"].lower()
    f = open_funcs.get(dbm_type)
    if f is None:
        raise error("Unknown dbm type: %s" % dbm_type)
    return f(db_name, mode)

def initialize_database(writeable=False):
    # Get the db file name where qcd info is/will_be stored
    file = mapDbFileNameForHost(qcdCmdParser.getOption("file").value)

    if not writeable and not isfile(file):
        print >> sys.stderr, "Database is empty! Try adding something!"
        sys.exit(1)

    # changing the global dbType variable, it will contain the information
    # retrieved from whichdb, which basically says which db implementation was
    # used to create the db file.
    global dbType
    dbType = whichdb(file)

    # Open the db
    if writeable:
        return anydbm.open(file, 'c')
    else:
        return anydbm.open(file, 'r')

def initialize_database(writeable=False):
    # Get the db file name where qcd info is/will_be stored
    file = mapDbFileNameForHost(qcdCmdParser.getOption("file").value)

    if not writeable and not isfile(file):
        print >> sys.stderr, "Database is empty! Try adding something!"
        sys.exit(1)

    # changing the global dbType variable, it will contain the information
    # retrieved from whichdb, which basically says which db implementation was
    # used to create the db file.
    global dbType
    dbType = whichdb(file)

    # Open the db
    if writeable:
        return anydbm.open(file, 'c')
    else:
        return anydbm.open(file, 'r')

def __init__(self, fileName, verbose=True):
    object.__init__(self)
    self._fileInfos = None
    self.keys = None
    self.dataHeader = PoolRecord("DataHeader", 0, 0, 0,
                                 nEntries=0,
                                 dirType="T")
    self.data = []
    self.verbose = verbose

    # get the "final" file name (handles all kind of protocols)
    try:
        import PyUtils.AthFile as af
        protocol, fileName = af.server.fname(fileName)
    except Exception as err:
        print("## warning: problem opening PoolFileCatalog:\n%s" % err)
        import traceback
        traceback.print_exc(err)
        pass

    self.poolFile = None
    dbFileName = whichdb(fileName)
    if dbFileName not in (None, ''):
        if self.verbose is True:
            print("## opening file [%s]..." % str(fileName))
        db = shelve.open(fileName, 'r')
        if self.verbose is True:
            print("## opening file [OK]")
        report = db['report']
        self._fileInfos = report['fileInfos']
        self.dataHeader = report['dataHeader']
        self.data = report['data']
    else:
        if self.verbose is True:
            print("## opening file [%s]..." % str(fileName))
        self.__openPoolFile(fileName)
        if self.verbose is True:
            print("## opening file [OK]")
        self.__processFile()
    return

def open(self):
    self.storagelock.acquire()
    try:
        if self.opened():
            raise TypeError("Storage already opened")
        if not os.path.isdir(self.path):
            # No need to compact newly created DB.
            os.makedirs(self.path)
        dbexisted = whichdb(self.filepath)
        self.database = anydbm.open(self.filepath, "c")
        self.dbmtype = None
        self._opened.set()
        self._closed.clear()
        # Only compact on open if whichdb() indicated an existing DB was
        # opened; if a new DB was created, there's no need to compact.
        if dbexisted and self.COMPACT and self.COMPACT_ON_OPEN:
            self.compact()
    finally:
        self.storagelock.release()

def has_dbhash():
    " returns 1 if has a dbhash database "
    dbflag = 0
    filename = '_mytmpdata63636'
    db = shelve.open(filename)
    # put in some test data (incl. a class object)
    k = 'key1'
    db[k] = [1, 0, 2, 0, 3, 0]
    ac = AC()
    k = 'key2'
    db[k] = ac
    db.close()
    dbtype = whichdb.whichdb(filename)
    if dbtype == 'dbhash':
        dbflag = 1
    os.system("\\rm %s*" % filename)
    return dbflag

def depfile(request):
    if hasattr(request, 'param'):
        dep_class = request.param
    else:
        dep_class = Dependency

    # copied from tempdir plugin
    name = request._pyfuncitem.name
    name = py.std.re.sub("[\W]", "_", name)
    my_tmpdir = request.config._tmpdirhandler.mktemp(name, numbered=True)
    dep_file = dep_class(os.path.join(my_tmpdir.strpath, "testdb"))
    dep_file.whichdb = whichdb(dep_file.name)
    dep_file.name_ext = db_ext.get(dep_file.whichdb, [''])

    def remove_depfile():
        if not dep_file._closed:
            dep_file.close()
        remove_db(dep_file.name)
    request.addfinalizer(remove_depfile)

    return dep_file

def open_shelf(path):
    ''' Opens a python shelf file, used to store various types of metadata '''
    # As of Exaile 4, new DBs will only be created as Berkeley DB Hash databases
    # using either bsddb3 (external) or bsddb (stdlib but sometimes removed).
    # Existing DBs created with other backends will be migrated to Berkeley DB.
    # We do this because BDB is generally considered more performant,
    # and because gdbm currently doesn't work at all in MSYS2.

    # Some DBM modules don't use the path we give them, but rather they have
    # multiple filenames. If the specified path doesn't exist, double check
    # to see if whichdb returns a result before trying to open it with bsddb
    force_migrate = False
    if not os.path.exists(path):
        from whichdb import whichdb
        if whichdb(path) is not None:
            force_migrate = True

    if not force_migrate:
        try:
            db = bsddb.hashopen(path, 'c')
            return shelve.BsdDbShelf(db, protocol=PICKLE_PROTOCOL)
        except bsddb.db.DBInvalidArgError:
            logger.warning("%s was created with an old backend, migrating it", path)
        except Exception:
            raise

    # special case: zero-length file
    if not force_migrate and os.path.getsize(path) == 0:
        os.unlink(path)
    else:
        from xl.migrations.database.to_bsddb import migrate
        migrate(path)

    db = bsddb.hashopen(path, 'c')
    return shelve.BsdDbShelf(db, protocol=PICKLE_PROTOCOL)

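# A brief usage sketch for open_shelf() above (the path 'metadata.db' is
# hypothetical): the returned BsdDbShelf behaves like a persistent dict whose
# values are pickled with PICKLE_PROTOCOL.
shelf = open_shelf('metadata.db')
shelf['tracks'] = {'count': 0}
shelf.close()
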
def main(): """Main function""" # WRITE ####### db = dbhash.open('foo_dbhash', 'c') db['one'] = 'un' db['two'] = 'dos' db['three'] = 'tres' db.close() # WHICH DBM ### print "whichdb:", whichdb.whichdb('foo_dbhash') print # READ ######## db = dbhash.open('foo_dbhash', 'r') # Iterate loop: first method (common to any dbm module) for k in db.keys(): print k, ':', db[k] # Iterate loop: second method (dbhash supports db.items()) for k, v in db.items(): print k, ':', v # Iterate loop: third method (the most efficient) # The following code prints every key in the database db, without having to create a list in memory that contains them all. print db.first() for i in xrange(1, len(db)): print db.next() db.close()
def __init__(self, fileName):
    object.__init__(self)
    self._fileInfos = None
    self.keys = None
    self.dataHeader = None
    # trigger stuff
    self.trigConf = None
    self.l1_global = None
    self.l1_items = []
    self.l2_global = None
    self.l2_chains = []
    self.ef_global = None
    self.ef_chains = []

    self.poolFile = None
    dbFileName = whichdb.whichdb(fileName)
    if dbFileName not in (None, ''):
        print("## opening file [%s]..." % str(fileName))
        db = shelve.open(fileName, 'r')
        print("## opening file [OK]")
        report = db['report']
        self._fileInfos = report['fileInfos']
        self.l1_items = report['l1_data']
        self.l2_chains = report['l2_data']
        self.ef_chains = report['ef_data']
        self.l1_global = report['l1_global']
        self.l2_global = report['l2_global']
        self.ef_global = report['ef_global']
    else:
        print("## opening file [%s]..." % str(fileName))
        self.__openPoolFile(fileName)
        print("## opening file [OK]")
        self.__processFile()
    return