def main(): """Main function""" # WRITE ####### db = dumbdbm.open('foo_dumbdbm', 'c') db['one'] = 'un' db['two'] = 'dos' db['three'] = 'tres' db.close() # WHICH DBM ### print "whichdb:", whichdb.whichdb('foo_dumbdbm') print # READ ######## db = dumbdbm.open('foo_dumbdbm', 'r') # Iterate loop: first method (common to any dbm module) for k in db.keys(): print k, ':', db[k] # Iterate loop: second method (dumbdbm supports db.items()) for k, v in db.items(): print k, ':', v db.close()
def test_eval(self):
    """A hostile index file must not be eval'd when the db is opened.

    Writes an index line whose value expression would print 'Hacked!'
    if evaluated; opening must raise ValueError and print nothing.
    """
    index_path = _fname + '.dir'
    with open(index_path, 'w') as stream:
        stream.write("str(__import__('sys').stdout.write('Hacked!')), 0\n")
    with test_support.captured_stdout() as stdout, \
            self.assertRaises(ValueError):
        dumbdbm.open(_fname).close()
    self.assertEqual(stdout.getvalue(), '')
def test_random(self):
    """Stress-test with random inserts/deletes mirrored in a plain dict."""
    import random
    mirror = {}  # mirrors what the database should contain
    for dummy in range(5):
        db = dumbdbm.open(_fname)
        for dummy in range(100):
            k = random.choice('abcdefghijklm')
            if random.random() < 0.2:
                # Occasionally delete an existing key from both copies.
                if k in mirror:
                    del mirror[k]
                    del db[k]
            else:
                v = random.choice('abc') * random.randrange(10000)
                mirror[k] = v
                db[k] = v
                self.assertEqual(db[k], v)
        db.close()
    # After all rounds the database must exactly match the mirror.
    db = dumbdbm.open(_fname)
    expected = mirror.items()
    expected.sort()
    got = db.items()
    got.sort()
    self.assertEqual(expected, got)
    db.close()
def get_comment_db(songbook_path):
    """Return a dumbdbm-backed shelve of comments for *songbook_path*.

    Legacy comment files written by another dbm backend are migrated to
    dumbdbm in place (dumbdbm was chosen for portability).
    """
    # The "All songs" songbook has no comment database of its own.
    if songbook_path == c.ALL_SONGS_PATH:
        return dict()
    comment_path = posixpath.splitext(songbook_path)[0] + '.comment'
    # Automatic upgrade of old comment files.
    # TODO: this migration could be removed in the future.
    if whichdb.whichdb(comment_path) != 'dumbdbm':
        # Copy the comments out of the old database...
        old_shelf = shelve.open(comment_path)
        comments = dict(old_shelf)
        old_shelf.close()
        # ...delete every file belonging to it...
        for stale in glob.glob(comment_path + '*'):
            os.remove(stale)
        # ...and rewrite them into a dumbdbm shelf.
        new_shelf = shelve.Shelf(dumbdbm.open(comment_path))
        for name in comments.keys():
            new_shelf[name] = comments[name]
        new_shelf.close()  # close to make sure .comment file saved
    # Now assured of a dumbdbm shelf.
    return shelve.Shelf(dumbdbm.open(comment_path))
def __init__(self, database_filename=None):
    # Cache backed by a dumbdbm file, or a plain dict when no filename
    # is given. NOTE: persistence is currently force-disabled below, so
    # the dumbdbm branch is dead code until that override is removed.
    # for now we disable traj info cache persistence!
    database_filename = None
    self.database_filename = database_filename
    if database_filename is not None:
        try:
            self._database = dumbdbm.open(database_filename, flag="c")
        except dumbdbm.error as e:
            # The database file is corrupted: delete it and start fresh.
            try:
                os.unlink(database_filename)
                self._database = dumbdbm.open(database_filename, flag="n")
                # persist file right now, since it was broken
                self._set_curr_db_version(TrajectoryInfoCache.DB_VERSION)
                # close and re-open to ensure file exists
                self._database.close()
                self._database = dumbdbm.open(database_filename, flag="w")
            except OSError:
                raise RuntimeError(
                    'corrupted database in "%s" could not be deleted'
                    % os.path.abspath(database_filename))
    else:
        # In-memory fallback: behaves like the dbm mapping, not persisted.
        self._database = {}
    self._set_curr_db_version(TrajectoryInfoCache.DB_VERSION)
    # Serializes writers to the underlying store.
    self._write_protector = Semaphore()
def test_write_write_read(self):
    # test for bug #482460: rewriting a key must replace its value,
    # and the replacement must survive a close/reopen cycle.
    db = dumbdbm.open(_fname)
    db['1'] = 'hello'
    db['1'] = 'hello2'
    db.close()
    db = dumbdbm.open(_fname)
    self.assertEqual(db['1'], 'hello2')
    db.close()
def test_write_write_read(self):
    # test for bug #482460: the second write to a key wins, even after
    # the database has been closed and reopened.
    handle = dumbdbm.open(_fname)
    handle["1"] = "hello"
    handle["1"] = "hello2"
    handle.close()
    handle = dumbdbm.open(_fname)
    self.assertEqual(handle["1"], "hello2")
    handle.close()
def __init__(self, runtime, persist_dir=None):
    # localStorage backing store for a JS runtime: a dumbdbm file under
    # persist_dir when one can be opened, otherwise a process-wide
    # in-memory dict keyed by the app UUID.
    self.storage = None
    if persist_dir is not None:
        try:
            try:
                os.makedirs(os.path.join(persist_dir, 'localstorage'))
            except OSError as e:
                # An already-existing directory is fine; anything else is not.
                if e.errno != errno.EEXIST:
                    raise
            self.storage = dumbdbm.open(
                os.path.join(persist_dir, 'localstorage',
                             str(runtime.pbw.uuid)), 'c')
        except IOError:
            # Persistent store unavailable; fall through to transient below.
            pass
    if self.storage is None:
        logger.warning("Using transient store.")
        self.storage = _storage_cache.setdefault(str(runtime.pbw.uuid), {})
    # Expose the store to JS as this.localStorage via proxy objects.
    self.extension = v8.JSExtension(
        runtime.ext_name("localstorage"),
        """
        (function() {
            native function _internal();
            var proxy = _make_proxies({}, _internal(), ['set', 'has', 'delete_', 'keys', 'enumerate']);
            var methods = _make_proxies({}, _internal(), ['clear', 'getItem', 'setItem', 'removeItem', 'key']);
            proxy.get = function get(p, name) { return methods[name] || _internal().get(p, name); }
            this.localStorage = Proxy.create(proxy);
        })();
        """,
        lambda f: lambda: self,
        dependencies=["runtime/internal/proxy"])
def cvtdb(ctx, data, dbtype):
    '''
        Only used for testing purposes
    '''
    # Destination path sits next to the configured database file.
    db = ctx.parent.params['db']
    newdb = db + '.new'
    # Open a fresh ('n') database of the requested backend type.
    if dbtype == 'gdbm':
        import gdbm
        backend = gdbm.open(newdb, 'n')
    elif dbtype == 'dbm':
        import dbm
        backend = dbm.open(newdb, 'n')
    elif dbtype == 'dbhash':
        import dbhash
        backend = dbhash.open(newdb, 'n')
    elif dbtype == 'bsddb':
        backend = bsddb.hashopen(newdb, 'n')
    elif dbtype == 'dumbdbm':
        import dumbdbm
        backend = dumbdbm.open(newdb, 'n')
    else:
        raise click.ClickException("Invalid type %s" % dbtype)
    # Copy everything into a shelve wrapped around the new backend.
    new_data = shelve.Shelf(backend, protocol=exaile_pickle_protocol)
    for key, value in data.iteritems():
        new_data[key] = value
    new_data.sync()
    new_data.close()
def write():
    """Store two sample ranges in a shelve backed by dumbdbm."""
    # db = DataBase('not_bsd.dat')
    store = shelve.Shelf(dumbdbm.open('test_dumb.dat'))
    store['a'] = range(1000)
    store['b'] = range(2000)
    store.close()
def cache_rm_wheels(self, wheelfilenames):
    """Remove the cache entries for the given wheel files, if present."""
    with contextlib.closing(dumbdbm.open(self.cache, 'w')) as dbm:
        for path in wheelfilenames:
            # Keys are derived from the basename only.
            key = self._make_key(os.path.basename(path))
            if key in dbm:
                del dbm[key]
def __init__(self, runtime, persist_dir=None):
    # localStorage backing store: a dumbdbm file under persist_dir when
    # one can be opened, otherwise a process-wide in-memory dict keyed
    # by the app's manifest uuid.
    self.storage = None
    if persist_dir is not None:
        try:
            try:
                os.makedirs(os.path.join(persist_dir, 'localstorage'))
            except OSError as e:
                # An already-existing directory is fine; anything else is not.
                if e.errno != errno.EEXIST:
                    raise
            self.storage = dumbdbm.open(
                os.path.join(persist_dir, 'localstorage',
                             runtime.manifest['uuid']), 'c')
        except IOError:
            # Persistent store unavailable; fall through to transient below.
            pass
    if self.storage is None:
        logger.warning("Using transient store.")
        self.storage = _storage_cache.setdefault(runtime.manifest['uuid'], {})
    # Expose the store to JS as this.localStorage via proxy objects.
    self.extension = v8.JSExtension(
        runtime.ext_name("localstorage"),
        """
        (function() {
            native function _internal();
            var proxy = _make_proxies({}, _internal(), ['set', 'has', 'delete_', 'keys', 'enumerate']);
            var methods = _make_proxies({}, _internal(), ['clear', 'getItem', 'setItem', 'removeItem', 'key']);
            proxy.get = function get(p, name) { return methods[name] || _internal().get(p, name); }
            this.localStorage = Proxy.create(proxy);
        })();
        """,
        lambda f: lambda: self,
        dependencies=["runtime/internal/proxy"])
def test_dumbdbm_creation(self):
    # A freshly created database starts empty and accepts every test key.
    db = dumbdbm.open(_fname, 'c')
    self.assertEqual(db.keys(), [])
    for key in self._dict:
        db[key] = self._dict[key]
    self.read_helper(db)
    db.close()
def download(class_id, dirname, refresh=False):
    'Retrieve all the class files to a local directory'
    try:
        os.mkdir(dirname)
    except OSError:
        # Directory already exists (or cannot be created); proceed anyway.
        pass
    # Etag cache: maps local filename -> last-seen HTTP etag, so files
    # unchanged on the server are skipped on later runs.
    tags = dumbdbm.open(os.path.join(dirname, 'tags.ddb'))
    print ('Starting download at %s ' % time.ctime()).center(80, '=')
    links_url = 'http://dl.dropbox.com/u/3967849/%s/links.txt' % class_id
    links_info = urllib.urlopen(links_url).read()
    # Extract every absolute URL (one per line) from the link list.
    regex = r'^http://[A-Za-z0-9_/.]+\.(?:.*)\b'
    targets = re.findall(regex, links_info, re.M)
    for target in targets:
        filename = target.split('/')[-1]
        fullname = os.path.join(dirname, filename)
        u = urllib.urlopen(target)
        etag = u.headers.dict.get('etag', 'not found')
        if u.code == 404:
            print '!!! Missing resource:', target
        elif not refresh and tags.get(fullname) == etag:
            # Same etag as last time -- skip unless a refresh was forced.
            print '... Previously seen: ', target, 'with etag:', etag
        else:
            print '+++ Downloading: ', target, '--\x3E', fullname
            content = u.read()
            with open(fullname, 'wb') as f:
                f.write(content)
            # Remember the etag only after the file is fully written.
            tags[fullname] = etag
        u.close()
    tags.close()
def test_line_endings(self):
    # test for bug #1172763: dumbdbm would die if the line endings
    # weren't what was expected.
    f = dumbdbm.open(_fname)
    f["1"] = "hello"
    f["2"] = "hello2"
    f.close()

    # Mangle the file by adding \r before each newline.
    # Fix: the original leaked both file handles (open(...).read() /
    # open(...).write()); use context managers so they are closed.
    with open(_fname + ".dir") as infile:
        data = infile.read()
    data = data.replace("\n", "\r\n")
    with open(_fname + ".dir", "wb") as outfile:
        outfile.write(data)

    f = dumbdbm.open(_fname)
    self.assertEqual(f["1"], "hello")
    self.assertEqual(f["2"], "hello2")
    # Fix: also close the reopened database instead of leaking it.
    f.close()
def test_line_endings(self):
    # test for bug #1172763: dumbdbm would die if the line endings
    # weren't what was expected.
    f = dumbdbm.open(_fname)
    f['1'] = 'hello'
    f['2'] = 'hello2'
    f.close()

    # Mangle the file by adding \r before each newline.
    # Fix: the original leaked both file handles (open(...).read() /
    # open(...).write()); use context managers so they are closed.
    with open(_fname + '.dir') as infile:
        data = infile.read()
    data = data.replace('\n', '\r\n')
    with open(_fname + '.dir', 'wb') as outfile:
        outfile.write(data)

    f = dumbdbm.open(_fname)
    self.assertEqual(f['1'], 'hello')
    self.assertEqual(f['2'], 'hello2')
    # Fix: also close the reopened database instead of leaking it.
    f.close()
def test_dumbdbm_creation_mode(self): # On platforms without chmod, don't do anything. if not (hasattr(os, 'chmod') and hasattr(os, 'umask')): return try: old_umask = os.umask(0002) f = dumbdbm.open(_fname, 'c', 0637) f.close() finally: os.umask(old_umask) expected_mode = 0635 is_posix = True if os.name == 'java': if os._name != 'posix': is_posix = False elif os.name != 'posix': is_posix = False if not is_posix: # Windows only supports setting the read-only attribute. # This shouldn't fail, but doesn't work like Unix either. expected_mode = 0666 import stat st = os.stat(_fname + '.dat') self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode) st = os.stat(_fname + '.dir') self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)
def cache_rm(self, distfilenames):
    """Remove the cache entries for the given distribution files, if any."""
    with contextlib.closing(dumbdbm.open(self.cache, 'w')) as dbm:
        for path in distfilenames:
            # Keys are derived from the basename only.
            key = self._make_key(os.path.basename(path))
            if key in dbm:
                del dbm[key]
def main():
    # Diagnostic: probe which dbm backends work on this system by
    # writing a tiny database with each, then report the backend type
    # of the configured hammie storage file.
    print "Pickle is available."

    db = dumbdbm.open("dumbdb", "c")
    db["1"] = "1"
    db.close()
    dbstr = whichdb.whichdb("dumbdb")
    if dbstr:
        print "Dumbdbm is available."
    else:
        print "Dumbdbm is not available."

    db = dbhash.open("dbhash", "c")
    db["1"] = "1"
    db.close()
    dbstr = whichdb.whichdb("dbhash")
    if dbstr == "dbhash":
        print "Dbhash is available."
    else:
        print "Dbhash is not available."

    if bsddb is None:
        # bsddb failed to import earlier; report it as unavailable below.
        dbstr = ""
    else:
        db = bsddb.hashopen("bsddb3", "c")
        db["1"] = "1"
        db.close()
        dbstr = whichdb.whichdb("bsddb3")
    if dbstr == "dbhash":
        print "Bsddb[3] is available."
    else:
        print "Bsddb[3] is not available."
    print

    hammie = get_pathname_option("Storage", "persistent_storage_file")
    use_dbm = options["Storage", "persistent_use_database"]
    if not use_dbm:
        print "Your storage %s is a: pickle" % (hammie,)
        return
    if not os.path.exists(hammie):
        print "Your storage file does not exist yet."
        return
    db_type = whichdb.whichdb(hammie)
    if db_type == "dbhash":
        # could be dbhash or bsddb3
        # only bsddb3 has a __version__ attribute - old bsddb module does not
        if hasattr(bsddb, '__version__'):
            try:
                db = bsddb.hashopen(hammie, "r")
            except bsddb.error:
                pass
            else:
                db.close()
                print "Your storage", hammie, "is a: bsddb[3]"
                return
    elif db_type is None:
        print "Your storage %s is unreadable." % (hammie,)
    print "Your storage %s is a: %s" % (hammie, db_type)
def __init__(self):
    """Lazily load the kanwa dbm dictionary and the itaiji pickle.

    Both attributes are class-level caches: each is loaded only on the
    first construction and reused afterwards.
    """
    if self._kanwadict is None:
        dictpath = resource_filename(__name__, 'kanwadict2')
        self._kanwadict = dbm.open(dictpath, 'r')
    if self._itaijidict is None:
        itaijipath = resource_filename(__name__, 'itaijidict2.pickle')
        # Fix: close the pickle file after loading (the handle was leaked).
        with open(itaijipath, 'rb') as itaiji_pkl:
            self._itaijidict = load(itaiji_pkl)
def cache_print_errors(self):
    """Print every cached entry of ours that recorded an error value."""
    with contextlib.closing(dumbdbm.open(self.cache, 'r')) as dbm:
        for key, value in dbm.items():
            # Only our own keys, and only those with a non-empty value.
            if self._key_match(key) and value:
                wheel = self._key_to_wheel(key)
                click.echo(u"{}: {}".format(wheel, value))
def test_readonly_files(self):
    # Populate a database, drop every write permission, then check it
    # can still be opened and read.
    dir = _fname
    os.mkdir(dir)
    try:
        fname = os.path.join(dir, 'db')
        db = dumbdbm.open(fname, 'n')
        self.assertEqual(list(db.keys()), [])
        for key in self._dict:
            db[key] = self._dict[key]
        db.close()
        os.chmod(fname + ".dir", stat.S_IRUSR)
        os.chmod(fname + ".dat", stat.S_IRUSR)
        os.chmod(dir, stat.S_IRUSR | stat.S_IXUSR)
        db = dumbdbm.open(fname, 'r')
        self.assertEqual(sorted(db.keys()), sorted(self._dict))
        db.close()  # don't write
    finally:
        test_support.rmtree(dir)
def test_line_endings(self):
    if test_support.due_to_ironpython_bug("http://tkbgitvstfat01:8080/WorkItemTracking/WorkItem.aspx?artifactMoniker=315197"):
        return
    # test for bug #1172763: dumbdbm would die if the line endings
    # weren't what was expected.
    f = dumbdbm.open(_fname)
    f['1'] = 'hello'
    f['2'] = 'hello2'
    f.close()

    # Mangle the file by adding \r before each newline.
    # Fix: the original leaked both file handles (open(...).read() /
    # open(...).write()); use context managers so they are closed.
    with open(_fname + '.dir') as infile:
        data = infile.read()
    data = data.replace('\n', '\r\n')
    with open(_fname + '.dir', 'wb') as outfile:
        outfile.write(data)

    f = dumbdbm.open(_fname)
    self.assertEqual(f['1'], 'hello')
    self.assertEqual(f['2'], 'hello2')
    # Fix: also close the reopened database instead of leaking it.
    f.close()
def __init__(self, csm):
    """Open the local account database when MongoDB is not wanted."""
    self.csm = csm
    if not config.GetBool('want-mongodb', False):
        filename = simbase.config.GetString('accountdb-local-file',
                                            'databases/dev-accounts.db')
        # NOTE(review): dumbdbm is used on darwin, anydbm elsewhere --
        # presumably anydbm's default backend is problematic on macOS;
        # confirm before changing.
        opener = dumbdbm.open if platform == 'darwin' else anydbm.open
        self.dbm = opener(filename, 'c')
def load_from_shelve(filepath, key="data", use_dumbdbm=True):
    """Read one value from a shelve database file.

    :param filepath: path to an existing database file (with extension).
    :param key: key to read from the shelf.
    :param use_dumbdbm: open via dumbdbm (extension is stripped, since
        dumbdbm expects the basename) instead of shelve.open.
    :raises ValueError: if *filepath* is not an existing file.
    """
    if not os.path.isfile(filepath):
        raise ValueError("provided path is not a file")
    if use_dumbdbm:
        # dumbdbm.open wants the path without the trailing extension.
        base = filepath.rsplit(".", 1)[0]
        db = shelve.Shelf(dumbdbm.open(base))
    else:
        db = shelve.open(filepath)
    data = db[key]
    db.close()
    return data
def test_readonly_files(self):
    # Fill a database, make the files and directory read-only, then
    # verify read-only opening still works.
    dir = _fname
    os.mkdir(dir)
    try:
        fname = os.path.join(dir, 'db')
        handle = dumbdbm.open(fname, 'n')
        self.assertEqual(list(handle.keys()), [])
        for key in self._dict:
            handle[key] = self._dict[key]
        handle.close()
        for ext in (".dir", ".dat"):
            os.chmod(fname + ext, stat.S_IRUSR)
        os.chmod(dir, stat.S_IRUSR | stat.S_IXUSR)
        handle = dumbdbm.open(fname, 'r')
        self.assertEqual(sorted(handle.keys()), sorted(self._dict))
        handle.close()  # don't write
    finally:
        test_support.rmtree(dir)
def __init__(self, csm):
    """Open the local account database file."""
    self.csm = csm
    # This uses dbm, so we open the DB file:
    filename = simbase.config.GetString('accountdb-local-file',
                                        'dev-accounts.db')
    # NOTE(review): dumbdbm on darwin, anydbm elsewhere -- presumably a
    # macOS dbm-backend workaround; confirm before changing.
    if platform == 'darwin':
        self.dbm = dumbdbm.open(filename, 'c')
    else:
        self.dbm = anydbm.open(filename, 'c')
def test_dumbdbm_creation(self):
    # Delete any leftover database files so creation starts from scratch.
    for ext in (".dir", ".dat", ".bak"):
        try:
            os.unlink(self._fname + ext)
        except OSError:
            pass
    db = dumbdbm.open(self._fname, 'c')
    self.assertEqual(db.keys(), [])
    for key in self._dict:
        db[key] = self._dict[key]
    self.read_helper(db)
    db.close()
def upload_wheels(self, wheelfilenames):
    """Upload every existing .whl file; warn about and skip anything else."""
    to_upload = []
    for name in wheelfilenames:
        if os.path.isfile(name) and name.lower().endswith('.whl'):
            to_upload.append(name)
        else:
            _logger.warn("skipped %s: not a wheel file", name)
    to_upload.sort(key=_split_wheelfilename)
    with contextlib.closing(dumbdbm.open(self.cache, 'c')) as dbm:
        for name in to_upload:
            self.upload_wheel(name, dbm)
def test_line_endings(self):
    if test_support.due_to_ironpython_bug(
            "http://tkbgitvstfat01:8080/WorkItemTracking/WorkItem.aspx?artifactMoniker=315197"
    ):
        return
    # test for bug #1172763: dumbdbm would die if the line endings
    # weren't what was expected.
    f = dumbdbm.open(_fname)
    f['1'] = 'hello'
    f['2'] = 'hello2'
    f.close()

    # Mangle the file by adding \r before each newline.
    # Fix: the original leaked both file handles (open(...).read() /
    # open(...).write()); use context managers so they are closed.
    with open(_fname + '.dir') as infile:
        data = infile.read()
    data = data.replace('\n', '\r\n')
    with open(_fname + '.dir', 'wb') as outfile:
        outfile.write(data)

    f = dumbdbm.open(_fname)
    self.assertEqual(f['1'], 'hello')
    self.assertEqual(f['2'], 'hello2')
    # Fix: also close the reopened database instead of leaking it.
    f.close()
def test_dumbdbm_creation(self):
    # Start clean: remove every database file from previous runs.
    for ext in [".dir", ".dat", ".bak"]:
        try:
            os.unlink(self._fname + ext)
        except OSError:
            pass
    handle = dumbdbm.open(self._fname, 'c')
    # A new database must be empty before the test data goes in.
    self.assertEqual(handle.keys(), [])
    for key in self._dict:
        handle[key] = self._dict[key]
    self.read_helper(handle)
    handle.close()
def upload_dists(self, distfilenames):
    """Upload recognised distribution files; log and skip everything else."""
    to_upload = []
    for name in distfilenames:
        lowered = name.lower()
        # NOTE(review): '.tar.gzXXX' can never match a lowered name, so
        # sdists are effectively disabled here -- looks deliberate, but
        # confirm the intent before "fixing" it.
        is_dist = os.path.isfile(name) and (
            lowered.endswith('.whl') or lowered.endswith('.tar.gzXXX'))
        if is_dist:
            to_upload.append(name)
        else:
            _logger.debug("skipped %s: not a python distribution", name)
    with contextlib.closing(dumbdbm.open(self.cache, 'c')) as dbm:
        for name in sorted(to_upload, key=_split_filename):
            self.upload_dist(name, dbm)
def populate_molecules_dict(self):
    """Download the ChEMBL molecule set into a disk-backed shelve and return it."""
    self._logger.info('ChEMBL getting Molecule from ' + self.molecule_set_uri_pattern)
    # Shelve creates a file with specific database. Using a temp file
    # requires a workaround: dumbdbm creates an empty database file
    # that shelve can then open properly.
    t_filename = tempfile.NamedTemporaryFile(delete=False).name
    shelve_out = shelve.Shelf(dict=dumbdbm.open(t_filename))
    with URLZSource(self.molecule_set_uri_pattern).open() as f_obj:
        # One JSON document per line; keyed by the ChEMBL molecule id.
        for line in f_obj:
            mol = json.loads(line)
            shelve_out[str(mol["molecule_chembl_id"])] = mol
    self._logger.info('ChEMBL Molecule loading done. ')
    return shelve_out
def test_store_load_traj_info(self):
    # Store trajectory info through the cache, reopen the backing
    # database read-only, and check the identical info is returned.
    data = np.random.random((10, 3))
    my_conf = config()
    my_conf.cfg_dir = self.work_dir
    with mock.patch('pyemma.coordinates.data.util.traj_info_cache.config', my_conf):
        with NamedTemporaryFile(delete=False) as fh:
            np.savetxt(fh.name, data)
            reader = api.source(fh.name)
            first = self.db[fh.name, reader]
            self.db._database.close()
            self.db._database = dumbdbm.open(self.db.database_filename, 'r')
            second = self.db[fh.name, reader]
            self.assertEqual(second, first)
def test_store_load_traj_info(self):
    # Same round-trip check, driving the cfg_dir through the global
    # config and restoring it afterwards.
    data = np.random.random((10, 3))
    try:
        old_val = config.conf_values['pyemma']['cfg_dir']
        config.conf_values['pyemma']['cfg_dir'] = self.work_dir
        with NamedTemporaryFile(delete=False) as fh:
            np.savetxt(fh.name, data)
            reader = api.source(fh.name)
            first = self.db[fh.name, reader]
            self.db._database.close()
            self.db._database = dumbdbm.open(self.db.database_filename, 'r')
            second = self.db[fh.name, reader]
            self.assertEqual(second, first)
    finally:
        config.conf_values['pyemma']['cfg_dir'] = old_val
def __init__(self, filename, version):
    """Construct a new PersistentDict instance"""
    # Re-encode the name for the local filesystem via a utf-8 round trip.
    filename = filename.decode('utf-8').encode(sys.getfilesystemencoding())
    import dumbdbm
    super(Cache, self).__init__(dumbdbm.open(filename, 'c'), protocol=2)
    self.version = version
    # On a version change, run a registered migration if one exists;
    # otherwise the cached contents are discarded wholesale.
    previous = self.pop('__version__', None)
    if previous != self.version:
        if previous in updates:
            updates[previous](self)
        else:
            self.clear()
def __init__(self, filename, version):
    """Construct a new PersistentDict instance"""
    # Normalise the filename for the filesystem encoding.
    fs_name = filename.decode('utf-8')
    fs_name = fs_name.encode(sys.getfilesystemencoding())
    import dumbdbm
    super(Cache, self).__init__(dumbdbm.open(fs_name, 'c'), protocol=2)
    self.version = version
    stored_version = self.pop('__version__', None)
    if stored_version == self.version:
        return
    # Version changed: migrate if we know how, otherwise start over.
    if stored_version in updates:
        updates[stored_version](self)
    else:
        self.clear()
def test_dumbdbm_creation_mode(self): try: old_umask = os.umask(0002) f = dumbdbm.open(_fname, 'c', 0637) f.close() finally: os.umask(old_umask) expected_mode = 0635 if os.name != 'posix' or sys.platform == 'cli': # Windows and IronPython only support setting the read-only attribute. # This shouldn't fail, but doesn't work like Unix either. expected_mode = 0666 import stat st = os.stat(_fname + '.dat') self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode) st = os.stat(_fname + '.dir') self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)
def test_dumbdbm_creation_mode(self): try: old_umask = os.umask(0002) f = dumbdbm.open(_fname, "c", 0637) f.close() finally: os.umask(old_umask) expected_mode = 0635 if os.name != "posix": # Windows only supports setting the read-only attribute. # This shouldn't fail, but doesn't work like Unix either. expected_mode = 0666 import stat st = os.stat(_fname + ".dat") self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode) st = os.stat(_fname + ".dir") self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)
def test_dumbdbm_creation_mode(self): # On platforms without chmod, don't do anything. if not (hasattr(os, "chmod") and hasattr(os, "umask")): return try: old_umask = os.umask(0002) f = dumbdbm.open(_fname, "c", 0637) f.close() finally: os.umask(old_umask) expected_mode = 0635 if os.name != "posix": # Windows only supports setting the read-only attribute. # This shouldn't fail, but doesn't work like Unix either. expected_mode = 0666 import stat st = os.stat(_fname + ".dat") self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode) st = os.stat(_fname + ".dir") self.assertEqual(stat.S_IMODE(st.st_mode), expected_mode)
def test_dumbdbm_keys(self):
    # keys_helper performs the actual key assertions.
    self.init_db()
    db = dumbdbm.open(_fname)
    self.keys_helper(db)
    db.close()
def test_dumbdbm_read(self):
    # Open the populated database read-only and verify its contents.
    self.init_db()
    db = dumbdbm.open(_fname, 'r')
    self.read_helper(db)
    db.close()
def test_dumbdbm_modification(self):
    # Add a new entry through an open handle, keeping the mirror dict
    # in sync, then verify the full contents.
    self.init_db()
    db = dumbdbm.open(_fname, 'w')
    value = "indented"
    self._dict['g'] = value
    db['g'] = value
    self.read_helper(db)
    db.close()
def test_close_twice(self):
    # Closing an already-closed database must not raise.
    db = dumbdbm.open(_fname)
    db['a'] = 'b'
    self.assertEqual(db['a'], 'b')
    db.close()
    db.close()
def init_db(self):
    """(Re)populate the test database from self._dict."""
    db = dumbdbm.open(_fname, 'w')
    for key in self._dict:
        db[key] = self._dict[key]
    db.close()