def app_verify(environ, start_response):
    """WSGI app: verify the signature of every stored 'ig' record and
    return a plain-text report of the decrypted contents.

    Reads three dbm stores (/u/net, /u/ig, /u/tax); tax is opened but
    never read here — presumably for locking/consistency, TODO confirm.
    SECURITY NOTE(review): eval() on db contents executes arbitrary code
    if the stores can be written by untrusted parties — consider
    ast.literal_eval.
    """
    # Field indices for author records (net db) ...
    A_UBAL, A_AUTH, A_CUST, A_DATE, A_CURR, A_LBAL, A_AAGE, A_CKEY = 0, 1, 2, 3, 4, 5, 6, 7
    # ... and for ig records (ig db).
    IG_PRC, IG_CUST, IG_AUTH, IG_COAU, IG_FILE = 0, 1, 2, 3, 4
    o, net, dig, tax = '', dbm.open('/u/net'), dbm.open('/u/ig'), dbm.open('/u/tax')
    for ig in dig.keys():
        h = eval(dig[ig])  # stored repr of a tuple/list — see SECURITY NOTE
        #o += 'IG %s\n' % ig
        #o += 'Prices %s %s %s \n' % h[0]
        #o += 'Customers %s \n' % h[1]
        #o += 'Main Author %s \n' % h[2]
        author = h[2]  # same index as IG_AUTH
        #o += 'CoAuthors %s \n' % h[3]
        #o += 'P1:%s inf:%s \n' % (h[4][0], h[4][1])
        #o += 'signature %s \n' % h[4][2]
        signature = h[IG_FILE][2]
        #o += 'date %s \n' % h[4][3]
        #o += 'encrypted content %s \n' % h[IG_FILE][4]
        content = h[IG_FILE][4]
        ag = eval(net[author])  # author record — see SECURITY NOTE
        #o += 'pub key %s\n' % ag[A_CKEY]
        # Key material is stored as space-separated base64 fields.
        k = [b64toi(x) for x in ag[A_CKEY].split()]
        assert (verify(k[0], k[2], content, signature)) # verif
        cc = decrypt(k[1], k[2], content) # decrypt
        o += 'Content of %s: %s \n' % (ig, cc)
    net.close()
    dig.close()
    tax.close()
    start_response('200 OK', [('Content-type', 'text/plain; charset=utf-8')])
    return [o.encode('utf-8')]
def main():
    """Demonstrate basic dbm usage: write, identify the backend, read."""
    # WRITE #######
    store = dbm.open('foo_anydbm', 'c')
    store['one'] = 'un'
    store['two'] = 'dos'
    store['three'] = 'tres'
    store.close()

    # WHICH DBM ###
    print("whichdb:", dbm.whichdb('foo_anydbm'))

    # READ ########
    store = dbm.open('foo_anydbm', 'r')
    # Iterate loop: first method (common to any dbm module)
    for key in store.keys():
        print(key, ':', store[key])
    # Iterate loop: second method (only dumbdbm supports db.items())
    for key, value in store.items():
        print(key, ':', value)
    store.close()
def __init__(self, *args, **config):
    """Open (or create) the backing anydbm database for this cache.

    Keyword config:
        dbtype: file extension for the database (default "anydbm");
                a leading dot is added when missing.

    Raises:
        cache_errors.InitializationError: when the database can neither
        be opened nor created.
    """
    super(database, self).__init__(*args, **config)
    default_db = config.get("dbtype", "anydbm")
    if not default_db.startswith("."):
        default_db = '.' + default_db
    self._db_path = os.path.join(
        self.location,
        fs_template.gen_label(self.location, self.label) + default_db)
    self.__db = None
    try:
        # dbm.open() will not work with bytes in python-3.1:
        # TypeError: can't concat bytes to str
        self.__db = anydbm_module.open(self._db_path, 'w', self._perms)
    except anydbm_module.error:
        # The db (or its parent dirs) may simply not exist yet.
        try:
            self._ensure_dirs()
            self._ensure_dirs(self._db_path)
        except (OSError, IOError) as e:
            raise cache_errors.InitializationError(self.__class__, e)
        # try again if failed
        try:
            if self.__db is None:  # fixed: identity check, not '=='
                # dbm.open() will not work with bytes in python-3.1:
                # TypeError: can't concat bytes to str
                self.__db = anydbm_module.open(self._db_path, 'c', self._perms)
        except anydbm_module.error as e:
            raise cache_errors.InitializationError(self.__class__, e)
    self._ensure_access(self._db_path)
def on_text_message(self, message):
    """Handle a text control message of the form '<op> <key>'.

    Opens the config dbm store for either a write (binary frames follow)
    or a read (value for *key* is sent back).  Any failure reports an
    error to the peer and closes the connection.
    """
    op, self.key = message.split(maxsplit=1)
    logger.debug('will store in %s' % (self.config_path + self.config_file))
    try:
        # Only one open operation per connection is allowed.
        if self.operation:
            raise RuntimeError("CONFIG_FILE_ALREADY_OPENED")
        if op == WRITE_OP:
            self.operation = WRITE_OP
            self.db = dbm.open(self.config_path + self.config_file, 'c')
            self.send('{"status": "opened"}')
            self.begin_recv_binary(self.key)
        elif op == READ_OP:
            self.operation = READ_OP
            # 'c' also here so a read against a missing file creates it
            # rather than raising — presumably intentional; TODO confirm.
            self.db = dbm.open(self.config_path + self.config_file, 'c')
            self.send('{"status": "opened"}')
            self.read_key()
        else:
            raise RuntimeError("BAD_FILE_OPERATION")
    except RuntimeError as e:
        self.send_error(e.args[0])
        self.close()
    except PermissionError:
        self.send_error("ACCESS_DENY")
        self.close()
    except Exception as e:
        # Last-resort handler: report the raw exception to the peer.
        self.send_error(e)
        self.close()
def start(settings):
    """Generate per-module entity/service source folders for the project.

    NOTE(review): the 'dbm' used here is called as dbm.open(mf, settings)
    — a project-local module, not the stdlib dbm (whose open() takes a
    filename and flag).  Verify the import at the top of this file.
    """
    ms = settings['_modules_']
    core_folder = os.path.join(settings['_root_'], '_Project_-Core/src/main/java/com/company/_project_')
    core_folder = format_line(core_folder, settings)
    # Start from a clean output tree.
    if os.path.exists(core_folder):
        shutil.rmtree(core_folder)
    #tables
    # Build table -> owning-module index.
    tbm = {}
    for m in ms:
        mf = ms[m]
        mf['name'] = m
        for tbl in mf['tables']:
            tbm[tbl] = m
    #print tbm
    settings['_tbm_'] = tbm
    for m in ms:
        mf = ms[m]
        mf['name'] = m
        dbm.open(mf, settings)
        # entity def
        folder = os.path.join(core_folder, m)
        os.makedirs(folder)
        # service def
        folder1 = os.path.join(folder, 'service')
        os.makedirs(folder1)
        # start
        gen_entity_def(mf, folder, settings)
def main(args):
    """Process reprepro .outlog files, or dump/check the out-db.

    Args:
        args: command-line arguments; either ['--print'], ['--check'],
              or one or more paths ending in '.outlog'.

    Raises:
        CriticalError: on empty args or a non-.outlog argument.
    """
    if not args:
        raise CriticalError("No .outlog files given at command line!")
    if len(args) == 1 and args[0] == '--print':
        db = dbm.open(os.environ['REPREPRO_OUT_DB'], 'r')
        # Fixed: was sort(db.keys()) — a NameError; sorted() is intended.
        for k in sorted(db.keys()):
            print("%s: %s" % (k, db[k]))
        return
    if len(args) == 1 and args[0] == '--check':
        db = dbm.open(os.environ['REPREPRO_OUT_DB'], 'r')
        check(db)
        return
    # Validate every argument before touching the database.
    for f in args:
        if len(f) < 8 or not f.endswith(".outlog"):
            raise CriticalError("command line argument '%s' does not look like a .outlog file!" % f)
    db = dbm.open(os.environ['REPREPRO_OUT_DB'], 'c')
    for f in args:
        donefile = f[:-7] + ".outlogdone"
        # A .outlogdone marker means this log was already ingested.
        if os.path.exists(donefile):
            print("Ignoring '%s' as '%s' already exists!" % (f, donefile), file=sys.stderr)
            continue
        processfile(f, donefile, db)
def main():
    """Scan CDX index files and build video_id -> user / server dbm maps."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('cdx_file', nargs='+')
    args = arg_parser.parse_args()
    video_2_user_db = dbm.open('video_2_user.dbm', 'c')
    video_2_server_db = dbm.open('video_2_server.dbm', 'c')
    for cdx_file in args.cdx_file:
        print('Opening', cdx_file)
        for row in read_cdx(cdx_file):
            # Standard 11-column CDX row layout.
            (massaged_url, date, url, mime_type, status_code, sha1_checksum,
             redirect, aif_meta_tags, compressed_archive_size, archive_offset,
             filename) = row
            # justin.tv channel URLs embed the user and the video id.
            match = re.search(r'justin\.tv/([^/]+)/\w/([\d]+)', url)
            if match:
                user = match.group(1)
                video_id = match.group(2)
                print(video_id, user)
                video_2_user_db[video_id] = user
            # Storage-server URLs carry the video id before the extension.
            match = re.search(r'store.+_([\d]+)\.', url)
            if match:
                video_id = match.group(1)
                print(video_id, url)
                video_2_server_db[video_id] = url
    video_2_user_db.close()
    video_2_server_db.close()
def save(self, dirname = None):
    """Save the current rdfspace to a directory (by default the directory
    in which indexes are stored).

    Big matrices are mem-mapped; the rest of the object is pickled to
    space.dat.  The matrices (and any dbm indexes) are temporarily
    detached from self so the pickle stays small, then restored.
    """
    if dirname is None and self._index_dir is not None:
        dirname = self._index_dir
    if not os.path.exists(dirname):
        os.makedirs(dirname)
    # We memmap big matrices, as pickle eats the whole RAM
    # We don't save the full adjacency matrix
    ut_m = np.memmap(os.path.join(dirname, 'ut.dat'), dtype='float64', mode='w+', shape=self._ut_shape)
    ut_m[:] = self._ut[:]
    s_m = np.memmap(os.path.join(dirname, 's.dat'), dtype='float64', mode='w+', shape=self._s_shape)
    s_m[:] = self._s[:]
    vt_m = np.memmap(os.path.join(dirname, 'vt.dat'), dtype='float64', mode='w+', shape=self._vt_shape)
    vt_m[:] = self._vt[:]
    if self._index_dir is None:
        # The index is in memory, we'll pickle it with the rest
        (adjacency, ut, s, vt) = (self._adjacency, self._ut, self._s, self._vt)
        (self._adjacency, self._ut, self._s, self._vt) = (None, None, None, None)
        # Fixed: pickle requires a binary-mode file ('wb', not 'w');
        # 'with' also guarantees the handle is closed on error.
        with open(os.path.join(dirname, 'space.dat'), 'wb') as f:
            pickle.dump(self, f)
        (self._adjacency, self._ut, self._s, self._vt) = (adjacency, ut, s, vt)
    else:
        # Flushing indexes
        self._uri_index.close()
        self._index_uri.close()
        # The index is stored in dbm, we will exclude it from the pickle
        (adjacency, ut, s, vt) = (self._adjacency, self._ut, self._s, self._vt)
        (self._adjacency, self._ut, self._s, self._vt,
         self._uri_index, self._index_uri) = (None, None, None, None, None, None)
        with open(os.path.join(dirname, 'space.dat'), 'wb') as f:  # fixed: 'wb'
            pickle.dump(self, f)
        (self._adjacency, self._ut, self._s, self._vt) = (adjacency, ut, s, vt)
        # Reopen the dbm indexes read-only after the flush.
        self._uri_index = dbm.open(os.path.join(dirname, 'uri_index'), 'r')
        self._index_uri = dbm.open(os.path.join(dirname, 'index_uri'), 'r')
def main(): """Main function""" # WRITE ####### db = dbm.open('foo_dbm', 'c') db['one'] = 'un' db['two'] = 'dos' db['three'] = 'tres' db.close() # WHICH DBM ### print "whichdb:", whichdb.whichdb('foo_dbm') print # READ ######## db = dbm.open('foo_dbm', 'r') for k in db.keys(): print k, ':', db[k] db.close()
def __init__(self, path):
    """Create the state directory and open the 'feeds' and 'seen' stores.

    Args:
        path: directory in which the two dbm databases are kept.

    Raises:
        RuntimeError: when the directory cannot be created.
    """
    try:
        mkdir_p(path)
    except OSError as e:
        # Fixed: the message referenced settings['state_dir'], but no
        # 'settings' exists in this scope — formatting the error itself
        # raised NameError.  Use the actual path argument.
        raise RuntimeError("Couldn't create statedir %s: %s" % (path, str(e))) from e
    self.feeds = dbm.open(os.path.join(path, "feeds"), "c")
    self.seen = dbm.open(os.path.join(path, "seen"), "c")
def __init__(self):
    """Load (or interactively create) Google OAuth credentials and build
    the Calendar v3 service client.

    NOTE(review): Python 2 code (print statement, raw_input).
    """
    oauth_keyfile_path = (os.path.dirname(os.path.abspath(__file__)) + '/google_oauth')
    FLAGS = gflags.FLAGS
    if os.path.exists(oauth_keyfile_path + '.db'):
        # Existing key file: read the three credentials.
        # NOTE(review): this branch never closes db — handle leak.
        db = dbm.open(oauth_keyfile_path)
        client_id = db['client_id']
        client_secret = db['client_secret']
        developer_key = db['developer_key']
    else:
        # First run: prompt and persist the credentials.
        print 'create Google OAuth file...'
        db = dbm.open(oauth_keyfile_path, 'n')
        client_id = raw_input('input client_id :')
        client_secret = raw_input('input client_secret :')
        developer_key = raw_input('input developer key :')
        db['client_id'] = client_id
        db['client_secret'] = client_secret
        db['developer_key'] = developer_key
        db.close()
    # Set up a Flow object to be used if we need to authenticate. This
    # sample uses OAuth 2.0, and we set up the OAuth2WebServerFlow with
    # the information it needs to authenticate. Note that it is called
    # the Web Server Flow, but it can also handle the flow for native
    # applications
    # The client_id and client_secret are copied from the API Access tab on
    # the Google APIs Console
    self.FLOW = OAuth2WebServerFlow(
        client_id=client_id,
        client_secret=client_secret,
        scope='https://www.googleapis.com/auth/calendar',
        user_agent='twitter2calnedar/1.0')
    # To disable the local server feature, uncomment the following line:
    FLAGS.auth_local_webserver = False
    # If the Credentials don't exist or are invalid,
    # run through the native client flow.
    # The Storage object will ensure that if successful the good
    # Credentials will get written back to a file.
    self.storage = Storage('calendar.dat')
    self.credentials = self.storage.get()
    if self.credentials is None or self.credentials.invalid is True:
        self.credentials = run(self.FLOW, self.storage)
    # Create an httplib2.Http object to handle our HTTP requests
    # and authorize it with our good Credentials.
    http = httplib2.Http()
    http = self.credentials.authorize(http)
    # Build a service object for interacting with the API. Visit
    # the Google APIs Console
    # to get a developerKey for your own application.
    self.service = build(
        serviceName='calendar',
        version='v3',
        http=http,
        developerKey=developer_key)
def OpenDB():
    """Open the global db at DBFILE, recreating it when unreadable.

    Touches every key so a corrupt database surfaces an exception; on
    failure the file is discarded and recreated ('n' truncates).
    """
    global db
    db = dbm.open(DBFILE, 'c')
    try:
        for k in db.keys():
            pass  # fixed: was a bare 'None' expression statement
    except Exception:  # fixed: bare except would also swallow SystemExit
        db.close()
        db = dbm.open(DBFILE, 'n')
def __init__(self, cache=True):
    """Set up the Jinja2 environment and the on-disk render cache."""
    self.globalData = {}
    self.env = jinja2.Environment(
        loader=jinja2.FileSystemLoader('templates'),
        extensions=[jinja2_highlight.HighlightExtension])
    self.env.globals = self.globalData
    # 'c' reuses an existing cache file; 'n' always starts a fresh one.
    self.cache = dbm.open('.cache', 'c' if cache else 'n')
def test_modes():
    """Open the database once with each supported flag string."""
    for mode in ('r', 'rw', 'w', 'n'):
        handle = dbm.open(filename, mode)
        handle.close()
def main():
    """Write two entries into a.db, then reopen it and show the keys."""
    with dbm.open("a.db", "c") as store:
        store["hoe"] = "fue"
        store["hie"] = "hae"
    with dbm.open("a.db") as store:
        print(store.keys())
    return
def _reorganize(self):
    """Shrink the backing database via reorganize(), when supported.

    Runs the reorganize in a forked child to work around CPython bug
    13947 (gdbm reorganize leaking file descriptors), then waits for it.
    Failures are logged and swallowed — this is best-effort maintenance.
    """
    # This is a workaround for shelves implemented with database types
    # (like gdbm) that won't shrink themselves.
    # Because we're a delete heavy workload (as we drop items that are no
    # longer relevant), we check for reorganize() and use it on close,
    # which should shrink the DB and keep it from growing into perpetuity.
    log.debug("Checking for DB trim")
    try:
        need_reorg = False
        db = dbm.open(self.filename, "r")
        if hasattr(db, 'reorganize'):
            need_reorg = True
        db.close()
        if need_reorg:
            # Workaround Python bug 13947 (gdbm reorganize leaving hanging
            # file descriptors) by opening the extra fds in a temporary
            # process.
            pid = os.fork()
            if not pid:
                # Child process.
                # Wrap everything to make sure we don't get back into the
                # primary server code.
                try:
                    db = dbm.open(self.filename, "w")
                    getattr(db, 'reorganize')()
                    log.debug("Reorged - dying\n")
                    db.close()
                except:  # NOTE(review): bare except is deliberate here —
                    pass  # the child must reach sys.exit() no matter what.
                sys.exit(0)
            log.debug("Reorg forked as %d" % pid)
            # Parent: reap the child, tolerating a few interrupted waits.
            tries = 3
            while True:
                try:
                    tries -= 1
                    os.waitpid(pid, 0)
                    break
                except Exception as e:
                    log.debug("Waiting, got: %s" % e)
                    if tries <= 0:
                        log.debug("Abandoning %d" % pid)
                        break
    except Exception as e:
        log.warn("Failed to reorganize db:")
        log.warn(traceback.format_exc())
def OnSaveMemo(self, e):
    """Persist the current word's translation and memo to their dbm stores.

    Does nothing when no word is selected; bails out quietly when the
    meaning widget has no value (AttributeError).
    """
    # Fixed: was 'self.word is '' ' — identity comparison against a string
    # literal, which is not guaranteed to work; equality is intended.
    # Checking before opening also avoids leaking two open db handles.
    if self.word == '':
        return
    zh_file = dbm.open('zh_words', 'c')
    memo_file = dbm.open('memo_words', 'c')
    try:
        try:
            zh_file[self.word] = self.zh_meaning.GetValue().strip().encode('utf-8')
        except AttributeError:
            return
        memo_file[self.word] = self.memo.GetValue().strip().encode('utf-8')
    finally:
        # Fixed: handles were leaked on the AttributeError path.
        zh_file.close()
        memo_file.close()
    self.word_search.SetFocus()
def __init__(self, topdir, log):
    # XXX: should these be opened on-demand and cached for repeated use?
    # Open every message database found under <topdir>/messages.
    msg_dir = "%s/messages" % topdir
    for entry in os.listdir(msg_dir):
        name = entry[:-3]  # drop the 3-character extension
        self.msg_dbs[name] = dbm.open("%s/%s" % (msg_dir, name), "r", 0o644)
    # Open the three per-service databases for this log.
    log_dir = "%s/services" % topdir
    for t in ("category", "event", "parameter"):
        self.svc_dbs[t] = dbm.open("%s/%s/%s" % (log_dir, log, t), "r")
def igreg(owner, idig, p1, pf, host='localhost', post=False):
    """Sign and submit an 'ig' registration command for *owner*.

    Uses the owner's secret key from /u/sk to sign the request; on a
    non-error reply, the decrypted session key is stored locally under
    /cup/<user>/keys mapped to the ig id.  Returns the raw reply bytes.
    """
    td, ds = '%s' % datetime.datetime.now(), dbm.open('/u/sk')
    # Key material is stored as space-separated base64 fields.
    ki = [b64toi(x) for x in ds[owner].split()]
    ds.close()
    s = sign(ki[1], ki[2], ' '.join((td[:10], owner, idig, '%s' % p1, '%s' % pf)))
    cmd = '/'.join(('ig', owner, idig, '%s' % p1, '%s' % pf, s.decode('ascii')))
    era = format_cmd(post, cmd, True, host)
    if era[:5] != b'Error':
        sk = decrypt(ki[1], ki[2], era)
        d = dbm.open('/cup/%s/keys' % __user__, 'c')
        d[sk] = idig
        d.close()
    return era
def main():
    """Load video_id -> server-url pairs from justout.csv into dbm."""
    # Both stores are opened up front; only the server map is written here.
    video_2_user_db = dbm.open('video_2_user.dbm', 'c')
    video_2_server_db = dbm.open('video_2_server.dbm', 'c')
    with open('justout.csv', 'r', newline='') as in_file:
        for row in csv.reader(in_file, delimiter=';'):
            video_id, url = row[0], row[7]
            print(video_id, url)
            video_2_server_db[video_id] = url
    video_2_user_db.close()
    video_2_server_db.close()
def on_fc_ok(self, widget, data=None):
    """Record the chosen filename under the config key matching its suffix."""
    self.filename = self.fcdialog.get_filename()
    self.fcdialog.hide()
    # Map recognized file suffixes to their config.dat keys.
    suffix_to_key = {
        ".train": 'Training File',
        ".test": 'Test File',
        ".fa": 'Argument Data',
    }
    for suffix, key in suffix_to_key.items():
        if self.filename.endswith(suffix):
            db = dbm.open('config.dat', 'c')
            db[key] = self.filename
            db.close()
            break
def GET(self):
    """Handle a search request: resolve redirects (ftokens, ISBN, OLID)
    and otherwise render the work-search page from the query parameters.
    """
    global ftoken_db
    i = web.input(author_key=[], language=[], first_publish_year=[], publisher_facet=[], subject_facet=[], person_facet=[], place_facet=[], time_facet=[])
    # A single ftoken maps straight to a subject page.
    if i.get('ftokens', None) and ',' not in i.ftokens:
        token = i.ftokens
        if ftoken_db is None:
            # Lazily opened and kept for the process lifetime.
            ftoken_db = dbm.open('/olsystem/ftokens', 'r')
        if ftoken_db.get(token, None):
            raise web.seeother('/subjects/' + ftoken_db[token].decode('utf-8').lower().replace(' ', '_'))
    self.redirect_if_needed(i)
    # A bare ISBN query (no other parameters) redirects to the book.
    if 'isbn' in i and all(not v for k, v in i.items() if k != 'isbn'):
        self.isbn_redirect(i.isbn)
    q_list = []
    q = i.get('q', '').strip()
    if q:
        # An OLID or an isbn: field in the free-text query also redirects.
        m = re_olid.match(q)
        if m:
            raise web.seeother('/%s/%s' % (olid_urls[m.group(1)], q))
        m = re_isbn_field.match(q)
        if m:
            self.isbn_redirect(m.group(1))
        q_list.append(q)
    # Fold the structured fields into fielded query terms, escaping ':'.
    for k in ('title', 'author', 'isbn', 'subject', 'place', 'person', 'publisher'):
        if k in i:
            q_list.append(k + ':' + i[k].replace(':', '\\:').strip())
    return render.work_search(i, ' '.join(q_list), do_search, get_doc)
def test_cup_ratios(): "_" #global __cur_ratio__ now = '%s' % datetime.datetime.now() o, ko = '<p title="The test checks that first that taxes are positives and second it is never valuable to exchange from one local currency to another using ⊔ as intermediate.">%s: ⊔ currencies rates test: ' % now[:10], False dtax = dbm.open('/u/tax', 'w') if dtax[b'TODAY'] == bytes(now[:10],'ascii'): h = eval(dtax[b'HASH']) #__cur_ratio__ = eval(dtax[b'RATE']) else: if b'HASH' in dtax.keys(): dtax[b'OLD_HASH'], dtax[b'OLD_RATE'] = dtax[b'HASH'], dtax[b'RATE'] h = get_today_rates() # only once a day! if b'HASH' not in dtax.keys(): dtax[b'OLD_HASH'] = '%s' % h dtax[b'OLD_RATE'] = '%s' % {x:__cur_ratio__[x][2] for x in __cur_ratio__} dtax[b'HASH'] = '%s' % h dtax[b'RATE'] = '%s' % {x:__cur_ratio__[x][2] for x in __cur_ratio__} dtax[b'TODAY'] = '%s' % now[:10] dtax.close() for r in __cur_ratio__: if (__cur_ratio__[r][1] > __cur_ratio__[r][2]) or (__cur_ratio__[r][2] > __cur_ratio__[r][3]): ko = True o += '<br/><b class="red">ERROR: rates for %s</b>' % r for r in h: r1, r2 = r[:3], r[3:] t = __cur_ratio__[r2][3]/__cur_ratio__[r1][1] if t < h[r] : ko = True o += '<br/><b class="red">ERROR: %s/%s: %5.2f</b>' % (r1, r2, 100*(t-h[r])/h[r]) if not ko: o += '<b>pass</b>' return o + '</p>\n'
def get_videoitems():
    """Return every stored video record as a JSON payload.

    Response shape: {"videos": [<utf-8 decoded value>, ...]}.
    """
    # Fixed: the handle was never closed, and an unused 'video = {}'
    # local was removed.
    db = dbm.open('videodb', 'c')
    try:
        # NOTE(review): .values() is only available on some dbm backends
        # (e.g. dbm.dumb) — confirm which backend 'videodb' uses.
        videos = [v.decode("utf-8") for v in db.values()]
    finally:
        db.close()
    return jsonify({'videos': videos})
def readDB(dbName):
    """Return the list of values stored in the dbm database *dbName*.

    Propagates whatever dbm.open raises when the file cannot be opened.
    """
    # Fixed: open() was inside the try, so a failed open left 'db'
    # unbound and the finally clause raised NameError instead of the
    # real error.
    db = dbm.open(dbName, 'r')
    try:
        print('Reading ', dbName)
        # .keys() is supported by every dbm backend (direct iteration of
        # the handle is not) — and matches the sibling readDB variants.
        return [db[datum] for datum in db.keys()]
    finally:
        db.close()
def __init__(self, planet, url):
    """Open (creating if needed) the on-disk cache for *url* and load its
    entries, then apply any per-feed options from the planet config.

    NOTE(review): Python 2 code — the octal literal 0666 is a
    SyntaxError on Python 3 (would be 0o666).
    """
    if not os.path.isdir(planet.cache_directory):
        os.makedirs(planet.cache_directory)
    cache_filename = cache.filename(planet.cache_directory, url)
    cache_file = dbm.open(cache_filename, "c", 0666)
    cache.CachedInfo.__init__(self, cache_file, url, root=1)
    self._items = {}
    self._planet = planet
    self._expired = []
    self.url = url
    # retain the original URL for error reporting
    self.configured_url = url
    self.url_etag = None
    self.url_status = None
    self.url_modified = None
    self.name = None
    self.updated = None
    self.last_updated = None
    self.filter = None
    self.exclude = None
    self.next_order = "0"
    self.cache_read()
    self.cache_read_entries()
    # Per-feed configuration overrides the cached values.
    if planet.config.has_section(url):
        for option in planet.config.options(url):
            value = planet.config.get(url, option)
            self.set_as_string(option, value, cached=0)
def get_rates():
    """Fetch today's USD exchange rates (once per day) into /cup/rates.

    The dbm store is keyed by ISO date; a key for today means the rates
    were already fetched.  Rates come from currencies.apps.grandtrunk.net
    for every currency in __all_cur__ except USD itself.
    """
    now, db = '%s' % datetime.datetime.now(), '/cup/rates'
    if not os.path.isfile(db + '.db'):
        # First run: create the store with a placeholder entry.
        dr = dbm.open(db, 'c')
        dr[now[:10]] = b'Init'
        dr.close()
    dr = dbm.open(db, 'w')
    if bytes(now[:10], 'ascii') not in dr.keys():
        co, h = http.client.HTTPConnection('currencies.apps.grandtrunk.net'), {}
        for c in __all_cur__:
            if c != 'USD':
                co.request('GET', '/getlatest/%s/USD' %c)
                h[c+'USD'] = float(co.getresponse().read())
        # Stored as the repr of the dict — read back with eval elsewhere.
        dr[now[:10]] = '%s' % h
    dr.close()
def readDB(dbName):
    """Return the list of values stored in the dbm database *dbName*.

    Propagates whatever dbm.open raises when the file cannot be opened.
    """
    # Fixed: open() inside the try left 'db' unbound when open failed,
    # so the finally clause masked the real error with a NameError.
    db = dbm.open(dbName, 'r')
    try:
        print('Reading ', dbName)
        # if we expected this to be large, we could use a generator and yield lines
        # .keys() works on all dbm backends, unlike direct handle iteration.
        return [db[datum] for datum in db.keys()]
    finally:
        db.close()
def readDB(dbName):
    """Return the list of values stored in the dbm database *dbName*.

    NOTE(review): Python 2 code (print statement).  If dbm.open fails,
    'db' is unbound and the finally clause raises NameError.
    """
    try:
        db = dbm.open(dbName, 'r')
        print "Reading", dbName
        return [db[datum] for datum in db.keys()]
    finally:
        db.close()
def GET(self2):
    """Log a user in: check email/password against the dbm store and
    populate the session, redirecting to /view on success.
    """
    input = web.input()
    email = input['email']
    password = input['password']
    db = dbm.open('dbm', 'c')
    if not email + "_password" in db.keys():
        raise web.seeother('/index')
    # NOTE(review): on Python 3 dbm values are bytes, so comparing them
    # to the str 'password' would always be False — confirm this app
    # runs under Python 2 (where dbm values are str).
    if db[email + "_password"] == password:
        # NOTE: for some odd reason ses.user does not work when
        # web.seeother is raised; use ses._initializer['user'] instead.
        # ses.email = email
        # ses.logged_in = True
        ses._initializer['email'] = email
        ses._initializer['user'] = db[email+"_firstname"] + " " + db[email+"_lastname"]
        ses._initializer['logged_in'] = True
        raise web.seeother('/view')
    else:
        raise web.seeother('/index')
def delete(self, key):
    """Remove *key* from the backing dbm store (created if absent)."""
    db = dbm.open(self.filename, "c")
    try:
        del db[key]
    finally:
        db.close()
from random import randint, shuffle from os import system, rename, remove, getcwd from time import sleep from getpass import getpass from Crypto.Cipher import AES from hashlib import sha256, md5 from pickle import loads, dumps import shutil import gzip import dbm system("title PASSTORE") dat = dbm.open("main.db", "c") system("attrib +s +h main.db") alphabet = [ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z' ] num = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0] sim = [ '`', '~', '!', '@', '#', '$', '%', '^', '&', '*', '(', ')', '_', '-', '+', '=', '{', '[', '}', ']', '|', '\\', ';', ':', '"', "'", '<', ',', ".", "<", '>', '/', '?' ] alphabet_capital = [ 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z' ] def password():
def persist(self, key, value):
    """Pickle *value* and store it under *key* in the backing dbm store."""
    db = dbm.open(self.filename, "c")
    try:
        db[key] = pickle.dumps(value)
    finally:
        db.close()
def __init__(self, name=None, mode='r'):
    """Open the underlying dbm database.

    Args:
        name: path of the dbm file.  NOTE(review): the default None
              makes dbm.open raise — confirm callers always pass a name.
        mode: dbm open flag ('r', 'w', 'c' or 'n'); default read-only.
    """
    self._dbm = dbm.open(name, mode)
def __init__(self, filename):
    """Open *filename* as the backing store and set up in-memory state."""
    self.filename = filename
    # "c": create if missing; "f" is presumably gdbm fast mode (no
    # implicit sync) — ignored by other backends; TODO confirm backend.
    self._fs = dbm.open(filename, "cf")
    self._local = dict()
    self._lock = Lock()
Requirements from PyPI: BeautifulSoup4, PyYAML """ import dbm import functools import re import sys import time from typing import List, Set from urllib.request import urlopen from bs4 import BeautifulSoup import yaml cache = dbm.open("bugzilla-cache.db", "c") QUESTION = "what collection type of data do the requested measurements fall under?" CATEGORY_MAP = { 1: "technical", 2: "interaction", 3: "web_activity", 4: "highly_sensitive", } def fetch_url(url: str) -> str: """ Fetch a web page containing a data review, caching it to avoid over-fetching.
def __init__(self, filename, flag='c', protocol=None, writeback=False):
    """Initialize the shelf with a dbm database as its backing mapping.

    Args:
        filename: path of the dbm file.
        flag: dbm open flag (default 'c' — create if missing).
        protocol, writeback: forwarded to Shelf unchanged.
    """
    import dbm
    Shelf.__init__(self, dbm.open(filename, flag), protocol, writeback)
def get_logs(log_bucket, strides_ip, strides_port):
    """Parse GCS usage logs from *log_bucket* and POST them, one TSV body
    per log file, to the strides endpoint at /blast_tsv.

    A local dbm store records which log files were already processed so
    reruns skip them.
    """
    storage_client = storage.Client()
    bucket = storage.Client().bucket(log_bucket)
    # GCS usage-log column order.
    # NOTE(review): 'format' shadows the builtin of the same name.
    format = [
        "time_micros", "c_ip", "c_ip_type", "c_ip_region", "cs_method",
        "cs_uri", "sc_status", "cs_bytes", "sc_bytes", "time_taken_micros",
        "cs_host", "cs_referer", "cs_user_agent", "s_request_id",
        "cs_operation", "cs_bucket", "cs_object",
    ]
    log_files = bucket.list_blobs()
    conn = http.client.HTTPConnection(strides_ip, port=strides_port)
    with dbm.open("gs_agent_blast_hackathon.db", "c") as db:
        for log_file in log_files:
            if "_usage_" not in log_file.public_url:
                print(f"skipping {log_file.public_url}\n")
                continue
            if log_file.public_url in db:
                print(f"{log_file.public_url} already processed\n")
                continue
            logs = log_file.download_as_string()
            body = ""
            lines = logs.split(b"\n")
            # Skip the header row.
            for line in lines[1:]:
                line = line.decode()
                print("line is:" + line)
                if len(line) < 50:
                    continue
                line = line.replace("\t", "")
                csvs = csv.reader([str(line)])
                for row in csvs:
                    cols = row
                print("cols is " + str(cols))
                c = 0
                fields = {}
                for col in format:
                    fields[col] = cols[c].replace('"', "")
                    c += 1
                print(fields)
                start = float(fields["time_micros"])
                end = start + float(fields["time_taken_micros"])
                # NOTE(review): 'bytes' shadows the builtin.
                bytes = int(fields["sc_bytes"])
                if (bytes == 0
                        or fields["cs_method"] != "GET"
                        or int(fields["sc_status"]) >= 400):
                    continue
                # Convert microseconds to seconds.
                start /= 1_000_000.0
                end /= 1_000_000.0
                # /download/storage/v1/b/ncbi_sra_realign/o/ERR1620370.summary?generation=1545190393681782&alt=media
                acc = fields["cs_uri"]
                acc = acc.split("?")[0]
                acc = acc.split("/")[-1]
                # Christiam suggests ignoring extension
                acc = acc.rsplit(".", 1)[0]
                if acc == "o":
                    print("Huh" + fields["cs_uri"])
                tsv = (
                    fields["c_ip"],
                    acc,
                    fields["cs_user_agent"] + " (hackathon)",
                    fields["sc_status"],
                    fields["cs_host"],
                    str(start),
                    str(end),
                    str(bytes),
                    "1",
                )
                body += "\t".join(tsv) + "\n"
            print("\bPosting: " + body)
            conn.request("POST", "/blast_tsv", body=body)
            response = conn.getresponse()
            o = response.read().decode()
            # NOTE(review): no-op — str.replace returns a new string that
            # is discarded here; probably meant o = o.replace("\n", "").
            o.replace("\n", "")
            print("HTTP Response was", response.status, response.reason, o)
            # Mark this log file as processed.
            db[log_file.public_url] = ""
import dbm
import random

ROSTER = ["John", "Paul", "George", "Ringo"]
GRADES = ["A", "A-", "B+", "B", "B-", "C+", "C", "C-"]

# Assign each student a random grade, then dump the whole store.
db = dbm.open("data/db_student.db", "c")
for student_name in ROSTER:
    db[student_name] = random.choice(GRADES)
for student_name in db:
    print(student_name, db[student_name])
db.close()
import dbm

# Build a small condiment-color store, then reopen it read-only.
store_path = '8.DataHasToGoSomewhere/no_sql_dbs/definitions.db'
store = dbm.open(store_path, 'c')
store['mustard'] = 'yellow'
store['ketchup'] = 'red'
store['pesto'] = 'green'
len(store)  # length probe; result unused (kept for parity)
print(store['pesto'])
store.close()

store = dbm.open(store_path, 'r')
print(store['mustard'])
def has_seen(self, key):
    """Return True when str(key) is already recorded in the dbm store."""
    with dbm.open(self.filename, 'c') as store:
        return str(key) in store
def whichdb(filename):
    """Guess which db package to use to open a db file.

    Return values:

    - None if the database file can't be read;
    - empty string if the file can be read but can't be recognized
    - the module name (e.g. "dbm" or "gdbm") if recognized.

    Importing the given module may still fail, and opening the database
    using that module may still fail.
    """
    # Check for dbm first -- this has a .pag and a .dir file
    try:
        f = open(filename + os.extsep + "pag", "rb")
        f.close()
        # dbm linked with gdbm on OS/2 doesn't have .dir file
        if not (dbm.library == "GNU gdbm" and sys.platform == "os2emx"):
            f = open(filename + os.extsep + "dir", "rb")
            f.close()
        return "dbm"
    except IOError:
        # some dbm emulations based on Berkeley DB generate a .db file
        # some do not, but they should be caught by the dbhash checks
        try:
            f = open(filename + os.extsep + "db", "rb")
            f.close()
            # guarantee we can actually open the file using dbm
            # kind of overkill, but since we are dealing with emulations
            # it seems like a prudent step
            if dbm is not None:
                d = dbm.open(filename)
                d.close()
            return "dbm"
        except (IOError, _dbmerror):
            pass
    # Check for dumbdbm next -- this has a .dir and a .dat file
    try:
        # First check for presence of files
        os.stat(filename + os.extsep + "dat")
        size = os.stat(filename + os.extsep + "dir").st_size
        # dumbdbm files with no keys are empty
        if size == 0:
            return "dumbdbm"
        f = open(filename + os.extsep + "dir", "rb")
        try:
            # NOTE(review): f is binary, so read(1) returns bytes; the
            # comparison against str quotes never matches on Python 3 —
            # this code presumably targets Python 2.
            if f.read(1) in ("'", '"'):
                return "dumbdbm"
        finally:
            f.close()
    except (OSError, IOError):
        pass
    # See if the file exists, return None if not
    try:
        f = open(filename, "rb")
    except IOError:
        return None
    # Read the start of the file -- the magic number
    s16 = f.read(16)
    f.close()
    s = s16[0:4]
    # Return "" if not at least 4 bytes
    if len(s) != 4:
        return ""
    # Convert to 4-byte int in native byte order -- return "" if impossible
    try:
        (magic, ) = struct.unpack("=l", s)
    except struct.error:
        return ""
    # Check for GNU dbm
    if magic in (0x13579ace, 0x13579acd, 0x13579acf):
        return "gdbm"
    # Check for old Berkeley db hash file format v2
    if magic in (0x00061561, 0x61150600):
        return "bsddb185"
    # Later versions of Berkeley db hash file have a 12-byte pad in
    # front of the file type
    try:
        (magic, ) = struct.unpack("=l", s16[-4:])
    except struct.error:
        return ""
    # Check for BSD hash
    if magic in (0x00061561, 0x61150600):
        return "dbhash"
    # Unknown
    return ""
def __init__(self, filename, mode, perm):
    """Open *filename* as a dbm database.

    Args:
        filename: database path.
        mode: dbm open flag ('r', 'w', 'c' or 'n').
        perm: unix permission bits used when a new file is created.
    """
    import dbm
    self.db = dbm.open(filename, mode, perm)
"Gregory", "Spencer", "Myat", "Carmen", "Victoria", "Jinna", "Nico", "Meiling", "Jenny", "Xintong", "Shaun", "Brian", "David", "Patrick", "Shirley", "Arteen", "Julie", ) GRADES = ["A", "A-", "B+", "B", "B-", "C+", "C", "C-"] db = dbm.open("session14/db_student.db", "c") for student in ROSTER: db[student] = random.choice(GRADES) for key in db: print(key, db[key]) db.close()
def query(self, key):
    """Return the unpickled value stored under *key*, or None if absent."""
    with dbm.open(self.filename, "c") as db:
        raw = db.get(key)
    # Missing key: the original stored-pickle default unpickles to None.
    return None if raw is None else pickle.loads(raw)
# Some databases are not relational and don't support SQL. These were written # to handle very large data sets, allow more flexible data definitions, or # support custom data operations. # dbm Family # ----------------------------------------------------------------------------- # key-value stores often embedded in apps such as web browsers to maintain # settings. A dbm database is like a Python dictionary. FYI dbm is used as the # back-end of the shelve module. Example: import dbm db = dbm.open('data/definitions', 'c') # The second argument to the open() method is 'r' to read, 'w' to write, and # 'c' for both, creating the file if it doesn't exist. There's also an 'n' # option which will always create a new file, overwriting the old. # To create key-value pairs, just assign a value to a key just as you would # a dictionary. The keys of the database must be strings. The values must be # strings or None. db['jaune'] = 'yellow' db['rouge'] = 'red' db['vert'] = 'green' print(len(db)) # 3 print(type(db)) # <class '_dbm.dbm'>
#dbmaccess.py: 访问持久字典 import dbm #open existing file db = dbm.open('websites', 'c') #add another item db['www.baidu.com'] = 'Baidu home page' ''' #verity the previous item remains if db['www.python.org']!=None: print('Found www.pyhton.org') else: print('Error: Missing item') ''' #Iterate over the keys.May be slow #May use a lot of memory for key in db.keys(): print("key= ", key, " value= ", db[key]) del db['www.baidu.com'] print('After deleting www.baidu.com,we have: ') for key in db.keys(): print("key= ", key, " value= ", db[key]) #close and save to disk db.close()
# coding=utf-8 # 代码文件:chapter17/ch17.5.2.py import dbm with dbm.open('mydb', 'c') as db: db['name'] = 'tony' # 更新数据 print(db['name'].decode()) # 取出数据 age = int(db.get('age', b'18').decode()) # 取出数据 print(age) if 'age' in db: # 判断是否存在age数据 db['age'] = '20' # 或者 b'20' del db['name'] # 删除name数据
def __init__(self, datadir):
    """Open (or create) the blocks store under *datadir* and reset state.

    NOTE(review): Python 2 code — the long literal 0L is a SyntaxError
    on Python 3.
    """
    self.blockDB = dbm.open(datadir + "/blocks", 'c')
    self.currentBlock = 0L
    self.headers_map = dict()
__module_name__ = 'soft-ignore'
__module_version__ = '0.96'
__module_description__ = """With this plug-in, you can prevent some people from getting your attention."""

DBFILE = os.environ['HOME'] + '/.config/hexchat/soft-ignore.conf'
SEPARATOR = ','

xchat.prnt('%(name)s, version %(version)s' % {
    'name': __module_name__,
    'version': __module_version__
})

# loads the database
encoding = locale.getdefaultlocale()[1]
db = dbm.open(DBFILE, 'c')
try:
    nicks = db['soft-ignore'].decode(encoding).split(SEPARATOR)
except KeyError:
    # First run: seed the key so later reads succeed.
    db['soft-ignore'] = ''
    nicks = []


# this function strips the colors
# from the nicknames in a safe(r) fashion
def remove_color(nick):
    """Drop a leading color code (one marker char plus 1-2 digits) from *nick*."""
    # 0 is x3
    # then follows 1 or 2 numbers
    # join() consumes the iterator directly — the list() wrapper was redundant.
    return "".join(dropwhile(lambda x: x.isdigit(), nick[1:]))
import dbm

# Write two entries, then reopen read-only and fetch one back.
with dbm.open('chche', 'c') as store:
    store['key1'] = 'value1'
    store['key2'] = 'value2'

with dbm.open('chche', 'r') as store:
    print(store.get('key1'))
def __init__(self, datadir):
    """Open (or create) the transactions store under *datadir*."""
    self.txDB = dbm.open(datadir + "/transactions", 'c')
import glob
import dbm
import json

# Dump every JSON object stored in any dbm database under dbms/.
for db_path in glob.glob('dbms/*.db'):
    store = dbm.open(db_path)
    for key in store.keys():
        record = json.loads(store[key].decode())
        print(record)
import dbm

# Open database or create if not exists
with dbm.open('cache', 'c') as db:
    # Add some values; str keys/values are encoded to bytes on storage.
    db[b'hello'] = b'Welcome in database'
    db['name_constructor'] = 'Kamil'
    db['id_constructor'] = '254331'

    # Read the values back, decoding the bytes for display.
    print(db.get('hello').decode())
    print(db.get('name_constructor').decode())
    print(db.get('hello', None).decode())
def __init__(self, host='localhost', bindingip='', localport=5060, port=5060,
             externalip=None, username=None, crackmode=1, crackargs=None,
             realm=None, sessionpath=None, selecttime=0.005, compact=False,
             reusenonce=False, extension=None, maxlastrecvtime=10,
             domain=None, requesturi=None, method='REGISTER', ipv6=False):
    """Set up a SIP password-cracking session.

    crackmode 1 uses a numeric brute-force generator (crackargs is a
    tuple of its arguments); crackmode 2 uses a dictionary attack
    (crackargs is its source).  When sessionpath is given, results are
    persisted to a dbm store there, otherwise kept in a plain dict.
    """
    self.log = logging.getLogger('ASipOfRedWine')
    # Choose address family for the UDP socket.
    family = socket.AF_INET
    if ipv6:
        family = socket.AF_INET6
    self.ipv6 = ipv6
    self.sock = socket.socket(family, socket.SOCK_DGRAM)
    self.sock.settimeout(10)
    self.sessionpath = sessionpath
    self.maxlastrecvtime = maxlastrecvtime
    self.lastrecvtime = time.time()
    self.dbsyncs = False
    self.method = method
    if self.sessionpath is not None:
        self.resultpasswd = dbm.open(
            os.path.join(self.sessionpath, 'resultpasswd'), 'c')
        try:
            # Not every dbm backend exposes sync(); probe once.
            self.resultpasswd.sync()
            self.dbsyncs = True
            self.log.info("Db does sync")
        except AttributeError:
            self.log.info("Db does not sync")
            pass
    else:
        self.resultpasswd = dict()
    self.nomore = False
    self.passwordcracked = False
    self.rlist = [self.sock]
    self.wlist = list()
    self.xlist = list()
    self.challenges = list()
    self.crackmode = crackmode
    self.crackargs = crackargs
    self.dsthost, self.dstport = host, int(port)
    # Domain defaults to the destination host unless overridden.
    self.domain = self.dsthost
    if domain:
        self.domain = domain
    if crackmode == 1:
        self.passwdgen = numericbrute(*crackargs)
    elif crackmode == 2:
        self.passwdgen = dictionaryattack(crackargs)
    self.username = username
    self.realm = realm
    self.selecttime = selecttime
    self.dstisproxy = None
    self.ignorenewnonce = True
    self.noauth = False
    self.auth = dict()
    self.previouspassword = str()
    self.compact = compact
    self.reusenonce = reusenonce
    self.staticnonce = None
    self.staticcid = None
    # SIP extension defaults to the username.
    if extension is not None:
        self.extension = extension
    else:
        self.extension = username
    self.bindingip = bindingip
    self.localport = localport
    self.requesturi = requesturi
    self.noncecount = 1
    self.originallocalport = localport
    if self.sessionpath is not None:
        self.packetcount = packetcounter(50)
    # Work out the externally visible IP when not supplied.
    if externalip is None:
        self.log.debug("external ip was not set")
        if (self.bindingip != '0.0.0.0') and (len(self.bindingip) > 0):
            self.log.debug(
                "but bindingip was set! we'll set it to the binding ip")
            self.externalip = self.bindingip
        else:
            try:
                self.log.info(
                    "trying to get self ip .. might take a while")
                self.externalip = socket.gethostbyname(
                    socket.gethostname())
            except socket.error:
                self.externalip = '127.0.0.1'
    else:
        self.log.debug("external ip was set")
        self.externalip = externalip
import dbm

# Store a stringified dict under 'first', then read it back.
with dbm.open("dictionary.txt", "c") as writer:
    writer['first'] = "{1:'one', 2:'two'}"

with dbm.open("dictionary.txt") as reader:
    print(reader['first'].decode())
def __init__(self, file, flag):
    """Open *file* with a normalized dbm flag.

    Any flag containing 'c' collapses to 'c'; otherwise one containing
    'r' collapses to 'r'; anything else is passed through unchanged.
    """
    normalized = 'c' if 'c' in flag else 'r' if 'r' in flag else flag
    self._dbm = dbm.open(file, normalized)
import dbm

# Create/open the store and add a single item.
site_db = dbm.open("websites", "c")
site_db["www.python.org"] = "Python Home Page"
print(site_db["www.python.org"])

# Close and save to disk.
site_db.close()
# A dbm store only holds bytes, so pickle is used to round-trip
# arbitrary Python objects through it.  The resulting files are managed
# by the dbm backend and are not meant to be opened by other tools.
import dbm
import pickle

store = dbm.open("fruits", 'c')
store['1'] = 'apple'
store['2'] = pickle.dumps(('peach', 'orange', 'grape'))

print(store['1'])
print(store['2'])
print(pickle.loads(store['2']))

store.close()
def whichdb(filename):
    """Guess which db package to use to open a db file.

    Return values:
    - None if the database file can't be read;
    - empty string if the file can be read but can't be recognized
    - the module name (e.g. "dbm" or "gdbm") if recognized.

    Importing the given module may still fail, and opening the database
    using that module may still fail.
    """
    # Classic dbm: has .pag (and usually .dir) companion files.
    try:
        f = open(filename + os.extsep + 'pag', 'rb')
        f.close()
        # dbm linked with gdbm on OS/2 has no .dir file.
        if not (dbm.library == 'GNU gdbm' and sys.platform == 'os2emx'):
            f = open(filename + os.extsep + 'dir', 'rb')
            f.close()
        return 'dbm'
    except IOError:
        # Some Berkeley-DB-based dbm emulations generate a .db file.
        try:
            f = open(filename + os.extsep + 'db', 'rb')
            f.close()
            # Verify we can really open it with dbm before claiming it.
            if dbm is not None:
                d = dbm.open(filename)
                d.close()
            return 'dbm'
        except (IOError, _dbmerror):
            pass
    # dumbdbm: has .dir and .dat companion files.
    try:
        os.stat(filename + os.extsep + 'dat')
        size = os.stat(filename + os.extsep + 'dir').st_size
        # A dumbdbm with no keys has an empty .dir file.
        if size == 0:
            return 'dumbdbm'
        f = open(filename + os.extsep + 'dir', 'rb')
        try:
            # NOTE(review): f is binary so read(1) yields bytes; comparing
            # against str quotes never matches on Python 3 (Python 2 code).
            if f.read(1) in ("'", '"'):
                return 'dumbdbm'
        finally:
            f.close()
    except (OSError, IOError):
        pass
    # File missing entirely -> None.
    try:
        f = open(filename, 'rb')
    except IOError:
        return
    # Inspect the leading magic number.
    s16 = f.read(16)
    f.close()
    s = s16[0:4]
    if len(s) != 4:
        return ''
    try:
        magic, = struct.unpack('=l', s)
    except struct.error:
        return ''
    # GNU gdbm magic values (0x13579ace / acd / acf).
    if magic in (324508366, 324508365, 324508367):
        return 'gdbm'
    # Old Berkeley db hash v2 magic, either byte order.
    elif magic in (398689, 1628767744):
        return 'bsddb185'
    # Newer Berkeley db hash files pad 12 bytes before the type word.
    try:
        magic, = struct.unpack('=l', s16[-4:])
    except struct.error:
        return ''
    if magic in (398689, 1628767744):
        return 'dbhash'
    else:
        # Unrecognized format.
        return ''