def cmd_stats(NAME, settings):
    """
    Print host and DB-reference stats for the current settings.
    """
    host = init_host(settings)
    if not SqlBase.metadata.bind:
        sa = Locator.get_session('default', settings.dbref)
        #sa = get_session(settings.dbref)
        assert sa, "XXX"
    dbRef = Locator.find(ref=settings.dbref)
    if not dbRef:
        # Record the dbref itself as a Locator on first run.
        dbRef = Locator(global_id=settings.dbref)
        dbRef.init_defaults()
        dbRef.commit()
    # NOTE(review): init_host elsewhere in this file returns a dict, so
    # `host.net.name` looks like it would raise AttributeError — confirm.
    report = (
        ( 'Host', host ),
        ( 'Net', host.net.name ),
        #( 'Domain', domain),
        ( 'DBRef', settings.dbref ),
        #( "Number of nodes", sa.query(Node).count())
    )
    for label, value in report:
        log.std('{green}%s{default}: {bwhite}%s{default}', label, value)
def init_host(settings):
    """
    Reinitialize or initialize host instance from environment and stored
    settings.
    """
    host = dict(name=get_hostname())
    hostnameId = host['name'].lower()
    if hostnameId in settings.nodes:
        # Known node: load the stored record instead.
        host = get_current_host(settings)
        log.std("{bwhite}Found host, {green}%s {default}<{bblack}%s{default}>",
                host['name'], host['unid'])
        return host
    # New node: record its network interfaces and mint an id.
    ifaces = {}
    for iface, mac, spec in domain.inet_ifaces():
        # NOTE(review): raising on an *existing* interface looks inverted —
        # confirm this is intended as a duplicate-registration guard.
        if mac in settings.interfaces:
            raise Exception("Found existing interface", mac)
        else:
            settings.interfaces[mac] = ifaces[mac] = dict(node=hostnameId)
    if settings.interactive:
        # NOTE(review): prompted `name` is never used afterwards — confirm.
        name = Prompt.raw_input("Give a name for this node", host['name'])
    host.update(dict(unid=str(uuid.uuid4()), interfaces=ifaces.keys()))
    settings.nodes[hostnameId] = host
    open(hostIdFile, 'w+').write(" ".join((host['unid'], host['name'])))
    log.std("{bwhite}Wrote new host, {green}%s {default}<{bblack}%s{default}>",
            host['name'], host['unid'])
    settings.commit()
    return host
def cmd_stats(settings):
    """Report record counts for the main tables."""
    sa = get_session(settings.dbref)
    counts = (
        (sa.query(Locator).count(), "Number of URLs: %s"),
        #(sa.query(Bookmark).count(), "Number of bookmarks: %s"),
        (sa.query(Domain).count(), "Number of domains: %s"),
        (sa.query(Tag).count(), "Number of tags: %s"),
    )
    for stat, label in counts:
        log.std(label, stat)
def print_Task(task):
    """Print one Task as a colorized one-line summary:
    "<id>. <title> [<requiredFor> <partOf>]"."""
    fields = (
        task.task_id,
        task.title,
        task.requiredFor_id or '',  # blank when unset
        task.partOf_id or '',
    )
    log.std(
        "{blue}%s{bblack}. {bwhite}%s {bblack}[{magenta}%s {green}%s{bblack}]{default}"
        % fields)
def cmd_info(settings):
    """Print DB reference and schema table names."""
    # NOTE(review): reads `opts` that is not a parameter — presumably a
    # module-level value; confirm.
    if opts.flags.database_tables:
        reload_metadata(settings)
        log.std("{yellow}Loaded tables from DB{default}")
    report = (
        ( 'DBRef', settings.dbref ),
        ( "Tables in schema", ", ".join(metadata.tables.keys()) ),
    )
    for label, value in report:
        log.std('{green}%s{default}: {bwhite}%s{default}', label, value)
def cmd_href(NAME, settings):
    """
    List Locator refs; filter by NAME when given, else list all.
    """
    sa = Locator.get_session(settings.session_name, settings.dbref)
    if NAME:
        rs = Locator.search(ref=NAME)
    else:
        rs = Locator.all()
    if not rs:
        log.std("Nothing")
    for r in rs:
        # Parenthesized single-value print: identical in Python 2, valid in
        # Python 3; consistent with the print() calls used elsewhere in file.
        print(r.ref)
def cmd_domain(NAME, settings):
    """
    List Domain names; filter by NAME when given, else list all.
    """
    sa = Domain.get_session(settings.session_name, settings.dbref)
    if NAME:
        rs = Domain.search(name=NAME)
    else:
        rs = Domain.all()
    if not rs:
        log.std("Nothing")
    for r in rs:
        # Parenthesized single-value print: identical in Python 2, valid in
        # Python 3; consistent with the print() calls used elsewhere in file.
        print(r.name)
def format_Space_item(space):
    """Print one Space record as a colorized one-line summary."""
    # Crude date -> ISO-8601-ish rendering ("YYYY-MM-DD HH:MM" -> "...T...").
    iso = lambda dt: str(dt).replace(' ', 'T')
    log.std(
        "{blue}%s{bblack}. {bwhite}%s {bblack}[ about:{magenta}%s {bblack}] %s %s %s{default}"
        % (
            space.space_id, space.global_id, space.classes,
            iso(space.date_added), iso(space.date_updated),
            iso(space.date_deleted),
        ))
def format_Topic_item(topic):
    """Print one Topic record as a colorized one-line summary."""
    # Crude date -> ISO-8601-ish rendering ("YYYY-MM-DD HH:MM" -> "...T...").
    iso = lambda dt: str(dt).replace(' ', 'T')
    log.std(
        "{blue}%s{bblack}. {bwhite}%s {bblack}[ about:{magenta}%s {bblack}] %s %s %s{default}"
        % (
            topic.topic_id, topic.name, topic.about_id,
            iso(topic.date_added), iso(topic.date_updated),
            iso(topic.date_deleted),
        ))
def cmd_init(settings): sa = Host.get_session("default", settings.dbref) host_dict = init_host(settings) name = host_dict["name"] record = Host.fetch(filters=(Host.name == name,), sa=sa, exists=False) if not record: host = Host(name=name, date_added=datetime.now(), date_updated=datetime.now()) sa.add(host) sa.commit() log.std("{bwhite}Added host %s record{default}", name) else: host = record print "host at", host_dict.path(), ":", host
def print_Folder(folder):
    """Print one Folder record as a colorized one-line summary."""
    # Crude date -> ISO-8601-ish rendering ("YYYY-MM-DD HH:MM" -> "...T...").
    iso = lambda dt: str(dt).replace(' ', 'T')
    log.std(
        "{blue}%s{bblack}. {bwhite}%s {bblack}[ type:{magenta}%s {bblack} parent:{cyan}%s ] %s %s %s{default}"
        % (
            folder.folder_id, folder.name, folder.ntype, folder.root,
            iso(folder.date_added), iso(folder.date_updated),
            iso(folder.date_deleted),
        ))
def cmd_new(NAME, REF, settings):
    """Get-or-create a Folder by NAME and print it."""
    sa = Folder.start_session('folder', settings.dbref)
    folder = Folder.byName(NAME)
    if folder:
        log.std("Found existing folder %s, created %s", folder.name,
                folder.date_added)
    else:
        folder = Folder(name=NAME)
        folder.init_defaults()
        sa.add(folder)
        sa.commit()
        log.std("Added new folder %s", folder.name)
    print_Folder(folder)
def cmd_info(opts):
    """
    Dump settings dict.
    """
    report = (
        ( "Tasks Document", opts.flags.tasks_file ),
        ( "Comment File", opts.flags.grep_file ),
        ( "Project Name Id Slug", opts.flags.project_slug ),
        ( "Key Type", opts.flags.key_type ),
        ( "Key Argument", opts.flags.key_arg ),
        ( "Key Separator", opts.flags.key_sep ),
    )
    for label, value in report:
        log.std('{green}%s{default}: {bwhite}%s{default}', label, value)
def cmd_memdebug(settings):
    """Report process peak memory usage and the DB file size."""
    # peak memory usage (bytes on OS X, kilobytes on Linux)
    res_usage = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    if os.uname()[0] == 'Linux':
        res_usage /= 1024  # kilobytes?
    # XXX: http://stackoverflow.com/questions/938733/total-memory-used-by-python-process
    #res_usage /= resource.getpagesize()
    # assumes dbref carries a 10-char scheme prefix (e.g. 'sqlite:///') that
    # must be stripped to get a filesystem path — TODO confirm
    db_size = os.path.getsize(os.path.realpath(settings.dbref[10:]))
    report = (
        ( 'Storage Size', lib.human_readable_bytesize(db_size) ),
        ( 'Resource Usage', lib.human_readable_bytesize(res_usage) ),
    )
    for label, value in report:
        log.std('{green}%s{default}: {bwhite}%s{default}', label, value)
def cmd_info(settings):
    """
    Print info.
    """
    host = init_host(settings)
    # NOTE(review): init_host elsewhere in this file returns a dict, so
    # `host.net.name` looks like it would raise AttributeError — confirm.
    report = (
        ( 'Host', host ),
        ( 'Net', host.net.name ),
        #( 'Domain', domain),
        ( 'DBRef', settings.dbref ),
    )
    for label, value in report:
        log.std('{green}%s{default}: {bwhite}%s{default}', label, value)
def cmd_stats(settings, opts):
    """
    Print table record stats: one count per table in the (optionally
    DB-reloaded) metadata.
    """
    global metadata
    sa = schema.get_session(settings.dbref, metadata=metadata)
    if opts.flags.all_tables or opts.flags.database_tables:
        if opts.flags.database_tables:
            # Reflect tables from the live DB rather than the declared schema.
            reload_metadata(settings)
            log.info("{yellow}Loaded tables from DB{default}")
        for t in metadata.tables:
            try:
                log.std("{blue}%s{default}: {bwhite}%s{default}", t,
                        sa.query(metadata.tables[t].count().alias("cnt")).all()[0][0])
            # FIX: legacy `except Exception, e` is Python-2-only syntax; `as`
            # works on 2.6+ and 3.x and matches usage elsewhere in this file.
            except Exception as e:
                log.err("Count failed for %s: %s", t, e)
        log.std("%i tables, done.", len(metadata.tables))
def cmd_info(settings):
    """Report on the Project record matching the current directory name."""
    sa = Project.get_session('default', settings.dbref)
    #sa = get_session(settings.dbref)
    pwd = os.getcwd()
    name = os.path.basename(pwd)
    workdir = Workdir.find(pwd)
    if not workdir:
        print("Not in a metadata workdir!")
    rs = Project.search(_sa=sa, name=name)
    if not rs:
        print("No project found for %r" % name)
        return 1
    proj = rs[0]
    try:
        hosts = proj.hosts
    except Exception as e:
        # Relationship access can fail when the session/bind is stale.
        print(settings.dbref, Project.metadata.bind)
        log.std("Error proj.hosts %s", e)
        hosts = []
    print(proj.name, hosts, proj.repositories[0].vc_type, proj.date_added)
def cmd_info(settings):
    """
    Verify DB connection is working. Print some settings and storage stats.
    """
    report = (
        ( 'Settings Raw', pformat(settings.todict()) ),
        ( 'DBRef', settings.dbref ),
        ( "Tables in schema", ", ".join(metadata.tables.keys()) ),
        ( "Table lengths", "" ),
    )
    for l, v in report:
        log.std('{green}%s{default}: {bwhite}%s{default}', l, v)
    sa = get_session(settings.dbref, metadata=metadata)
    for t in metadata.tables:
        try:
            log.std(" {blue}%s{default}: {bwhite}%s{default}", t,
                    sa.query(metadata.tables[t].count()).all()[0][0])
        # FIX: legacy `except Exception, e` is Python-2-only syntax; `as`
        # works on 2.6+ and 3.x and matches usage elsewhere in this file.
        except Exception as e:
            log.err("Count failed for %s: %s", t, e)
def cmd_info(settings):
    """Print schema info, then verify the DB connection works."""
    report = (
        ("DBRef", settings.dbref),
        ("Tables in schema", ", ".join(metadata.tables.keys())),
    )
    for l, v in report:
        log.std("{green}%s{default}: {bwhite}%s{default}", l, v)
    # try to connect
    try:
        sa = Node.get_session("default", settings.dbref)
        log.std("{magenta} * {bwhite}DB Connection {default}[{green}OK{default}]")
    # FIX: legacy `except Exception, e` is Python-2-only syntax; `as` works on
    # 2.6+ and 3.x and matches usage elsewhere in this file.
    except Exception as e:
        log.std("{magenta} * {bwhite}DB Connection {default}[{red}Error{default}]: %s", e)
def cmd_new(NAME, REF, settings):
    """Get-or-create a Topic by NAME and report it."""
    # XXX: an older store/master-session variant of this routine was left here
    # commented out; dropped — recover from VCS history if needed.
    sa = Topic.get_session('default', settings.dbref)
    topic = Topic.byName(NAME)
    if topic:
        log.std("Found existing topic %s, created %s", topic.name,
                topic.date_added)
    else:
        topic = Topic(name=NAME)
        topic.init_defaults()
        sa.add(topic)
        sa.commit()
        log.std("Added new topic %s", topic.name)
    reporter.stdout.Topic(topic)
def cmd_account_add(props, settings, name):
    """Create and persist a new Account record named `name`."""
    session = get_session(settings.dbref)
    account = Account(name=name)
    session.add(account)
    session.commit()
    log.std("Added account %s", name)
def cmd_show(settings):
    """List every table in the schema with its column names."""
    for name, table in metadata.tables.items():
        columns = "{default}, {bwhite}".join(table.columns.keys())
        log.std('{green}%s{default}: {bwhite}%s{default}', name, columns)
def cmd_info(settings):
    """Print DB reference and schema table names."""
    report = (
        ( 'DBRef', settings.dbref ),
        ( "Tables in schema", ", ".join(metadata.tables.keys()) ),
    )
    for label, value in report:
        log.std('{green}%s{default}: {bwhite}%s{default}', label, value)
# peak memory usage (bytes on OS X, kilobytes on Linux) res_usage = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss if os.uname()[0] == 'Linux': res_usage /= 1024; # kilobytes? # XXX: http://stackoverflow.com/questions/938733/total-memory-used-by-python-process #res_usage /= resource.getpagesize() # FIXME: does not use dbref according to settings, may fail/report wrong file db_size = os.path.getsize(os.path.expanduser(__db__)) for l, v in ( ( 'Storage Size', lib.human_readable_bytesize( db_size ) ), ( 'Resource Usage', lib.human_readable_bytesize(res_usage) ), ): log.std('{green}%s{default}: {bwhite}%s{default}', l, v) log.std('\n{green}info {bwhite}OK{default}') def cmd_list(settings): """ List to root tags. """ sa = get_session(settings.dbref, metadata=metadata) roots = sa.query(Tag).filter(Tag.contexts == None).all() for root in roots: print(root.name) def cmd_find(settings, LIKE):
def format_Node_item(node):
    """Print one Node record as a colorized one-line summary."""
    fields = (node.node_id, node.name)
    log.std("{blue}%s{bblack}. {bwhite}%s {bblack}[ {bblack}] {default}" % fields)
def cmd_dlcs_import(opts, settings):
    """
    Import a delicious bookmarks XML export: create/refresh Locator and
    Bookmark records, then register frequently-used Domains and Tags.

    TODO: built into generic import/export (ie. complete set) so heuristics
    can update all stats each import.. or find some way to fragment dataset.
    """
    importFile = opts.args.FILE
    data = dlcs_parse_xml(open(importFile).read())
    sa = Locator.get_session('default', opts.flags.dbref)
    #sa = model.get_session(opts.flags.dbref, metadata=SqlBase.metadata)
    tags_stat = {}
    domains_stat = {}
    # first pass: validate, track stats and create Locator records where missing
    for post in data['posts']:
        href = post['href']
        dt = datetime.strptime(post['time'], ISO_8601_DATETIME)
        # validate URL
        url = urlparse(href)
        domain = url[1]
        if not domain:
            log.std("Ignored domainless (non-net?) URIRef: %s", href)
            continue
        assert re.match(r'[a-z0-9]+(\.[a-z0-9]+)*', domain), domain
        # get/init Locator
        lctr = Locator.fetch((Locator.ref == href,), exists=False)
        if lctr:
            if lctr.date_added != dt:
                lctr.date_added = dt
                sa.add(lctr)
        else:
            lctr = Locator(global_id=href, ref=href, date_added=dt)
            lctr.init_defaults()
            log.std("new: %s", lctr)
            sa.add(lctr)
        # get/init Bookmark
        bm = Bookmark.fetch((Bookmark.ref_id == lctr.lctr_id,), exists=False)
        if bm:
            if bm.date_added != dt:
                bm.date_added = dt
                sa.add(bm)
            if bm.ref_id != lctr.lctr_id:
                bm.ref = lctr
                sa.add(bm)
        else:
            # Bookmark names are unique; skip rather than clobber.
            bm = Bookmark.fetch((Bookmark.name == post['description'],),
                    exists=False)
            if bm:
                log.std("Name already exists: %r" % post['description'])
                continue
            bm = Bookmark(
                    ref=lctr,
                    name=post['description'],
                    extended=post['extended'],
                    tags=post['tag'].replace(' ', ', '),
                    date_added=dt
                )
            bm.init_defaults()
            log.std("new: %s", bm)
            sa.add(bm)
        # track domain frequency
        if domain in domains_stat:
            domains_stat[domain] += 1
        else:
            domains_stat[domain] = 1
        # track tag frequency
        for tag in post['tag'].split(' '):
            if tag in tags_stat:
                tags_stat[tag] += 1
            else:
                tags_stat[tag] = 1
    log.std("Checked %i locator references", len(data['posts']))
    sa.commit()
    # Prepare domain stats
    avgDomainFreq = sum(domains_stat.values())/(len(domains_stat)*1.0)
    hiDomainFreq = max(domains_stat.values())
    log.std("Found domain usage (max/avg): %i/%i", hiDomainFreq, avgDomainFreq)
    domains = 0
    domainOffset = int(opts.flags.domain_offset)
    if domainOffset == 0:
        # FIX: was `hiFreq`, which is only defined later in the tag pass, so
        # this branch raised NameError; the domain maximum is meant here.
        domainOffset = hiDomainFreq
    elif domainOffset == -1:
        domainOffset = round(hiDomainFreq * 0.2)
    log.std("Setting domain-offset: %i", domainOffset)
    # get/init Domains at or above the frequency threshold
    for domain in domains_stat:
        freq = domains_stat[domain]
        if freq >= domainOffset:
            domains += 1
            domain_record = Domain.fetch((Domain.name == domain,), exists=False)
            if not domain_record:
                domain_record = Domain(name=domain)
                domain_record.init_defaults()
                sa.add(domain_record)
    sa.commit()
    log.std("Checked %i domains", len(domains_stat))
    log.std("Tracking %i domains", domains)
    # Prepare tag stats
    avgFreq = sum(tags_stat.values())/(len(tags_stat)*1.0)
    hiFreq = max(tags_stat.values())
    log.std("Found tag usage (max/avg): %i/%i", hiFreq, avgFreq)
    tagOffset = int(opts.flags.tag_offset)
    if tagOffset == 0:
        tagOffset = hiFreq
    elif tagOffset == -1:
        tagOffset = round(hiFreq * 0.1)
    log.std("Setting tag-offset: %i", tagOffset)
    # get/init Tags at or above the frequency threshold
    tags = 0
    for tag in tags_stat:
        freq = tags_stat[tag]
        if not re.match('[A-Za-z0-9-]+', tag):
            log.std("Non-std tag %s", tag)
        if freq >= tagOffset:
            tags += 1
            # NOTE(review): fetches via Node.name but creates a Tag — assumes
            # Tag shares the Node table (inheritance); confirm.
            t = Node.fetch((Node.name == tag,), exists=False)
            if not t:
                t = Tag(name=tag)
                t.init_defaults()
                log.std("new: %s", t)
                sa.add(t)
            # store frequencies
            # TODO tags_freq
    log.std("Checked %i tags", len(tags_stat))
    log.std("Tracking %i tags", tags)
    sa.commit()
def cmd_mutation_import(opts, settings): """ Import mutations from CSV, create accounts as needed. Indx with Year, Month. """ sa = get_session(settings.dbref) if settings.reset or lib.Prompt.ask("Purge mutations?"): sa.query(Mutation).delete() log.std("Purged all previous mutations") assert settings.input_format == 'csv', settings.input_format cache = confparse.Values(dict( accounts={}, years={}, months={} )) for csvfile in opts.args.file: reader = csv_reader(csvfile, [ 'line', 'date', 'accnr', 'amount', 'destacc', 'cat', 'destname', 'descr', 'descr2' ]) for line, date, accnr, amount, destacc, cat, destname, descr, descr2 in reader: from_account, to_account = None, None assert accnr, (line, date, amount, cat) # from_account if accnr not in cache.accounts: from_account = Account.for_nr(sa, accnr) if not from_account: from_account = Account(name=ACCOUNT_CREDIT+':'+accnr) from_account.init_defaults() from_account.set_nr(accnr) sa.add(from_account) sa.commit() cache.accounts[accnr] = from_account else: from_account = cache.accounts[accnr] assert from_account.account_id, (str(from_account), line, accnr, date, amount, cat) # credit account if not destacc: if cat == 'ba': # payment card checkout to_account = Account.for_checkout(sa, ACCOUNT_EXPENSES+':ba:'+descr) elif cat == 'ga': # atm withdrawal to_account = Account.for_withdrawal(sa, ACCOUNT_EXPENSES+':ga:'+descr) elif cat == 'db': # debet interest to_account = Account.for_name_type(sa, ACCOUNT_ACCOUNTING) else: print line, date, accnr, amount, cat, descr, descr2 assert not destname, (cat, destname, cat) continue # billing account elif destacc not in cache.accounts: to_account = Account.for_nr(sa, destacc) if not to_account: to_account = Account(name=ACCOUNT_EXPENSES+':'+destname) to_account.init_defaults() to_account.set_nr(destacc) sa.add(to_account) sa.commit() cache.accounts[destacc] = to_account else: to_account = cache.accounts[destacc] # get Year/Month y, m, d = map(int, ( date[:4], date[4:6], date[6:])) if y 
not in cache.years: pass mut = Mutation( from_account=from_account.account_id, to_account=to_account.account_id, year=y, month=m, day=d, amount=amount, description=descr+'\t'+descr2, category=cat) sa.add(mut) sa.commit() log.std("Import ready") cmd_balance_commit(settings)
def cmd_account_rm(settings, name): sa = get_session(settings.dbref) acc = sa.query(Account).filter(Account.name == name).one() sa.delete(acc) sa.commit() log.std("Dropped account %s", name)
log.std("{blue}%s{default}: {bwhite}%s{default}", t, sa.query(metadata.tables[t].count().alias("cnt")).all()[0][0]) except Exception, e: log.err("Count failed for %s: %s", t, e) log.std("%i tables, done.", len(metadata.tables)) else: if hasattr(schema, 'models'): models = schema.models else: models = [ getattr(schema, x) for x in dir(schema) if inspect.isclass(getattr(schema, x)) and issubclass( getattr(schema, x), schema.SqlBase ) ] for m in models: try: log.std("{blue}%s{default}: {bwhite}%s{default}", m.__name__, sa.query(m).count()) except Exception, e: log.err("Count failed for %s: %s", m, e) log.std("%i models, done.", len(models)) def cmd_info(settings): if opts.flags.database_tables: reload_metadata(settings) log.std("{yellow}Loaded tables from DB{default}") for l, v in ( ( 'DBRef', settings.dbref ), ( "Tables in schema", ", ".join(metadata.tables.keys()) ), ): log.std('{green}%s{default}: {bwhite}%s{default}', l, v) def cmd_show(settings):