def test_taxus_schema_basic_commit(self):
    dbref = ORMMixin.assert_dbref(self.prefix+'taxus-schema-test.sqlite')
    models = load_schema(self.schema)
    Base = models[0]
    sa = get_session(dbref, True, metadata=Base.metadata)
    Basic = models[1]
    basic = Basic(
            name="First Basic record",
            label="basic 1",
            date_added=datetime.now()
        )
    sa.add(basic)
    sa.commit()

    # Re-open the same database with a fresh declarative base and reflect
    # the schema the first session created, then compare the stored row.
    SqlBase = declarative_base()
    sa2 = get_session(dbref, metadata=SqlBase.metadata)
    SqlBase.metadata.reflect()
    basics = SqlBase.metadata.tables['basics']
    rs = sa2.query(basics).all()
    assert len(rs) == 1
    r = rs[0]
    for attr in self.fields[self.models[0]]:
        assert hasattr(r, attr), r
        assert getattr(r, attr) == getattr(basic, attr)
    os.unlink(os.path.expanduser(self.prefix+'taxus-schema-test.sqlite'))
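# The reflection step above can also be done with plain SQLAlchemy, without
# the taxus helpers. A minimal sketch, assuming a SQLite file and a 'basics'
# table as in the test; the file name here is only an example:
#
#   from sqlalchemy import MetaData, create_engine
#
#   engine = create_engine('sqlite:///basics.sqlite')
#   meta = MetaData()
#   meta.reflect(bind=engine)          # read table definitions back from the file
#   basics = meta.tables['basics']     # Table object built from the live schema
#   with engine.connect() as conn:
#       rows = conn.execute(basics.select()).fetchall()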
def cmd_db_init(settings):
    """ Initialize the database file if it does not exist, and update the schema. """
    get_session(settings.dbref)
    # create_all() without an explicit bind relies on get_session having
    # bound SqlBase.metadata to the engine for settings.dbref.
    SqlBase.metadata.create_all()
    print("Updated schema", settings.dbref)
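# For reference, the same initialisation with an explicit engine, in plain
# SQLAlchemy; the sqlite URL is only an example, not part of the tool:
#
#   from sqlalchemy import create_engine
#
#   engine = create_engine('sqlite:///test.db')
#   SqlBase.metadata.create_all(engine)   # creates missing tables, no-op otherwise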
def cmd_db_reset(settings):
    """ Drop all tables and recreate the schema. """
    get_session(settings.dbref)
    if not settings.yes:
        x = input("This will destroy all data, continue? [yN] ")
        if not x or x not in 'Yy':
            return 1
    SqlBase.metadata.drop_all()
    SqlBase.metadata.create_all()
    print("Done", settings.dbref)
def cmd_db_stats(settings):
    """ Print table record stats. """
    sa = get_session(settings.dbref)
    for m in [ Node, Topic, Project, VersionControl, Host ]:
        print(m.__name__+':', sa.query(m).count())
    print("Done", settings.dbref)
def cmd_stats(settings):
    """ Print counts for URLs, domains and tags. """
    sa = get_session(settings.dbref)
    for stat, label in (
            (sa.query(Locator).count(), "Number of URLs: %s"),
            #(sa.query(Bookmark).count(), "Number of bookmarks: %s"),
            (sa.query(Domain).count(), "Number of domains: %s"),
            (sa.query(Tag).count(), "Number of tags: %s"),
        ):
        log.std(label, stat)
def test_taxus_schema_ext_commit(self):
    dbref = ORMMixin.assert_dbref(self.prefix+'taxus-schema-test.sqlite')
    models = load_schema(self.schema)
    Base, MyRecord, Extended = models
    sa = get_session(dbref, True, metadata=Base.metadata)
    basic = MyRecord(
            name="First Basic record",
            label="basic 1",
            date_added=datetime.now()
        )
    sa.add(basic)
    sa.commit()
    os.unlink(os.path.expanduser(self.prefix+'taxus-schema-test.sqlite'))
def cmd_net_info(NAME, settings):
    """ List hosts for the given network. """
    sa = get_session(settings.dbref)
    print('net', NAME)
    for host in sa.query(Host).all():
        print('host', host)
import os
import sys

import jsonschema

from confparse import yaml_load
from taxus.init import get_session, extract_orm
from taxus.util import ORMMixin


def load_schema(path):
    """ Load a YAML schema by dotted name and map it onto ORM model classes. """
    path = path.replace(".", os.sep) + ".yml"
    meta = yaml_load(open(path))
    if "$schema" in meta:
        schema = yaml_load(open(meta["$schema"]))
        jsonschema.validate(meta, schema)
    else:
        print("No validation for", path, file=sys.stderr)
    # schema = extract_schema(meta)
    return list(extract_orm(meta))


# models = load_schema('taxus.core')
# models2 = load_schema('bookmarks')
models = load_schema("schema_test")
Base, MyRecord, Extended = models
dbref = ORMMixin.assert_dbref("~/.schema-test.sqlite")
sa = get_session(dbref, True, metadata=Base.metadata)
print(models)
# sa2 = get_session(dbref, metadata=Base.metadata)
# sa = ORMMixin.get_session('schema_test', dbref)
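# Behaviour of the jsonschema.validate call used in load_schema: it raises
# jsonschema.ValidationError when the document does not satisfy the schema.
# A minimal sketch with made-up schema/document literals:
#
#   import jsonschema
#
#   example_schema = {"type": "object", "required": ["tables"]}
#   try:
#       jsonschema.validate({"tables": {}}, example_schema)   # passes
#       jsonschema.validate({}, example_schema)                # raises
#   except jsonschema.ValidationError as err:
#       print("schema error:", err.message)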
def main(argv, doc=__doc__, usage=__usage__):
    """
    Execute using docopt-mpe options.

        prog [opts] [CTX] ( FILE... | DIR... )
    """
    # Process environment
    db = os.getenv('FINFO_DB', __db__)
    if db != __db__:
        usage = usage.replace(__db__, db)
    ctx = confparse.Values(dict(
        opts=util.get_opts(doc + usage, version=get_version(), argv=argv[1:])
    ))
    ctx.opts.flags.dbref = taxus.ScriptMixin.assert_dbref(ctx.opts.flags.dbref)

    # Load configuration
    ctx.config_file = list(confparse.expand_config_path(ctx.opts.flags.config)).pop()
    ctx.settings = settings = confparse.load_path(ctx.config_file)

    # Load SA session
    ctx.sa = get_session(ctx.opts.flags.dbref)
    # DEBUG: pprint(ctx.settings.todict())

    # Process arguments
    dirs = []

    # Shift paths from ctx arg
    if ctx.opts.args.CTX and os.path.exists(ctx.opts.args.CTX):
        ctx.opts.args.FILE.append(ctx.opts.args.CTX)
        ctx.opts.args.CTX = None

    # Sort out dirs from files
    for arg in ctx.opts.args.FILE:
        if os.path.isdir(arg):
            ctx.opts.args.FILE.remove(arg)
            dirs.append(arg)
        elif os.path.isfile(arg):
            pass
        else:
            log.note("Unhandled path %r" % arg)
    ctx.opts.args.DIR = dirs

    # Set default path context
    if ctx.opts.flags.name:
        assert not ctx.opts.args.CTX
        ctx.opts.args.CTX = ctx.opts.flags.name
    elif not ctx.opts.args.CTX:
        ctx.opts.args.CTX = 'current'

    # XXX: create prefixes object on context
    ctx.prefixes = confparse.Values(dict(
        map=settings.finfo['prefix-map'],
        env={},
        map_={}
    ))
    if 'homedir' not in ctx.prefixes.map:
        ctx.prefixes.map['homedir'] = 'HOME=%s' % os.path.expanduser('~')
    if 'current' not in ctx.prefixes.map:
        ctx.prefixes.map['current'] = '$PWD:$HOME'
    if 'pwd' not in ctx.prefixes.map:
        ctx.prefixes.map['pwd'] = 'PWD=%s' % os.path.abspath('.')
    for prefix, path in ctx.prefixes.map.items():
        if '=' in path:
            envvar, path = path.split('=')
            if envvar in ctx.prefixes.env:
                assert ctx.prefixes.env[envvar] == prefix, (
                        ctx.prefixes.env[envvar], prefix)
            ctx.prefixes.env[envvar] = prefix

    # Pre-process binds from env flags
    if not isinstance(ctx.opts.flags.env, list):
        ctx.opts.flags.env = [ ctx.opts.flags.env ]
    for env_map in ctx.opts.flags.env:
        envvar, prefix = env_map.split('=')
        if envvar in ctx.prefixes.env:
            assert prefix == ctx.prefixes.env[envvar]
        else:
            ctx.prefixes.env[envvar] = prefix
        envvalue = os.getenv(envvar, None)
        if envvalue:
            ctx.prefixes.map[prefix] = "%s=%s" % (envvar, envvalue)
            #ctx.prefixes.map_[prefix] = envvalue.split(':')

    # Post-process prefixes after passed flags, and resolve all values
    for prefix, spec in ctx.prefixes.map.items():
        if '=' in spec:
            envvar, spec = spec.split('=')
            if envvar in ctx.prefixes.env:
                assert ctx.prefixes.env[envvar] == prefix, (
                        ctx.prefixes.env[envvar], prefix)
            ctx.prefixes.env[envvar] = prefix
        specs = spec.split(':')
        set_ = []
        for idx, path in enumerate(specs):
            path = os.path.expanduser(path)
            if varname.match(path):
                refpref = ctx.prefixes.env[path[1:]]
                #refpath = ctx.prefixes.map[]
                path = '#prefixes/'+refpref
            elif '$' in path:
                pass
            #else:
            #    path = '#prefixes/'+prefix+':'+str(idx)
            set_.append(path)
        ctx.prefixes.map_[prefix] = set_
    ctx.pathrefs = ctx.prefixes.map_[ctx.opts.args.CTX]

    # DEBUG:
    #print(ctx.opts.todict())
    #print(pformat(ctx.prefixes.todict()))
    #print(pformat(ctx.pathrefs))

    # Preprocess filters to regex
    if 'FILTER' not in ctx.opts.args:
        ctx.opts.args.FILTER = []
    if not ctx.opts.args.FILTER:
        ctx.opts.args.FILTER = default_filters
    if ctx.opts.flags.documents:
        ctx.opts.args.FILTER = doc_filters + ctx.opts.args.FILTER
    for idx, filter in enumerate(ctx.opts.args.FILTER):
        if isinstance(filter, str):
            # Translate glob patterns to compiled regexes so .match() can be
            # used below (assumes `re` and `fnmatch` are imported at module scope)
            ctx.opts.args.FILTER[idx] = re.compile(fnmatch.translate(filter))

    # Resolve FILE/DIR arguments
    files, dirs = [], []
    for arg in ctx.opts.args.FILE + ctx.opts.args.DIR:
        r = find_local(arg, search_path(ctx.pathrefs, ctx))
        if not r:
            continue
        prefix, file, dir = r
        if not dir:
            raise Exception("No path for %s" % arg)
        elif file:
            files.append((prefix, file))
        else:
            dirs.append((prefix, dir))

    print("Resolved arguments to %s dirs, %s files" % (len(dirs), len(files)))

    # XXX: if not ctx.opts.flags.directories:
    if ctx.opts.flags.recurse:
        # Resolve all dirs to file lists
        for p, d in dirs:
            for top, path_dirs, path_files in os.walk(os.path.join(p, d)):
                for path_dir in list(path_dirs):
                    for filter in ctx.opts.args.FILTER:
                        if not filter.match(os.path.basename(path_dir)):
                            path_dirs.remove(path_dir)
                            break
                if top.startswith('./'):
                    top = top[2:]
                for path_file in list(path_files):
                    filter = None
                    for filter in ctx.opts.args.FILTER:
                        if filter.match(os.path.basename(path_file)):
                            break
                    else:
                        continue
                    if not filter.match(os.path.basename(path_file)):
                        path_files.remove(path_file)
                    if path_file not in path_files:
                        continue
                    files.append((p, os.path.join(top, path_file)))

    print("Continue with %s files" % len(files))

    mfadapter = None
    res.persistence.PersistedMetaObject.stores['metafile'] = mfadapter
    prefix = None
    for p, f in files:
        if ctx.opts.flags.auto_prefix:
            prefixes = find_prefixes(p, ctx)
            assert prefixes
            # FIXME: how come only use first??
            prefix = next(prefixes)
            assert len(ctx.prefixes.map_[prefix]) == 1, prefix
            name = f[len(ctx.prefixes.map_[prefix][0])+1:]
        else:
            prefix = ctx.opts.args.CTX
            name = f[len(p)+1:]
        ref = prefix+':'+name
        if ctx.opts.flags.names_only:
            print(ref)
        else:
            # TODO: get INode through context? Also add mediatype & parameters
            # resolver. But needs access to finfo ctx..
            record = taxus.INode.get_instance(name=ref, _sa=ctx.sa)
            # GNU/Linux: -bi = --brief --mime
            # Darwin/BSD: -bI = --brief --mime
            #mediatype = lib.cmd('file --brief --mime "%s"', path).strip()
            # XXX: see basename-reg?
            #if ctx.opts.flags.update == True:
            # TODO: repopulate metadata;
            mf = res.metafile.Metafile(f)
            assert mf.date_accessed
            record.date_accessed = mf.date_accessed
            assert mf.date_modified
            record.date_modified = mf.date_modified
            if not record.node_id:
                ctx.sa.add(record)
            print(record, record.date_updated, record.date_modified)
            #sys.exit()
    if ctx.opts.flags.update:
        ctx.sa.commit()
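# Quick illustration of the glob-to-regex step in the FILTER preprocessing
# above, standard library only: fnmatch.translate turns a shell glob into a
# regex source string, which is compiled so FILTER entries support .match().
# The '*.rst' pattern and file names are examples only:
#
#   import fnmatch, re
#
#   pattern = re.compile(fnmatch.translate('*.rst'))
#   assert pattern.match('README.rst')
#   assert not pattern.match('README.md')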