Example #1
def check_ignored(Klass, filepath, opts):
    # Skip symlinks and anything that is neither a regular file nor a directory
    if os.path.islink(filepath) or (
            not os.path.isfile(filepath) and not os.path.isdir(filepath)):
        log.warn("Ignored non-regular path %r", filepath)
        return True
    elif Klass.ignored(filepath) or File.ignored(filepath):
        log.info("Ignored file %r", filepath)
        return True
    return False
Example #2
def vc_repos(self, prog=None, sa=None, context=None):
    """
    TODO: Yield all repositories in workspace.
    """
    print('context=', context)
    print('sa=', sa)
    # TODO: should be some workspace
    log.info('vc:repos done')
Example #3
def rsr_info(self, prog, context, opts, sa, nodes):
    "Log some session statistics and info"
    log.note("SQLAlchemy session: %s", sa)
    models = (taxus.core.ID, Node, Name, Tag, INode, Locator)
    cnt = {}
    for m in models:
        cnt[m] = sa.query(m).count()
        log.note("Number of %s: %s", m.__name__, cnt[m])
    if 'node' in self.globaldict and self.globaldict.node:
        log.info("Auto commit: %s", opts.rsr_auto_commit)
        log.info("%s", self.globaldict.node)
    sys.stderr.write('rsr-info: see notice log (-vvv)\n')
Example #4
def get_session(dbref, initialize=False, metadata=SqlBase.metadata):
    if dbref.startswith('sqlite'):
        register_sqlite_connection_event()
    engine = create_engine(dbref)
    metadata.bind = engine
    if initialize:
        log.info("Applying SQL DDL to DB %s..", dbref)
        metadata.create_all()  # issue DDL CREATE statements
        log.note('Updated schema for %s to %s', dbref, 'X')
    session = sessionmaker(bind=engine)()
    return session
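As a supplement to Example #4, here is a minimal, self-contained sketch of the same pattern (engine creation, optional DDL, session factory) using plain SQLAlchemy. The Base and Item classes, the make_session name, and the sqlite dbref are illustrative placeholders rather than part of the project above; it assumes SQLAlchemy 1.4+.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    name = Column(String(255))

def make_session(dbref, initialize=False):
    # dbref is an SQLAlchemy URL such as 'sqlite:///:memory:'
    engine = create_engine(dbref)
    if initialize:
        Base.metadata.create_all(engine)  # issue DDL CREATE statements
    return sessionmaker(bind=engine)()

session = make_session('sqlite:///:memory:', initialize=True)
session.add(Item(name='example'))
session.commit()
print(session.query(Item).count())  # -> 1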
Example #5
def cmd_projects(settings):
    """
    List projects, with id and parent id.
    """
    sa = get_session(settings.dbref)
    l = 'Projects'
    v = sa.query(rdm.Project).count()
    # TODO: filter project; age, public
    log.info('{green}%s{default}: {bwhite}%s{default}', l, v)
    print('# ID PARENT NAME')
    for p in sa.query(rdm.Project).all():
        print(p.id, p.parent_id or '-', p.name)
Example #6
def init(Class, path, kind):
    self = Class(path)
    self.user = res.Homedir.fetch(path)
    self.volume = res.Volumedir.fetch(path)
    self.workspace = res.Workspace.fetch(path)
    if kind == 'default':
        self.context = self.workspace or self.volume or self.user
        if self.workspace: kind = 'workspace'
        elif self.volume: kind = 'volume'
        elif self.user: kind = 'user'
    else:
        self.context = getattr(self, kind)
    log.info("Session context type is %r", kind)
    log.debug("Session.init: using context %r", self.context)
    self.kind = kind
    return self
Example #7
def _assert_node(self, Klass, name, sa, opts):
    """
    Helper for node creation.
    """
    assert name
    node = Klass.find((Klass.name == name,), sa=sa)
    if node:
        if name != node.name:
            node.name = name
    else:
        node = Klass(name=name, date_added=datetime.now())
        sa.add(node)
        log.info("Added new node to session: %s", node)
        if opts.rsr_auto_commit:
            sa.commit()
    yield dict(node=node)
    log.note('Asserted %s', node)
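Example #7 is in essence a get-or-create helper. Below is a minimal sketch of that pattern against a plain SQLAlchemy session; query/filter_by is standard SQLAlchemy, but the auto_commit flag and the assumption that the model has name and date_added columns mirror the snippet above rather than any fixed API.

from datetime import datetime

def get_or_create_by_name(session, Model, name, auto_commit=False):
    # Look up an existing row by name, or add a new instance to the session
    node = session.query(Model).filter_by(name=name).first()
    if node is None:
        node = Model(name=name, date_added=datetime.now())
        session.add(node)
        if auto_commit:
            session.commit()
    return node

# usage sketch: tag = get_or_create_by_name(sa, Tag, 'inbox')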
Example #8
def rsr_session(self, prog, volume, workspace, homedir, opts):
    """
    TODO: get a Metadir session; lock (and later sync) a file/db.
    Move current db setup to new txs.

    Determine the context, and from there get the session/dbref to
    initialize an SQLAlchemy session.
    The context depends on the current working directory, and defaults to
    the nearest workspace; perhaps a volume or the homedir.
    """
    session = Session.init(prog.pwd, opts.session)
    log.note('Session: %s', session)

    if session.context and confparse.haspath(session.context,
            'data.repository.root_dir'):
        prog.session = session
        yield dict(context=session.context)
        log.note('Context: %s', session.context)
        repo_root = session.context.settings.data.repository.root_dir
    else:
        repo_root = 'sa_migrate'

    # SA session
    repo_path = os.path.join(repo_root, opts.repo)

    if os.path.exists(repo_path):
        log.info("Reading SA migrate config for repo %r", repo_path)
        # hard coded module name, root dir for local repos
        from sa_migrate import custom
        config = custom.read(repo_path)
        log.info("Reading SA migrate config from %r", config)
        repo_opts = custom.migrate_opts(repo_path, config)
        dbref = repo_opts['url']
    else:
        dbref = opts.dbref

    log.note('DBRef: %s', dbref)

    if opts.init_db:
        log.debug("Initializing SQLAlchemy session for %s", dbref)
    sa = SessionMixin.get_session(opts.session, dbref, opts.init_db,
            metadata=SqlBase.metadata)

    yield dict(sa=sa)
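The "nearest workspace" context resolution described in Example #8 boils down to walking up from the working directory until a directory with a known marker is found, with a fallback otherwise. A minimal sketch of that idea follows; the '.workspace' marker name is a made-up placeholder, not what res.Workspace actually checks for.

import os

def find_nearest(path, marker):
    # Walk up from `path` until a directory containing `marker` is found
    path = os.path.abspath(path)
    while True:
        if os.path.exists(os.path.join(path, marker)):
            return path
        parent = os.path.dirname(path)
        if parent == path:  # reached the filesystem root
            return None
        path = parent

# e.g. nearest workspace above the cwd, falling back to the home directory
context_dir = find_nearest(os.getcwd(), '.workspace') or os.path.expanduser('~')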
Example #9
def cmd_issues(settings):
    """
    List issues.
    """
    sa = get_session(settings.dbref)
    l = 'Issues'
    # TODO: filter issues; where not closed, where due, started, etc.
    v = sa.query(rdm.Issue).count()
    log.info('{green}%s{default}: {bwhite}%s{default}', l, v)
    print('# ID PARENT_ID ROOT_ID SUBJECT')
    for i in sa.query(rdm.Issue).all():
        # Print one row per issue: id, parent id, root id and subject
        print(i.id, i.parent_id or '-', i.root_id or '-', i.subject)
Example #10
def cmd_custom_fields(settings):
    """
    List custom fields.
    """
    sa = get_session(settings.dbref)
    l = 'Custom Fields'
    # TODO: filter custom_fields
    v = sa.query(rdm.CustomField).count()
    log.info('{green}%s{default}: {bwhite}%s{default}', l, v)
    for rs in sa.query(rdm.CustomField).all():
        print(rs.id, rs.type)
        print("  Name:", rs.name)
        if rs.possible_values:  # YAML value
            print("  Possible values:")
            for x in rs.possible_values.split('\n'):
                if x == '---':
                    continue
                print("  ", x)
        if rs.description:
            print("  Description:")
            print("   ", rs.description.replace('\n', '\n    '))
Example #11
def rsr_set_root_bool(self, sa=None, opts=None):
    """
    Set root = true where count(jt.node_id) == 0, per jt.group_id.

    TODO: could be a single bulk update on
    core.groupnode_node_table.update().values(...).
    """
    gns = sa.query(GroupNode).all()
    if gns:
        for n in gns:
            if not n.supernode:
                n.root = True
                log.info("Root %s", n)
                sa.add(n)
        if opts.rsr_auto_commit:
            sa.commit()
    else:
        log.warn("No entries")
Example #12
def check_recurse(Klass, dirpath, opts):
    depth = dirpath.strip('/').count('/')
    if Klass.ignored(dirpath):
        log.info("Ignored directory %r", dirpath)
        return False
    elif opts.max_depth != -1 and depth + 1 >= opts.max_depth:
        log.info("Ignored directory %r at level %i", dirpath, depth)
        return False
    elif opts.recurse:
        return True
    elif opts.interactive:
        log.info("Interactive walk: %s", dirpath)
        if Klass.prompt_recurse(opts):
            return True
        elif Klass.prompt_ignore(opts):
            assert False, "TODO: write new ignores to file"
    return False
Example #13
def lnd_tag(opts=None, sa=None, ur=None, pwd=None):
    """
    Experiment, interactive interface.
    Tagging.
    """
    log.debug("{bblack}lnd{bwhite}:tag{default}")

    if not pwd:
        log.err("Not initialized")
        yield 1
        return

    tags = {'': 'Root'}
    FS_Path_split = re.compile(r'[\/\.\+,]+').split

    log.info("{bblack}Tagging paths in {green}%s{default}",
            os.path.realpath('.') + os.sep)

    try:
        for root, dirs, files in os.walk(pwd.local_path):
            for name in files + dirs:
                log.info("{bblack}Typing tags for {green}%s{default}", name)
                path = FS_Path_split(os.path.join(root, name))
                for tag in path:
                    yield
                    # Ask about each new tag, TODO: or rename, fuzzy match.
                    if tag not in tags:
                        type = raw_input('%s%s%s:?' % (
                            log.palette['yellow'], tag,
                            log.palette['default']))
                        if not type:
                            type = 'Tag'
                        tags[tag] = type

                log.info(''.join(["{bwhite} %s:{green}%s{default}" % (tag, name)
                    for tag in path if tag in tags]))

    except KeyboardInterrupt as e:
        log.err(e)
        yield 1
Example #14
def walk(Klass, path, opts=walk_opts, filters=(None, None)):
    """
    Built on os.walk, this goes over all directories and other paths
    non-recursively.
    It yields all full paths according to walk-opts.
    FIXME: could, but does not, yield INode subtype instances.
    XXX: filters, see dev_treemap
    """
    if not isinstance(opts, confparse.Values):
        opts_ = confparse.Values(Klass.walk_opts)
        opts_.update(opts)
        opts = opts_
    else:
        opts = confparse.Values(opts.copy())
    # FIXME: validate/process opts or put filter somewhere
    if opts.max_depth > 0:
        assert opts.recurse
    exclusive(opts, 'dirs files symlinks links pipes blockdevs')
    assert isinstance(path, basestring), (path, path.__class__)
    dirpath = None
    file_filters, dir_filters = filters
    if not os.path.isdir(path):
        if opts.exists > -1:
            log.err("Cannot walk non-dir path with opt.exists.")
        else:
            yield path
    else:
        if opts.dirs and opts.include_root:
            yield unicode(path, 'utf-8')
        for root, dirs, files in os.walk(path):
            for node in list(dirs):
                if not opts.recurse and not opts.interactive:
                    dirs.remove(node)
                if not opts.dirs:
                    continue
                dirpath = join(root, node)
                if dir_filters:
                    if not Dir.filter(dirpath, *dir_filters):
                        dirs.remove(node)
                        continue
                depth = pathdepth(dirpath.replace(path, ''))
                if not os.path.exists(dirpath):
                    log.err("Error: reported non-existent node %s", dirpath)
                    if node in dirs:
                        dirs.remove(node)
                    continue
                elif Klass.check_ignored(dirpath, opts):
                    if node in dirs:
                        dirs.remove(node)
                    continue
                elif not Klass.check_recurse(dirpath, opts):
                    if node in dirs:
                        dirs.remove(node)
                    # no continue: excluded dirs are still yielded at this
                    # level; the caller can sort out whether they want
                    # entries to subpaths
                assert isinstance(dirpath, basestring)
                try:
                    dirpath = unicode(dirpath)
                except UnicodeDecodeError:
                    log.err("Ignored non-ascii/illegal filename %s", dirpath)
                    continue
                assert isinstance(dirpath, unicode)
                try:
                    dirpath.encode('ascii')
                except UnicodeEncodeError:
                    log.err("Ignored non-ascii filename %s", dirpath)
                    continue
                dirpath = Klass.decode_path(dirpath, opts)
                yield dirpath
            for leaf in list(files):
                filepath = join(root, leaf)
                if file_filters:
                    if not File.filter(filepath, *file_filters):
                        files.remove(leaf)
                        continue
                if not os.path.exists(filepath):
                    log.err("Error: non-existent leaf %s", filepath)
                    if opts.exists is not None and not opts.exists:
                        if opts.files:
                            yield filepath
                    else:
                        files.remove(leaf)
                    continue
                elif Klass.check_ignored(filepath, opts):
                    log.info("Ignored file %r", filepath)
                    files.remove(leaf)
                    continue
                filepath = Klass.decode_path(filepath, opts)
                if not opts.files:  # XXX: other types
                    continue
                yield filepath
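Example #14's directory pruning relies on a property of os.walk: removing entries from the dirs list in place prevents descent into them. Below is a small self-contained sketch of that mechanism combined with a depth limit; the max_depth semantics and ignore set here are illustrative, not the walk_opts used above.

import os

def walk_limited(path, max_depth=2, ignore=('.git', '.svn')):
    # Yield file paths under `path`, pruning ignored and too-deep directories
    base_depth = path.rstrip(os.sep).count(os.sep)
    for root, dirs, files in os.walk(path):
        depth = root.rstrip(os.sep).count(os.sep) - base_depth
        # editing `dirs` in place tells os.walk which directories to descend into
        dirs[:] = [d for d in dirs if d not in ignore and depth + 1 < max_depth]
        for name in files:
            yield os.path.join(root, name)

for p in walk_limited('.', max_depth=2):
    print(p)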
Example #15
def vc_repo(self, prog=None, sa=None, context=None):
    """
    TODO: Yield VC manager for current checkout dir.
    """
    # TODO: should be VC checkout dir
    log.info('vc:repo done')
Example #16
def vc_status(self, prog=None, sa=None, context=None):
    """
    TODO: Report status bits, dirty lists and summaries.
    """
    log.info('vc:status done')
Example #17
def init(self, path):
    # Load JSON data from the given path
    log.info("reading %s", path)
    with open(path) as f:
        self.json = res.js.loads(f.read())