def get_reporef_GIT(klass, path):
    """Return the URL of the first fetch remote for the Git checkout at path."""
    r = lib.cmd("cd %s;git remote -v", path)\
        .strip().split('\n')
    while r:
        ri = r.pop(0)
        if 'fetch' in ri:
            # `git remote -v` lines are "<name>\t<url> (<mode>)".
            return ri.split('\t')[1].split(' ')[0]
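
# A minimal, self-contained sketch of the `git remote -v` line format
# that get_reporef_GIT parses above; the sample URL is illustrative
# only, not taken from any repository in this workspace.
def _example_parse_git_remote():
    """
    >>> _example_parse_git_remote()
    'git@example.com:user/repo.git'
    """
    sample = (
        "origin\tgit@example.com:user/repo.git (fetch)\n"
        "origin\tgit@example.com:user/repo.git (push)"
    )
    for line in sample.split('\n'):
        if 'fetch' in line:
            # Name and URL are tab-separated; mode is space-separated.
            return line.split('\t')[1].split(' ')[0]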
def cmd_update(refs, opts, g):
    """
    Without arguments, update for the entire workspace. Finds local SCM
    dirs and ensures a pdoc record exists for each. Records are created
    or updated from `htd info`. Only prefixes older than `g.older_than`
    are updated; set it to 0 to update all.
    """
    global ctx
    ws = ctx.ws
    s = g.quiet
    updated = 0
    os.chdir(ws.path)
    if refs:
        log.stderr('Filtering on %s' % (', '.join(refs)))
        pathiter = chain(*[ws.find_scmdirs(ref, s=s) for ref in refs])
    else:
        pathiter = ws.find_scmdirs(s=s)
    for p in pathiter:
        if os.path.islink(p):
            continue
        prefix = ws.relpath(p)
        if prefix not in ws.pdoc['repositories'] or (
                older_than(ws.pdoc['repositories'][prefix]['(date)'],
                    g.older_than)):
            log.stderr("Updating %r" % prefix)
            cmd = "htd info '%s'" % p
            out = lib.cmd(cmd, allowerrors=True)
            data = confparse.yaml_loads(out)
            if prefix in ws.pdoc['repositories']:
                deep_update(
                    [ws.pdoc['repositories'][prefix], data],
                    confparse.Values(dict(opts=opts)))
            else:
                ws.pdoc['repositories'][prefix] = data
        if g.categorize:
            # Add type and ID
            repo = ws.pdoc['repositories'][prefix]
            if catalog(prefix, repo, g):
                updated += 1
        if g.normalize_remotes:
            data = ws.pdoc['repositories'][prefix]
            cmd = "htd remote ... '%s'" % p
            # TODO: use htd to get latest remote urls, catch up on renames
            for remote in data['remotes']:
                print(remote['name'], remote['url'])
    ws.yamlsave('pdoc', default_flow_style=not g.pretty_doc,
        ignore_aliases=True)
    log.stderr("%i prefixes OK (%s old at most)",
        len(ws.pdoc['repositories'].keys()), g.older_than)
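
# cmd_update relies on an older_than helper that is not defined in this
# module. The sketch below shows one plausible reading, assuming the
# '(date)' field holds an ISO-8601 timestamp and the threshold is given
# in seconds; both details are assumptions, not confirmed by this code.
import datetime

def _older_than_sketch(date_str, max_age_seconds):
    recorded = datetime.datetime.fromisoformat(date_str)
    age = (datetime.datetime.now() - recorded).total_seconds()
    # With a threshold of 0 every record counts as stale, so all
    # prefixes are updated, matching the docstring above.
    return max_age_seconds == 0 or age > max_age_seconds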
def load_prefixes(self):
    out = lib.cmd('htd prefixes table')
    for l in out.split('\n'):
        print(l)
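
# load_prefixes currently only echoes the table. A possible parsing
# step, sketched under the unverified assumption that `htd prefixes
# table` emits whitespace-separated "<prefix> <path>" rows:
def _parse_prefixes_table_sketch(out):
    prefixes = {}
    for line in out.split('\n'):
        fields = line.split()
        if len(fields) >= 2:
            prefixes[fields[0]] = fields[1]
    return prefixes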
def get_reporef_Subversion(klass, path):
    """Return the repository URL for the Subversion checkout at path."""
    # Raw string so the backslash escapes reach the shell verbatim.
    return lib.cmd(
        r"cd %s;svn info | grep URL\: | sed 's/[^:]*\: //'", path).strip()
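
# The same URL extraction done in Python instead of grep/sed, as a
# sketch; it assumes only that `svn info` prints a "URL: <url>" line,
# which is standard Subversion output.
def _parse_svn_info_url(info_output):
    for line in info_output.split('\n'):
        if line.startswith('URL:'):
            return line.split(':', 1)[1].strip()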
def cmd_validate(doc, g):
    global ctx
    if not doc:
        doc = ctx.ws.get_yaml('pdoc')
    lib.cmd(["htd", "validate-pdoc", doc])