def route_dynamic(path):
    args = dict(request.args)
    if 'direction' in args:
        direction = args.pop('direction')
    else:
        direction = 'OUTGOING'  # should always be outgoing here since we can't specify?

    if 'format' in args:
        format_ = args.pop('format')
    else:
        format_ = None

    try:
        j = sgd.dispatch(path, **args)
    except rHTTPError as e:
        log.exception(e)
        abort(e.response.status_code)  # DO NOT PASS ALONG THE MESSAGE
    except ValueError as e:
        log.exception(e)
        abort(404)

    if j is None or 'edges' not in j or not j['edges']:
        log.error(pformat(j))
        log.debug(sgd._last_url)
        return abort(400)

    prov = [
        hfn.titletag(f'Dynamic query result for {path}'),
        f'<meta name="date" content="{UTCNOWISO()}">',
        f'<link rel="http://www.w3.org/ns/prov#wasGeneratedBy" href="{wgb}">',
        '<meta name="representation" content="SciGraph">',
        f'<link rel="http://www.w3.org/ns/prov#wasDerivedFrom" href="{sgd._last_url}">']

    kwargs = {'json': cleanBad(j), 'html_head': prov}
    tree, extras = creatTree(*Query(None, None, direction, None), **kwargs)
    #print(extras.hierarhcy)
    #print(tree)
    if format_ is not None:
        if format_ == 'table':
            #breakpoint()
            def nowrap(class_, tag=''):
                return (f'{tag}.{class_}'
                        '{ white-space: nowrap; }')

            ots = [OntTerm(n)
                   for n in flatten_tree(extras.hierarchy)
                   if 'CYCLE' not in n]
            #rows = [[ot.label, ot.asId().atag(), ot.definition] for ot in ots]
            rows = [[ot.label, hfn.atag(ot.iri, ot.curie), ot.definition]
                    for ot in ots]

            return hfn.htmldoc(hfn.render_table(rows, 'label', 'curie', 'definition'),
                               styles=(hfn.table_style, nowrap('col-label', 'td')))

    return hfn.htmldoc(extras.html, other=prov, styles=hfn.tree_styles)

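# Hypothetical registration sketch (not part of the original module): it assumes
# a Flask `app` object and that the dynamic endpoint is mounted at
# /query/dynamic/<path:path>, which is what the example URLs rendered by
# route_examples below imply; the real server setup may wire this differently.
def _register_dynamic_route(app):
    # 'route_dynamic' as the endpoint name matches the url_for() calls below.
    app.add_url_rule('/query/dynamic/<path:path>', 'route_dynamic', route_dynamic)
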
def route_examples():
    links = hfn.render_table(
        [[name, hfn.atag(url_for("route_query", pred=pred, root=root)
                         + (args[0] if args else ''),
                         f'../query/{pred}/{root}{args[0] if args else ""}')]
         for name, pred, root, *args in examples],
        'Root class',
        '../query/{predicate-curie}/{root-curie}?direction=INCOMING&depth=10&branch=master&local=false',
        halign='left')

    flinks = hfn.render_table(
        [[name, hfn.atag(url_for("route_filequery", pred=pred, root=root, file=file)
                         + (args[0] if args else ''),
                         f'../query/{pred}/{root}/{file}{args[0] if args else ""}')]
         for name, pred, root, file, *args in file_examples],
        'Root class',
        '../query/{predicate-curie}/{root-curie}/{ontology-filepath}?direction=INCOMING&depth=10&branch=master&restriction=false',
        halign='left')

    dlinks = hfn.render_table(
        [[name, hfn.atag(url_for("route_dynamic", path=path)
                         + (querystring if querystring else ''),
                         f'../query/dynamic/{path}{querystring if querystring else ""}')]
         for name, path, querystring in dynamic_examples],
        'Root class',
        '../query/dynamic/{path}?direction=OUTGOING&dynamic=query&args=here',
        halign='left')

    return hfn.htmldoc(links, flinks, dlinks,
                       title='Example hierarchy queries')

def route_sparc_demos_isan2019_flatmap_queries():
    # lift up to load from an external source at some point
    # from pyontutils.core import OntResPath
    # orp = OntResPath('annotations.ttl')
    # [i for i in sorted(set(OntId(e) for t in orp.graph for e in t)) if i.prefix in ('UBERON', 'FMA', 'ILX')]
    query = """
    MATCH (blank)-
    [entrytype:ilxtr:hasSomaLocatedIn|ilxtr:hasAxonLocatedIn|ilxtr:hasDendriteLocatedIn|ilxtr:hasPresynapticTerminalsIn]
    ->(location:Class{{iri: "{iri}"}})
    WITH entrytype, location, blank
    MATCH (phenotype)<-[predicate]-(blank)<-[:equivalentClass]-(neuron)
    WHERE NOT (phenotype.iri =~ ".*_:.*")
    RETURN location, entrytype.iri, neuron.iri, predicate.iri, phenotype
    """

    def fetch(iri, limit=10):
        q = query.format(iri=iri)
        log.debug(q)
        blob = data_sgc.execute(q, limit, 'application/json')
        # oh boy
        # if there are fewer results than the limit scigraph segfaults
        # and returns nothing
        return blob

    hrm = [fetch(oid) for oid in test_terms]

    return hfn.htmldoc(hfn.render_table([[1, 2]], 'oh', 'no'),
                       title='Simulated flatmap query results',)

def route_sparc_view():
    hyp_rows = []
    spaces = hfn.nbsp * 8
    for tier1, tier2_on in sorted(sparc_view.items()):
        url = url_for('route_sparc_view_query', tier1=tier1)
        tier1_row = tier1.split(YML_DELIMITER)
        tier1_row += tier2_on['CURIES']
        tagged_tier1_row = tag_row(tier1_row, url)
        hyp_rows.append(tagged_tier1_row)
        if not tier2_on:
            continue

        # BUG: Will break what we want if more is added to spinal cord
        if len(tier2_on.keys()) > 15:
            continue

        if tier1_row[0] == 'Nerve roots of spinal cord segments':
            continue

        for tier2, tier3_on in tier2_on.items():
            if tier2 == 'CURIES':
                continue

            url = url_for('route_sparc_view_query', tier1=tier1, tier2=tier2)
            tier2_row = tier2.split(YML_DELIMITER)
            tier2_row += tier3_on['CURIES']
            tagged_tier2_row = tag_row(row=tier2_row, url=url, tier_level=1)
            if len(list(sparc_view[tier1_row[0]][tier2_row[0]].keys())) == 1:
                tagged_tier2_row[0] = spaces + tier2_row[0]

            hyp_rows.append(tagged_tier2_row)

    return hfn.htmldoc(
        hfn.render_table(hyp_rows),
        title='Main Page Sparc',
        styles=["p {margin: 0px; padding: 0px;}"],
        metas=({'name': 'date', 'content': time()},),
    )

def route_sparc_index():
    hyp_rows = hyperlink_tree(sparc_view)
    return hfn.htmldoc(
        hfn.render_table(hyp_rows),
        title='SPARC Anatomical terms index',
        metas=({'name': 'date', 'content': time()},),
    )

def write_html(graph, path):
    body = graph.asMimetype('text/turtle+html').decode()
    html = hfn.htmldoc(body,
                       styles=(hfn.ttl_html_style,),
                       title=f'Protocol {path.name}',)
    with open(path, 'wt') as f:
        f.write(html)

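# Hypothetical usage sketch for write_html (not in the original source): it
# assumes `graph` is something like a pyontutils OntGraph, i.e. any object that
# exposes asMimetype('text/turtle+html'); the import and file paths here are
# illustrative only.
def _write_html_example():
    from pathlib import Path
    from pyontutils.core import OntGraph  # assumption: provides asMimetype
    graph = OntGraph().parse('protocol.ttl', format='turtle')
    write_html(graph, Path('/tmp/protocol.html'))
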
def route_sparc_connectivity_query():
    kwargs = request.args
    log.debug(kwargs)
    script = """
    var ele = document.getElementById('model-selector')
    ele.onselect
    """
    return hfn.htmldoc(
        hfn.render_form(
            [[('Model',), {}],
             [None, None],
             [
                 #('Kidney', 'Defensive breathing',),  # TODO autopopulate
                 ('Urinary Omega Tree',),
                 {'id': 'model-selector', 'name': 'model'}
             ]],  # FIXME auto via js?

            # FIXME must switch start and stop per model (argh)
            # or hide/show depending on which model is selected
            [[('start',), {}],
             [None, None],
             [
                 #('one', 'two', 'three'),  # TODO auto populate
                 uot_ordered,
                 {'name': 'start'}
             ]],  # FIXME auto via js?

            [[('end',), {}],
             [None, None],
             [
                 #('one', 'two', 'three'),  # TODO auto populate
                 uot_ordered,
                 {'name': 'end'}
             ]],  # FIXME auto via js?

            [[tuple(), {}],
             [tuple(), {'type': 'submit', 'value': 'Query'}],
             [None, None]],  # FIXME auto via js?

            action='view', method='POST'),

        scripts=(script,),
        title='Connectivity query')

def connectivity_query(relationship=None, start=None, end=None):
    # f-strings so the start/end/relationship parameters are actually interpolated
    j = sgd.dispatch('/dynamic/shortestSimple?'
                     f'start_id={start.quoted}&'
                     f'end_id={end.quoted}&'
                     f'relationship={relationship}')
    kwargs = {'json': j}
    # FIXME root, pred, direction, and depth are not defined in this scope
    tree, extras = creatTree(*Query(root, pred, direction, depth), **kwargs)
    return htmldoc(extras.html, styles=hfn.tree_styles)

def route_sparc_dynamic(path):
    args = dict(request.args)
    if 'direction' in args:
        direction = args.pop('direction')
    else:
        direction = 'OUTGOING'  # should always be outgoing here since we can't specify?

    if 'format' in args:
        format_ = args.pop('format')
    else:
        format_ = None

    j = data_sgd.dispatch(path, **args)
    #breakpoint()
    if not j['edges']:
        log.error(pformat(j))  # pformat, not pprint: pprint prints and returns None
        return abort(400)

    kwargs = {'json': j}
    tree, extras = creatTree(*Query(None, None, direction, None), **kwargs)
    #print(extras.hierarhcy)
    print(tree)
    if format_ is not None:
        if format_ == 'table':
            #breakpoint()
            def nowrap(class_, tag=''):
                return (f'{tag}.{class_}'
                        '{ white-space: nowrap; }')

            ots = [OntTerm(n)
                   for n in flatten_tree(extras.hierarchy)
                   if 'CYCLE' not in n]
            #rows = [[ot.label, ot.asId().atag(), ot.definition] for ot in ots]
            rows = [[ot.label, hfn.atag(ot.iri, ot.curie), ot.definition]
                    for ot in ots]

            return htmldoc(hfn.render_table(rows, 'label', 'curie', 'definition'),
                           styles=(hfn.table_style, nowrap('col-label', 'td')))

    return htmldoc(extras.html, styles=hfn.tree_styles)

def route_apinat_demo(ext=None):
    source = Path('~/ni/sparc/apinat/sources/').expanduser()  # FIXME config probably
    rm = pipes.ApiNATOMY(source / 'apinatomy-resourceMap.json')
    r = pipes.ApiNATOMY_rdf(rm.data)  # FIXME ... should be able to pass the pipeline
    if ext == '.ttl':
        return r.data.ttl, 200, {'Content-Type': 'text/turtle; charset=utf-8',}

    return hfn.htmldoc(r.data.ttl_html,
                       styles=(hfn.ttl_html_style,),
                       title='ApiNATOMY demo')

def test_htmldoc(self):
    doc = hfn.htmldoc(hfn.atag('https://example.com/'),
                      title='test page',
                      metas=({'test': 'meta'},),
                      styles=(hfn.monospace_body_style,),
                      scripts=('console.log("lol")',))
    assert doc == doc_expect

def route_():
    d = url_for('route_docs')
    e = url_for('route_examples')
    i = url_for('route_import_chain')
    return hfn.htmldoc(hfn.atag(d, 'Docs'), '<br>',
                       hfn.atag(e, 'Examples'), '<br>',
                       hfn.atag(i, 'Import chain'),
                       title='NIF ontology hierarchies')

def route_sparc_demos_isan2019_neuron_connectivity():
    def connected(start):
        log.debug(start)
        blob = data_sgd.neurons_connectivity(start)  #, limit=9999)
        edges = blob['edges']
        neurons = {}
        types = {}
        rows = []
        start_type = None
        sc = OntId(start).curie
        for e in edges:
            s, p, o = e['sub'], e['pred'], e['obj']
            if p == 'operand':
                continue

            if s.startswith('_:'):
                if s not in neurons:
                    neurons[s] = []
                    types[s] = {}

                otp = OntTerm(p)
                oto = OntTerm(o)
                neurons[s].append((otp, oto))
                if o == sc:
                    start_type = otp

                if oto not in types[s]:
                    types[s][oto] = []

                types[s][oto].append(otp)

        for v in neurons.values():
            v.sort()

        return OntTerm(start), start_type, neurons, types

    hrm = [connected(t) for t in set(test_terms)]
    header = ['Start', 'Start Type', 'Neuron', 'Relation', 'Target']
    rows = []
    for start, start_type, neurons, types in sorted(hrm):
        start = start.atag()
        start_type = start_type.atag() if start_type is not None else ''
        for i, v in enumerate(neurons.values(), 1):
            neuron = i
            for p, o in v:
                relation = p.atag()
                target = o.atag()
                row = start, start_type, neuron, relation, target
                rows.append(row)

            rows.append(['|'] + [' '] * 4)

    h = hfn.htmldoc(hfn.render_table(rows, *header),
                    title='neuron connectivity')
    return h

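# Hypothetical shape of the connectivity blob consumed by connected() above,
# inferred from the key accesses in that function; the identifiers are
# illustrative only and not real query output.
_example_connectivity_blob = {
    'edges': [
        # blank-node subjects (prefixed '_:') group phenotypes per neuron
        {'sub': '_:b0', 'pred': 'ilxtr:hasSomaLocatedIn', 'obj': 'UBERON:0002107'},
        {'sub': '_:b0', 'pred': 'ilxtr:hasAxonLocatedIn', 'obj': 'UBERON:0001759'},
    ],
}
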
def make_html(self):
    line = getSourceLine(self.__class__)
    wgb = self.wasGeneratedBy.format(line=line)
    prov = makeProv('owl:imports', 'NIFTTL:nif.ttl', wgb)
    tree, extra = self.make_import_chain()
    if tree is None:
        html_all = ''
    else:
        html = extra.html.replace('NIFTTL:', '')
        html_all = hfn.htmldoc(html,
                               other=prov,
                               styles=hfn.tree_styles)

    self.html = html_all
    return html_all

def route_reports():
    report_names = (
        'completeness',
        'size',
        'filetypes',
        'pathids',
        'keywords',
        'subjects',
        'errors',
        'terms',
    )
    report_links = [atag(url_for(f'route_reports_{rn}', ext=None), rn) + '<br>\n'
                    for rn in report_names]
    return htmldoc('Reports<br>\n',
                   *report_links,
                   title='Reports')

def route_sparc_view_query(tier1, tier2=None):
    journey = sparc_view
    if tier1 not in journey:
        return abort(404)

    journey = journey[tier1]
    if tier2 is not None:
        if tier2 not in journey:
            return abort(404)

        journey = journey[tier2]

    hyp_rows = hyperlink_tree(journey)
    return hfn.htmldoc(
        hfn.render_table(hyp_rows),
        title='Terms for ' + (tier2 if tier2 is not None else tier1),
        metas=({'name': 'date', 'content': time()},),
    )

def route_sparc_connectivity_query():
    kwargs = request.args
    log.debug(kwargs)
    return hfn.htmldoc('form here', title='Connectivity query')
    # NOTE: unreachable while the placeholder response above is returned
    return connectivity_query(**kwargs)

def route_sparc_connectivity_view():
    kwargs = request.args
    log.debug(kwargs)
    return hfn.htmldoc(title='Connectivity view')

def main():
    from docopt import docopt
    args = docopt(__doc__)

    patch_theme_setup(theme)

    BUILD = working_dir / 'doc_build'
    if not BUILD.exists():
        BUILD.mkdir()

    docs_dir = BUILD / 'docs'
    if not docs_dir.exists():
        docs_dir.mkdir()

    theme_styles_dir = theme_repo / 'styles'
    doc_styles_dir = docs_dir / 'styles'
    if doc_styles_dir.exists():
        shutil.rmtree(doc_styles_dir)

    shutil.copytree(theme_styles_dir, doc_styles_dir)

    docstring_kwargs = docstrings()
    wd_docs_kwargs = [docstring_kwargs]
    if args['--docstring-only']:
        outname, rendered = render_docs(wd_docs_kwargs, BUILD, 1)[0]
        if not outname.parent.exists():
            outname.parent.mkdir(parents=True)

        with open(outname.as_posix(), 'wt') as f:
            f.write(rendered)

        return

    repos = (Repo(Path(devconfig.ontology_local_repo).resolve().as_posix()),
             Repo(working_dir.as_posix()),
             *(Repo(Path(devconfig.git_local_base, repo_name).as_posix())
               for repo_name in ('ontquery', 'sparc-curation')))

    skip_folders = 'notebook-testing', 'complete', 'ilxutils', 'librdflib'
    rskip = {'pyontutils': ('docs/NeuronLangExample.ipynb',  # exact skip due to moving file
                            'ilxutils/ilx-playground.ipynb'),
             'sparc-curation': ('README.md',),}

    et = tuple()
    # TODO move this into run_all
    #wd_docs_kwargs = [(Path(repo.working_dir).resolve(),
    wd_docs_kwargs += [(Path(repo.working_dir).resolve(),
                        Path(repo.working_dir, f).resolve(),
                        makeKwargs(repo, f))
                       for repo in repos
                       for f in repo.git.ls_files().split('\n')
                       if Path(f).suffix in suffixFuncs
                       #and Path(repo.working_dir).name == 'NIF-Ontology' and f == 'README.md'  # DEBUG
                       #and Path(repo.working_dir).name == 'pyontutils' and f == 'README.md'  # DEBUG
                       #and Path(repo.working_dir).name == 'sparc-curation' and f == 'docs/setup.org'  # DEBUG
                       and noneMembers(f, *skip_folders)
                       and f not in rskip.get(Path(repo.working_dir).name, et)]

    # doesn't work because read-from-minibuffer cannot block
    #compile_org_forever = ['emacs', '-q', '-l',
    #                       Path(devconfig.git_local_base,
    #                            'orgstrap/init.el').resolve().as_posix(),
    #                       '--batch', '-f', 'compile-org-forever']
    #org_compile_process = subprocess.Popen(compile_org_forever,
    #                                       stdin=subprocess.PIPE,
    #                                       stdout=subprocess.PIPE,
    #                                       stderr=subprocess.PIPE)

    if args['--spell']:
        spell((f.as_posix() for _, f, _ in wd_docs_kwargs))
        return

    outname_rendered = render_docs(wd_docs_kwargs, BUILD, int(args['--jobs']))

    titles = {
        'Components': 'Components',
        'NIF-Ontology/README.html': 'Introduction to the NIF Ontology',
        #'ontquery/README.html': 'Introduction to ontquery',
        'pyontutils/README.html': 'Introduction to pyontutils',
        'pyontutils/nifstd/README.html': 'Introduction to nifstd-tools',
        'pyontutils/neurondm/README.html': 'Introduction to neurondm',
        'pyontutils/ilxutils/README.html': 'Introduction to ilxutils',

        'Developer docs': 'Developer docs',
        'NIF-Ontology/docs/processes.html': 'Ontology development processes (START HERE!)',  # HOWTO
        'NIF-Ontology/docs/development-setup.html': 'Ontology development setup',  # HOWTO
        'sparc-curation/docs/setup.html': 'Developer and curator setup (broader scope but extremely detailed)',
        'NIF-Ontology/docs/import-chain.html': 'Ontology import chain',  # Documentation
        'pyontutils/nifstd/resolver/README.html': 'Ontology resolver setup',
        'pyontutils/nifstd/scigraph/README.html': 'Ontology SciGraph setup',
        'sparc-curation/resources/scigraph/README.html': 'SPARC SciGraph setup',
        'pyontutils/docstrings.html': 'Command line programs',
        'NIF-Ontology/docs/external-sources.html': 'External sources for the ontology',  # Other
        'ontquery/docs/interlex-client.html': 'InterLex client library documentation',

        'Contributing': 'Contributing',
        'pyontutils/nifstd/development/README.html': 'Contributing to the ontology',
        'pyontutils/nifstd/development/community/README.html': 'Contributing term lists to the ontology',
        'pyontutils/neurondm/neurondm/models/README.html': 'Contributing neuron terminology to the ontology',

        'Ontology content': 'Ontology content',
        'NIF-Ontology/docs/brain-regions.html': 'Parcellation schemes',  # Ontology Content
        'pyontutils/nifstd/development/methods/README.html': 'Methods and techniques',  # Ontology content
        'NIF-Ontology/docs/Neurons.html': 'Neuron Lang overview',
        'pyontutils/neurondm/docs/NeuronLangExample.html': 'Neuron Lang examples',
        'pyontutils/neurondm/docs/neurons_notebook.html': 'Neuron Lang setup',

        'Specifications': 'Specifications',
        'NIF-Ontology/docs/interlex-spec.html': 'InterLex specification',  # Documentation
        'pyontutils/ttlser/docs/ttlser.html': 'Deterministic turtle specification',

        'Other': 'Other',
        'pyontutils/htmlfn/README.html': 'htmlfn readme',
        'pyontutils/ttlser/README.html': 'ttlser readme',
        'sparc-curation/docs/background.html': '',  # present but not visibly listed
    }

    titles_sparc = {  # TODO abstract this out ...
        'Background': 'Background',
        'sparc-curation/docs/background.html': 'SPARC curation background',
        'Other': 'Other',
        'sparc-curation/README.html': 'sparc-curation readme',
    }

    index = ['<b class="Components">Components</b>',
             '<b class="Developer docs">Developer docs</b>',
             '<b class="Contributing">Contributing</b>',
             '<b class="Ontology content">Ontology content</b>',
             '<b class="Specifications">Specifications</b>',
             '<b class="Other">Other</b>',]
    for outname, rendered in outname_rendered:
        apath = outname.relative_to(BUILD / 'docs')
        title = titles.get(apath.as_posix(), None)
        # TODO parse out/add titles
        value = atag(apath) if title is None else atag(apath, title)
        index.append(value)

        if not outname.parent.exists():
            outname.parent.mkdir(parents=True)

        with open(outname.as_posix(), 'wt') as f:
            f.write(rendered)

    lt = list(titles)
    def title_key(a):
        return lt.index(a.split('"')[1])

    index_body = '<br>\n'.join(['<h1>Documentation Index</h1>'] +
                               sorted(index, key=title_key))
    with open((BUILD / 'docs/index.html').as_posix(), 'wt') as f:
        f.write(htmldoc(index_body,
                        title='NIF Ontology documentation index'))

def render(pred, root, direction=None, depth=10, local_filepath=None,
           branch='master', restriction=False, wgb='FIXME', local=False,
           verbose=False, flatten=False,):
    kwargs = {'local': local, 'verbose': verbose}
    prov = makeProv(pred, root, wgb)
    if local_filepath is not None:
        github_link = ('https://github.com/SciCrunch/NIF-Ontology/raw/'
                       f'{branch}/{local_filepath}')
        prov.append('<link rel="http://www.w3.org/ns/prov#wasDerivedFrom" '
                    f'href="{github_link}">')
        graph = graphFromGithub(github_link, verbose)
        qname = graph.namespace_manager._qhrm  # FIXME
        labels_index = {qname(s): str(o) for s, o in graph[:rdfs.label:]}
        if pred == 'subClassOf':
            pred = 'rdfs:subClassOf'  # FIXME qname properly?
        elif pred == 'subPropertyOf':
            pred = 'rdfs:subPropertyOf'

        try:
            kwargs['json'] = graph.asOboGraph(pred, restriction=restriction)
            kwargs['prefixes'] = {k: str(v) for k, v in graph.namespace_manager}
        except KeyError as e:
            if verbose:
                log.error(str(e))

            return abort(422, 'Unknown predicate.')
    else:
        kwargs['graph'] = sgg
        # FIXME this does not work for a generic scigraph load ...
        # and it should not be calculated every time anyway!
        # oh look, here we are needing a class again
        if False:
            versionIRI = [
                e['obj']
                for e in sgg.getNeighbors('http://ontology.neuinfo.org/'
                                          'NIF/ttl/nif.ttl')['edges']
                if e['pred'] == 'versionIRI'][0]
            #print(versionIRI)
            prov.append('<link rel="http://www.w3.org/ns/prov#wasDerivedFrom" '
                        f'href="{versionIRI}">')  # FIXME wrong and won't resolve

        prov.append('<meta name="representation" content="SciGraph">')  # FIXME :/

    kwargs['html_head'] = prov
    try:
        if root.startswith('http'):  # FIXME this codepath is completely busted?
            if 'prefixes' in kwargs:
                rec = None
                for k, v in kwargs.items():
                    if root.startswith(v):
                        rec = k + 'r:' + root.strip(v)  # FIXME what?!
                        break

                if rec is None:
                    raise KeyError(f'no prefix found for {root}')
        else:
            rec = sgv.findById(root)
            if 'curie' in rec:
                root_curie = rec['curie']
                # FIXME https://github.com/SciGraph/SciGraph/issues/268
                if not root_curie.endswith(':') and '/' not in root_curie:
                    root = root_curie
                else:
                    kwargs['curie'] = root_curie
            elif 'prefixes' not in kwargs and root.endswith(':'):
                kwargs['curie'] = root
                root = sgc._curies[root.rstrip(':')]  # also 268

        tree, extras = creatTree(*Query(root, pred, direction, depth), **kwargs)
        dematerialize(list(tree.keys())[0], tree)
        if flatten:
            if local_filepath is not None:
                def safe_find(n):
                    return {'labels': [labels_index[n]],
                            'deprecated': False  # FIXME inaccurate
                           }
            else:
                def safe_find(n):  # FIXME scigraph bug
                    if n.endswith(':'):
                        n = sgc._curies[n.rstrip(':')]
                    elif '/' in n:
                        prefix, suffix = n.split(':')
                        iriprefix = sgc._curies[prefix]
                        n = iriprefix + suffix

                    return sgv.findById(n)

            out = set(n for n in flatten_tree(extras.hierarchy))

            try:
                lrecs = Async()(deferred(safe_find)(n) for n in out)
            except RuntimeError:
                asyncio.set_event_loop(current_app.config['loop'])
                lrecs = Async()(deferred(safe_find)(n) for n in out)

            rows = sorted(((r['labels'][0] if r['labels'] else '') + ',' + n
                           for r, n in zip(lrecs, out)
                           # FIXME still stuff wrong, but better for non cache case
                           if not r['deprecated']),
                          key=lambda lid: lid.lower())
            return '\n'.join(rows), 200, {'Content-Type': 'text/plain;charset=utf-8'}
        else:
            return hfn.htmldoc(extras.html,
                               other=prov,
                               styles=hfn.tree_styles)

    except (KeyError, TypeError) as e:
        if verbose:
            log.error(f'{type(e)} {e}')

        if sgg.getNode(root):
            # FIXME distinguish these cases...
            message = 'Unknown predicate or no results.'
        elif 'json' in kwargs:
            message = 'Unknown root.'
            r = graph.namespace_manager.expand(root)
            for s in graph.subjects():
                if r == s:
                    message = ('No results. '
                               'You are querying a ttl file directly, '
                               'did you remember to set ?restriction=true?')
                    break
        else:
            message = 'Unknown root.'

        return abort(422, message)

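# Hypothetical call sketch for render (not in the original source): the keyword
# values mirror the example query string shown in route_examples
# ('?direction=INCOMING&depth=10&branch=master&restriction=false'); the curie
# and file path are placeholders, and the call still needs the module-level
# sgg/sgv/sgc objects plus a Flask request context for abort() on error paths.
def _render_example():
    return render('subClassOf', 'UBERON:0000955',
                  direction='INCOMING', depth=10,
                  local_filepath='ttl/bridge/uberon-bridge.ttl',
                  branch='master', restriction=True)
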
def sparc_dynamic(data_sgd, data_sgc, path, wgb, process=lambda coll, blob: blob):
    args = dict(request.args)
    if 'direction' in args:
        direction = args.pop('direction')
    else:
        direction = 'OUTGOING'  # should always be outgoing here since we can't specify?

    if 'format' in args:
        format_ = args.pop('format')
    else:
        format_ = None

    if 'apinat' in path:  # FIXME bad hardcoded hack
        _old_get = data_sgd._get
        try:
            data_sgd._get = data_sgd._normal_get
            j = data_sgd.dispatch(path, **args)
        except ValueError as e:
            log.exception(e)
            abort(404)
        except rHTTPError as e:
            log.exception(e)
            abort(e.response.status_code)  # DO NOT PASS ALONG THE MESSAGE
        finally:
            data_sgd._get = _old_get
    else:
        try:
            j = data_sgd.dispatch(path, **args)
        except rHTTPError as e:
            log.exception(e)
            abort(e.response.status_code)  # DO NOT PASS ALONG THE MESSAGE
        except ValueError as e:
            log.exception(e)
            abort(404)

    j = process(collapse_apinat, j)

    if j is None or 'edges' not in j:
        log.error(pformat(j))
        return abort(400)
    elif not j['edges']:
        return node_list(j['nodes'])  # FIXME ... really should error?

    if path.endswith('housing-lyphs'):  # FIXME hack
        root = 'NLX:154731'
        #direction = 'INCOMING'
    else:
        root = None

    prov = [
        hfn.titletag(f'Dynamic query result for {path}'),
        f'<meta name="date" content="{UTCNOWISO()}">',
        f'<link rel="http://www.w3.org/ns/prov#wasGeneratedBy" href="{wgb}">',
        '<meta name="representation" content="SciGraph">',
        ('<link rel="http://www.w3.org/ns/prov#wasDerivedFrom" '
         f'href="{data_sgd._last_url}">')]

    kwargs = {'json': cleanBad(j),
              'html_head': prov,
              'prefixes': data_sgc.getCuries(),  # FIXME efficiency
             }
    tree, extras = creatTree(*Query(root, None, direction, None), **kwargs)
    #print(extras.hierarhcy)
    #print(tree)
    if format_ is not None:
        if format_ == 'table':
            #breakpoint()
            def nowrap(class_, tag=''):
                return (f'{tag}.{class_}'
                        '{ white-space: nowrap; }')

            ots = [OntTerm(n)
                   for n in flatten_tree(extras.hierarchy)
                   if 'CYCLE' not in n]
            #rows = [[ot.label, ot.asId().atag(), ot.definition] for ot in ots]
            rows = [[ot.label, hfn.atag(ot.iri, ot.curie), ot.definition]
                    for ot in ots]

            return hfn.htmldoc(hfn.render_table(rows, 'label', 'curie', 'definition'),
                               styles=(hfn.table_style, nowrap('col-label', 'td')))

    return hfn.htmldoc(extras.html, other=prov, styles=hfn.tree_styles)

def default(self):
    out_path = self.options.out_path
    BUILD = self.options.BUILD

    glb = Path(auth.get_path('git-local-base'))
    theme_repo = glb / 'org-html-themes'
    theme = theme_repo / 'setup/theme-readtheorg-local.setup'
    prepare_paths(BUILD, out_path, theme_repo, theme)

    doc_config = self._doc_config
    names = tuple(doc_config['repos']) + tuple(self.options.repo)  # TODO fetch if missing ?
    repo_paths = [(glb / name).resolve() for name in names]
    repos = [p.repo for p in repo_paths]
    skip_folders = doc_config.get('skip-folders', tuple())
    rskip = doc_config.get('skip', {})

    # TODO move this into run_all
    docstring_kwargs = makeDocstrings(BUILD, repo_paths, skip_folders, rskip)
    wd_docs_kwargs = [docstring_kwargs]
    if self.options.docstring_only:
        [kwargs.update({'theme': theme}) for _, _, kwargs in wd_docs_kwargs]
        outname, rendered = render_docs(wd_docs_kwargs, out_path,
                                        titles=None, n_jobs=1,
                                        debug=self.options.debug)[0]
        if not outname.parent.exists():
            outname.parent.mkdir(parents=True)

        with open(outname.as_posix(), 'wt') as f:
            f.write(rendered)

        return

    et = tuple()
    wd_docs_kwargs += [(rp, rp / f, makeKwargs(rp, f))
                       for rp in repo_paths
                       for f in rp.repo.git.ls_files().split('\n')
                       if Path(f).suffix in suffixFuncs
                       and only(rp, f)
                       and noneMembers(f, *skip_folders)
                       and f not in rskip.get(rp.name, et)]

    [kwargs.update({'theme': theme}) for _, _, kwargs in wd_docs_kwargs]

    if self.options.spell:
        spell((f.as_posix() for _, f, _ in wd_docs_kwargs))
        return

    titles = doc_config['titles']
    outname_rendered = render_docs(wd_docs_kwargs, out_path, titles,
                                   self.options.jobs,
                                   debug=self.options.debug)

    index = [f'<b class="{heading}">{heading}</b>'
             for heading in doc_config['index']]

    _NOTITLE = object()
    for outname, rendered in outname_rendered:
        apath = outname.relative_to(self.options.out_path)
        title = titles.get(apath.as_posix(), _NOTITLE)
        # TODO parse out/add titles
        if title is not None:
            value = (hfn.atag(apath)
                     if title is _NOTITLE else
                     hfn.atag(apath, title))
            index.append(value)

        if not outname.parent.exists():
            outname.parent.mkdir(parents=True)

        with open(outname.as_posix(), 'wt') as f:
            f.write(rendered)

    lt = list(titles)
    def title_key(a):
        title = a.split('"')[1]
        if title not in lt:
            msg = f'{title} missing from {self.options.config}'
            raise ValueError(msg)

        return lt.index(title)

    index_body = '<br>\n'.join(['<h1>Documentation Index</h1>'] +
                               sorted(index, key=title_key))
    with open((out_path / 'index.html').as_posix(), 'wt') as f:
        f.write(hfn.htmldoc(index_body,
                            title=doc_config['title']))

def wrap_tables(*tables, title=None):
    return htmldoc(*tables,
                   styles=(table_style, nowrap('td', 'col-id')),
                   title=title)

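# Hypothetical usage sketch (not in the original source): assumes render_table
# comes from htmlfn alongside htmldoc and table_style; the rows and headers are
# illustrative only.
def _wrap_tables_example():
    from htmlfn import render_table  # assumption about the import path
    table = render_table([['a', 1], ['b', 2]], 'id', 'count')
    return wrap_tables(table, title='Example tables')
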