def _verify_schema1_content(
            self,
            schema: SchemaDefinition,
            source_file: str,
            addl_checks: Optional[Callable[[SchemaDefinition], None]] = None) -> None:
        expected = loads(f"""{{
           "default_prefix": "http://example.org/{source_file}/",
           "name": "{source_file}",
           "id": "http://example.org/{source_file}",
           "title": "Load Raw Schema Test",
           "metamodel_version": "0.5.0",
           "source_file": "{source_file}.yaml",
           "source_file_date": "Mon Dec 31 11:25:38 2018",
           "source_file_size": 76,
           "generation_date": "2018-12-31 11:50"
        }}""")

        schema.source_file = os.path.basename(schema.source_file)
        if addl_checks:
            addl_checks(schema)
        self.assertIsInstance(schema.metamodel_version, str)
        expected.metamodel_version = schema.metamodel_version
        self.assertIsInstance(schema.source_file_date, str)
        expected.source_file_date = schema.source_file_date
        self.assertIsInstance(schema.source_file_size, int)
        expected.source_file_size = schema.source_file_size
        self.assertIsInstance(schema.generation_date, str)
        expected.generation_date = schema.generation_date
        self.assertEqual(expected, loads(as_json(schema)))
Example #2
    def test_merge_contexts_base(self):
        self.assertEqual(
            JsonObj(**{'@context': JsonObj(**{'@base': 'file://relloc'})}),
            merge_contexts(base='file://relloc'))
        self.assertEqual(
            loads(f'{{"@context": {{"@base": "{META_BASE_URI}"}}}}'),
            merge_contexts(base=META_BASE_URI))
        self.assertEqual(
            loads("""
{"@context": [
      "https://w3id.org/biolink/biolinkml/context.jsonld",
      {
         "ex": "http://example.org/test/",
         "ex2": "http://example.org/test2/"
      },
      {
         "ex": "http://example.org/test3/",
         "ex2": {
            "@id": "http://example.org/test4/"
         }
      },
      {
         "@base": "https://w3id.org/biolink/biolinkml/"
      }
   ]
}"""),
            merge_contexts([METAMODEL_CONTEXT_URI, json_1, json_2],
                           base=META_BASE_URI))
def check_json(ifn: str, ifdir: str, opts: Namespace) -> bool:
    """
    Check whether ifn is a valid FHIR file
    :param ifn: file name
    :param ifdir: file directory
    :param opts: options - we add in_json to it if the file passes
    :return: True if a valid FHIR resource
    """
    if '://' in ifn:
        infilename = ifn
        resp = requests.get(ifn)
        if not resp.ok:
            print(f"Error {resp.status_code}: {ifn} {resp.reason}")
            return False
        in_json = loads(resp.text)
    else:
        infilename = os.path.join(ifdir, ifn)
        with open(infilename) as infile:
            in_json = loads(infile.read())
    if not (hasattr(in_json, 'resourceType') or hasattr(in_json, 'id')):
        print(f"{infilename} is not a FHIR resource - processing skipped",
              file=sys.stderr)
        return False
    opts.in_json = in_json
    return True
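A minimal sketch of driving check_json above; the file name, directory, and bare Namespace are hypothetical, but they show how the parsed JSON ends up on opts.

# Hypothetical usage of check_json (defined above); the path and Namespace are made up.
from argparse import Namespace

opts = Namespace()
if check_json("patient-example.json", "/tmp/fhir", opts):
    # On success, check_json stashes the parsed resource on opts.in_json
    print(opts.in_json.resourceType)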
 def test_default_vocab(self):
     json_ld_text = ContextGenerator(without_default).serialize()
     json_ld = loads(json_ld_text)
     self.assertEqual('http://example.org/sssom/schema/', json_ld['@context']['@vocab'])
     self.assertEqual('http://example.org/sssom/schema/name', json_ld['@context']['name']['@id'])
     json_ld_text2 = ContextGenerator(with_default).serialize()
     json_ld2 = loads(json_ld_text2)
     self.assertEqual('https://w3id.org/sssom/', json_ld2['@context']['@vocab'])
     self.assertNotIn('name', json_ld2['@context']['@vocab'])
Example #5
        def check_types(s: SchemaDefinition) -> None:
            output = os.path.join(outputdir, 'schema4.json')
            if not os.path.exists(output):
                with open(output, 'w') as f:
                    f.write(as_json(JsonObj(**{k: as_dict(loads(as_json(v))) for k, v in s.types.items()})))
                    self.fail(f"File {output} created - rerun test")

            with open(output) as f:
                expected = as_dict(load(f))
            self.assertEqual(expected, {k: as_dict(loads(as_json(v))) for k, v in s.types.items()})
            s.types = None
    def compare_shexj(
            shex: Union[ShExJ.Schema, str],
            shexj: Union[ShExJ.Schema, str]) -> Tuple[bool, StringIO]:
        d1 = loads(as_json(shex) if isinstance(shex, ShExJ.Schema) else shex)
        d2 = loads(
            as_json(shexj) if isinstance(shexj, ShExJ.Schema) else shexj)

        log = StringIO()
        with redirect_stdout(log):
            return compare_dicts(d1._as_dict,
                                 d2._as_dict,
                                 d1name="expected",
                                 d2name="actual  ",
                                 filtr=json_filtr), log
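A sketch of calling compare_shexj above with two tiny ShExJ strings; the listing shows it at method indentation, so it is treated here as a plain helper, and both documents are toy values rather than original test data.

# Hypothetical comparison of two ShExJ documents with compare_shexj (defined above).
expected_shexj = '{"type": "Schema", "shapes": []}'
actual_shexj = '{"type": "Schema", "shapes": []}'
success, log = compare_shexj(expected_shexj, actual_shexj)
if not success:
    print(log.getvalue())    # differences reported by compare_dicts are captured in the log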
def dumpit(argv: Optional[Union[str, List[str]]] = None,
           prog: Optional[str] = None) -> int:
    """
    Dump resources from a FHIR server
    :param argv: command line arguments
    :param prog: program name for testing help output
    :return: number of resource instances dumped
    """
    if isinstance(argv, str):
        argv = argv.split()
    opts = genargs(prog).parse_args(argv if argv is not None else sys.argv[1:])
    os.makedirs(os.path.join(opts.outdir, opts.resource), exist_ok=True)
    url = f"{opts.server}/{opts.resource}?_format=json&_summary=false"
    if opts.max:
        url += f"&_count={opts.max}"
    nwritten = 0
    while True:
        resp = requests.get(url)
        url = None
        if resp.ok:
            rslts = loads(resp.text)
            nwritten = save_instances(opts, rslts, nwritten)
            if opts.max is None or nwritten < opts.max:
            for link in rslts.link:
                if link.relation == "next":
                    url = link.url
        else:
            print(resp.reason)
        if not url:
            break
    if opts.verbose:
        print(f"{nwritten} instances written to {opts.outdir}")
    return nwritten
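A sketch of invoking dumpit above programmatically. The option flags depend on genargs, which is not shown here, so the argument string below is only an assumption about that interface.

# Hypothetical invocation of dumpit (defined above); the flags mirror the options the
# function reads (server, resource, outdir, max), but their exact spelling is assumed.
count = dumpit("-s http://hapi.fhir.org/baseR4 -r Patient -o /tmp/fhirdump --max 20")
print(f"{count} resource instances dumped")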
Example #8
 def test_uri_type(self):
     """ URI datatype should map to ShEx URI instead of NONLITERAL """
     shex = loads(
         ShExGenerator(LOCAL_TYPES_YAML_FILE, format='json').serialize())
     uri_shape = [s for s in shex.shapes if s.id == str(METATYPE.Uri)]
     self.assertEqual(1, len(uri_shape))
     self.assertEqual('iri', uri_shape[0].nodeKind)
    def test_issue_types(self):
        """ Make sure that types are generated as part of the output """
        def generator() -> str:
            gen = JsonSchemaGenerator(env.input_path('issue_129.yaml'))
            gen.topCls = 'c'
            return gen.serialize()

        sobj_str = env.generate_single_file('issue_129.json',
                                            generator,
                                            value_is_returned=True)

        sobj = jsonasobj.loads(sobj_str)
        defs = sobj['definitions']
        C = defs['C']
        props = C['properties']
        assert props['age_in_years']['type'] == 'integer'
        assert props['has_prop']['type'] == 'boolean'
        # multivalued primitive type, inlined
        assert props['scores']['type'] == 'array'
        assert props['scores']['items']['type'] == 'number'
        # single-valued complex type, inlined
        assert props['has_d']['$ref'] == "#/definitions/D"

        # multi-valued, inlined
        assert props['has_ds']['type'] == 'array'
        assert props['has_ds']['items']['$ref'] == "#/definitions/D"

        # single-valued, non-inlined (foreign key)
        assert props['parent']['type'] == "string"

        # multi-valued, non-inlined (foreign key)
        assert props['children']['type'] == 'array'
        assert props['children']['items']['type'] == "string"
def test_andras_loop():
    manifest_url = \
        "https://raw.githubusercontent.com/SuLab/Genewiki-ShEx/master/pathways/wikipathways/manifest_all.json"
    manifest = jsonasobj.loads(requests.get(manifest_url).text)

    for case in manifest:
        print(case._as_json_dumps())
        if case.data.startswith("Endpoint:"):
            sparql_endpoint = case.data.replace("Endpoint: ", "")
            schema = requests.get(case.schemaURL).text
            shex = ShExC(schema).schema
            print("==== Schema =====")
            # print(shex._as_json_dumps())

            evaluator = ShExEvaluator(schema=shex, debug=True)
            sparql_query = case.queryMap.replace("SPARQL '''",
                                                 "").replace("'''@START", "")

            df = get_sparql_dataframe(sparql_endpoint, sparql_query)
            for wdid in df.item:
                slurpeddata = requests.get(wdid + ".ttl")
                results = evaluator.evaluate(rdf=slurpeddata.text,
                                             focus=wdid,
                                             debug=False)
                for result in results:
                    if result.result:
                        print(str(result.focus) + ": CONFORMS")
                    else:
                        print("item with issue: " + str(result.focus) + " - " +
                              "shape applied: " + str(result.start))
Example #11
def resource_url(server: str, text: str) -> str:
    """ Create a URL out of the resource text """
    def as_url(resource_name: str, resource_format: str,
               resource_id: str) -> str:
        # Note that the 'json' below defines the response format.  The server is clever enough to figure out
        # what you are shipping...
        return f"{server}{'/' if not server.endswith('/') else ''}{resource_name}" \
               f"/{resource_id}?_format=json&_pretty=true"

    """ Create a server URL for uploading text """
    if text.startswith('<'):
        # XML
        doc = ElementTree().parse(source=StringIO(text))
        '{http://hl7.org/fhir}ClinicalProfile'
        typ = doc.tag.replace(f'{{{FHIR_XML_URI}}}', '')
        res_id = doc.findall('{http://hl7.org/fhir}id')[0].attrib['value']
        return as_url(typ, 'xml', res_id)
    elif text.startswith('{'):
        # JSON
        json_text = loads(text)
        return as_url(json_text.resourceType, 'json', json_text.id)
    elif text.startswith('@'):
        g = Graph()
        g.parse(data=text, format='turtle')
        focus = g.value(predicate=FHIR.nodeRole, object=FHIR.treeRoot)
        typ_uri = g.value(subject=focus, predicate=RDF.type)
        res_id = g.value(subject=focus, predicate=FHIR.id)
        typ = str(typ_uri).replace(str(FHIR), '')
        return as_url(typ, 'ttl', res_id)
    else:
        raise ValueError("Unrecognized file type")
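A sketch of resource_url above applied to a small JSON resource; the server address and resource body are purely illustrative.

# Hypothetical use of resource_url (defined above) on a JSON FHIR resource.
sample = '{"resourceType": "Patient", "id": "example"}'
print(resource_url("http://hapi.fhir.org/baseR4", sample))
# -> http://hapi.fhir.org/baseR4/Patient/example?_format=json&_pretty=true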
Example #12
def nb_filter(s: str) -> str:
    """ Filter for jupyter (ipynb) notebooks """
    # It is easier to deal with notebook content in JSON
    s_json = loads(ldcontext_metadata_filter(s))
    for cell in s_json.cells:
        if hasattr(cell, 'execution_count'):
            cell.execution_count = 1
        if hasattr(cell, 'metadata'):
            delattr(cell, 'metadata')
        if hasattr(cell, 'outputs'):
            del_outputs = []
            for output in cell.outputs:
                to_del = []
                if hasattr(output, 'text'):
                    for line in output.text:
                        if 'WARNING: You are using pip' in line or\
                           'You should consider upgrading via' in line or\
                           'Requirement already satisfied:' in line:
                            to_del.append(line)
                    for del_line in to_del:
                        output.text.remove(del_line)
                    if not output.text:
                        del_outputs.append(output)
            if del_outputs:
                for del_output in del_outputs:
                    cell.outputs.remove(del_output)
    if hasattr(s_json.metadata, 'language_info'):
        if hasattr(s_json.metadata.language_info, 'version'):
            s_json.metadata.language_info.version = '3'

    return as_json(s_json)
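A sketch of running nb_filter above over a notebook before comparing it with an expected copy; the file name is hypothetical.

# Hypothetical use of nb_filter (defined above) to normalize a notebook.
with open("example_notebook.ipynb") as f:      # made-up path
    normalized = nb_filter(f.read())
print(normalized[:200])                        # start of the normalized notebook JSON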
Example #13
 def test_get(self):
     py_obj = jsonasobj.loads(test_json)
     self.assertEqual(1, py_obj.k1)
     with self.assertRaises(AttributeError):
         py_obj.e1
     self.assertIsNone(py_obj._get('e1'))
     self.assertEqual("abc", py_obj._get('e2', 'abc'))
Example #14
    def test_merge_contexts(self):
        self.assertIsNone(merge_contexts())
        self.assertEqual('file://local.jsonld', merge_contexts("local.jsonld")['@context'])
        self.assertEqual('file://local.jsonld', merge_contexts(["local.jsonld"])['@context'])
        self.assertEqual(METAMODEL_CONTEXT_URI, merge_contexts(METAMODEL_CONTEXT_URI)['@context'])
        self.assertEqual(METAMODEL_CONTEXT_URI, merge_contexts([METAMODEL_CONTEXT_URI])['@context'])
        self.assertEqual(JsonObj(ex='http://example.org/test/', ex2='http://example.org/test2/'),
                         merge_contexts(json_1)['@context'])
        self.assertEqual(JsonObj(ex='http://example.org/test/', ex2='http://example.org/test2/'),
                         merge_contexts([json_1])['@context'])
        self.assertEqual(JsonObj(ex='http://example.org/test3/', ex2=JsonObj(**{'@id': 'http://example.org/test4/'})),
                         merge_contexts(json_2)['@context'])
        self.assertEqual(JsonObj(ex='http://example.org/test3/', ex2=JsonObj(**{'@id': 'http://example.org/test4/'})),
                         merge_contexts([json_2])['@context'])
        self.assertEqual(['file://local.jsonld',
                          'https://w3id.org/linkml/meta.context.jsonld',
                          JsonObj(ex='http://example.org/test/', ex2='http://example.org/test2/'),
                          JsonObj(ex='http://example.org/test3/', ex2=JsonObj(**{'@id': 'http://example.org/test4/'}))],
                         merge_contexts(["local.jsonld", METAMODEL_CONTEXT_URI, json_1, json_2])['@context'])
        self.assertEqual(loads(context_output),
                         merge_contexts(["local.jsonld", METAMODEL_CONTEXT_URI, json_1, json_2]))

        # Dups are not removed
        self.assertEqual(
            JsonObj(**{'@context': [JsonObj(ex='http://example.org/test/', ex2='http://example.org/test2/'),
                                    JsonObj(ex='http://example.org/test/', ex2='http://example.org/test2/')]}),
            merge_contexts([json_1, json_1]))
        self.assertEqual('file://local.jsonld', merge_contexts("local.jsonld")['@context'])
    def test_uri_and_curie(self):
        """ Compile a model of URI's and Curies and then test the various types """
        self.single_file_generator('py',
                                   PythonGenerator,
                                   filtr=metadata_filter,
                                   comparator=compare_python)

        # Check that the interpretations are correct
        self.single_file_generator(
            'jsonld',
            ContextGenerator,
            filtr=ldcontext_metadata_filter,
            comparator=lambda expected, actual: compare_rdf(
                expected, actual, fmt="json-ld"))
        self.single_file_generator('json',
                                   JSONLDGenerator,
                                   filtr=json_metadata_filter)

        module = compile_python(env.expected_path(self.model_name + '.py'))

        curie_obj = module.C1("ex:obj1",
                              hasCurie="ex:curie",
                              hasURI="http://example.org/test/uri",
                              hasNcName="A123",
                              id2="ex:id2")
        instance_jsonld = loads('{ "ex": "http://example.org/test/inst#" }')

        g = as_rdf(
            curie_obj,
            [env.input_path(self.model_name + '.jsonld'), instance_jsonld])
        env.eval_single_file(env.expected_path('uriandcurie.ttl'),
                             g.serialize(format='ttl').decode(), lambda s: s,
                             compare_rdf)
Example #16
 def test_basic_json_read(self) -> None:
     """ Test the basic JSON level read
     """
     py_obj = jsonasobj.loads(test_json)
     self.assertEqual("Markus Lanthaler", py_obj.name)
     self.assertEqual("Dave Longley", py_obj.knows[0].name)
     self.assertEqual("http://xmlns.com/foaf/0.1/name", py_obj["@context"].name)
     self.assertEqual("http://me.markus-lanthaler.com/", py_obj["@id"])
Example #17
 def test_decimal(self):
     test_json = loads(json_data)
     from fhirtordf.loaders.fhirresourceloader import FHIRResource
     test_rdf = FHIRResource(FHIRGraph(), None, "http://hl7.org/fhir", test_json, add_ontology_header=False)
     g = test_rdf.graph
     self.assertEqual({URIRef('http://hl7.org/fhir/Bundle/bundle-example'),
                       URIRef('https://example.com/base/MedicationRequest/3123')},
                      set(s for s in g.subjects() if isinstance(s, URIRef)))
Example #18
 def test_reference(self):
     test_json = loads(data)
     from fhirtordf.loaders.fhirresourceloader import FHIRResource
     test_rdf = FHIRResource(FHIRGraph(), None, "http://hl7.org/fhir", test_json)
     g = test_rdf.graph
     expected_graph = Graph()
     diffs = rdf_compare(expected_graph, test_rdf.graph, ignore_owl_version=True, ignore_type_arcs=True)
     self.assertEqual("", diffs)
Example #19
 def check_types(s: SchemaDefinition) -> None:
     self.assertEqual({
         'integer': {'base': 'int',
                     'from_schema': 'http://example.org/schema5',
                     'name': 'integer'},
         'string': {'base': 'str',
                    'from_schema': 'http://example.org/schema4',
                    'name': 'string'}},
                      {k: as_dict(loads(as_json(v))) for k, v in s.types.items()})
     s.types = None
Example #20
    def test_reference(self):
        test_json = loads(data)
        from fhirtordf.loaders.fhirresourceloader import FHIRResource
        fmv_loc = "http://build.fhir.org/fhir.ttl"

        test_rdf = FHIRResource(FHIRMetaVoc(fmv_loc).g, None, "http://hl7.org/fhir", test_json)
        g = test_rdf.graph
        subj = URIRef("http://hl7.org/fhir/CoverageEligibilityResponse/E2500")
        self.assertEqual("http://www.BenefitsInc.com/fhir/coverageeligibilityrequest/225476332402",
                         str(g.value(g.value(subj, FHIR.CoverageEligibilityResponse.request), FHIR.link)))
Example #21
def proc_response(response: Response) -> Optional[str]:
    if response.status_code == 400:
        # A 400 carries an OperationOutcome whose issues are summarized
        outcome = loads(response.text)
        rval = []
        for issue in outcome.issue:
            rval.append(f"Severity: {issue.severity} - {issue.diagnostics}")
        return '\n'.join(rval)
    elif response.status_code != 200:
        return response.reason
    return None
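A sketch of using proc_response above on a server reply; the endpoint and payload are illustrative only.

# Hypothetical use of proc_response (defined above): None means success, anything
# else is a printable error summary.
resp = requests.post("http://hapi.fhir.org/baseR4/Patient",
                     json={"resourceType": "Patient"})
error = proc_response(resp)
if error:
    print(error)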
Example #22
 def test_example(self):
     pyobj = jsonasobj.loads(str(test_json))
     self.assertEqual('Markus Lanthaler', pyobj.name)
     self.assertEqual(pyobj.name, pyobj['name'])
     self.assertEqual('Dave Longley', pyobj.knows[0].name)
     self.assertEqual('http://xmlns.com/foaf/0.1/name', pyobj['@context'].name)
     self.assertEqual(json.loads(test_json), json.loads(pyobj._as_json))
     self.assertEqual(json.loads(pyobj._as_json), json.loads(as_json(pyobj)))
     self.assertTrue(compare_dicts(test_data, pyobj._as_dict))
     self.assertTrue(compare_dicts(test_data, as_dict(pyobj)))
Example #23
 def test_dct_prefix(self):
     """ Make sure prefixes are handled correctly """
     with self.assertRaises(ValueError,
                            msg="A colon in an identifier is illegal"):
         shex = loads(
             ShExGenerator(env.input_path('Issue_6.yaml'),
                           format='json').serialize())
     shex = loads(
         ShExGenerator(env.input_path('Issue_6_fixed.yaml'),
                       format='json').serialize())
     company_shape = [s for s in shex.shapes if 'Company' in s.id][0]
     for expr in company_shape.expression.expressions:
         if expr.min == 0:
             self.assertEqual(
                 str(DCT.created),
                 expr.predicate,
             )
             return
      self.fail("DCT.created predicate not found")
        def check_types(s: SchemaDefinition) -> None:
            output = env.expected_path('schema4.json')
            if not os.path.exists(output):
                with open(output, 'w') as f:
                    f.write(
                        as_json(
                            JsonObj(
                                **{
                                    k: as_dict(loads(as_json(v)))
                                    for k, v in s.types.items()
                                })))

            with open(output) as f:
                expected = as_dict(load(f))
            self.assertEqual(
                expected,
                {k: as_dict(loads(as_json(v)))
                 for k, v in s.types.items()})
            s.types = None
Example #25
 def end_schema(self, context: str = biolink_context) -> None:
     # self._visit(self.schema)
     json_str = as_json(self.schema)
     json_obj = loads(json_str)
     if '://' not in context:
         context = urljoin('file:', pathname2url(os.path.abspath(context)))
     base_prefix = self.default_uri()
     json_obj["@context"] = [context, {'@base': base_prefix}] if base_prefix else context
     json_obj["@id"] = self.schema.id
     print(as_json(json_obj, indent="  "))
 def test_as_json(self):
     schema = self.fix_schema_metadata(
         load_raw_schema(os.path.join(inputdir, 'schema6.yaml')))
     outfile = os.path.join(outputdir, 'schema6.json')
     if not os.path.exists(outfile):
         with open(outfile, 'w') as f:
             f.write(as_json(schema))
             self.fail(f"Generated {outfile} - run test again")
     else:
         self.assertEqual(load(outfile), loads(as_json(schema)))
Example #27
 def test_reference(self):
     test_json = loads(data)
     from fhirtordf.loaders.fhirresourceloader import FHIRResource
     test_rdf = FHIRResource(FHIRGraph(), None, "http://hl7.org/fhir",
                             test_json)
     g = test_rdf.graph
     subj = URIRef("http://hl7.org/fhir/EligibilityResponse/E2500")
     self.assertEqual(
         "http://www.BenefitsInc.com/fhir/eligibility/225476332402",
         str(
             g.value(g.value(subj, FHIR.EligibilityResponse.request),
                     FHIR.link)))
def loinc_name_for(code: str) -> str:
    userAndPass = b64encode(b"hsolbrig:instill-geminate-tehran").decode(
        "ascii")
    headers = {'Authorization': 'Basic %s' % userAndPass}
    resp = requests.get(
        f"https://fhir.loinc.org/CodeSystem/$lookup?system=http://loinc.org&code={code}",
        headers=headers)
    if resp.status_code == 200:
        vals = map_parameters(loads(resp.text))
        if vals:
            return vals.display
        else:
            return "Unknown Code"
 def __init__(self, source: Union[fname, json_txt]) -> None:
     """ Construct a FHIR StructuredDefinition from a source file name, file or string
     :param source: JSON source
     """
     valueset_directory = None
     if hasattr(source, 'read'):             # an open file
         self._obj = load(source)
     elif source.strip().startswith('{'):    # a dictionary in text form
         self._obj = loads(source)
     else:
         self._obj = load(open(source))      # a file name
         valueset_directory = os.path.dirname(source)
     self.elements = [FHIRElement(self._obj, e, valueset_directory) for e in self._obj.snapshot.element
                      if '.' in e.path and self._significant_differential(e)]
Example #30
    def test_issue_177(self):
        def generator() -> str:
            gen = JsonSchemaGenerator(env.input_path('issue_177.yaml'))
            return gen.serialize()

        json_str = env.generate_single_file(
            'issue_177.json',
            generator,
            value_is_returned=True)
        sobj = jsonasobj.loads(json_str)
        props = sobj['properties']
        assert props['sa']['type'] == 'string'
        assert props['sb']['type'] == 'integer'
Example #31
    def test_as_json(self):
        schema = self.fix_schema_metadata(
            load_raw_schema(os.path.join(datadir, 'schema6.yaml')))
        self.assertEqual(
            loads("""{
   "name": "schema6",
   "id": "http://example.org/schema6.fuzz",
   "title": "Load Raw Schema Test",
   "types": [
      {
         "name": "foo",
         "from_schema": "http://example.org/schema6.fuzz",
         "base": "str",
         "uri": "http://example.org/types/String"
      }
   ],
   "slots": [
      {
         "name": "s1",
         "from_schema": "http://example.org/schema6.fuzz",
         "domain": "c1",
         "range": "foo"
      }
   ],
   "classes": [
      {
         "name": "c1",
         "from_schema": "http://example.org/schema6.fuzz"
      }
   ],
   "metamodel_version": "0.5.0",
   "source_file": "schema6.yaml",
   "source_file_date": "2018-12-31 17:23",
   "source_file_size": 259,
   "generation_date": "2018-12-31 17:23"
}"""), loads(as_json(schema)))
 def get(self,
         relurl: Optional[str],
         params: Optional[Dict[str, str]] = None) -> Optional[JsonObj]:
     url = self.url + relurl
     resp = requests.get(url,
                         params=params,
                         auth=(self._user,
                               self._password) if self._user else None)
     if self.show_urls:
         print(f"GET {resp.url}")
     if resp.ok:
         return loads(resp.text)
     else:
         print(f"{resp.url}: Error: {resp.reason}", file=sys.stderr)
         return None
Example #33
    def run_test(self, manifest_uri: str, num_entries: Optional[int]=None, verbose: bool=True, debug: bool=False,
                 stop_on_fail: bool=False, debug_slurps: bool=False, save_graph_dir: Optional[str]=None) \
            -> List[EvaluationResult]:
        """ Run the test identified by manifest_uri

        :param manifest_uri: uri of manifest
        :param num_entries: number of manifest elements to test
        :param verbose: True means talk about it
        :param debug: debug setting for shex evaluator
        :param stop_on_fail: True means run until failure
        :param debug_slurps: True means emit SPARQL_slurper statistics
        :param save_graph_dir: If present, save the final graph in this directory
        :return:
        """
        manifest = loads(self.fetch_uri(manifest_uri))
        rval: List[EvaluationResult] = []
        for case in manifest:
            if verbose:
                print(case._as_json_dumps())
            sparql_endpoint = case.data.replace("Endpoint: ", "")
            shex = self.fetch_uri(case.schemaURL)
            evaluator = ShExEvaluator(schema=shex, debug=debug)
            prefixes = PrefixLibrary(shex, SKOS=SKOS)
            sparql_query = case.queryMap.replace("SPARQL '''",
                                                 "").replace("'''@START", "")
            dfs: List[str] = self.get_sparql_dataframe(sparql_endpoint,
                                                       sparql_query)
            dfs_slice = dfs[:num_entries] if num_entries is not None else dfs
            for df in dfs_slice:
                slurper = SlurpyGraphWithAgent(sparql_endpoint)
                # slurper.debug_slurps = debug_slurps
                prefixes.add_bindings(slurper)
                print(f"Evaluating: {df}")
                results = evaluator.evaluate(rdf=slurper,
                                             focus=df,
                                             debug=debug,
                                             debug_slurps=debug_slurps,
                                             over_slurp=False)
                rval += results
                if save_graph_dir:
                    element_name = df.rsplit('/', 1)[1]
                    file_name = os.path.join(save_graph_dir,
                                             element_name + '.ttl')
                    print(f"Writing: {file_name}")
                    slurper.serialize(file_name, format="turtle")
                if stop_on_fail and not all(r.result for r in results):
                    break
        return rval
Example #34
 def __init__(self, source: Union[fname, json_txt]) -> None:
     """ Construct a FHIR StructuredDefinition from a source file name, file or string
     :param source: JSON source
     """
     valueset_directory = None
     if hasattr(source, 'read'):  # an open file
         self._obj = load(source)
     elif source.strip().startswith('{'):  # a dictionary in text form
         self._obj = loads(source)
     else:
         self._obj = load(open(source))  # a file name
         valueset_directory = os.path.dirname(source)
     self.elements = [
         FHIRElement(self._obj, e, valueset_directory)
         for e in self._obj.snapshot.element
         if '.' in e.path and self._significant_differential(e)
     ]
 def sdo_valueset(self) -> str:
     """ Return a schema.org representation of the value set
     :return:
     """
     # TODO: This URL should really be a terminology service expand call, but the documented method doesn't work
     rval = ''
     vsdef = self.sdo_valueset_file()
     if not vsdef:
         resp = get(self._reference, headers={'accept': 'application/json'})
         if resp.ok:
             vsdef = loads(resp.text)
         else:
             print("ValueSet access error: %s (%s)" % (self._reference, resp.reason))
     if vsdef:
         rval = self.sdo_valueset_header(vsdef.url, vsdef.id, vsdef.name)
         if 'codeSystem' in vsdef and 'concept' in vsdef.codeSystem:
             cs = vsdef.codeSystem
             rval += '\n\t'.join([self.sdo_concept(vsdef.url, vsdef.id, cs.system, c) for c in cs.concept])
     return rval
Example #36
 def test_as_json(self):
     """ Test the JSON serialization
     """
     py_obj = jsonasobj.loads(test_json)
     self.assertEqual(json.loads(test_json), json.loads(py_obj._as_json))
     self.assertEqual(json.loads(test_json), json.loads(as_json(py_obj)))
Example #37
      "@id": "name:foo",
      "@type": "@id"
    }
  },
  "@id": "http://me.markus-lanthaler.com/",
  "name": "Markus Lanthaler",
  "knows": [
    {
      "name": "Dave Longley",
      "menu": "something",
      "modelDate" : "01/01/2015"
    }
  ]
}"""

py_obj = jsonasobj.loads(test_json)
py_obj.knows[0].extra = {'age': 17}
py_obj.knows.append(dict(name='Barack Obama'))
del py_obj.knows[0]['menu']
print(py_obj.name)
print(py_obj['name'])
print(py_obj.knows[0].name)
print(py_obj['@context'].name)
print(as_json(py_obj))
print(as_dict(py_obj))
'''
Result:

Markus Lanthaler
Markus Lanthaler
Dave Longley