def test_roundtrip_with_missing_fields():
    """Roundtrip masked variants of the v1.4 fixture through parse/compile.

    Each variant produced by ``__mask`` (always retaining ``id`` and
    ``metaMetadata``) must come back unchanged from a
    JSON -> internal structure -> JSON cycle.
    """
    compiler = JSONCompiler()
    parser = JSONParser_1_4()
    with open("tests/data/metadata_v14.json", "r") as fixture:
        raw = fixture.read()
    for partial in __mask(json.loads(raw), keep=["id", "metaMetadata"]):
        roundtripped = compiler.visit(parser.parse(partial))
        assert_equal(roundtripped, partial)
def metadata_conversion(old_sql, new_sql, user, user_email):
    """Convert an existing metadata file to a newer version.

    The input file is parsed with the v1.3 JSON parser, a contribution
    entry crediting the converting user is appended, and the result is
    compiled and written to ``new_sql``.

    Parameters
    ----------
    old_sql: str
        Path to the file containing the old metadata (parsed as JSON).
    new_sql: str
        Filename the converted metadata is written to.
    user: str
        Name of the user for the 'contributions' section.
    user_email: str
        Email address of the user.

    Returns
    -------
    None
    """
    parser = JSONParser_1_3()
    metadata = parser.parse_from_file(old_sql)
    metadata.contributors.append(
        structure.Contribution(
            title=user,
            email=user_email,
            # BUG FIX: datetime.now must be *called*; the original stored the
            # bound method object instead of a timestamp.
            date=datetime.now(),
            obj=None,
            # NOTE(review): compiler below emits v1.4 — this message saying
            # "v1.3" looks stale; confirm before changing the stored string.
            comment="Update metadata to v1.3 using metadata conversion tool",
        ))
    compiler = JSONCompiler()
    # BUG FIX: the file must be opened for writing; the default "r" mode
    # made the subsequent write() raise io.UnsupportedOperation.
    with open(new_sql, "w") as out_file:
        out_file.write(compiler.visit(metadata))
def assert_roundtrip(expected_json, input_string):
    """Assert that JSON -> internal -> RDF -> internal -> JSON is lossless.

    ``input_string`` is parsed into the internal structure, translated to
    RDF, re-parsed from the RDF graph, compiled back to JSON, and the
    final result is compared to ``expected_json`` ignoring key order.
    """
    to_json = JSONCompiler()
    from_json = JSONParser_1_4()
    to_rdf = RDFCompiler()
    from_rdf = RDFParser()

    # JSON string -> internal structure
    first_pass = from_json.parse_from_string(input_string)
    # internal structure -> RDF (graph accumulates on the compiler)
    _ = to_rdf.visit(first_pass)
    # RDF graph -> internal structure
    second_pass = from_rdf.parse(to_rdf.graph)
    # internal structure -> JSON, then compare
    produced = to_json.visit(second_pass)
    assert_equal(expected_json, produced, disregard_ordering=True)
def test_roundtrip():
    """Roundtrip the full v1.4 JSON fixture through the RDF representation.

    Parses the fixture to the internal structure, translates it to RDF,
    parses the RDF graph back, compiles to JSON again, and checks the
    result equals the original fixture (key order ignored).
    """
    to_json = JSONCompiler()
    from_json = JSONParser_1_4()
    to_rdf = RDFCompiler()
    from_rdf = RDFParser()

    with open("tests/data/metadata_v14.json", "r") as fixture:
        raw = fixture.read()
    expected = json.loads(raw)

    # JSON -> internal -> RDF -> internal -> JSON
    first_pass = from_json.parse_from_string(raw)
    _ = to_rdf.visit(first_pass)
    second_pass = from_rdf.parse(to_rdf.graph)
    produced = to_json.visit(second_pass)

    assert_equal(expected, produced, disregard_ordering=True)
def post(self, request, schema, table):
    """Attach parsed metadata to a table as its SQL comment.

    The request body is parsed as metadata; on success the compiled JSON
    is stored via ``COMMENT ON TABLE`` and echoed back, otherwise an
    APIError carrying the parse error is raised.
    """
    table_obj = actions._get_table(schema=schema, table=table)
    raw_input = request.data
    metadata, error = actions.try_parse_metadata(raw_input)
    # Guard clause: bail out immediately on a parse failure.
    if metadata is None:
        raise APIError(error)

    compiler = JSONCompiler()
    table_obj.comment = json.dumps(compiler.visit(metadata))
    cursor = actions.load_cursor_from_context(request.data)
    # Surprisingly, SQLAlchemy does not seem to escape comment strings
    # properly. Certain strings cause database errors.
    # This MAY be a security issue. Therefore, we do not use
    # SQLAlchemy's compiler here but do it manually.
    sql = "COMMENT ON TABLE {schema}.{table} IS %s".format(
        schema=table_obj.schema, table=table_obj.name)
    cursor.execute(sql, (table_obj.comment, ))
    return JsonResponse(raw_input)
def test_translation_1_3_to_1_4():
    """A minimal v1.3 document must compile into the expected v1.4 skeleton."""
    from_v13 = JSONParser_1_3()
    to_v14 = JSONCompiler()

    with open("tests/data/metadata_v13_minimal.json", "r") as fixture:
        raw = fixture.read()

    # Parse the v1.3 input, then compile it up to v1.4.
    produced = to_v14.visit(from_v13.parse_from_string(raw))

    expected = OrderedDict(
        json.loads('''{
    "metaMetadata": {
        "metadataVersion": "OEP-1.4.0",
        "metadataLicense": {
            "name": "CC0-1.0",
            "title": "Creative Commons Zero v1.0 Universal",
            "path": "https://creativecommons.org/publicdomain/zero/1.0/"}}}''')
    )
    assert_equal(expected, produced)
def test_roundtrip():
    """Roundtrip the v1.4 Turtle fixture through the JSON representation.

    Turtle -> internal -> JSON string -> internal -> Turtle; the resulting
    graph must be isomorphic to (and triple-wise equal with) the fixture.
    """
    to_json = JSONCompiler()
    from_json = JSONParser_1_4()
    render_json = JSONRenderer()
    to_rdf = RDFCompiler()
    from_rdf = RDFParser()

    with open("tests/data/metadata_v14.ttl", "r") as fixture:
        raw = fixture.read()
    expected_graph = Graph()
    expected_graph.parse(data=raw, format="ttl")

    # Turtle -> internal structure
    first_pass = from_rdf.parse_from_string(raw)
    # internal structure -> rendered JSON string
    json_text = render_json.render(to_json.visit(first_pass))
    # JSON string -> internal structure
    second_pass = from_json.parse_from_string(json_text)
    # internal structure -> Turtle (graph accumulates on the compiler)
    _ = to_rdf.visit(second_pass)

    for left, right in _squashed_graphs_triples(expected_graph, to_rdf.graph):
        assert left == right
    assert isomorphic(expected_graph, to_rdf.graph)
def test_compiler_v1_4():
    """Compiling the reference internal structure reproduces the v1.4 fixture."""
    with open("tests/data/metadata_v14.json", "r") as fixture:
        expected = json.load(fixture)
    produced = JSONCompiler().visit(metadata_v_1_4)
    assert_equal(expected, produced)