def export(self, nidm_version, export_dir, prepend_path):
    """Copy the file into ``export_dir`` and create corresponding triples.

    :param nidm_version: dict with a 'num' key holding the NIDM version string
    :param export_dir: destination directory; if None the file is not copied
    :param prepend_path: directory prefix for ``self.path``, or a ``.zip``
        archive the file must first be extracted from
    """
    if self.path is not None:
        if export_dir is not None:
            # Copy file only if export_dir is not None
            new_file = os.path.join(export_dir, self.filename)
            if not self.path == new_file:
                if prepend_path.endswith('.zip'):
                    with zipfile.ZipFile(prepend_path) as z:
                        extracted = z.extract(str(self.path), export_dir)
                        shutil.move(extracted, new_file)
                else:
                    if prepend_path:
                        file_copied = os.path.join(prepend_path, self.path)
                    else:
                        file_copied = self.path
                    shutil.copy(file_copied, new_file)
                if self.temporary:
                    os.remove(self.path)
        else:
            new_file = self.path

        # Older NIDM versions used a file:// relative location
        if nidm_version['num'] in ["1.0.0", "1.1.0"]:
            loc = Identifier("file://./" + self.filename)
        else:
            loc = Identifier(self.filename)

        self.add_attributes([(NFO['fileName'], self.filename)])

        if export_dir:
            self.add_attributes([(PROV['atLocation'], loc)])

        if nidm_version['num'] in ("1.0.0", "1.1.0"):
            path, org_filename = os.path.split(self.path)
            # BUG FIX: was "org_filename is not self.filename" — an identity
            # comparison on strings, which is unreliable; use "!=" instead.
            if (org_filename != self.filename) \
                    and (not self.temporary):
                self.add_attributes([(NFO['fileName'], org_filename)])

        if self.is_nifti():
            if self.sha is None:
                self.sha = self.get_sha_sum(new_file)
            if self.fmt is None:
                self.fmt = "image/nifti"

            self.add_attributes([
                (CRYPTO['sha512'], self.sha),
                (DCT['format'], self.fmt)
            ])
def export(self):
    """Create prov entities and activities."""
    # Export the coordinate space first
    self.p.update(self.coord_space.export())

    # Copy the residuals map into the export directory
    res_path = os.path.join(self.export_dir, 'ResidualMeanSquares.nii.gz')
    orig_name, new_name = self.copy_nifti(self.file, res_path)

    # "Residual Mean Squares Map" entity (both the original and the
    # exported filenames are recorded)
    attrs = (
        (PROV['type'], NIDM['ResidualMeanSquaresMap']),
        (DCT['format'], "image/nifti"),
        (PROV['location'], Identifier("file://./" + new_name)),
        (PROV['label'], "Residual Mean Squares Map"),
        (NIDM['filename'], orig_name),
        (NIDM['filename'], new_name),
        (CRYPTO['sha512'], self.get_sha_sum(res_path)),
        (NIDM['inCoordinateSpace'], self.coord_space.id),
    )
    self.p.entity(self.id, other_attributes=attrs)
    return self.p
def export(self):
    """Create prov entities and activities."""
    self.p.update(self.coord_space.export())

    # Copy the search space ("mask") map into the export directory
    dest = os.path.join(self.export_dir, 'SearchSpace.nii.gz')
    orig_name, new_name = self.copy_nifti(self.file, dest)

    # Create the "Search Space Map" entity
    attrs = (
        (PROV['label'], "Search Space Map"),
        (DCT['format'], "image/nifti"),
        (PROV['type'], NIDM['SearchSpaceMap']),
        (PROV['location'], Identifier("file://./" + new_name)),
        (NIDM['filename'], orig_name),
        (NIDM['filename'], new_name),
        (NIDM['randomFieldStationarity'], self.rf_stationarity),
        (NIDM['inCoordinateSpace'], self.coord_space.id),
        (FSL['searchVolumeInVoxels'], self.search_volume),
        (CRYPTO['sha512'], self.get_sha_sum(dest)),
        (FSL['reselSizeInVoxels'], self.resel_size_in_voxels),
        (FSL['dlh'], self.dlh),
    )
    self.p.entity(self.id, other_attributes=attrs)
    return self.p
def decode_json_representation(value, type, bundle):
    """Decode *value* according to its JSON *type* descriptor.

    See also encode_json_representation.

    :param value: the raw JSON value
    :param type: a dict describing the datatype/language, or any
        non-dict for a plain value
    :param bundle: bundle used to resolve qualified names
    :return: the decoded value
    """
    if not isinstance(type, dict):
        # simple type, just return it
        return value

    # complex type: resolve the declared datatype and language tag
    datatype = bundle.valid_qualified_name(type.get('type'))
    langtag = type.get('lang')
    if datatype == XSD_ANYURI:
        return Identifier(value)
    if datatype == PROV_QUALIFIEDNAME:
        return bundle.valid_qualified_name(value)
    # Literals of standard Python types are not converted here; that
    # happens in _auto_literal_conversion() when added to a record.
    return Literal(value, datatype, langtag)
def export(self):
    """Create prov graph."""
    self.p.update(self.coord_space.export())

    # Copy the contrast map into the export directory
    cope_file = os.path.join(
        self.export_dir, 'Contrast' + self.num + '.nii.gz')
    orig_name, new_name = self.copy_nifti(self.file, cope_file)

    # "Contrast Map" entity
    self.p.entity(self.id, other_attributes=(
        (PROV['type'], NIDM['ContrastMap']),
        (DCT['format'], "image/nifti"),
        (NIDM['inCoordinateSpace'], self.coord_space.id),
        (PROV['location'], Identifier("file://./" + new_name)),
        (NIDM['filename'], orig_name),
        (NIDM['filename'], new_name),
        (NIDM['contrastName'], self.name),
        (CRYPTO['sha512'], self.get_sha_sum(cope_file)),
        (PROV['label'], "Contrast Map: " + self.name),
    ))
    return self.p
def export(self):
    """Create prov entities and activities."""
    self.p.update(self.coord_space.export())
    self.p.update(self.visu.export())

    # Copy the excursion set map into the export directory
    exc_path = os.path.join(
        self.export_dir, 'ExcursionSet' + self.num + '.nii.gz')
    orig_name, new_name = self.copy_nifti(self.file, exc_path)

    # "Excursion Set" entity
    self.p.entity(self.id, other_attributes=(
        (PROV['type'], NIDM['ExcursionSet']),
        (DCT['format'], "image/nifti"),
        (PROV['location'], Identifier("file://./" + new_name)),
        (NIDM['filename'], orig_name),
        (NIDM['filename'], new_name),
        (NIDM['inCoordinateSpace'], self.coord_space.id),
        (PROV['label'], "Excursion Set"),
        (NIDM['visualisation'], self.visu.id),
        (CRYPTO['sha512'], self.get_sha_sum(exc_path)),
    ))
    return self.p
def __init__(self, oid=None):
    """Assign the object identifier.

    :param oid: optional identifier; when None a fresh random NIIRI id
        is minted, otherwise the value is coerced to a NIIRI qualified
        name unless it already is a QualifiedName
    """
    if oid is None:
        self.id = NIIRI[str(uuid.uuid4())]
    else:
        # Use isinstance rather than an exact type() check so that
        # QualifiedName subclasses are accepted as-is.
        if not isinstance(oid, QualifiedName):
            oid = NIIRI.qname(Identifier(oid))
        self.id = oid
def __init__(self, software_type, version, label=None, feat_version=None,
             oid=None):
    """Initialise a neuroimaging software agent.

    :param software_type: QualifiedName, URI string, or software name
        (currently only "fsl" is recognised as a name)
    :param version: software version string
    :param label: optional display label (defaults to the software name)
    :param feat_version: optional FEAT version
    :param oid: optional identifier forwarded to the parent constructor
    """
    super(NeuroimagingSoftware, self).__init__(oid=oid)
    self.version = version

    # Resolve self.type from the provided software_type
    if isinstance(software_type, QualifiedName):
        self.type = software_type
    elif software_type.startswith('http'):
        self.type = Identifier(software_type)
    elif software_type.lower() == "fsl":
        self.type = SCR_FSL
    else:
        # Unknown software: no type; the name is resolved below.
        self.type = None

    # FIXME: get label from owl!
    if self.type == SCR_FSL:
        self.name = "FSL"
    elif self.type == SCR_SPM:
        self.name = "SPM"
    else:
        # BUG FIX: the original warned twice (and assigned name/type
        # twice) for unrecognised software; warn exactly once here.
        warnings.warn('Unrecognised software: ' + str(software_type))
        self.name = str(software_type)

    self.label = label if label else self.name
    self.prov_type = PROV['Agent']
    self.feat_version = feat_version
def export(self):
    """Create prov entities and activities."""
    # Coordinate space entity
    self.p.update(self.coord_space.export())

    # Copy the display mask map into the export directory.
    # NOTE(review): this reads self.filename where sibling exporters pass
    # self.file to copy_nifti — confirm this is intentional.
    mask_path = os.path.join(self.export_dir, 'DisplayMask.nii.gz')
    orig_name, new_name = self.copy_nifti(self.filename, mask_path)

    # "Display Mask Map" entity
    self.p.entity(self.id, other_attributes=(
        (PROV['type'], NIDM['DisplayMaskMap']),
        (PROV['label'], "Display Mask Map"),
        (DCT['format'], "image/nifti"),
        (NIDM['inCoordinateSpace'], self.coord_space.id),
        (NIDM['filename'], orig_name),
        (NIDM['filename'], new_name),
        (PROV['location'], Identifier("file://./" + new_name)),
        (CRYPTO['sha512'], self.get_sha_sum(mask_path)),
    ))
    return self.p
def export(self):
    """Create prov graph.

    Exports the contrast standard error map. When ``self.is_variance`` is
    set, the input file is a contrast *variance* map: a variance-map
    entity is recorded and the standard error map is derived from it by
    taking the voxel-wise square root; otherwise the input file is copied
    directly as the standard error map.
    """
    self.p.update(self.coord_space.export())
    standard_error_file = os.path.join(
        self.export_dir,
        "ContrastStandardError" + self.num + ".nii.gz")
    if self.is_variance:
        self.p.update(self.var_coord_space.export())

        # Copy contrast variance map in export directory
        path, var_cope_filename = os.path.split(self.file)
        # FIXME: Use ContrastVariance.nii.gz?
        # var_cope_file = os.path.join(self.export_dir, var_cope_filename)
        # var_cope_original_filename, var_cope_filename = self.copy_nifti(var_cope_original_file, var_cope_file)

        # Contrast Variance Map entity (gets a freshly minted id; it is
        # linked to the standard error map via wasDerivedFrom below)
        # self.provBundle.entity('niiri:'+'contrast_variance_map_id_'+contrast_num, other_attributes=(
        contrast_var_id = NIIRI[str(uuid.uuid4())]
        self.p.entity(
            contrast_var_id, other_attributes=(
                (PROV['type'], FSL['ContrastVarianceMap']),
                # (NIDM['inCoordinateSpace'], self.var_coord_space.id),
                (DCT['format'], "image/nifti"),
                (CRYPTO['sha512'], self.get_sha_sum(self.file)),
                (NIDM['filename'], var_cope_filename)))

        # Create standard error map from contrast variance map:
        # standard error = sqrt(variance), voxel-wise.
        # NOTE(review): img.get_data() is deprecated in recent nibabel in
        # favour of get_fdata() — confirm the pinned nibabel version.
        var_cope_img = nib.load(self.file)
        contrast_variance = var_cope_img.get_data()
        standard_error_img = nib.Nifti1Image(np.sqrt(contrast_variance),
                                             var_cope_img.get_qform())
        nib.save(standard_error_img, standard_error_file)
    else:
        # The input already is a standard error map; just copy it over.
        standard_error_original_file, standard_error_file = self.copy_nifti(
            self.file, standard_error_file)

    path, filename = os.path.split(standard_error_file)
    self.p.entity(
        self.id,
        other_attributes=((PROV['type'], NIDM['ContrastStandardErrorMap']),
                          (DCT['format'], "image/nifti"),
                          (NIDM['inCoordinateSpace'], self.coord_space.id),
                          (PROV['location'],
                           Identifier("file://./" + filename)),
                          (CRYPTO['sha512'],
                           self.get_sha_sum(standard_error_file)),
                          (NIDM['filename'], filename),
                          (PROV['label'], "Contrast Standard Error Map")))

    if self.is_variance:
        # Record the derivation from the variance map created above
        self.p.wasDerivedFrom(self.id, contrast_var_id)

    return self.p
def __init__(self, stat_threshold=None, p_corr_threshold=None,
             p_uncorr_threshold=None, threshold_type=None, value=None,
             label=None, version=None, oid=None, equiv_thresh=None):
    """Initialise a height threshold.

    Exactly one of the threshold arguments (or ``value``) must be given.

    :param stat_threshold: statistic-value threshold
    :param p_corr_threshold: FWE-corrected p-value threshold
    :param p_uncorr_threshold: uncorrected p-value threshold
    :param threshold_type: threshold type (str is coerced to Identifier)
    :param value: threshold value (used with ``threshold_type``)
    :param label: optional display label
    :param version: NIDM version dict; defaults to {'num': '1.3.0'}
    :param oid: optional identifier forwarded to the parent constructor
    :param equiv_thresh: equivalent thresholds, if any
    :raises Exception: when no threshold is defined at all
    """
    super(HeightThreshold, self).__init__(oid=oid)
    # BUG FIX: the default was a mutable dict literal shared across all
    # calls; use None as the default and build the dict per call.
    if version is None:
        version = {'num': '1.3.0'}
    if not stat_threshold and not p_corr_threshold and \
            not p_uncorr_threshold and not value:
        raise Exception('No threshold defined')

    if isinstance(threshold_type, str):
        threshold_type = Identifier(threshold_type)

    thresh_desc = ""
    # BUG FIX: user_threshold_type could be referenced unbound below when
    # only `value` is supplied with a 1.0.0 version; initialise it.
    user_threshold_type = None
    if stat_threshold is not None:
        thresh_desc = "Z>" + str(stat_threshold)
        if version['num'] == "1.0.0":
            user_threshold_type = "Z-Statistic"
        else:
            threshold_type = OBO_STATISTIC
        value = stat_threshold
    elif p_uncorr_threshold is not None:
        thresh_desc = "p<" + str(p_uncorr_threshold) + " (uncorrected)"
        if version['num'] == "1.0.0":
            user_threshold_type = "p-value uncorrected"
        else:
            threshold_type = NIDM_P_VALUE_UNCORRECTED_CLASS
            value = p_uncorr_threshold
    elif p_corr_threshold is not None:
        thresh_desc = "p<" + str(p_corr_threshold) + " (FWE)"
        if version['num'] == "1.0.0":
            user_threshold_type = "p-value FWE"
        else:
            threshold_type = OBO_P_VALUE_FWER
            value = p_corr_threshold

    # NIDM 1.0.0 stored the raw thresholds; later versions store a single
    # (value, threshold_type) pair.
    if version['num'] == "1.0.0":
        self.user_threshold_type = user_threshold_type
        self.p_uncorr_threshold = p_uncorr_threshold
        self.p_corr_threshold = p_corr_threshold
        self.stat_threshold = stat_threshold
    else:
        self.value = value
        self.threshold_type = threshold_type

    if not label:
        self.label = "Height Threshold: " + thresh_desc
    else:
        self.label = label

    self.type = NIDM_HEIGHT_THRESHOLD
    self.prov_type = PROV['Entity']
    self.equiv_thresh = equiv_thresh
def export(self, nidm_version, export_dir):
    """Create prov entities and activities."""
    # Write the design matrix to a CSV file in the export directory
    np.savetxt(os.path.join(export_dir, self.csv_file),
               np.asarray(self.matrix), delimiter=",")

    # Older NIDM versions used a file:// relative location
    if nidm_version['num'] in ["1.0.0", "1.1.0"]:
        csv_location = Identifier("file://./" + self.csv_file)
    else:
        csv_location = Identifier(self.csv_file)

    attributes = [
        (PROV['type'], self.type),
        (PROV['label'], self.label),
        (NIDM_REGRESSOR_NAMES, json.dumps(self.regressors)),
        (DCT['format'], "text/csv"),
        (NFO['fileName'], self.filename),
        (DC['description'], self.image.id),
        (PROV['location'], csv_location),
    ]

    if self.hrf_models is not None:
        if nidm_version['num'] in ("1.0.0", "1.1.0"):
            if self.design_type is not None:
                attributes.append(
                    (NIDM_HAS_FMRI_DESIGN, self.design_type))
            else:
                warnings.warn("Design type is missing")

        # hrf model
        for hrf_model in self.hrf_models:
            attributes.append((NIDM_HAS_HRF_BASIS, hrf_model))

        # drift model
        if self.drift_model is not None:
            attributes.append(
                (NIDM_HAS_DRIFT_MODEL, self.drift_model.id))

    # Create "design matrix" entity
    self.add_attributes(attributes)
def export(self):
    """Create prov entities and activities."""
    # Copy the PNG visualisation of the excursion set into the export dir
    shutil.copy(self.file, self.export_dir)
    visu_filename = os.path.split(self.file)[1]

    # "PNG visualisation of excursion set" entity
    self.p.entity(self.id, other_attributes=(
        (PROV['type'], NIDM['Image']),
        (NIDM['filename'], visu_filename),
        (PROV['location'], Identifier("file://./" + visu_filename)),
        (DCT['format'], "image/png"),
    ))
    return self.p
def decode_json_representation(literal, bundle):
    """Decode a JSON literal representation into its Python value.

    :param literal: either a plain value, or a dict with '$' (value) and
        optional 'type' / 'lang' entries
    :param bundle: bundle used to resolve qualified names
    :return: the decoded value
    """
    if not isinstance(literal, dict):
        # simple type, just return it
        return literal

    # complex type: unpack value, datatype and language tag
    value = literal['$']
    datatype = valid_qualified_name(bundle, literal.get('type'))
    langtag = literal.get('lang')
    if datatype == XSD_ANYURI:
        return Identifier(value)
    if datatype == PROV_QUALIFIEDNAME:
        return valid_qualified_name(bundle, value)
    # Literals of standard Python types are not converted here; that
    # happens in _auto_literal_conversion() when added to a record.
    return Literal(value, datatype, langtag)
def export(self):
    """Create prov graph."""
    self.p.update(self.coord_space.export())

    # Copy the statistic map into the export directory
    stat_file = os.path.join(
        self.export_dir,
        self.stat_type + 'Statistic' + self.num + '.nii.gz')
    stat_orig_name, stat_name = self.copy_nifti(self.file, stat_file)

    label = "Statistic Map: " + self.name
    if self.stat_type == 'Z':
        label = self.stat_type + '-' + label

    attributes = [
        (PROV['type'], NIDM['StatisticMap']),
        (DCT['format'], "image/nifti"),
        (PROV['label'], label),
        (PROV['location'], Identifier("file://./" + stat_name)),
        (NIDM['statisticType'], NIDM[self.stat_type + 'Statistic']),
        (NIDM['filename'], stat_name),
        (NIDM['filename'], stat_orig_name),
        (NIDM['contrastName'], self.name),
        (CRYPTO['sha512'], self.get_sha_sum(stat_file)),
        (NIDM['inCoordinateSpace'], self.coord_space.id),
    ]

    if self.stat_type == 'Z':
        # For Z-statistics the error dof is infinite and the effect dof is 1
        attributes.insert(0, (NIDM['errorDegreesOfFreedom'], float("inf")))
        attributes.insert(0, (NIDM['effectDegreesOfFreedom'], 1.0))
    else:
        attributes.insert(0, (NIDM['errorDegreesOfFreedom'], self.dof))
        # FIXME: this should not be 1 for F-test
        attributes.insert(0, (NIDM['effectDegreesOfFreedom'], 1.0))

    # Create "Statistic Map" entity
    # FIXME: Deal with other than t-contrast maps: dof + statisticType
    self.p.entity(self.id, other_attributes=attributes)
    return self.p
def export(self):
    """Create prov entities and activities."""
    # Coordinate space entity
    self.p.update(self.coord_space.export())

    # Copy the grand mean map into the export directory
    gm_file = os.path.join(self.export_dir, 'GrandMean.nii.gz')
    gm_orig_name, gm_name = self.copy_nifti(self.file, gm_file)

    # Median of the grand mean values within the analysis mask
    gm_data = np.ndarray.flatten(nib.load(gm_file).get_data())
    mask_data = np.ndarray.flatten(nib.load(self.mask_file).get_data())
    in_mask = gm_data[mask_data > 0]
    masked_median = np.median(np.array(in_mask, dtype=float))

    # "Grand Mean Map" entity
    self.p.entity(self.id, other_attributes=(
        (PROV['type'], NIDM['GrandMeanMap']),
        (DCT['format'], "image/nifti"),
        (PROV['label'], "Grand Mean Map"),
        (NIDM['maskedMedian'], masked_median),
        (NIDM['filename'], gm_name),
        (NIDM['filename'], gm_orig_name),
        (NIDM['inCoordinateSpace'], self.coord_space.id),
        (CRYPTO['sha512'], self.get_sha_sum(gm_file)),
        (PROV['location'], Identifier("file://./" + gm_name)),
    ))
    return self.p
def export(self):
    """Create prov entities and activities."""
    # Export the visualisation of the design matrix first
    self.p.update(self.image.export())

    # Write the design matrix to a CSV file in the export directory
    csv_name = 'DesignMatrix.csv'
    np.savetxt(os.path.join(self.export_dir, csv_name),
               np.asarray(self.matrix), delimiter=",")

    # "Design matrix" entity
    self.p.entity(self.id, other_attributes=(
        (PROV['type'], NIDM['DesignMatrix']),
        (PROV['label'], "Design Matrix"),
        (DCT['format'], "text/csv"),
        (NIDM['filename'], "DesignMatrix.csv"),
        (NIDM['visualisation'], self.image.id),
        (PROV['location'], Identifier("file://./" + csv_name)),
    ))
    return self.p
def export(self):
    """Create prov entity for the design-matrix image file.

    Copies the image into the export directory (when a source file is
    set) and records an Image entity carrying both the original and the
    exported filenames.
    """
    if self.file is not None:
        # FIXME: replace by another name
        new_file = os.path.join(self.export_dir, "DesignMatrix.png")
        orig_filename, filename = self.copy_nifti(self.file, new_file)

        # BUG FIX: the attributes were passed as a dict, so the duplicated
        # NIDM['filename'] key silently dropped the original filename.
        # Use a tuple of pairs (as sibling exporters do) to record both.
        self.p.entity(self.id, other_attributes=(
            (PROV['type'], NIDM['Image']),
            (PROV['atLocation'], Identifier("file://./" + filename)),
            (NIDM['filename'], orig_filename),
            (NIDM['filename'], filename),
            (DCT['format'], "image/png"),
        ))
    return self.p
def w3c_publication_2():
    """Build the W3C publication example no. 2 document.

    Reproduces the bundle from ProvToolbox's
    asn/src/test/resources/prov/w3c-publication2.prov-asn: a directory
    copy activity (ex:rcp) that generates the published working draft
    from the hg Overview.html, used a publication request, and was
    associated with the webmaster agent.
    """
    # https://github.com/lucmoreau/ProvToolbox/blob/master/asn/src/test/resources/prov/w3c-publication2.prov-asn
    ex = Namespace("ex", "http://example.org/")
    rec = Namespace("rec", "http://example.org/record")
    w3 = Namespace("w3", "http://www.w3.org/TR/2011/")
    hg = Namespace(
        "hg",
        "http://dvcs.w3.org/hg/prov/raw-file/9628aaff6e20/model/releases/WD-prov-dm-20111215/",
    )

    doc = ProvDocument()
    doc.entity(hg["Overview.html"], {"prov:type": "file in hg"})
    doc.entity(w3["WD-prov-dm-20111215"], {"prov:type": "html4"})
    doc.activity(ex["rcp"], None, None, {"prov:type": "copy directory"})
    doc.wasGeneratedBy("w3:WD-prov-dm-20111215", "ex:rcp", identifier=rec["g"])
    doc.entity(
        "ex:req3",
        {
            "prov:type": Identifier(
                "http://www.w3.org/2005/08/01-transitions.html#pubreq")
        },
    )
    doc.used("ex:rcp", "hg:Overview.html", identifier="rec:u")
    doc.used("ex:rcp", "ex:req3")
    doc.wasDerivedFrom(
        "w3:WD-prov-dm-20111215", "hg:Overview.html", "ex:rcp",
        "rec:g", "rec:u")
    doc.agent("ex:webmaster", {"prov:type": "Person"})
    doc.wasAssociatedWith("ex:rcp", "ex:webmaster")
    return doc
def w3c_publication_2():
    """Build the W3C publication example no. 2 document.

    Mirrors the PROV-ASN bundle from ProvToolbox's
    asn/src/test/resources/prov/w3c-publication2.prov-asn: the ex:rcp
    copy activity generates the published draft from hg:Overview.html,
    used the publication request ex:req3, and was associated with the
    ex:webmaster agent.
    """
    # https://github.com/lucmoreau/ProvToolbox/blob/master/asn/src/test/resources/prov/w3c-publication2.prov-asn
    ex = Namespace('ex', 'http://example.org/')
    rec = Namespace('rec', 'http://example.org/record')
    w3 = Namespace('w3', 'http://www.w3.org/TR/2011/')
    hg = Namespace(
        'hg',
        'http://dvcs.w3.org/hg/prov/raw-file/9628aaff6e20/model/releases/WD-prov-dm-20111215/'
    )

    doc = ProvDocument()
    doc.entity(hg['Overview.html'], {'prov:type': "file in hg"})
    doc.entity(w3['WD-prov-dm-20111215'], {'prov:type': "html4"})
    doc.activity(ex['rcp'], None, None, {'prov:type': "copy directory"})
    doc.wasGeneratedBy('w3:WD-prov-dm-20111215', 'ex:rcp',
                       identifier=rec['g'])
    doc.entity(
        'ex:req3',
        {
            'prov:type': Identifier(
                "http://www.w3.org/2005/08/01-transitions.html#pubreq")
        })
    doc.used('ex:rcp', 'hg:Overview.html', identifier='rec:u')
    doc.used('ex:rcp', 'ex:req3')
    doc.wasDerivedFrom('w3:WD-prov-dm-20111215', 'hg:Overview.html',
                       'ex:rcp', 'rec:g', 'rec:u')
    doc.agent('ex:webmaster', {'prov:type': "Person"})
    doc.wasAssociatedWith('ex:rcp', 'ex:webmaster')
    return doc
def prov_api_record_example():
    """Build a ProvActivity record plus the fixtures adapter tests expect.

    Returns a namedtuple of (metadata, attributes, prov_record,
    expected_attributes).
    """
    doc = ProvDocument()
    doc.add_namespace("ex", "http://example.com")
    doc.add_namespace("custom", "http://custom.com")

    attributes = attributes_dict_example()
    # dict and list values are not allowed in a prov_record; they exist
    # only for the low-level adapter tests, so drop them here.
    del attributes["ex:dict value"]
    del attributes["ex:list value"]

    attributes["ex:Qualified name "] = doc.valid_qualified_name(
        "custom:qualified name")
    attributes["ex:Qualified name 2"] = "ex:unqualified_name"
    attributes["ex:Literal"] = Literal("test literal", langtag="en")
    attributes["ex:Literal 2"] = Literal(
        "test literal with datatype", langtag="en",
        datatype=PROV["InternationalizedString"])
    attributes["ex:identifier type"] = Identifier("http://example.com/#test")

    expected_attributes = {
        doc.valid_qualified_name(key): value
        for key, value in attributes.items()
    }
    # The prov lib don't require to auto convert string values into
    # qualified names:
    # valid_name = doc.valid_qualified_name("ex:Qualified name 2")
    # expected_attributes[valid_name] = doc.valid_qualified_name("ex:unqualified_name")

    namespaces = {
        "ex": "http://example.com",
        "custom": "http://custom.com",
        "prov": "http://www.w3.org/ns/prov#",
    }

    type_map = {
        "ex:date value": {"type": "xsd:dateTime"},
        "ex:double value": {"type": "xsd:double"},
        "ex:int value": {"type": "xsd:int"},
        "ex:Qualified name ": {'type': 'prov:QUALIFIED_NAME'},
        # "ex:Qualified name 2": {'type': 'prov:QUALIFIED_NAME'},
        # (not mapped: the prov lib doesn't auto-convert strings into
        # qualified names)
        "ex:Literal": {'lang': 'en'},
        "ex:Literal 2": {'lang': 'en'},
        "ex:identifier type": {'type': 'xsd:anyURI'},
    }

    metadata = {
        METADATA_KEY_PROV_TYPE: PROV_RECORD_IDS_MAP["activity"],
        METADATA_KEY_IDENTIFIER: doc.valid_qualified_name("ex:record"),
        METADATA_KEY_NAMESPACES: namespaces,
        METADATA_KEY_TYPE_MAP: type_map,
    }

    record = ProvActivity(doc, "ex:record", attributes)

    Example = namedtuple(
        "prov_api_metadata_record_example",
        "metadata, attributes, prov_record, expected_attributes")
    return Example(metadata, attributes, record, expected_attributes)
def get_anon_id(self, obj, local_prefix='id'):
    """Return a stable anonymous blank-node id for *obj*.

    A new '_:<prefix><n>' identifier is minted and cached on first use;
    subsequent calls return the cached identifier.
    """
    try:
        return self._cache[obj]
    except KeyError:
        self._count += 1
        anon_id = Identifier('_:%s%d' % (local_prefix, self._count))
        self._cache[obj] = anon_id
        return anon_id