def convert_stop_times(self, csv_filename):
    read_stop_times = self.__open_file(csv_filename)
    for row in read_stop_times:
        stop_id = str.strip(row["stop_id"])
        sequence_num = str.strip(row["stop_sequence"])
        trip_id = str.strip(row["trip_id"])
        stop_time = Resource(self.graph, URIRef(self.uri + trip_id + "_" + stop_id +
                                                "_StopTime_" + sequence_num))
        stop_time.set(RDF.type, self.GTFS.StopTime)
        stop_time.add(self.GTFS.trip, self.get_trip(trip_id))
        stop_time.add(self.GTFS.arrivalTime, Literal(str.strip(row["arrival_time"]), datatype=XSD.time))
        stop_time.add(self.GTFS.departureTime, Literal(str.strip(row["departure_time"]), datatype=XSD.time))
        stop_time.add(self.GTFS.stop, self.get_stop(stop_id))
        stop_time.add(self.GTFS.stopSequence, Literal(sequence_num, datatype=XSD.nonNegativeInteger))
        if "stop_headsign" in row:
            stop_time.add(self.GTFS.headsign, Literal(str.strip(row["stop_headsign"]), datatype=XSD.string))
        if "pickup_type" in row:
            pickup_type = self.get_stop_type(str.strip(row["pickup_type"]))
            stop_time.add(self.GTFS.pickupType, pickup_type)
        if "drop_off_type" in row:
            dropoff_type = self.get_stop_type(str.strip(row["drop_off_type"]))
            stop_time.add(self.GTFS.dropOffType, dropoff_type)
        if "shape_dist_traveled" in row:
            # shape_dist_traveled is a (possibly fractional) distance, so it is
            # stored as a plain float literal rather than xsd:nonNegativeInteger.
            stop_time.add(self.GTFS.distanceTraveled,
                          Literal(float(str.strip(row["shape_dist_traveled"]))))
def _vcard_name(self):
    g = Graph()
    vc = Resource(g, URIRef(self.vcard_name_uri))
    vc.set(RDF.type, VCARD.Name)
    vc.set(RDFS.label, Literal(self._label()))
    vc.set(VCARD.familyName, Literal(self.cffamilynames))
    vc.set(VCARD.givenName, Literal(self.cffirstnames))
    if hasattr(self, 'middlename'):
        vc.set(VIVO.middleName, Literal(self.middlename))
    return g
def unified_orgs(self):
    g = Graph()
    addresses = self.addresses()
    for addr in addresses:
        for org in addr["unified_orgs"]:
            uri = waan_uri(org)
            r = Resource(g, uri)
            r.set(RDF.type, WOS.UnifiedOrganization)
            r.set(RDFS.label, Literal(org))
            # relation set by address
    return g
def to_rdf(self):
    g = Graph()
    o = Resource(g, self.uri)
    o.set(RDF.type, FOAF.Organization)
    o.set(RDFS.label, Literal(self.cfname))
    o.set(CONVERIS.converisId, Literal(self.cid))
    if hasattr(self, 'cfresact'):
        o.set(VIVO.overview, Literal(self.cfresact))
    for child in self.get_children():
        # Has sub-organization. Use add(), not set(): set() would discard
        # previously added children on each loop iteration.
        o.add(OBO['BFO_0000051'], child)
    return g
def convert_calendar_dates(self, csv_filename):
    read_dates = self.__open_file(csv_filename)
    for row in read_dates:
        service = self.get_service(str.strip(row["service_id"]))
        calendar_date = Resource(self.graph, URIRef(self.uri + str.strip(row["service_id"]) +
                                                    "_cal" + "_" + str.strip(row["date"])))
        service.add(self.GTFS.serviceRule, calendar_date)
        calendar_date.set(RDF.type, self.GTFS.CalendarDateRule)
        calendar_date.add(DCTERMS.date, self.get_date_literal(str.strip(row["date"])))
        # GTFS exception_type: "1" = service added on this date, "2" = removed.
        # Collapse to a boolean; xsd:boolean accepts "1"/"0" as lexical forms.
        exception_type = str.strip(row["exception_type"])
        if exception_type == "2":
            exception_type = "0"
        calendar_date.add(self.GTFS.dateAddition, Literal(exception_type, datatype=XSD.boolean))
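# For reference, the exception_type mapping above can stand alone as a tiny
# helper. This is a sketch with a hypothetical name, not part of the original
# converter; GTFS defines exception_type "1" = service added on this date and
# "2" = service removed.
def exception_to_date_addition(exception_type):
    """Map a GTFS calendar_dates.txt exception_type to the boolean lexical
    form used for gtfs:dateAddition ("1"/"0" are valid xsd:boolean forms)."""
    return "0" if exception_type.strip() == "2" else "1"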
def convert_transfers(self, csv_filename):
    read_transfers = self.__open_file(csv_filename)
    for row in read_transfers:
        from_stop = str.strip(row["from_stop_id"])
        to_stop = str.strip(row["to_stop_id"])
        transfers = Resource(self.graph, URIRef(self.uri + "_" + from_stop + "_" + to_stop))
        transfers.set(RDF.type, self.GTFS.TransferRule)
        transfers.add(self.GTFS.originStop, self.get_stop(from_stop))
        transfers.add(self.GTFS.destinationStop, self.get_stop(to_stop))
        transfers.add(self.GTFS.transferType, self.get_transfer_type(str.strip(row["transfer_type"])))
        if "min_transfer_time" in row and str.strip(row["min_transfer_time"]):
            transfers.add(self.GTFS.minimumTransferTime,
                          Literal(str.strip(row["min_transfer_time"]), datatype=XSD.nonNegativeInteger))
def convert_shapes(self, csv_filename):
    read_shapes = self.__open_file(csv_filename)
    for row in read_shapes:
        shape = self.get_shape(str.strip(row["shape_id"]))
        shape_point = Resource(self.graph, URIRef(str(shape.identifier) + "_" +
                                                  str.strip(row["shape_pt_sequence"])))
        shape.add(self.GTFS.shapePoint, shape_point)
        shape_point.set(RDF.type, self.GTFS.ShapePoint)
        shape_point.set(self.GEO.long, Literal(str.strip(row["shape_pt_lon"]), datatype=XSD.string))
        shape_point.set(self.GEO.lat, Literal(str.strip(row["shape_pt_lat"]), datatype=XSD.string))
        shape_point.set(self.GTFS.pointSequence,
                        Literal(str.strip(row["shape_pt_sequence"]), datatype=XSD.nonNegativeInteger))
        if "shape_dist_traveled" in row and str.strip(row["shape_dist_traveled"]) != "":
            shape_point.set(self.GTFS.distanceTraveled,
                            Literal(str.strip(row["shape_dist_traveled"]), datatype=XSD.nonNegativeInteger))
def to_rdf(self):
    g = Graph()
    e = Resource(g, self.uri)
    e.set(RDF.type, VIVO.Position)
    e.set(CONVERIS.converisId, Literal(self.cid))
    # Check jobtitle and function for position name.
    if hasattr(self, 'jobtitle'):
        title = self.jobtitle
    elif hasattr(self, 'function'):
        title = self.function['value']
    else:
        title = "Research"
    e.set(RDFS.label, Literal(title))
    # start/end
    try:
        start = self.cfstartdate
    except AttributeError:
        start = None
    try:
        end = self.cfenddate
    except AttributeError:
        end = None
    # Add datetime interval. get_dti returns None when both dates are missing,
    # so unpacking raises TypeError, which we treat as "no interval".
    try:
        dti_uri, dti_g = self.get_dti(start, end)
        g += dti_g
        e.set(VIVO.dateTimeInterval, dti_uri)
    except TypeError:
        pass
    return g
def convert_frequencies(self, csv_filename):
    read_freqs = self.__open_file(csv_filename)
    for row in read_freqs:
        freq = Resource(self.graph, URIRef(self.uri + str.strip(row["trip_id"]) +
                                           str.strip(row["start_time"]) + str.strip(row["end_time"])))
        freq.set(RDF.type, self.GTFS.Frequency)
        freq.add(self.GTFS.trip, self.get_trip(str.strip(row["trip_id"])))
        freq.add(self.GTFS.startTime, Literal(str.strip(row["start_time"]), datatype=XSD.string))
        freq.add(self.GTFS.endTime, Literal(str.strip(row["end_time"]), datatype=XSD.string))
        freq.add(self.GTFS.headwaySeconds,
                 Literal(str.strip(row["headway_secs"]), datatype=XSD.nonNegativeInteger))
        if "exact_times" in row:
            exact = str.strip(row["exact_times"]) == "1"
            freq.add(self.GTFS.exactTimes, Literal(exact, datatype=XSD.boolean))
def process(self, input, output):
    username = "******"
    password = "******"
    lmClient = LifemapperClient(username, password)

    # Get the JSON spec
    jsonSpecification = input.value(LM.hasJSONExperimentSpecificationURL)

    # Get published scenario layers
    scenarioLayerIDs = []
    for layer in input[LM.hasScenarioLayer]:
        layerID = layer.value(LM.hasLayerID)
        scenarioLayerIDs.append(str(layerID))

    # Post scenario and save ID
    lmClient.postScenario(scenarioLayerIDs)
    #scenarioID = str(234)
    scenarioID = lmClient.getScenarioID()
    scenarioURL = lmClient.getScenarioURL()

    # The URI generator
    resourceURI = ResourceURI()

    # Create the published scenario
    scenario = RDFLibResource(output.graph, resourceURI.getURI("scenario"))
    ScenarioClass = RDFLibResource(output.graph, LM.Scenario)
    scenario.set(RDF.type, ScenarioClass)
    scenario.set(LM.hasScenarioID, Literal(scenarioID))
    scenario.set(LM.hasScenarioURL, Literal(scenarioURL))
    scenario.set(LM.hasJSONExperimentSpecificationURL, jsonSpecification)

    # Add scenarioID to output
    output.set(LM.hasPublishedScenario, scenario)
def to_rdf(self):
    data = self.profile
    g = Graph()
    person = Resource(g, self.uri)
    person.set(RDF.type, FOAF.Person)
    names = data['full_names'].split("|")
    full_name = max(names, key=len)
    person.set(RDFS.label, Literal(full_name.strip()))
    # For sorting
    #person.set(FOAF.familyName, Literal(data['lastName']))
    #person.set(WOS.alphaBrowse, Literal(data['lastName'][0].lower()))
    # dais: add() rather than set(), since a person can have multiple DAIS ids.
    for did in self.dais_ids:
        person.add(WOS.daisNg, Literal(did))
    # Vcard individual
    vci_uri = self.vcard_uri
    person.set(OBO['ARG_2000028'], vci_uri)
    g.add((vci_uri, RDF.type, VCARD.Individual))
    # Vcard Name
    #g += self._vcard_name()
    #g.add((vci_uri, VCARD.hasName, URIRef(self.vcard_name_uri)))
    # Vcard email
    vte = self._vcard_email()
    if vte is not None:
        g += vte
        g.add((vci_uri, VCARD.hasEmail, URIRef(self.vcard_email_uri)))
    return g
def add_venue(self):
    """
    Add publication venue.

    :return: rdflib.Graph
    """
    g = Graph()
    isbn = self.isbn()
    issn = self.issn() or self.eissn()
    if isbn is not None:
        vtype = BIBO.Book
        uri = D['venue-' + isbn]
    elif issn is not None:
        vtype = BIBO.Journal
        uri = D['venue-' + issn]
    else:
        # Placeholder venue when neither an ISSN nor an ISBN is available.
        logger.info("No source/venue ISSN or ISBN found for {}.".format(self.ut()))
        vtype = BIBO.Journal
        uri = D['venue-' + self.localid]
    venue = Resource(g, uri)
    venue.set(RDF.type, vtype)
    venue.set(RDFS.label, Literal(self.venue()))
    # Only record an identifier when one was found; the placeholder branch
    # above reaches here with issn == None.
    if vtype == BIBO.Journal:
        if issn is not None:
            venue.set(BIBO.issn, Literal(issn))
    else:
        venue.set(BIBO.isbn, Literal(isbn))
    g.add((self.pub_uri, VIVO.hasPublicationVenue, uri))
    return g
def convert_feed(self, csv_filename):
    read_feed = self.__open_file(csv_filename)
    for row in read_feed:
        feed = Resource(self.graph, URIRef(str.strip(row["publisher"])))
        feed.set(RDF.type, self.GTFS.Feed)
        feed.add(DCTERMS.publisher, Literal(str.strip(row["publisher"]), datatype=XSD.string))
        feed.add(DCTERMS.title, Literal(str.strip(row["feed_publisher_name"]), datatype=XSD.string))
        feed.add(DCTERMS.language, Literal(str.strip(row["feed_lang"]), datatype=XSD.string))
        if "feed_version" in row and str.strip(row["feed_version"]) != "":
            feed.add(self.SCHEMA.version, Literal(str.strip(row["feed_version"]), datatype=XSD.string))
        if "feed_start_date" in row and str.strip(row["feed_start_date"]) != "" \
                and "feed_end_date" in row and str.strip(row["feed_end_date"]) != "":
            temporal = Resource(self.graph, URIRef(feed.identifier + "_temporal"))
            # Attach the validity period to the feed (mirrors convert_calendar).
            feed.set(DCTERMS.temporal, temporal)
            temporal.set(RDF.type, DCTERMS.temporal)
            temporal.add(self.SCHEMA.startDate, self.get_date_literal(str.strip(row["feed_start_date"])))
            temporal.add(self.SCHEMA.endDate, self.get_date_literal(str.strip(row["feed_end_date"])))
def convert_fare_rules(self, csv_filename):
    read_fares = self.__open_file(csv_filename)
    for row in read_fares:
        fare = self.get_fare(str.strip(row["fare_id"]))
        fare_rule = Resource(self.graph, URIRef(self.uri + str.strip(row["fare_id"]) +
                                                "_rule_" + str(self.next_fare_rule_num)))
        self.next_fare_rule_num += 1
        fare_rule.set(RDF.type, self.GTFS.FareRule)
        # The rule points at the fare class looked up above, not at itself.
        fare_rule.set(self.GTFS.fareClass, fare)
        if "route_id" in row and str.strip(row["route_id"]) != "":
            fare_rule.add(self.GTFS.route, self.get_route(str.strip(row["route_id"])))
        if "origin_id" in row and str.strip(row["origin_id"]) != "":
            fare_rule.add(self.GTFS.originZone, self.get_zone(str.strip(row["origin_id"])))
        if "destination_id" in row and str.strip(row["destination_id"]) != "":
            fare_rule.add(self.GTFS.destinationZone, self.get_zone(str.strip(row["destination_id"])))
        if "contains_id" in row and str.strip(row["contains_id"]) != "":
            fare_rule.add(self.GTFS.zone, self.get_zone(str.strip(row["contains_id"])))
def _date(self, dtype, dv):
    g = Graph()
    date_obj = converis.convert_date(dv)
    date_uri = URIRef(DATA_NAMESPACE + 'date' + dtype + self.vid)
    de = Resource(g, date_uri)
    de.set(RDF.type, VIVO.DateTimeValue)
    if date_obj is not None:
        de.set(RDFS.label, Literal(dv))
        de.set(VIVO.dateTime, Literal(date_obj, datatype=XSD.date))
        de.set(VIVO.dateTimePrecision, VIVO.yearMonthDayPrecision)
    return date_uri, g
def to_rdf(self):
    g = Graph()
    p = Resource(g, self.uri)
    p.add(RDF.type, FOAF.Person)
    p.set(RDFS.label, Literal(self._label()))
    p.set(CONVERIS.converisId, Literal(self.cid))
    if hasattr(self, 'cfresint'):
        p.set(VIVO.researchOverview, Literal(self.cfresint))
    if hasattr(self, 'orcid'):
        p.set(VIVO.orcidId, self.orcid_uri)
        # Confirm the orcid
        g.add((self.orcid_uri, RDF.type, OWL.Thing))
        # Todo - review if we want to confirm all orcids
        g.add((self.orcid_uri, VIVO.confirmedOrcidId, self.uri))
    # Vcard individual
    vci_uri = URIRef(self.vcard_uri)
    p.set(OBO['ARG_2000028'], vci_uri)
    g.add((vci_uri, RDF.type, VCARD.Individual))
    # Vcard Name
    g += self._vcard_name()
    g.add((vci_uri, VCARD.hasName, URIRef(self.vcard_name_uri)))
    # Vcard title
    vtg = self._vcard_title()
    if vtg is not None:
        g += vtg
        g.add((vci_uri, VCARD.hasTitle, URIRef(self.vcard_title_uri)))
    # Vcard email
    vte = self._vcard_email()
    if vte is not None:
        g += vte
        g.add((vci_uri, VCARD.hasEmail, URIRef(self.vcard_email_uri)))
    # positions
    g += self.get_positions()
    return g
def to_rdf(self):
    g = Graph()
    r = Resource(g, self.uri)
    r.set(RDF.type, SKOS.Concept)
    r.set(RDFS.label, Literal(self.name))
    r.set(CONVERIS.converisId, Literal(self.cid))
    return g
def _vcard_title(self):
    title = self.profile['position_title']
    g = Graph()
    vt = Resource(g, self.vcard_title_uri)
    vt.set(RDF.type, VCARD.Title)
    vt.set(RDFS.label, Literal(title))
    vt.set(VCARD.title, Literal(title))
    return g
def sub_orgs(self):
    g = Graph()
    addresses = self.addresses()
    for addr in addresses:
        org = addr["organization"]
        for suborg in addr['sub_organizations']:
            label = "{}, {}".format(suborg, org)
            uri = self.sub_org_uri(label)
            r = Resource(g, uri)
            r.set(RDF.type, WOS.SubOrganization)
            r.set(RDFS.label, Literal(label))
            r.set(WOS.organizationName, Literal(org))
            r.set(WOS.subOrganizationName, Literal(suborg))
    return g
def _vcard_name(self):
    g = Graph()
    vc = Resource(g, URIRef(self.vcard_name_uri))
    vc.set(RDF.type, VCARD.Name)
    vc.set(RDFS.label, Literal(self.profile['name'].strip()))
    vc.set(VCARD.familyName, Literal(self.profile['lastName']))
    vc.set(VCARD.givenName, Literal(self.profile['firstName']))
    return g
def _vcard_title(self):
    if not hasattr(self, 'academictitle'):
        return None
    g = Graph()
    vt = Resource(g, self.vcard_title_uri)
    vt.set(RDF.type, VCARD.Title)
    vt.set(RDFS.label, Literal(self.academictitle))
    vt.set(VCARD.title, Literal(self.academictitle))
    return g
def to(self):
    """
    Core publication metadata mapped to VIVO RDF.

    :return: Graph
    """
    g = Graph()
    r = Resource(g, self.uri)
    r.set(RDFS.label, Literal(self.title()))
    for vtype in self.rec_type():
        r.add(RDF.type, vtype)
    r.set(WOS.wosId, Literal(self.ut))
    meta = self.meta()
    # data properties
    data_props = [
        #('author_list', WOS.authorList),
        ('abstract', BIBO.abstract),
        ('funding_acknowledgement', WOS.fundingText),
        ('volume', BIBO.volume),
        ('issue', BIBO.issue),
        ('start', BIBO.pageStart),
        ('end', BIBO.pageEnd),
        ('page_count', BIBO.numPages),
        ('doi', BIBO.doi),
        #('cite_key', WOS.citeKey),
        ('reference_count', WOS.referenceCount),
        ('citation_count', WOS.citationCount),
    ]
    for key, prop in data_props:
        value = meta.get(key)
        if value is not None:
            g.add((self.uri, prop, Literal(value)))
    g += self.add_pub_date()
    return g
def addressships(self):
    g = Graph()
    addresses = self.addresses()
    for addr in addresses:
        addr_uri = self.addr_uri(addr["full_address"], addr["number"])
        org = addr["organization"]
        r = Resource(g, addr_uri)
        r.set(RDF.type, WOS.Address)
        r.set(RDFS.label, Literal(addr['full_address']))
        r.set(WOS.organizationName, Literal(org))
        r.set(WOS.sequenceNumber, Literal(addr['number']))
        # relation to author set by authorship
        # relate to pub
        r.set(VIVO.relates, self.uri)
        # sub orgs
        for suborg in addr["sub_organizations"]:
            label = "{}, {}".format(suborg, org)
            so_uri = self.sub_org_uri(label)
            r.add(VIVO.relates, so_uri)
        # relate unified orgs
        for uorg in addr["unified_orgs"]:
            uo_uri = waan_uri(uorg)
            r.add(VIVO.relates, uo_uri)
    return g
def to_rdf(self):
    g = Graph()
    r = Resource(g, self.uri)
    r.set(RDF.type, SKOS.Concept)
    r.set(RDFS.label, Literal(self.name))
    r.set(CONVERIS.converisId, Literal(self.cid))
    g += self.has_researchers()
    g += self.get_narrower()
    return g
def _vcard_email(self):
    if not hasattr(self, 'email'):
        return None
    g = Graph()
    vt = Resource(g, self.vcard_email_uri)
    vt.set(RDF.type, VCARD.Email)
    # Label probably not necessary
    vt.set(RDFS.label, Literal(self.email))
    vt.set(VCARD.email, Literal(self.email))
    return g
def add_pub_date(self):
    """
    Publication dates in VIVO's expected format.
    """
    g = Graph()
    value = self.pub_date()
    if value is None:
        return g
    date_uri = self.make_date_uri(self.ut, value)
    date = Resource(g, date_uri)
    date.set(RDF.type, VIVO.DateTimeValue)
    date.set(VIVO.dateTimePrecision, VIVO.yearMonthDayPrecision)
    date.add(VIVO.dateTime, Literal("%sT00:00:00" % (value), datatype=XSD.dateTime))
    date.add(RDFS.label, Literal(value))
    g.add((self.uri, VIVO.dateTimeValue, date_uri))
    return g
def process(self, input, output):
    username = "******"
    password = "******"
    lmClient = LifemapperClient(username, password)

    # Get scenario id
    scenario = input.value(LM.specifiesModellingScenario)
    scenarioID = scenario.value(LM.hasScenarioID)

    # Extract Algorithm
    algorithm = input.value(LM.specifiesModellingAlgorithm)
    algorithmCode = algorithm.value(LM.hasAlgorithmCode)

    # Extract Parameter Bindings (a name -> value mapping, so use a dict)
    params = algorithm.value(MD.behaviorControlledBy)
    bindings = {}
    for param in params[MD.hasParameterMember]:
        name = param.value(MD.hasParameterName)
        value = param.value(MD.boundToValue)
        bindings[name] = value

    # Extract OccurrenceSetID
    occurrenceSet = input.value(LM.specifiesOccurrenceSet)
    occurrenceSetID = occurrenceSet.value(LM.hasOccurrenceSetID)

    lmClient.postExperiment(algorithmCode, bindings, occurrenceSetID, scenarioID)
    #resultURL = "http://somedomain.com/testURL"

    # get experimentURL and id
    experimentURL = lmClient.getExperimentURL()
    experimentID = lmClient.getExperimentID()
    print("experiment result URL %s " % experimentURL)

    # The URI generator
    resourceURI = ResourceURI()

    # Create the experiment result
    experimentResult = RDFLibResource(output.graph, resourceURI.getURI("experimentResult"))
    experimentResultClass = RDFLibResource(output.graph, LM.ExperimentResult)
    experimentResult.set(RDF.type, experimentResultClass)
    experimentResult.set(LM.hasExperimentResultURL, Literal(experimentURL))
    experimentResult.set(LM.hasExperimentResultID, Literal(experimentID))

    # Add the experiment result to the executed specification
    output.set(LM.hasExperimentResult, experimentResult)
def _vcard_email(self):
    g = Graph()
    try:
        emails = self.profile["emails"].split("|")
    except KeyError:
        try:
            emails = [self.profile['email']]
        except KeyError:
            emails = []
    for email in emails:
        # All emails share one vCard URI; since set() replaces existing
        # values, only the last email in the list is retained.
        vt = Resource(g, self.vcard_email_uri)
        vt.set(RDF.type, VCARD.Work)
        # Label probably not necessary
        vt.set(RDFS.label, Literal(email))
        vt.set(VCARD.email, Literal(email))
    return g
def authorships(self):
    g = Graph()
    aus = self.authors()
    for au in aus:
        aship_uri = self.aship_uri(au['rank'])
        r = Resource(g, aship_uri)
        r.set(RDFS.label, Literal(au["display_name"]))
        r.set(RDF.type, VIVO.Authorship)
        r.set(VIVO.rank, Literal(au['rank']))
        data_props = [
            ('rank', VIVO.rank),
            ('full_name', WOS.fullName),
            ('display_name', WOS.displayName),
            ('wos_standard', WOS.standardName),
            ('first', WOS.firstName),
            ('last', WOS.lastName),
            ('email', WOS.email),
            ('dais_ng', WOS.daisNg),
            ('reprint', WOS.reprint),
        ]
        for key, prop in data_props:
            value = au.get(key)
            if value is not None:
                r.set(prop, Literal(value))
        # relations
        r.add(VIVO.relates, self.uri)
        # relate to addresses too
        # address nums are a space separated list of numbers
        addr_nums = au["address"]
        if addr_nums is None:
            continue
        for anum in addr_nums.split():
            addr_uris = self.addr_uris_from_number(anum)
            for auri in addr_uris:
                r.add(VIVO.relates, auri)
    return g
def get_dti(self, start, end):
    if (start is None) and (end is None):
        return None
    # Date/Time Interval
    g = Graph()
    dti_uri = D['dti'] + self.vid
    dti = Resource(g, dti_uri)
    dti.set(RDF.type, VIVO.DateTimeInterval)
    if start is not None:
        start_uri, start_g = self._date("start", start)
        dti.set(VIVO.start, start_uri)
        g += start_g
    if end is not None:
        end_uri, end_g = self._date("end", end)
        g += end_g
        dti.set(VIVO.end, end_uri)
    return dti_uri, g
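# A minimal sketch of get_dti's calling convention, mirroring the position
# to_rdf method above; `position`, `start`, `end`, and `g` are placeholder
# names. get_dti returns None when both dates are missing, so tuple unpacking
# raises TypeError, which callers treat as "no interval".
try:
    dti_uri, dti_g = position.get_dti(start, end)
    g += dti_g
except TypeError:
    pass  # neither a start nor an end date: skip the interval entirely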
def get_stop(self, stop_id):
    stop = Resource(self.graph, URIRef(self.uri + "stop_" + stop_id))
    stop.set(DCTERMS.identifier, Literal(stop_id, datatype=XSD.string))
    return stop
def get_zone(self, zone_id):
    the_zone = Resource(self.graph, URIRef(self.uri + "zone_" + zone_id))
    the_zone.set(RDF.type, self.GTFS.Zone)
    the_zone.set(DCTERMS.identifier, Literal(zone_id, datatype=XSD.string))
    return the_zone
def get_route(self, route_id):
    route = Resource(self.graph, URIRef(self.uri + "route_" + route_id))
    route.set(RDF.type, self.GTFS.Route)
    route.set(DCTERMS.identifier, Literal(route_id, datatype=XSD.string))
    return route
def get_trip(self, trip_id):
    trip = Resource(self.graph, URIRef(self.uri + "trip_" + trip_id))
    trip.set(RDF.type, self.GTFS.Trip)
    trip.set(DCTERMS.identifier, Literal(trip_id, datatype=XSD.string))
    return trip
def get_service(self, service_id):
    service = Resource(self.graph, URIRef(self.uri + "service_" + service_id))
    service.set(RDF.type, self.GTFS.Service)
    service.set(DCTERMS.identifier, Literal(service_id, datatype=XSD.string))
    return service
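# The get_* accessors above (and get_fare, get_agency, get_shape further
# below) all follow one minting pattern: build a deterministic URI from a
# prefix plus the GTFS id, type the resource, and record the id as
# dcterms:identifier. A hypothetical consolidation, not in the original
# source (get_stop is the one exception: it leaves the rdf:type to be set
# elsewhere):
def _get_entity(self, prefix, entity_id, rdf_type):
    entity = Resource(self.graph, URIRef(self.uri + prefix + "_" + entity_id))
    entity.set(RDF.type, rdf_type)
    entity.set(DCTERMS.identifier, Literal(entity_id, datatype=XSD.string))
    return entity

# e.g. get_trip(trip_id) would reduce to:
#     return self._get_entity("trip", trip_id, self.GTFS.Trip)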
class Ldpr(metaclass=ABCMeta):
    '''LDPR (LDP Resource).

    Definition: https://www.w3.org/TR/ldp/#ldpr-resource

    This class and related subclasses contain the implementation pieces of
    the vanilla LDP specifications. This is extended by the
    `lakesuperior.fcrepo.Resource` class.

    Inheritance graph: https://www.w3.org/TR/ldp/#fig-ldpc-types

    Note: Even though LdpNr (which is a subclass of Ldpr) handles binary
    files, it still has an RDF representation in the triplestore. Hence,
    some of the RDF-related methods are defined in this class rather than
    in the LdpRs class.

    Convention notes:

    All the methods in this class handle internal UUIDs (URN). Public-facing
    URIs are converted from URNs and passed by these methods to the methods
    handling HTTP negotiation.

    The data passed to the store layout for processing should be in a graph.
    All conversion from request payload strings is done here.
    '''

    EMBED_CHILD_RES_URI = nsc['fcrepo'].EmbedResources
    FCREPO_PTREE_TYPE = nsc['fcrepo'].Pairtree
    INS_CNT_REL_URI = nsc['ldp'].insertedContentRelation
    MBR_RSRC_URI = nsc['ldp'].membershipResource
    MBR_REL_URI = nsc['ldp'].hasMemberRelation
    RETURN_CHILD_RES_URI = nsc['fcrepo'].Children
    RETURN_INBOUND_REF_URI = nsc['fcrepo'].InboundReferences
    RETURN_SRV_MGD_RES_URI = nsc['fcrepo'].ServerManaged

    # Workflow type. Inbound means that the resource is being written to the
    # store, outbound is being retrieved for output.
    WRKF_INBOUND = '_workflow:inbound_'
    WRKF_OUTBOUND = '_workflow:outbound_'

    # Default user to be used for the `createdBy` and `lastUpdatedBy` if a
    # user is not provided.
    DEFAULT_USER = Literal('BypassAdmin')

    # RDF Types that populate a new resource.
    base_types = {
        nsc['fcrepo'].Resource,
        nsc['ldp'].Resource,
        nsc['ldp'].RDFSource,
    }

    # Predicates that do not get removed when a resource is replaced.
    protected_pred = (
        nsc['fcrepo'].created,
        nsc['fcrepo'].createdBy,
        nsc['ldp'].contains,
    )

    # Server-managed RDF types ignored in the RDF payload if the resource is
    # being created. N.B. These still raise an error if the resource exists.
    smt_allow_on_create = {
        nsc['ldp'].DirectContainer,
        nsc['ldp'].IndirectContainer,
    }

    ## MAGIC METHODS ##

    def __init__(self, uid, repr_opts={}, provided_imr=None, **kwargs):
        '''Instantiate an in-memory LDP resource that can be loaded from and
        persisted to storage.

        @param uid (string) uid of the resource. If None (must be explicitly
        set) it refers to the root node. It can also be the full URI or URN,
        in which case it will be converted.
        @param repr_opts (dict) Options used to retrieve the IMR. See
        `parse_rfc7240` for format details.
        @param provided_imr (string) RDF data provided by the client in
        operations such as `PUT` or `POST`, serialized as a string. This
        sets the `provided_imr` property.
        '''
        self.uid = (
            rdfly.uri_to_uid(uid) if isinstance(uid, URIRef) else uid)
        self.uri = nsc['fcres'][uid]
        # @FIXME Not ideal, should separate app-context dependent functions
        # in a different toolbox.
        self.tbox = Toolbox()

        self.provided_imr = provided_imr

    @property
    def rsrc(self):
        '''
        The RDFLib resource representing this LDPR. This is a live
        representation of the stored data if present.

        @return rdflib.resource.Resource
        '''
        if not hasattr(self, '_rsrc'):
            self._rsrc = rdfly.ds.resource(self.uri)

        return self._rsrc

    @property
    def imr(self):
        '''
        Extract an in-memory resource from the graph store.

        If the resource is not stored (yet), a `ResourceNotExistsError` is
        raised.
        @return rdflib.resource.Resource
        '''
        if not hasattr(self, '_imr'):
            if hasattr(self, '_imr_options'):
                logger.debug(
                    'Getting RDF representation for resource {}'.format(
                        self.uid))
                #logger.debug('IMR options: {}'.format(self._imr_options))
                imr_options = self._imr_options
            else:
                imr_options = {}
            options = dict(imr_options, strict=True)
            self._imr = rdfly.extract_imr(self.uid, **options)

        return self._imr

    @imr.setter
    def imr(self, v):
        '''
        Replace in-memory buffered resource.

        @param v (set | rdflib.Graph) New set of triples to populate the IMR
        with.
        '''
        if isinstance(v, Resource):
            v = v.graph
        self._imr = Resource(Graph(), self.uri)
        gr = self._imr.graph
        gr += v

    @imr.deleter
    def imr(self):
        '''
        Delete in-memory buffered resource.
        '''
        delattr(self, '_imr')

    @property
    def metadata(self):
        '''
        Get resource metadata.
        '''
        if not hasattr(self, '_metadata'):
            if hasattr(self, '_imr'):
                logger.info('Metadata is IMR.')
                self._metadata = self._imr
            else:
                logger.info('Getting metadata for resource {}'.format(
                    self.uid))
                self._metadata = rdfly.get_metadata(self.uid)

        return self._metadata

    @metadata.setter
    def metadata(self, rsrc):
        '''
        Set resource metadata.
        '''
        if not isinstance(rsrc, Resource):
            raise TypeError('Provided metadata is not a Resource object.')
        self._metadata = rsrc

    @property
    def stored_or_new_imr(self):
        '''
        Extract an in-memory resource for harmless manipulation and output.

        If the resource is not stored (yet), initialize a new IMR with basic
        triples.

        @return rdflib.resource.Resource
        '''
        if not hasattr(self, '_imr'):
            if hasattr(self, '_imr_options'):
                #logger.debug('IMR options: {}'.format(self._imr_options))
                imr_options = self._imr_options
            else:
                imr_options = {}
            options = dict(imr_options, strict=True)
            try:
                self._imr = rdfly.extract_imr(self.uid, **options)
            except ResourceNotExistsError:
                self._imr = Resource(Graph(), self.uri)
                for t in self.base_types:
                    self.imr.add(RDF.type, t)

        return self._imr

    @property
    def out_graph(self):
        '''
        Return a graph of the resource's IMR formatted for output.
        '''
        out_gr = Graph(identifier=self.uri)
        for t in self.imr.graph:
            if (
                # Exclude digest hash and version information.
                t[1] not in {
                    nsc['premis'].hasMessageDigest,
                    nsc['fcrepo'].hasVersion,
                }
            ) and (
                # Only include server managed triples if requested.
                self._imr_options.get('incl_srv_mgd', True)
                or not self._is_trp_managed(t)
            ):
                out_gr.add(t)

        return out_gr

    @property
    def version_info(self):
        '''
        Return version metadata (`fcr:versions`).
        '''
        if not hasattr(self, '_version_info'):
            try:
                #@TODO get_version_info should return a graph.
                self._version_info = rdfly.get_version_info(self.uid).graph
            except ResourceNotExistsError:
                self._version_info = Graph(identifier=self.uri)

        return self._version_info

    @property
    def version_uids(self):
        '''
        Return the set of version UIDs (relative to their parent resource).
        '''
        gen = self.version_info[
            self.uri:
            nsc['fcrepo'].hasVersion / nsc['fcrepo'].hasVersionLabel:]

        return {str(uid) for uid in gen}

    @property
    def is_stored(self):
        if not hasattr(self, '_is_stored'):
            if hasattr(self, '_imr'):
                self._is_stored = len(self.imr.graph) > 0
            else:
                self._is_stored = rdfly.ask_rsrc_exists(self.uid)

        return self._is_stored

    @property
    def types(self):
        '''All RDF types.
        @return set(rdflib.term.URIRef)
        '''
        if not hasattr(self, '_types'):
            if len(self.metadata.graph):
                metadata = self.metadata
            elif getattr(self, 'provided_imr', None) and \
                    len(self.provided_imr.graph):
                metadata = self.provided_imr
            else:
                return set()
            self._types = set(metadata.graph[self.uri:RDF.type])

        return self._types

    @property
    def ldp_types(self):
        '''The LDP types.

        @return set(rdflib.term.URIRef)
        '''
        if not hasattr(self, '_ldp_types'):
            self._ldp_types = {t for t in self.types if nsc['ldp'] in t}

        return self._ldp_types

    ## LDP METHODS ##

    def head(self):
        '''
        Return values for the headers.
        '''
        out_headers = defaultdict(list)

        digest = self.metadata.value(nsc['premis'].hasMessageDigest)
        if digest:
            etag = digest.identifier.split(':')[-1]
            out_headers['ETag'] = 'W/"{}"'.format(etag)

        last_updated_term = self.metadata.value(nsc['fcrepo'].lastModified)
        if last_updated_term:
            out_headers['Last-Modified'] = arrow.get(last_updated_term)\
                .format('ddd, D MMM YYYY HH:mm:ss Z')

        for t in self.ldp_types:
            out_headers['Link'].append('{};rel="type"'.format(t.n3()))

        return out_headers

    def get_version(self, ver_uid, **kwargs):
        '''
        Get a version by label.
        '''
        return rdfly.extract_imr(self.uid, ver_uid, **kwargs).graph

    def create_or_replace(self, create_only=False):
        '''
        Create or update a resource. PUT and POST methods, which are almost
        identical, are wrappers for this method.

        @param create_only (boolean) Whether this is a create-only operation.
        '''
        create = create_only or not self.is_stored
        ev_type = RES_CREATED if create else RES_UPDATED

        self._add_srv_mgd_triples(create)
        ref_int = rdfly.config['referential_integrity']
        if ref_int:
            self._check_ref_int(ref_int)

        # Delete existing triples if replacing.
        if not create:
            rdfly.truncate_rsrc(self.uid)

        remove_trp = {
            (self.uri, nsc['fcrepo'].lastModified, None),
            (self.uri, nsc['fcrepo'].lastModifiedBy, None),
        }
        add_trp = set(self.provided_imr.graph) | self._containment_rel(create)

        self._modify_rsrc(ev_type, remove_trp, add_trp)
        new_gr = Graph()
        for trp in add_trp:
            new_gr.add(trp)
        self.imr = new_gr.resource(self.uri)

        return ev_type

    def put(self):
        '''
        https://www.w3.org/TR/ldp/#ldpr-HTTP_PUT
        '''
        return self.create_or_replace()

    def patch(self, update_str):
        '''
        Update an existing resource by applying a SPARQL-UPDATE query.

        @param update_str (string) SPARQL-Update statements.
        '''
        self.handling = 'lenient'  # FCREPO does that and Hyrax requires it.

        return self._sparql_update(update_str)

    def bury_rsrc(self, inbound, tstone_pointer=None):
        '''
        Delete a single resource and create a tombstone.

        @param inbound (boolean) Whether to delete the inbound relationships.
        @param tstone_pointer (URIRef) If set to a URN, this creates a
        pointer to the tombstone of the resource that used to contain the
        deleted resource. Otherwise the deleted resource becomes a tombstone.
        '''
        logger.info('Burying resource {}'.format(self.uid))
        # Create a backup snapshot for resurrection purposes.
        self.create_rsrc_snapshot(uuid4())

        remove_trp = {
            trp for trp in self.imr.graph
            if trp[1] != nsc['fcrepo'].hasVersion
        }

        if tstone_pointer:
            add_trp = {
                (self.uri, nsc['fcsystem'].tombstone, tstone_pointer)}
        else:
            add_trp = {
                (self.uri, RDF.type, nsc['fcsystem'].Tombstone),
                (self.uri, nsc['fcrepo'].created, env.timestamp_term),
            }

        self._modify_rsrc(RES_DELETED, remove_trp, add_trp)

        if inbound:
            for ib_rsrc_uri in self.imr.graph.subjects(None, self.uri):
                remove_trp = {(ib_rsrc_uri, None, self.uri)}
                ib_rsrc = Ldpr(ib_rsrc_uri)
                # To preserve inbound links in history, create a snapshot.
                ib_rsrc.create_rsrc_snapshot(uuid4())
                ib_rsrc._modify_rsrc(RES_UPDATED, remove_trp)

        return RES_DELETED

    def forget_rsrc(self, inbound=True):
        '''
        Remove all traces of a resource and versions.
        '''
        logger.info('Purging resource {}'.format(self.uid))
        refint = env.config['store']['ldp_rs']['referential_integrity']
        inbound = True if refint else inbound
        rdfly.forget_rsrc(self.uid, inbound)

        # @TODO This could be a different event type.
        return RES_DELETED

    def create_rsrc_snapshot(self, ver_uid):
        '''
        Perform version creation and return the version UID.
        '''
        # Create version resource from copying the current state.
        logger.info(
            'Creating version snapshot {} for resource {}.'.format(
                ver_uid, self.uid))
        ver_add_gr = set()
        vers_uid = '{}/{}'.format(self.uid, VERS_CONT_LABEL)
        ver_uid = '{}/{}'.format(vers_uid, ver_uid)
        ver_uri = nsc['fcres'][ver_uid]
        ver_add_gr.add((ver_uri, RDF.type, nsc['fcrepo'].Version))
        for t in self.imr.graph:
            if (
                t[1] == RDF.type and t[2] in {
                    nsc['fcrepo'].Binary,
                    nsc['fcrepo'].Container,
                    nsc['fcrepo'].Resource,
                }
            ) or (
                t[1] in {
                    nsc['fcrepo'].hasParent,
                    nsc['fcrepo'].hasVersions,
                    nsc['fcrepo'].hasVersion,
                    nsc['premis'].hasMessageDigest,
                }
            ):
                pass
            else:
                ver_add_gr.add((
                    self.tbox.replace_term_domain(t[0], self.uri, ver_uri),
                    t[1], t[2]))

        rdfly.modify_rsrc(ver_uid, add_trp=ver_add_gr)

        # Update resource admin data.
        rsrc_add_gr = {
            (self.uri, nsc['fcrepo'].hasVersion, ver_uri),
            (self.uri, nsc['fcrepo'].hasVersions, nsc['fcres'][vers_uid]),
        }
        self._modify_rsrc(RES_UPDATED, add_trp=rsrc_add_gr, notify=False)

        return ver_uid

    def resurrect_rsrc(self):
        '''
        Resurrect a resource from a tombstone.

        @EXPERIMENTAL
        '''
        tstone_trp = set(rdfly.extract_imr(self.uid, strict=False).graph)

        ver_rsp = self.version_info.graph.query('''
            SELECT ?uid {
                ?latest fcrepo:hasVersionLabel ?uid ;
                    fcrepo:created ?ts .
            }
            ORDER BY DESC(?ts)
            LIMIT 1
        ''')
        ver_uid = str(ver_rsp.bindings[0]['uid'])
        ver_trp = set(rdfly.get_metadata(self.uid, ver_uid).graph)

        laz_gr = Graph()
        for t in ver_trp:
            if t[1] != RDF.type or t[2] not in {
                nsc['fcrepo'].Version,
            }:
                laz_gr.add((self.uri, t[1], t[2]))
        laz_gr.add((self.uri, RDF.type, nsc['fcrepo'].Resource))
        if nsc['ldp'].NonRdfSource in laz_gr[:RDF.type:]:
            laz_gr.add((self.uri, RDF.type, nsc['fcrepo'].Binary))
        elif nsc['ldp'].Container in laz_gr[:RDF.type:]:
            laz_gr.add((self.uri, RDF.type, nsc['fcrepo'].Container))

        laz_set = set(laz_gr) | self._containment_rel()
        self._modify_rsrc(RES_CREATED, tstone_trp, laz_set)

        return self.uri

    def create_version(self, ver_uid=None):
        '''
        Create a new version of the resource.

        NOTE: This creates an event only for the resource being updated (due
        to the added `hasVersion` triple and possibly to the `hasVersions`
        one) but not for the version being created.

        @param ver_uid Version UID. If not provided, or if it already exists,
        a new UUID-based UID is generated instead.
        '''
        if not ver_uid or ver_uid in self.version_uids:
            ver_uid = str(uuid4())

        return self.create_rsrc_snapshot(ver_uid)

    def revert_to_version(self, ver_uid, backup=True):
        '''
        Revert to a previous version.

        @param ver_uid (string) Version UID.
        @param backup (boolean) Whether to create a backup snapshot. Default
        is true.
        '''
        # Create a backup snapshot.
        if backup:
            self.create_version()

        ver_gr = rdfly.extract_imr(
            self.uid, ver_uid=ver_uid, incl_children=False)
        self.provided_imr = Resource(Graph(), self.uri)

        for t in ver_gr.graph:
            if not self._is_trp_managed(t):
                self.provided_imr.add(t[1], t[2])
            # @TODO Check individual objects: if they are repo-managed URIs
            # and not existing or tombstones, they are not added.

        return self.create_or_replace(create_only=False)

    ## PROTECTED METHODS ##

    def _is_trp_managed(self, t):
        '''
        Whether a triple is server-managed.

        @return boolean
        '''
        return t[1] in srv_mgd_predicates or (
            t[1] == RDF.type and t[2] in srv_mgd_types)

    def _modify_rsrc(
            self, ev_type, remove_trp=set(), add_trp=set(), notify=True):
        '''
        Low-level method to modify a graph for a single resource.

        This is a crucial point for messaging. Any write operation on the
        RDF store that needs to be notified should be performed by invoking
        this method.

        @param ev_type (string) The type of event (create, update, delete).
        @param remove_trp (set) Triples to be removed.
        @param add_trp (set) Triples to be added.
        @param notify (boolean) Whether to send a message about the change.
        '''
        rdfly.modify_rsrc(self.uid, remove_trp, add_trp)

        if notify and env.config['application'].get('messaging'):
            logger.debug('Enqueuing message for {}'.format(self.uid))
            self._enqueue_msg(ev_type, remove_trp, add_trp)

    def _enqueue_msg(self, ev_type, remove_trp=None, add_trp=None):
        '''
        Compose a message about a resource change.

        The message is enqueued for asynchronous processing.

        @param ev_type (string) The event type. See global constants.
        @param remove_trp (set) Triples removed.
        @param add_trp (set) Triples added.
        '''
        try:
            # Use a set so that types found in add_trp can be appended below.
            rsrc_type = {str(t) for t in self.types}
            actor = self.metadata.value(nsc['fcrepo'].createdBy)
        except (ResourceNotExistsError, TombstoneError):
            rsrc_type = set()
            actor = None
        for t in add_trp:
            if t[1] == RDF.type:
                rsrc_type.add(str(t[2]))
            elif actor is None and t[1] == nsc['fcrepo'].createdBy:
                actor = t[2]

        env.app_globals.changelog.append((set(remove_trp), set(add_trp), {
            'ev_type': ev_type,
            'timestamp': env.timestamp.format(),
            'rsrc_type': rsrc_type,
            'actor': actor,
        }))

    def _check_ref_int(self, config):
        gr = self.provided_imr.graph
        for o in gr.objects():
            if isinstance(o, URIRef) and str(o).startswith(nsc['fcres']):
                obj_uid = rdfly.uri_to_uid(o)
                if not rdfly.ask_rsrc_exists(obj_uid):
                    if config == 'strict':
                        raise RefIntViolationError(obj_uid)
                    else:
                        logger.info(
                            'Removing link to non-existent repo resource: {}'
                            .format(obj_uid))
                        gr.remove((None, None, o))

    def _check_mgd_terms(self, gr):
        '''
        Check whether server-managed terms are in a RDF payload.

        @param gr (rdflib.Graph) The graph to validate.
        '''
        offending_subjects = set(gr.subjects()) & srv_mgd_subjects
        if offending_subjects:
            if self.handling == 'strict':
                raise ServerManagedTermError(offending_subjects, 's')
            else:
                for s in offending_subjects:
                    logger.info('Removing offending subj: {}'.format(s))
                    gr.remove((s, None, None))

        offending_predicates = set(gr.predicates()) & srv_mgd_predicates
        # Allow some predicates if the resource is being created.
        if offending_predicates:
            if self.handling == 'strict':
                raise ServerManagedTermError(offending_predicates, 'p')
            else:
                for p in offending_predicates:
                    logger.info('Removing offending pred: {}'.format(p))
                    gr.remove((None, p, None))

        offending_types = set(gr.objects(predicate=RDF.type)) & srv_mgd_types
        if not self.is_stored:
            offending_types -= self.smt_allow_on_create
        if offending_types:
            if self.handling == 'strict':
                raise ServerManagedTermError(offending_types, 't')
            else:
                for t in offending_types:
                    logger.info('Removing offending type: {}'.format(t))
                    gr.remove((None, RDF.type, t))

        #logger.debug('Sanitized graph: {}'.format(gr.serialize(
        #    format='turtle').decode('utf-8')))
        return gr

    def _add_srv_mgd_triples(self, create=False):
        '''
        Add server-managed triples to a provided IMR.

        @param create (boolean) Whether the resource is being created.
        '''
        # Base LDP types.
        for t in self.base_types:
            self.provided_imr.add(RDF.type, t)

        # Message digest.
        cksum = self.tbox.rdf_cksum(self.provided_imr.graph)
        self.provided_imr.set(
            nsc['premis'].hasMessageDigest,
            URIRef('urn:sha1:{}'.format(cksum)))

        # Create and modify timestamp.
        if create:
            self.provided_imr.set(nsc['fcrepo'].created, env.timestamp_term)
            self.provided_imr.set(nsc['fcrepo'].createdBy, self.DEFAULT_USER)
        else:
            self.provided_imr.set(
                nsc['fcrepo'].created,
                self.metadata.value(nsc['fcrepo'].created))
            self.provided_imr.set(
                nsc['fcrepo'].createdBy,
                self.metadata.value(nsc['fcrepo'].createdBy))

        self.provided_imr.set(nsc['fcrepo'].lastModified, env.timestamp_term)
        self.provided_imr.set(nsc['fcrepo'].lastModifiedBy, self.DEFAULT_USER)

    def _containment_rel(self, create=True):
        '''Find the closest parent in the path indicated by the uid and
        establish a containment triple.

        Check the path-wise parent of the new resource. If it exists, add the
        containment relationship with this UID. Otherwise, create a container
        resource as the parent. This function may recurse up the path tree
        until an existing container is found.

        E.g. if only fcres:/a exists:
        - If fcres:/a/b/c/d is being created, a becomes container of
          fcres:/a/b/c/d. Also, containers are created for fcres:/a/b and
          fcres:/a/b/c.
        - If fcres:/e is being created, the root node becomes container of
          fcres:/e.

        @param create (bool) Whether the resource is being created. If false,
        the parent container is not updated.
        '''
        from lakesuperior.model.ldp_factory import LdpFactory

        if '/' in self.uid.lstrip('/'):
            # Traverse up the hierarchy to find the parent.
            path_components = self.uid.lstrip('/').split('/')
            cnd_parent_uid = '/' + '/'.join(path_components[:-1])
            if rdfly.ask_rsrc_exists(cnd_parent_uid):
                parent_rsrc = LdpFactory.from_stored(cnd_parent_uid)
                if nsc['ldp'].Container not in parent_rsrc.types:
                    raise InvalidResourceError(
                        cnd_parent_uid, 'Parent {} is not a container.')
                parent_uid = cnd_parent_uid
            else:
                parent_rsrc = LdpFactory.new_container(cnd_parent_uid)
                # This will trigger this method again and recurse until an
                # existing container or the root node is reached.
                parent_rsrc.create_or_replace()
                parent_uid = parent_rsrc.uid
        else:
            parent_uid = ROOT_UID

        parent_rsrc = LdpFactory.from_stored(
            parent_uid, repr_opts={'incl_children': False}, handling='none')

        # Only update parent if the resource is new.
        if create:
            add_gr = Graph()
            add_gr.add(
                (nsc['fcres'][parent_uid], nsc['ldp'].contains, self.uri))
            parent_rsrc._modify_rsrc(RES_UPDATED, add_trp=add_gr)

        # Direct or indirect container relationship.
        return self._add_ldp_dc_ic_rel(parent_rsrc)

    def _dedup_deltas(self, remove_gr, add_gr):
        '''
        Remove duplicate triples from add and remove delta graphs, which
        would otherwise contain unnecessary statements that annul each other.

        @return tuple 2 "clean" sets of respectively remove statements and
        add statements.
        '''
        return (
            remove_gr - add_gr,
            add_gr - remove_gr
        )

    def _add_ldp_dc_ic_rel(self, cont_rsrc):
        '''
        Add relationship triples from a parent direct or indirect container.

        @param cont_rsrc (rdflib.resource.Resource) The container resource.
        '''
        cont_p = set(cont_rsrc.metadata.graph.predicates())

        logger.info('Checking direct or indirect containment.')
        logger.debug('Parent predicates: {}'.format(cont_p))

        add_trp = {(self.uri, nsc['fcrepo'].hasParent, cont_rsrc.uri)}

        if self.MBR_RSRC_URI in cont_p and self.MBR_REL_URI in cont_p:
            from lakesuperior.model.ldp_factory import LdpFactory

            s = cont_rsrc.metadata.value(self.MBR_RSRC_URI).identifier
            p = cont_rsrc.metadata.value(self.MBR_REL_URI).identifier

            if cont_rsrc.metadata[RDF.type:nsc['ldp'].DirectContainer]:
                logger.info('Parent is a direct container.')
                logger.debug('Creating DC triples.')
                o = self.uri
            elif (
                    cont_rsrc.metadata[RDF.type:nsc['ldp'].IndirectContainer]
                    and self.INS_CNT_REL_URI in cont_p):
                logger.info('Parent is an indirect container.')
                cont_rel_uri = cont_rsrc.metadata.value(
                    self.INS_CNT_REL_URI).identifier
                o = self.provided_imr.value(cont_rel_uri).identifier
                logger.debug('Target URI: {}'.format(o))
                logger.debug('Creating IC triples.')

            target_rsrc = LdpFactory.from_stored(rdfly.uri_to_uid(s))
            target_rsrc._modify_rsrc(RES_UPDATED, add_trp={(s, p, o)})

        return add_trp

    def _sparql_update(self, update_str, notify=True):
        '''
        Apply a SPARQL update to a resource.

        @param update_str (string) SPARQL-Update string. All URIs are local.
        '''
        self.handling = 'lenient'  # FCREPO does that and Hyrax requires it.
        delta = self._sparql_delta(update_str)

        return self._modify_rsrc(RES_UPDATED, *delta, notify=notify)

    def _sparql_delta(self, q):
        '''
        Calculate the delta obtained by a SPARQL Update operation.

        This is a critical component of the SPARQL update process and does a
        couple of things:

        1. It ensures that no resources outside of the subject of the request
        are modified (e.g. by variable subjects)
        2. It verifies that none of the terms being modified is server
        managed.

        This method extracts an in-memory copy of the resource and performs
        the query on that once it has checked if any of the server managed
        terms is in the delta. If it is, it raises an exception.

        NOTE: This only checks if a server-managed term is effectively being
        modified. If a server-managed term is present in the query but does
        not cause any change in the updated resource, no error is raised.

        @return tuple(rdflib.Graph) Remove and add graphs. These can be used
        with `BaseStoreLayout.update_resource` and/or recorded as separate
        events in a provenance tracking system.
        '''
        logger.debug('Provided SPARQL query: {}'.format(q))
        pre_gr = self.imr.graph

        post_gr = pre_gr | Graph()
        post_gr.update(q)

        remove_gr, add_gr = self._dedup_deltas(pre_gr, post_gr)

        #logger.debug('Removing: {}'.format(
        #    remove_gr.serialize(format='turtle').decode('utf8')))
        #logger.debug('Adding: {}'.format(
        #    add_gr.serialize(format='turtle').decode('utf8')))

        remove_gr = self._check_mgd_terms(remove_gr)
        add_gr = self._check_mgd_terms(add_gr)

        return set(remove_gr), set(add_gr)
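# A minimal usage sketch for the Ldpr class above. It assumes an application
# context that wires up the `rdfly` store layout and the LdpFactory referenced
# in the methods; '/my/rsrc' is a placeholder UID and this is not a documented
# public API.
from lakesuperior.model.ldp_factory import LdpFactory

rsrc = LdpFactory.from_stored('/my/rsrc', repr_opts={'incl_children': False})
print(rsrc.head())       # ETag, Last-Modified, and Link headers
print(rsrc.ldp_types)    # e.g. {ldp:Resource, ldp:RDFSource}

# Apply a SPARQL update: _sparql_delta computes remove/add sets, strips
# server-managed terms, and _modify_rsrc enqueues a RES_UPDATED message.
rsrc.patch('INSERT DATA { <> <http://purl.org/dc/terms/title> "hello" . }')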
def get_fare(self, fare_id):
    fare = Resource(self.graph, URIRef(self.uri + "fare_" + fare_id))
    fare.set(RDF.type, self.GTFS.FareClass)
    fare.set(DCTERMS.identifier, Literal(fare_id, datatype=XSD.string))
    return fare
def convert_calendar(self, csv_filename):
    read_calendar = self.__open_file(csv_filename)
    for row in read_calendar:
        service = self.get_service(str.strip(row["service_id"]))
        calendar = Resource(self.graph, URIRef(self.uri + str.strip(row["service_id"]) + "_cal"))
        service.add(self.GTFS.serviceRule, calendar)
        calendar.set(RDF.type, self.GTFS.CalendarRule)
        # GTFS weekday columns are "0"/"1", which are valid xsd:boolean
        # lexical forms.
        calendar.set(self.GTFS.monday, Literal(str.strip(row["monday"]), datatype=XSD.boolean))
        calendar.set(self.GTFS.tuesday, Literal(str.strip(row["tuesday"]), datatype=XSD.boolean))
        calendar.set(self.GTFS.wednesday, Literal(str.strip(row["wednesday"]), datatype=XSD.boolean))
        calendar.set(self.GTFS.thursday, Literal(str.strip(row["thursday"]), datatype=XSD.boolean))
        calendar.set(self.GTFS.friday, Literal(str.strip(row["friday"]), datatype=XSD.boolean))
        calendar.set(self.GTFS.saturday, Literal(str.strip(row["saturday"]), datatype=XSD.boolean))
        calendar.set(self.GTFS.sunday, Literal(str.strip(row["sunday"]), datatype=XSD.boolean))
        temporal = Resource(self.graph, URIRef(self.uri + str.strip(row["service_id"]) +
                                               "_cal" + "_temporal"))
        calendar.set(DCTERMS.temporal, temporal)
        temporal.add(self.SCHEMA.startDate, self.get_date_literal(str.strip(row["start_date"])))
        temporal.add(self.SCHEMA.endDate, self.get_date_literal(str.strip(row["end_date"])))
def add_date(self):
    """
    Add vivo:DateTimeValue for publication.

    :return: rdflib.Graph
    """
    g = Graph()
    date_uri = D['date-' + self.localid]
    de = Resource(g, date_uri)
    de.set(RDF.type, VIVO.DateTimeValue)
    year, month, month_num = self.pub_date()
    # Add year and month if possible.
    if month_num is not None:
        de.set(RDFS.label, Literal("{}, {}".format(month, year)))
        # xsd:gYearMonth is the XSD datatype for year-month values.
        de.set(
            VIVO.dateTime,
            Literal("{}-{}".format(year, month_num), datatype=XSD.gYearMonth)
        )
        de.set(VIVO.dateTimePrecision, VIVO.yearMonthPrecision)
    else:
        de.set(RDFS.label, Literal(year))
        de.set(
            VIVO.dateTime,
            Literal("{}".format(year), datatype=XSD.gYear)
        )
        de.set(VIVO.dateTimePrecision, VIVO.yearPrecision)
    g.add((self.pub_uri, VIVO.dateTimeValue, date_uri))
    return g
def to_rdf(self):
    """
    Convert the API publication object to VIVO RDF.

    :return: rdflib.Graph
    """
    g = Graph()
    pub = Resource(g, self.pub_uri)
    pub.set(RDF.type, self.vivo_type())
    pub.set(RDFS.label, Literal(self.title()))
    # WoS UT. Map to VIVO.identifier for now.
    # ToDo: this should be a more specific property
    pub.set(VIVO.identifier, Literal(self.ut()))
    # DOI
    doi = self.doi()
    if doi is not None:
        pub.set(BIBO.doi, Literal(doi))
    # Volume
    volume = self.volume()
    if volume is not None:
        pub.set(BIBO.volume, Literal(volume))
    # Issue
    issue = self.issue()
    if issue is not None:
        pub.set(BIBO.issue, Literal(issue))
    # Pages
    pages = self.pages()
    if pages is not None:
        start, end = pages.split('-')
        pub.set(BIBO.start, Literal(start))
        pub.set(BIBO.end, Literal(end))
    # publication venue
    g += self.add_venue()
    # date
    g += self.add_date()
    # authorship and vcards
    g += self.authorship()
    # links
    web_link, linkg = self.add_vcard_weblink()
    g += linkg
    # relate web link and publication
    g.add((self.pub_uri, OBO['ARG_2000028'], web_link))
    return g
def get_agency(self, agency_id):
    agency = Resource(self.graph, URIRef(self.uri + "agency_" + agency_id))
    agency.add(RDF.type, self.GTFS.Agency)
    agency.set(DCTERMS.identifier, Literal(agency_id, datatype=XSD.string))
    return agency
def get_shape(self, shape_id):
    shape = Resource(self.graph, URIRef(self.uri + "shape_" + shape_id))
    shape.set(RDF.type, self.GTFS.Shape)
    shape.set(DCTERMS.identifier, Literal(shape_id, datatype=XSD.string))
    return shape
def to_rdf(self):
    uri = self._uri()
    g = Graph()
    jr = Resource(g, uri)
    jr.set(RDF.type, BIBO.Journal)
    jr.set(RDFS.label, Literal(self.title))
    jr.set(LOCAL.identifier, Literal(self.wosid))
    if self.issn is not None:
        jr.set(BIBO.issn, Literal(self.issn))
    if self.eissn is not None:
        jr.set(BIBO.eissn, Literal(self.eissn))
    if self.wikidata is not None:
        jr.set(OWL.sameAs, URIRef(self.wikidata))
    return g
def process(self, input, output):
    username = "******"
    password = "******"
    lmClient = LifemapperClient(username, password)

    # Get file download URL of the TIFF layer
    manif = input.value(DATA.hasManifestation)
    tiffFileDownloadURL = manif.value(DATA.hasFileDownloadURL)

    # Get the typeCode and layerUnits
    typeCode = input.value(LM.hasTypeCode)
    layerUnits = input.value(LM.hasLayerUnits)

    # Get the JSON spec
    jsonSpecification = input.value(LM.hasJSONExperimentSpecificationURL)

    lmClient.postLayer(tiffFileDownloadURL, layerUnits, typeCode)
    #layerID = str(12)
    layerID = lmClient.getLayerID()
    layerURL = lmClient.getLayerURL()
    print("posted layer ID %s " % layerID)

    # The URI generator
    resourceURI = ResourceURI()

    # Create new manifestation
    fileManifestation = RDFLibResource(output.graph, resourceURI.getURI("manifestation"))
    FileManifestationClass = RDFLibResource(output.graph, DATA.FileManifestation)
    fileManifestation.set(RDF.type, FileManifestationClass)
    fileManifestation.set(DATA.hasLandingPageURL, Literal(layerURL))

    # Create the ScenarioLayer
    layer = RDFLibResource(output.graph, resourceURI.getURI("layer"))
    ScenarioLayerClass = RDFLibResource(output.graph, LM.ScenarioLayer)
    layer.set(RDF.type, ScenarioLayerClass)
    layer.set(DATA.hasManifestation, fileManifestation)
    layer.set(LM.hasLayerID, Literal(layerID))
    layer.set(LM.hasLayerURL, Literal(layerURL))
    layer.set(LM.hasJSONExperimentSpecificationURL, jsonSpecification)

    output.set(LM.hasPublishedLayer, layer)