def convert(graph, input_path):
    """Add one ParkingSite (plus its operator and address vCard) per CSV row.

    Expects columns: Name, Type, URL, Operator, Latitude, Longitude,
    Address, Postcode.  Triples are added to ``graph`` in place.
    """
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            p = PARKING[utils.idify(row["Name"])]
            graph.add((p, RDF.type, PARKING_ONT["ParkingSite"]))
            graph.add((p, utils.RDFS['label'], Literal(row["Name"])))
            graph.add((p, PARKING_ONT['parkingType'],
                       PARKING_TYPE_ONT[utils.idify(row["Type"])]))
            graph.add((p, PARKING_ONT['informationPage'], Literal(row["URL"])))
            # the operator is a resource of its own so several sites can share it
            p_operator = PARKING["operator/" + utils.idify(row["Operator"])]
            graph.add((p, PARKING_ONT['operator'], p_operator))
            graph.add((p_operator, RDF.type, PARKING_ONT["ParkingSiteOperator"]))
            graph.add((p_operator, utils.RDFS['label'], Literal(row["Operator"])))
            graph.add((p_operator, utils.ADMINGEO["MetropolitanDistrict"],
                       URIRef("http://data.ordnancesurvey.co.uk/id/7000000000018805")))
            graph.add((p, utils.GEO["lat"], Literal(row["Latitude"])))
            graph.add((p, utils.GEO["long"], Literal(row["Longitude"])))
            address = utils.idify(row["Address"])
            # NOTE(review): hasAddress uses a hard-coded absolute URI while the
            # vCard node below is minted in the PARKING namespace — confirm both
            # resolve to the same resource.
            graph.add((p, utils.VCARD['hasAddress'],
                       URIRef("http://data.gmdsp.org.uk/def/council/parking/address/" + address)))
            # now add the address VCARD
            vcard = PARKING["address/" + address]
            graph.add((vcard, RDF.type, utils.VCARD["Location"]))
            graph.add((vcard, utils.RDFS['label'], Literal(row["Name"])))
            graph.add((vcard, utils.VCARD['street-address'], Literal(row["Address"])))
            graph.add((vcard, utils.VCARD['postal-code'], Literal(row["Postcode"])))
            graph.add((vcard, utils.POST['postcode'],
                       URIRef(utils.convertpostcodeto_osuri(row["Postcode"]))))
def convert(graph, input_path):
    """Add one Allotment (plus its address vCard) per CSV row.

    Expects columns: Name, Northing, Easting, Address.  Triples are added
    to ``graph`` in place.
    """
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            allotment = al[utils.idify(row["Name"])]
            graph.add((allotment, RDF.type, al_ont['Allotment']))
            graph.add((allotment, utils.RDFS['label'],
                       Literal("Allotment site " + row["Name"])))
            # geo info
            graph.add((allotment, utils.OS["northing"], Literal(row["Northing"])))
            graph.add((allotment, utils.OS["easting"], Literal(row["Easting"])))
            # add conversion for lat/long (easting/northing -> WGS84)
            lat_long = utils.ENtoLL84(float(row["Easting"]), float(row["Northing"]))
            graph.add((allotment, utils.GEO["long"], Literal(lat_long[0])))
            graph.add((allotment, utils.GEO["lat"], Literal(lat_long[1])))
            address = utils.idify(row["Address"])
            graph.add((allotment, utils.VCARD['hasAddress'], al["address/" + address]))
            # postcode_helper splits a trailing postcode off the address, if any
            street_address, address_postcode = utils.postcode_helper(row["Address"])
            # now add the address VCARD
            vcard = al["address/" + address]
            graph.add((vcard, RDF.type, utils.VCARD["Location"]))
            graph.add((vcard, utils.RDFS['label'],
                       Literal("Address of allotment site " + row["Name"])))
            graph.add((vcard, utils.VCARD['street-address'], Literal(street_address)))
            # only emit postcode triples when one was actually found
            if address_postcode is not None:
                graph.add((vcard, utils.VCARD['postal-code'], Literal(address_postcode)))
                graph.add((vcard, utils.POST['postcode'],
                           URIRef(utils.convertpostcodeto_osuri(address_postcode))))
def convert(graph, input_path):
    """Add one RecyclingSite (address, location, accepted materials) per row.

    Expects columns: Location, Address, Postcode, Northings, Eastings, plus
    one column per material (Cardboard, Paper, ...).  Triples are added to
    ``graph`` in place.
    """
    # maps the CSV header to the recycling facility concept scheme;
    # hoisted out of the row loop (it never changes)
    facility_map = {
        "Cardboard": "cardboard",
        "Paper": "paper",
        "Cartons": "cartons",
        "Shoes": "shoes",
        "Glass": "glass",
        "Textiles": "textiles",
        "Cans": "cans",
        "Plastic Bottles": "plastic",
        "Aerosols": "aerosols",
    }
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            rc = RECYCLING[utils.idify(row["Location"])]
            graph.add((rc, RDF.type, RECYCLING_ONT["RecyclingSite"]))
            graph.add((rc, utils.RDFS['label'],
                       Literal("Recycling Site at " + row["Location"])))
            address = utils.idify(row["Address"])
            graph.add((rc, utils.VCARD['hasAddress'], RECYCLING["address/" + address]))
            # now add the address VCARD
            vcard = RECYCLING["address/" + address]
            graph.add((vcard, RDF.type, utils.VCARD["Location"]))
            graph.add((vcard, utils.RDFS['label'],
                       Literal("Address of Recycling Site at " + row["Location"])))
            graph.add((vcard, utils.VCARD['street-address'], Literal(row["Address"])))
            graph.add((vcard, utils.VCARD['postal-code'], Literal(row["Postcode"])))
            graph.add((vcard, utils.POST['postcode'],
                       URIRef(utils.convertpostcodeto_osuri(row["Postcode"]))))
            # location information
            graph.add((rc, utils.OS["northing"], Literal(row["Northings"])))
            graph.add((rc, utils.OS["easting"], Literal(row["Eastings"])))
            # add conversion for lat/long (easting/northing -> WGS84)
            lat_long = utils.ENtoLL84(float(row["Eastings"]), float(row["Northings"]))
            graph.add((rc, utils.GEO["long"], Literal(lat_long[0])))
            graph.add((rc, utils.GEO["lat"], Literal(lat_long[1])))
            # a non-empty cell means the site accepts that material
            for header, concept in facility_map.items():
                if row[header]:
                    graph.add((rc, RECYCLING_ONT['recyclingType'],
                               RECYCLING_TYPE_ONT[concept]))
def convert(graph, input_path):
    """Emit a QB data cube of plot counts per allotment.

    First declares the refPeriod dimension, the numberOfPlots measure and the
    dataset resource, then adds one qb:Observation per CSV row whose "Plots"
    column holds an integer.  Triples are added to ``graph`` in place.
    """
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        # define time reference data ontology
        refperiod = al_ont["refPeriod"]
        graph.add((refperiod, RDF.type, utils.QB["DimensionProperty"]))
        graph.add((refperiod, utils.RDFS["label"], Literal("Reference period")))
        graph.add((refperiod, utils.RDFS["subPropertyOf"],
                   utils.SDMXDIMENSION["refPeriod"]))
        graph.add((refperiod, utils.RDFS["range"], utils.INTERVAL["Interval"]))
        graph.add((refperiod, utils.QB["concept"], utils.SDMXCONCEPT["refPeriod"]))
        # define number of plots ontology
        numberofplots = al_ont["numberOfPlots"]
        graph.add((numberofplots, RDF.type, utils.QB["MeasureProperty"]))
        graph.add((numberofplots, utils.RDFS["label"],
                   Literal("Total number of plots")))
        graph.add((numberofplots, utils.RDFS["subPropertyOf"],
                   utils.SDMXMEASURE["obsValue"]))
        graph.add((numberofplots, utils.RDFS["range"], XSD.integer))
        # add the dataset
        dataset = al_data["dataset-le1"]
        graph.add((dataset, RDF.type, utils.QB["DataSet"]))
        graph.add((dataset, utils.RDFS["label"],
                   Literal("Number of plots in allotment")))
        graph.add((dataset, utils.RDFS["comment"], Literal("xxxxx")))
        graph.add((dataset, utils.QB["structure"], al_data['data']))
        # now add the observations themselves
        for row in reader:
            if not row["Plots"]:
                continue
            # narrow try: only the integer parse may legitimately fail here.
            # The original wrapped the whole observation build, silently
            # swallowing unrelated ValueErrors.
            try:
                plots = int(row["Plots"])
            except ValueError:
                continue
            allotment = al_data[utils.idify(row["Name"])]
            allotment_plots = number_of_plots[YEAR_STRING + "/" +
                                              utils.idify(row["Name"])]
            graph.add((allotment_plots, RDF.type, utils.QB['Observation']))
            # NOTE(review): dataSet links to the al_data namespace itself, not
            # the "dataset-le1" resource declared above — confirm intended.
            graph.add((allotment_plots, utils.QB['dataSet'], URIRef(al_data)))
            graph.add((allotment_plots, al_ont["numberOfPlots"],
                       Literal(plots, datatype=XSD.integer)))
            graph.add((allotment_plots, al_ont["refPeriod"],
                       utils.QUARTER[YEAR_STRING]))
            graph.add((allotment_plots, utils.RDFS['label'],
                       Literal("{}, {}, number of plots".format(row["Name"],
                                                                YEAR_STRING))))
            graph.add((allotment_plots, al_stat['refAllotment'], allotment))
def convert(graph, input_path):
    """Add one Streetlight (address, location, lamp details) per CSV row.

    Expects columns: Feature ID, RoadName, Northing, Easting, Lamp Wattage,
    Lamp Type, Mounting Height.  Rows without a Feature ID are skipped.
    """
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            if not row["Feature ID"]:
                continue
            sl = STREETLIGHT[utils.idify(row["Feature ID"])]
            graph.add((sl, RDF.type, STREETLIGHT_ONT["Streetlight"]))
            graph.add((sl, utils.RDFS['label'],
                       Literal("Streetlight with ID " + row["Feature ID"])))
            address = utils.idify(row["Feature ID"])
            graph.add((sl, utils.VCARD['hasAddress'],
                       STREETLIGHT["address/" + address]))
            # now add the address VCARD (no postcode in this dataset)
            vcard = STREETLIGHT["address/" + address]
            graph.add((vcard, RDF.type, utils.VCARD["Location"]))
            graph.add((vcard, utils.RDFS['label'],
                       Literal("Address of streetlight with ID " + row["Feature ID"])))
            graph.add((vcard, utils.VCARD['street-address'], Literal(row["RoadName"])))
            # location information
            graph.add((sl, utils.OS["northing"], Literal(row["Northing"])))
            graph.add((sl, utils.OS["easting"], Literal(row["Easting"])))
            # add conversion for lat/long (easting/northing -> WGS84)
            lat_long = utils.ENtoLL84(float(row["Easting"]), float(row["Northing"]))
            graph.add((sl, utils.GEO["long"], Literal(lat_long[0])))
            graph.add((sl, utils.GEO["lat"], Literal(lat_long[1])))
            # street light specific stuff: cells look like "70W" — keep digits
            # only (raw strings for the regex patterns)
            if row["Lamp Wattage"]:
                watts = re.findall(r'\d+', row["Lamp Wattage"])[0]
                graph.add((sl, STREETLIGHT_ONT['wattage'], Literal(watts)))
                # NOTE(review): placement of lampType inside the wattage guard
                # is ambiguous in the whitespace-mangled source — confirm.
                graph.add((sl, STREETLIGHT_ONT['lampType'],
                           Literal(row["Lamp Type"])))
            if row["Mounting Height"]:
                height = re.findall(r'\d+', row["Mounting Height"])[0]
                graph.add((sl, STREETLIGHT_ONT['columnHeight'], Literal(height)))
def convert(graph, input_path):
    """Add one RecyclingSite (address, location, accepted materials) per row.

    Duplicate of the other recycling converter in this file; same contract.
    Triples are added to ``graph`` in place.
    """
    # header -> recycling facility concept; loop-invariant, so built once
    facility_map = {
        "Cardboard": "cardboard",
        "Paper": "paper",
        "Cartons": "cartons",
        "Shoes": "shoes",
        "Glass": "glass",
        "Textiles": "textiles",
        "Cans": "cans",
        "Plastic Bottles": "plastic",
        "Aerosols": "aerosols",
    }
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            rc = RECYCLING[utils.idify(row["Location"])]
            graph.add((rc, RDF.type, RECYCLING_ONT["RecyclingSite"]))
            graph.add((rc, utils.RDFS['label'],
                       Literal("Recycling Site at " + row["Location"])))
            address = utils.idify(row["Address"])
            graph.add((rc, utils.VCARD['hasAddress'], RECYCLING["address/" + address]))
            # now add the address VCARD
            vcard = RECYCLING["address/" + address]
            graph.add((vcard, RDF.type, utils.VCARD["Location"]))
            graph.add((vcard, utils.RDFS['label'],
                       Literal("Address of Recycling Site at " + row["Location"])))
            graph.add((vcard, utils.VCARD['street-address'], Literal(row["Address"])))
            graph.add((vcard, utils.VCARD['postal-code'], Literal(row["Postcode"])))
            graph.add((vcard, utils.POST['postcode'],
                       URIRef(utils.convertpostcodeto_osuri(row["Postcode"]))))
            # location information
            graph.add((rc, utils.OS["northing"], Literal(row["Northings"])))
            graph.add((rc, utils.OS["easting"], Literal(row["Eastings"])))
            # add conversion for lat/long (easting/northing -> WGS84)
            lat_long = utils.ENtoLL84(float(row["Eastings"]), float(row["Northings"]))
            graph.add((rc, utils.GEO["long"], Literal(lat_long[0])))
            graph.add((rc, utils.GEO["lat"], Literal(lat_long[1])))
            # a non-empty cell means the site accepts that material
            for header, concept in facility_map.items():
                if row[header]:
                    graph.add((rc, RECYCLING_ONT['recyclingType'],
                               RECYCLING_TYPE_ONT[concept]))
def convert(graph, input_path):
    """Add one Allotment (plus its address vCard) per CSV row.

    Duplicate of the other allotment converter in this file; same contract.
    Triples are added to ``graph`` in place.
    """
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            allotment = al[utils.idify(row["Name"])]
            graph.add((allotment, RDF.type, al_ont['Allotment']))
            graph.add((allotment, utils.RDFS['label'],
                       Literal("Allotment site " + row["Name"])))
            # geo info
            graph.add((allotment, utils.OS["northing"], Literal(row["Northing"])))
            graph.add((allotment, utils.OS["easting"], Literal(row["Easting"])))
            # add conversion for lat/long (easting/northing -> WGS84)
            lat_long = utils.ENtoLL84(float(row["Easting"]), float(row["Northing"]))
            graph.add((allotment, utils.GEO["long"], Literal(lat_long[0])))
            graph.add((allotment, utils.GEO["lat"], Literal(lat_long[1])))
            address = utils.idify(row["Address"])
            graph.add((allotment, utils.VCARD['hasAddress'], al["address/" + address]))
            # postcode_helper splits a trailing postcode off the address, if any
            street_address, address_postcode = utils.postcode_helper(row["Address"])
            # now add the address VCARD
            vcard = al["address/" + address]
            graph.add((vcard, RDF.type, utils.VCARD["Location"]))
            graph.add((vcard, utils.RDFS['label'],
                       Literal("Address of allotment site " + row["Name"])))
            graph.add((vcard, utils.VCARD['street-address'], Literal(street_address)))
            # only emit postcode triples when one was actually found
            if address_postcode is not None:
                graph.add((vcard, utils.VCARD['postal-code'], Literal(address_postcode)))
                graph.add((vcard, utils.POST['postcode'],
                           URIRef(utils.convertpostcodeto_osuri(address_postcode))))
def convert(graph, input_path):
    """Add one Streetlight (address, location, lamp details) per CSV row.

    Duplicate of the other streetlight converter in this file; same contract.
    Rows without a Feature ID are skipped.
    """
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        for row in reader:
            if not row["Feature ID"]:
                continue
            sl = STREETLIGHT[utils.idify(row["Feature ID"])]
            graph.add((sl, RDF.type, STREETLIGHT_ONT["Streetlight"]))
            graph.add((sl, utils.RDFS['label'],
                       Literal("Streetlight with ID " + row["Feature ID"])))
            address = utils.idify(row["Feature ID"])
            graph.add((sl, utils.VCARD['hasAddress'],
                       STREETLIGHT["address/" + address]))
            # now add the address VCARD (no postcode in this dataset)
            vcard = STREETLIGHT["address/" + address]
            graph.add((vcard, RDF.type, utils.VCARD["Location"]))
            graph.add((vcard, utils.RDFS['label'],
                       Literal("Address of streetlight with ID " + row["Feature ID"])))
            graph.add((vcard, utils.VCARD['street-address'], Literal(row["RoadName"])))
            # location information
            graph.add((sl, utils.OS["northing"], Literal(row["Northing"])))
            graph.add((sl, utils.OS["easting"], Literal(row["Easting"])))
            # add conversion for lat/long (easting/northing -> WGS84)
            lat_long = utils.ENtoLL84(float(row["Easting"]), float(row["Northing"]))
            graph.add((sl, utils.GEO["long"], Literal(lat_long[0])))
            graph.add((sl, utils.GEO["lat"], Literal(lat_long[1])))
            # street light specific stuff: cells look like "70W" — keep digits
            # only (raw strings for the regex patterns)
            if row["Lamp Wattage"]:
                watts = re.findall(r'\d+', row["Lamp Wattage"])[0]
                graph.add((sl, STREETLIGHT_ONT['wattage'], Literal(watts)))
                # NOTE(review): placement of lampType inside the wattage guard
                # is ambiguous in the whitespace-mangled source — confirm.
                graph.add((sl, STREETLIGHT_ONT['lampType'],
                           Literal(row["Lamp Type"])))
            if row["Mounting Height"]:
                height = re.findall(r'\d+', row["Mounting Height"])[0]
                graph.add((sl, STREETLIGHT_ONT['columnHeight'], Literal(height)))
def convert(graph, input_path): reader = csv.DictReader(open(input_path, mode='r')) non_existent_uris = set() rows = list(reader) row_length = len(rows) for index, row in enumerate(rows): # this takes a while so lets provide some context print "{}/{}".format(index, row_length) pa = PLANNING[utils.idify(row["REFERENCE"])] graph.add((pa, RDF.type, PLANNING_ONT["PlanningApplication"])) graph.add((pa, utils.RDFS['label'], Literal("Planning application " + row["REFERENCE"]))) # planning application specific stuff if row["APP TYPE DECODE"]: graph.add((pa, PLANNING_ONT['applicationType'], PLANNING_APPLICATION_STATUS_ONT[utils.idify(row["APP TYPE DECODE"])])) if row["APP TYPE"]: graph.add((pa, PLANNING_ONT['applicationTypeCode'], Literal(row["APP TYPE"]))) if row["DEVELOPMENT TYPE DECODE"]: graph.add((pa, PLANNING_ONT['developmentType'], PLANNING_APPLICATION_TYPE_ONT[utils.idify(row["DEVELOPMENT TYPE DECODE"])])) if row["PROPOSAL"]: graph.add((pa, PLANNING_ONT['proposal'], Literal(clean_string(row["PROPOSAL"])))) if row["VALIDATION DATE"]: validation_date = datetime.datetime.strptime( row["VALIDATION DATE"].split(" ")[0], "%d/%m/%Y", ) try: date_string = validation_date.strftime("%Y-%m-%d") graph.add((pa, PLANNING_ONT['validatedDate'], utils.DATE[date_string])) except ValueError: # This means we were unable to parse a valid date # so just don't ' this node to the graph pass if row["RECOMMENDATION DECODE"]: graph.add((pa, PLANNING_ONT['decision'], PLANNING_APPLICATION_STATUS_ONT[utils.idify(row["RECOMMENDATION DECODE"])])) if row["DATEDECISS"]: decision_date = datetime.datetime.strptime( row["DATEDECISS"].split(" ")[0], "%d/%m/%Y", ) try: date_string = decision_date.strftime("%Y-%m-%d") graph.add((pa, PLANNING_ONT['decisionDate'], utils.DATE[date_string])) except ValueError: # This means we were unable to parse a valid date # so just don't ' this node to the graph pass # planning application site pa_site = PLANNING["site/" + utils.idify(row["REFERENCE"])] graph.add((pa, 
utils.VCARD['hasAddress'], pa_site)) graph.add((pa_site, RDF.type, PLANNING_ONT['PlanningApplicationSite'])) graph.add((pa_site, utils.RDFS['label'], Literal("Planning application site for planning application " + row["REFERENCE"]))) # postcode helper used here to remove the postcode if we find it street_address, address_postcode = utils.postcode_helper(clean_string(row["LOCATION"])) graph.add((pa_site, utils.VCARD['street-address'], Literal(street_address))) graph.add((pa_site, utils.VCARD['postal-code'], Literal(row["Postcode"]))) os_postcodeuri = utils.convertpostcodeto_osuri(row["Postcode"]) graph.add((pa_site, utils.POST['postcode'], URIRef(os_postcodeuri))) # so now we are going to generate lat/long information based on the postcode centroids try: os_postcodedata = json.load(urllib2.urlopen(os_postcodeuri + ".json")) graph.add((pa_site, utils.GEO["lat"], Literal(float(os_postcodedata[os_postcodeuri][str(utils.GEO["lat"])][0]["value"])))) graph.add((pa_site, utils.GEO["long"], Literal(float(os_postcodedata[os_postcodeuri][str(utils.GEO["long"])][0]["value"])))) except urllib2.HTTPError: # print "Unable to load data from: ", os_postcodeuri non_existent_uris.add(os_postcodeuri) pass print "Unable to locate the following uri's:", non_existent_uris
def convert(graph, input_path): reader = csv.DictReader(open(input_path, mode='r')) non_existent_uris = set() rows = list(reader) row_length = len(rows) for index, row in enumerate(rows): # this takes a while so lets provide some context print "{}/{}".format(index, row_length) pa = PLANNING[utils.idify(row["REFERENCE"])] graph.add((pa, RDF.type, PLANNING_ONT["PlanningApplication"])) graph.add((pa, utils.RDFS['label'], Literal("Planning application " + row["REFERENCE"]))) # planning application specific stuff if row["APP TYPE DECODE"]: graph.add((pa, PLANNING_ONT['applicationType'], PLANNING_APPLICATION_STATUS_ONT[utils.idify( row["APP TYPE DECODE"])])) if row["APP TYPE"]: graph.add((pa, PLANNING_ONT['applicationTypeCode'], Literal(row["APP TYPE"]))) if row["DEVELOPMENT TYPE DECODE"]: graph.add((pa, PLANNING_ONT['developmentType'], PLANNING_APPLICATION_TYPE_ONT[utils.idify( row["DEVELOPMENT TYPE DECODE"])])) if row["PROPOSAL"]: graph.add((pa, PLANNING_ONT['proposal'], Literal(clean_string(row["PROPOSAL"])))) if row["VALIDATION DATE"]: validation_date = datetime.datetime.strptime( row["VALIDATION DATE"].split(" ")[0], "%d/%m/%Y", ) try: date_string = validation_date.strftime("%Y-%m-%d") graph.add((pa, PLANNING_ONT['validatedDate'], utils.DATE[date_string])) except ValueError: # This means we were unable to parse a valid date # so just don't ' this node to the graph pass if row["RECOMMENDATION DECODE"]: graph.add((pa, PLANNING_ONT['decision'], PLANNING_APPLICATION_STATUS_ONT[utils.idify( row["RECOMMENDATION DECODE"])])) if row["DATEDECISS"]: decision_date = datetime.datetime.strptime( row["DATEDECISS"].split(" ")[0], "%d/%m/%Y", ) try: date_string = decision_date.strftime("%Y-%m-%d") graph.add((pa, PLANNING_ONT['decisionDate'], utils.DATE[date_string])) except ValueError: # This means we were unable to parse a valid date # so just don't ' this node to the graph pass # planning application site pa_site = PLANNING["site/" + utils.idify(row["REFERENCE"])] graph.add((pa, 
utils.VCARD['hasAddress'], pa_site)) graph.add((pa_site, RDF.type, PLANNING_ONT['PlanningApplicationSite'])) graph.add( (pa_site, utils.RDFS['label'], Literal("Planning application site for planning application " + row["REFERENCE"]))) # postcode helper used here to remove the postcode if we find it street_address, address_postcode = utils.postcode_helper( clean_string(row["LOCATION"])) graph.add( (pa_site, utils.VCARD['street-address'], Literal(street_address))) graph.add( (pa_site, utils.VCARD['postal-code'], Literal(row["Postcode"]))) os_postcodeuri = utils.convertpostcodeto_osuri(row["Postcode"]) graph.add((pa_site, utils.POST['postcode'], URIRef(os_postcodeuri))) # so now we are going to generate lat/long information based on the postcode centroids try: os_postcodedata = json.load( urllib2.urlopen(os_postcodeuri + ".json")) graph.add((pa_site, utils.GEO["lat"], Literal( float(os_postcodedata[os_postcodeuri][str( utils.GEO["lat"])][0]["value"])))) graph.add((pa_site, utils.GEO["long"], Literal( float(os_postcodedata[os_postcodeuri][str( utils.GEO["long"])][0]["value"])))) except urllib2.HTTPError: # print "Unable to load data from: ", os_postcodeuri non_existent_uris.add(os_postcodeuri) pass print "Unable to locate the following uri's:", non_existent_uris
def convert(graph, input_path):
    """Emit a QB data cube of plot counts per allotment.

    Duplicate of the other plot-count converter in this file; same contract.
    Declares the refPeriod dimension, the numberOfPlots measure and the
    dataset, then one qb:Observation per row with a numeric "Plots" value.
    """
    # "with" guarantees the CSV handle is closed (original leaked it).
    with open(input_path, mode='r') as csv_file:
        reader = csv.DictReader(csv_file)
        # define time reference data ontology
        refperiod = al_ont["refPeriod"]
        graph.add((refperiod, RDF.type, utils.QB["DimensionProperty"]))
        graph.add((refperiod, utils.RDFS["label"], Literal("Reference period")))
        graph.add((refperiod, utils.RDFS["subPropertyOf"],
                   utils.SDMXDIMENSION["refPeriod"]))
        graph.add((refperiod, utils.RDFS["range"], utils.INTERVAL["Interval"]))
        graph.add((refperiod, utils.QB["concept"], utils.SDMXCONCEPT["refPeriod"]))
        # define number of plots ontology
        numberofplots = al_ont["numberOfPlots"]
        graph.add((numberofplots, RDF.type, utils.QB["MeasureProperty"]))
        graph.add((numberofplots, utils.RDFS["label"],
                   Literal("Total number of plots")))
        graph.add((numberofplots, utils.RDFS["subPropertyOf"],
                   utils.SDMXMEASURE["obsValue"]))
        graph.add((numberofplots, utils.RDFS["range"], XSD.integer))
        # add the dataset
        dataset = al_data["dataset-le1"]
        graph.add((dataset, RDF.type, utils.QB["DataSet"]))
        graph.add((dataset, utils.RDFS["label"],
                   Literal("Number of plots in allotment")))
        graph.add((dataset, utils.RDFS["comment"], Literal("xxxxx")))
        graph.add((dataset, utils.QB["structure"], al_data['data']))
        # now add the observations themselves
        for row in reader:
            if not row["Plots"]:
                continue
            # narrow try: only the integer parse may legitimately fail.  The
            # original wrapped the whole observation build, silently
            # swallowing unrelated ValueErrors.
            try:
                plots = int(row["Plots"])
            except ValueError:
                continue
            allotment = al_data[utils.idify(row["Name"])]
            allotment_plots = number_of_plots[YEAR_STRING + "/" +
                                              utils.idify(row["Name"])]
            graph.add((allotment_plots, RDF.type, utils.QB['Observation']))
            # NOTE(review): dataSet links to the al_data namespace itself, not
            # the "dataset-le1" resource declared above — confirm intended.
            graph.add((allotment_plots, utils.QB['dataSet'], URIRef(al_data)))
            graph.add((allotment_plots, al_ont["numberOfPlots"],
                       Literal(plots, datatype=XSD.integer)))
            graph.add((allotment_plots, al_ont["refPeriod"],
                       utils.QUARTER[YEAR_STRING]))
            graph.add((allotment_plots, utils.RDFS['label'],
                       Literal("{}, {}, number of plots".format(row["Name"],
                                                                YEAR_STRING))))
            graph.add((allotment_plots, al_stat['refAllotment'], allotment))