def set_sig_claim(item, qid, years, permalink):
    print(f"Got {len(years)} year(s) claim for restoration")

    for year in years:
        print(f"Adding year: {year}")

        if year["type"] == "range":
            qualifiers = [
                item.get_claim(Props.START_TIME, WbTime(year=int(year["value"][0]))),
                item.get_claim(Props.END_TIME, WbTime(year=int(year["value"][1])))
            ]
        elif year["type"] in ["century", "decade", "year"]:
            qualifiers = [
                item.get_claim(Props.POINT_IN_TIME, convert_year(year))
            ]

            if all(["start" in year, "end" in year]):
                qualifiers.extend([
                    item.get_claim(Props.START_TIME, WbTime(year=int(year["start"]))),
                    item.get_claim(Props.END_TIME, WbTime(year=int(year["end"])))
                ])
        else:
            print("Invalid year type: " + year["type"])
            continue

        item.add_item_claim(Props.SIG_EVENT,
                            qid,
                            qualifiers=qualifiers,
                            references=get_ref(item, permalink))
def convert_year(year):
    ytype = year["type"]
    val = int(year["value"])

    if ytype == "decade":
        return WbTime(year=val, precision="decade")
    elif ytype == "century":
        return WbTime(year=val, precision="century")
    else:
        return WbTime(year=val)
def partial_date_to_wbtime(date):
    print(f"Parsing partial date to wbtime {date}")

    if len(date) == 4:
        return WbTime(year=int(date))
    elif len(date) == 7:
        year, month = date.split("-")
        return WbTime(year=int(year), month=int(month))
    elif len(date) == 10:
        year, month, day = date.split("-")
        return WbTime(year=int(year), month=int(month), day=int(day))
    else:
        raise TypeError(f"Invalid date to parse: {date}")
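# A minimal usage sketch, not part of the original module; it assumes only
# pywikibot and the function above. The string length selects the precision
# (year, year-month, or full date); anything else raises TypeError.
def _demo_partial_dates():
    for sample in ("1970", "1970-05", "1970-05-12"):
        print(sample, "->", partial_date_to_wbtime(sample))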
def get_ref(job):
    return [
        job.item.get_url_claim(Props.REF_URL, job.data["url"]),
        job.item.get_claim(Props.RETRIEVED, WbTime(year=2021, month=11, day=15)),
        job.item.get_item_claim(Props.LANGUAGE_WORK, Items.DUTCH)
    ]
def get_refs(item, pid):
    url = f"https://pointer.kro-ncrv.nl/het-slavernijverleden-van-suriname-op-de-kaart-gezet#/plantage/{pid}"

    return [
        item.get_url_claim(Props.REF_URL, url),
        item.get_claim(Props.RETRIEVED, WbTime(year=2021, month=2, day=20)),
        item.get_item_claim(Props.LANGUAGE_WORK, Items.DUTCH)
    ]
def set_year_claim(item, prop, years, permalink):
    print(f"Got {len(years)} year(s) claim for {prop}")

    for year in years:
        print(f"Adding year: {year}")

        if year["type"] == "range":
            item.add_item_claim(
                prop,
                "novalue",
                qualifiers=[
                    item.get_claim(Props.START_TIME, WbTime(year=int(year["value"][0]))),
                    item.get_claim(Props.END_TIME, WbTime(year=int(year["value"][1])))
                ],
                references=get_ref(item, permalink))
        elif year["type"] in ["century", "decade"]:
            if all(["start" in year, "end" in year]):
                qualifiers = [
                    item.get_claim(Props.START_TIME, WbTime(year=int(year["start"]))),
                    item.get_claim(Props.END_TIME, WbTime(year=int(year["end"])))
                ]
            else:
                qualifiers = None

            item.add_time_claim(prop,
                                convert_year(year),
                                qualifiers=qualifiers,
                                references=get_ref(item, permalink))
        elif year["type"] == "circa":
            item.add_time_claim(
                prop,
                convert_year(year),
                qualifiers=[item.get_item_claim(Props.PRECISION, Items.CIRCA)],
                references=get_ref(item, permalink))
        elif year["type"] == "year":
            item.add_time_claim(prop,
                                convert_year(year),
                                references=get_ref(item, permalink))
        else:
            print("Invalid year type: " + year["type"])
            continue
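# Shapes of the year dicts that set_year_claim, set_sig_claim and
# convert_year consume, as implied by the branches above (values are
# illustrative; the parser that produces these dicts is not shown here):
#   {"type": "year",    "value": "1910"}
#   {"type": "circa",   "value": "1910"}
#   {"type": "decade",  "value": "1950", "start": "1950", "end": "1959"}
#   {"type": "century", "value": "1900", "start": "1901", "end": "2000"}
#   {"type": "range",   "value": ["1940", "1945"]}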
def get_refs(item, url):
    return [
        item.get_item_claim(Props.STATED_IN, Items.PUBLIC_ART_IN_NIJMEGEN),
        item.get_url_claim(Props.REF_URL, url),
        item.get_claim(Props.RETRIEVED, WbTime(year=2021, month=1, day=31)),
        item.get_item_claim(Props.LANGUAGE_WORK, Items.DUTCH)
    ]
def set_target(self, snak, value):
    if value in ('somevalue', 'novalue'):
        snak.setSnakType(value)
        return True

    if snak.type == 'wikibase-item':
        snak.setTarget(pywikibot.ItemPage(self.repo, value))
        return True
    elif snak.type == 'wikibase-property':
        snak.setTarget(pywikibot.PropertyPage(self.repo, value))
        return True
    elif snak.type == 'quantity':
        match = self.quantityR.fullmatch(value)
        if match:
            amount, error, unit = match.groups()
        else:
            match = self.quantity_oldR.fullmatch(value)
            if match:
                amount, lower, upper, unit = match.groups()
                error = upper, lower  # it *is* the other way around
        if match:
            if unit:
                unit = pywikibot.ItemPage(self.repo, 'Q' + unit)
            quantity = WbQuantity(amount, unit, error, site=self.repo)
            snak.setTarget(quantity)
            return True
    elif snak.type == 'time':
        iso, _, prec = value.rpartition('/')
        if iso:
            time = WbTime.fromTimestr(iso, precision=int(prec), site=self.repo)
            snak.setTarget(time)
            return True
    elif snak.type in ('string', 'external-id', 'url', 'math'):
        if value.startswith('"') and value.endswith('"'):
            snak.setTarget(value[1:-1])
            return True
    elif snak.type == 'commonsMedia':
        if value.startswith('"') and value.endswith('"'):
            repo = self.repo.image_repository()
            snak.setTarget(pywikibot.FilePage(repo, value[1:-1]))
            return True
    # elif snak.type in ('geo-shape', 'tabular-data'):
    elif snak.type == 'monolingualtext':
        lang, _, text = value.partition(':')
        if text and text.startswith('"') and text.endswith('"'):
            monotext = WbMonolingualText(text[1:-1], lang)
            snak.setTarget(monotext)
            return True
    elif snak.type == 'globe-coordinate':
        match = self.globeR.fullmatch(value)
        if match:
            coord = Coordinate(*map(float, match.groups()), site=self.repo)
            snak.setTarget(coord)
            return True
    return False
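# Value formats the branches above accept, read off the code itself (the
# quantity and globe-coordinate formats are driven by regexes defined
# elsewhere and are not spelled out here):
#   any type:        'somevalue' or 'novalue'    (sets the snak type instead)
#   time:            '+2021-01-01T00:00:00Z/11'  (ISO timestamp, '/', precision; 11 = day)
#   string/url/...:  '"literal text"'            (must be double-quoted)
#   monolingualtext: 'nl:"tekst"'                (language code, ':', quoted text)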
def string_to_wddate(isotimestamp: str) -> WbTime:
    """Create a Wikidata-compatible Wikibase date from an ISO 8601 timestamp"""
    date = WbTime.fromTimestr(isotimestamp, calendarmodel=Settings.calendarmodel)
    date.hour = 0
    date.minute = 0
    date.second = 0
    date.precision = WbTime.PRECISION["day"]
    return date
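# Usage sketch (illustrative; assumes Settings.calendarmodel points at the
# usual proleptic Gregorian calendar model). Because the time of day is
# zeroed and the precision forced to day, any two timestamps from the same
# day normalize to equal WbTime values:
#   string_to_wddate("2021-11-15T09:30:00Z") == string_to_wddate("2021-11-15T23:59:59Z")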
def try_get_year_from_property_timestamp(entity_dict: Dict, property_id: str,
                                         oab_type: str) -> int:
    """Function for extracting the year from an inception timestamp

    Args:
        entity_dict: JSON response from wikidata
        property_id: Wikidata property id, e.g. 'P18' stands for the property image
        oab_type: openArtBrowser type, e.g. 'artwork' or 'motif', important for logging

    Returns:
        Year from timestamp
    """
    timestr = entity_dict[CLAIMS][property_id][0][MAINSNAK][DATAVALUE][VALUE][TIME]
    return WbTime.fromTimestr(timestr).year
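# Illustrative input, assuming the uppercase constants above are the plain
# lowercase Wikidata JSON keys ("claims", "mainsnak", "datavalue", "value",
# "time"); this mirrors standard Wikidata entity JSON:
_example_entity = {
    "claims": {
        "P571": [{  # P571 = inception
            "mainsnak": {"datavalue": {"value": {
                "time": "+1503-01-01T00:00:00Z"
            }}}
        }]
    }
}
# try_get_year_from_property_timestamp(_example_entity, "P571", "artwork") -> 1503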
def add_party_data(row):
    print("----" * 20)
    print()
    print(row)

    title = row["title"]
    qid = row["qid"]

    if skiplist.has(qid):
        print("In skiplist, skipping")
        return

    item = WikidataItem(qid)

    if Props.NR_OF_SEATS in item.get_claims():
        print("Got seats already, skipping party")
        return

    for key, val in row.items():
        # Only the digit-keyed columns hold seat counts (year -> seats)
        if not key.isdigit():
            continue

        year = int(key)

        if val == "":
            continue

        seats = int(val)
        print(f"{title} ({qid}) had {seats} seats in {year}")

        item.add_quantity_claim(
            Props.NR_OF_SEATS,
            seats,
            qualifiers=[
                item.get_item_claim(Props.LEGISLATIVE_BODY, Items.NL_LOWER_HOUSE),
                item.get_claim(Props.START_TIME, WbTime(year=year))
            ],
            references=[
                item.get_item_claim(Props.IMPORTED_FROM, Items.WIKIPEDIA_NL),
                item.get_url_claim(Props.WM_IMPORT_URL, WP_PERMALINK)
            ])

    skiplist.add(qid)
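# Illustrative row as add_party_data expects it (values are made up): the
# digit-keyed columns map an election year to a seat count, and an empty
# string means there is no figure for that year, so no claim is added.
# row = {"title": "Example Party", "qid": "Q123456", "1994": "12", "1998": ""}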
def add_architect(job):
    data = job.data

    if not data["arch1_qid"]:
        print("No architect, skipping")
        return

    claims = job.item.get_claims()

    if Props.ARCHITECT in claims:
        print("Already has an architect, skipping")
        return

    # Add the actual data
    for index in ["1", "2", "3"]:
        prefix = f"arch{index}"

        if data[f"{prefix}_qid"] == "":
            continue

        qid = data[f"{prefix}_qid"]
        qualifiers = []

        if data[f"{prefix}_role_qid"]:
            qualifiers.append(
                job.item.get_item_claim(Props.OBJECT_HAS_ROLE,
                                        data[f"{prefix}_role_qid"]))

        if data[f"{prefix}_role_date"]:
            date = data[f"{prefix}_role_date"]
            yearstr = YEAR.match(date)

            if yearstr:
                year = int(yearstr[0])
                qualifiers.append(
                    job.item.get_claim(Props.POINT_IN_TIME, WbTime(year=year)))
            else:
                print(f"Unparsable year: {date}")

        job.item.add_item_claim(Props.ARCHITECT,
                                qid,
                                qualifiers=qualifiers,
                                references=get_refs(job.item, data["pageid"]))
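# Columns add_architect reads per architect slot, as implied by the loop
# above: arch{n}_qid, arch{n}_role_qid and arch{n}_role_date for n in 1..3.
# YEAR is assumed to be a compiled regex (defined elsewhere) whose match [0]
# yields a parseable year from the free-form date string.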
def wbtime_now():
    now = datetime.now()
    return WbTime(year=now.year, month=now.month, day=now.day)
def add_item(data):
    print("----" * 20)
    print()
    print(data)

    title = data["title"]
    url = data["url"]
    kid = data["id"]

    # if kid != "50":
    #     return

    print(f"Handling {title}")

    if skiplist.has(kid):
        print("In skiplist, skipping")
        return

    creator = data["creator"]

    if creator == "":
        desc_nl = "kunstwerk in de openbare ruimte te Nijmegen"
        desc_en = "public artwork in Nijmegen, the Netherlands"
    elif creator == "Onbekend":
        desc_nl = "kunstwerk in de openbare ruimte van een onbekende maker te Nijmegen"
        desc_en = "public artwork by an unknown artist in Nijmegen, the Netherlands"
    else:
        desc_nl = f"kunstwerk van {creator} in de openbare ruimte te Nijmegen"
        desc_en = f"public artwork by {creator} in Nijmegen, the Netherlands"

    item = WikidataItem(
        summary=f"public artwork '{title}' in Nijmegen, the Netherlands",
        labels={"nl": title}
    )

    item.edit_descriptions({
        "de": "Kunst im öffentlichen Raum in Nijmegen (Niederlande)",
        "en": desc_en,
        "es": "arte público en Nijmegen (Holanda)",
        "fr": "art public à Nimègue (Pays-Bas)",
        "nl": desc_nl
    })

    item.edit_aliases({"en": title})

    # Basics
    item.add_item_claim(Props.INSTANCE_OF, Items.SCULPTURE)
    item.add_item_claim(Props.COUNTRY, Items.NETHERLANDS)
    item.add_item_claim(Props.LOCATED_IN_ADMIN, Items.NIJMEGEN_MUNIP)
    item.add_item_claim(Props.GENRE, Items.PUBLIC_ART)

    # Actual data
    coord = [data["lat"], data["lon"]]
    item.add_coordinate(Props.COORDINATES, coord, references=get_refs(item, url))

    item.add_string_claim(
        Props.INVENTORY_NR,
        kid,
        qualifiers=[
            item.get_item_claim(Props.COLLECTION, Items.PUBLIC_ART_IN_NIJMEGEN)
        ],
        references=get_refs(item, url))

    item.add_string_claim(
        Props.DESCRIBED_AT_URL,
        url,
        qualifiers=[
            item.get_item_claim(Props.LANGUAGE_WORK, Items.DUTCH)
        ])

    if data["year"] != "":
        year = int(data["year"])
        item.add_time_claim(Props.INCEPTION,
                            WbTime(year=year),
                            references=get_refs(item, url))

    if data["creator_qid"] != "":
        item.add_item_claim(Props.CREATOR,
                            data["creator_qid"],
                            references=get_refs(item, url))
    elif data["creator"] == "Onbekend":
        item.add_item_claim(Props.CREATOR,
                            "somevalue",
                            references=get_refs(item, url))

    if data["owner"] == "gemeente":
        item.add_item_claim(Props.COLLECTION,
                            Items.NIJMEGEN_MUNIP,
                            references=get_refs(item, url))
    elif data["owner"] == "particulier":
        item.add_item_claim(Props.COLLECTION,
                            Items.PRIVATE_COLLECTION,
                            references=get_refs(item, url))

    if data["location_clean"] != "":
        item.add_monoling_claim(Props.STREET_ADDRESS,
                                data["location_clean"],
                                "nl",
                                references=get_refs(item, url))

    skiplist.add(kid)
def try_get_significant_events(
        result: Dict, oab_type: Optional[str] = SIGNIFICANT_EVENT) -> List[Dict]:
    """Maps the wikidata response for significant events to a list of dicts
    which is appended to an object

    Args:
        result: wikidata response
        oab_type: OpenArtBrowser type. Defaults to SIGNIFICANT_EVENT.

    Returns:
        List of JSON objects which represent significant events
    """
    significant_events = []
    qid = result[ID]

    for event in result[CLAIMS][PROPERTY_NAME_TO_PROPERTY_ID[SIGNIFICANT_EVENT]]:
        event_dict = {LABEL[SINGULAR]: event[MAINSNAK][DATAVALUE][VALUE][ID]}

        for qualifiers in event[QUALIFIERS].values():
            datatype = qualifiers[0][DATATYPE]
            property_id = qualifiers[0][PROPERTY]
            # Get the property name from the dict; if the name is not in the
            # dict, ignore it and take the id
            property = PROPERTY_ID_TO_PROPERTY_NAME.get(property_id, property_id)

            if datatype == TIME:
                event_dict.update({
                    property:
                    WbTime.fromTimestr(qualifiers[0][DATAVALUE][VALUE][TIME]).year
                })
            elif datatype == WIKIBASE_ITEM:
                event_dict.update({
                    property:
                    list(
                        map(
                            lambda qualifier: qualifier[DATAVALUE][VALUE][ID],
                            qualifiers,
                        ))
                })
            elif datatype == QUANTITY:
                event_dict.update(
                    {property: float(qualifiers[0][DATAVALUE][VALUE][AMOUNT])})
                event_dict.update({
                    f"{property}_{UNIT}":
                    qualifiers[0][DATAVALUE][VALUE].get(UNIT, "").replace(
                        WIKIDATA_ENTITY_URL, "")
                })
            elif datatype in [STRING, URL]:
                event_dict.update({property: qualifiers[0][DATAVALUE][VALUE]})
            elif datatype == MONOLINGUALTEXT:
                event_dict.update(
                    {property: qualifiers[0][DATAVALUE][VALUE][TEXT]})
            elif datatype == COMMONS_MEDIA:
                logger.error(
                    f"commonsMedia type not supported in significant events on item {qid}"
                )
            else:
                logger.error(f"Unknown datatype: {datatype} on item {qid}")

        event_dict.update({TYPE: SIGNIFICANT_EVENT})
        significant_events.append(event_dict)

    return significant_events
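# Illustrative shape of one emitted event dict (the actual key names depend
# on LABEL, TYPE and the PROPERTY_ID_TO_PROPERTY_NAME mapping, all defined
# elsewhere; unmapped qualifier properties keep their raw P-id as key):
# {"label": "Q123", "start_time": 1888, "P1234": ["Q456"],
#  "type": "significant_event"}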
def create_new():
    items = Knead(PATH + "/data/reliwiki/new-churches.csv", has_header=True).data()
    CITY = "Amsterdam"

    for church in items:
        print()
        print("Creating new church", church)
        pageid = church["pageid"]

        # Last, final check that this church doesn't exist yet
        if claim_exists(Props.RELIWIKI, f'"{pageid}"'):
            print("This Reliwiki ID exists, skipping")
            continue

        name = church["name"]
        item = WikidataItem(
            summary=f"Creating new item for Dutch church with name {name}",
            labels={
                "en": name,
                "nl": name
            },
            descriptions={
                "de": f"Kirche in {CITY} (Niederlande)",
                "en": f"church in {CITY}, the Netherlands",
                "es": f"iglesia en {CITY} (Holanda)",
                "fr": f"Église d'{CITY} (Pays-Bas)",
                "nl": f"kerk in {CITY}"
            })

        item.add_item_claim(Props.INSTANCE_OF, Items.CHURCH_BUILDING)
        item.add_string_claim(Props.RELIWIKI, pageid)
        item.add_item_claim(Props.COUNTRY, Items.NETHERLANDS)
        item.add_item_claim(Props.LOCATED_IN, church["admin_qid"])

        if church["sonneveld"] != "":
            item.add_string_claim(Props.SONNEVELD,
                                  church["sonneveld"],
                                  references=get_refs(item, pageid))

        if church["coordinates"] != "":
            coord = church["coordinates"].split(",")
            item.add_coordinate(Props.COORDINATES,
                                coord,
                                references=get_refs(item, pageid))

        if church["zipcode"] != "":
            item.add_string_claim(Props.ZIP,
                                  church["zipcode"],
                                  references=get_refs(item, pageid))

        if church["address"] != "":
            item.add_monoling_claim(Props.STREET_ADDRESS,
                                    church["address"],
                                    "nl",
                                    references=get_refs(item, pageid))

        if church["denomination_qid"] != "":
            item.add_item_claim(Props.RELIGION,
                                church["denomination_qid"],
                                references=get_refs(item, pageid))

        if church["year_use"] != "":
            if "s" in church["year_use"]:
                # A value like "1920s" becomes a decade-precision date
                decade = int(church["year_use"].replace("s", ""))
                time = WbTime(year=decade, precision="decade")
            else:
                time = WbTime(year=int(church["year_use"]))

            item.add_time_claim(Props.INCEPTION,
                                time,
                                references=get_refs(item, pageid))

        print()
        break  # only process the first new church per run
def run_bot():
    DATA_PATH = PATH / "data" / "vbvd"
    bot = CreateBot("vbvd",
                    run_once=False,
                    datapath=str(DATA_PATH / "items2.json"),
                    key="title")

    for job in bot.iterate():
        title = job.data["title"]
        creator = job.data["artist_label"]
        year = job.data["year"]

        summary = f"artwork '{title}' in Museum van Bommel van Dam in Venlo, the Netherlands"
        labels = {"nl": title}
        descriptions = {
            "de": f"Kunstwerk von {creator}",
            "en": f"work of art by {creator}",
            "es": f"obra de arte de {creator}",
            "fr": f"œuvre d’art de {creator}",
            "nl": f"kunstwerk van {creator}"
        }

        if year:
            descriptions["en"] = f"{year} " + descriptions["en"]
            descriptions["nl"] = descriptions["nl"] + f" uit {year}"

        aliases = {"en": [title]}
        job.create_item(summary, labels, descriptions, aliases)

        # Basics
        job.item.add_item_claim(Props.INSTANCE_OF, Items.WORK_OF_ART)
        job.item.add_item_claim(Props.LOCATION, Items.MUS_BOMMELVDAM)

        job.item.add_string_claim(Props.INVENTORY_NR,
                                  job.data["inventory_nr"],
                                  qualifiers=[
                                      job.item.get_item_claim(
                                          Props.COLLECTION, Items.MUS_BOMMELVDAM)
                                  ],
                                  references=get_ref(job))

        job.item.add_url_claim(Props.DESCRIBED_AT_URL,
                               job.data["url"],
                               qualifiers=[
                                   job.item.get_item_claim(
                                       Props.LANGUAGE_WORK, Items.DUTCH)
                               ])

        if job.data["artist_qid"]:
            job.item.add_item_claim(Props.CREATOR,
                                    job.data["artist_qid"],
                                    references=get_ref(job))

        if job.data["year"]:
            job.item.add_time_claim(Props.INCEPTION,
                                    WbTime(year=job.data["year"]),
                                    references=get_ref(job))

        if job.data["collection_qid"]:
            job.item.add_item_claim(Props.COLLECTION,
                                    job.data["collection_qid"],
                                    references=get_ref(job))
def _set_target(self, snak, value):
    if value in ('somevalue', 'novalue'):
        snak.setSnakType(value)
        return True

    def invalid_report():
        pywikibot.warning('Invalid value "{}" for {} datatype'.format(
            value, snak.type))

    if snak.type in self.entity_types:
        target = self.parse_entity(value)
        if target is None:
            pywikibot.warning('"LAST" magic word used without "CREATE"')
        else:
            snak.setTarget(target)
            return True
    elif snak.type == 'quantity':
        match = self.quantity_errR.fullmatch(value)
        if match:
            amount, error, unit = match.groups()
        else:
            match = self.quantity_boundsR.fullmatch(value)
            if match:
                groups = list(match.groups())
                unit = groups.pop()
                amount, lower, upper = map(Decimal, groups)
                if lower > upper:
                    error = amount - lower, upper - amount
                else:
                    error = upper - amount, amount - lower
        if match:
            if unit:
                unit = pywikibot.ItemPage(self.repo, 'Q' + unit)
            quantity = WbQuantity(amount, unit, error, site=self.repo)
            snak.setTarget(quantity)
            return True
        else:
            invalid_report()
    elif snak.type == 'time':
        iso, _, prec = value.rpartition('/')
        if iso:
            time = WbTime.fromTimestr(iso, precision=int(prec), site=self.repo)
            snak.setTarget(time)
            return True
        else:
            invalid_report()
    elif snak.type in ('string', 'external-id', 'url', 'math'):
        literal = self.valid_text_literal(value)
        if literal:
            snak.setTarget(literal)
            return True
        else:
            invalid_report()
    elif snak.type == 'commonsMedia':
        literal = self.valid_text_literal(value)
        if literal:
            image_repo = self.repo.image_repository()
            snak.setTarget(pywikibot.FilePage(image_repo, literal))
            return True
        else:
            invalid_report()
    # todo: elif snak.type in ('geo-shape', 'tabular-data'):
    elif snak.type == 'monolingualtext':
        lang, _, text = value.partition(':')
        literal = self.valid_text_literal(text)
        if literal:
            monotext = WbMonolingualText(literal, lang)
            snak.setTarget(monotext)
            return True
        else:
            invalid_report()
    elif snak.type == 'globe-coordinate':
        match = self.globeR.fullmatch(value)
        if match:
            coord = Coordinate(
                *map(float, match.groups()),
                precision=1e-4,  # hardcoded as in claimit.py
                site=self.repo)
            snak.setTarget(coord)
            return True
        else:
            invalid_report()
    else:
        pywikibot.warning('"{}" datatype is not supported yet'.format(
            snak.type))
    return False
def run_bot():
    sys.exit("No, fix your code first")

    DATA_PATH = PATH / "data" / "vbvd"
    bot = CreateBot("vbvd2",
                    datapath=str(DATA_PATH / "export-met-urls.csv"),
                    key="inventory",
                    required_fields=["inventory", "title", "url"],
                    empty_check=is_empty)

    for job in bot.iterate():
        title = job.data["title"]
        inventory_nr = job.data["inventory"]

        # If the job has a qid, check if we want to append,
        # otherwise skip because this item already exists
        if not is_empty(job.data["item_qid"]) and is_empty(job.data["append"]):
            job.abort(
                f"Skipping {inventory_nr}/{title}, item already is on Wikidata")
            continue

        summary = f"Adding new artwork '{title}' in Museum van Bommel van Dam in Venlo, the Netherlands"
        labels = {"nl": title}
        descriptions = {}

        # Check for an empty or non-existing creator
        if is_empty(job.data["creator_name"]):
            creator_name = "een onbekende kunstenaar"
            creator_name_en = "an unknown artist"
        else:
            creator_name = job.data["creator_name"]
            creator_name_en = creator_name

        if is_empty(job.data["type"]):
            descriptions["nl"] = f"kunstwerk van {creator_name}"
        else:
            type_nl = job.data["type"]
            descriptions["nl"] = f"{type_nl} van {creator_name}"

        if is_empty(job.data["type_en"]):
            descriptions["en"] = f"work of art by {creator_name_en}"
        else:
            type_en = job.data["type_en"]
            descriptions["en"] = f"{type_en} by {creator_name_en}"

        if not is_empty(job.data["date"]):
            year = job.data["date"]
            descriptions["en"] = f"{year} " + descriptions["en"]
            descriptions["nl"] = descriptions["nl"] + f" uit {year}"

        # Check for a duplicate item, and if there is one, create a
        # description with the inventory number
        if has_duplicate_item(bot):
            print("Duplicate title/description, return a description with inventory suffix")
            descriptions["nl"] = f"{descriptions['nl']} (objectnummer {inventory_nr})"

        dd(descriptions)
        aliases = {"en": [title]}

        # If this is an append item, get the item instead of creating it
        if not is_empty(job.data["item_qid"]) and not is_empty(job.data["append"]):
            print("Item exists, add to it instead")
            job.set_item_by_qid(job.data["item_qid"])
        else:
            # Try to create; if we get an exception, change the description,
            # because the label/description combination is a duplicate
            try:
                job.create_item(summary, labels, descriptions, aliases)
            except Exception as e:
                print("Exception while creating", e)
                descriptions["nl"] = f"{descriptions['nl']} (objectnummer {inventory_nr})"
                print("Trying another description: %s" % descriptions["nl"])
                job.item.edit_descriptions(
                    descriptions,
                    "Giving a complex description because we have a duplicate label/description")

        # Save the URL to the Wayback Machine
        job.archive_url(job.data["url"])

        if is_empty(job.data["type_qid"]):
            job.item.add_item_claim(Props.INSTANCE_OF, Items.WORK_OF_ART)
        else:
            job.item.add_item_claim(Props.INSTANCE_OF, job.data["type_qid"])

        job.item.add_item_claim(Props.LOCATION,
                                Items.MUS_BOMMELVDAM,
                                references=get_ref(job))

        # If we have no collection, assume it's the default one
        if is_empty(job.data["collection_qid"]):
            collection_qid = "Q1994770"
        else:
            collection_qid = job.data["collection_qid"]

        job.item.add_string_claim(Props.INVENTORY_NR,
                                  inventory_nr,
                                  qualifiers=[
                                      job.item.get_item_claim(
                                          Props.COLLECTION, collection_qid)
                                  ],
                                  references=get_ref(job))

        job.item.add_url_claim(Props.DESCRIBED_AT_URL,
                               job.data["url"],
                               qualifiers=[
                                   job.item.get_item_claim(
                                       Props.LANGUAGE_WORK, Items.DUTCH)
                               ])

        if not is_empty(job.data["creator_qid"]):
            job.item.add_item_claim(Props.CREATOR,
                                    job.data["creator_qid"],
                                    references=get_ref(job))

        if not is_empty(job.data["date"]) and is_valid_year(job.data["date"]):
            job.item.add_time_claim(Props.INCEPTION,
                                    WbTime(year=int(job.data["date"])),
                                    references=get_ref(job))

        if is_empty(job.data["aquirement_qid"]):
            cause_qualifiers = []
        else:
            cause_qualifiers = [
                job.item.get_item_claim(Props.HAS_CAUSE,
                                        job.data["aquirement_qid"])
            ]

        job.item.add_item_claim(Props.COLLECTION,
                                collection_qid,
                                qualifiers=cause_qualifiers,
                                references=get_ref(job))

        if not is_empty(job.data["material_qid"]):
            job.item.add_item_claim(Props.MATERIAL_USED,
                                    job.data["material_qid"],
                                    references=get_ref(job))

        if not is_empty(job.data["height"]):
            job.item.add_quantity_claim(
                Props.HEIGHT,
                job.data["height"],
                unit="http://www.wikidata.org/entity/Q174728",  # Q174728 = centimetre
                references=get_ref(job))