def model_publishing(data):
    '''
    Model the publishing of the series described by `data`.

    Builds a `vocab.Publishing` activity, optionally bounded by a timespan
    derived from `_publishing_start_year` / `_publishing_cease_year`, attaches
    any previously collected `_publishings` activities as parts, and appends
    the activity to `data['used_for']`.
    '''
    series_label = data['label']
    a_uri = data['uri'] + '-pub'
    a = vocab.Publishing(ident=a_uri, label=f'Publishing of {series_label}')

    # Years come in as strings from the source data; unparseable values are
    # silently skipped (best-effort timespan).
    start_year = data.get('_publishing_start_year')
    cease_year = data.get('_publishing_cease_year')
    if start_year or cease_year:
        ts = model.TimeSpan(ident='')
        if start_year:
            with suppress(ValueError):
                year = int(start_year)
                # xsd:dateTime requires a 'T' between the date and time parts
                ts.begin_of_the_begin = '%04d-01-01T00:00:00Z' % (year,)
        if cease_year:
            with suppress(ValueError):
                year = int(cease_year)
                # end_of_the_end is exclusive: the first instant of the year
                # after the cease year
                ts.end_of_the_end = '%04d-01-01T00:00:00Z' % (year + 1,)
        a.timespan = ts

    # Per-publisher activities modeled earlier become parts of this
    # overall publishing activity.
    for sub in data.get('_publishings', []):
        a.part = sub
    data['used_for'].append(a)
def model_publisher_group(self, record, data, seq):
    '''
    Model one publisher_group entry of `data` as a `vocab.Publishing`
    activity (carried out by the publisher's corporate body and located at
    its place, when known) and append it to `record['_publishings']`.
    '''
    publishings = record.setdefault('_publishings', [])
    title = record['label']
    corp = data.get('gaia_corp_id')
    place = data.get('gaia_geog_id')

    # Prefer the corporate-body id in the label; fall back to the sequence
    # number when the publisher is anonymous.
    if corp:
        suffix = f' by CB{corp}'
    else:
        suffix = f' by publisher #{seq}'

    activity = vocab.Publishing(
        ident=record['uri'] + f'-pub-{seq}',
        label=f'Publishing of {title}{suffix}',
    )
    if corp:
        activity.carried_out_by = model.Group(ident=self.helper.corporate_body_uri(corp))
    if place:
        activity.took_place_at = model.Place(ident=self.helper.place_uri(place))
    publishings.append(activity)
def model_imprint_group(self, record, data):
    '''
    Model the imprint_group data of an article `record`: edition and series
    notes, DOI/CODEN identifiers, website note, publisher and distributor
    activities, the containing journal issue, thesis degree note, and
    technical report number.

    Mutates `record` in place; a falsy `data` is a no-op.
    '''
    if not data:
        return
    record.setdefault('referred_to_by', [])
    record.setdefault('used_for', [])
    record.setdefault('part_of', [])
    record.setdefault('_activities', [])
    record.setdefault('_groups', [])
    record.setdefault('_places', [])
    record.setdefault('identifiers', [])

    edition = data.get('edition')
    series_number = data.get('series_number')
    doi = data.get('doi')
    coden = data.get('coden')
    website = data.get('website_address')
    publishers = _as_list(data.get('publisher'))
    distributors = _as_list(data.get('distributor'))
    # journal_info carries aata_journal_id and aata_issue_id
    journal = data.get('journal_info')
    degree = data.get('thesis_degree')
    tr = data.get('technical_report_number')

    if edition:
        record['referred_to_by'].append(
            vocab.EditionStatement(ident='', content=edition))
    if series_number:
        record['referred_to_by'].append(
            vocab.Note(ident='', content=series_number))  # TODO: classify this Note
    if doi:
        record['identifiers'].append(
            vocab.DoiIdentifier(ident='', content=doi))
    if coden:
        record['identifiers'].append(
            vocab.CodenIdentifier(ident='', content=coden))
    if website:
        record['referred_to_by'].append(
            vocab.Note(ident='', content=website))

    # Publisher and distributor entries have the same shape; model both
    # through a single helper instead of two near-identical loops.
    self._model_imprint_activities(
        record, publishers, vocab.Publishing, 'pub', 'Publishing', 'publisher_location')
    self._model_imprint_activities(
        record, distributors, vocab.Distributing, 'dist', 'Distribution', 'distributor_location')

    if journal:
        journal_id = journal.get('aata_journal_id')
        issue_id = journal.get('aata_issue_id')
        issue_uri = self.helper.issue_uri(journal_id, issue_id)
        issue = vocab.IssueText(ident=issue_uri)
        record['part_of'].append(add_crom_data({'uri': issue_uri}, issue))

    if degree:
        record['referred_to_by'].append(
            vocab.Note(ident='', content=degree))
    if tr:
        record['identifiers'].append(model.Identifier(
            ident='', content=tr))  # TODO: classify this Identifier

def _model_imprint_activities(self, record, parties, activity_cls, uri_part, label_prefix, loc_key):
    '''
    Model each party dict in `parties` as an `activity_cls` activity, carried
    out by the party's corporate body (gaia_corp_id) and taking place at its
    location (`loc_key`.gaia_geog_id) when present. Appends each activity to
    record['used_for'] and the referenced groups/places to record['_groups']
    and record['_places'].
    '''
    article_label = record['label']
    for i, party in enumerate(parties):
        corp_id = party.get('gaia_corp_id')
        geog_id = party.get(loc_key, {}).get('gaia_geog_id')
        a_uri = record['uri'] + f'-{uri_part}-{i}'
        a = activity_cls(ident=a_uri, label=f'{label_prefix} of {article_label}')
        if corp_id:
            g = model.Group(ident=self.helper.corporate_body_uri(corp_id))
            a.carried_out_by = g
            record['_groups'].append(add_crom_data({}, g))
        if geog_id:
            p = model.Place(ident=self.helper.place_uri(geog_id))
            a.took_place_at = p
            record['_places'].append(add_crom_data({}, p))
        record['used_for'].append(a)
def model_issue_group(self, record, data, seq):
    '''
    Model one issue_group entry of a journal `record` as an `IssueText`
    linguistic object (with identifiers, an optional publishing activity
    with a timespan, and notes), and append its data to record['^part'].
    '''
    record.setdefault('^part', [])
    issue_id = data['issue_id']
    title = data.get('title')
    title_translated = data.get('title_translated')
    # date carries display_date and sort_year
    date = data.get('date')
    volume = data.get('volume')
    number = data.get('number')
    note = data.get('note')

    journal_label = record['label']
    issue_label = f'Issue of {journal_label}'
    if title:
        issue_label = f'{journal_label}: “{title}”'
        if volume and number:
            issue_label = f'{issue_label} (v. {volume}, n. {number})'
    elif volume and number:
        issue_label = f'{journal_label} (v. {volume}, n. {number})'

    jid = record['record_desc_group']['record_id']
    issue = {
        'uri': self.helper.issue_uri(jid, issue_id),
        'label': issue_label,
        'object_type': vocab.IssueText,
        'identifiers': [self.helper.gci_number_id(issue_id)],
        'referred_to_by': [],
        'used_for': [],
    }
    if title:
        issue['identifiers'].append(
            vocab.PrimaryName(ident='', content=title))
    if title_translated:
        issue['identifiers'].append(
            vocab.TranslatedTitle(ident='', content=title_translated))

    if date:
        display_date = date.get('display_date')
        sort_year = date.get('sort_year')
        if display_date or sort_year:
            a_uri = issue['uri'] + '-pub'
            a = vocab.Publishing(ident=a_uri, label=f'Publishing of {issue_label}')
            ts = model.TimeSpan(ident='')
            if display_date:
                ts._label = display_date
                ts.identified_by = vocab.DisplayName(ident='', content=display_date)
            if sort_year:
                # Only an unparseable year is expected here; a bare except
                # would also hide real bugs.
                try:
                    year = int(sort_year)
                    # xsd:dateTime requires a 'T' separator; the end bound is
                    # exclusive (first instant of the following year).
                    ts.begin_of_the_begin = '%04d-01-01T00:00:00Z' % (year,)
                    ts.end_of_the_end = '%04d-01-01T00:00:00Z' % (year + 1,)
                except ValueError:
                    pass
            a.timespan = ts
            issue['used_for'].append(a)

    # TODO: model volume and number

    if note:
        issue['referred_to_by'].append(vocab.Note(ident='', content=note))

    mlalo = MakeLinkedArtLinguisticObject()
    mlalo(issue)
    i = get_crom_object(issue)
    for a in issue.get('used_for', []):
        i.used_for = a
    record['^part'].append(issue)