def test_aa_check(self):
    """Verify that vocab.add_attribute_assignment_check() installs validation
    on AttributeAssignment.assigned_property.

    Before the check is installed, assigned_property accepts a Type but
    rejects a bare string; after installation, a string property name is
    accepted only once `assigned` / `assigned_to` hold values that make the
    assignment classifiable.
    """
    # Make sure that some other test hasn't already installed the check.
    # Only AttributeError (attribute absent) is expected here; a bare
    # `except:` would hide real failures.
    try:
        del model.AttributeAssignment.set_assigned_property
    except AttributeError:
        pass
    t = model.Type()
    aa = model.AttributeAssignment()
    # First check that aa accepts a type
    aa.assigned_property = t
    # And will not accept a string
    self.assertRaises(model.DataError, aa.__setattr__, "assigned_property", "classified_as")
    # Check we can set anything to assigned / assigned_to
    aa.assigned_property = None
    aa.assigned = aa
    aa.assigned_to = aa
    self.assertEqual(aa.assigned, aa)
    self.assertEqual(aa.assigned_to, aa)
    vocab.add_attribute_assignment_check()
    # This should fail right now as can't classify as an AA
    self.assertRaises(model.DataError, aa.__setattr__, "assigned_property", "classified_as")
    aa.assigned = None
    aa.assigned_to = None
    aa.assigned = t
    aa.assigned_to = t
    aa.assigned_property = "classified_as"
    self.assertEqual(aa.assigned_property, 'classified_as')
def rewrite_output_files(r, update_filename=False, parallel=False, concurrency=4, path=None, files=None, **kwargs):
    """Rewrite serialized JSON output files using the rewriter ``r``.

    :param r: rewriter passed through to ``_rewrite_output_files``
    :param update_filename: if True, workers may rename files while rewriting
    :param parallel: process file partitions in a multiprocessing pool
    :param concurrency: number of worker processes when ``parallel`` is True
    :param path: directory to scan recursively for ``*.json`` files;
                 defaults to the module-level ``output_file_path``
    :param files: explicit iterable of files; overrides scanning ``path``
    :param kwargs: extra worker options (e.g. ``content_filter_re``)
    """
    print('Rewriting JSON output files')
    vocab.add_linked_art_boundary_check()
    vocab.add_attribute_assignment_check()
    if not files:
        if path is None:
            path = output_file_path
        files = Path(path).rglob('*.json')
    # Materialize so we can partition and report counts.
    files = list(files)
    if 'content_filter_re' in kwargs:
        print(f'rewriting with content filter: {kwargs["content_filter_re"]}')
    if parallel:
        # Use the pool as a context manager so worker processes are always
        # reaped, even if a worker raises (the original leaked the pool).
        with multiprocessing.Pool(concurrency) as pool:
            # Partition size is clamped to [10, 25000] per worker.
            partition_size = max(min(25000, len(files) // concurrency), 10)
            file_partitions = list(chunks(files, partition_size))
            args = [
                (file_partition, r, update_filename, i + 1, len(file_partitions), kwargs)
                for i, file_partition in enumerate(file_partitions)
            ]
            print(f'{len(args)} worker partitions with size {partition_size}')
            _ = pool.starmap(_rewrite_output_files, args)
    else:
        _rewrite_output_files(files, r, update_filename, 1, 1, kwargs)
def run(self, **options):
    """Run the pipeline with linked-art boundary and attribute-assignment
    checks enabled, then derive previous/post sales data from the
    'post_sale_map' service."""
    vocab.add_linked_art_boundary_check()
    vocab.add_attribute_assignment_check()
    svcs = self.get_services(**options)
    super().run(services=svcs, **options)
    self.generate_prev_post_sales_data(svcs['post_sale_map'])
def test_attrib_assign(self):
    """Read an AttributeAssignment from JSON-LD and confirm the reader
    produces an AttributeAssignment instance."""
    vocab.add_attribute_assignment_check()
    serialized = """
        {
            "id": "https://linked.art/example/activity/12",
            "type": "AttributeAssignment",
            "assigned": {
                "id": "https://linked.art/example/name/10",
                "type": "Name",
                "content": "Exhibition Specific Name"
            },
            "assigned_property": "identified_by",
            "assigned_to": {
                "id": "https://linked.art/example/object/12",
                "type": "HumanMadeObject",
                "_label": "Real Painting Name"
            }
        }
    """
    parsed = self.reader.read(serialized)
    self.assertTrue(isinstance(parsed, AttributeAssignment))
import unittest import os import os.path import hashlib import json import uuid import pprint import inspect from itertools import groupby from pathlib import Path import warnings from tests import TestSalesPipelineOutput from cromulent import vocab vocab.add_attribute_assignment_check() class PIRModelingTest_PrivateContractSales(TestSalesPipelineOutput): def test_modeling_private_contract_sales(self): ''' Test for modeling of Private Contract Sales. ''' output = self.run_pipeline('lottery') self.verify_catalogs(output) self.verify_sales(output) def verify_catalogs(self, output): ''' For this non-auction sale event, there should be a 'Private Contract Sale' event, and all physical copies of the sales catalog should be both classified as an
def run(self, **options):
    """Enable linked-art boundary and attribute-assignment checks, then
    delegate to the parent pipeline with the resolved services."""
    vocab.add_linked_art_boundary_check()
    vocab.add_attribute_assignment_check()
    super().run(services=self.get_services(**options), **options)
host = site['var']['hostname'] if port: port = ":%s" % port basedir = site['base_url'] egdir = site['var']['exampleDir'] baseUrl = "%s://%s%s%s%s/" % (scheme, host, port, basedir, egdir) contextUrl = "%s://%s%s%sns/v1/linked-art.json" % (scheme, host, port, basedir) factory.base_url = baseUrl factory.base_dir = "content/%s" % egdir factory.context_uri = contextUrl # Ensure it's still int per segment factory.auto_id_type = "int-per-segment" add_art_setter() add_attribute_assignment_check() factory.id_type_label = True # Try to load in the context only once ctxt = factory.context_json['@context'] # Profile definition fn = os.path.join(os.path.dirname(cromulent.__file__), 'data') fn += "/crm-profile.json" fh = open(fn) d = fh.read() fh.close() linked_art_profile = json.loads(d) docCache = {} docCache[factory.context_uri] = {