def test_save_context(self):
    graph = set()
    ident_uri = 'http://example.com/context_1'
    ctx = Context(ident=ident_uri)
    for i in range(5):
        ctx.add_statement(create_mock_statement(ident_uri, i))
    ctx.save_context(graph)
    self.assertEqual(len(graph), 5)
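# Several tests here call a create_mock_statement helper that isn't defined
# in this section. A minimal sketch of what it could look like, assuming (as
# in the MagicMock-based test below) that a statement only needs a context
# identifier and a to_triple() method; the URI scheme for the triple terms is
# invented here so each stmt_id yields a distinct, fully-bound triple:
import rdflib
from unittest.mock import MagicMock  # 'from mock import MagicMock' on Python 2

def create_mock_statement(ident_uri, stmt_id):
    statement = MagicMock(name='statement_{}'.format(stmt_id))
    statement.context.identifier = rdflib.term.URIRef(ident_uri)
    # Fully-bound terms (no Variables), so save_context counts the triple
    statement.to_triple.return_value = tuple(
        rdflib.term.URIRef('{}#{}_{}'.format(ident_uri, part, stmt_id))
        for part in ('s', 'p', 'o'))
    return statement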
def test_triples_saved_noundef_triples_counted(self):
    graph = set()
    ident_uri = 'http://example.com/context_1'
    ctx = Context(ident=ident_uri)
    statement = MagicMock()
    statement.context.identifier = rdflib.term.URIRef(ident_uri)
    statement.to_triple.return_value = (Variable('var'), 1, 2)
    ctx.add_statement(statement)
    ctx.save_context(graph)
    self.assertEqual(ctx.triples_saved, 0)
def setUp(self):
    # Make the statements and evidence we will query for in the test
    super(EvidenceForTest, self).setUp()
    c1 = Context(ident='http://example.org/statements', conf=self.conf)
    c1(Neuron)('AVAL').innexin('UNC-7')
    evc = Context(ident='http://example.org/metadata', conf=self.conf)
    ev1 = evc(Evidence)(key='js2019')
    ev1.supports(c1.rdf_object)
    # Save them
    c1.save_context()
    evc.save_context()
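# A follow-on sketch (not from the original) of a test this setUp enables,
# mirroring the evctx.stored(Evidence)().load() query pattern used in the
# example script later in this section; the test name and assertion are
# hypothetical:
def test_evidence_is_queryable(self):
    evc = Context(ident='http://example.org/metadata', conf=self.conf)
    # Load the Evidence objects saved in setUp from the metadata context
    results = list(evc.stored(Evidence)().load())
    self.assertTrue(len(results) >= 1)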
class _DataTest(unittest.TestCase):

    def delete_dir(self):
        self.path = self.TestConfig['rdf.store_conf']
        try:
            if self.TestConfig['rdf.source'] == "Sleepycat":
                subprocess.call("rm -rf " + self.path, shell=True)
            elif self.TestConfig['rdf.source'] == "ZODB":
                delete_zodb_data_store(self.path)
        except OSError as e:
            if e.errno == 2:
                # The file may not exist and that's fine
                pass
            else:
                raise e

    @classmethod
    def setUpClass(cls):
        pass

    def setUp(self):
        # Set do_logging to True if you like walls of text
        self.TestConfig = Data.open(TEST_CONFIG)
        td = '__tempdir__'
        z = self.TestConfig['rdf.store_conf']
        if z.startswith(td):
            x = z[len(td):]
            h = tempfile.mkdtemp()
            self.TestConfig['rdf.store_conf'] = h + x
        self.delete_dir()
        PyOpenWorm.connect(conf=self.TestConfig, do_logging=False)
        self.context = Context(ident='http://example.org/test-context',
                               conf=self.TestConfig)
        typ = type(self)
        if hasattr(typ, 'ctx_classes'):
            if isinstance(typ.ctx_classes, dict):
                self.ctx = self.context(typ.ctx_classes)
            else:
                self.ctx = self.context(
                    {x.__name__: x for x in typ.ctx_classes})

    def save(self):
        self.context.save_context()

    def tearDown(self):
        PyOpenWorm.disconnect()
        self.delete_dir()

    @property
    def config(self):
        return PyOpenWorm.config()
class _DataTest(unittest.TestCase):

    def delete_dir(self):
        self.path = self.TestConfig['rdf.store_conf']
        try:
            if self.TestConfig['rdf.source'] == "Sleepycat":
                subprocess.call("rm -rf " + self.path, shell=True)
            elif self.TestConfig['rdf.source'] == "ZODB":
                delete_zodb_data_store(self.path)
        except OSError as e:
            if e.errno == 2:
                # The file may not exist and that's fine
                pass
            else:
                raise e

    def setUp(self):
        # Set do_logging to True if you like walls of text
        self.TestConfig = Data.open(TEST_CONFIG)
        td = '__tempdir__'
        z = self.TestConfig['rdf.store_conf']
        if z.startswith(td):
            x = z[len(td):]
            h = tempfile.mkdtemp()
            self.TestConfig['rdf.store_conf'] = h + x
        self.delete_dir()
        self.connection = PyOpenWorm.connect(conf=self.TestConfig,
                                             do_logging=False)
        self.context = Context(ident='http://example.org/test-context',
                               conf=self.TestConfig)
        typ = type(self)
        if hasattr(typ, 'ctx_classes'):
            if isinstance(typ.ctx_classes, dict):
                self.ctx = self.context(typ.ctx_classes)
            else:
                self.ctx = self.context(
                    {x.__name__: x for x in typ.ctx_classes})

    def save(self):
        self.context.save_context()

    def tearDown(self):
        PyOpenWorm.disconnect(self.connection)
        self.delete_dir()

    @property
    def config(self):
        return self.TestConfig

    conf = config
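# A usage sketch (assumed, not from the original) of the ctx_classes hook in
# _DataTest.setUp. A subclass lists the classes it wants contextualized, and
# setUp maps them by __name__ so they are reachable through self.ctx; the
# subclass name and test body here are hypothetical:
class NeuronContextTest(_DataTest):
    ctx_classes = (Neuron,)  # mapped to {'Neuron': Neuron} by setUp

    def test_make_neuron(self):
        # self.ctx.Neuron is Neuron bound to the test context
        n = self.ctx.Neuron(name='AVAL')
        n.innexin('UNC-7')
        self.save()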
def test_triples_saved_multi(self):
    graph = set()
    ident_uri = 'http://example.com/context_1'
    ident_uri1 = 'http://example.com/context_11'
    ident_uri2 = 'http://example.com/context_12'
    ctx = Context(ident=ident_uri)
    ctx1 = Context(ident=ident_uri1)
    ctx2 = Context(ident=ident_uri2)
    ctx2.add_import(ctx)
    ctx1.add_import(ctx2)
    ctx1.add_import(ctx)
    ctx.add_statement(create_mock_statement(ident_uri, 1))
    ctx1.add_statement(create_mock_statement(ident_uri1, 3))
    ctx2.add_statement(create_mock_statement(ident_uri2, 2))
    ctx1.save_context(graph, inline_imports=True)
    self.assertEqual(ctx1.triples_saved, 3)
def test_triples_saved(self):
    graph = set()
    ident_uri = 'http://example.com/context_1'
    ident_uri2 = 'http://example.com/context_2'
    ident_uri2_1 = 'http://example.com/context_2_1'
    ident_uri3 = 'http://example.com/context_3'
    ident_uri4 = 'http://example.com/context_4'
    ctx = Context(ident=ident_uri)
    ctx2 = Context(ident=ident_uri2)
    ctx2_1 = Context(ident=ident_uri2_1)
    ctx.add_import(ctx2)
    ctx.add_import(ctx2_1)
    ctx3 = Context(ident=ident_uri3)
    ctx3.add_import(ctx)
    last_ctx = Context(ident=ident_uri4)
    last_ctx.add_import(ctx3)
    ctx.add_statement(create_mock_statement(ident_uri, 1))
    ctx2.add_statement(create_mock_statement(ident_uri2, 2))
    ctx2_1.add_statement(create_mock_statement(ident_uri2_1, 2.1))
    ctx3.add_statement(create_mock_statement(ident_uri3, 3))
    last_ctx.add_statement(create_mock_statement(ident_uri4, 4))
    last_ctx.save_context(graph, inline_imports=True)
    self.assertEqual(last_ctx.triples_saved, 5)
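# The import graphs exercised by the two tests above, with arrows pointing
# from importing context to imported context:
#
#   test_triples_saved_multi: ctx1 -> ctx2 -> ctx, plus ctx1 -> ctx directly
#   test_triples_saved:       last_ctx -> ctx3 -> ctx -> {ctx2, ctx2_1}
#
# Saving the top context with inline imports writes each context's statements
# exactly once (ctx is reachable along two paths in the first test, but its
# statement is still counted only once), hence triples_saved of 3 and 5.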
ctx = Context(ident='http://example.org/data')
evctx = Context(ident='http://example.org/meta')

# Create a new Neuron object to work with
n = ctx(Neuron)(name='AVAL')

# Create a new Evidence object with `doi` and `pmid` fields populated.
# See `PyOpenWorm/evidence.py` for other available fields.
d = evctx(Document)(key='Anonymous2011',
                    doi='125.41.3/ploscompbiol',
                    pmid='12345678')
e = evctx(Evidence)(key='Anonymous2011', reference=d)

# The Evidence object asserts something about the enclosed dataObject.
# Here we add a receptor to the Neuron we made earlier, and "assert it".
# As the discussion (see top) reads, this might be asserting the existence of
# receptor UNC-8 on neuron AVAL.
n.receptor('UNC-8')
e.supports(ctx.rdf_object)

# Save the Neuron and Evidence objects to the database.
ctx.save_context()
evctx.save_context()

# What does my evidence object contain?
for e_i in evctx.stored(Evidence)().load():
    print(e_i.reference())
    print(e_i.supports())

# Disconnect from the database.
P.disconnect()
def do_insert(ident, config="default.conf", logging=False,
              imports_context_ident=None, basedir=aux_data()):
    sources = init_sources()
    extras = init_extra_sources(basedir)
    data_sources_by_key = {x.key: x for x in sources + extras}
    trans_map = init_translators() + init_extra_neuron_data_translators(extras)
    P.connect(configFile=config, do_logging=logging)
    P.config()

    CTX = Context(ident=ident + '-data', imported=(P.CONTEXT,), conf=P.config())
    EVCTX = Context(ident=ident + '-evidence', imported=(P.CONTEXT,), conf=P.config())
    IWCTX = Context(ident=ident, imported=(CTX, EVCTX), conf=P.config())
    imports_context = Context(ident=imports_context_ident, conf=P.config())

    try:
        t0 = time()
        translators = dict()
        remaining = list(trans_map)
        last_remaining = None
        saved_contexts = set([])
        while remaining != last_remaining:
            next_remaining = []
            for t in remaining:
                if not isinstance(t[0], (list, tuple)):
                    source_keys = (t[0],)
                else:
                    source_keys = t[0]

                sources = tuple(data_sources_by_key.get(s) for s in source_keys)
                if None in sources:
                    # An input isn't available yet; retry on the next pass
                    next_remaining.append(t)
                    continue

                translator_class = t[1]
                if len(t) > 2:
                    output_key = t[2]
                else:
                    output_key = None

                translator = translators.get(translator_class, None)
                if not translator:
                    translator = translator_class()
                    translators[translator_class] = translator

                print('\n'.join('Input({}/{}): {}'.format(i + 1, len(sources), s)
                                for i, s in enumerate(sources)))
                print('Translating with {}'.format(translator))
                orig_wd = os.getcwd()
                os.chdir(basedir)
                try:
                    res = translator(*sources, output_key=output_key)
                finally:
                    os.chdir(orig_wd)

                print('Result: {}'.format(res))
                if isinstance(res, DataWithEvidenceDataSource):
                    res.data_context.save_context(inline_imports=True,
                                                  saved_contexts=saved_contexts)
                    res.data_context.save_imports(imports_context)
                    res.evidence_context.save_context(inline_imports=True,
                                                      saved_contexts=saved_contexts)
                    res.evidence_context.save_imports(imports_context)
                    for ctx in res.contexts:
                        # No additional contexts are expected on the result;
                        # fail loudly if any appear
                        raise Exception()

                if res:
                    if res.key:
                        data_sources_by_key[res.key] = res
                    else:
                        data_sources_by_key[res.identifier] = res
            last_remaining = list(remaining)
            remaining = next_remaining

        for x in remaining:
            warn("Failed to process: {}".format(x))

        # attach_neuromlfiles_to_channel()

        t1 = time()
        print("Saving data...")
        graph = P.config('rdf.graph')
        for src in data_sources_by_key.values():
            if isinstance(src, DataWithEvidenceDataSource):
                print('saving', src)
                CTX.add_import(src.data_context)
                EVCTX.add_import(src.evidence_context)
                for ctx in src.contexts:
                    IWCTX.add_import(ctx)
        IWCTX.save_context(graph, saved_contexts=saved_contexts)
        IWCTX.save_imports(imports_context)
        print('imports context size', len(imports_context))
        print("Saved %d triples." % IWCTX.triples_saved)
        t2 = time()

        print("Serializing...")
        serialize_as_nquads()
        t3 = time()
        print("generating objects took", t1 - t0, "seconds")
        print("saving objects took", t2 - t1, "seconds")
        print("serializing objects took", t3 - t2, "seconds")
    except Exception:
        traceback.print_exc()
    finally:
        P.disconnect()
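# The shape of the trans_map entries consumed above, inferred from how
# do_insert unpacks t[0], t[1], and the optional t[2]; the source keys and
# translator class names in this sketch are hypothetical:
trans_map_sketch = [
    # (input source key, translator class)
    ('muscle_data', MuscleDataTranslator),
    # ((several input source keys), translator class, output key)
    (('connectome_csv', 'neuron_ids'), ConnectomeTranslator, 'connectome'),
]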
# This variant of the example script assumes a connection object and a data
# context created beforehand, e.g. (following the connect/Context usage
# elsewhere in this section):
#   conn = P.connect(configFile='default.conf')
#   ctx = Context(ident='http://example.org/data', conf=conn.conf)
evctx = Context(ident='http://example.org/meta', conf=conn.conf)

# Create a new Neuron object to work with
n = ctx(Neuron)(name='AVAL')

# Create a new Evidence object with `doi` and `pmid` fields populated.
# See `PyOpenWorm/evidence.py` for other available fields.
d = evctx(Document)(key='Anonymous2011',
                    doi='125.41.3/ploscompbiol',
                    pmid='12345678')
e = evctx(Evidence)(key='Anonymous2011', reference=d)

# The Evidence object asserts something about the enclosed dataObject.
# Here we add a receptor to the Neuron we made earlier, and "assert it".
# As the discussion (see top) reads, this might be asserting the existence of
# receptor UNC-8 on neuron AVAL.
n.receptor('UNC-8')
e.supports(ctx.rdf_object)

# Save the Neuron and Evidence objects to the database.
ctx.save_context()
evctx.save_context()

# What does my evidence object contain?
for e_i in evctx.stored(Evidence)().load():
    print(e_i.reference(), e_i.supports())

# Disconnect from the database.
P.disconnect(conn)
def test_save_context_no_graph(self):
    ctx = Context()
    with patch('PyOpenWorm.data.ALLOW_UNCONNECTED_DATA_USERS', False):
        with self.assertRaisesRegexp(Exception, r'graph'):
            ctx.save_context()
def test_save_context_no_graph(self):
    ctx = Context()
    del ctx.conf['rdf.graph']
    with self.assertRaisesRegexp(Exception, r'graph'):
        ctx.save_context()