def test_get_same_n_samples(self, session):
    """Fetching sample types twice — the second time through a fresh browser
    with ``force_refresh=True`` — should yield the same number of models."""
    browser = Browser(session)
    sample_list = browser.last(NUM_MODELS, "Sample")
    types_first = browser.get(sample_list, "sample_type")
    assert types_first

    # a brand-new browser with a forced refresh should agree on the count
    other_browser = Browser(session)
    other_samples = other_browser.last(NUM_MODELS, "Sample")
    types_second = other_browser.get(
        other_samples, "sample_type", force_refresh=True
    )
    assert len(types_first) == len(types_second)
def test_retrieve_with_new_samples(session):
    """We expect when we create new models for the model relationships to be
    maintained, even for unsaved (new) samples run through ``retrieve``."""
    primer = session.SampleType.find_by_name("Primer").new_sample(
        "", "", "", properties={"Anneal Sequence": "AGTAGTATGA"}
    )
    fragment = session.SampleType.find_by_name("Fragment").new_sample(
        "", "", "", properties={"Length": 100, "Forward Primer": primer}
    )

    new_samples = [primer, fragment]
    field_values_before = []
    for sample in new_samples:
        field_values_before += sample.field_values

    Browser(session).retrieve(new_samples, "field_values")

    field_values_after = []
    for sample in [primer, fragment]:
        field_values_after += sample.field_values

    # retrieve must not replace the field values on the new models
    assert field_values_before == field_values_after
def test_retrieve_with_many_through_for_collections_and_parts(session):
    """Collections should recover their parts through PartAssociation, even
    after the browser's model cache has been cleared."""
    browser = Browser(session)

    # retrieve the expected collections and parts via their associations
    associations = browser.last(100, "PartAssociation")
    parts = browser.retrieve(associations, "part")
    collections = browser.retrieve(associations, "collection")
    assert parts
    assert collections

    # wipe the model cache so nothing is served from memory
    browser.clear()
    assert not browser.model_cache

    # gather the parts from the collections directly
    parts_from_collections = browser.retrieve(collections, "parts")
    assert len(parts_from_collections) >= len(parts)
    assert len(parts) > 0

    # every deserialized 'part' should be an Item (or subclass) instance
    for collection in collections:
        assert "parts" in collection._get_deserialized_data()
        deserialized_parts = collection._get_deserialized_data()["parts"]
        if deserialized_parts is not None:
            for part in deserialized_parts:
                if part is not None:
                    assert issubclass(type(part), pydent_models.Item)
def test_algorithm(autoplanner, session):
    """Build a sample-composition DAG (each source sample feeding one target
    plasmid) and run the NetworkOptimizer against the template graph."""
    browser = Browser(session)
    composition = nx.DiGraph()

    # every edge points from a source sample to the same target plasmid
    target = "pyMOD-URA-URA3.A.1-pGPD-yeVenus-tCYC1"
    source_names = [
        "DTBA_backboneA_splitAMP",
        "T1MC_NatMX-Cassette_MCT2 (JV)",
        "BBUT_URA3.A.0_homology1_UTP1 (from genome)",
        "MCDT_URA3.A.1_homology2_DTBA",
        "DH5alpha",
        "TP-IRES-EGFP-TS",
        "BBUT_URA3.A.1_homology1_UTP1 (from_genome) (new fwd primer))",
    ]
    for source_name in source_names:
        src = browser.find_by_name(source_name)
        dst = browser.find_by_name(target)
        composition.add_node(src.id, sample=src)
        composition.add_node(dst.id, sample=dst)
        composition.add_edge(src.id, dst.id)

    algorithm = NetworkOptimizer(
        browser, composition, autoplanner.template_graph
    )
    algorithm.print_sample_composition()
    algorithm.run(session.ObjectType.find_by_name("Plasmid Stock"))
def test_one(session):
    """``browser.one()`` should agree with both ``session.Sample.one()`` and
    the last sample from ``session.Sample.last()``."""
    browser = Browser(session)
    from_browser = browser.one()
    from_session = session.Sample.one()
    from_last = session.Sample.last()[0]
    assert from_browser.id == from_session.id
    assert from_last.id == from_session.id
def test_nested_cache(session):
    """Models pulled through the browser cache should resolve to the exact
    same instances, while session-fetched models stay distinct."""
    browser = Browser(session)
    browser.use_cache = True

    st_from_session = session.SampleType.find(1)
    sample_from_session = st_from_session.samples[0]

    sample = browser.find(sample_from_session.id, "Sample")
    st = browser.find(st_from_session.id, "SampleType")
    sample_from_cache = browser.find(sample_from_session.id, "Sample")
    st_from_cache = browser.find(st_from_session.id, "SampleType")

    # the sample type pulled from the cache is always the same instance
    assert st_from_session is not st
    assert st_from_cache is st

    # the sample pulled from the cache is the same instance too
    assert sample_from_session is not sample
    assert sample_from_cache is sample

    assert (
        st_from_cache.samples[0].id == sample.id
    ), "The first indexed sample should be the same sample found by the browser"
def test_speed_improvements(session):
    """The browser's batched nested query should beat the equivalent
    per-model for-loops both in wall time and in request count."""
    n = 10

    # --- naive approach: one request per relationship, per model ---
    loop_t0 = time.time()
    loop_req0 = session._aqhttp.num_requests
    items = session.Item.last(n)
    samples = [item.sample for item in items]
    object_types = [item.object_type for item in items]  # issues requests
    sample_types = [sample.sample_type for sample in samples if sample]
    for sample_type in sample_types:
        sample_type.field_types
    loop_t1 = time.time()
    loop_req1 = session._aqhttp.num_requests

    # --- browser approach: one batched nested query ---
    browser_t0 = time.time()
    browser_req0 = session._aqhttp.num_requests
    browser = Browser(session)
    items = browser.last(n, "Item")
    browser.get(
        items, {"sample": {"sample_type": "field_types"}, "object_type": []}
    )
    browser_t1 = time.time()
    browser_req1 = session._aqhttp.num_requests

    fold_diff = (loop_t1 - loop_t0) / (browser_t1 - browser_t0)
    print("Browser is {} times faster than nested for-loops".format(fold_diff))
    print("Browser uses {} requests, while for-loops use {}".format(
        browser_req1 - browser_req0, loop_req1 - loop_req0))
    assert fold_diff > 1
def test_retrieve_get_refresh(self, session, force_refresh, func_name):
    """If force refresh is ON, then retrieve should get the EXACT same
    models every time."""
    browser = Browser(session)
    samples = browser.last(NUM_MODELS, "Sample")

    def call():
        # dispatch to either 'retrieve' or 'get', with or without the kwarg
        func = getattr(browser, func_name)
        if force_refresh is None:
            return func(samples, "sample_type")
        return func(samples, "sample_type", force_refresh=force_refresh)

    first = call()
    second = call()
    assert first
    assert len(first) == len(second)

    # no new instances: both calls returned the identical model objects
    unique_instances = {id(model) for model in first + second}
    assert len(unique_instances) == len(first)

    # every returned model must be consistent with the browser's cache
    for sample_type in first + second:
        assert not check_model_in_cache(sample_type, browser.model_cache)
def test_recursive_retrieve(session):
    """recursive_retrieve should walk a nested relationship dict and return
    non-empty model lists keyed by every relationship name it visited."""
    browser = Browser(session)
    query = {
        "field_values": {
            "wires_as_dest": {"source": "operation", "destination": "operation"},
            "wires_as_source": {"source": "operation", "destination": "operation"},
        }
    }
    ops = browser.session.Operation.last(10)
    results = browser.recursive_retrieve(ops, query)

    for relation in (
        "field_values",
        "wires_as_dest",
        "wires_as_source",
        "source",
        "destination",
        "operation",
    ):
        assert len(results[relation]) > 0

    # relations were deserialized onto the models themselves as well
    assert "field_values" in ops[0]._get_deserialized_data()
    first_fv = ops[0].field_values[0]
    assert "wires_as_dest" in first_fv._get_deserialized_data()
    assert "wires_as_source" in first_fv._get_deserialized_data()
def test_query_with_sample_type(session, fname, stid):
    """A browser query restricted by sample_type name should only return
    samples whose ``sample_type_id`` matches."""
    browser = Browser(session)
    sample_type = session.SampleType.find(stid)
    query_func = getattr(browser, fname)
    for model in query_func(sample_type=sample_type.name):
        assert model.sample_type_id == stid
def test_set_model(session):
    """After ``set_model``, search results should be instances of that model."""
    browser = Browser(session)
    browser.set_model("OperationType")
    for op_type in browser.search(".*Fragment.*"):
        assert isinstance(op_type, pydent_models.OperationType)
def test_recursive_cache(session):
    """Models reached through 'where' and 'retrieve' should resolve to the
    exact instances already in the browser cache, not fresh copies.

    Note (fix): removed leftover debugging artifacts from the original test
    body — two ``print`` calls of cache/model state and the
    ``set_verbose(True)`` toggles on both the session and the browser's
    session, which spam output on every run and are not part of the test's
    contract.
    """
    browser = Browser(session)
    samples = browser.interface("Sample").where({"sample_type_id": 1})

    # should preload SampleType into cache
    st = browser.find(1, "SampleType")
    assert browser.model_cache["SampleType"][st.id] == st
    found_st = browser.find(1, "SampleType")
    assert found_st is st

    st_from_where = browser.where({"id": samples[0].sample_type_id}, "SampleType")
    assert (
        st_from_where[0] is st
    ), "SampleType retrieved by where should find the SampleType in the cache"

    # should retrieve the exact model that was preloaded
    browser.retrieve(samples, "sample_type")
    assert samples[0].sample_type is st

    # affecting sample_types from these models should refer to the same sample type
    assert samples[0].sample_type is samples[1].sample_type
def test_cache_where(session):
    """cached_where should serve the very same model instances — including
    attributes tacked on after caching — for equivalent queries."""
    browser = Browser(session)
    primers = browser.cached_where({"sample_type_id": 1}, "Sample")

    # tag the last cached primer so we can prove identity later
    tagged = primers[-1]
    tagged.__dict__["foo"] = "bar"

    cached_primers = browser.cached_where(
        {"sample_type_id": 1, "id": tagged.id}, "Sample"
    )
    cached_primers2 = browser.cached_where(
        {"sample_type_id": [1, 2], "id": [tagged.id]}, "Sample"
    )
    empty = browser.cached_where(
        {"sample_type_id": 2, "id": tagged.id}, "Sample"
    )

    assert empty == [], "should not return any primers since query does not match"
    assert len(cached_primers) == 1, "should return exactly 1 primer"
    assert (
        cached_primers2 == cached_primers
    ), "should be equivalent as these are equivalent queries"
    assert (
        "foo" in cached_primers[0].__dict__
    ), 'should containing the "foo" attribute that was initially cached'
    assert (
        cached_primers[0].__dict__["foo"] == "bar"
    ), "should return the very same models that was initially cached"
    assert len(browser.model_cache["Sample"]) > 1
def test_simple_one_query(session):
    """``one()`` with a query should return a model through both the session
    interface and the browser."""
    browser = Browser(session)

    session_user = session.User.one(query={"login": session.current_user.login})
    assert session_user

    browser_user = browser.one(
        query={"login": session.current_user.login}, model_class="User"
    )
    assert browser_user
def test_browser_loads(session):
    """A browser (and its populated model cache) should survive a dill
    serialization round-trip."""
    browser = Browser(session)
    browser.last(30)
    restored = dill.loads(dill.dumps(browser))
    assert len(restored.model_cache) > 0
def test_cache_where_name(session):
    """find_by_name should return the exact instance already cached by an
    earlier cached_where query."""
    browser = Browser(session)
    primers = browser.cached_where({"sample_type_id": 1}, "Sample")
    first_primer = primers[0]
    refound = browser.find_by_name(first_primer.name, "Sample", primary_key="id")
    # identity, not mere equality: the cache must hand back the same object
    assert id(first_primer) == id(refound)
def test_search(session):
    """search should return only samples whose names match the pattern
    (case-insensitively)."""
    browser = Browser(session)
    pattern = ".*GFP.*"
    matches = browser.search(pattern)
    assert len(matches) > 0
    for sample in matches:
        assert re.match(pattern, sample.name, re.IGNORECASE)
def wc(session):
    """Build an EdgeWeightContainer over the last 10 plans and compute its
    edge weights, returning the populated container."""
    browser = Browser(session)
    recent_plans = browser.last(10, model_class="Plan")
    container = EdgeWeightContainer(
        browser,
        AutoPlannerModel._hash_afts,          # edge hash
        AutoPlannerModel._external_aft_hash,  # node hash
        plans=recent_plans,
    )
    container.compute()
    return container
def test_get(self, session):
    """Calling 'get' with just a single keyword should return the models
    in the cache."""
    browser = Browser(session)
    # an empty cache yields an empty list
    assert browser.get("Sample") == []
    cached_samples = browser.last(NUM_MODELS, "Sample")
    # once populated, 'get' reflects exactly what was cached
    assert browser.get("Sample") == cached_samples
def test_first_last(session):
    """``browser.first()`` should match the session's first sample and
    differ from the session's last sample."""
    browser = Browser(session)
    first_models = browser.first()
    assert len(first_models) == 1

    from_browser = first_models[0]
    from_first = session.Sample.first()[0]
    from_last = session.Sample.last()[0]
    assert from_browser.id == from_first.id
    assert from_last.id != from_first.id
def test_retrieve_with_many(session):
    """_retrieve_has_many_or_has_one should deserialize 'items' onto the
    searched samples."""
    browser = Browser(session)
    samples = browser.search(".*mcherry.*", sample_type="Fragment")[:30]
    assert not samples[0]._get_deserialized_data().get(
        "items", None
    ), "Items should not have been loaded into the sample yet."

    browser._retrieve_has_many_or_has_one(samples, "items")

    assert "items" in samples[0]._get_deserialized_data()
    assert (
        len(samples[0]._get_deserialized_data()["items"]) > 0
    ), "Items should have been found."
def test_search_ignore_case(session):
    """A case-insensitive search should find strictly more samples than the
    case-sensitive search for a mixed-case pattern."""
    browser = Browser(session)
    pattern = ".*mCherry.*"
    strict_matches = browser.search(pattern, ignore_case=False)
    loose_matches = browser.search(pattern, ignore_case=True)

    assert len(loose_matches) > len(strict_matches)
    assert len(strict_matches) > 0
    for sample in strict_matches:
        assert re.match(pattern, sample.name)
def test_retrieve_with_many_field_values(session):
    """Field values should be retrievable in bulk for searched samples.

    Note (fix): removed a leftover ``session.set_verbose(True)`` debugging
    call that spammed request logging on every run; none of the sibling
    tests enable verbose output.
    """
    browser = Browser(session)
    samples = browser.search(".*mcherry.*", sample_type="Fragment")[:30]
    assert len(samples[0].field_values) > 0
    assert not samples[0]._get_deserialized_data().get(
        "items", None
    ), "Items should not have been loaded into the sample yet."

    field_values = browser._retrieve_has_many_or_has_one(samples, "field_values")
    assert len(field_values) > 0
def test_close_matches_with_sample_type(session):
    """close_matches restricted by sample_type should only return samples of
    that type."""
    browser = Browser(session)
    matches = browser.close_matches("pMOD8-pGRR-W8", sample_type="Plasmid")
    assert len(matches) > 0

    plasmid_type = session.SampleType.find_by_name("Plasmid")
    for sample in matches:
        assert sample.sample_type_id == plasmid_type.id
def test_search_with_sample_type(session):
    """search restricted by sample_type should only return samples of that
    type."""
    browser = Browser(session)
    matches = browser.search(".*GFP.*", sample_type="Plasmid")
    assert len(matches) > 0

    plasmid_type = session.SampleType.find_by_name("Plasmid")
    for sample in matches:
        assert sample.sample_type_id == plasmid_type.id
def load_browser(self):
    """Load a pickled Browser from ``self.filepath`` if the file exists;
    otherwise create a fresh Browser bound to ``self.session``.

    Sets ``self.is_new_browser`` to indicate which path was taken, caches
    the result on ``self.browser``, and returns it.

    Note (fix): removed a stray ``print('')`` left over from debugging
    inside the ``with`` block.
    """
    if os.path.isfile(self.filepath):
        with open(self.filepath, 'rb') as f:
            # NOTE(review): dill.load executes arbitrary pickled code —
            # only load browser files from trusted locations.
            browser = dill.load(f)
        self.is_new_browser = False
    else:
        browser = Browser(self.session)
        self.is_new_browser = True
    self.browser = browser
    return self.browser
def test_retrieve_has_many_or_has_one(session):
    """Retrieving the has-one 'sample_type' relation should attach the
    expected SampleType to the searched samples.

    Note (fix): the final check was written as ``assert (expr), expected``
    — the comma made ``expected`` a mere assertion *message*, so the id
    comparison was never performed (the assert only tested that the id was
    truthy). Replaced with the intended equality comparison.
    """
    browser = Browser(session)
    samples = browser.search(".*mcherry.*", sample_type="Fragment")[:30]
    assert not samples[0]._get_deserialized_data().get(
        "sample_type", None
    ), "SampleType should not have been loaded into the sample yet."

    sample_types = browser._retrieve_has_many_or_has_one(samples, "sample_type")
    assert len(sample_types) > 0
    # the deserialized sample_type must be the Fragment type
    assert (
        samples[0]._get_deserialized_data()["sample_type"].id
        == session.SampleType.find_by_name("Fragment").id
    )
def test_browser_loads_wires(session):
    """The Plan class has a special query hook that automatically grabs
    wires as well.

    By default, the update cache will recursively update models that have
    been deserialized from a model list, so loading a single plan should
    populate the 'Wire' entry of the model cache.
    """
    browser = Browser(session)
    assert "Wire" not in browser.model_cache
    browser.one("Plan")
    assert "Wire" in browser.model_cache
def test_update_model_cache_without_id(session):
    """We expect to be able to update the model cache with a newly created
    item; the cache key should be the record id ('rid'), never ``None``."""
    browser = Browser(session)
    new_sample = session.Sample.new()
    browser.update_cache([new_sample])

    sample_cache = browser.model_cache["Sample"]
    assert None not in sample_cache
    assert new_sample._primary_key in sample_cache
def test_retrieve_has_many_through(session):
    """_retrieve_has_many_through should attach Operation instances onto the
    queried jobs."""
    browser = Browser(session)
    jobs = session.Job.last(50)
    operations = browser._retrieve_has_many_through(jobs, "operations")
    assert len(operations) > 0

    # each deserialized 'operations' entry must be an Operation instance
    for job in jobs:
        assert "operations" in job._get_deserialized_data()
        job_operations = job._get_deserialized_data()["operations"]
        if job_operations is not None:
            for operation in job_operations:
                assert isinstance(operation, pydent_models.Operation)