def test_hierarchy_of_processors_after_serialization_deserialization(self):
    """
    Round-trip a simple processor hierarchy through serialize/deserialize,
    then check that full hierarchical names survive, including a name gained
    when a processor is re-parented under a second parent.
    """
    state = prepare_simple_processors_hierarchy()

    # Serialize then deserialize; all checks below run on the reconstructed copy
    s = serialize_state(state)
    state = deserialize_state(s)

    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(state)

    p2 = glb_idx.get(Processor.partial_key("P1.P2"))[0]
    p4 = glb_idx.get(Processor.partial_key("P1.P2.P3"))[0]
    p5 = glb_idx.get(Processor.partial_key("P1.P2b"))[0]

    names = p2.full_hierarchy_names(glb_idx)
    self.assertEqual(names[0], "P1.P2")

    # Make "p1.p2.p3" processor descend from "p1.p2b" so it will be also "p1.p2b.p3".
    # The created relation itself is not needed, only its side effect on the
    # registry, so the return value is deliberately discarded (was an unused local).
    _find_or_create_relation(p5, p4, RelationClassType.pp_part_of, "test_observer", None, state)

    names = p4.full_hierarchy_names(glb_idx)
    self.assertIn("P1.P2.P3", names)
def update_current_version_state(self, lst_cmds):
    """
    Persist the serialized in-memory state into the current CaseStudyVersion
    and copy the execution timestamps of each command in *lst_cmds* to its
    stored record.

    Designed to work using the REST interface. TEST in direct use.
    """
    # Snapshot the interactive session state before touching the DB
    serialized = serialize_state(self._isess._state)

    # Open a DB session from the scoped factory
    db = self._sess_factory()

    # Load the version being worked on and attach the fresh state to it
    version = db.query(CaseStudyVersion).get(self._session.version_id)
    version.state = serialized
    db.add(version)

    # Mirror each command's execution window onto its persisted container
    for cmd in lst_cmds:
        stored = db.query(CommandsContainer).get(cmd.id)
        stored.execution_start = cmd.execution_start
        stored.execution_end = cmd.execution_end
        db.add(stored)

    db.commit()
    self._sess_factory.remove()
def test_002_execute_file_two(self):
    """
    A file containing QQs for three different sets of processor: Crop, Farm,
    AgrarianRegion AND UPSCALING (extracted from Almeria case study)
    Test number of processors read for each category, using processor sets
    and PartialRetrievalDictionary
    :return:
    """
    file_path = os.path.dirname(os.path.abspath(__file__)) + "/z_input_files/test_spreadsheet_upscale_reduced.xlsx"
    isess = execute_file(file_path, generator_type="spreadsheet")

    # Save state. Write to the system temp directory instead of the previous
    # hard-coded user-specific path ("/home/rnebot/..."), which broke the
    # test on any other machine.
    import tempfile
    s = serialize_state(isess.state)
    with open(os.path.join(tempfile.gettempdir(), "MiniAlmeria.serialized"), "wt") as f:
        f.write(s)
    local_state = deserialize_state(s)

    # Check State of things
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(local_state)

    # Three processor sets
    self.assertEqual(len(p_sets), 3)

    # Close interactive session
    isess.close_db_session()
def save(self, from_web_service=False, cs_uuid=None, cs_name=None):
    # Persist the current reproducible session: serialize the in-memory state
    # into the version, then store session, version, case study and commands
    # in the DB. Behavior differs depending on whether the call comes from the
    # web service (entities may already exist and must be merged/reattached)
    # or from direct use (everything is simply added).
    #
    # :param from_web_service: True when called from the REST API; triggers the
    #        merge/reattach path below.
    # :param cs_uuid: optional UUID of an existing CaseStudy to attach the
    #        version to (raises if not found).
    # :param cs_name: optional new name for the version.
    # :raises Exception: if saving was disallowed at open time, or cs_uuid
    #        does not match any CaseStudy.
    if not self._allow_saving:
        raise Exception(
            "The ReproducibleSession was opened disallowing saving. Please close it and reopen it with the proper value"
        )
    # Serialize state
    st = serialize_state(self._isess._state)
    self._session.version.state = st
    self._session.state = st
    ws = self._session
    # Open DB session
    session = self._sess_factory()
    # Change the case study
    if cs_uuid:
        # Load case study
        cs = session.query(CaseStudy).filter(CaseStudy.uuid == cs_uuid).first()
        if cs:
            ws.version.case_study = cs
        else:
            raise Exception("The case study UUID '" + cs_uuid + "' was not found")
    # Append commands, self._session, the version and the case_study
    if not from_web_service:
        for c in self._session.commands:
            session.add(c)
        session.add(ws)
        session.add(ws.version)
        session.add(ws.version.case_study)
    else:
        # Web-service path: the user and possibly the case study / version
        # already exist in the DB, so they are merged or re-queried and then
        # reattached to the session object. NOTE(review): the detach
        # (set to None) before re-query appears intended to avoid SQLAlchemy
        # re-inserting already-persisted rows — confirm against the ORM docs.
        ws.who = session.merge(ws.who)
        cs_id = ws.version.case_study.id
        vs_id = ws.version.id
        if cs_id and not vs_id:
            ws.version.case_study = None
        if vs_id:
            ws.version = None
        if cs_id:
            cs = session.query(CaseStudy).get(cs_id)
        else:
            cs = ws.version.case_study
        session.add(cs)
        if vs_id:
            vs = session.query(CaseStudyVersion).get(vs_id)
            ws.version = vs
        else:
            ws.version.case_study = cs
            vs = ws.version
        session.add(vs)
        # Stamp the session close time before persisting it
        ws.close_instant = datetime.datetime.utcnow()
        session.add(ws)
        for c in self._session.commands:
            session.add(c)
    if cs_name:
        ws.version.name = cs_name
    # If it was called from the REST API, assure that the version has a creation date (it should not happen)
    if from_web_service and not vs.creation_instant:
        print("Late setup of version creation date")
        vs.creation_instant = datetime.datetime.utcnow()
    # Commit DB session
    session.commit()
    # Force-load lazy relationships before the scoped session is removed,
    # so the detached objects remain usable by the caller
    force_load(self._session)
    self._sess_factory.remove()
def test_005_execute_file_five(self):
    """
    Just Structure. From Soslaires.
    :return:
    """
    file_path = os.path.dirname(os.path.abspath(__file__)) + "/z_input_files/Soslaires.xlsx"
    isess = execute_file(file_path, generator_type="spreadsheet")

    # Save state. Write to the system temp directory instead of the previous
    # hard-coded user-specific path ("/home/rnebot/..."), which broke the
    # test on any other machine.
    import tempfile
    s = serialize_state(isess.state)
    with open(os.path.join(tempfile.gettempdir(), "Soslaires.serialized"), "wt") as f:
        f.write(s)
    local_state = deserialize_state(s)

    # Check State of things
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(local_state)

    # Four processor sets
    self.assertEqual(len(p_sets), 4)

    # Obtain all Observers
    print("---- Observer ----")
    oers = glb_idx.get(Observer.partial_key())
    for i in oers:
        print(i.name)

    # Obtain all processors
    print("---- Processor ----")
    procs = glb_idx.get(Processor.partial_key())
    for i in procs:
        print(i.name)

    # Obtain all FactorTypes
    print("---- FactorType ----")
    fts = glb_idx.get(FactorType.partial_key())
    for i in fts:
        print(i.name)

    # Obtain all Factors
    print("---- Factor ----")
    fs = glb_idx.get(Factor.partial_key())
    for i in fs:
        print(i.processor.name + ":" + i.taxon.name)

    # Obtain all Quantitative Observations
    print("---- Quantities ----")
    qqs = glb_idx.get(FactorQuantitativeObservation.partial_key())
    for i in qqs:
        print(i.factor.processor.name + ":" + i.factor.taxon.name + "= " +
              str(i.value.expression if i.value else ""))

    # Obtain all part-of Relation Observations.
    # NOTE: "\\/" (properly escaped; "\/" was an invalid escape sequence with
    # the same runtime value) renders as the "is parent of" separator.
    print("---- Part-of relations (P-P) ----")
    po_rels = glb_idx.get(ProcessorsRelationPartOfObservation.partial_key())
    for i in po_rels:
        print(i.parent_processor.name + " \\/ " + i.child_processor.name)

    # Obtain all undirected flow Relation Observations
    print("---- Undirected flow relations (P-P) ----")
    uf_rels = glb_idx.get(ProcessorsRelationUndirectedFlowObservation.partial_key())
    for i in uf_rels:
        print(i.source_processor.name + " <> " + i.target_processor.name)

    # Obtain all upscale Relation Observations
    print("---- Upscale relations (P-P) ----")
    up_rels = glb_idx.get(ProcessorsRelationUpscaleObservation.partial_key())
    for i in up_rels:
        print(i.parent_processor.name + " \\/ " + i.child_processor.name +
              "(" + i.factor_name + ": " + str(i.quantity) + ")")

    # Obtain all directed flow Relation Observations
    print("---- Directed flow relations (F-F) ----")
    df_rels = glb_idx.get(FactorsRelationDirectedFlowObservation.partial_key())
    for i in df_rels:
        print(i.source_factor.processor.name + ":" + i.source_factor.taxon.name + " -> " +
              i.target_factor.processor.name + ":" + i.target_factor.taxon.name +
              (" (" + str(i.weight) + ")" if i.weight else ""))

    # Obtain all hierarchies
    print("---- FactorType Hierarchies ----")
    hies = glb_idx.get(Hierarchy.partial_key())
    for i in hies:
        print(i.name)

    # Close interactive session
    isess.close_db_session()