    def test_hierarchy_of_processors_after_serialization_deserialization(self):
        state = prepare_simple_processors_hierarchy()
        # Serialize, deserialize
        s = serialize_state(state)
        state = deserialize_state(s)
        glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(
            state)
        p2 = glb_idx.get(Processor.partial_key("P1.P2"))[0]
        p4 = glb_idx.get(Processor.partial_key("P1.P2.P3"))[0]
        p5 = glb_idx.get(Processor.partial_key("P1.P2b"))[0]

        names = p2.full_hierarchy_names(glb_idx)
        self.assertEqual(names[0], "P1.P2")
        # Make "p1.p2.p3" processor descend from "p1.p2b" so it will be also "p1.p2b.p3"
        r = _find_or_create_relation(p5, p4, RelationClassType.pp_part_of,
                                     "test_observer", None, state)
        names = p4.full_hierarchy_names(glb_idx)
        self.assertIn("P1.P2.P3", names)
Example #2
    def test_005_execute_file_five(self):
        """
        Just Structure. From Soslaires.

        :return:
        """
        file_path = os.path.dirname(
            os.path.abspath(__file__)) + "/z_input_files/Soslaires.xlsx"
        isess = execute_file(file_path, generator_type="spreadsheet")
        # Save state
        s = serialize_state(isess.state)
        # Changed "wt" to "wb": the output of "serialize_state" is a byte array (it is compressed now)
        with open("/home/rnebot/GoogleDrive/AA_MAGIC/Soslaires.serialized",
                  "wb") as f:
            f.write(s)
        local_state = deserialize_state(s)
        # Check State of things
        glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(
            local_state)
        # Four processor sets
        self.assertEqual(len(p_sets), 4)
        # Obtain all Observers
        print("---- Observer ----")
        oers = glb_idx.get(Observer.partial_key())
        for i in oers:
            print(i.name)
        # Obtain all processors
        print("---- Processor ----")
        procs = glb_idx.get(Processor.partial_key())
        for i in procs:
            print(i.name)
        # Obtain all FactorTypes
        print("---- FactorType ----")
        fts = glb_idx.get(FactorType.partial_key())
        for i in fts:
            print(i.name)
        # Obtain all Factors
        print("---- Factor ----")
        fs = glb_idx.get(Factor.partial_key())
        for i in fs:
            print(i.processor.name + ":" + i.taxon.name)
        # Obtain all Quantitative Observations
        print("---- Quantities ----")
        qqs = glb_idx.get(FactorQuantitativeObservation.partial_key())
        for i in qqs:
            print(i.factor.processor.name + ":" + i.factor.taxon.name + "= " +
                  str(i.value.expression if i.value else ""))
        # Obtain all part-of Relation Observations
        print("---- Part-of relations (P-P) ----")
        po_rels = glb_idx.get(
            ProcessorsRelationPartOfObservation.partial_key())
        for i in po_rels:
            print(i.parent_processor.name + " \\/ " + i.child_processor.name)
        # Obtain all undirected flow Relation Observations
        print("---- Undirected flow relations (P-P) ----")
        uf_rels = glb_idx.get(
            ProcessorsRelationUndirectedFlowObservation.partial_key())
        for i in uf_rels:
            print(i.source_processor.name + " <> " + i.target_processor.name)
        # Obtain all upscale Relation Observations
        print("---- Upscale relations (P-P) ----")
        up_rels = glb_idx.get(
            ProcessorsRelationUpscaleObservation.partial_key())
        for i in up_rels:
            print(i.parent_processor.name + " \\/ " + i.child_processor.name +
                  "(" + i.factor_name + ": " + str(i.quantity) + ")")
        # Obtain all directed flow Relation Observations
        print("---- Directed flow relations (F-F) ----")
        df_rels = glb_idx.get(
            FactorsRelationDirectedFlowObservation.partial_key())
        for i in df_rels:
            print(i.source_factor.processor.name + ":" +
                  i.source_factor.taxon.name + " -> " +
                  i.target_factor.processor.name + ":" +
                  i.target_factor.taxon.name +
                  (" (" + str(i.weight) + ")" if i.weight else ""))
        # Obtain all hierarchies
        print("---- FactorType Hierarchies ----")
        hies = glb_idx.get(Hierarchy.partial_key())
        for i in hies:
            print(i.name)
        # Close interactive session
        isess.close_db_session()
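
    # Sketch of a hypothetical helper (not part of the original suite): every block
    # above repeats the same "glb_idx.get(<Class>.partial_key())" query, so the
    # dumping could be factored out along these lines:
    @staticmethod
    def _dump_registry(glb_idx, klass, label, fmt=lambda o: o.name):
        """Print every registered object of `klass`, formatted by `fmt` (assumed helper)."""
        print("---- " + label + " ----")
        for obj in glb_idx.get(klass.partial_key()):
            print(fmt(obj))
    # e.g.: self._dump_registry(glb_idx, Factor, "Factor", lambda f: f.processor.name + ":" + f.taxon.name)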
Example #3
    e.append(("QQ Observation", "A Factor", {
        "label": "A QQ Observation",
        **edge_factor_value
    }))
    factors_graph = nx.DiGraph()
    factors_graph.add_nodes_from(n)
    factors_graph.add_edges_from(e)
    # nx.write_gml(factors_graph, "/home/rnebot/LegendGraph.gml")


if __name__ == '__main__':
    # Deserialize previously recorded Soslaires State (WARNING! Execute the unit tests first to generate the ".serialized" file)
    # The file contains the byte array produced by "serialize_state", so it must be opened in binary mode
    with open("/home/rnebot/GoogleDrive/AA_MAGIC/Soslaires.serialized",
              "rb") as file:
        s = file.read()
    state = deserialize_state(s)
    # Create a Query and execute a query
    query = BasicQuery(state)
    construct_flow_graph(state, query, None)

# NUTS files
# Each file is a Code List
# Levels. Each level has a name
# Entry.
#   Each code has a shape (reproject (unify) to geographic coordinates (WKT)).
#   Each code can have a parent.
#   Each code is in a level.
#   An entry can have several codes and descriptions

#
# import shapefile
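#
# A minimal sketch of reading one such code list with pyshp (the "shapefile"
# module hinted at above) plus shapely for the WKT conversion. The attribute
# names used here ("NUTS_ID", "LEVL_CODE") are assumptions, not taken from the
# original code:
#
# import shapefile
# from shapely.geometry import shape
#
# def read_nuts_code_list(path: str):
#     sf = shapefile.Reader(path)
#     fields = [f[0] for f in sf.fields[1:]]          # skip the DeletionFlag field
#     for shp, rec in zip(sf.shapes(), sf.records()):
#         attrs = dict(zip(fields, rec))
#         wkt = shape(shp.__geo_interface__).wkt      # geometry as WKT
#         yield attrs.get("NUTS_ID"), attrs.get("LEVL_CODE"), wkt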
Example #4
    def open(self,
             session_factory,
             uuid_: str = None,
             recover_previous_state=True,
             cr_new: CreateNew = CreateNew.NO,
             allow_saving=True):
        """
        Open a work session

        +--------+--------+---------+-----------------------------------------------------------------------------------+
        | UUID   | cr_new | recover |        Behavior                                                                   |
        +--------+--------+---------+-----------------------------------------------------------------------------------+
        | !=None | True   | True    | Create new CS or version (branch) from "UUID", clone WS, recover State, append WS |
        | !=None | True   | False   | Create new CS or version (branch) from "UUID", Zero State, first WS               |
        | !=None | False  | True    | Recover State, append WS                                                          |
        | !=None | False  | False   | Zero State, append WS (overwrite type)                                            |
        | ==None | -      | -       | New CS and version, Zero State, first WS                                          |
        +--------+--------+---------+-----------------------------------------------------------------------------------+

        Use cases:
        * A new case study, from scratch: uuid_=None
        * A new case study, copied from another case study: uuid_=<something>, cr_new=CreateNew.CASE_STUDY, recover_previous_state=True
        * A new version of a case study
          - Copying the previous version
          - Starting from scratch
        * Continue a case study version
          - But restart (from scratch)
        * Can be a Transient session

        (An example call is sketched after this method.)

        :param uuid_: UUID of the case study or case study version. Can be None, for new case studies or for testing purposes.
        :param recover_previous_state: If an existing version is specified, recover its state, either by deserializing the persisted state or by executing all of its commands
        :param cr_new: If != CreateNew.NO, create either a new case study or a new version. If == CreateNew.NO, append the session to "uuid_"
        :param allow_saving: If True, saving at the end is allowed (it remains optional). If False, trying to save raises an Exception
        :return: UUID of the case study version in use. If it is a new case study that has not been saved, the value will be None
        """

        # TODO Just register for now. But in the future it should control that there is no other "allow_saving" ReproducibleSession opened
        # TODO for the same Case Study Version. So it implies modifying some state in CaseStudyVersion to have the UUID
        # TODO of the active ReproducibleSession, even if it is not in the database. Register also the date of "lock", so the
        # TODO lock can be removed in case of "hang" of the locker ReproducibleSession
        self._allow_saving = allow_saving
        self._sess_factory = session_factory
        session = self._sess_factory()
        if uuid_:
            uuid_ = str(uuid_)
            # Find UUID. Is it a Case Study or a Case Study version?
            # If it is the former, look for the active version.
            cs = session.query(CaseStudy).filter(
                CaseStudy.uuid == uuid_).first()
            if not cs:
                vs = session.query(CaseStudyVersion).filter(
                    CaseStudyVersion.uuid == uuid_).first()
                if not vs:
                    ss = session.query(CaseStudyVersionSession).filter(
                        CaseStudyVersionSession.uuid == uuid_).first()
                    if not ss:
                        raise Exception(
                            "Object '" + uuid_ +
                            "' not found, when opening a ReproducibleSession")
                    else:
                        vs = ss.version
                        cs = vs.case_study
                else:
                    cs = vs.case_study
            else:  # A case study: find the latest version (the most recently modified one, i.e. the one with the newest ReproducibleSession)
                max_date = None
                max_version = None
                for v in cs.versions:
                    for s in v.sessions:
                        if not max_date or s.open_instant > max_date:
                            max_date = s.open_instant
                            max_version = v
                vs = max_version
                cs = vs.case_study

            # List of active sessions
            # NOTE: instead of time ordering, the ID is used, assuming sessions with greater ID were created later
            lst = session.query(CaseStudyVersionSession). \
                filter(CaseStudyVersionSession.version_id == vs.id). \
                order_by(CaseStudyVersionSession.id). \
                all()
            idx = 0
            for i, ws in enumerate(lst):
                if ws.restarts:
                    idx = i
            lst = lst[idx:]  # Cut the list, keep only active sessions

            if cr_new != CreateNew.NO:  # Create either a case study or a case study version
                if cr_new == CreateNew.CASE_STUDY:
                    cs = copy.copy(cs)  # New Case Study: COPY CaseStudy
                else:  # New Case Study Version: LOAD CaseStudy (then version it)
                    force_load(cs)
                vs2 = copy.copy(vs)  # COPY CaseStudyVersion
                vs2.case_study = cs  # Assign case study to the new version
                if recover_previous_state:  # If the new version keeps previous state, copy it also
                    vs2.state = vs.state  # Copy state
                    vs2.state_version = vs.state_version
                    for ws in lst:  # COPY active ReproducibleSessions
                        ws2 = copy.copy(ws)
                        ws2.version = vs2
                        for c in ws.commands:  # COPY commands
                            c2 = copy.copy(c)
                            c2.session = ws2
                vs = vs2
            else:
                # Load into memory
                if len(lst) == 1:
                    ws = lst[0]
                    force_load(ws)
                force_load(vs)
                force_load(cs)

            if recover_previous_state:
                # Load state if it is persisted (if not EXECUTE, POTENTIALLY VERY SLOW)
                if vs.state:
                    # Deserialize
                    self._isess._state = deserialize_state(
                        vs.state, vs.state_version)
                else:
                    # Zero State, execute all commands in sequence
                    self._isess._state = State()
                    for ws in lst:
                        for c in ws.commands:
                            execute_command_container(self._isess._state, c)
                if cr_new == CreateNew.VERSION:  # TODO Check if this works in all possible circumstances (combine the parameters of the function)
                    recover_previous_state = False
            else:
                self._isess._state = State()

        else:  # New Case Study AND new Case Study Version
            cs = CaseStudy()
            vs = CaseStudyVersion()
            vs.creation_instant = datetime.datetime.utcnow()
            vs.case_study = cs

        # Detach Case Study and Case Study Version
        if cs in session:
            session.expunge(cs)
        if vs in session:
            session.expunge(vs)
        # Create the Case Study Version Session
        usr = session.query(User).filter(User.name == self._identity).first()
        if usr:
            force_load(usr)
        else:
            if allow_saving:
                raise Exception(
                    "A user is required in order to record who is authoring the case study"
                )
        # TODO !!!!NEW CODE, ADDED TO SUPPORT NEEDED FUNCTIONALITY. NEEDS BETTER CODING!!!!
        restart = not recover_previous_state if uuid_ else True
        if not restart:
            self._session = ws
        else:
            self._session = CaseStudyVersionSession()
            self._session.version = vs
            self._session.who = usr
            self._session.restarts = True
        # If the Version existed, define "restarts" according to parameter "recover_previous_state"
        # ElseIf it is the first Session -> RESTARTS=True

        session.close()
        # session.expunge_all()
        self._sess_factory.remove()
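
# Usage sketch (assumptions: the enclosing class is a ReproducibleSession-like
# object whose construction is not shown in this excerpt). Following the
# docstring table, opening a new version of an existing case study while keeping
# its state would look roughly like:
#
#     version_uuid = reproducible_session.open(session_factory,
#                                              uuid_=case_study_uuid,
#                                              recover_previous_state=True,
#                                              cr_new=CreateNew.VERSION,
#                                              allow_saving=True)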