def test_024_maddalena_dataset(self):
    """Execute the MAGIC_n_1_CC_Spain case study and leave a hook for future checks."""
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/MAGIC_n_1_CC_Spain.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_022_processor_scalings(self):
    """Run the processor-scalings example and dump the resulting model as JSON."""
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/14_processor_scalings_example.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    json_string = export_model_to_json(session.state)
    print(json_string)
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_023_solving(self):
    """Execute the Almeria (NEW) upscale case, then run the solver over the state."""
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/06_upscale_almeria_NEW.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # NOTE(review): "issues" is never asserted — confirm whether checks were intended here
    issues = prepare_and_solve_model(session.state)
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # Release the interactive session
    session.close_db_session()
def test_019_import_commands(self):
    """
    Testing import commands
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/12_import_commands_example.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_013_execute_file_v2_seven(self):
    """
    Parsing of Custom datasets
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/07_custom_datasets.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_011_execute_file_v2_five(self):
    """
    Dataset processing using old commands
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/05_caso_energia_eu_old_commands.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_009_execute_file_v2_three(self):
    """
    Soslaires, without parameters
    With regard to the two previous, introduces the syntax of a Selector of many Processors
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/03_Soslaires_no_parameters.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_001_execute_file_one(self):
    """
    A file containing QQs for three different sets of processor: Crop, Farm, AgrarianRegion
    (extracted from Almeria case study)
    Test number of processors read for each category, using processor sets and
    PartialRetrievalDictionary
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/test_spreadsheet_1.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # Exactly three processor sets must have been registered
    self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_017_execute_file_v2_eleven(self):
    """
    Dataset queries using Mappings, then use of resulting Datasets to create
    Processors and Interfaces
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/11_dataset_to_musiasem_maddalena.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_021_export_to_json(self):
    """
    Testing model export
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/03_Soslaires_no_parameters.xlsx"
    # Alternative inputs, kept for manual runs:
    # file_path = base_dir + "/z_input_files/v2/02_declare_hierarchies_and_cloning_and_scaling.xlsx"
    # file_path = base_dir + "/z_input_files/v2/06_upscale_almeria.xlsx"
    # file_path = base_dir + "/z_input_files/test_spreadsheet_4.xlsx"
    # file_path = base_dir + "/z_input_files/v2/08_caso_energia_eu_new_commands.xlsx"
    # file_path = base_dir + "/z_input_files/v2/09_many_to_many_mapping.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    json_string = export_model_to_json(session.state)
    print(json_string)
    session.close_db_session()
def test_016_execute_file_v2_ten(self):
    """
    Upscaling using Instantiations. Translation of Louisa's file
    "Electricity state of the play 16.03.xlsm"
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/10_electricity_state_of_the_play.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_007_execute_file_v2_one(self):
    """
    Two connected Processors
    Test parsing and execution of a file with basic commands, and only literals
    (very basic syntax)
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/01_declare_two_connected_processors.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_018_many_to_many_mappings(self):
    """
    Testing many to many mappings

    Executes the example spreadsheet and writes the mapped dataset "ds1" to a
    CSV file in the platform's temporary directory for manual inspection.
    :return:
    """
    import tempfile  # local import: only this test writes an inspection file
    file_path = os.path.dirname(os.path.abspath(__file__)) + "/z_input_files/v2/09_many_to_many_mapping.xlsx"
    isess = execute_file(file_path, generator_type="spreadsheet")
    # Check State of things
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(isess.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Portable output location (was the hard-coded, POSIX-only "/tmp")
    out_path = os.path.join(tempfile.gettempdir(), "09_many_to_many_mapping_ds1_results.csv")
    datasets["ds1"].data.to_csv(out_path, index=False)
    # Close interactive session
    isess.close_db_session()
def test_006_execute_file_five(self):
    """
    Parameters
    Simple Expression evaluation in QQs
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/mapping_example_maddalena.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # A single processor set is expected
    self.assertEqual(len(p_sets), 1)
    # Release the interactive session
    session.close_db_session()
def test_014_execute_file_v2_eight(self):
    """
    Dataset queries using Mappings

    Executes the case-sensitive EU energy spreadsheet and writes the resulting
    datasets "ds1" and "ds2" to CSV files in the platform's temporary directory
    for manual inspection.
    :return:
    """
    import tempfile  # local import: only this test writes inspection files
    file_path = os.path.dirname(os.path.abspath(__file__)) + "/z_input_files/v2/08_caso_energia_eu_new_commands_CASE_SENSITIVE.xlsx"
    isess = execute_file(file_path, generator_type="spreadsheet")
    # Check State of things
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(isess.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Portable output location (was the hard-coded, POSIX-only "/tmp")
    tmp_dir = tempfile.gettempdir()
    datasets["ds1"].data.to_csv(os.path.join(tmp_dir, "08_caso_energia_eu_new_commands_ds1_results.csv"))
    datasets["ds2"].data.to_csv(os.path.join(tmp_dir, "08_caso_energia_eu_new_commands_ds2_results.csv"))
    # Close interactive session
    isess.close_db_session()
def test_008_execute_file_v2_two(self):
    """
    Processors from Soslaires
    Test parsing and execution of a file with basic commands, and only literals
    (very basic syntax)
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/02_declare_hierarchies_and_cloning_and_scaling.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # Dump the registered processor names for visual inspection
    processor_dict = get_processor_names_to_processors_dictionary(glb_idx)
    for p in processor_dict:
        print(p)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_012_execute_file_v2_six(self):
    """
    Almeria upscaling with new syntax
    * References
    * InterfaceTypes
    * BareProcessors
      * Dynamic attribute columns
    * Interfaces
    * Old Upscale (really efficient)
    :return:
    """
    base_dir = os.path.dirname(os.path.abspath(__file__))
    file_path = base_dir + "/z_input_files/v2/06_upscale_almeria.xlsx"
    session = execute_file(file_path, generator_type="spreadsheet")
    # Inspect the resulting state
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(session.state)
    # TODO Check things!!!
    # self.assertEqual(len(p_sets), 3)
    # Release the interactive session
    session.close_db_session()
def test_002_execute_file_two(self):
    """
    A file containing QQs for three different sets of processor: Crop, Farm, AgrarianRegion
    AND UPSCALING (extracted from Almeria case study)
    Test number of processors read for each category, using processor sets and
    PartialRetrievalDictionary. Also round-trips the state through
    serialize/deserialize before checking.
    :return:
    """
    import tempfile  # local import: only this test writes a serialized-state file
    file_path = os.path.dirname(os.path.abspath(__file__)) + "/z_input_files/test_spreadsheet_upscale_reduced.xlsx"
    isess = execute_file(file_path, generator_type="spreadsheet")
    # Save state; also exercises the serialize/deserialize round-trip below
    s = serialize_state(isess.state)
    # Portable output location (was a hard-coded user home directory that
    # breaks on any other machine)
    with open(os.path.join(tempfile.gettempdir(), "MiniAlmeria.serialized"), "wt") as f:
        f.write(s)
    local_state = deserialize_state(s)
    # Check State of things
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(local_state)
    # Three processor sets
    self.assertEqual(len(p_sets), 3)
    # Close interactive session
    isess.close_db_session()
def test_005_execute_file_five(self):
    """
    Just Structure. From Soslaires.

    Executes the Soslaires spreadsheet, round-trips the state through
    serialization, asserts the number of processor sets, and dumps every kind
    of registered object/relation for visual inspection.
    :return:
    """
    import tempfile  # local import: only this test writes a serialized-state file
    file_path = os.path.dirname(os.path.abspath(__file__)) + "/z_input_files/Soslaires.xlsx"
    isess = execute_file(file_path, generator_type="spreadsheet")
    # Save state; also exercises the serialize/deserialize round-trip below
    s = serialize_state(isess.state)
    # Portable output location (was a hard-coded user home directory that
    # breaks on any other machine)
    with open(os.path.join(tempfile.gettempdir(), "Soslaires.serialized"), "wt") as f:
        f.write(s)
    local_state = deserialize_state(s)
    # Check State of things
    glb_idx, p_sets, hh, datasets, mappings = get_case_study_registry_objects(local_state)
    # Four processor sets
    self.assertEqual(len(p_sets), 4)
    # Obtain all Observers
    print("---- Observer ----")
    for i in glb_idx.get(Observer.partial_key()):
        print(i.name)
    # Obtain all processors
    print("---- Processor ----")
    for i in glb_idx.get(Processor.partial_key()):
        print(i.name)
    # Obtain all FactorTypes
    print("---- FactorType ----")
    for i in glb_idx.get(FactorType.partial_key()):
        print(i.name)
    # Obtain all Factors
    print("---- Factor ----")
    for i in glb_idx.get(Factor.partial_key()):
        print(i.processor.name + ":" + i.taxon.name)
    # Obtain all Quantitative Observations
    print("---- Quantities ----")
    for i in glb_idx.get(FactorQuantitativeObservation.partial_key()):
        print(i.factor.processor.name + ":" + i.factor.taxon.name + "= " +
              str(i.value.expression if i.value else ""))
    # Obtain all part-of Relation Observations
    print("---- Part-of relations (P-P) ----")
    for i in glb_idx.get(ProcessorsRelationPartOfObservation.partial_key()):
        # "\\/" keeps the original " \/ " output while avoiding an invalid
        # escape sequence (SyntaxWarning in modern Python)
        print(i.parent_processor.name + " \\/ " + i.child_processor.name)
    # Obtain all undirected flow Relation Observations
    print("---- Undirected flow relations (P-P) ----")
    for i in glb_idx.get(ProcessorsRelationUndirectedFlowObservation.partial_key()):
        print(i.source_processor.name + " <> " + i.target_processor.name)
    # Obtain all upscale Relation Observations
    print("---- Upscale relations (P-P) ----")
    for i in glb_idx.get(ProcessorsRelationUpscaleObservation.partial_key()):
        print(i.parent_processor.name + " \\/ " + i.child_processor.name +
              "(" + i.factor_name + ": " + str(i.quantity) + ")")
    # Obtain all directed flow Relation Observations
    print("---- Directed flow relations (F-F) ----")
    for i in glb_idx.get(FactorsRelationDirectedFlowObservation.partial_key()):
        print(i.source_factor.processor.name + ":" + i.source_factor.taxon.name + " -> " +
              i.target_factor.processor.name + ":" + i.target_factor.taxon.name +
              (" (" + str(i.weight) + ")" if i.weight else ""))
    # Obtain all hierarchies
    print("---- FactorType Hierarchies ----")
    for i in glb_idx.get(Hierarchy.partial_key()):
        print(i.name)
    # Close interactive session
    isess.close_db_session()