def G():
    """Yield a parameterized AnalysisGraph built from the test statements.

    Generator-style test fixture: builds the CAG, draws 200 prior samples,
    attaches indicators, and parameterizes them for December 2014.
    """
    graph = AnalysisGraph.from_statements(get_valid_statements_for_modeling(STS))
    graph.res = 200
    graph.sample_from_prior()
    graph.map_concepts_to_indicators()
    graph.parameterize(year=2014, month=12)
    yield graph
def G():
    """Yield a dressed, parameterized AnalysisGraph (also pickled to disk).

    Generator-style test fixture: builds the CAG from STS, assembles the
    transition model, samples the prior, attaches indicators, parameterizes
    for December 2014, and writes the graph out via ``to_pickle``.
    """
    graph = AnalysisGraph.from_statements(get_valid_statements_for_modeling(STS))
    graph.assemble_transition_model_from_gradable_adjectives()
    graph.sample_from_prior()
    graph.map_concepts_to_indicators()
    graph.parameterize(year=2014, month=12)
    graph.to_pickle()
    yield graph
def createNewModel():
    """Create a new Delphi model from the request body and persist it.

    Expects a JSON payload in Uncharted serialized-dict form with an ``id``
    key; stores the resulting graph via SQL and reports success.
    """
    payload = json.loads(request.data)
    model = AnalysisGraph.from_uncharted_json_serialized_dict(payload)
    model.sample_from_prior()
    model.id = payload["id"]
    model.to_sql(app=current_app)
    return jsonify({"status": "success"})
def G():
    """Yield a dressed AnalysisGraph parameterized for 2014-12-01.

    Generator-style test fixture; also pickles the graph and writes a BMI
    config file as side effects.
    """
    graph = AnalysisGraph.from_statements(get_valid_statements_for_modeling(STS))
    graph.assemble_transition_model_from_gradable_adjectives()
    graph.map_concepts_to_indicators()
    graph.parameterize(date(2014, 12, 1))
    graph.to_pickle()
    graph.create_bmi_config_file()
    yield graph
def G_eval():
    """Yield an AnalysisGraph built from [s2] for evaluation tests.

    Attaches indicators, samples 200 draws from the prior, parameterizes
    for September 2013, and pulls indicator time series.
    """
    graph = AnalysisGraph.from_statements([s2])
    graph.map_concepts_to_indicators()
    graph.res = 200
    graph.sample_from_prior()
    graph.parameterize(year=2013, month=9)
    graph.get_timeseries_values_for_indicators()
    yield graph
def createNewICM():
    """Create a new ICM from the request body and return its metadata.

    Builds the graph from the Uncharted JSON payload, dresses and samples
    it, persists it via SQL, then looks up the stored ICM metadata by the
    graph's id. The internal ``model_id`` key is stripped before returning.
    """
    payload = json.loads(request.data)
    graph = AnalysisGraph.from_uncharted_json_serialized_dict(payload)
    graph.assemble_transition_model_from_gradable_adjectives()
    graph.sample_from_prior()
    graph.to_sql(app=current_app)
    metadata = ICMMetadata.query.filter_by(id=graph.id).first().deserialize()
    del metadata["model_id"]
    return jsonify(metadata)
def test_from_statements_file():
    """Round-trip STS through a pickle file and rebuild the CAG from it.

    Verifies that statements loaded back from disk produce the expected
    conflict -> food-security graph.
    """
    test_statements_file = "test_statements.pkl"
    try:
        with open(test_statements_file, "wb") as f:
            pickle.dump(STS, f)
        with open(test_statements_file, "rb") as f:
            sts_from_file = pickle.load(f)
        G = AnalysisGraph.from_statements(
            sts_from_file, assign_default_polarities=False
        )
        assert set(G.nodes()) == {conflict_string, food_security_string}
        assert set(G.edges()) == {(conflict_string, food_security_string)}
    finally:
        # Fix: the original removed the file only on success, leaking
        # test_statements.pkl whenever an assertion failed. Clean up
        # unconditionally so reruns start from a clean directory.
        os.remove(test_statements_file)
def __init__(self, G: AnalysisGraph):
    """Initialize bookkeeping state around the given AnalysisGraph.

    Parameters
    ----------
    G : AnalysisGraph
        The causal analysis graph this object operates on.
    """
    self.G = G
    # NOTE(review): presumably a sampled transition matrix, set later in the
    # workflow — confirm; it is only initialized to None here.
    self.A = None
    # NOTE(review): the int/float annotations below are initialized to None,
    # so they are effectively Optional — confirm before relying on them.
    self.n_timesteps: int = None
    self.observed_state_sequence = None
    # Current, candidate, and best scores (filled in during search/sampling).
    self.score: float = None
    self.candidate_score: float = None
    self.max_score: float = None
    # Time delta between successive steps.
    self.delta_t = 1.0
    # All ordered pairs of indices over the 2 * len(G) latent state
    # components (each node contributes two components).
    self.index_permutations = list(permutations(range(2 * len(G)), 2))
    # Default initial latent state vector supplied by the graph.
    self.s0 = G.construct_default_initial_state()
    self.original_score = None
def create_reference_CAG(inputPickleFile, outputPickleFile):
    """Build a reference CAG from pickled statements and pickle the result.

    Loads INDRA statements from ``inputPickleFile``, filters them at a 0.9
    cutoff, merges the duplicate precipitation concepts, strips every edge
    into the merged precipitation node, and writes the graph to
    ``outputPickleFile``.
    """
    with open(inputPickleFile, "rb") as f:
        all_sts = pickle.load(f)

    statements = filter_and_process_statements(all_sts, 0.9)
    cag = AnalysisGraph.from_statements(statements)

    # Collapse the two precipitation concepts into a single node.
    cag.merge_nodes(
        "UN/events/natural/weather/precipitation",
        "UN/events/weather/precipitation",
    )

    # Drop every incoming edge to the merged precipitation node.
    for node in cag.nodes():
        cag.delete_edge(node, "UN/events/weather/precipitation")

    with open(outputPickleFile, "wb") as f:
        pickle.dump(cag, f)
def execute(args):
    """Run the dressed CAG forward and write latent state samples as CSV.

    Loads the pickled graph from ``args.input_dressed_cag``, initializes it
    with ``args.input_variables``, then updates it ``args.steps`` times,
    appending the latent state to ``args.output_sequences`` after each step.
    """
    # Fix: dropped the unused local imports (`pandas.read_csv` and
    # `get_latent_state_components`) — neither name was referenced.
    from .AnalysisGraph import AnalysisGraph
    from .execution import _write_latent_state
    from .bmi import initialize, update

    print("Executing model")
    G = AnalysisGraph.from_pickle(args.input_dressed_cag)
    initialize(G, args.input_variables)
    with open(args.output_sequences, "w") as f:
        # Header row: one sample column per prior sample (G.res of them).
        f.write(
            ",".join(
                ["seq_no", "variable"]
                + [f"sample_{str(i)}" for i in range(1, G.res + 1)]
            )
            + "\n"
        )
        for t in range(args.steps):
            update(G)
            _write_latent_state(G, f)
def execute(args):
    """Execute the dressed CAG and write latent state samples as CSV.

    Loads the pickled graph, assembles its transition model, samples from
    the prior, initializes it from ``args.input_variables``, then steps it
    ``args.steps`` times, writing the latent state after each update.
    """
    # Fix: removed the unused local import `from pandas import read_csv`.
    print("Executing model")
    G = AnalysisGraph.from_pickle(args.input_dressed_cag)
    G.assemble_transition_model_from_gradable_adjectives()
    G.sample_from_prior()
    G.initialize(args.input_variables)
    with open(args.output_sequences, "w") as f:
        # Header row: one sample column per prior sample (G.res of them).
        f.write(
            ",".join(
                ["seq_no", "variable"]
                + [f"sample_{str(i)}" for i in range(1, G.res + 1)]
            )
            + "\n"
        )
        for t in range(args.steps):
            G.update()
            _write_latent_state(G, f)
def create_reference_CAG(inputPickleFile, outputPickleFile):
    """Filter pickled statements into a CAG, prune concepts, pickle it.

    Loads INDRA statements from ``inputPickleFile``, filters them, removes
    a fixed set of out-of-scope concept nodes, and writes the resulting
    graph to ``outputPickleFile``.
    """
    with open(inputPickleFile, "rb") as f:
        all_sts = pickle.load(f)

    # Second and third arguments control the grounding score and belief
    # score cutoffs, respectively.
    filtered_sts = filter_and_process_statements(all_sts, 0.5, 0.7)
    G = AnalysisGraph.from_statements(filtered_sts)

    # Concepts judged out of scope for this reference CAG.
    nodes_to_drop = (
        "UN/events/human/physical_insecurity",
        "UN/entities/human/government/government_actions/duty",
        "UN/entities/human/livelihood",
        "UN/entities/GPE",
        "UN/events/crisis",
        "UN/entities/natural/biology/ecosystem",
        "UN/entities/natural/crop_technology/management",
        "UN/events/nature_impact/climate_change_mitigation",
        "UN/entities/food_availability",
        "UN/entities/human/food/food_insecurity",
        "UN/entities/human/financial/economic/fuel",
    )
    for node in nodes_to_drop:
        G.delete_node(node)

    with open(outputPickleFile, "wb") as f:
        pickle.dump(G, f)
def create(args):
    """Create, dress, parameterize, and export a Delphi model.

    Reads INDRA statements from ``args.indra_statements``, infers the
    transition model from adjective data, maps concepts to indicators,
    parameterizes for January 1 of ``args.year``, and exports the model in
    "full" format to the paths given on ``args``.
    """
    print("Creating model")
    from delphi.assembly import get_data
    from delphi.parameterization import parameterize
    from delphi.quantification import map_concepts_to_indicators
    from datetime import datetime
    from delphi.AnalysisGraph import AnalysisGraph
    from delphi.export import export

    with open(args.indra_statements, "rb") as f:
        sts = pickle.load(f)

    model = AnalysisGraph.from_statements(sts)
    model.infer_transition_model(args.adjective_data)
    model = map_concepts_to_indicators(model, 2)
    model = parameterize(model, datetime(args.year, 1, 1), get_data(args.data))
    export(
        model,
        format="full",
        json_file=args.output_cag_json,
        pickle_file=args.output_dressed_cag,
        variables_file=args.output_variables,
    )
def test_from_statements():
    """The CAG built from STS has exactly the conflict -> food-security edge."""
    cag = AnalysisGraph.from_statements(STS)
    assert set(cag.nodes()) == {conflict_string, food_security_string}
    assert set(cag.edges()) == {(conflict_string, food_security_string)}
def G():
    """Return a CAG built from ``sts`` with its transition model inferred."""
    graph = AnalysisGraph.from_statements(sts)
    graph.infer_transition_model()
    return graph
def G_unit():
    """Yield a minimal AnalysisGraph built from [s3] with indicators mapped."""
    graph = AnalysisGraph.from_statements([s3])
    graph.map_concepts_to_indicators()
    yield graph
def create_observed_state(G: AnalysisGraph) -> Dict:
    """Create a dict corresponding to an observed state vector.

    Maps each node name to a dict of its indicators' name -> value pairs.
    """
    observed_state = {}
    for node, attrs in G.nodes(data=True):
        observed_state[node] = {
            ind.name: ind.value for ind in attrs["indicators"].values()
        }
    return observed_state
from delphi.AnalysisGraph import AnalysisGraph

# Build a CAG directly from a natural-language sentence, dress it, attach
# indicators parameterized for South Sudan (April 2017), and render the
# annotated graph to CAG.png.
text = (
    "Significantly increased conflict seen in "
    "South Sudan forced many families to flee in 2017."
)
G = AnalysisGraph.from_text(text)
G.assemble_transition_model_from_gradable_adjectives()
G.map_concepts_to_indicators()
G.parameterize(country="South Sudan", year=2017, month=4)
A = G.to_agraph(indicators=True, indicator_values=True)
A.draw("CAG.png", prog="dot")
import sys
import pickle

from delphi.AnalysisGraph import AnalysisGraph
from delphi.export import to_agraph

# Build a CAG from an Uncharted/CauseMos JSON file (argv[1]), dress it, draw
# it both with and without indicators, and pickle the result to argv[2].
G = AnalysisGraph.from_uncharted_json_file(sys.argv[1])

# TODO Make sure to get indicators from DSSAT
# G.map_concepts_to_indicators()
# G.set_indicator("UN/events/weather/precipitation", "Historical Average Total Daily Rainfall (Maize)", "DSSAT")
# G.set_indicator("UN/events/human/agriculture/food_production",
#     "Historical Production (Maize)", "DSSAT")
# G.set_indicator("UN/entities/human/food/food_security", "IPC Phase Classification", "FEWSNET")
# G.set_indicator("UN/entities/food_availability", "Production, Meat indigenous, total", "FAO")
# G.set_indicator("UN/entities/human/financial/economic/market", "Inflation Rate", "ieconomics.com")
# G.set_indicator("UN/events/human/death", "Battle-related deaths", "WDI")
# G.parameterize(year=2017, month=4)

G.assemble_transition_model_from_gradable_adjectives()
G.sample_from_prior()

# Plain CAG.
A = to_agraph(G)
A.draw("CauseMos_CAG.png", prog="dot")

# CAG annotated with indicators and their values.
A = to_agraph(G, indicators=True, indicator_values=True)
A.draw("CauseMos_CAG_with_indicators.png", prog="dot")

with open(sys.argv[2], "wb") as f:
    pickle.dump(G, f)
"obj_adjectives": event2.delta.adjectives, }), ) events = { concept: make_event(concept, attrs) for concept, attrs in concepts.items() } s1 = make_statement(events["conflict"], events["food security"]) s2 = make_statement(events["migration"], events["product"]) STS = [s1, s2] G = AnalysisGraph.from_statements(get_valid_statements_for_modeling(STS)) G.res = 1000 G.sample_from_prior() G.map_concepts_to_indicators() G.parameterize(year=2014, month=12) def test_inference_with_synthetic_data(G): """ Smokescreen test for sampler. """ # Generate synthetic data # Sample a transition matrix from the prior A = G.sample_from_prior()[0] # Get the original value of our parameter of interest (the ground truth # value that we can use to evaluate our inference.
def test_from_statements():
    """A CAG built without default polarities has the expected nodes/edges."""
    cag = AnalysisGraph.from_statements(STS, assign_default_polarities=False)
    assert set(cag.nodes()) == {conflict, food_security}
    assert set(cag.edges()) == {(conflict, food_security)}
def G_unit():
    """Yield an AnalysisGraph built from [s3] with 200 prior samples drawn."""
    graph = AnalysisGraph.from_statements([s3])
    graph.map_concepts_to_indicators()
    graph.res = 200
    graph.sample_from_prior()
    yield graph