Example #1
def load_model(n, prop):
    """load a pickled ukf run for population n and proportion observed prop"""
    f_name = ex1_pickle_name(n, prop)
    source = "../test_pickles/"
    do_pickle = True

    # try loading a class_dict pickle first; fall back to a full class instance
    try:
        u = pickle_main("dict_" + f_name, source, do_pickle)
    except Exception:
        u = pickle_main(f_name, source, do_pickle)

    return u
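A minimal usage sketch, not part of the original source: it assumes the experiment-1 module defining `load_model` (and with it `ex1_pickle_name` and `pickle_main`) is importable and that a matching pickle exists under `../test_pickles/`. The argument values are illustrative only.

# hypothetical call: load the pickled run for 30 agents, half observed
u = load_model(n=30, prop=0.5)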
Example #2
def ex3_main(n, recall, do_pickle, source, destination):
    """main function to run experiment 3
    
    - build model and ukf dictionary parameters based on n
    - initiate Model and ukf_ss based on new dictionaries
    - run ABM with filtering on top
    - make plots using finished run data
    
    Parameters
    ------
    n : int
        population `n`
    
    recall, do_pickle : bool
        `recall` a previous run or `do_pickle` pickle a new one?
        
    source, destination : str
        `source` where to load/save any pickles and the `destination` of
        any plots
    """

    if not recall:
        model_params = configs.model_params
        ukf_params = configs.ukf_params
        model_params, ukf_params, base_model = ex3_params(
            n, model_params, ukf_params)
        # one copy of the base model per sigma point (4n + 1 of them)
        base_models = []
        for i in range(int((4 * n) + 1)):
            base_models.append(deepcopy(base_model))

        print(f"Population: {n}")
        u = ukf_ss(model_params, ukf_params, base_model, base_models)
        u.main()
        pickle_main(ukf_params["file_name"], source, do_pickle, u)

    else:
        "if recalling, load a pickle."
        f_name = ex3_pickle_name(n)

        "try loading class_dicts first. If no dict then class instance."
        try:
            u = pickle_main("dict_" + f_name, source, do_pickle)
        except:
            u = pickle_main(f_name, source, do_pickle)

        model_params, ukf_params = u.model_params, u.ukf_params

    ex3_plots(u, destination, "ukf_", True, False)

    return u
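For illustration, a hypothetical invocation of `ex3_main`; the paths are placeholders and not taken from the original source:

# hypothetical call: fresh 30-agent run, pickled to source, plots to destination
u = ex3_main(n=30, recall=False, do_pickle=True,
             source="../pickles/", destination="../plots/")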
Example #3
def ex2_main(n, bin_size, recall, do_pickle, source, destination):
    """main function to run experiment 2
    
    - build model and ukf dictionary parameters based on n and bin_size
    - initiate Model and ukf_ss based on new dictionaries
    - run ABM with filtering on top
    - make plots using finished run data
    
    Parameters
    ------
    n : int
        population `n`
    
    bin_size : float
        aggregate grid square size `bin_size`
    
    recall, do_pickle : bool
        `recall` a previous run or `do_pickle` pickle a new one?
        
    source, destination : str
        `source` where to load/save any pickles and the `destination` of
        any plots
    """

    if not recall:
        model_params = configs.model_params
        ukf_params = configs.ukf_params

        model_params, ukf_params, base_model = aggregate_params(
            n, bin_size, model_params, ukf_params)

        print(f"Population: {n}")
        print(f"Square grid size: {bin_size}")

        u = ukf_ss(model_params, ukf_params, base_model)
        u.main()
        pickle_main(ukf_params["file_name"], source, do_pickle, u)

    else:
        f_name = ex2_pickle_name(n, bin_size)
        # try loading a class_dict pickle first; fall back to a class instance
        try:
            u = pickle_main("dict_" + f_name, source, do_pickle)
        except Exception:
            print(f_name)
            print("dictionary not found. trying to load class")
            u = pickle_main(f_name, source, do_pickle)

    ex2_plots(u, destination, "agg_ukf_", True, True)
    return u
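A hedged sketch of the recall path, which skips the ABM run and loads a previous pickle instead; again the paths and parameter values are assumed, not from the source:

# hypothetical call: recall a pickled 30-agent run with grid squares of size 25
u = ex2_main(n=30, bin_size=25, recall=True, do_pickle=False,
             source="../pickles/", destination="../plots/")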
Example #4
def ex2_main(n, bin_size, recall, do_pickle, source, destination):
    """main function to run experiment 2
    
    - build model and ukf dictionary parameters based on n and bin_size
    - initiate Model and ukf_ss based on new dictionaries
    - run ABM with filtering on top
    - make plots using finished run data
    
    Parameters
    ------
    n : int
        population `n`
    
    bin_size : float
        aggregate grid square size `bin_size`
    
    recall, do_pickle : bool
        `recall` a previous run or `do_pickle` pickle a new one?
        
    source, destination : str
        `source` where to load/save any pickles and the `destination` of
        any plots
    """

    if not recall:

        model_params = configs.model_params
        model_params["random_seed"] = 15
        ukf_params = configs.ukf_params

        model_params, ukf_params, base_model = aggregate_params(
            n, bin_size, model_params, ukf_params)

        batch = False
        ukf_params["batch"] = batch
        if batch:
            print(
                "WARNING: batch set to True; will not generate a random model each time."
            )
            seed = 50
            file_name = f"batch_test_{n}_{seed}.pkl"
            try:
                batch_truths, batch_start_model = batch_load(file_name)
                print("batch data found.")
            except Exception:
                print("no model found. generating one with given seed")
                batch_save(model_params, n, seed)
                batch_truths, batch_start_model = batch_load(file_name)
                print("new model generated.")
                # fall back to OS entropy only when no seed is supplied
                new_seed = int.from_bytes(
                    os.urandom(4),
                    byteorder='little') if seed is None else seed
                np.random.seed(new_seed)

            base_model = batch_start_model

        print(f"Population: {n}")
        print(f"Square grid size: {bin_size}")

        u = ukf_ss(model_params, ukf_params, base_model)
        u.main()
        if do_pickle:
            pickle_main(ukf_params["file_name"], source, do_pickle, u)

    else:
        f_name = ex2_pickle_name(n, bin_size)
        # try loading a class_dict pickle first; fall back to a class instance
        try:
            u = pickle_main("dict_" + f_name, source, do_pickle)
        except Exception:
            print(f_name)
            print("dictionary not found. trying to load class")
            u = pickle_main(f_name, source, do_pickle)

    ex2_plots(u, destination, "agg_ukf_", True, False)
    return u
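The seeding logic in the except branch above reduces to a common pattern: keep a fixed seed when one is supplied, otherwise draw fresh entropy from the OS. A self-contained sketch of that pattern (the helper name `seed_numpy` is hypothetical):

import os
import numpy as np

def seed_numpy(seed=None):
    # fall back to 4 bytes of OS entropy when no explicit seed is given
    new_seed = int.from_bytes(os.urandom(4), byteorder="little") if seed is None else seed
    np.random.seed(new_seed)
    return new_seed

seed_numpy(50)  # reproducible runs
seed_numpy()    # fresh randomness each call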
Example #5
    def data_extractor(self):
        """pull multiple class runs into arrays for analysis
        
        This function looks awful... because it is.
        Here's what it does:
            
        - build grand dictionary L2
        - loop over first parameter e.g. population size
            - create sub dictionary L2[i] for given i
            - loop over second parameter e.g. proportion observed (prop)
                - create placeholder list sub_L2 to store data for given i and j.
                - load each ukf pickle with the given i and j.
                - for each pickle extract the data, calculate L2s, and put the
                  grand median L2 into sub_L2.
                - put list sub_L2 as a numpy array into dictionary L2[i]
                  with key j.
        
        This outputs a dictionary where, for every pair of keys i and j, we
        acquire an array of grand medians.
        
        Returns
        ------
        L2 : dict
             dictionary of `L2` distances between ground truth and ukf predictions 
             over 2 parameters. We have keys [i][j] corresponding to the ith 
             value of parameter 1 (e.g. population) and jth value of parameter 2
             (e.g. proportion observed). Each pair of keys contains a numpy
             array of scalars; each scalar is the grand median of an L2
             distance matrix output by ukf_plots.L2s.
             
        """
        "names of first and second parameters. e.g. agents and prop"
        keys = self.param_keys
        "placeholder dictionary for all parameters"
        L2 = {}
        "loop over first parameter. usually agents."
        for i in self.p1:
            print(i)
            "sub dictionary for parameter i"
            L2[i] = {}
            for j in self.p2:
                "file names for glob to find. note wildcard * is needed"
                f_name = self.source + f"*{keys[0]}_*{i}_{keys[1]}_*{j}-*"
                "find all files with given i and j"
                files = glob.glob(f_name)
                "placeholder list for grand medians of UKF runs with parameters i and j"
                sub_L2 = []
                for file in files:
                    "open pickle"
                    f = open(file, "rb")
                    u = pickle_main(
                        os.path.split(file)[1],
                        os.path.split(file)[0] + "/", True)
                    f.close()
                    "pull raw data"
                    truth, preds = self.depickle_data_parser(u)
                    "find L2 distances"
                    distances = L2_parser(truth[::u.sample_rate, :],
                                          preds[::u.sample_rate, :])
                    if self.restrict is not None:
                        distances = self.restrict(distances, u, self.kwargs)

                    "add grand median to sub_L2"
                    sub_L2.append(np.nanmedian(np.nanmedian(distances,
                                                            axis=0)))
                    "stack list of grand medians as an nx1 vector array"
                    "put array into grand dictionary with keys i and j"
                L2[i][j] = np.hstack(sub_L2)

        return L2
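To make the "grand median" concrete, here is a self-contained sketch of the reduction applied to each pickle, with random stand-ins for the truth and prediction arrays (the real pipeline obtains these from `depickle_data_parser` and the distance matrix from `L2_parser`):

import numpy as np

rng = np.random.default_rng(8)
truth = rng.random((100, 2 * 30))            # 100 time steps, 30 agents' (x, y)
preds = truth + rng.normal(0, 0.1, truth.shape)

# per-step, per-agent Euclidean error, the role L2_parser plays above
diff = (truth - preds).reshape(100, 30, 2)
distances = np.linalg.norm(diff, axis=2)     # shape (100, 30)

# median over time per agent, then median over agents: one scalar per run
grand_median = np.nanmedian(np.nanmedian(distances, axis=0))
print(grand_median)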