Example 1
0
 def environment_sampler(
     constraint_input="constraint",
     solution_input="solution",
     satisfaction_input="satisfaction",
     sampler_transform=identity,
 ):
     """
     Object? -> Object? -> Object? -> (Sampler a -> Sampler a)?
         -> FeedDictSampler ([Float], [Float], [Float])
     Build a sampler of random constraint/solution pairs, each paired with
     the satisfaction flag of its constraint.  The raw sampler is first
     passed through the user-supplied transform, then wrapped in a
     FeedDictSampler keyed by the given input names.
     """
     io = IO("data/datasets/10x7/")
     # Restore every saved pair object lazily, then flatten each into a
     # (constraint, solution, satisfied) triple for the sampler.
     file_names = io.all_files(remove_extensions=True, include_sub_folders=False)
     pairs = (io.restore_object(file_name) for file_name in file_names)
     dataset_sampler = DataSetSampler(
         [(pair.constraint, pair.solution, True) for pair in pairs])
     return FeedDictSampler(
         sampler_transform(dataset_sampler),
         {
             constraint_input: lambda t: t[0],
             solution_input: lambda t: t[1],
             satisfaction_input: lambda t: t[2],
         },
     )
Example 2
0
 def save(self, directory, file_name):
     """
     String -> String -> ()
     Write the points that make up this data set to a file; they can later
     be reloaded with DataSetSampler.restore(...).  The file name must not
     include an extension.
     """
     output = IO(directory, create_if_missing=True)
     output.save_object(self.points, file_name)
Example 3
0
 def restore(self):
     """
     () -> ()
     Reload the values of the network's variables from its save location.
     """
     # Fails early if no save location has been configured.
     self._check_save_location()
     session_io = IO(self.save_location, create_if_missing=True)
     session_io.restore_session(
         self.session, 'parameters', variables=self.get_variables())
Example 4
0
 def restore(directory, file_name):
     """
     String -> String -> DataSetSampler
     Load a data set sampler previously saved under the given directory and
     file name.  The file name must not include an extension.
     """
     loader = IO(directory, create_if_missing=False)
     return DataSetSampler(loader.restore_object(file_name))
Example 5
0
 def __init__(self, log_folder, create_folder_if_missing=False):
     """
     String? -> Experiment
     Create an Experiment whose files are stored in the given directory.
     A warning is printed when no directory is supplied.
     """
     if log_folder is None:
         print("WARNING: no log folder provided to experiment")
     else:
         self.log_folder = log_folder
         self.io = IO(log_folder, create_folder_if_missing)
Example 6
0
        def __init__(self, data_folder=None, training_load_location=None,
                training_set_size=-1, validation_load_location=None,
                validation_set_size=-1):
            """
            String? -> Int? -> String? -> Int? -> DataBuilder
            Either set may be loaded as a sampler from a pickle file by
            giving its path, relative to a base data folder, in the
            corresponding load location parameter.  A set with no load
            location will be created instead; a set size of -1 means its
            examples are generated procedurally.
            """
            self.data_folder = data_folder
            self.training_load_location = training_load_location
            self.training_set_size = training_set_size
            self.validation_load_location = validation_load_location
            self.validation_set_size = validation_set_size

            # A root IO handle only exists when a base data folder was given.
            self.root_io = None if self.data_folder is None else IO(self.data_folder)

            # Samplers and sets are populated later by the builder methods.
            for attribute in ("training_set_sampler", "validation_set_sampler",
                              "loaded_training_set", "loaded_validation_set",
                              "procedural_training_set",
                              "procedural_validation_set"):
                setattr(self, attribute, None)
Example 7
0
def run():
    """
    () -> ()
    For each type of builder available to a LearnedObjectiveFunction, run
    experiments on a number of LOFs while varying only that builder.
    Prompts for confirmation before starting; returns early unless the
    user answers "y".
    """
    n_experiments = experiments_to_run()
    prompt = (
        "This experiment will run {} configurations, saved in loftuning/{}/.  "
        .format(n_experiments, EXPERIMENT_ID) + "Are you sure? (y/N) ")
    if input(prompt) != "y":
        return None

    architectures = get_sample_architectures(N_SAMPLE_ARCHITECTURES)
    for architecture_index, architecture in enumerate(architectures):
        print("--- RUNNING ARCHITECTURE {}/{}".format(architecture_index + 1,
                                                      N_SAMPLE_ARCHITECTURES))

        # Record the architecture itself so results can be traced back to it.
        lof = build_objective_function(architecture)
        architecture_io = IO(
            "data/experiments/loftuning/{}/architectures/".format(
                EXPERIMENT_ID),
            create_if_missing=True,
        )
        architecture_io.save_json(lof.data_dictionary(),
                                  "architecture-{}".format(architecture_index))

        # relative_builder_index only counts builders that actually ran.
        relative_builder_index = 0
        for builder_index in range(len(builders)):
            ran = vary(
                builder_index,
                architecture,
                REPEATS,
                "builder-{}/architecture-{}".format(builder_index,
                                                    architecture_index),
                architecture_index,
                relative_builder_index,
            )
            if ran:
                relative_builder_index += 1
Example 8
0
    def build_bridge_dataset(name, maximum_size, maximum_overstress):
        """
        String -> Int -> Float -> ()
        Build a dataset of valid bridge designs, saving each design together
        with the constraint that produced it.  Only designs whose maximum
        overstress does not exceed the given threshold are kept.
        """
        # Structural bounds derived from the fixed bridge dimensions.
        minimum_width = ceil(0.2 * Bridge.WIDTH)
        maximum_width = ceil(0.4 * Bridge.WIDTH)
        maximum_offset = (Bridge.WIDTH - maximum_width) // 2
        maximum_exclusion_height = floor(0.45 * Bridge.HEIGHT)
        maximum_inclusion_height = floor(0.3 * Bridge.HEIGHT)

        def sample_constraint():
            # Randomise each structural parameter within its allowed range.
            return BridgeFactory.generate_pillared_constraint(
                left_offset=randint(1, maximum_offset),
                width=randint(minimum_width, maximum_width),
                exclusion_height=randint(1, maximum_exclusion_height),
                gap_height=randint(1, 2),
                inclusion_height=randint(1, maximum_inclusion_height),
                inclusion_threshold=uniform(0.1, 0.45),
            )

        io = IO("data/datasets/{}/".format(name), True)
        saved = 0
        # Keep sampling constraints until enough viable designs are stored.
        while saved < maximum_size:
            constraint = sample_constraint()
            overstress, success, solution = BridgeFactory.find_viable_design(
                constraint)
            if success and overstress <= maximum_overstress:
                print("Found viable solution for design {}.".format(saved))
                io.save_object(
                    BridgeConstraintSolutionPair(constraint, solution),
                    str(saved))
                saved += 1
            else:
                print(
                    "Failed to find viable solution for design {}.".format(
                        saved))
Example 9
0
class Experiment:
    """
    Defines a class which can run an experiment, and exposes methods for
    easily logging experiment data when run.

    Subclasses supply the experiment by overriding run_experiment(); the
    logging methods here wrap it with timing, error capture, optional
    metadata, and JSON output into the log directory.
    """
    def __init__(self, log_folder, create_folder_if_missing=False):
        """
        String? -> Experiment
        Initialise an instance of Experiment by providing a path to a directory
        in which the experiment files will be stored.
        """
        # NOTE(review): when log_folder is None, self.log_folder and self.io
        # are never assigned, so the logging methods below would raise
        # AttributeError if called -- confirm this is intended.
        if log_folder is not None:
            self.log_folder = log_folder
            self.io = IO(log_folder, create_folder_if_missing)
        else:
            print("WARNING: no log folder provided to experiment")

    def log_experiment(self, file_name):
        """
        String -> ()
        Perform the experiment and log its details in the log directory.  If
        the name of the file already exists, an identifier will be added to
        the end.
        """
        data = self._get_experiment_data()
        self.io.save_json(data, self._get_next_file_name(file_name))

    def log_experiment_with_metadata(self, file_name, metadata):
        """
        String -> Dict -> ()
        Perform the experiment and log its details, along with some metadata,
        in the log directory.  If the name of the file already exists, an
        identifier will be added to the end.
        """
        data = self._get_experiment_data()
        self._add_metadata(data, metadata)
        self.io.save_json(data, self._get_next_file_name(file_name))

    def log_experiments(self, file_name, repeats, reset=lambda: None):
        """
        String -> Int -> (() -> ())? -> ()
        Perform the experiment a number of times and log the details of each
        run in the log directory.  If the name of the file already exists,
        an identifier will be added to the end.  The optional reset function
        will be called before each run.
        """
        data = []
        for _ in range(repeats):
            reset()
            data.append(self._get_experiment_data())
        # All runs are stored in a single JSON file under "data".
        results = {"repeats": repeats, "data": data}
        self.io.save_json(results, self._get_next_file_name(file_name))

    def log_experiments_with_metadata(self,
                                      file_name,
                                      repeats,
                                      metadata_lambda,
                                      reset=lambda: None):
        """
        String -> Int -> (Int -> Dict) -> (() -> ())? -> ()
        Perform the experiment a number of times and log the details of each
        run in the log directory.  If the name of the file already exists,
        an identifier will be added to the end.  The optional reset function
        will be called before each run.  Metadata for each run is
        determined as a function of its index.
        """
        data = []
        for repeat_index in range(repeats):
            reset()
            datum = self._get_experiment_data()
            # Per-run metadata is derived from the run's index.
            self._add_metadata(datum, metadata_lambda(repeat_index))
            data.append(datum)
        results = {"repeats": repeats, "data": data}
        self.io.save_json(results, self._get_next_file_name(file_name))

    def _add_metadata(self, data, metadata):
        """
        Dict -> Dict -> ()
        Add a metadata field to the given JSON object, represented as
        a Python dictionary.  If a 'metadata' key already exists, an
        exception will be thrown.
        """
        if "metadata" in data:
            raise Exception("the experiment already contains a metadata field")
        data["metadata"] = metadata

    def _get_next_file_name(self, file_name):
        """
        String -> String
        Append a number to the end of the file path such that the resulting
        file name does not exist in the log directory.
        """
        # NOTE(review): relies on IO's private _create_dirs_for_path helper.
        self.io._create_dirs_for_path(file_name)
        jsons = set(self._get_jsons_in_directory(file_name))
        # Existing files are named "<leaf>-<i>" (extension stripped); find
        # the smallest unused i.
        path_without_identifier = file_name.split("/")[-1] + "-"
        i = 0
        while path_without_identifier + str(i) in jsons:
            i += 1
        return file_name + "-" + str(i)

    def _get_experiment_data(self):
        """
        () -> Dict
        Perform the experiment and save the results in an enclosing JSON file.
        """
        data = {}
        data["start_time_unix"] = time()

        try:
            results = self.run_experiment()
            results["success"] = True
            data["results"] = results
        except Exception as e:
            # Catch broadly so a failed run is still recorded instead of
            # aborting the whole experiment batch.
            _, _, traceback = exc_info()
            data["results"] = {
                "success": False,
                "error": str(e),
                # NOTE(review): format_tb(...)[0] keeps only the first frame
                # of the traceback -- confirm deeper frames are not needed.
                "stack_trace": format_tb(traceback)[0],
            }

        data["end_time_unix"] = time()
        data["duration_seconds"] = data["end_time_unix"] - data[
            "start_time_unix"]
        return data

    def run_experiment(self):
        """
        () -> Dict
        Run the experiment and return its results; must be overridden by
        subclasses.
        """
        raise NotImplementedError()

    def _get_jsons_in_directory(self, path):
        """
        String -> [String]
        Return a list of the JSON files in the given directory, without their
        file extension.
        """
        # Directory component of the path (with trailing slash), or "" when
        # the path has no directory component.
        leaf_directory = "/".join(
            path.split("/")[:-1]) + "/" if "/" in path else ""
        # NOTE(review): plain string concatenation -- assumes log_folder ends
        # with a path separator; confirm against how IO stores it.
        full_path = self.log_folder + leaf_directory
        files = [f for f in listdir(full_path) if isfile(join(full_path, f))]
        jsons = [f.replace(".json", "") for f in files if ".json" in f]
        return jsons