# Example 1
 def parse_bootstrap_sample_size(self, n_boot: int) -> int:
     """The size of the bootstrap sample.

     Must be at least 2; a warning is logged so the user knows their
     explicit choice overrides any default.
     """
     # TODO: would be nice to have the option to perform analysis without bootstrapping
     if n_boot >= 2:
         log.warning(f"Using user specified bootstrap sample size: {n_boot}")
         return n_boot
     raise ConfigError("bootstrap sample size must be greater than 1")
# Example 2
 def produce_layer_action(self, layer: str):
     """Given a string, returns the flow model action indexed by that string."""
     try:
         action = LAYER_OPTIONS[layer]
     except KeyError:
         # report the valid options alongside the invalid key
         raise ConfigError(f"Invalid model {layer}", layer,
                           LAYER_OPTIONS.keys())
     return action
# Example 3
 def parse_thermalisation(self, therm: (int, type(None))):
     """Number of initial steps to discard; ``None`` disables thermalisation."""
     if therm is not None:
         if therm < 1:
             raise ConfigError(
                 "thermalisation must be greater than or equal to 1 or be None")
         return therm
     log.warning("Not Performing thermalisation")
     return therm
# Example 4
 def parse_sample_interval(self, interval: (int, type(None))):
     """Steps between sampled configurations; ``None`` leaves it unset."""
     if interval is None:
         return interval
     if interval >= 1:
         log.warning(f"Using user specified sample_interval: {interval}")
         return interval
     raise ConfigError(
         "sample_interval must be greater than or equal to 1")
# Example 5
 def produce_target_dist(self, geometry, parameterisation: str,
                         couplings: dict):
     """Uses arguments to instantiate :py:class:`anvil.distributions.PhiFourScalar`

     Looks up the alternate constructor ``PhiFourScalar.from_<parameterisation>``
     and calls it with ``geometry`` and the couplings expanded as keyword
     arguments.

     Raises
     ------
     ConfigError
         If ``PhiFourScalar`` has no constructor for the given parameterisation.
     """
     try:
         constructor = getattr(PhiFourScalar, f"from_{parameterisation}")
     except AttributeError as e:
         # chain the AttributeError so the underlying cause stays visible
         raise ConfigError(f"Invalid parametrisation: {parameterisation}",
                           parameterisation) from e
     return constructor(geometry, **couplings)
# Example 6
    def produce_size_half(self, lattice_size: int) -> int:
        """Half of the number of nodes on the lattice.

        This defines the size of the input layer to the neural networks.

        Raises
        ------
        ConfigError
            If ``lattice_size`` is odd.
        """
        # NOTE: we may want to make this more flexible
        if (lattice_size % 2) != 0:
            raise ConfigError("Lattice size is expected to be an even number")
        # Floor division is exact for arbitrarily large ints, unlike
        # int(lattice_size / 2), which loses precision above 2**53.
        return lattice_size // 2
# Example 7
    def parse_cosh_fit_min_separation(self, n: int, training_geometry):
        """The smallest lattice separation to include when fitting a cosh function
        to the correlator, so as to extract the correlation length.

        See also: ``produce_cosh_fit_window``.

        Raises
        ------
        ConfigError
            If fewer than three separations would remain for the fit.
        """
        # A three-parameter fit needs at least three separations up to length // 2.
        if n > training_geometry.length // 2 - 2:
            raise ConfigError(
                "Not enough points for a three-parameter fit.")
        return n
# Example 8
 def produce_checkpoint(self, cp_id=None, training_output=None):
     """Select a checkpoint: ``None`` -> no checkpoint, ``-1`` -> the final
     checkpoint, otherwise the checkpoint whose id matches ``cp_id``."""
     if cp_id is None:
         return None
     if cp_id == -1:
         return training_output.final_checkpoint()
     known_ids = training_output.cp_ids
     if cp_id in known_ids:
         # get index from training_output class
         return training_output.checkpoints[known_ids.index(cp_id)]
     raise ConfigError(
         f"Checkpoint {cp_id} not found in {training_output.path}")
# Example 9
 def parse_thermalization(
     self, therm: (int, type(None))) -> (int, type(None)):
     """A number of Markov chain steps to be discarded before beginning to select
     configurations for the output sample."""
     if therm is not None:
         if therm < 1:
             raise ConfigError(
                 "Thermalization must be greater than or equal to 1 or be None")
         return therm
     log.warning("Not Performing thermalization")
     return None
# Example 10
 def produce_bootstrap_seed(self,
                            manual_bootstrap_seed: (int,
                                                    type(None)) = None):
     """Optional seed for the random number generator which generates the bootstrap
     sample, for the purpose of reproducibility."""
     if manual_bootstrap_seed is None:
         # no user seed supplied: draw a fresh random one
         return randint(0, maxsize)
     # numpy is actually this strict but let's keep it sensible.
     # NOTE(review): numpy's legacy seeding rejects values above 2**32 - 1,
     # while this check permits 2**32 exactly — confirm against the consumer.
     if not (0 <= manual_bootstrap_seed <= 2 ** 32):
         raise ConfigError(
             "Seed is outside of appropriate range: [0, 2 ** 32]")
     return manual_bootstrap_seed
# Example 11
    def parse_window(self, window: float) -> float:
        """A numerical factor featuring in the calculation of the optimal 'window'
        size, which is then used to measure the integrated autocorrelation time of
        observables.

        Suggested values are between 1 and 2. However, this should be judged by
        checking that the integrated autocorrelation has approximately plateaued
        at the optimal window size.

        See :py:func:`anvil.observables.automatic_windowing_function`.
        """
        if window >= 0:
            log.warning(f"Using user specified window 'S' parameter: {window}")
            return window
        raise ConfigError("window must be positive")
# Example 12
    def parse_sample_interval(
        self, interval: (int, type(None))) -> (int, type(None)):
        """A number of Markov chain steps to discard between configurations that are
        selected for the output sample.

        Can be specified by the user in the runcard, or left to an automatic
        calculation based on the acceptance rate of the Metropolis-Hastings
        algorithm.
        """
        if interval is None:
            log.info(
                "No sample_interval provided - will be calculated 'on the fly'."
            )
            return None
        if interval >= 1:
            log.info(f"Using user specified sample_interval: {interval}")
            return interval
        raise ConfigError(
            "sample_interval must be greater than or equal to 1")
# Example 13
    def produce_checkpoint(
            self,
            cp_id: (int, type(None)),
            training_output,
    ):
        """Attempts to return a checkpoint object extracted from a training output.

        - If ``cp_id == None``, no checkpoint is returned.
        - If ``cp_id == -1``, the checkpoint with the highest ``cp_id`` is returned.
        - Otherwise, attempts to load checkpoint with id ``cp_id``.
        """
        if cp_id is None:
            return None
        if cp_id == -1:
            return training_output.final_checkpoint()
        available = training_output.cp_ids
        if cp_id in available:
            # map the requested id to its position in the checkpoint list
            return training_output.checkpoints[available.index(cp_id)]
        raise ConfigError(
            f"Checkpoint {cp_id} not found in {training_output.path}")
# Example 14
 def parse_lattice_dimension(self, dim: int) -> int:
     """The number of spatial dimensions."""
     # only the 2D theory is implemented
     if dim == 2:
         return dim
     raise ConfigError("Currently only 2 dimensions is supported")
# Example 15
 def parse_bootstrap_n_samples(self, n_samples: int):
     """Number of bootstrap samples; must be at least 2."""
     if n_samples >= 2:
         log.warning(f"Using user specified bootstrap_n_samples: {n_samples}")
         return n_samples
     raise ConfigError("bootstrap_n_samples must be greater than 1")
# Example 16
 def parse_lattice_dimension(self, dim: int):
     """Parse lattice dimension from runcard"""
     # anything other than the 2D theory is rejected
     if dim == 2:
         return dim
     raise ConfigError("Currently only 2 dimensions is supported")
# Example 17
 def parse_window(self, window: float):
     """Automatic-windowing 'S' factor; must be non-negative."""
     if window >= 0:
         log.warning(f"Using user specified window 'S': {window}")
         return window
     raise ConfigError("window must be positive")