Example #1
    def __call__(self) -> None:
        # If only 1 pipeline stage is passed, then the list of stages to run is
        # parsed as a non-iterable integer, which can cause the generator to
        # fail to be created. So make it iterable in that case as well.
        if not isinstance(self.args.pipeline, Iterable):
            self.args.pipeline = [self.args.pipeline]

        if 5 not in self.args.pipeline:
            controller = ControllerGeneratorParser()(self.args)
            sgp = pm.module_load_tiered(
                project=self.args.project,
                path='generators.scenario_generator_parser')
            scenario = sgp.ScenarioGeneratorParser().to_scenario_name(
                self.args)

            self.logger.info("Controller=%s, Scenario=%s", controller,
                             scenario)
            cmdopts = rdg.from_cmdline(self.args)
            pipeline = Pipeline(self.args, controller, cmdopts)
        else:
            pipeline = Pipeline(self.args, None, {})

        try:
            pipeline.run()
        except KeyboardInterrupt:
            self.logger.info("Exiting on user cancel")
            sys.exit()
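The normalization at the top of Example #1 matters because a single --pipeline
value can arrive as a bare integer rather than a list of stages. A minimal,
self-contained sketch of that coercion pattern (independent of SIERRA; the
function name is made up for illustration):

from collections.abc import Iterable

def as_stage_list(pipeline):
    # A bare int is not Iterable, so wrap it; a list or tuple passes through.
    # Strings are Iterable too, so code handling arbitrary input would need to
    # special-case them; the snippet above only ever sees ints.
    if not isinstance(pipeline, Iterable):
        return [pipeline]
    return list(pipeline)

assert as_stage_list(3) == [3]
assert as_stage_list([1, 2, 3]) == [1, 2, 3]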
Example #2
def factory(cli_arg: str, main_config: types.YAMLDict,
            cmdopts: types.Cmdopts) -> PopulationVariableDensity:
    """
    Factory to create :class:`PopulationVariableDensity` derived classes from
    the command line definition.
    """
    attr = vd.Parser()(cli_arg)
    sgp = pm.module_load_tiered(project=cmdopts['project'],
                                path='generators.scenario_generator_parser')
    kw = sgp.ScenarioGeneratorParser().to_dict(cmdopts['scenario'])
    extent = ArenaExtent(Vector3D(kw['arena_x'], kw['arena_y'], kw['arena_z']))

    densities = list(np.linspace(attr['density_min'],
                                 attr['density_max'],
                                 num=attr['cardinality']))

    def __init__(self) -> None:
        PopulationVariableDensity.__init__(self, cli_arg, main_config,
                                           cmdopts['batch_input_root'],
                                           densities, extent)

    return type(
        cli_arg,  # type: ignore
        (PopulationVariableDensity, ),
        {"__init__": __init__})
Example #3
    def __init__(self, **kwargs) -> None:
        # 'controller' and 'spec' are presumably closed over from the
        # enclosing factory function, as in the factory() examples elsewhere
        # in this listing; they are not part of this method's signature.
        cmdopts = kwargs['cmdopts']
        self.logger = logging.getLogger(__name__)
        module = pm.module_load_tiered(project=cmdopts['project'],
                                       path='generators.scenario_generators')
        generator_name = module.gen_generator_name(spec.scenario_name)
        self.scenario_generator = getattr(module, generator_name)(
            controller=controller, spec=spec, **kwargs)
Example #4
    def _create_exp_run(self, run_exp_def: xml.XMLLuigi,
                        cmds_generator: bindings.IExpShellCmdsGenerator,
                        run_num: int, seeds: tp.List[int]) -> None:
        run_output_dir = "{0}_{1}_output".format(self.main_input_name, run_num)

        # If the project defined per-run configuration, apply
        # it. Otherwise, just apply the configuration in the SIERRA core.
        per_run = pm.module_load_tiered(project=self.cmdopts['project'],
                                        path='generators.exp_generators')

        run_output_root = os.path.join(self.exp_output_root, run_output_dir)
        stem_path = self._get_launch_file_stempath(run_num)

        per_run.ExpRunDefUniqueGenerator(run_num, run_output_root, stem_path,
                                         self.random_seeds[run_num],
                                         self.cmdopts).generate(run_exp_def)

        # Write out the experimental run launch file
        run_exp_def.write(stem_path)

        # Perform any necessary programmatic (i.e., stuff you can do in python
        # and don't need a shell for) per-run configuration.
        configurer = platform.ExpConfigurer(self.cmdopts)
        configurer.for_exp_run(self.exp_input_root, run_output_root)

        if configurer.cmdfile_paradigm() == 'per-exp':
            # Update GNU Parallel commands file with the command for the
            # configured experimental run.
            fpath = f"{self.commands_fpath}{config.kGNUParallel['cmdfile_ext']}"
            with open(fpath, 'a') as cmds_file:
                self._update_cmds_file(cmds_file, cmds_generator, 'per-exp',
                                       run_num,
                                       self._get_launch_file_stempath(run_num),
                                       'slave')
        elif configurer.cmdfile_paradigm() == 'per-run':
            # Write new GNU Parallel commands file with the commands for the
            # experimental run.
            ext = config.kGNUParallel['cmdfile_ext']
            master_fpath = f"{self.commands_fpath}_run{run_num}_master{ext}"
            slave_fpath = f"{self.commands_fpath}_run{run_num}_slave{ext}"

            self.logger.trace("Updating slave cmdfile %s", slave_fpath)
            with open(slave_fpath, 'w') as cmds_file:
                self._update_cmds_file(cmds_file, cmds_generator, 'per-run',
                                       run_num,
                                       self._get_launch_file_stempath(run_num),
                                       'slave')

            self.logger.trace("Updating master cmdfile %s", master_fpath)
            with open(master_fpath, 'w') as cmds_file:
                self._update_cmds_file(cmds_file, cmds_generator, 'per-run',
                                       run_num,
                                       self._get_launch_file_stempath(run_num),
                                       'master')
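Example #4 distinguishes two command-file layouts: 'per-exp' appends every
run's command to one shared file, while 'per-run' writes fresh master/slave
files for each run. A stripped-down sketch of just that branching (the file
naming and the write_cmds helper are made up; only the 'a' vs. 'w' open modes
mirror the snippet):

def write_cmds(paradigm: str, commands_stem: str, run_num: int, cmd: str) -> None:
    if paradigm == 'per-exp':
        # One shared file for the whole experiment: append this run's command.
        with open(commands_stem + ".txt", 'a') as f:
            f.write(cmd + '\n')
    elif paradigm == 'per-run':
        # One file per run: created fresh (mode 'w') on every call.
        with open("{0}_run{1}.txt".format(commands_stem, run_num), 'w') as f:
            f.write(cmd + '\n')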
Example #5
    def __init__(self, criteria: bc.IConcreteBatchCriteria, exp_num: int,
                 cmdopts: types.Cmdopts) -> None:
        self.exp_num = exp_num
        self.exp_input_root = os.path.join(
            cmdopts['batch_input_root'],
            criteria.gen_exp_dirnames(cmdopts)[exp_num])
        self.exp_def_fpath = os.path.join(self.exp_input_root,
                                          sierra.core.config.kPickleLeaf)
        self.logger = logging.getLogger(__name__)
        self.criteria = criteria

        from_bivar_bc1 = False
        from_bivar_bc2 = False
        from_univar_bc = False

        if criteria.is_bivar():
            bivar = tp.cast(bc.BivarBatchCriteria, criteria)
            from_bivar_bc1 = hasattr(bivar.criteria1, 'exp_scenario_name')
            from_bivar_bc2 = hasattr(bivar.criteria2, 'exp_scenario_name')
        else:
            from_univar_bc = hasattr(criteria, 'exp_scenario_name')

        # Need to get per-experiment arena dimensions from batch criteria, as
        # they might be different for each experiment
        if from_univar_bc:
            self.arena_dim = criteria.arena_dims()[exp_num]
            self.scenario_name = criteria.exp_scenario_name(exp_num)
            self.logger.debug(
                "Read scenario dimensions '%s' from univariate batch criteria",
                self.arena_dim)
        elif from_bivar_bc1 or from_bivar_bc2:
            self.arena_dim = criteria.arena_dims()[exp_num]
            self.logger.debug(
                "Read scenario dimensions '%s' bivariate batch criteria",
                self.arena_dim)
            self.scenario_name = criteria.exp_scenario_name(exp_num)

        else:  # Default case: scenario dimensions read from cmdline
            sgp = pm.module_load_tiered(
                project=cmdopts['project'],
                path='generators.scenario_generator_parser')
            kw = sgp.ScenarioGeneratorParser().to_dict(cmdopts['scenario'])
            self.arena_dim = ArenaExtent(
                Vector3D(kw['arena_x'], kw['arena_y'], kw['arena_z']))
            self.logger.debug("Read scenario dimensions %s from cmdline spec",
                              self.arena_dim)

            self.scenario_name = cmdopts['scenario']
Example #6
    def __call__(self, main_config: types.YAMLDict,
                 controller_config: types.YAMLDict, LN_config: types.YAMLDict,
                 HM_config: types.YAMLDict,
                 criteria: bc.IConcreteBatchCriteria) -> None:
        """
        Parameters:
            main_config: Parsed dictionary of main YAML configuration

            controller_config: Parsed dictionary of controller YAML
                               configuration.

            LN_config: Parsed dictionary of intra-experiment linegraph
                       configuration.

            HM_config: Parsed dictionary of intra-experiment heatmap
                       configuration.

            criteria:  The :term:`Batch Criteria` used for the batch
                       experiment.
        """
        exp_to_gen = sierra.core.utils.exp_range_calc(
            self.cmdopts, self.cmdopts['batch_output_root'], criteria)

        for exp in exp_to_gen:
            exp = os.path.split(exp)[1]
            cmdopts = copy.deepcopy(self.cmdopts)
            cmdopts["exp_input_root"] = os.path.join(
                self.cmdopts['batch_input_root'], exp)
            cmdopts["exp_output_root"] = os.path.join(
                self.cmdopts['batch_output_root'], exp)
            cmdopts["exp_graph_root"] = os.path.join(
                self.cmdopts['batch_graph_root'], exp)
            cmdopts["exp_model_root"] = os.path.join(
                cmdopts['batch_model_root'], exp)
            cmdopts["exp_stat_root"] = os.path.join(cmdopts["batch_stat_root"],
                                                    exp)

            if os.path.isdir(cmdopts["exp_stat_root"]):
                generator = pm.module_load_tiered(
                    project=self.cmdopts['project'],
                    path='pipeline.stage4.intra_exp_graph_generator')
                generator.IntraExpGraphGenerator(main_config,
                                                 controller_config, LN_config,
                                                 HM_config, cmdopts)(criteria)
            else:
                self.logger.warning(
                    "Skipping experiment '%s': %s does not exist", exp,
                    cmdopts['exp_stat_root'])
Example #7
    def _gather_worker(gatherq: mp.Queue, processq: mp.Queue,
                       main_config: dict, project: str,
                       storage_medium: str) -> None:
        while True:
            # Wait for 3 seconds after the queue is empty before bailing
            try:
                batch_output_root, exp = gatherq.get(True, 3)
                module = pm.module_load_tiered(
                    project=project, path='pipeline.stage3.run_collator')
                module.ExperimentalRunCSVGatherer(main_config,
                                                  batch_output_root, exp,
                                                  storage_medium, processq)()
                gatherq.task_done()

            except queue.Empty:
                break
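The worker above drains a multiprocessing queue and exits once it has been
empty for 3 seconds. Note that task_done() is provided by
multiprocessing.JoinableQueue, not by the plain multiprocessing.Queue named in
the type hint, so the object actually passed in is presumably a JoinableQueue.
A self-contained sketch of that consumer pattern:

import multiprocessing as mp
import queue

def worker(inq) -> None:
    while True:
        try:
            item = inq.get(True, 3)      # block for at most 3 seconds
            print("processing", item)
            inq.task_done()
        except queue.Empty:              # empty for 3 seconds -> bail
            break

if __name__ == '__main__':
    q = mp.JoinableQueue()
    for i in range(5):
        q.put(i)
    p = mp.Process(target=worker, args=(q,))
    p.start()
    q.join()                             # wait until every item is task_done()
    p.join()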
Example #8
def factory(cli_arg: str, main_config: types.YAMLDict,
            cmdopts: types.Cmdopts) -> PopulationConstantDensity:
    """
    Factory to create :class:`PopulationConstantDensity` derived classes from
    the command line definition.

    """
    attr = cd.Parser()(cli_arg)
    sgp = pm.module_load_tiered(project=cmdopts['project'],
                                path='generators.scenario_generator_parser')
    kw = sgp.ScenarioGeneratorParser().to_dict(cmdopts['scenario'])

    is_2x1 = kw['arena_x'] == 2 * kw['arena_y']
    is_1x1 = kw['arena_x'] == kw['arena_y']

    if is_2x1:
        r = range(kw['arena_x'],
                  kw['arena_x'] + attr['cardinality'] * attr['arena_size_inc'],
                  attr['arena_size_inc'])
        dims = [
            sierra.core.utils.ArenaExtent(
                Vector3D(x, int(x / 2), kw['arena_z'])) for x in r
        ]
    elif is_1x1:
        r = range(kw['arena_x'],
                  kw['arena_x'] + attr['cardinality'] * attr['arena_size_inc'],
                  attr['arena_size_inc'])

        dims = [
            sierra.core.utils.ArenaExtent(Vector3D(x, x, kw['arena_z']))
            for x in r
        ]
    else:
        raise NotImplementedError(
            "Unsupported arena X,Y scaling '{0}x{1}': Must be [2x1,1x1]".format(
                kw['arena_x'], kw['arena_y']))

    def __init__(self) -> None:
        PopulationConstantDensity.__init__(self, cli_arg, main_config,
                                           cmdopts['batch_input_root'],
                                           attr["target_density"], dims,
                                           kw['scenario_tag'])

    return type(
        cli_arg,  # type: ignore
        (PopulationConstantDensity, ),
        {"__init__": __init__})
Example #9
    def _run_inter_graph_generation(
            self, criteria: bc.IConcreteBatchCriteria) -> None:
        """
        Generate inter-experiment graphs (duh).
        """
        targets = self._calc_inter_LN_targets()

        self.logger.info("Generating inter-experiment graphs...")
        start = time.time()

        generator = pm.module_load_tiered(
            project=self.cmdopts['project'],
            path='pipeline.stage4.inter_exp_graph_generator')
        generator.InterExpGraphGenerator(self.main_config, self.cmdopts,
                                         targets)(criteria)
        elapsed = int(time.time() - start)
        sec = datetime.timedelta(seconds=elapsed)

        self.logger.info("Inter-experiment graph generation complete: %s",
                         str(sec))
Example #10
    def __init__(self, main_config: types.YAMLDict,
                 cmdopts: types.Cmdopts) -> None:
        self.cmdopts = cmdopts

        self.main_config = main_config
        self.controller_config = yaml.load(
            open(
                os.path.join(self.cmdopts['project_config_root'],
                             sierra.core.config.kYAML['controllers'])),
            yaml.FullLoader)
        self.logger = logging.getLogger(__name__)

        # Load YAML config
        loader = pm.module_load_tiered(
            project=self.cmdopts['project'],
            path='pipeline.stage4.yaml_config_loader')
        config = loader.YAMLConfigLoader()(self.cmdopts)
        self.intra_LN_config = config['intra_LN']
        self.intra_HM_config = config['intra_HM']
        self.inter_LN_config = config['inter_LN']

        if self.cmdopts['models_enable']:
            self._load_models()
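The controller YAML above is loaded by passing a bare open() straight to
yaml.load(), which leaves closing the file handle to the garbage collector. An
equivalent, hedged sketch using a context manager (the path pieces are
placeholders, not SIERRA's real layout):

import os
import yaml

def load_controllers_yaml(config_root: str, leaf: str = 'controllers.yaml') -> dict:
    # Same Loader as the snippet above; the file is closed deterministically.
    with open(os.path.join(config_root, leaf)) as f:
        return yaml.load(f, Loader=yaml.FullLoader)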
Example #11
def factory(cli_arg: str, main_config: types.YAMLDict,
            cmdopts: types.Cmdopts) -> PopulationSize:
    """
    Factory to create :class:`PopulationSize` derived classes from the command
    line definition.

    """
    parser = population_size.Parser()
    attr = parser(cli_arg)
    max_sizes = parser.to_sizes(attr)

    if cmdopts['robot_positions']:
        positions = [
            Vector3D.from_str(s, astype=float)
            for s in cmdopts['robot_positions']
        ]
    else:
        # Get the dimensions of the effective arena from the scenario so we can
        # place robots randomly within it.
        sgp = pm.module_load_tiered(
            project=cmdopts['project'],
            path='generators.scenario_generator_parser')
        kw = sgp.ScenarioGeneratorParser().to_dict(cmdopts['scenario'])
        xs = random.choices(range(0, kw['arena_x']), k=max_sizes[-1])
        ys = random.choices(range(0, kw['arena_y']), k=max_sizes[-1])
        zs = random.choices(range(0, kw['arena_z']), k=max_sizes[-1])
        positions = [Vector3D(x, y, z) for x, y, z in zip(xs, ys, zs)]

    def __init__(self) -> None:
        PopulationSize.__init__(self, cli_arg, main_config,
                                cmdopts['batch_input_root'], cmdopts['robot'],
                                max_sizes, positions)

    return type(
        cli_arg,  # type: ignore
        (PopulationSize, ),
        {"__init__": __init__})
Example #12
    def __init__(self, args: argparse.Namespace, controller: str,
                 rdg_opts: types.Cmdopts) -> None:
        self.args = args
        self.logger = logging.getLogger(__name__)

        assert all(stage in [1, 2, 3, 4, 5] for stage in args.pipeline), \
            f"Invalid pipeline stage in {args.pipeline}: Only 1-5 valid"

        self.cmdopts = {
            # general
            'sierra_root': self.args.sierra_root,
            'scenario': self.args.scenario,
            'template_input_file': self.args.template_input_file,
            'project': self.args.project,
            'exec_env': args.exec_env,
            'platform_vc': self.args.platform_vc,
            "n_runs": args.n_runs,
            'project_imagizing': self.args.project_imagizing,
            'exp_overwrite': self.args.exp_overwrite,
            'exp_range': self.args.exp_range,
            'dist_stats': self.args.dist_stats,
            'skip_collate': self.args.skip_collate,
            'platform': self.args.platform,

            # stage 1
            'no_preserve_seeds': self.args.no_preserve_seeds,

            # stage 2
            'nodefile': self.args.nodefile,

            # stage 3
            'skip_verify_results': self.args.skip_verify_results,
            'render_cmd_opts': self.args.render_cmd_opts,
            'processing_mem_limit': self.args.processing_mem_limit,
            'serial_processing': self.args.serial_processing,
            'storage_medium': self.args.storage_medium,

            # stage 4
            'exp_graphs': self.args.exp_graphs,
            'project_no_yaml_LN': self.args.project_no_yaml_LN,
            'project_no_yaml_HM': self.args.project_no_yaml_HM,
            'project_rendering': self.args.project_rendering,
            'plot_log_xscale': self.args.plot_log_xscale,
            'plot_enumerated_xscale': self.args.plot_enumerated_xscale,
            'plot_log_yscale': self.args.plot_log_yscale,
            'plot_regression_lines': self.args.plot_regression_lines,
            'plot_primary_axis': self.args.plot_primary_axis,
            'plot_large_text': self.args.plot_large_text,
            'models_enable': self.args.models_enable,

            # stage 5
            'controllers_list': self.args.controllers_list,
            'controllers_legend': self.args.controllers_legend,
            'scenarios_list': self.args.scenarios_list,
            'scenarios_legend': self.args.scenarios_legend,
            'scenario_comparison': self.args.scenario_comparison,
            'controller_comparison': self.args.controller_comparison,
            'comparison_type': self.args.comparison_type,
            'transpose_graphs': self.args.transpose_graphs,
        }

        # Load additional cmdline options from platform
        self.logger.debug("Updating cmdopts with extensions from '%s'",
                          self.cmdopts['platform'])
        module = pm.module_load_tiered("cmdline",
                                       platform=self.cmdopts['platform'])
        module.PlatformCmdline.cmdopts_update(self.args, self.cmdopts)

        if rdg_opts is not None:
            self.cmdopts.update(rdg_opts)

            # Load additional cmdline options from project. This is mandatory,
            # because all projects have to define --controller and --scenario
            # at a minimum.
            self.logger.debug("Updating cmdopts with extensions from '%s'",
                              self.cmdopts['project'])
            path = "{0}.cmdline".format(self.cmdopts['project'])
            module = pm.module_load(path)

            module.Cmdline.cmdopts_update(self.args, self.cmdopts)

        self.cmdopts['plugin_root'] = os.path.join('sierra', 'plugins')

        project = pm.pipeline.get_plugin(self.cmdopts['project'])
        path = os.path.join(project['parent_dir'], self.cmdopts['project'])
        self.cmdopts['project_root'] = path
        self.cmdopts['project_config_root'] = os.path.join(path, 'config')
        self.cmdopts['project_model_root'] = os.path.join(path, 'models')

        self._load_config()

        if 5 not in self.args.pipeline:
            self.batch_criteria = bc.factory(self.main_config, self.cmdopts,
                                             self.args)

        self.controller = controller
Example #13
    def _gen_csv(self, cmdopts: types.Cmdopts, batch_leaf: str, src_stem: str,
                 dest_stem: str) -> None:
        """
        Helper function for generating a set of .csv files for use in
        inter-scenario graph generation.

        Generates:

        - ``.csv`` file containing results for each scenario the controller is
          being compared across, 1 per line.

        - ``.stddev`` file containing stddev for the generated ``.csv`` file, 1
          per line.

        - ``.model`` file containing model predictions for controller behavior
          during each scenario, 1 per line (not generated if models were not run
          for the performance measures we are generating graphs for).

        - ``.legend`` file containing legend values for models to plot (not
          generated if models were not run for the performance measures we are
          generating graphs for).

        """

        csv_ipath = os.path.join(cmdopts['batch_output_root'],
                                 cmdopts['batch_stat_collate_root'],
                                 src_stem + ".csv")
        stddev_ipath = os.path.join(cmdopts['batch_output_root'],
                                    cmdopts['batch_stat_collate_root'],
                                    src_stem + ".stddev")

        model_ipath_stem = os.path.join(cmdopts['batch_model_root'], src_stem)
        model_opath_stem = os.path.join(self.sc_model_root,
                                        dest_stem + "-" + self.controller)

        opath_stem = os.path.join(self.sc_csv_root,
                                  dest_stem + "-" + self.controller)

        # Some experiments might not generate the necessary performance measure
        # .csvs for graph generation, which is OK.
        if not utils.path_exists(csv_ipath):
            self.logger.warning("%s missing for controller %s", csv_ipath,
                                self.controller)
            return

        # Collect performance measure results. Append to existing dataframe if
        # it exists, otherwise start a new one.
        data_df = self._accum_df(csv_ipath, opath_stem + '.csv', src_stem)
        storage.DataFrameWriter('storage.csv')(data_df,
                                               opath_stem + '.csv',
                                               index=False)

        # Collect performance results stddev. Append to existing dataframe if it
        # exists, otherwise start a new one.
        stddev_df = self._accum_df(stddev_ipath, opath_stem + '.stddev',
                                   src_stem)
        if stddev_df is not None:
            storage.DataFrameWriter('storage.csv')(stddev_df,
                                                   opath_stem + '.stddev',
                                                   index=False)

        # Collect performance results models and legends. Append to existing
        # dataframes if they exist, otherwise start new ones.
        model_df = self._accum_df(model_ipath_stem + '.model',
                                  model_opath_stem + '.model', src_stem)
        if model_df is not None:
            storage.DataFrameWriter('storage.csv')(model_df,
                                                   model_opath_stem + '.model',
                                                   index=False)
            with open(model_opath_stem + '.legend', 'a') as f:
                _, scenario, _ = rdg.parse_batch_leaf(batch_leaf)
                sgp = pm.module_load_tiered(
                    project=cmdopts['project'],
                    path='generators.scenario_generator_parser')
                kw = sgp.ScenarioGeneratorParser().to_dict(scenario)
                f.write("{0} Prediction\n".format(kw['scenario_tag']))