Example #1
0
 def __call__(self):
     """Parse the command line and execute the selected experiment steps.

     With neither explicit step names nor ``--run-all-steps`` given,
     print the argparse help text and exit successfully.
     """
     ARGPARSER.epilog = self.steps.get_steps_text()
     self.args = ARGPARSER.parse_args()
     if not (self.args.steps or self.args.run_all_steps):
         ARGPARSER.print_help()
         sys.exit(0)
     # Steps named on the command line; falls back to every experiment
     # step when the list is empty (i.e. --run-all-steps was passed).
     named = [self.steps.get_step(name) for name in self.args.steps]
     selected = named or self.steps
     if self.args.run_all_steps:
         # Delegate execution to the environment (may run remotely).
         self.environment.run_steps(selected)
     else:
         Sequence.run_steps(selected)
Example #2
0
    def __call__(self):
        """Parse the command line and run the requested experiment steps.

        If neither step names nor ``--run-all-steps`` were given, print
        the help text and exit with status 0.
        """
        ARGPARSER.epilog = self.steps.get_steps_text()
        self.args = ARGPARSER.parse_args()
        if not self.args.steps and not self.args.run_all_steps:
            ARGPARSER.print_help()
            sys.exit(0)
        # If no steps were given on the commandline, run all exp steps.
        steps = [self.steps.get_step(name) for name in self.args.steps] or self.steps


        # --run-all-steps hands execution to the environment (presumably so a
        # grid/queue environment can schedule them — confirm against the
        # Environment implementations); otherwise run in-process sequentially.
        if self.args.run_all_steps:
            self.environment.run_steps(steps)
        else:
            Sequence.run_steps(steps)
Example #3
0
    def __init__(self, path, environment=None, cache_dir=None):
        """
        Create a new experiment that will be built at *path* using the methods
        provided by :ref:`Environment <environments>` *environment*. If
        *environment* is None, ``LocalEnvironment`` is used (default).

        Lab will use the *cache_dir* for storing temporary files.
        In case you run :py:class:`Fast Downward experiments
        <downward.experiments.DownwardExperiment>` this directory can become
        very large (tens of GB) since it is used to cache revisions and
        preprocessed tasks. By default *cache_dir* points to ``~/lab``.

        An experiment consists of multiple steps. Every experiment will need at
        least the following steps:

        * Build the experiment.
        * Run it.
        * Fetch the results.
        * Make a report.

        In the "Run it" step all runs that have been added to the experiment
        will be executed. Each run consists of one or multiple commands.
        """
        _Buildable.__init__(self)
        self.path = os.path.abspath(path)
        # Commas and colons in the path would break downstream tooling;
        # logging.critical presumably aborts here (lab convention) — confirm.
        if any(char in self.path for char in (':', ',')):
            logging.critical('Path contains commas or colons: %s' % self.path)
        self.environment = environment or LocalEnvironment()
        # Back-reference so the environment can inspect experiment settings.
        self.environment.exp = self
        self.cache_dir = cache_dir or tools.DEFAULT_USER_DIR
        tools.makedirs(self.cache_dir)
        # Number of runs grouped per directory shard (module-level default).
        self.shard_size = SHARD_SIZE

        # Runs are added later by the experiment author.
        self.runs = []

        self.set_property('experiment_file', self._script)

        # Default step pipeline: build, run, then fetch the results.
        self.steps = Sequence()
        self.add_step(Step('build', self.build))
        self.add_step(Step('start', self.run))
        self.add_fetcher(name='fetch')
Example #4
0
    def __init__(self, path, repo, environment=None, combinations=None,
                 compact=True, limits=None, cache_dir=None):
        """
        The experiment will be built at *path*.

        *repo* must be the path to a Fast Downward repository. Among other things
        this repository is used to search for benchmark files.

        *environment* must be an :ref:`Environment <environments>` instance.
        By default the experiment is run locally.

        If given, *combinations* must be a list of :ref:`Checkout <checkouts>`
        tuples of the form (Translator, Preprocessor, Planner). If combinations
        is None (default), perform an experiment with the working copy in *repo*.

        The *compact* parameter is only relevant for the search
        stage. If *compact* is ``False``, the preprocessed task and
        the two PDDL files are **copied** into the respective run
        directories for all configurations. This requires a lot of
        space (tens of GB), so it is strongly recommended to use the
        default (``compact=True``) which only references these
        files. Use ``compact=False`` only if you really need a
        portable experiment.

        If *limits* is given, it must be a dictionary that maps a
        subset of the keys below to seconds and MiB. It will be used
        to overwrite the default limits::

            default_limits = {
                'translate_time': 7200,
                'translate_memory': 8192,
                'preprocess_time': 7200,
                'preprocess_memory': 8192,
                'search_time': 1800,
                'search_memory': 2048,
            }

        *cache_dir* is used to cache Fast Downward clones and preprocessed
        tasks. By default it points to ``~/lab``.

        .. note::

            The directory *cache_dir* can grow very large (tens of GB).

        Example: ::

            repo = '/path/to/downward-repo'
            env = GkiGridEnvironment(queue='xeon_core.q', priority=-2)
            combos = [(Translator(repo, rev=123),
                       Preprocessor(repo, rev='e2a018c865f7'),
                       Planner(repo, rev='tip'))]
            exp = DownwardExperiment('/tmp/path', repo, environment=env,
                                     combinations=combos,
                                     limits={'search_time': 30,
                                             'search_memory': 1024})

        """
        Experiment.__init__(self, path, environment=environment, cache_dir=cache_dir)

        # NOTE(review): repository validation is deliberately disabled here.
        # Re-enable the check below if invalid *repo* paths should abort early:
        # if not repo or not os.path.isdir(repo):
        #     logging.critical('The path "%s" is not a local Fast Downward '
        #                      'repository.' % repo)
        self.repo = repo
        self.orig_path = self.path
        self.search_exp_path = self.path
        # Preprocess results live in a sibling directory suffixed with '-p'.
        self.preprocess_exp_path = self.path + '-p'
        self._path_to_python = None
        Checkout.REV_CACHE_DIR = os.path.join(self.cache_dir, 'revision-cache')
        self.preprocessed_tasks_dir = os.path.join(self.cache_dir, 'preprocessed-tasks')
        tools.makedirs(self.preprocessed_tasks_dir)

        # Default to a single combination built from the working copy in *repo*.
        self.combinations = (combinations or
                             [(Translator(repo), Preprocessor(repo), Planner(repo))])

        self.compact = compact
        self.suites = defaultdict(list)
        self._algorithms = []
        self._portfolios = []

        # Validate user-supplied limit keys against the known defaults.
        limits = limits or {}
        for key in limits:
            if key not in LIMITS:
                logging.critical('Unknown limit: %s' % key)
        # Copy the module-level defaults: the original code aliased LIMITS
        # itself, so .update() mutated the shared global and leaked this
        # experiment's overrides into every later experiment in the process.
        self.limits = dict(LIMITS)
        self.limits.update(limits)

        # Save if this is a compact experiment i.e. preprocessed tasks are referenced.
        self.set_property('compact', compact)

        # TODO: Integrate this into the API.
        self.include_preprocess_results_in_search_runs = True

        self.compilation_options = ['-j%d' % self._jobs]

        self._search_parsers = []
        self.add_search_parser(os.path.join(DOWNWARD_SCRIPTS_DIR, 'search_parser.py'))

        # Remove the default experiment steps
        self.steps = Sequence()

        # Two-stage pipeline: preprocess first, then search.
        self.add_step(Step('build-preprocess-exp', self.build, stage='preprocess'))
        self.add_step(Step('run-preprocess-exp', self.run, stage='preprocess'))
        self.add_fetcher(src=self.preprocess_exp_path,
                         dest=self.preprocessed_tasks_dir,
                         name='fetch-preprocess-results',
                         copy_all=True,
                         write_combined_props=False)
        self.add_step(Step('build-search-exp', self.build, stage='search'))
        # NOTE(review): this fetcher reuses the step name
        # 'fetch-preprocess-results' already taken by the fetcher above —
        # selecting the step by name on the command line is ambiguous.
        # Renaming it would change the public CLI step name, so only flagging.
        self.add_PAC_fetcher(src='/home/sternron/gal-dreiman/downward/lab/examples/PAC_Preprocess_Output-eval/preprocess',#TODO change to be parameter
                         dest=self.search_exp_path,
                         name='fetch-preprocess-results',
                         copy_all=True,
                         write_combined_props=False)#new featcher to copy preprocess for PAC results
        self.add_step(Step('run-search-exp', self.run, stage='search'))
        self.add_fetcher(src=self.search_exp_path, name='fetch-search-results')
Example #5
0
 def run_steps(self, steps):
     """Run *steps* sequentially via ``Sequence.run_steps``.

     The original source defined this method twice with identical bodies;
     the second definition merely shadowed the first, so the duplicate
     has been removed without any behavior change.
     """
     Sequence.run_steps(steps)