Example no. 1
    def run_steps(self, steps):
        timestamp = datetime.datetime.now().isoformat()
        job_dir = os.path.join(self.exp.cache_dir,
                               'grid-steps',
                               timestamp + '-' + self.exp.name)
        tools.overwrite_dir(job_dir)

        # Build the job files before submitting the other jobs.
        logging.info('Building job scripts')
        for step in steps:
            if step._funcname == 'build':
                script_step = step.copy()
                script_step.kwargs['only_main_script'] = True
                script_step()

        prev_job_name = None
        for number, step in enumerate(steps, start=1):
            job_name = self._get_job_name(step)
            # We cannot submit a job from within the grid, so we submit it
            # directly.
            if step._funcname == 'run':
                self.__wait_for_job_name = prev_job_name
                self._job_name = job_name
                step()
            else:
                step.is_last_step = (number == len(steps))
                with open(os.path.join(job_dir, job_name), 'w') as f:
                    f.write(self._get_job(step))
                submit = ['qsub']
                if prev_job_name:
                    submit.extend(['-hold_jid', prev_job_name])
                submit.append(job_name)
                tools.run_command(submit, cwd=job_dir)
            prev_job_name = job_name
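
Every snippet on this page delegates to tools.run_command from Lab's tools module. As a rough mental model, here is a minimal sketch of such a helper, assuming it merely logs the command and returns the subprocess exit code (the real Lab implementation may differ, for instance by aborting when the command fails):

    # Illustrative sketch only; not the actual Lab implementation.
    import logging
    import subprocess

    def run_command(cmd, **kwargs):
        """Run cmd (a list of strings) and return its exit code."""
        logging.info('Running command: %s' % ' '.join(cmd))
        return subprocess.call(cmd, **kwargs)
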
Example no. 2
 def _submit_job(self, job_name, job_file, job_dir, dependency=None):
     submit = ['qsub']
     if dependency:
         submit.extend(['-hold_jid', dependency])
     submit.append(job_file)
     tools.run_command(submit, cwd=job_dir)
     return job_name
Example no. 4
    def _prepare_validator(self):
        validate = os.path.join(self.repo, 'src', 'VAL', 'validate')
        if not os.path.exists(validate):
            logging.info('Building the validator in the experiment repository.')
            tools.run_command(['make', '-j%d' % self._jobs],
                              cwd=os.path.dirname(validate))
        assert os.path.exists(validate), validate
        self.add_resource('VALIDATE', validate, 'validate')

        downward_validate = os.path.join(DOWNWARD_SCRIPTS_DIR, 'validate.py')
        self.add_resource('DOWNWARD_VALIDATE', downward_validate, 'downward-validate')
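
The snippet does not show how self._jobs is initialized; a common default, assumed here purely for illustration, is the machine's CPU count:

    # Hypothetical default for the number of parallel make jobs;
    # not taken from the code above.
    import multiprocessing

    jobs = multiprocessing.cpu_count()
    # ['make', '-j%d' % jobs]  ->  e.g. ['make', '-j8'] on an 8-core machine
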
Example no. 6
 def _compile(self, options=None):
     options = options or []
     retcode = tools.run_command(['./build_all'] + options, cwd=self.src_dir)
     if retcode == 0:
         tools.touch(self._sentinel_file)
     else:
         logging.critical('Build failed in: %s' % self.src_dir)
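
The sentinel file marks a completed build so later calls can skip recompiling. tools.touch is not shown in these snippets; a minimal sketch of such a helper (an assumption, not Lab's actual code) could be:

    # Hypothetical touch-style helper: create the file if it is missing
    # and update its modification time.
    import os

    def touch(path):
        with open(path, 'a'):
            os.utime(path, None)
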
Example no. 7
    def cache(self, revision_cache):
        self.path = os.path.join(revision_cache, self.name)
        if os.path.exists(self.path):
            logging.info(f'Revision is already cached: "{self.path}"')
            if not os.path.exists(self._get_sentinel_file()):
                logging.critical(
                    f"The build for the cached revision at {self.path} is corrupted. "
                    f"Please delete it and try again.")
        else:
            tools.makedirs(self.path)
            tar_archive = os.path.join(self.path, "solver.tgz")
            cmd = ["git", "archive", "--format", "tar", self.global_rev]
            with open(tar_archive, "w") as f:
                retcode = tools.run_command(cmd, stdout=f, cwd=self.repo)

            if retcode == 0:
                with tarfile.open(tar_archive) as tf:
                    tf.extractall(self.path)
                tools.remove_path(tar_archive)

                for exclude_dir in self.exclude:
                    path = os.path.join(self.path, exclude_dir)
                    if os.path.exists(path):
                        tools.remove_path(path)

            if retcode != 0:
                shutil.rmtree(self.path)
                logging.critical("Failed to make checkout.")
            self._compile()
            self._cleanup()
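
For comparison, the same export can be written without redirecting stdout by letting git archive create the tarball itself. The -o flag and the extraction filter (available from Python 3.12) are assumptions of this sketch, not taken from the snippet above:

    # Standalone sketch, not part of Lab. dest_dir should be an absolute path.
    import os
    import subprocess
    import tarfile

    def export_revision(repo, rev, dest_dir):
        archive = os.path.join(dest_dir, 'solver.tar')
        subprocess.check_call(
            ['git', 'archive', '--format', 'tar', '-o', archive, rev], cwd=repo)
        with tarfile.open(archive) as tf:
            tf.extractall(dest_dir, filter='data')  # reject unsafe members
        os.remove(archive)
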
Example no. 8
    def cache(self, revision_cache):
        self._path = os.path.join(revision_cache, self._hashed_name)
        if os.path.exists(self.path):
            logging.info('Revision is already cached: "%s"' % self.path)
            if not os.path.exists(self._get_sentinel_file()):
                logging.critical(
                    'The build for the cached revision at {} is corrupted '
                    'or was made with an older Lab version. Please delete '
                    'it and try again.'.format(self.path))
        else:
            tools.makedirs(self.path)

            if not os.path.exists(os.path.join(self.repo, 'export.sh')):
                logging.critical(
                    "export.sh script not found. Make sure you're using "
                    "a recent version of the planner.")
            # First export the main repo
            script = os.path.join(self.repo, "export.sh")
            retcode = tools.run_command((script, self.global_rev, self.path),
                                        cwd=self.repo)

            if retcode != 0:
                shutil.rmtree(self.path)
                logging.critical('Failed to make checkout.')
            self._compile()
            self._cleanup()
Example no. 10
 def start_exp(self):
     submitted_file = os.path.join(self.exp.path, 'submitted')
     if os.path.exists(submitted_file):
         tools.confirm('The file "%s" already exists so it seems the '
                       'experiment has already been submitted. Are you '
                       'sure you want to submit it again?' % submitted_file)
     submit = ['qsub']
     if self.__wait_for_job_name:
         submit.extend(['-hold_jid', self.__wait_for_job_name])
     if self._job_name:
         # The name set in the job file will be ignored.
         submit.extend(['-N', self._job_name])
     submit.append(self.main_script_file)
     tools.run_command(submit, cwd=self.exp.path)
     # Write "submitted" file.
     with open(submitted_file, 'w') as f:
         f.write('This file is created when the experiment is submitted to '
                 'the queue.')
Example no. 12
 def _compile(self):
     if not os.path.exists(os.path.join(self.path, 'build.py')):
         logging.critical('build.py not found. Please merge with master.')
     retcode = tools.run_command(['./build.py'] + self.build_options,
                                 cwd=self.path)
     if retcode == 0:
         tools.write_file(self._get_sentinel_file(), '')
     else:
         logging.critical('Build failed in {}'.format(self.path))
Example no. 13
 def _cleanup(self):
     assert self.rev != 'WORK'
     tools.run_command(['./build_all', 'clean'], cwd=self.src_dir)
     # Strip binaries.
     downward_bin = os.path.join(self.src_dir, 'search', 'downward-release')
     preprocess_bin = os.path.join(self.src_dir, 'preprocess', 'preprocess')
     assert os.path.exists(preprocess_bin), preprocess_bin
     binaries = [preprocess_bin]
     if os.path.exists(downward_bin):
         binaries.append(downward_bin)
     tools.run_command(['strip'] + binaries)
     # Remove unneeded files from "src" dir if they exist.
     # TODO: Remove "lp" and "ext" dirs?
     for name in ['dist', 'VAL', 'validate']:
         path = os.path.join(self.src_dir, name)
         if os.path.isfile(path):
             os.remove(path)
         elif os.path.isdir(path):
             shutil.rmtree(path)
Example no. 15
    def cache(self, revision_cache):
        self.path = os.path.join(revision_cache, self.name)
        if os.path.exists(self.path):
            logging.info('Revision is already cached: "%s"' % self.path)
            if not os.path.exists(self._get_sentinel_file()):
                logging.critical(
                    "The build for the cached revision at {} is corrupted. "
                    "Please delete it and try again.".format(self.path)
                )
        else:
            tools.makedirs(self.path)
            vcs = get_version_control_system(self.repo)
            if vcs == MERCURIAL:
                retcode = tools.run_command(
                    ["hg", "archive", "-r", self.global_rev]
                    + [f"-X{d}" for d in self.exclude]
                    + [self.path],
                    cwd=self.repo,
                )
            elif vcs == GIT:
                tar_archive = os.path.join(self.path, "solver.tgz")
                cmd = ["git", "archive", "--format", "tar", self.global_rev]
                with open(tar_archive, "w") as f:
                    retcode = tools.run_command(cmd, stdout=f, cwd=self.repo)

                if retcode == 0:
                    with tarfile.open(tar_archive) as tf:
                        tf.extractall(self.path)
                    tools.remove_path(tar_archive)

                    for exclude_dir in self.exclude:
                        path = os.path.join(self.path, exclude_dir)
                        if os.path.exists(path):
                            tools.remove_path(path)
            else:
                _raise_unknown_vcs_error(vcs)

            if retcode != 0:
                shutil.rmtree(self.path)
                logging.critical("Failed to make checkout.")
            self._compile()
            self._cleanup()
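
The helpers get_version_control_system, MERCURIAL and GIT are not part of the snippet. A plausible sketch, assuming detection simply probes for the VCS metadata directory (the real Lab helper may differ):

    # Hypothetical sketch of the VCS detection used above.
    import os

    MERCURIAL = 'hg'
    GIT = 'git'

    def get_version_control_system(repo):
        if os.path.exists(os.path.join(repo, '.hg')):
            return MERCURIAL
        if os.path.exists(os.path.join(repo, '.git')):
            return GIT
        return None
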
Example no. 16
 def _cache(self, compilation_options):
     path = self.get_path()
     if os.path.exists(path):
         logging.info('Revision is already cached: "%s"' % path)
         if not os.path.exists(self._sentinel_file):
             logging.critical(
                 'The build for the cached revision at "%s" is corrupted '
                 'or was made with an older lab version. Please delete '
                 'it and try again.' % path)
     else:
         tools.makedirs(path)
         retcode = tools.run_command(
             ['hg', 'archive', '-r', self.rev, '-I', 'src', path], cwd=self.repo)
         if retcode != 0:
             shutil.rmtree(path)
             logging.critical('Failed to make checkout.')
         self._compile(compilation_options)
         self._cleanup()
Example no. 18
 def cache(self, revision_cache):
     self._path = os.path.join(revision_cache, self._hashed_name)
     if os.path.exists(self.path):
         logging.info('Revision is already cached: "%s"' % self.path)
         if not os.path.exists(self._get_sentinel_file()):
             logging.critical(
                 'The build for the cached revision at {} is corrupted '
                 'or was made with an older Lab version. Please delete '
                 'it and try again.'.format(self.path))
     else:
         tools.makedirs(self.path)
         excludes = ['-X{}'.format(d) for d in ['experiments', 'misc']]
         retcode = tools.run_command(
             ['hg', 'archive', '-r', self.global_rev] + excludes +
             [self.path],
             cwd=self.repo)
         if retcode != 0:
             shutil.rmtree(self.path)
             logging.critical('Failed to make checkout.')
         self._compile()
         self._cleanup()
Example no. 19
 def _compile(self):
     retcode = tools.run_command(self.build_cmd, cwd=self.path)
     if retcode == 0:
         tools.write_file(self._get_sentinel_file(), "")
     else:
         logging.critical(f"Build failed in {self.path}")
Example no. 20
 def start_runs(self):
     tools.run_command(
         [tools.get_python_executable(), self.EXP_RUN_SCRIPT], cwd=self.exp.path
     )
Example no. 21
 def start_exp(self):
     tools.run_command(['./' + self.main_script_file], cwd=self.exp.path)
Example no. 22
 def start_runs(self):
     tools.run_command([sys.executable, self.EXP_RUN_SCRIPT],
                       cwd=self.exp.path)