Example #1
    def load(self):

        config = self.config

        prefix = '  :      %d  |  %s  |    %s  | ' % (config.level, [
            ' FINE ', 'COARSE'
        ][config.type], intf(len(config.samples), table=1))
        progress = Progress(prefix=prefix,
                            steps=len(config.samples),
                            length=33)
        progress.init()

        for i, sample in enumerate(config.samples):
            if self.params.verbose >= 2:
                self.results[i] = config.solver.load(config.level, config.type,
                                                     sample)
            else:
                try:
                    self.results[i] = config.solver.load(
                        config.level, config.type, sample)
                except Exception:
                    self.results[i] = None
            progress.update(i + 1)

        progress.reset()

        loaded = [i for i, result in enumerate(self.results) if result is not None]

        self.available = (len(loaded) > 0)

        return loaded
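
The Progress used in Examples #1 and #8 is a single-line text progress bar: it is built with a prefix string, a number of steps and a bar length, and driven through init(), update(step), message(text), reset() and finalize(). A minimal sketch of such a class follows; only the constructor arguments and method names come from the call sites above, the rendering details are assumptions.

import sys

class Progress:
    """Hypothetical sketch of a single-line text progress bar (not the original class)."""

    def __init__(self, prefix='', steps=1, length=20):
        self.prefix = prefix    # text printed before the bar
        self.steps = steps      # total number of steps
        self.length = length    # bar width in characters

    def init(self):
        self.update(0)

    def update(self, step):
        # redraw the bar in place, e.g. "prefix [#####.....]  5/33"
        filled = int(self.length * step / self.steps)
        bar = '#' * filled + '.' * (self.length - filled)
        sys.stdout.write('\r%s[%s] %2d/%d' % (self.prefix, bar, step, self.steps))
        sys.stdout.flush()

    def message(self, text):
        # replace the bar with a plain status message and return the full line
        line = self.prefix + text
        sys.stdout.write('\r' + line)
        sys.stdout.flush()
        return line

    def reset(self):
        # wipe the current line so subsequent output starts clean
        sys.stdout.write('\r' + ' ' * (len(self.prefix) + self.length + 10) + '\r')
        sys.stdout.flush()

    def finalize(self):
        sys.stdout.write('\n')
        sys.stdout.flush()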
Example #2
def copy_restorable_apps_to_wic(target: FactoryClient.Target, wic_image: str, token: str, apps_shortlist: list,
                                fetch_dir: str, progress: Progress):
    p = Progress(4, progress)
    apps_fetcher = SkopeAppFetcher(token, fetch_dir)
    apps_fetcher.fetch_target(target, shortlist=apps_shortlist, force=True)
    p.tick()
    apps_size_b = apps_fetcher.get_target_apps_size(target)
    p.tick()

    logger.info('Restorable Apps require extra {} bytes of storage'.format(apps_size_b))
    with WicImage(wic_image, apps_size_b) as wic_image:
        if os.path.exists(wic_image.docker_data_root):
            # wic image was populated by container images data during LmP build (/var/lib/docker)
            # let's remove it and populate with the given images data
            logger.info('Removing existing preloaded app images from the system image')
            shutil.rmtree(wic_image.docker_data_root)

        if os.path.exists(wic_image.compose_apps_root):
            # wic image was populated by container images data during LmP build (/var/sota/compose-apps)
            # let's remove it and populate with the given images data
            logger.info('Removing existing preloaded compose apps from the system image')
            shutil.rmtree(wic_image.compose_apps_root)

        if os.path.exists(wic_image.restorable_apps_root):
            # wic image was populated by container images data during LmP build (/var/sota/reset-apps)
            # let's remove it and populate with the given images data
            logger.info('Removing existing preloaded app images from the system image')
            shutil.rmtree(wic_image.restorable_apps_root)

        cmd('cp', '-r', apps_fetcher.target_dir(target.name), wic_image.restorable_apps_root)
        p.tick()
        wic_image.update_target(target)
    p.tick()
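
Examples #2, #6, #7, #9, #10 and #11 share a different, tick-based Progress: it is created as Progress(total, parent) and advanced with tick(), so that a sub-task's progress is folded into its parent's. The call sites only fix the constructor signature and tick(); the percentage propagation and logging in the sketch below are assumptions.

import logging

logger = logging.getLogger(__name__)

class Progress:
    """Hypothetical sketch of a nested, tick-based progress reporter (not the original class)."""

    def __init__(self, total, parent=None):
        self.total = total      # number of tick() calls expected at this level
        self.parent = parent    # optional enclosing Progress
        self.current = 0

    def tick(self):
        self.current += 1
        self._report(min(self.current, self.total) / self.total)

    def _report(self, fraction):
        if self.parent is not None:
            # fold this sub-task's fraction into one step of the parent
            parent_fraction = (self.parent.current + fraction) / self.parent.total
            self.parent._report(min(parent_fraction, 1.0))
        else:
            logger.info('Progress: %.0f%%', fraction * 100)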
Example #3
    def get_target_system_image(self, target: Target, out_dir: str,
                                progress: Progress):
        # https://api.foundries.io/projects/<factory>/lmp/builds/<build-numb>/runs/<machine>/<image-name>-<machine>.wic.gz

        image_base_url = target['custom']['origUri'] if 'origUri' in target[
            'custom'] else target['custom']['uri']
        image_machine = target['custom']['hardwareIds'][0]
        image_filename = target['custom']['image-file']

        base_url = image_base_url.replace('https://ci.foundries.io',
                                          self.api_base_url)
        image_url = os.path.join(base_url, 'runs', image_machine,
                                 image_filename)
        os_release_url = os.path.join(base_url, 'runs', image_machine,
                                      'os-release')

        image_file_path = os.path.join(out_dir, image_filename)
        # drop the trailing '.gz' suffix (str.rstrip would strip characters, not the suffix)
        extracted_image_file_path = image_file_path[:-len('.gz')] \
            if image_file_path.endswith('.gz') else image_file_path

        p = Progress(2, progress)

        if not os.path.exists(extracted_image_file_path):
            logger.info(
                'Downloading Target system image...; Target: {}, image: {}'.
                format(target.name, image_filename))

            image_resp = http_get(image_url, headers=self._auth_headers)
            with open(image_file_path, 'wb') as image_file:
                for data_chunk in image_resp.iter_content(chunk_size=65536):
                    image_file.write(data_chunk)
            p.tick()

            logger.info(
                'Extracting Target system image: {}'.format(image_file_path))
            subprocess.check_call(['gunzip', '-f', image_file_path])
            p.tick()
        else:
            logger.info(
                'Target system image has been already downloaded: {}'.format(
                    extracted_image_file_path))

        release_resp = requests.get(os_release_url, headers=self._auth_headers)
        if release_resp.ok:
            try:
                release_info = self.Release.parse(
                    dict([
                        line.split('=')
                        for line in release_resp.content.decode().splitlines()
                    ]))
            except Exception as exc:
                logger.error(
                    'Failed to parse a received information about LmP release: '
                    + str(exc))
                release_info = self.Release(0, '')  # or just `raise` ???
        else:
            release_info = self.Release(0, '')
            logger.info('Missing info about LmP release.')

        return extracted_image_file_path, release_info
Example #4
 def predict_all(self, corpus):
     if not self.data:
         self.set_data(corpus)
     self.predictions = np.zeros(len(self.data))
     for i, prediction in enumerate(
             Progress(self.predict_all_generator(corpus), len(self.data),
                      10)):
         self.predictions[i] = prediction
     return self.predictions
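
Examples #4, #5 and #12 use yet another Progress, one that wraps an iterable: it is constructed as Progress(iterable, total, 10) and simply iterated over, printing a status line every few items. The sketch below assumes the third argument is a reporting interval; only the constructor shape and the iteration protocol come from the call sites.

class Progress:
    """Hypothetical sketch of an iterator-wrapping progress reporter (not the original class)."""

    def __init__(self, iterable, total, report_every=10):
        self.iterable = iterable
        self.total = total                  # expected number of items
        self.report_every = report_every    # print a status line every N items

    def __iter__(self):
        for count, item in enumerate(self.iterable, start=1):
            if count % self.report_every == 0 or count == self.total:
                print('{}/{} ({:.0f}%)'.format(
                    count, self.total, 100.0 * count / self.total))
            yield item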
Example #5
 def extract_parsed(self, out_filename, filename=None, nth=None):
     if filename is None:
         filename = self.filename or click.prompt('file name is required')
     filelength = ParseUtils.file_length(filename)
     if nth is None:
         nth = self.nth
     with open(out_filename, 'w') as f:
         csv_writer = csv.writer(f)
         for row in Progress(self.parse_all_generator(), filelength, 10):
             csv_writer.writerow(row)
Example #6
def copy_compose_apps_to_wic(target: FactoryClient.Target, fetch_dir: str,
                             wic_image: str, token: str, apps_shortlist: list,
                             progress: Progress):
    p = Progress(4, progress)
    apps_fetcher = TargetAppsFetcher(token, fetch_dir)
    apps_fetcher.fetch_target(target, shortlist=apps_shortlist, force=True)
    p.tick()
    apps_size_b = apps_fetcher.get_target_apps_size(target)
    p.tick()

    logger.info(
        'Compose Apps require extra {} bytes of storage'.format(apps_size_b))
    with WicImage(wic_image, apps_size_b) as wic_image:
        if os.path.exists(wic_image.docker_data_root):
            # wic image was populated by container images data during LmP build (/var/lib/docker)
            # let's remove it and populate with the given images data
            logger.info(
                'Removing existing preloaded app images from the system image')
            shutil.rmtree(wic_image.docker_data_root)
        else:
            # intel installer images won't have this directory
            _mk_parent_dir(wic_image.docker_data_root)

        if os.path.exists(wic_image.compose_apps_root):
            # wic image was populated by container images data during LmP build (/var/sota/compose-apps)
            # let's remove it and populate with the given images data
            logger.info(
                'Removing existing preloaded compose apps from the system image'
            )
            shutil.rmtree(wic_image.compose_apps_root)
        else:
            # intel installer images won't have this directory
            _mk_parent_dir(wic_image.compose_apps_root)

        # copy <fetch-dir>/<target-name>/apps/* to /var/sota/compose-apps/
        subprocess.check_call([
            'cp', '-r',
            apps_fetcher.apps_dir(target.name), wic_image.compose_apps_root
        ])
        # copy <fetch-dir>/<target-name>/images/* to /var/lib/docker/
        subprocess.check_call([
            'cp', '-r',
            apps_fetcher.images_dir(target.name), wic_image.docker_data_root
        ])

        p.tick()
        wic_image.update_target(target)
    p.tick()
Example #7
def copy_container_images_to_wic(target: FactoryClient.Target, factory: str,
                                 ostree_repo_archive_dir: str, app_repo_dir,
                                 app_fetch_dir: str, wic_image: str,
                                 token: str, apps_shortlist: list,
                                 progress: Progress):

    p = Progress(2, progress)
    target_app_store = ArchOSTreeTargetAppsStore(factory,
                                                 ostree_repo_archive_dir,
                                                 app_repo_dir)
    target.shortlist = apps_shortlist
    if not target_app_store.exist(target):
        logger.info('Compose Apps haven\'t been found, fetching them...')
        apps_fetcher = TargetAppsFetcher(token, app_fetch_dir)
        if target_app_store.exist_branch(target):
            target_app_store.checkout(target,
                                      apps_fetcher.target_dir(target.name))
        apps_fetcher.fetch_target(target, force=True)
        target.apps_uri = target_app_store.store(
            target, apps_fetcher.target_dir(target.name))
    p.tick()

    with TemporaryDirectory(dir=os.getenv('HOME', '/root')) as tmp_tree_dir:
        # TODO: make use of the commit size generation functionality to determine a size to extend a wic image for
        logger.info('Building an ostree repo for the given Target...')
        os.makedirs(tmp_tree_dir, exist_ok=True)
        tmp_tree_repo = OSTreeRepo(tmp_tree_dir, 'bare', create=True)
        p.tick()
        target_app_store.copy(target, tmp_tree_repo)
        p.tick()

        with WicImage(wic_image,
                      tmp_tree_repo.size_in_kbs() * 1024) as wic_image:
            logger.info('Removing previously preloaded Apps if any...')

            shutil.rmtree(wic_image.docker_data_root, ignore_errors=True)
            shutil.rmtree(wic_image.compose_apps_root, ignore_errors=True)
            shutil.rmtree(wic_image.compose_apps_tree, ignore_errors=True)
            p.tick()
            target_app_store.copy_and_checkout(target,
                                               wic_image.compose_apps_tree,
                                               wic_image.compose_apps_root,
                                               wic_image.docker_data_root)
            wic_image.update_target(target)
    p.tick()
Example #8
    def run(self):

        config = self.config

        # check if nothing is overwritten
        # unless the simulation is specified to proceed further or override is allowed
        if not self.params.proceed and not self.params.override:
            for sample in config.samples:
                config.solver.check(config.level, config.type, sample)

        # get information of the MC run and the prescribed parallelization
        info_mc = self.info()

        # set solver iteration
        config.solver.iteration = config.iteration

        # use progress indicator, report MC info each time
        prefix = info_mc + '  '
        progress = Progress(prefix=prefix,
                            steps=len(config.samples),
                            length=20)
        progress.init()

        import time
        # queue all samples
        for step, sample in enumerate(config.samples):
            config.solver.run(config.level, config.type, sample,
                              self.seed(sample), config.discretization,
                              self.params, self.parallelization)
            progress.update(step + 1)

        # update the progress indicator message while samples are dispatched
        progress.message('Dispatching...')

        # dispatch all samples
        info_solver = config.solver.dispatch(config.level, config.type,
                                             self.parallelization)

        # print combined info: MC info and additional (scheduler-related) information from the solver
        info = progress.message(info_solver)
        progress.finalize()

        # return combined info
        return info
Example #9
def copy_container_images_from_archive_to_wic(target: FactoryClient.Target, app_image_dir: str, app_preload_dir: str,
                                              wic_image: str, token: str, apps_shortlist: list, progress: Progress):

    p = Progress(2, progress)
    target_app_store = ArchiveTargetAppsStore(app_image_dir)
    target.shortlist = apps_shortlist
    if not target_app_store.exist(target):
        logger.info('Container images have not been found, trying to obtain them...')
        apps_fetcher = TargetAppsFetcher(token, app_preload_dir)
        apps_fetcher.fetch_target_apps(target, apps_shortlist)
        apps_fetcher.fetch_apps_images()
        target_app_store.store(target, apps_fetcher.target_dir(target.name))
    p.tick()

    # in kilobytes
    image_data_size = target_app_store.images_size(target)
    with WicImage(wic_image, image_data_size * 1024) as wic_image:
        target_app_store.copy(target, wic_image.docker_data_root, wic_image.compose_apps_root)
        wic_image.update_target(target)
    p.tick()
Example #10
    def get_target_system_image(self, target: Target, out_dir: str,
                                progress: Progress):
        # https://api.foundries.io/projects/<factory>/lmp/builds/<build-numb>/runs/<machine>/<image-name>-<machine>.wic.gz

        image_base_url = target['custom']['origUri'] if 'origUri' in target[
            'custom'] else target['custom']['uri']
        image_machine = target['custom']['hardwareIds'][0]
        image_filename = target['custom']['image-file']

        base_url = image_base_url.replace('https://ci.foundries.io',
                                          self.api_base_url)
        image_url = os.path.join(base_url, 'runs', image_machine,
                                 image_filename)

        image_file_path = os.path.join(out_dir, image_filename)
        # drop the trailing '.gz' suffix (str.rstrip would strip characters, not the suffix)
        extracted_image_file_path = image_file_path[:-len('.gz')] \
            if image_file_path.endswith('.gz') else image_file_path

        p = Progress(2, progress)

        if not os.path.exists(extracted_image_file_path):
            logger.info(
                'Downloading Target system image...; Target: {}, image: {}'.
                format(target.name, image_filename))

            image_resp = http_get(image_url, headers=self._auth_headers)
            with open(image_file_path, 'wb') as image_file:
                for data_chunk in image_resp.iter_content(chunk_size=65536):
                    image_file.write(data_chunk)
            p.tick()

            logger.info(
                'Extracting Target system image: {}'.format(image_file_path))
            subprocess.check_call(['gunzip', '-f', image_file_path])
            p.tick()
        else:
            logger.info(
                'Target system image has been already downloaded: {}'.format(
                    extracted_image_file_path))

        return extracted_image_file_path
Example #11
            err_msg = 'No Targets found; Factory: {}, input Target list: {}'.format(
                args.factory, args.targets)
        else:
            logger.info('Getting Targets of version {}'.format(
                args.target_version))
            targets = factory_client.get_targets_by_version(
                args.target_version)
            err_msg = 'No Targets found; Factory: {}, Version/Build Number: {}'.format(
                args.factory, args.target_version)

        found_targets_number = len(targets)
        if found_targets_number == 0:
            logger.warning(err_msg)
            exit(1)

        p = Progress(len(targets))
        logger.info('Found {} Targets to assemble image for'.format(
            found_targets_number))
        for target in targets:
            logger.info('Assembling image for {}, shortlist: {}'.format(
                target.name, args.app_shortlist))
            subprog = Progress(3, p)
            image_file_path, release_info = factory_client.get_target_system_image(
                target, args.out_image_dir, subprog)

            if args.app_type == 'restorable' or (
                    not args.app_type and release_info.lmp_version > 84):
                logger.info('Preloading Restorable Apps...')
                copy_restorable_apps_to_wic(target, image_file_path,
                                            args.token, args.app_shortlist,
                                            args.fetch_dir, subprog)
Example #12
 def extract_output(self, out_filename):
     with open(out_filename, 'w') as f:
         csv_writer = csv.writer(f, delimiter=',')
         for row in Progress(self.gen_replace_all(), Utils.file_length(self.filename), 10):
             csv_writer.writerow(row)
Example #13
    def start(self):
        try:
            _, _ = os.popen("stty size", "r").read().split()
            run_from_term = True
        except Exception:
            run_from_term = False
        progress = Progress(len(self._all_models), self._epochs,
                            len(self._train_loader), run_from_term)
        progress.init_print(len(self._all_models), self._model_name,
                            self._dataset_name)

        for model in self._all_models:  # different sized models
            progress.update_model()
            model.init_weights(self._prev_model)  # need for 2layer NN
            model.to(self._device)
            train_losses = []
            val_losses = []
            for epoch in range(self._epochs):
                model.epoch_step()
                running_loss = 0.0
                for data in self._train_loader:
                    progress.update_batch()
                    running_loss += model.train_step(data, self._device)
                train_losses.append(running_loss / len(self._train_loader))

                running_loss = 0.0
                for data in self._val_loader:
                    running_loss += model.val_step(data, self._device)
                val_losses.append(running_loss / len(self._val_loader))

                progress.update_epoch(train_losses[-1], val_losses[-1])

                # TODO: only for two layer nn?
                if (train_losses[-1] == 0.0 and
                        model.num_parameters() < self._interpolation_threshold):
                    break

            running_loss = 0
            running_acc = 0
            for data in self._test_loader:
                loss, acc = model.test_step(data, self._device)
                running_loss += loss
                running_acc += acc
            accuracy = running_acc / len(self._test_loader)
            test_loss = running_loss / len(self._test_loader)

            progress.finished_model(model.num_parameters(), test_loss,
                                    accuracy)

            model_name = str(model.num_parameters())
            self.all_test_losses[model_name] = test_loss
            self.all_train_losses[model_name] = train_losses
            self.all_val_losses[model_name] = val_losses
            self._prev_model = model

            path = os.path.join("data", "models", self._model_name,
                                self._loss_name, "epochs_" + str(self._epochs))
            if not os.path.exists(path):
                os.makedirs(path)
            file_name = os.path.join(path, model_name)
            self.save()
            print("Saving model to \"{}\"\n".format(file_name))
            model.save(file_name)
        progress.finished_training()