Example #1
    def update_config(self, filename: Path) -> int:
        """Update the firmware framework configuration  using the given file

        :param filename: user firmware configuration file
        :return: number of settings updated
        """

        # Check whether a configuration file was provided
        if not filename:
            logger.info('no user config specified, skipping config update')
            return 0
        logger.info('updating user configuration')

        # Load user provided firmware configuration
        contents = YAML().load(filename.read_text())
        if not contents:
            logger.warning(f'YAML file {filename} is empty')
            return 0
        else:
            logger.debug(
                f'loaded {len(contents)} sections from {filename}')

        # update firmware configuration
        updates = 0
        for (k, v) in contents.items():
            if k not in self.firmware_config:
                raise ValueError(
                    f'section {k} is missing from firmware configuration')
            for (setting, value) in v.items():
                logger.debug(f'updating setting {setting} with value {value}')
                self.firmware_config[k][setting] = value
                updates += 1

        return updates
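A minimal usage sketch of the update logic above, assuming a hypothetical firmware configuration with a single 'network' section; the section and setting names are made up for illustration and are not from the project itself.

from ruamel.yaml import YAML

# Hypothetical user configuration: one known section, two settings.
user_yaml = """\
network:
  dhcp: false
  mtu: 9000
"""

# Stand-in for the firmware_config mapping held by the class above.
firmware_config = {'network': {'dhcp': True, 'mtu': 1500}}

for section, settings in YAML().load(user_yaml).items():
    if section not in firmware_config:
        raise ValueError(f'section {section} is missing from firmware configuration')
    firmware_config[section].update(settings)

print(firmware_config)  # {'network': {'dhcp': False, 'mtu': 9000}}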
Example #2
def main():
    raw_req = YAML(typ="safe").load(Path("templates/echo.yaml"))

    req = {"_" + k: v for k, v in raw_req.items() if k != "parameters"}
    req.update(raw_req.get("parameters", {}))

    futures = []
    sys_client = SystemClient(**load_config(), blocking=False)

    start = datetime.now()

    for _ in range(250_000):
        futures.append(sys_client.send_bg_request(**req))
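The template is reshaped before sending: every top-level key except parameters gets an underscore prefix, and the parameters mapping is flattened into the same dict. A small sketch with a made-up echo template; the field names are hypothetical and not taken from the real templates/echo.yaml.

from ruamel.yaml import YAML

raw_req = YAML(typ="safe").load("""\
system: echo
command: say
parameters:
  message: hello
""")

# Same reshaping as above: prefix non-parameter keys, then flatten parameters.
req = {"_" + k: v for k, v in raw_req.items() if k != "parameters"}
req.update(raw_req.get("parameters", {}))

print(req)  # {'_system': 'echo', '_command': 'say', 'message': 'hello'}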
Example #3
def get_automatic_entries() -> List[BackupEntry]:
    """Parses the automatic entries file and returns a list of BackupEntries.

    Returns:
        List[BackupEntry]: backup entries parsed.
    """

    automatic_entries_dict = YAML(typ="safe").load(settings.automatic_path.read_text())

    entries = []
    for name, yaml_entry in automatic_entries_dict.items():
        result = check_yaml_entry(name=name, **yaml_entry)
        entries.append(BackupEntry(**result))
    return entries
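The entries file is read with the safe loader as a mapping of entry name to keyword arguments for check_yaml_entry. A minimal sketch of that shape; the field names (source, destination) are hypothetical, since BackupEntry's real attributes are not shown here.

from ruamel.yaml import YAML

raw = """\
nightly-home:
  source: /home/user
  destination: /mnt/backup/home
"""

for name, yaml_entry in YAML(typ="safe").load(raw).items():
    # Each value becomes the **yaml_entry keyword arguments above.
    print(name, yaml_entry)
# nightly-home {'source': '/home/user', 'destination': '/mnt/backup/home'}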
Example #4
def parse_yaml(yaml_path, model_id):
    from tensorflow.contrib.training import HParams
    from ruamel.yaml import YAML

    hparams = HParams()
    hparams.add_hparam('model_id', model_id)

    with open(yaml_path) as fp:
        customized = YAML().load(fp)
        for k, v in customized.items():
            if k in hparams:
                hparams.set_hparam(k, v)
            else:
                hparams.add_hparam(k, v)
    return hparams
Example #5
    def load(cls, path: Path, basename: str) -> "Schema":
        data = YAML().load(path / f"{basename}.pvi.yaml")
        local = data.get("local", None)
        if local:
            local_path = path / local.replace("$(basename)", basename)
            overrides = YAML().load(local_path)
            for k, v in overrides.items():
                if k == "components":
                    # Merge
                    by_name = {}
                    for existing in walk_dicts(data["components"]):
                        by_name[existing["name"]] = existing
                    for component in v:
                        by_name[component["name"]].update(component)
                else:
                    # Replace
                    data[k] = v
        schema = cls(**data)
        return schema
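A small sketch of the override behaviour, with plain dicts standing in for the loaded YAML: entries under components are merged into the base schema by name, while any other key simply replaces the base value. The real code also descends into nested components via walk_dicts, which this sketch skips.

data = {
    'label': 'Base',
    'components': [{'name': 'gain', 'initial': 1}, {'name': 'offset', 'initial': 0}],
}
overrides = {
    'label': 'Site-specific',
    'components': [{'name': 'gain', 'initial': 10}],
}

for k, v in overrides.items():
    if k == 'components':
        # Merge: update existing components in place, matched by name
        by_name = {c['name']: c for c in data['components']}
        for component in v:
            by_name[component['name']].update(component)
    else:
        # Replace: any other key is overwritten wholesale
        data[k] = v

print(data['label'])          # Site-specific
print(data['components'][0])  # {'name': 'gain', 'initial': 10}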
Example #6
class Marksheet:

    # --------------------------- I/O -----------------------------------------

    def __init__(self, marksheet_path=None):
        '''
        Initialize an empty marksheet or with the contents of a file
        '''
        if marksheet_path is None:
            self.data = {}
        else:
            self.load(marksheet_path)

    def load(self, marksheet_path):
        '''
        Initialize the marksheet with the contents of a file
        '''
        with open(marksheet_path) as marksheet:
            self.data = YAML().load(marksheet)
        if self.data is None:
            self.data = {}

    def save(self, destination_path):
        '''
        Saves the marksheet to a file.

        NOTE: This is not a robust yaml dump, but it ensures that the output 
              file contains blanks instead of 'null's for unmarked students.
              This conveniently works because yaml supports array notation, 
              and so the mark_lists can be written directly to yaml.
        '''
        with open(destination_path, 'w') as marksheet:
            for student, mark in self.data.items():
                if mark is None:
                    mark = ""
                if student is not None:
                    marksheet.write(f'{student}: {mark}\n')
    
    # ----------------------- Marks Bookkeeping -------------------------------


    def add_students(self, student_list):
        '''
        Add a list of students to the marksheet if they don't already exist
        '''
        for student in student_list:
            if student not in self.data and student is not None:
                self.data[student] = None
    
    def marked_items(self):
        '''
        Return the student: mark_list pairs for all the marked students
        '''
        for student in self.data:
            mark_list = self.data[student] 
            if mark_list is not None:
                yield student, mark_list

    def items(self):
        '''
        Return all student: mark_list pairs
        '''
        return self.data.items()

    def __getitem__(self, student):
        if student not in self.data or self.data[student] is None:
            return []
        return self.data[student]

    def __setitem__(self, student, mark_list):
        self.data[student] = mark_list

    def unmarked_students(self):
        '''
        Generates all the unmarked student names in the marksheet
        '''
        for student, mark in self.data.items():
            if mark is None:
                yield student
    
    def update(self, student, mark_list):
        '''
        Update the marklist for a student
        '''
        self.data[student] = mark_list

    # --------------------- Statistics ----------------------------------------

    def get_mark_totals(self, only_compiled=False):
        '''
        Returns an array of (summed) total marks for each marked student.
        Optionally, return marks for only compiled submissions.
        '''
        if only_compiled:
            return [sum(marks) for _, marks in self.marked_items() if marks != []] 
        else:
            return [sum(marks) for _, marks in self.marked_items()]

    def num_marked(self):
        return len(self.get_mark_totals(False))

    def num_compiled(self):
        return len(self.get_mark_totals(True))

    def num_compile_failed(self):
        return self.num_marked() - self.num_compiled()

    def mean(self, only_compiled=False):
        marks_list = self.get_mark_totals(only_compiled)
        return sum(marks_list) / len(marks_list)

    def median(self, only_compiled=False):
        marks_list = self.get_mark_totals(only_compiled)
        return sorted(marks_list)[ len(marks_list) // 2 ]

    def get_distribution(self, only_compiled=False):
        '''
        Return a dictionary containing key value pairs of:
            { <mark> : number of students with <mark> }
        '''
        marks_list = self.get_mark_totals(only_compiled)        
        distribution = {
            i : marks_list.count(i) 
                for i in range(max(marks_list) + 1)
        }
        return distribution
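A short usage sketch of the Marksheet class above, kept entirely in memory (no load or save); the student names and marks are made up.

sheet = Marksheet()
sheet.add_students(['alice', 'bob', 'carol'])

sheet['alice'] = [3, 4, 5]   # marked, compiled
sheet['bob'] = []            # marked, but treated as a failed compile

print(list(sheet.unmarked_students()))  # ['carol']
print(sheet.num_marked())               # 2
print(sheet.num_compiled())             # 1
print(sheet.mean())                     # 6.0  (= (12 + 0) / 2)
print(sheet.get_distribution())         # {0: 1, 1: 0, ..., 12: 1}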
Example #7
def parse_args(yaml_path, model_id, default_set, followup=None):
    logger = logging.getLogger(APP_NAME)

    hparams = HParams()
    hparams.add_hparam('model_id', model_id)

    with open('default.yaml') as fp:
        configs = YAML().load(fp)
        default_cfg = configs[default_set]

        add_param_recur(hparams, default_cfg)

        if yaml_path:
            logger.info('loading parameters...')
            with open(yaml_path) as fp:
                customized = YAML().load(fp)
                for k, v in customized.items():
                    if k in hparams and hparams.get(k) != v:
                        logger.info('%20s: %20s -> %20s' %
                                    (k, hparams.get(k), v))
                        hparams.set_hparam(k, v)
                    elif k not in hparams:  # add new parameter
                        hparams.add_hparam(k, v)
                        logger.info(
                            '%30s %20s: %20s' %
                            ("[add from %s]" % yaml_path, k, hparams.get(k)))

    if followup:
        # useful when changing args for prediction
        logger.info('override args with follow-up args...')
        for k, v in followup.items():
            if k in hparams and hparams.get(k) != v:
                logger.info('%20s: %20s -> %20s' % (k, hparams.get(k), v))
                hparams.set_hparam(k, v)
            elif k not in hparams:
                logger.warning('%s is not a valid attribute! ignore!' % k)
    if 'save_dir' not in hparams:
        hparams.add_hparam(
            'save_dir',
            os.path.join(hparams.get('model_dir'), hparams.get('model_id')))
    if 'code_dir' not in hparams:
        hparams.add_hparam('code_dir',
                           os.path.join(hparams.get('save_dir'), 'code'))
    hparams.set_hparam('summary_dir',
                       os.path.join(hparams.get('save_dir'), 'summary'))
    # reset logger model id
    logger = set_logger(model_id='%s:%s' %
                        (DEVICE_ID, hparams.get('model_id')))

    try:
        shutil.copytree('./',
                        hparams.get('code_dir'),
                        ignore=shutil.ignore_patterns(*IGNORE_PATTERNS))
        logger.info('current code base is copied to %s' %
                    hparams.get('save_dir'))
    except FileExistsError:
        logger.info('code base exists, no need to copy!')

    # if hparams.get('model_id') != model_id:
    #     logger.warning('model id is changed %s -> %s! '
    #                    'This happens when you train a pretrained model' % (
    #                        hparams.get('model_id'), model_id))
    #     hparams.set_hparam('model_id', model_id)
    if 'loss_csv_file' not in hparams:
        hparams.add_hparam('loss_csv_file',
                           os.path.join(hparams.get('save_dir'), 'loss.csv'))
    if 'is_serving' not in hparams:
        hparams.add_hparam('is_serving', False)

    logger.info('current parameters')
    for k, v in sorted(vars(hparams).items()):
        if not k.startswith('_'):
            logger.info('%20s = %-20s' % (k, v))

    return hparams
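The function above layers three sources of parameters: the chosen default set from default.yaml, then an optional per-run YAML, then the follow-up overrides. A minimal sketch of that precedence with plain dicts; HParams, logging, and the directory bookkeeping are left out, and the real code also warns about unknown follow-up keys.

from ruamel.yaml import YAML

defaults = YAML(typ='safe').load('batch_size: 32\nlearning_rate: 0.001\n')
customized = YAML(typ='safe').load('learning_rate: 0.01\ndropout: 0.5\n')
followup = {'batch_size': 64}

params = dict(defaults)    # 1. start from the default set
params.update(customized)  # 2. per-run YAML overrides or adds keys
params.update(followup)    # 3. follow-up args win last (e.g. at prediction time)

print(params)  # {'batch_size': 64, 'learning_rate': 0.01, 'dropout': 0.5}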
Example #8
        validate    =   [abspath(args.VALIDATE),]
        validate.extend([abspath(dfile), abspath(pfile),args.PLAN_FILE])
        run.add_command('validate',validate,
            time_limit=args.TIME_LIMIT, memory_limit=args.MEMORY_LIMIT)

    ### A HACK for Tarski unbound tmp file issue - to be removed
    if isfile(CLR_TMP_PATH) :
        run.add_command('clear_tmp',['sh', abspath(CLR_TMP_PATH),],
            time_limit=args.TIME_LIMIT, memory_limit=args.MEMORY_LIMIT)
    ### A HACK - to be removed

with open(Path(args.PLIST_YML),'r') as pfile :
    problem_list = YAML(typ='safe').load(pfile)

for algo in args.SOLVERS :
    for domain, b_dict in problem_list.items() :
        if args.DNAME and not any([d==domain for d in args.DNAME.keys()]) : continue
        for bname, plist in b_dict.items() :
            if args.BNAME and not any([b in bname for b in args.BNAME]) : continue
            bname = args.DNAME[domain]
            for pname, p in plist.items() :
                if args.PNAME and not any([re.match(p, pname) for p in args.PNAME]) : continue
                if args.TAGS and p['tag'] \
                    and not any([re.match(t, p['tag']) for t in args.TAGS]) : continue
                dfile = join(abspath(dirname(args.PLIST_YML)), Path(p['domain_file']))
                pfile = join(abspath(dirname(args.PLIST_YML)), Path(p['problem_file']))
                setup_exp_run(args, exp, algo, dfile, pfile, bname, domain, pname)

if (len(args.steps)>0 and (1 in args.steps or 'build' in args.steps)) or \
        args.run_all_steps : 
    with open(join(args.EXP_DIR,'plist.yml'),'w+') as pfile :