Example #1
def write_ConvertToHDF5Action_interpolate(yaml, start, end):
    log.info('Writing ConvertToHDF5Action.dat for INTERPOLATE GRIDS action')
    with open('./ConvertToHDF5Action.dat', 'w') as f:
        f.write('<begin_file>\n')
        f.write('\n')
        f.write('! Written by GetMeteoPy\n')
        f.write('{0:30}{1}'.format('ACTION', ': ' + 'INTERPOLATE GRIDS' + '\n'))
        f.write('{0:30}{1}'.format('TYPE_OF_INTERPOLATION', ': ' + str(yaml['typeOfInterpolation']) + '\n'))
        f.write('\n')
        f.write('{0:30}{1}'.format('START', ': ' + datetime.strftime(start, '%Y %m %d %H %M %S') + '\n'))
        f.write('{0:30}{1}'.format('END', ': ' + datetime.strftime(end, '%Y %m %d %H %M %S') + '\n'))
        f.write('\n')
        f.write('{0:30}{1}'.format('OUTPUTFILENAME', ': ' +
                yaml['outputPrefix']+'_'+datetime.strftime(start, '%Y-%m-%d')+'_'+datetime.strftime(end, '%Y-%m-%d')+'.hdf5' + '\n'))
        f.write('{0:30}{1}'.format('NEW_GRID_FILENAME', ': ' + yaml['bathymetry'] + '\n'))
        f.write('\n')
        for meteoModel in yaml['meteoModels'].keys():
            f.write('{0:30}{1}'.format('FATHER_FILENAME', ': ' + meteoModel + '.hdf5' + '\n'))
            f.write('{0:30}{1}'.format('FATHER_GRID_FILENAME', ': ' + yaml['meteoModels'][meteoModel]['meteoDatFile'] + '\n'))
        f.write('\n')
        if 'propertiesToInterpolate' in yaml.keys():
            f.write('<<BeginFields>>\n')
            for p in yaml['propertiesToInterpolate']:
                f.write(p+'\n')
            f.write('<<EndFields>>\n')
            f.write('\n')
        if ('mohidKeywords' in yaml.keys()) and ('INTERPOLATE GRIDS' in yaml['mohidKeywords'].keys()):
            f.write('! Additional keywords\n')
            for keyword in yaml['mohidKeywords']['INTERPOLATE GRIDS'].keys():
                value = yaml['mohidKeywords']['INTERPOLATE GRIDS'][keyword]
                f.write('{0:30}{1}'.format(keyword, ': ' + str(value) + '\n'))
            f.write('\n')
        f.write('<end_file>\n')
    copy2('./ConvertToHDF5Action.dat', './ConvertToHDF5Action-INTERPOLATE_GRIDS.dat')
Example #2
def patch_styles(yaml):
    '''
    Following the release of 1.0, any user with a config generated by version
    0.8 will crash without this method to restructure the style parts of the
    config file.
    '''
    if 'console_style' in yaml.keys():
        yaml['console_style'] = {
            'fontsize': {
                'default': 11,
                'user': 11
            },
            'background_color': {
                'default': 'Gray',
                'user': '******'
            },
            'font_color': {
                'default': 'Black',
                'user': '******'
            }
        }

    if 'edit_area_style' in yaml.keys():
        yaml['edit_area_style'] = {'fontsize': {'default': 14, 'user': 14}}

    return yaml
Example #3
def load_qc_input(path):
    data = {}
    yaml = load_yaml(path)
    assert len(yaml.keys()) == 1
    patient = list(yaml.keys())[0]
    for patient, patient_data in yaml.items():
        for sample, sample_data in patient_data.items():
            for library, library_data in sample_data.items():
                data[(sample, library)] = {
                    data_label: value
                    for data_label, value in library_data.items()
                }
    return data, patient
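A minimal, self-contained sketch of the nesting this loader flattens; the patient, sample, and library names below are invented for illustration, and a plain dict stands in for the parsed YAML:

yaml_doc = {
    'PATIENT_1': {
        'SAMPLE_A': {
            'LIB_01': {'bam': 'lib_01.bam', 'metrics': 'lib_01.csv'},
            'LIB_02': {'bam': 'lib_02.bam', 'metrics': 'lib_02.csv'},
        },
    },
}

# Same flattening as load_qc_input: (sample, library) -> {data_label: value}
flat = {
    (sample, library): dict(library_data)
    for patient_data in yaml_doc.values()
    for sample, sample_data in patient_data.items()
    for library, library_data in sample_data.items()
}
print(flat[('SAMPLE_A', 'LIB_01')]['bam'])  # -> lib_01.bam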
Example #4
def write_ConvertToHDF5Action_glue(yaml, meteoModel, start, end, files_to_glue):
    log.info('Writing ConvertToHDF5Action.dat for GLUES HDF5 FILES action')
    with open('./ConvertToHDF5Action.dat', 'w') as f:
        f.write('<begin_file>\n')
        f.write('\n')
        f.write('! Written by GetMeteoPy\n')
        f.write('{0:30}{1}'.format('ACTION', ': ' + 'GLUES HDF5 FILES' + '\n'))
        f.write('{0:30}{1}'.format('OUTPUTFILENAME', ': ' + meteoModel + '.hdf5' + '\n'))
        f.write('\n')
        f.write('{0:30}{1}'.format('START', ': ' + datetime.strftime(start, '%Y %m %d %H %M %S') + '\n'))
        f.write('{0:30}{1}'.format('END', ': ' + datetime.strftime(end, '%Y %m %d %H %M %S') + '\n'))
        f.write('\n')
        f.write('<<begin_list>>\n')
        for hdf5_file in files_to_glue:
            f.write(hdf5_file+'\n')
        f.write('<<end_list>>\n')
        f.write('\n')
        if ('mohidKeywords' in yaml.keys()) and ('GLUES HDF5 FILES' in yaml['mohidKeywords'].keys()):
            f.write('! Additional keywords\n')
            for keyword in yaml['mohidKeywords']['GLUES HDF5 FILES'].keys():
                value = yaml['mohidKeywords']['GLUES HDF5 FILES'][keyword]
                f.write('{0:30}{1}'.format(keyword, ': ' + str(value) + '\n'))
            f.write('\n')
        f.write('<end_file>\n')
    copy2('./ConvertToHDF5Action.dat', './ConvertToHDF5Action-GLUES_HDF5_FILES.dat')
Example #5
def generate_table(data):
    global qty_images
    images = []
    default = ""

    # Iterate over per input (depth 1)
    for yaml in data['devices']:
        # Iterate over vendors
        for vendor in yaml.keys():
            # Iterate over board (depth 2)
            for board in yaml[vendor]:
                # Iterate over per board
                for key in board.keys():
                    # Check if there is an image for the board
                    if 'images' in key:
                        # Iterate over image (depth 3)
                        for image in board[key]:
                            images.append("{} ({})".format(
                                image.get('name', default),
                                image.get('architecture', default)))
                if 'images' not in board.keys():
                    print("[i] Possible issue with: " +
                          board.get('board', default) + " (no images)")

    table = "| [Image Name](images.html) (Architecture) |\n"
    table += "|---------------------------|\n"
    # iterate over all the devices
    for device in sorted(set(images)):
        table += "| {} |\n".format(device)
    qty_images = len(set(images))
    return table
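A hedged usage sketch for the table generator above; the vendor, board, and image entries are placeholders that only mirror the nesting (devices list, then vendor, board, and images) the loops expect:

sample_data = {
    'devices': [{
        'ExampleVendor': [{
            'board': 'example-board',
            'images': [{'name': 'Example Image', 'architecture': 'arm64'}],
        }],
    }],
}
print(generate_table(sample_data))
# | [Image Name](images.html) (Architecture) |
# |---------------------------|
# | Example Image (arm64) |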
Example #6
def patch_styles(yaml):
    '''
    Following the release of 1.0, any user with a config generated by version
    0.8 will crash without this method to restructure the style parts of the
    config file.
    '''
    if 'console_style' in yaml.keys():
        yaml['console_style'] = {'fontsize': {'default': 11, 'user': 11},
                                 'background_color': {'default': 'Gray',
                                                      'user': '******'},
                                 'font_color': {'default': 'Black',
                                                'user': '******'}}

    if 'edit_area_style' in yaml.keys():
        yaml['edit_area_style'] = {'fontsize': {'default': 14, 'user': 14}}

    return yaml
Example #7
def languages_lang_file():
    """
    Returns the list of languages present in the language file
    with their respective type and group
    """
    yaml = read_langs_file()

    return list(yaml.keys())
Example #8
def extract_node_include_info(reduced, root_node_address, sub_node_address,
                              yaml, defs, structs, y_sub):
    node = reduced[sub_node_address]
    node_compat = get_compat(reduced[root_node_address])
    label_override = None

    if node_compat not in yaml.keys():
        return {}, {}

    if y_sub is None:
        y_node = yaml[node_compat]
    else:
        y_node = y_sub

    if yaml[node_compat].get('use-property-label', False):
        for yp in y_node['properties']:
            if yp.get('label') is not None:
                if node['props'].get('label') is not None:
                    label_override = convert_string_to_label(
                        node['props']['label']).upper()
                    break

    # check to see if we need to process the properties
    for yp in y_node['properties']:
        for k, v in yp.items():
            if 'properties' in v:
                for c in reduced:
                    if root_node_address + '/' in c:
                        extract_node_include_info(
                            reduced, root_node_address, c, yaml, defs, structs,
                            v)
            if 'generation' in v:

                prefix = []
                if v.get('use-name-prefix') is not None:
                    prefix = [convert_string_to_label(k.upper())]

                for c in node['props'].keys():
                    if c.endswith("-names"):
                        pass

                    if re.match(k + '$', c):

                        if 'pinctrl-' in c:
                            names = node['props'].get('pinctrl-names', [])
                        else:
                            names = node['props'].get(c[:-1] + '-names', [])
                            if not names:
                                names = node['props'].get(c + '-names', [])

                        if not isinstance(names, list):
                            names = [names]

                        extract_property(
                            node_compat, yaml, sub_node_address, c, v, names,
                            prefix, defs, label_override)

    return
Example #9
def extract_node_include_info(reduced, root_node_address, sub_node_address,
                              yaml, defs, structs, y_sub):
    node = reduced[sub_node_address]
    node_compat = get_compat(reduced[root_node_address])
    label_override = None

    if node_compat not in yaml.keys():
        return {}, {}

    if y_sub is None:
        y_node = yaml[node_compat]
    else:
        y_node = y_sub

    if yaml[node_compat].get('use-property-label', False):
        for yp in y_node['properties']:
            if yp.get('label') is not None:
                if node['props'].get('label') is not None:
                    label_override = convert_string_to_label(
                        node['props']['label']).upper()
                    break

    # check to see if we need to process the properties
    for yp in y_node['properties']:
        for k, v in yp.items():
            if 'properties' in v:
                for c in reduced:
                    if root_node_address + '/' in c:
                        extract_node_include_info(
                            reduced, root_node_address, c, yaml, defs, structs,
                            v)
            if 'generation' in v:

                prefix = []
                if v.get('use-name-prefix') is not None:
                    prefix = [convert_string_to_label(k.upper())]

                for c in node['props'].keys():
                    if c.endswith("-names"):
                        pass

                    if re.match(k + '$', c):

                        if 'pinctrl-' in c:
                            names = node['props'].get('pinctrl-names', [])
                        else:
                            names = node['props'].get(c[:-1] + '-names', [])
                            if not names:
                                names = node['props'].get(c + '-names', [])

                        if not isinstance(names, list):
                            names = [names]

                        extract_property(
                            node_compat, yaml, sub_node_address, c, v, names,
                            prefix, defs, label_override)

    return
Example #10
def inject_yaml_data(sql, yaml):

    dict_keys = yaml.keys()

    for key in dict_keys:
        if isinstance(yaml[key], str):
            sql = sql.replace("{{" + key + "}}", str(yaml[key]))

    return sql
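A quick usage sketch with a made-up template and value dict, showing the {{key}} substitution this helper performs; only string values are replaced, so the integer placeholder is left untouched:

template = "SELECT * FROM {{table}} WHERE region = '{{region}}' LIMIT {{limit}}"
values = {'table': 'events', 'region': 'eu-west-1', 'limit': 100}
print(inject_yaml_data(template, values))
# -> SELECT * FROM events WHERE region = 'eu-west-1' LIMIT {{limit}}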
Example #11
def fix_old_default_project_yaml(yaml):
    '''
    Method to ensure compatibility between old config files and versions
    of nammu > 0.8
    '''
    if 'projects' in yaml.keys():
        if isinstance(yaml['projects']['default'], basestring):
            yaml['projects']['default'] = [yaml['projects']['default']]
    return yaml
Example #12
def fix_old_default_project_yaml(yaml):
    '''
    Method to ensure compatibility between old config files and versions
    of nammu > 0.8
    '''
    if 'projects' in yaml.keys():
        if isinstance(yaml['projects']['default'], basestring):
            yaml['projects']['default'] = [yaml['projects']['default']]
    return yaml
Example #13
def get_number_bits(yaml):
    """Extract the number of bits in yaml circuit."""
    bits = 0
    if "steps" in yaml.keys():
        for step in yaml["steps"]:
            if "gates" in step:
                for gate in step["gates"]:
                    if "bit" in gate:
                        bits = max(bits, gate["bit"] + 1)
    return bits
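For context, a small hand-written circuit dict (the gate layout is invented) showing that the bit count is the highest 'bit' index plus one:

circuit = {
    'steps': [
        {'gates': [{'name': 'measure', 'targets': [0], 'bit': 0}]},
        {'gates': [{'name': 'measure', 'targets': [1], 'bit': 2}]},
    ],
}
print(get_number_bits(circuit))  # -> 3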
Example #14
def extract_node_include_info(reduced, root_node_address, sub_node_address,
                              yaml, y_sub):

    filter_list = ['interrupt-names', 'reg-names', 'phandle', 'linux,phandle']
    node = reduced[sub_node_address]
    node_compat = get_compat(root_node_address)

    if node_compat not in yaml.keys():
        return {}, {}

    if y_sub is None:
        y_node = yaml[node_compat]
    else:
        y_node = y_sub

    # check to see if we need to process the properties
    for k, v in y_node['properties'].items():
        if 'properties' in v:
            for c in reduced:
                if root_node_address + '/' in c:
                    extract_node_include_info(reduced, root_node_address, c,
                                              yaml, v)
        if 'generation' in v:

            match = False
            for c in node['props'].keys():
                # if prop is in filter list - ignore it
                if c in filter_list:
                    continue

                if re.match(k + '$', c):

                    if 'pinctrl-' in c:
                        names = deepcopy(node['props'].get(
                            'pinctrl-names', []))
                    else:
                        if not c.endswith("-names"):
                            names = deepcopy(node['props'].get(
                                c[:-1] + '-names', []))
                            if not names:
                                names = deepcopy(node['props'].get(
                                    c + '-names', []))
                        else:
                            # guard against an unbound 'names' when the
                            # matched property is itself a *-names entry
                            names = []
                    if not isinstance(names, list):
                        names = [names]

                    extract_property(node_compat, yaml, sub_node_address, c, v,
                                     names)
                    match = True

            # Handle the case that we have a boolean property, but its not
            # in the dts
            if not match:
                if v['type'] == "boolean":
                    extract_property(node_compat, yaml, sub_node_address, k, v,
                                     None)
Example #15
def extract_node_include_info(reduced, root_node_address, sub_node_address,
                              yaml, y_sub):
    node = reduced[sub_node_address]
    node_compat = get_compat(root_node_address)
    label_override = None

    if node_compat not in yaml.keys():
        return {}, {}

    if y_sub is None:
        y_node = yaml[node_compat]
    else:
        y_node = y_sub

    if yaml[node_compat].get('use-property-label', False):
        try:
            label = y_node['properties']['label']
            label_override = convert_string_to_label(node['props']['label'])
        except KeyError:
            pass

    # check to see if we need to process the properties
    for k, v in y_node['properties'].items():
        if 'properties' in v:
            for c in reduced:
                if root_node_address + '/' in c:
                    extract_node_include_info(
                        reduced, root_node_address, c, yaml, v)
        if 'generation' in v:

            for c in node['props'].keys():
                if c.endswith("-names"):
                    pass

                if re.match(k + '$', c):

                    if 'pinctrl-' in c:
                        names = deepcopy(node['props'].get(
                                                    'pinctrl-names', []))
                    else:
                        if not c.endswith("-names"):
                            names = deepcopy(node['props'].get(
                                                    c[:-1] + '-names', []))
                            if not names:
                                names = deepcopy(node['props'].get(
                                                        c + '-names', []))
                        else:
                            names = []
                    if not isinstance(names, list):
                        names = [names]

                    extract_property(
                        node_compat, yaml, sub_node_address, c, v, names,
                        label_override)
Example #16
def patch_server_settings(yaml):
    '''
    Following the change in server address, we need to update users' config
    files without overwriting their other settings.
    '''
    old_url = 'http://oracc.museum.upenn.edu'
    new_url = 'http://build-oracc.museum.upenn.edu'
    if 'servers' in yaml.keys() and 'upenn' in yaml['servers'].keys():
        if yaml['servers']['upenn']['url'] == old_url:
            yaml['servers']['upenn']['url'] = new_url

    return yaml
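A before/after sketch of the URL rewrite, using a minimal config dict that contains only the fields the patch touches:

config = {'servers': {'upenn': {'url': 'http://oracc.museum.upenn.edu'}}}
patched = patch_server_settings(config)
print(patched['servers']['upenn']['url'])
# -> http://build-oracc.museum.upenn.edu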
Example #17
def parse_source_file(source, yaml):
    for key in yaml.keys():
        if key == 'download':
            source = DownloadSourceFile(source)
            source.parse(yaml[key])
            return source
        elif key == 'source':
            source = VCSSourceFile(source)
            source.parse(yaml[key])
            return source
        else:
            pbs.log.error('invalid type of source file:', key)
Example #18
def patch_server_settings(yaml):
    '''
    Following the change in server address, we need to update users' config
    files without overwriting their other settings.
    '''
    old_url = 'http://oracc.museum.upenn.edu'
    new_url = 'http://build-oracc.museum.upenn.edu'
    if 'servers' in yaml.keys() and 'upenn' in yaml['servers'].keys():
        if yaml['servers']['upenn']['url'] == old_url:
            yaml['servers']['upenn']['url'] = new_url

    return yaml
Example #19
    def get_scales_from_fitopt_file(self):
        if self.sys_file_in is None:
            return {}
        self.logger.debug(f"Loading sys scaling from {self.sys_file_in}")
        yaml = read_yaml(self.sys_file_in)
        if 'FLAG_USE_SAME_EVENTS' in yaml.keys():
            yaml.pop('FLAG_USE_SAME_EVENTS')
        raw = {
            k: float(v.split(maxsplit=1)[0])
            for _, d in yaml.items() for k, v in d.items()
        }
        return raw
Example #20
    def inject_yaml_data(str_data, yaml):

        dict_keys = yaml.keys()
        # print('before ', str_data)
        for key in dict_keys:
            if isinstance(yaml[key], dict):
                str_data = recurse_replace_yaml(str_data, yaml[key])
            elif not (isinstance(yaml[key], list)
                      or isinstance(yaml[key], dict)):
                str_data = str_data.replace("{{" + key + "}}", str(yaml[key]))
        # print('after ', str_data)
        return str_data
Example #21
def generate_table(data):
    global qty_devices, qty_images, qty_image_kali, qty_image_community, qty_image_eol, qty_image_unknown
    images = []
    default = ""
    table  = "| [Device Name](https://www.kali.org/docs/arm/) | [Build-Script](https://gitlab.com/kalilinux/build-scripts/kali-arm/) | [Official Image](https://www.kali.org/get-kali/#kali-arm) | Community Image | EOL/Retired Image |\n"
    table += "|---------------|--------------|----------------|-----------------|---------------|\n"

    # Iterate over per input (depth 1)
    for yaml in data['devices']:
        # Iterate over vendors
        for vendor in yaml.keys():
            # Iterate over board (depth 2)
            for board in yaml[vendor]:
                qty_devices += 1
                # Iterate over per board
                for key in board.keys():
                    # Check if there is an image for the board
                    if 'images' in key:
                        # Iterate over image (depth 3)
                        for image in board[key]:
                            if image['name'] not in images:
                                images.append(image['name']) # ALT: images.append(image['image'])
                                qty_images += 1
                                build_script = image.get('build-script', default)
                                if build_script:
                                    build_script = "[{0}](https://gitlab.com/kalilinux/build-scripts/kali-arm/-/blob/master/{0})".format(build_script)
                                name = image.get('name', default)
                                slug = image.get('slug', default)
                                if name and slug:
                                    name = "[{}](https://www.kali.org/docs/arm/{}/)".format(name, slug)
                                support = image.get('support', default)
                                if support == "kali":
                                    status = "x |  | "
                                    qty_image_kali += 1
                                elif support == "community":
                                    status = " | x | "
                                    qty_image_community += 1
                                elif support == "eol":
                                    status = " |  | x"
                                    qty_image_eol += 1
                                else:
                                    status = " |  | "
                                    qty_image_unknown += 1
                                table += "| {} | {} | {} |\n".format(name,
                                                                     build_script,
                                                                     status)
                            #else:
                            #    print('DUP {} / {}'.format(image['name'], image['image']))
                if 'images' not in board.keys():
                    print("[i] Possible issue with: " + board.get('board', default) + " (no images)")
    return table
Example #22
def _yaml_check_map(yaml, required_fields, optional_fields, check_valid=True):
  if not isinstance(yaml, dict):
    raise SpecParseError('{} must be a map. Use "key: value" syntax.'.format(yaml))

  for key in required_fields:
    if key not in yaml:
      raise SpecParseError('Required field "{}" not found in {}.'.format(key, yaml))

  if check_valid:
    valid_fields = required_fields + optional_fields
    for key in yaml.keys():
      if key not in valid_fields:
        raise SpecParseError('Unknown field "{}" in {}. Valid fields: {}'.format(
            key, yaml, valid_fields))
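A short illustration with invented field names of how the validator reacts to valid, unknown, and missing keys:

spec = {'name': 'demo', 'runtime': 'python3'}
_yaml_check_map(spec, required_fields=['name'], optional_fields=['runtime'])  # passes silently
_yaml_check_map(spec, required_fields=['name', 'entrypoint'], optional_fields=[])
# -> SpecParseError: Required field "entrypoint" not found in {...}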
Example #23
def check_unsupported(yaml, required, path=''):
    try:
        passed = True
        if not yaml:
            raise FailedInitialization(
                "YAML file is corrupt or truncated, nothing left to parse")
        if isinstance(yaml, list):
            for index, element in enumerate(yaml):
                for yk in element.keys():
                    listpath = f"{path}.{yk}[{index}]"

                    yamlValue = dict(element).get(yk, None)
                    for rk in required:
                        supportedSubkeys = rk.get(yk, None)
                        if supportedSubkeys:
                            break
                    if not supportedSubkeys:
                        _LOGGER.info(f"'{listpath}' option is unsupported")
                        return

                    subkeyList = supportedSubkeys.get('keys', None)
                    if subkeyList:
                        passed = check_unsupported(yamlValue, subkeyList,
                                                   listpath) and passed
        elif isinstance(yaml, dict) or isinstance(yaml, Configuration):
            for yk in yaml.keys():
                currentpath = path + yk if path == '' else path + '.' + yk

                yamlValue = dict(yaml).get(yk, None)
                for rk in required:
                    supportedSubkeys = rk.get(yk, None)
                    if supportedSubkeys:
                        break
                if not supportedSubkeys:
                    _LOGGER.info(f"'{currentpath}' option is unsupported")
                    return

                subkeyList = supportedSubkeys.get('keys', None)
                if subkeyList:
                    passed = check_unsupported(yamlValue, subkeyList,
                                               currentpath) and passed
        else:
            raise FailedInitialization('Unexpected YAML checking error')
    except FailedInitialization:
        raise
    except Exception as e:
        raise FailedInitialization(f"Unexpected exception: {e}")
    return passed
Example #24
def get_number_qubits(yaml):
    """Extract the number of qubits in yaml circuit."""
    qubits = 0
    if "steps" in yaml.keys():
        for step in yaml["steps"]:
            if "gates" in step:
                for gate in step["gates"]:
                    if "controls" in gate:
                        for ctrl_info in gate["controls"]:
                            qubits = max(qubits, ctrl_info["target"] + 1)
                    if "targets" in gate:
                        for target in gate["targets"]:
                            qubits = max(qubits, target + 1)
                    if "gates" in gate:
                        for gate in gate["gates"]:
                            for target in gate["targets"]:
                                qubits = max(qubits, target + 1)
    return qubits
Example #25
def generate_table(data):
    global qty_devices, qty_images
    default = ""
    table = "| Vendor | [Board](devices.html) | [Images](images.html) |\n"
    table += "|--------|-----------------------|-----------------------|\n"

    # Iterate over per input (depth 1)
    for yaml in data['devices']:
        # Iterate over vendors
        for vendor in yaml.keys():
            # Iterate over board (depth 2)
            for board in yaml[vendor]:
                qty_devices += 1
                qty_images += len(board.get('images', default))
                table += "| {} | {} | {} |\n".format(
                    vendor, board.get('name', default),
                    len(board.get('images', default)))
    return table
Example #26
def generate_table(data):
    global qty_devices, qty_images, qty_images_released
    images = []
    images_released = []
    default = ""
    table  = "| Image Name | Filename | Architecture | Preferred | Support | [Documentation](https://www.kali.org/docs/arm/) | [Kernel](kernel-stats.html) | Kernel Version | Notes |\n"
    table += "|------------|----------|--------------|-----------|---------|-------------------------------------------------|-----------------------|----------------|-------|\n"

    # Iterate over per input (depth 1)
    for yaml in data['devices']:
        # Iterate over vendors
        for vendor in yaml.keys():
            # Iterate over board (depth 2)
            for board in yaml[vendor]:
                qty_devices += 1
                # Iterate over per board
                for key in board.keys():
                    # Check if there is an image for the board
                    if 'images' in key:
                        # Iterate over image (depth 3)
                        for image in board[key]:
                            #qty_images += 1
                            images.append("{}".format(image.get('name', default)))
                            support = image.get('support', default)
                            if support == "kali":
                                #qty_images_released += 1
                                images_released.append("{}".format(image.get('name', default)))
                            slug = image.get('slug', default)
                            if slug:
                                slug = "[{0}](https://www.kali.org/docs/arm/{0}/)".format(slug)
                            table += "| {} | {} | {} | {} | {} | {} | {} | {} |\n".format(image.get('name', default),
                                                                                          image.get('image', default),
                                                                                          image.get('architecture', default),
                                                                                          image.get('preferred-image', default),
                                                                                          image.get('support', default),
                                                                                          slug,
                                                                                          image.get('kernel', default),
                                                                                          image.get('kernel-version', default),
                                                                                          image.get('image-notes', default))
                if 'images' not in board.keys():
                    print("[i] Possible issue with: " + board.get('board', default) + " (no images)")
    qty_images = len(set(images))
    qty_images_released = len(set(images_released))
    return table
Example #27
def extract_node_include_info(reduced, node_address, yaml, defs, structs):
    node = reduced[node_address]
    node_compat = get_compat(node)

    if not node_compat in yaml.keys():
        return {}, {}

    y_node = yaml[node_compat]

    # check to see if we need to process the properties
    for yp in y_node['properties']:
        for k, v in yp.items():
            if 'generation' in v:
                if v['generation'] == 'define':
                    label = v.get('define_string')
                    storage = defs
                else:
                    label = v.get('structures_string')
                    storage = structs

                prefix = []
                if v.get('use-name-prefix') != None:
                    prefix = [convert_string_to_label(k.upper())]

                for c in node['props'].keys():
                    if c.endswith("-names"):
                        pass

                    if re.match(k + '$', c):

                        if 'pinctrl-' in c:
                            names = node['props'].get('pinctrl-names', [])
                        else:
                            names = node['props'].get(c[:-1] + '-names', [])
                            if not names:
                                names = node['props'].get(c + '-names', [])

                        if not isinstance(names, list):
                            names = [names]

                        extract_property(yaml, node_address, c, v, names,
                                         prefix, defs)

    return
Example #28
def extract_node_include_info(reduced, node_address, yaml, defs, structs):
  node = reduced[node_address]
  node_compat = get_compat(node)

  if not node_compat in yaml.keys():
    return {}, {}

  y_node = yaml[node_compat]

  # check to see if we need to process the properties
  for yp in y_node['properties']:
    for k,v in yp.items():
      if 'generation' in v:
        if v['generation'] == 'define':
          label = v.get('define_string')
          storage = defs
        else:
          label = v.get('structures_string')
          storage = structs

        prefix = []
        if v.get('use-name-prefix') != None:
          prefix = [convert_string_to_label(k.upper())]

        for c in node['props'].keys():
          if c.endswith("-names"):
            pass

          if re.match(k + '$', c):

            if 'pinctrl-' in c:
              names = node['props'].get('pinctrl-names', [])
            else:
              names = node['props'].get(c[:-1] + '-names', [])
              if not names:
                names = node['props'].get(c + '-names', [])

            if not isinstance(names, list):
              names = [names]

            extract_property(yaml, node_address, c, v, names, prefix, defs)

  return
Example #29
def generate_table(data):
    global qty_devices
    default = ""
    table  = "| Vendor | Board | CPU | CPU Cores | GPU | RAM | RAM Size (MB) | Ethernet | Ethernet Speed (MB) | Wi-Fi | Bluetooth | USB2 | USB3 | Storage |        Notes        |\n"
    table += "|--------|-------|-----|-----------|-----|-----|---------------|----------|---------------------|-------|-----------|------|------|---------|---------------------|\n"

    # Iterate over per input (depth 1)
    for yaml in data['devices']:
        # Iterate over vendors
        for vendor in yaml.keys():
            # Iterate over board (depth 2)
            for board in yaml[vendor]:
                qty_devices += 1
                ram_size = ""
                storage = ""
                i = 0
                for f in natural_sort(board.get('ram-size', default)):
                    if i > 0:
                        ram_size += ", "
                    ram_size += f
                    i += 1
                i = 0
                for f in natural_sort(board.get('storage', default)):
                    if i > 0:
                        storage += ", "
                    storage += f
                    i += 1
                table += "| {} | {} | {} | {} | {} | {} | {} | {} | {} | {} | {} | {} | {} | {} | {} |\n".format(vendor,
                                                                                                            board.get('name', default),
                                                                                                            board.get('cpu', default),
                                                                                                            board.get('cpu-cores', default),
                                                                                                            board.get('gpu', default),
                                                                                                            board.get('ram', default),
                                                                                                            ram_size,
                                                                                                            board.get('ethernet', default),
                                                                                                            board.get('ethernet-speed', default),
                                                                                                            board.get('wifi', default),
                                                                                                            board.get('bluetooth', default),
                                                                                                            board.get('usb2', default),
                                                                                                            board.get('usb3', default),
                                                                                                            storage,
                                                                                                            board.get('notes', default))
    return table
Example #30
def normalize_yaml(yaml):
    """Normalize the YAML from project and role lookups.

    These are returned as a list of tuples.
    """
    if isinstance(yaml, list):
        # Normalize the roles YAML data
        normalized_yaml = [(x['name'], x['src'], x.get('version', 'HEAD'))
                           for x in yaml]
    else:
        # Extract the project names from the roles YAML and create a list of
        # tuples.
        projects = [x[:-9] for x in yaml.keys() if x.endswith('git_repo')]
        normalized_yaml = []
        for project in projects:
            repo_url = yaml['{0}_git_repo'.format(project)]
            commit_sha = yaml['{0}_git_install_branch'.format(project)]
            normalized_yaml.append((project, repo_url, commit_sha))

    return normalized_yaml
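Both input shapes this normalizer accepts, shown here with placeholder project names and URLs, reduce to the same (name, src, version) tuples:

# Role-style input: a list of role dicts.
roles = [{'name': 'demo_role', 'src': 'https://example.com/demo_role.git'}]
print(normalize_yaml(roles))
# -> [('demo_role', 'https://example.com/demo_role.git', 'HEAD')]

# Project-style input: flat *_git_repo / *_git_install_branch keys.
projects = {
    'demo_git_repo': 'https://example.com/demo.git',
    'demo_git_install_branch': 'abc123',
}
print(normalize_yaml(projects))
# -> [('demo', 'https://example.com/demo.git', 'abc123')]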
Example #31
def generate_manifest(data):
    global release, qty_devices, qty_images, qty_release_images
    default = ""
    devices = {}

    # Iterate over per input (depth 1)
    for yaml in data['devices']:
        # Iterate over vendors
        for vendor in yaml.keys():
            # Ready to have a unique name in the entry
            img_seen = set()
            # Iterate over board (depth 2)
            for board in yaml[vendor]:
                qty_devices += 1
                # Iterate over per board
                for key in board.keys():
                    # Check if there is an image for the board
                    if 'images' in key:
                        # Iterate over image (depth 3)
                        for image in board[key]:
                            qty_images += 1
                            # Check that it's not EOL or community supported
                            if image.get('support') == "kali":
                                name = image.get('name', default)
                                # If we haven't seen this image before for this vendor
                                if name not in img_seen:
                                    img_seen.add(name)
                                    qty_release_images += 1

                                    filename = "kali-linux-{}-{}".format(
                                        release, image.get('image', default))
                                    preferred = image.get(
                                        'preferred-image', default)
                                    slug = image.get('slug', default)
                                    jsonarray(devices, vendor, name, filename,
                                              preferred, slug)
    return json.dumps(devices, indent=2)
Example #32
def generate_table(data):
    global qty_kernels, qty_versions
    images = []
    default = "unknown"

    # Iterate over per input (depth 1)
    for yaml in data['devices']:
        # Iterate over vendors
        for vendor in yaml.keys():
            # Iterate over board (depth 2)
            for board in yaml[vendor]:
                # Iterate over per board
                for key in board.keys():
                    # Check if there is an image for the board
                    if 'images' in key:
                        # Iterate over image (depth 3)
                        for image in board[key]:
                            if image['name'] not in images:
                                images.append(
                                    image['name']
                                )  # ALT: images.append(image['image'])
                                qty_kernels += 1
                                qty_versions[(image.get('kernel',
                                                        default))] += 1
                            #else:
                            #    print('DUP {} / {}'.format(image['name'], image['image']))
                if 'images' not in board.keys():
                    print("[i] Possible issue with: " +
                          board.get('board', default) + " (no images)")

    table = "| Kernel | Qty |\n"
    table += "|--------|-----|\n"

    # iterate over all the devices
    for v in qty_versions:
        table += "| {} | {} |\n".format(v.capitalize(), str(qty_versions[v]))
    return table
Example #33
    def __init__(self, yaml):
        sim = dict()
        ins = dict()
        logic = dict()
        python = dict()
        gen = dict()
        func = dict()
        py2cnf = dict()
        #aima = dict()

        eqntott = {"and": "&",
                   "or": "|",
                   "not": "!",
                   "False": "ZERO",
                   "True": "ONE"}

        #aimaC = {"and": "&",
        #         "or": "|",
        #         "not": "~",
        #         "False": "0",
        #         "True": "1"}

        for modname in yaml.keys():
            if "primitive" in yaml.get(modname):
                prim = yaml.get(modname)["primitive"]
                inputs = yaml.get(modname)["inputs"].keys()
                evalStr = "lambda "
                for i in range(0, len(inputs)-1):
                    evalStr += inputs[i] + ","
                evalStr += inputs[len(inputs)-1] + ": "
                simStr = evalStr + prim
                logicStr = evalStr + self.__primToLogic__(inputs, prim, eqntott)
                pyStr    = evalStr + self.__primToLogic__(inputs, prim)
                #aimaStr  = evalStr + self.__primToLogic__(inputs, prim, aimaC)

                genStr = self.__getGenerator__(yaml, modname, eqntott)
                
                d = {}
                exec genStr.strip() in d
                #setattr(self.__class__, modname, d[modname])
                gen[modname] = d[modname]

                funcStr = self.__getFunction__(yaml, modname, eqntott)

                f = {}
                exec funcStr.strip() in f
                func[modname] = f[modname]

                sim[modname] = eval(simStr)
                logic[modname] = eval(logicStr)
                python[modname] = eval(pyStr)
                py2cnf[modname] = Gate2CNF.Gate2CNF(prim)
                #aima[modname] = eval(aimaStr)
                ins[modname] = inputs
        self.__sim = sim
        self.__inputs = ins
        self.__logic = logic
        self.__python = python
        self.__gen = gen
        self.__func = func
        self.__py2cnf = py2cnf
Example #34
    st.header("Hybrid Genome Browser")

    # Create a second instance of our component whose `name` arg will vary
    # based on a text_input widget.
    #
    # We use the special "key" argument to assign a fixed identity to this
    # component instance. By default, when a component's arguments change,
    # it is considered a new instance and will be re-mounted on the frontend
    # and lose its current state. In this case, we want to vary the component's
    # "name" argument without having it get recreated.
    # name_input = st.text_input("Enter a file name", value="../../bt142/ont2_ngmlr.bam")
    try:
        yaml = load_samples("hgb/config.yaml")

        ref = st.sidebar.selectbox("Which references to use?",
                                   list(yaml.keys()), 1)

        name_input = st.sidebar.multiselect("Which files to load?",
                                            yaml[ref]["samples"],
                                            list(yaml[ref]["default"]))
        refs = reference_hash(yaml[ref]["samples"][0])
        default_range = yaml[ref]["range"][0]
    except:
        name_input = st.sidebar.text_input("Which file to explore?")
        refs = reference_hash(name_input)
        if len(refs) > 0:
            default_range = "{}:1-10001".format(next(iter(refs)))
        else:
            default_range = ""

    #range_candidate = st.sidebar
Example #35
def extract_node_include_info(reduced, root_node_address, sub_node_address,
                              yaml, y_sub):

    filter_list = ['interrupt-names', 'reg-names', 'phandle', 'linux,phandle']
    node = reduced[sub_node_address]
    node_compat = get_compat(root_node_address)

    if node_compat not in yaml.keys():
        return {}, {}

    if y_sub is None:
        y_node = yaml[node_compat]
    else:
        y_node = y_sub

    # check to see if we need to process the properties
    for k, v in y_node['properties'].items():
        if 'properties' in v:
            for c in reduced:
                if root_node_address + '/' in c:
                    extract_node_include_info(reduced, root_node_address, c,
                                              yaml, v)
        if 'generation' in v:

            match = False

            # Handle any per node extraction first.  For example we
            # extract a few different defines for a flash partition so its
            # easier to handle the partition node in one step
            if 'partition@' in sub_node_address:
                flash.extract_partition(sub_node_address)
                continue

            # Handle each property individually, this ends up handling common
            # patterns for things like reg, interrupts, etc that we don't need
            # any special case handling at a node level
            for c in node['props'].keys():
                # if prop is in filter list - ignore it
                if c in filter_list:
                    continue

                if re.match(k + '$', c):

                    if 'pinctrl-' in c:
                        names = deepcopy(node['props'].get(
                            'pinctrl-names', []))
                    else:
                        if not c.endswith("-names"):
                            names = deepcopy(node['props'].get(
                                c[:-1] + '-names', []))
                            if not names:
                                names = deepcopy(node['props'].get(
                                    c + '-names', []))
                        else:
                            # guard against an unbound 'names' when the
                            # matched property is itself a *-names entry
                            names = []
                    if not isinstance(names, list):
                        names = [names]

                    extract_property(node_compat, yaml, sub_node_address, c, v,
                                     names)
                    match = True

            # Handle the case that we have a boolean property, but its not
            # in the dts
            if not match:
                if v['type'] == "boolean":
                    extract_property(node_compat, yaml, sub_node_address, k, v,
                                     None)
Example #36
def check_required_keys(yaml, required, path='') -> bool:
    passed = True

    for keywords in required:
        for rk, rv in keywords.items():
            currentpath = path + rk if path == '' else path + '.' + rk

            requiredKey = rv.get('required')
            requiredSubkeys = rv.get('keys')
            keyType = rv.get('type', None)
            typeStr = '' if not keyType else f" (type is '{keyType.__name__}')"

            if not yaml:
                raise FailedInitialization(f"YAML file is corrupt or truncated, expecting to find '{rk}' and found nothing")

            if isinstance(yaml, list):
                for index, element in enumerate(yaml):
                    path = f"{currentpath}[{index}]"

                    yamlKeys = element.keys()
                    if requiredKey:
                        if rk not in yamlKeys:
                            _LOGGER.error(f"'{currentpath}' is required for operation {typeStr}")
                            passed = False
                            continue

                    yamlValue = dict(element).get(rk, None)
                    if yamlValue is None:
                        return passed

                    if rk in yamlKeys and keyType and not isinstance(yamlValue, keyType):
                        _LOGGER.error(f"'{currentpath}' should be type '{keyType.__name__}'")
                        passed = False

                    if isinstance(requiredSubkeys, list):
                        if len(requiredSubkeys):
                            passed = check_required_keys(yamlValue, requiredSubkeys, path) and passed
                    else:
                        raise FailedInitialization('Unexpected YAML checking error')
            elif isinstance(yaml, dict) or isinstance(yaml, Configuration):
                yamlKeys = yaml.keys()
                if requiredKey:
                    if rk not in yamlKeys:
                        _LOGGER.error(f"'{currentpath}' is required for operation {typeStr}")
                        passed = False
                        continue

                yamlValue = dict(yaml).get(rk, None)
                if yamlValue is None:
                    return passed

                if rk in yamlKeys and keyType and not isinstance(yamlValue, keyType):
                    _LOGGER.error(f"'{currentpath}' should be type '{keyType.__name__}'")
                    passed = False

                if isinstance(requiredSubkeys, list):
                    if len(requiredSubkeys):
                        passed = check_required_keys(yamlValue, requiredSubkeys, currentpath) and passed
                else:
                    raise FailedInitialization('Unexpected YAML checking error')
            else:
                raise FailedInitialization('Unexpected YAML checking error')
    return passed
Example #37
def generate_manifest(data):
    global release, qty_devices, qty_images, qty_release_images
    default = ""
    devices = {}

    # Iterate over per input (depth 1)
    for yaml in data['devices']:
        # Iterate over vendors
        for vendor in yaml.keys():
            # @g0tmi1k: Feels like there is a cleaner way todo this
            if not vendor == "raspberrypi":
                continue
            # Ready to have a unique name in the entry
            img_seen = set()
            # Iterate over board (depth 2)
            for board in yaml[vendor]:
                qty_devices += 1
                # Iterate over per board
                for key in board.keys():
                    # Check if there is an image for the board
                    if 'images' in key:
                        # Iterate over image (depth 3)
                        for image in board[key]:
                            qty_images += 1
                            # Check that it's not EOL or community supported
                            if image.get('support') == "kali":
                                name = image.get('name', default)
                                # If we haven't seen this image before for this vendor
                                if name not in img_seen:
                                    img_seen.add(name)
                                    qty_release_images += 1

                                    filename = "kali-linux-{}-{}".format(
                                        release, image.get('image', default))

                                    # Check to make sure files got created
                                    for ext in file_ext:
                                        check_file = '{}/{}.{}'.format(
                                            imagedir, filename, ext)
                                        if not os.path.isfile(check_file):
                                            bail(
                                                "Missing: '{}'".format(
                                                    check_file),
                                                "Please create the image before running"
                                            )

                                    with open('{}/{}.xz.sha256sum'.format(
                                            imagedir, filename)) as f:
                                        image_download_sha256 = f.read().split(
                                        )[0]
                                    with open('{}/{}.sha256sum'.format(
                                            imagedir, filename)) as f:
                                        extract_sha256 = f.read().split()[0]

                                    url = "https://kali.download/arm-images/kali-{}/{}.xz".format(
                                        release, filename)

                                    # @g0tmi1k: not happy about external OS, rather keep it in python (import lzma)
                                    try:
                                        unxz = subprocess.check_output(
                                            "unxz --verbose --list {}/{}.xz | grep 'Uncompressed'"
                                            .format(imagedir, filename),
                                            shell=True)
                                        extract_size = re.findall(
                                            r'\((.*?) B\)', str(unxz))[0]
                                        extract_size = extract_size.replace(
                                            ',', '')
                                        extract_size = int(extract_size)
                                    except subprocess.CalledProcessError as e:
                                        #print("command '{}' return with error (code {})".format(e.cmd, e.returncode))
                                        extract_size = 0

                                    #image_download_size = os.stat('{}/{}.xz'.format(imagedir, filename)).st_size
                                    image_download_size = os.path.getsize(
                                        '{}/{}.xz'.format(imagedir, filename))
                                    jsonarray(devices, 'os_list', name, url,
                                              extract_size, extract_sha256,
                                              image_download_size,
                                              image_download_sha256)
    return json.dumps(devices, indent=2)