Example #1
def get_whatrequires(pkg, yum_conf):
    """
    Write list of packages.

    Write packages that require the current package to a file
    using dnf repoquery what-requires and --recursive commands.
    """
    # clean up dnf cache to avoid 'no more mirrors repo' error
    try:
        subprocess.check_output([
            'dnf', '--config', yum_conf, '--releasever', 'clear', 'clean',
            'all'
        ])
    except subprocess.CalledProcessError as err:
        util.print_warning("Unable to clean dnf repo: {}, {}".format(pkg, err))
        return

    try:
        out = subprocess.check_output([
            'dnf', 'repoquery', '--config', yum_conf, '--releasever', 'clear',
            '--archlist=src', '--recursive', '--queryformat=%{NAME}',
            '--whatrequires', pkg
        ]).decode('utf-8')

    except subprocess.CalledProcessError as err:
        util.print_warning(
            "dnf repoquery whatrequires for {} failed with: {}".format(
                pkg, err))
        return

    util.write_out(
        'whatrequires', '# This file contains recursive sources that '
        'require this package\n' + out)
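
util.write_out is not shown in these examples; a minimal stand-in, assuming it simply writes the given text to the named file (the real autospec helper may differ, e.g. in encoding handling), could look like:

def write_out(filename, content, mode="w"):
    # Assumed behavior only: open the file and write the content as-is.
    with open(filename, mode) as f:
        f.write(content)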
Example #2
def gen(args):
    test_dir = Path(os.path.join(args.root, args.name))
    os.makedirs(test_dir, exist_ok=True)

    if args.continue_:
        transitions_by_binding = read_transitions(test_dir)
    else:
        transitions_by_binding = transitions_from_args(args)
        try:
            existing = read_transitions(test_dir)
        except FileNotFoundError:
            write_transitions(test_dir, transitions_by_binding)
        else:
            if transitions_by_binding != existing:
                print_warning(
                    f'specified transitions do not match previously existing ones for {args.name}:'
                )
                print('your parameters:')
                print(f'  {to_flags(transitions_by_binding)}')
                print('existing parameters:')
                print(f'  {to_flags(existing)}')
                if input('\ncontinue? (Y/n) ') == 'n':
                    return
                write_transitions(test_dir, transitions_by_binding)

    generate_test(args.fidl, test_dir, transitions_by_binding)
Example #3
    def _clean_dirs(self, root, files):
        """Do the work to remove the directories from the files list."""
        res = set()
        removed = False

        directive_re = re.compile(r"(%\w+(\([^\)]*\))?\s+)(.*)")
        for f in files:
            # skip the files with directives at the beginning, including %doc
            # and %dir directives.
            # autospec does not currently support adding empty directories to
            # the file list by prefixing "%dir". Regardless, skip these entries
            # because if they exist at this point it is intentional (i.e.
            # support was added).
            if directive_re.match(f):
                res.add(f)
                continue

            path = os.path.join(root, f.lstrip("/"))
            if os.path.isdir(path) and not os.path.islink(path):
                util.print_warning(
                    "Removing directory {} from file list".format(f))
                self.files_blacklist.add(f)
                removed = True
            else:
                res.add(f)

        return (res, removed)
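
A quick check of which file-list entries the directive regex skips (the sample entries are illustrative):

import re

directive_re = re.compile(r"(%\w+(\([^\)]*\))?\s+)(.*)")
for entry in ["%doc README", "%attr(0755,root,root) /usr/bin/tool", "/usr/lib/libfoo.so"]:
    print(entry, "->", bool(directive_re.match(entry)))
# %doc README -> True (kept as-is, never checked as a directory)
# %attr(0755,root,root) /usr/bin/tool -> True
# /usr/lib/libfoo.so -> False (checked against the filesystem)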
Example #4
    def _set_instance_data(self, attr, value):
        """Adds or assigns value to the attr attribute of this Java value.

        attr (String) -- The attribute to add or assign value to. If this Java
                         value does not have an attribute with this name, a
                         warning is printed with the msg "Unknown attribute"
                         followed by the attribute name. The value is added,
                         appended or assigned, depending on if the attribute is
                         a MutableSet, a list or something else, respectively.
        value         -- Typically a String, but can be anything, really.

        The 'exact' cache is invalidated if the attribute exists.

        """
        try:
            data = getattr(self, attr)
            if isinstance(data, list):
                data.append(value)
            elif isinstance(data, collections.abc.MutableSet):
                data.add(value)
            else:
                setattr(self, attr, value)
        except AttributeError:
            util.print_warning(msg='Unknown attribute: ' + attr, key=attr)
        else:
            self.exact = None  # Invalidate cache
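
A toy class (hypothetical, for illustration only) exercises the three dispatch cases the docstring describes:

import collections.abc

class Demo:
    def __init__(self):
        self.imports = set()   # MutableSet -> value is add()ed
        self.fields = []       # list       -> value is append()ed
        self.name = None       # anything else -> value is assigned

demo = Demo()
for attr, value in [("imports", "java.util.*"), ("fields", "x"), ("name", "Foo")]:
    data = getattr(demo, attr)
    if isinstance(data, list):
        data.append(value)
    elif isinstance(data, collections.abc.MutableSet):
        data.add(value)
    else:
        setattr(demo, attr, value)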
Example #5
def run(test_root: Path, state: State):
    """ Runs the tool until the user quits. """
    print_warning(
        'Generate test tool: press ^C at any time to quit (progress is saved after each step)'
    )
    while True:
        state = step(test_root, state)
        maybe_save_state(test_root, state)
Example #6
def check_for_warning_pattern(line):
    """Print warning if a line matches against a warning list."""
    warning_patterns = [
        "march=native"
    ]
    for pat in warning_patterns:
        if pat in line:
            util.print_warning("Build log contains: {}".format(pat))
Example #7
def read_extras_config(path):
    """Return parsed extras configurations from path."""
    if not os.path.exists(path):
        return None
    try:
        return toml.load(path)
    except Exception as excpt:
        print_warning(excpt)
    return None
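
For illustration, a minimal extras configuration that would parse cleanly; the files/requires layout is the one example #25 below validates:

import toml

# Hypothetical <name>_extras file content; example #25 expects a
# 'files' list and, optionally, a 'requires' list.
sample = """
files = ["/usr/bin/foo-extra", "/usr/share/foo/extra.dat"]
requires = ["foo-lib"]
"""
config = toml.loads(sample)
assert isinstance(config["files"], list)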
Example #8
    def read_info(self, config_file="photo_info.txt", debug=False):
        with open(config_file, "r", encoding="utf-8") as r:
            infos = r.readlines()
        info_dict = {}
        if len(infos) == 0:
            print("无额外照片信息,将直接进行照片转换")
            info_dict['title'] = "小麦冬"
            info_dict['sub_title'] = "xiaomaidong.com"
            return info_dict
        else:
            print("发现额外照片信息,将进行解析...")
        info_dict['part'] = []
        img_type = "0"
        for info in infos:
            try:
                # Skip comment lines
                info = info.strip()
                if info.startswith("//"):
                    continue
                info_n = info.split("#")
                # Parse as the title
                if info_n[0] == util.TITLE_KEY:
                    if util.TITLE_KEY in info_dict:
                        util.print_repeat("标题")
                    self.parse_title(info_n, info_dict)

                elif info_n[0] == util.BACK_KEY:
                    if util.BACK_KEY in info_dict:
                        util.print_repeat("背景图片")
                    self.parse_back_img(info_n[1:], info_dict)

                elif info_n[0] == util.DAYS_KEY:
                    if util.DAYS_KEY in info_dict:
                        util.print_repeat("日期")
                    self.parse_days(info_n, info_dict)
                elif info_n[0] == util.MID_KEY:
                    info_dict['page_title'] = info_n[1]
                elif info_n[0].startswith(util.PARTS_KEY):
                    img_type = info_n[0][4:]
                    info_dict['part'].append(dict(part_id=img_type, part_title=info_n[1], part_desc=info_n[2]))

                elif util.check_image_file_name(info_n[0]):
                    info_n = list(map(lambda x: x.strip(), info_n))
                    if info_n[1] == '':
                        info_n[1] = info_n[0].split('.')[0]
                    info_dict[img_type + info_n[0]] = dict(title=info_n[1], desc=info_n[2], type=img_type)
                else:
                    util.print_warning(info)
                    print("注意照片文件名必须带有后缀,支持以下格式的文件:")
                    print("jpg|png|JPG|jpeg|JPEG|PNG|bmp|BMP")

            except BaseException as e:
                if debug:
                    raise e
                util.print_warning(info)
        print("照片信息解析完成")
        return info_dict
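
The photo_info.txt format is not documented beyond this parser: fields are separated by "#" and lines starting with "//" are comments. A hypothetical image entry:

# Hypothetical entry: filename#title#description
info_n = "sunset.jpg#Evening glow#Taken at the lake".split("#")
# info_n[0] passes util.check_image_file_name, so the photo is stored as
# dict(title="Evening glow", desc="Taken at the lake", type=img_type)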
Example #9
def regen_toc(args):
    all_tests = []
    for test_root in find_tests(args.root):
        with open(test_root / TEST_FILE, 'r') as f:
            test = CompatTest.fromdict(json.load(f))
        all_tests.append((test_root, test))
    if not all_tests:
        print_warning('No tests found')
    generate_docs.regen_toc(all_tests)
Example #10
def license_from_copying_hash(copying, srcdir, config):
    """Add licenses based on the hash of the copying file."""
    try:
        data = get_contents(copying)
    except FileNotFoundError:
        # LICENSE file is a bad symlink (qemu-4.2.0!)
        return

    if data.startswith(b'#!'):
        # Not a license if this is a script
        return

    data = decode_license(data)
    if not data:
        return

    hash_sum = get_sha1sum(copying)

    if config.license_fetch:
        values = {'hash': hash_sum, 'text': data, 'package': tarball.name}
        data = urllib.parse.urlencode(values)
        data = data.encode('utf-8')

        buffer = download.do_curl(config.license_fetch,
                                  post=data,
                                  is_fatal=True)
        response = buffer.getvalue()
        page = response.decode('utf-8').strip()
        if page:
            print("License     : ", page, " (server) (", hash_sum, ")")
            process_licenses(page, config.license_translations,
                             config.license_blacklist)

            if page != "none":
                # Strip the build source directory off the front
                lic_path = copying[len(srcdir):]
                # Strip any leading slashes
                while lic_path.startswith('/'):
                    lic_path = lic_path[1:]
                lic_path = shlex.quote(lic_path)
                license_files.append(lic_path)
                hashes[lic_path] = hash_sum

            return

    if hash_sum in config.license_hashes:
        add_license(config.license_hashes[hash_sum],
                    config.license_translations, config.license_blacklist)
    else:
        if not config.license_show:
            return
        print_warning("Unknown license {0} with hash {1}".format(
            copying, hash_sum))
        hash_url = config.license_show % {'HASH': hash_sum}
        print_warning("Visit {0} to enter".format(hash_url))
Example #11
def license_from_copying_hash(copying, srcdir):
    """Add licenses based on the hash of the copying file."""
    data = tarball.get_contents(copying)
    if data.startswith(b'#!'):
        # Not a license if this is a script
        return

    sh = hashlib.sha1()
    sh.update(data)
    hash_sum = sh.hexdigest()
    """ decode license text """
    detected = chardet.detect(data)
    license_charset = detected['encoding']
    if license_charset == 'ISO-8859-1':
        if b'\xff' in data:
            license_charset = 'ISO-8859-13'
        elif b'\xd2' in data and b'\xd3' in data:
            license_charset = 'mac_roman'
    if not license_charset:
        # This is not a text file
        return

    data = data.decode(license_charset)

    if config.license_fetch:
        values = {'hash': hash_sum, 'text': data, 'package': tarball.name}
        data = urllib.parse.urlencode(values)
        data = data.encode('utf-8')

        buffer = download.do_curl(config.license_fetch,
                                  post=data,
                                  is_fatal=True)
        response = buffer.getvalue()
        page = response.decode('utf-8').strip()
        if page:
            print("License     : ", page, " (server) (", hash_sum, ")")
            process_licenses(page)

            if page != "none":
                lic_path = copying[len(srcdir) + 1:]
                license_files.append(shlex.quote(lic_path))

            return

    if hash_sum in config.license_hashes:
        add_license(config.license_hashes[hash_sum])
    else:
        if not config.license_show:
            return
        print_warning("Unknown license {0} with hash {1}".format(
            copying, hash_sum))
        hash_url = config.license_show % {'HASH': hash_sum}
        print_warning("Visit {0} to enter".format(hash_url))
Example #12
def license_from_copying_hash(copying, srcdir):
    """Add licenses based on the hash of the copying file"""
    data = tarball.get_contents(copying)
    if data.startswith(b'#!'):
        # Not a license if this is a script
        return

    sh = hashlib.sha1()
    sh.update(data)
    hash_sum = sh.hexdigest()

    if config.license_fetch:
        values = {'hash': hash_sum, 'text': data, 'package': tarball.name}
        data = urllib.parse.urlencode(values)
        data = data.encode('utf-8')

        buffer = BytesIO()
        c = pycurl.Curl()
        c.setopt(c.URL, config.license_fetch)
        c.setopt(c.WRITEDATA, buffer)
        c.setopt(c.POSTFIELDS, data)
        c.setopt(c.FOLLOWLOCATION, 1)
        try:
            c.perform()
        except Exception as excep:
            print_fatal("Failed to fetch license from {}: {}".format(
                config.license_fetch, excep))
            c.close()
            sys.exit(1)

        c.close()

        response = buffer.getvalue()
        page = response.decode('utf-8').strip()
        if page:
            print("License     : ", page, " (server) (", hash_sum, ")")
            process_licenses(page)

            if page != "none":
                lic_path = copying[len(srcdir) + 1:]
                license_files.append(shlex.quote(lic_path))

            return

    if hash_sum in config.license_hashes:
        add_license(config.license_hashes[hash_sum])
    else:
        if not config.license_show:
            return
        print_warning("Unknown license {0} with hash {1}".format(
            copying, hash_sum))
        hash_url = config.license_show % {'HASH': hash_sum}
        print_warning("Visit {0} to enter".format(hash_url))
Example #13
def regen(args):
    tests = args.tests or find_tests(args.root)
    for name in tests:
        print(f'Regenerating files for {name}')
        test_dir = Path(os.path.join(args.root, name))
        with open(test_dir / TEST_FILE, 'r') as f:
            test = CompatTest.fromdict(json.load(f))
        generate_test.regen_files(test_dir, test)
    if not tests:
        print_warning('No tests found')
    else:
        print(
            white('Done! Run fx-format to get rid of formatting differences.'))
Example #14
def examine_abi(download_path, name):
    """Proxy the ABI reporting to the right function."""
    download_path = os.path.abspath(download_path)
    results_dir = os.path.abspath(os.path.join(download_path, "results"))

    if not os.path.exists(results_dir):
        util.print_fatal("Results directory does not exist, aborting")
        sys.exit(1)

    if util.binary_in_path("abireport"):
        examine_abi_host(download_path, results_dir, name)
    else:
        util.print_warning("abireport is not installed. Using slow scanning")
        examine_abi_fallback(download_path, results_dir, name)
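
util.binary_in_path is not shown; a minimal stand-in, assuming it only checks PATH (the real helper may also consult a configured binary list), might be:

import shutil

def binary_in_path(name):
    # Assumption: a simple PATH lookup is all that is needed.
    return shutil.which(name) is not None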
Example #15
    def parse_config_versions(self, path):
        """Parse the versions configuration file."""
        # Only actually parse it the first time around
        if not self.parsed_versions:
            for line in self.read_conf_file(os.path.join(path, "versions")):
                # Simply whitespace-separated fields
                fields = line.split()
                version = fields.pop(0)
                if len(fields):
                    url = fields.pop(0)
                else:
                    url = ""
                # Catch and report versions that appear more than once with a
                # different URL. Don't stop, but assume only the first entry is
                # valid and drop the rest.
                if version in self.parsed_versions and url != self.parsed_versions[version]:
                    print_warning("Already have a URL defined for {}: {}"
                                  .format(version, self.parsed_versions[version]))
                    print_warning("Dropping {}, but you should check my work"
                                  .format(url))
                else:
                    self.parsed_versions[version] = url
                if len(fields):
                    print_warning("Extra fields detected in `versions` file entry:\n{}"
                                  .format(line))
                    print_warning("I'll delete them, but you should check my work")

        # We'll combine what we just parsed from the versions file with any other
        # versions that have already been defined, most likely the version actually
        # provided in the Makefile's URL variable, so we don't drop any.
        for version in self.parsed_versions:
            self.versions[version] = self.parsed_versions[version]

        return self.versions
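
A worked illustration of the field handling above, with a hypothetical versions file entry (version first, optional URL second):

line = "1.2.3 https://example.org/foo-1.2.3.tar.gz stray-field"
fields = line.split()
version = fields.pop(0)                # "1.2.3"
url = fields.pop(0) if fields else ""  # the URL is optional
# fields is now ["stray-field"], so the "Extra fields detected" warning fires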
Example #16
def parse_ext(path):
    """
    Gets the extension of a file and determines if the filetype can be handled
    by the infile_parser. If so, it returns the extension. If not, it returns
    nothing, which will cause the file_handler to return and infile parsing
    will NOT be used on that file.
    """
    ext = os.path.splitext(path)[1]
    if ext not in parsable_filetypes:
        util.print_warning("Cannot parse infile \"{}\" type. From input: {}."
                           " \nContinuing with the execution of "
                           "autospec".format(ext, path))
        return None

    return ext.lstrip('.')
Example #17
def main():
    parser = argparse.ArgumentParser(description="Builds a parsec benchmark with contech")
    parser.add_argument("benchmark", help="The parsec benchmark to build.")
    parser.add_argument("--bldconf", default="contech", help="PARSEC build configuration (bldconf) to use for building.")
    parser.add_argument("--hammerOptLevel", help="Set hammer optimization level (bldconf==hammer only)")
    args = parser.parse_args()

    if "CONTECH_HOME" in os.environ:
        CONTECH_HOME = os.environ["CONTECH_HOME"]
        stateFile = os.path.join(CONTECH_HOME, "scripts/output/", args.benchmark + ".stateFile.temp")
        os.environ["CONTECH_STATE_FILE"] = stateFile
    else:
        print(">Error: Could not find contech installation. Set CONTECH_HOME to the root of your contech directory.")
        exit(1)

    if "PARSEC_HOME" in os.environ:
        PARSEC_HOME = os.environ["PARSEC_HOME"]
        PARSECMGMT = os.path.join(PARSEC_HOME, "bin/parsecmgmt")
    else:
        print(">Error: Could not find parsec installation. Set PARSEC_HOME to the root of your parsec directory.")
        exit(1)

    # Run the parsec benchmark
    print(">Building " + args.benchmark)

    # Prepare state file for run
    if os.path.exists(stateFile):
        os.remove(stateFile)

    # Hammer: Prepare hammer nail file
    if args.bldconf == "hammer":
        os.environ["HAMMER_NAIL_FILE"] = os.path.join(CONTECH_HOME, "backend/Hammer/compilerData/{}.bin".format(args.benchmark))
        os.environ["HAMMER_OPT_LEVEL"] = args.hammerOptLevel

    pcall([PARSECMGMT, "-a", "uninstall", "-p", args.benchmark, "-c", args.bldconf])
    with Timer("Build"):
        returnCode = pcall([PARSECMGMT, "-a", "build", "-p", args.benchmark, "-c", args.bldconf], returnCode=True)

    # Clean up
    if os.path.exists(stateFile):
        os.remove(stateFile)
    elif args.bldconf in ["contech", "contech_marker", "hammer"]:
        util.print_warning("Warning: no statefile was generated.")

    if returnCode != 0:
        util.print_error("Build failed")
        exit(1)
Example #18
def license_from_copying_hash(copying, srcdir):
    """Add licenses based on the hash of the copying file"""
    hash_sum = tarball.get_sha1sum(copying)

    if config.license_fetch:
        with open(copying, "r", encoding="latin-1") as myfile:
            data = myfile.read()

        values = {'hash': hash_sum, 'text': data, 'package': tarball.name}
        data = urllib.parse.urlencode(values)
        data = data.encode('utf-8')

        buffer = BytesIO()
        c = pycurl.Curl()
        c.setopt(c.URL, config.license_fetch)
        c.setopt(c.WRITEDATA, buffer)
        c.setopt(c.POSTFIELDS, data)
        c.setopt(c.FOLLOWLOCATION, 1)
        try:
            c.perform()
        except Exception as excep:
            print_fatal("Failed to fetch license from {}: {}"
                        .format(config.license_fetch, excep))
            c.close()
            sys.exit(1)

        c.close()

        response = buffer.getvalue()
        page = response.decode('utf-8').strip()
        if page:
            print("License     : ", page, " (server) (", hash_sum, ")")
            add_license(page)
            if page != "none":
                license_files.append(copying[len(srcdir) + 1:])

            return

    if hash_sum in config.license_hashes:
        add_license(config.license_hashes[hash_sum])
    else:
        if not config.license_show:
            return
        print_warning("Unknown license {0} with hash {1}".format(copying, hash_sum))
        hash_url = config.license_show % {'HASH': hash_sum}
        print_warning("Visit {0} to enter".format(hash_url))
Example #19
    def push_package_file(self, filename, package="main"):
        """Add found %file and indicate to build module that we must restart the build."""
        if package not in self.packages:
            self.packages[package] = set()

        if FileManager.banned_path(filename):
            util.print_warning(
                f"  Content {filename} found in banned path, skipping")
            self.has_banned = True
            return

        # prepend the %attr macro if file defined in 'attrs' control file
        if filename in self.attrs:
            mod = self.attrs[filename][0]
            u = self.attrs[filename][1]
            g = self.attrs[filename][2]
            filename = "%attr({0},{1},{2}) {3}".format(mod, u, g, filename)
        self.packages[package].add(filename)
        build.must_restart += 1
        if not self.newfiles_printed:
            print("  New %files content found")
            self.newfiles_printed = True
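
A hypothetical attrs control file line; example #23's parser reads it as mode, user, group, then the file name (the last field):

line = "0750 root root /usr/libexec/foo-helper"
attr = line.split()
filename = attr.pop()  # "/usr/libexec/foo-helper"
# push_package_file would then emit:
#   %attr(0750,root,root) /usr/libexec/foo-helper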
Example #20
def file_handler(indata, output_dict):
    """
    File or url parsing frontend.

    This function determines whether the input is a file or a url. If it is a
    url it checks that it is in the correct format (plaintext), downloads the
    url to a temporary file and passes the file handler to be scraped.
    If the input is a file, then it opens the file and passes the handler to
    be scraped.

    The type of parsing bitbake, inc, deb, etc is based on the file extension.
    """
    # If ext is not parsable, return from using infile parser on that file.
    parse_type = parse_ext(indata)
    if not parse_type:
        return

    if output_dict.get('filename'):
        output_dict['filename'].append(indata)
    else:
        output_dict['filename'] = [indata]

    if not os.path.isfile(indata):
        # check that input is plain or raw text and not html
        indata = check_url_content(indata)
        with tempfile.NamedTemporaryFile() as tmpfile:
            try:
                tmp, _ = urlretrieve(indata, tmpfile.name)
                with open(tmp, 'r') as bb_fp:
                    output_dict = parse_infile(bb_fp, output_dict, parse_type)
            except Exception as e:
                util.print_warning("Infile was unable to be parsed with the "
                                   "error: \"{}\". \nContinuing with the "
                                   "execution of autospec.".format(e))
    else:
        with open(indata, 'r') as bb_fp:
            output_dict = parse_infile(bb_fp, output_dict, parse_type)
    return output_dict
Example #21
    def generate_class(self):
        """Generates a Java class hierarchy providing an interface to a YANG
        module. Uses mutual recursion with generate_child.

        """
        stmt = self.stmt

        # If augment, add target module to context.augmented_modules dict
        if stmt.keyword == 'augment':
            if not hasattr(stmt, "i_target_node"):
                warn_msg = 'Target missing from augment statement'
                util.print_warning(warn_msg, warn_msg, self.ctx)
            else:
                target = stmt.i_target_node
                target_module = util.get_module(target)
                context.augmented_modules[target_module.arg] = target_module
            return  # XXX: Do not generate a class for the augment statement

        fields = OrderedSet()
        package_generated = False
        all_fully_qualified = True
        fully_qualified = False

        self.java_class = JavaClass(filename=self.filename,
                                    package=self.package,
                                    description=util.class_javadoc(self.ns, stmt),
                                    source=self.src,
                                    superclass=context.superclasses.get(stmt.keyword, 'YangElement'))

        # if (self.java_class.superclass == 'YangAnyXml'):
        #     print('Adding imports for ' + self.java_class.filename)
        #     self.java_class.imports.add('io.netconfessor.YangAnyXml')

        for ch in util.search(stmt, context.yangelement_stmts | {'leaf', 'leaf-list'}):
            field = self.generate_child(ch)
            ch_arg = util.normalize(ch.arg)
            if field is not None:
                package_generated = True
                if ch_arg == self.n and not fully_qualified:
                    fully_qualified = True
                    s = ('\n * <p>\n * Children with the same name as this ' +
                         'class are fully qualified.')
                    self.java_class.description += s
                else:
                    all_fully_qualified = False
                if field:
                    fields.add(field)  # Container child
                if (not self.ctx.opts.import_on_demand
                        or ch_arg in context.java_lang
                        or ch_arg in context.java_util
                        or ch_arg in context.io_netconfessor
                        or ch_arg in context.class_hierarchy[self.rootpkg]
                        or ch_arg in context.class_hierarchy[self.package]):
                    # Need to do explicit import
                    import_ = '.'.join([self.package, self.n2, ch_arg])
                    self.java_class.imports.add(import_)

        if self.ctx.opts.debug or self.ctx.opts.verbose:
            if package_generated:
                print('pkg ' + '.'.join([self.package, self.n2]) + ' generated')
            if self.ctx.opts.verbose:
                print('Generating "' + self.filename + '"...')

        gen = MethodGenerator(stmt, self.ctx)
        # print(stmt.keyword + ': ' + stmt.arg)
        for constructor in gen.constructors():
            self.java_class.add_constructor(constructor)

        for cloner in gen.cloners():
            self.java_class.add_cloner(cloner)

        if stmt.keyword in {'leaf', 'leaf-list'}:
            for value_setter in gen.leaf_value_access_methods():
                self.java_class.add_value_setter(value_setter)

        try:
            impl_methods = gen.gen.enum_holder_impl()
        except AttributeError:
            pass
        else:
            if impl_methods:
                self.java_class.add_interface_implementation('YangEnumerationHolder', 'io.netconfessor')
                for m in impl_methods:
                    self.java_class.add_support_method(m)

        try:
            lc_methods = gen.gen.list_container_impl()
        except AttributeError:
            pass
        else:
            if lc_methods:
                self.java_class.add_interface_implementation('YangContainer', 'io.netconfessor')
                for m in lc_methods:
                    self.java_class.add_support_method(m)

        support_method = gen.support_method(fields)
        if support_method is not None:
            self.java_class.add_support_method(support_method)

        self.java_class.add_name_getter(gen.key_names())
        self.java_class.add_name_getter(gen.children_names())

        if self.ctx.opts.import_on_demand:
            self.java_class.imports.add('io.netconfessor.*')
            self.java_class.imports.add('java.math.*')
            self.java_class.imports.add('java.util.*')
            if self.rootpkg != self.package:
                self.java_class.imports.add(self.rootpkg + '.*')
                top = util.get_module(self.stmt)
                if top is None:
                    top = self.stmt
                elif top.keyword == 'submodule':
                    top = util.search_one(top, 'belongs-to')
                top_classname = util.normalize(util.search_one(top, 'prefix').arg)
                if (top_classname in context.java_built_in
                        or top_classname in context.java_util):
                    top_import = self.rootpkg + '.' + top_classname
                    self.java_class.imports.add(top_import)
            if package_generated and not all_fully_qualified:
                import_ = '.'.join([self.package, self.n2, '*'])
                self.java_class.imports.add(import_)

        if stmt.keyword in {'container', 'list'}:
            self.java_class_visitor = JavaClass(
                abstract=True,
                filename=self.filename_visitor,
                package=self.package,
                description='Visitor of ' + stmt.keyword + ' ' + stmt.arg)

            self.generate_visitor(self.stmt)

        self.write_to_file()
Example #22
    def failed_pattern(self, line, config, requirements, pattern, verbose, buildtool=None):
        """Check against failed patterns to restart build as needed."""
        pat = re.compile(pattern)
        match = pat.search(line)
        if not match:
            return
        s = match.group(1)
        # standard configure cleanups
        s = cleanup_req(s)

        if s in config.ignored_commands:
            return

        try:
            if not buildtool:
                req = config.failed_commands[s]
                if req:
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(req, cache=True)
                    else:
                        requirements.add_buildreq(req, cache=True)
            elif buildtool == 'pkgconfig':
                if self.short_circuit is None:
                    self.must_restart += requirements.add_pkgconfig_buildreq(s, config.config_opts.get('32bit'), cache=True)
                else:
                    requirements.add_pkgconfig_buildreq(s, config.config_opts.get('32bit'), cache=True)
            elif buildtool == 'R':
                if requirements.add_buildreq("R-" + s, cache=True) > 0:
                    if self.short_circuit is None:
                        self.must_restart += 1
            elif buildtool == 'perl':
                s = s.replace('inc::', '')
                if self.short_circuit is None:
                    self.must_restart += requirements.add_buildreq('perl(%s)' % s, cache=True)
                else:
                    requirements.add_buildreq('perl(%s)' % s, cache=True)
            elif buildtool == 'pypi':
                s = util.translate(s)
                if not s:
                    return
                if self.short_circuit is None:
                    self.must_restart += requirements.add_buildreq(util.translate('%s-python' % s), cache=True)
                else:
                    requirements.add_buildreq(util.translate('%s-python' % s), cache=True)
            elif buildtool == 'ruby':
                if s in config.gems:
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(config.gems[s], cache=True)
                    else:
                        requirements.add_buildreq(config.gems[s], cache=True)
                else:
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq('rubygem-%s' % s, cache=True)
                    else:
                        requirements.add_buildreq('rubygem-%s' % s, cache=True)
            elif buildtool == 'ruby table':
                if s in config.gems:
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(config.gems[s], cache=True)
                    else:
                        requirements.add_buildreq(config.gems[s], cache=True)
                else:
                    print("Unknown ruby gem match", s)
            elif buildtool == 'maven' or buildtool == 'gradle':
                group_count = len(match.groups())
                if group_count == 2:
                    # Add fully qualified versioned mvn() dependency
                    name = match.group(1)
                    # Hyphens are disallowed for version strings, so use dots instead
                    ver = match.group(2).replace('-', '.')
                    mvn_provide = f'mvn({name}) = {ver}'
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(mvn_provide, cache=True)
                    else:
                        requirements.add_buildreq(mvn_provide, cache=True)
                elif s in config.maven_jars:
                    # Overrides for dependencies with custom grouping
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(config.maven_jars[s], cache=True)
                    else:
                        requirements.add_buildreq(config.maven_jars[s], cache=True)
                elif group_count == 3:
                    org = match.group(1)
                    name = match.group(2)
                    ver = match.group(3).replace('-', '.')
                    if re.search("-(parent|pom|bom)$", name):
                        mvn_provide = f'mvn({org}:{name}:pom) = {ver}'
                    else:
                        mvn_provide = f'mvn({org}:{name}:jar) = {ver}'
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(mvn_provide, cache=True)
                    else:
                        requirements.add_buildreq(mvn_provide, cache=True)
                else:
                    # Fallback to mvn-ARTIFACTID package name
                    self.must_restart += requirements.add_buildreq('mvn-%s' % s, cache=True)
            elif buildtool == 'catkin':
                if self.short_circuit is None:
                    self.must_restart += requirements.add_pkgconfig_buildreq(s, config.config_opts.get('32bit'), cache=True)
                    self.must_restart += requirements.add_buildreq(s, cache=True)
                else:
                    requirements.add_pkgconfig_buildreq(s, config.config_opts.get('32bit'), cache=True)
                    requirements.add_buildreq(s, cache=True)
        except Exception:
            if s.strip() and s not in self.warned_about and s[:2] != '--':
                util.print_warning(f"Unknown pattern match: {s}")
                self.warned_about.add(s)
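
The failed-pattern files themselves are not shown; a hypothetical pkgconfig-style pattern illustrates how group(1) is extracted from a build-log line:

import re

pattern = r"No package '([^']+)' found"  # hypothetical pattern, not from autospec
line = "configure: error: No package 'zlib' found"
match = re.compile(pattern).search(line)
print(match.group(1))  # "zlib" (after cleanup_req) would reach add_pkgconfig_buildreq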
Example #23
    def parse_config_files(self, path, bump, filemanager, version,
                           requirements):
        """Parse the various configuration files that may exist in the package directory."""
        packages_file = None

        # Require autospec.conf for additional features
        if os.path.exists(self.config_file):
            config = configparser.ConfigParser(interpolation=None)
            config.read(self.config_file)

            if "autospec" not in config.sections():
                print("Missing autospec section..")
                sys.exit(1)

            self.git_uri = config['autospec'].get('git', None)
            self.license_fetch = config['autospec'].get('license_fetch', None)
            self.license_show = config['autospec'].get('license_show', None)
            packages_file = config['autospec'].get('packages_file', None)
            self.yum_conf = config['autospec'].get('yum_conf', None)
            self.failed_pattern_dir = config['autospec'].get(
                'failed_pattern_dir', None)

            # support reading the local files relative to config_file
            if packages_file and not os.path.isabs(packages_file):
                packages_file = os.path.join(os.path.dirname(self.config_file),
                                             packages_file)
            if self.yum_conf and not os.path.isabs(self.yum_conf):
                self.yum_conf = os.path.join(os.path.dirname(self.config_file),
                                             self.yum_conf)
            if self.failed_pattern_dir and not os.path.isabs(
                    self.failed_pattern_dir):
                self.failed_pattern_dir = os.path.join(
                    os.path.dirname(self.config_file), self.failed_pattern_dir)

            if not packages_file:
                print(
                    "Warning: Set [autospec][packages_file] path to package list file for "
                    "requires validation")
                packages_file = os.path.join(os.path.dirname(self.config_file),
                                             "packages")

            self.urlban = config['autospec'].get('urlban', None)

        # Read values from options.conf (and deprecated files) and rewrite as necessary
        self.read_config_opts(path)

        if not self.git_uri:
            print(
                "Warning: Set [autospec][git] upstream template for remote git URI configuration"
            )
        if not self.license_fetch:
            print(
                "Warning: Set [autospec][license_fetch] uri for license fetch support"
            )
        if not self.license_show:
            print(
                "Warning: Set [autospec][license_show] uri for license link check support"
            )
        if not self.yum_conf:
            print(
                "Warning: Set [autospec][yum_conf] path to yum.conf file for whatrequires validation"
            )
            self.yum_conf = os.path.join(os.path.dirname(self.config_file),
                                         "image-creator/yum.conf")

        if packages_file:
            self.os_packages = set(
                self.read_conf_file(packages_file, track=False))
        else:
            self.os_packages = set(
                self.read_conf_file("~/packages", track=False))

        wrapper = textwrap.TextWrapper()
        wrapper.initial_indent = "# "
        wrapper.subsequent_indent = "# "

        self.write_default_conf_file(
            path, "buildreq_ban", wrapper,
            "This file contains build requirements that get picked up but are "
            "undesirable. One entry per line, no whitespace.")
        self.write_default_conf_file(
            path, "pkgconfig_ban", wrapper,
            "This file contains pkgconfig build requirements that get picked up but"
            " are undesirable. One entry per line, no whitespace.")
        self.write_default_conf_file(
            path, "requires_ban", wrapper,
            "This file contains runtime requirements that get picked up but are "
            "undesirable. One entry per line, no whitespace.")
        self.write_default_conf_file(
            path, "buildreq_add", wrapper,
            "This file contains additional build requirements that did not get "
            "picked up automatically. One name per line, no whitespace.")
        self.write_default_conf_file(
            path, "pkgconfig_add", wrapper,
            "This file contains additional pkgconfig build requirements that did "
            "not get picked up automatically. One name per line, no whitespace."
        )
        self.write_default_conf_file(
            path, "requires_add", wrapper,
            "This file contains additional runtime requirements that did not get "
            "picked up automatically. One name per line, no whitespace.")
        self.write_default_conf_file(
            path, "excludes", wrapper,
            "This file contains the output files that need %exclude. Full path "
            "names, one per line.")

        content = self.read_conf_file(os.path.join(path, "release"))
        if content and content[0]:
            r = int(content[0])
            if bump:
                r += 1
            tarball.release = str(r)
            print("Release     :", tarball.release)

        content = self.read_conf_file(os.path.join(path, "extra_sources"))
        for source in content:
            fields = source.split(maxsplit=1)
            print("Adding additional source file: %s" % fields[0])
            self.config_files.add(os.path.basename(fields[0]))
            self.extra_sources.append(fields)

        content = self.read_conf_file(os.path.join(path, "buildreq_ban"))
        for banned in content:
            print("Banning build requirement: %s." % banned)
            requirements.banned_buildreqs.add(banned)
            requirements.buildreqs.discard(banned)
            requirements.buildreqs_cache.discard(banned)

        content = self.read_conf_file(os.path.join(path, "pkgconfig_ban"))
        for banned in content:
            banned = "pkgconfig(%s)" % banned
            print("Banning build requirement: %s." % banned)
            requirements.banned_buildreqs.add(banned)
            requirements.buildreqs.discard(banned)
            requirements.buildreqs_cache.discard(banned)

        content = self.read_conf_file(os.path.join(path, "requires_ban"))
        for banned in content:
            print("Banning runtime requirement: %s." % banned)
            requirements.banned_requires.add(banned)
            requirements.requires.discard(banned)

        content = self.read_conf_file(os.path.join(path, "buildreq_add"))
        for extra in content:
            print("Adding additional build requirement: %s." % extra)
            requirements.add_buildreq(extra)

        cache_file = os.path.join(path, "buildreq_cache")
        content = self.read_conf_file(cache_file)
        if content and content[0] == version:
            for extra in content[1:]:
                print("Adding additional build (cache) requirement: %s." %
                      extra)
                requirements.add_buildreq(extra)
        else:
            try:
                os.unlink(cache_file)
            except FileNotFoundError:
                pass
            except Exception as e:
                print_warning(f"Unable to remove buildreq_cache file: {e}")

        content = self.read_conf_file(os.path.join(path, "pkgconfig_add"))
        for extra in content:
            extra = "pkgconfig(%s)" % extra
            print("Adding additional build requirement: %s." % extra)
            requirements.add_buildreq(extra)

        content = self.read_conf_file(os.path.join(path, "requires_add"))
        for extra in content:
            print("Adding additional runtime requirement: %s." % extra)
            requirements.add_requires(extra, self.os_packages, override=True)

        content = self.read_conf_file(os.path.join(path, "excludes"))
        for exclude in content:
            print("%%exclude for: %s." % exclude)
        filemanager.excludes += content

        content = self.read_conf_file(os.path.join(path, "extras"))
        for extra in content:
            print("extras for  : %s." % extra)
        filemanager.extras += content

        for fname in os.listdir(path):
            if not re.search('.+_extras$', fname) or fname == "dev_extras":
                continue
            content = {}
            content['files'] = self.read_conf_file(os.path.join(path, fname))
            if not content['files']:
                print_warning(f"Error reading custom extras file: {fname}")
                continue
            req_file = os.path.join(path, f'{fname}_requires')
            if os.path.isfile(req_file):
                content['requires'] = self.read_conf_file(req_file)
            name = fname[:-len("_extras")]
            print(f"extras-{name} for {content['files']}")
            filemanager.custom_extras["extras-" + f"{name}"] = content

        content = self.read_conf_file(os.path.join(path, "dev_extras"))
        for extra in content:
            print("dev for     : %s." % extra)
        filemanager.dev_extras += content

        content = self.read_conf_file(os.path.join(path, "setuid"))
        for suid in content:
            print("setuid for  : %s." % suid)
        filemanager.setuid += content

        content = self.read_conf_file(os.path.join(path, "attrs"))
        for line in content:
            attr = line.split()
            filename = attr.pop()
            print("%attr({0},{1},{2}) for: {3}".format(attr[0], attr[1],
                                                       attr[2], filename))
            filemanager.attrs[filename] = attr

        self.patches += self.read_conf_file(os.path.join(path, "series"))
        pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in self.patches]
        cmd = "egrep \"(\+\+\+|\-\-\-).*((Makefile.am)|(aclocal.m4)|(configure.ac|configure.in))\" %s" % " ".join(
            pfiles)  # noqa: W605
        if self.patches and call(cmd,
                                 check=False,
                                 stdout=subprocess.DEVNULL,
                                 stderr=subprocess.DEVNULL) == 0:
            self.autoreconf = True

        # Parse the version-specific patch lists
        update_security_sensitive = False
        for version in self.versions:
            self.verpatches[version] = self.read_conf_file(
                os.path.join(path, '.'.join(['series', version])))
            if any(p.lower().startswith('cve-')
                   for p in self.verpatches[version]):
                update_security_sensitive = True

        if any(p.lower().startswith('cve-') for p in self.patches):
            update_security_sensitive = True

        if update_security_sensitive:
            self.config_opts['security_sensitive'] = True
            self.rewrite_config_opts(path)

        content = self.read_conf_file(os.path.join(path, "configure"))
        self.extra_configure = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "configure32"))
        self.extra_configure32 = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "configure64"))
        self.extra_configure64 = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "configure_avx2"))
        self.extra_configure_avx2 = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "configure_avx512"))
        self.extra_configure_avx512 = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "configure_openmpi"))
        self.extra_configure_openmpi = " \\\n".join(content)

        if self.config_opts["keepstatic"]:
            self.disable_static = ""
        if self.config_opts['broken_parallel_build']:
            self.parallel_build = ""

        content = self.read_conf_file(os.path.join(path, "make_args"))
        if content:
            self.extra_make = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "make32_args"))
        if content:
            self.extra32_make = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "make_install_args"))
        if content:
            self.extra_make_install = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path,
                                                   "make32_install_args"))
        if content:
            self.extra_make32_install = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "install_macro"))
        if content and content[0]:
            self.install_macro = content[0]

        content = self.read_conf_file(os.path.join(path, "cmake_args"))
        if content:
            self.extra_cmake = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "cmake_args_openmpi"))
        if content:
            self.extra_cmake_openmpi = " \\\n".join(content)

        content = self.read_conf_file(os.path.join(path, "cmake_srcdir"))
        if content and content[0]:
            self.cmake_srcdir = content[0]

        content = self.read_conf_file(os.path.join(path, "subdir"))
        if content and content[0]:
            self.subdir = content[0]

        content = self.read_conf_file(os.path.join(path, "build_pattern"))
        if content and content[0]:
            buildpattern.set_build_pattern(content[0], 20)
            self.autoreconf = False

        content = self.read_script_file(
            os.path.join(path, "make_check_command"))
        if content:
            check.tests_config = '\n'.join(content)

        content = self.read_conf_file(
            os.path.join(path, tarball.name + ".license"))
        if content and content[0]:
            words = content[0].split()
            for word in words:
                if word.find(":") < 0:
                    if not license.add_license(word, self.license_translations,
                                               self.license_blacklist):
                        print_warning(
                            "{}: blacklisted license {} ignored.".format(
                                tarball.name + ".license", word))

        content = self.read_conf_file(os.path.join(path, "golang_libpath"))
        if content and content[0]:
            tarball.golibpath = content[0]
            print("golibpath   : {}".format(tarball.golibpath))

        if self.config_opts['use_clang']:
            self.config_opts['funroll-loops'] = False
            requirements.add_buildreq("llvm")

        if self.config_opts['32bit']:
            requirements.add_buildreq("glibc-libc32")
            requirements.add_buildreq("glibc-dev32")
            requirements.add_buildreq("gcc-dev32")
            requirements.add_buildreq("gcc-libgcc32")
            requirements.add_buildreq("gcc-libstdc++32")

        if self.config_opts['openmpi']:
            requirements.add_buildreq("openmpi-dev")
            requirements.add_buildreq("modules")
            # MPI testsuites generally require "openssh"
            requirements.add_buildreq("openssh")

        self.prep_prepend = self.read_script_file(
            os.path.join(path, "prep_prepend"))
        if os.path.isfile(os.path.join(path, "prep_append")):
            os.rename(os.path.join(path, "prep_append"),
                      os.path.join(path, "build_prepend"))
        self.make_prepend = self.read_script_file(
            os.path.join(path, "make_prepend"))
        self.build_prepend = self.read_script_file(
            os.path.join(path, "build_prepend"))
        self.build_append = self.read_script_file(
            os.path.join(path, "build_append"))
        self.install_prepend = self.read_script_file(
            os.path.join(path, "install_prepend"))
        if os.path.isfile(os.path.join(path, "make_install_append")):
            os.rename(os.path.join(path, "make_install_append"),
                      os.path.join(path, "install_append"))
        self.install_append = self.read_script_file(
            os.path.join(path, "install_append"))
        self.service_restart = self.read_conf_file(
            os.path.join(path, "service_restart"))

        self.profile_payload = self.read_script_file(
            os.path.join(path, "profile_payload"))

        self.custom_desc = self.read_conf_file(
            os.path.join(path, "description"))
        self.custom_summ = self.read_conf_file(os.path.join(path, "summary"))
Example #24
def warn(msg):
    if DEBUG:
        util.print_warning(msg)
Example #25
def parse_config_files(path, bump, filemanager, version):
    """Parse the various configuration files that may exist in the package directory."""
    global extra_configure
    global extra_configure32
    global extra_configure64
    global extra_configure_avx2
    global extra_configure_avx512
    global config_files
    global parallel_build
    global license_fetch
    global license_show
    global git_uri
    global os_packages
    global urlban
    global config_file
    global profile_payload
    global config_opts
    global extra_make
    global extra32_make
    global extra_make_install
    global extra_make32_install
    global extra_cmake
    global cmake_srcdir
    global subdir
    global install_macro
    global disable_static
    global prep_prepend
    global build_prepend
    global make_prepend
    global install_prepend
    global install_append
    global patches
    global autoreconf
    global set_gopath
    global yum_conf
    global custom_desc
    global failed_pattern_dir

    packages_file = None

    # Require autospec.conf for additional features
    if os.path.exists(config_file):
        config = configparser.ConfigParser(interpolation=None)
        config.read(config_file)

        if "autospec" not in config.sections():
            print("Missing autospec section..")
            sys.exit(1)

        git_uri = config['autospec'].get('git', None)
        license_fetch = config['autospec'].get('license_fetch', None)
        license_show = config['autospec'].get('license_show', None)
        packages_file = config['autospec'].get('packages_file', None)
        yum_conf = config['autospec'].get('yum_conf', None)
        failed_pattern_dir = config['autospec'].get('failed_pattern_dir', None)

        # support reading the local files relative to config_file
        if packages_file and not os.path.isabs(packages_file):
            packages_file = os.path.join(os.path.dirname(config_file),
                                         packages_file)
        if yum_conf and not os.path.isabs(yum_conf):
            yum_conf = os.path.join(os.path.dirname(config_file), yum_conf)
        if failed_pattern_dir and not os.path.isabs(failed_pattern_dir):
            failed_pattern_dir = os.path.join(os.path.dirname(config_file),
                                              failed_pattern_dir)

        if not packages_file:
            print(
                "Warning: Set [autospec][packages_file] path to package list file for "
                "requires validation")
            packages_file = os.path.join(os.path.dirname(config_file),
                                         "packages")

        urlban = config['autospec'].get('urlban', None)

    # Read values from options.conf (and deprecated files) and rewrite as necessary
    read_config_opts(path)

    if not git_uri:
        print(
            "Warning: Set [autospec][git] upstream template for remote git URI configuration"
        )
    if not license_fetch:
        print(
            "Warning: Set [autospec][license_fetch] uri for license fetch support"
        )
    if not license_show:
        print(
            "Warning: Set [autospec][license_show] uri for license link check support"
        )
    if not yum_conf:
        print(
            "Warning: Set [autospec][yum_conf] path to yum.conf file for whatrequires validation"
        )
        yum_conf = os.path.join(os.path.dirname(config_file),
                                "image-creator/yum.conf")

    if packages_file:
        os_packages = set(read_conf_file(packages_file))
    else:
        os_packages = set(read_conf_file("~/packages"))

    wrapper = textwrap.TextWrapper()
    wrapper.initial_indent = "# "
    wrapper.subsequent_indent = "# "

    def write_default_conf_file(name, description):
        """Write default configuration file with description to file name."""
        config_files.add(name)
        filename = os.path.join(path, name)
        if os.path.isfile(filename):
            return

        write_out(filename, wrapper.fill(description) + "\n")

    write_default_conf_file(
        "buildreq_ban",
        "This file contains build requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "pkgconfig_ban",
        "This file contains pkgconfig build requirements that get picked up but"
        " are undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "requires_ban",
        "This file contains runtime requirements that get picked up but are "
        "undesirable. One entry per line, no whitespace.")
    write_default_conf_file(
        "buildreq_add",
        "This file contains additional build requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "pkgconfig_add",
        "This file contains additional pkgconfig build requirements that did "
        "not get picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "requires_add",
        "This file contains additional runtime requirements that did not get "
        "picked up automatically. One name per line, no whitespace.")
    write_default_conf_file(
        "excludes",
        "This file contains the output files that need %exclude. Full path "
        "names, one per line.")

    content = read_conf_file(os.path.join(path, "release"))
    if content and content[0]:
        r = int(content[0])
        if bump:
            r += 1
        tarball.release = str(r)
        print("Release     :", tarball.release)

    content = read_conf_file(os.path.join(path, "buildreq_ban"))
    for banned in content:
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file(os.path.join(path, "pkgconfig_ban"))
    for banned in content:
        banned = "pkgconfig(%s)" % banned
        print("Banning build requirement: %s." % banned)
        buildreq.banned_buildreqs.add(banned)

    content = read_conf_file(os.path.join(path, "requires_ban"))
    for banned in content:
        print("Banning runtime requirement: %s." % banned)
        buildreq.banned_requires.add(banned)

    content = read_conf_file(os.path.join(path, "buildreq_add"))
    for extra in content:
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file(os.path.join(path, "buildreq_cache"))
    if content and content[0] == version:
        for extra in content[1:]:
            print("Adding additional build (cache) requirement: %s." % extra)
            buildreq.add_buildreq(extra)

    content = read_conf_file(os.path.join(path, "pkgconfig_add"))
    for extra in content:
        extra = "pkgconfig(%s)" % extra
        print("Adding additional build requirement: %s." % extra)
        buildreq.add_buildreq(extra)

    content = read_conf_file(os.path.join(path, "requires_add"))
    for extra in content:
        print("Adding additional runtime requirement: %s." % extra)
        buildreq.add_requires(extra, override=True)

    content = read_conf_file(os.path.join(path, "excludes"))
    for exclude in content:
        print("%%exclude for: %s." % exclude)
    filemanager.excludes += content

    content = read_conf_file(os.path.join(path, "extras"))
    for extra in content:
        print("extras for  : %s." % extra)
    filemanager.extras += content
    filemanager.excludes += content

    for fname in os.listdir(path):
        if not re.search('.+_extras$', fname) or fname == "dev_extras":
            continue
        content = read_extras_config(os.path.join(path, fname))
        if not content:
            print_warning(f"Error reading custom extras file: {fname}")
            continue
        name = fname[:-len("_extras")]
        if "files" not in content or type(content['files']) is not list:
            print_warning(
                f"Invalid custom extras file: {fname} invalid or missing files list"
            )
            continue
        if "requires" in content:
            if type(content['requires']) is not list:
                print_warning(
                    f"Invalid custom extras file: {fname} invalid requires list"
                )
                continue
        print(f"{name}-extras for {content['files']}")
        filemanager.custom_extras[f"{name}-extras"] = content
        filemanager.excludes += content['files']

    content = read_conf_file(os.path.join(path, "dev_extras"))
    for extra in content:
        print("dev for     : %s." % extra)
    filemanager.dev_extras += content
    filemanager.excludes += content

    content = read_conf_file(os.path.join(path, "setuid"))
    for suid in content:
        print("setuid for  : %s." % suid)
    filemanager.setuid += content
    filemanager.excludes += content

    content = read_conf_file(os.path.join(path, "attrs"))
    for line in content:
        attr = re.split(r'\(|\)|,', line)
        attr = [a.strip() for a in attr]
        filename = attr.pop()
        print("attr for: %s." % filename)
        filemanager.attrs[filename] = attr
        filemanager.excludes.append(filename)
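    # Illustrative (hypothetical) attrs entry: a line such as
    #     %attr(4755, root, root) /usr/bin/fusermount
    # splits into attr == ['%attr', '4755', 'root', 'root'] with
    # filename == '/usr/bin/fusermount'.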

    patches += read_conf_file(os.path.join(path, "series"))
    pfiles = [("%s/%s" % (path, x.split(" ")[0])) for x in patches]
    cmd = "egrep \"(\+\+\+|\-\-\-).*((Makefile.am)|(configure.ac|configure.in))\" %s" % " ".join(
        pfiles)  # noqa: W605
    if patches and call(
            cmd, check=False, stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL) == 0:
        autoreconf = True

    if any(p.lower().startswith('cve-') for p in patches):
        config_opts['security_sensitive'] = True
        rewrite_config_opts(path)

    content = read_conf_file(os.path.join(path, "configure"))
    extra_configure = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "configure32"))
    extra_configure32 = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "configure64"))
    extra_configure64 = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "configure_avx2"))
    extra_configure_avx2 = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "configure_avx512"))
    extra_configure_avx512 = " \\\n".join(content)

    if config_opts["keepstatic"]:
        disable_static = ""
    if config_opts['broken_parallel_build']:
        parallel_build = ""

    content = read_conf_file(os.path.join(path, "make_args"))
    if content:
        extra_make = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make32_args"))
    if content:
        extra32_make = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make_install_args"))
    if content:
        extra_make_install = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "make32_install_args"))
    if content:
        extra_make32_install = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "install_macro"))
    if content and content[0]:
        install_macro = content[0]

    content = read_conf_file(os.path.join(path, "cmake_args"))
    if content:
        extra_cmake = " \\\n".join(content)

    content = read_conf_file(os.path.join(path, "cmake_srcdir"))
    if content and content[0]:
        cmake_srcdir = content[0]

    content = read_conf_file(os.path.join(path, "subdir"))
    if content and content[0]:
        subdir = content[0]

    content = read_conf_file(os.path.join(path, "build_pattern"))
    if content and content[0]:
        buildpattern.set_build_pattern(content[0], 20)
        autoreconf = False

    content = read_conf_file(os.path.join(path, "make_check_command"))
    if content:
        check.tests_config = '\n'.join(content)

    content = read_conf_file(os.path.join(path, tarball.name + ".license"))
    if content and content[0]:
        words = content[0].split()
        for word in words:
            if word.find(":") < 0:
                license.add_license(word)

    content = read_conf_file(os.path.join(path, "golang_libpath"))
    if content and content[0]:
        tarball.golibpath = content[0]
        print("golibpath   : {}".format(tarball.golibpath))

    if config_opts['use_clang']:
        config_opts['funroll-loops'] = False
        buildreq.add_buildreq("llvm")

    if config_opts['32bit']:
        buildreq.add_buildreq("glibc-libc32")
        buildreq.add_buildreq("glibc-dev32")
        buildreq.add_buildreq("gcc-dev32")
        buildreq.add_buildreq("gcc-libgcc32")
        buildreq.add_buildreq("gcc-libstdc++32")

    prep_prepend = read_conf_file(os.path.join(path, "prep_prepend"))
    if os.path.isfile(os.path.join(path, "prep_append")):
        os.rename(os.path.join(path, "prep_append"),
                  os.path.join(path, "build_prepend"))
    make_prepend = read_conf_file(os.path.join(path, "make_prepend"))
    build_prepend = read_conf_file(os.path.join(path, "build_prepend"))
    install_prepend = read_conf_file(os.path.join(path, "install_prepend"))
    if os.path.isfile(os.path.join(path, "make_install_append")):
        os.rename(os.path.join(path, "make_install_append"),
                  os.path.join(path, "install_append"))
    install_append = read_conf_file(os.path.join(path, "install_append"))

    profile_payload = read_conf_file(os.path.join(path, "profile_payload"))

    custom_desc = read_conf_file(os.path.join(path, "description"))
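
The configuration block above leans on small I/O helpers (read_conf_file, write_out) whose definitions are not shown. A minimal sketch of what they plausibly do, offered as an assumption rather than the project's actual code:

# Hypothetical sketches of the helpers assumed above; the real autospec
# utilities may differ in detail.
def read_conf_file(path):
    """Return the non-comment, non-empty lines of path, or [] if missing."""
    try:
        with open(path, "r", encoding="utf-8") as conf:
            return [line.rstrip("\n") for line in conf
                    if line.strip() and not line.startswith("#")]
    except FileNotFoundError:
        return []


def write_out(filename, content):
    """Write content to filename, creating or truncating it."""
    with open(filename, "w", encoding="utf-8") as out:
        out.write(content)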
Ejemplo n.º 26
0
def warn(msg):
    if DEBUG:
        util.print_warning(msg)
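
A minimal usage sketch for warn; the DEBUG flag comes from the snippet, but its definition here is an assumption:

DEBUG = True  # assumed module-level toggle; the real script defines it elsewhere

warn("no state file was generated for this run")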
Ejemplo n.º 27
0
def main():
    parser = argparse.ArgumentParser(
        description="Builds a parsec benchmark with contech")
    parser.add_argument("benchmark", help="The parsec bencmark to build.")
    parser.add_argument(
        "--bldconf",
        default="contech",
        help="PARSEC build configuration (bldconf) to use for building.")
    parser.add_argument(
        "--hammerOptLevel",
        help="Set hammer optimization level (bldconf==hammer only)")
    args = parser.parse_args()

    if os.environ.has_key("CONTECH_HOME"):
        CONTECH_HOME = os.environ["CONTECH_HOME"]
        stateFile = os.path.join(CONTECH_HOME, "scripts/output/",
                                 args.benchmark + ".stateFile.temp")
        os.environ["CONTECH_STATE_FILE"] = stateFile
    else:
        print ">Error: Could not find contech installation. Set CONTECH_HOME to the root of your contech directory."
        exit(1)

    if os.environ.has_key("PARSEC_HOME"):
        PARSEC_HOME = os.environ["PARSEC_HOME"]
        PARSECMGMT = os.path.join(PARSEC_HOME, "bin/parsecmgmt")
    else:
        print ">Error: Could not find parsec installation. Set PARSEC_HOME to the root of your parsec directory."
        exit(1)

    # Build the parsec benchmark
    print(">Building " + args.benchmark)

    # Prepare state file for run
    if os.path.exists(stateFile):
        os.remove(stateFile)

    # Hammer: Prepare hammer nail file
    if args.bldconf == "hammer":
        os.environ["HAMMER_NAIL_FILE"] = os.path.join(
            CONTECH_HOME,
            "backend/Hammer/compilerData/{}.bin".format(args.benchmark))
        os.environ["HAMMER_OPT_LEVEL"] = args.hammerOptLevel

    pcall([
        PARSECMGMT, "-a", "uninstall", "-p", args.benchmark, "-c", args.bldconf
    ])
    with Timer("Build"):
        returnCode = pcall([
            PARSECMGMT, "-a", "build", "-p", args.benchmark, "-c", args.bldconf
        ],
                           returnCode=True)

    # Clean up
    if os.path.exists(stateFile):
        os.remove(stateFile)
    elif args.bldconf in ["contech", "contech_marker", "hammer"]:
        util.print_warning("Warning: no statefile was generated.")

    if returnCode != 0:
        util.print_error("Build failed")
        exit(1)
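
The build script relies on pcall and Timer helpers defined elsewhere in the contech scripts. A hedged sketch of what they might look like, as assumptions rather than the project's actual implementations:

import subprocess
import time


def pcall(cmd, returnCode=False):
    """Echo and run a command list; optionally return its exit code."""
    print(" ".join(cmd))
    rc = subprocess.call(cmd)
    if returnCode:
        return rc


class Timer:
    """Context manager that prints the wall time of a labeled phase."""

    def __init__(self, label):
        self.label = label

    def __enter__(self):
        self.start = time.time()
        return self

    def __exit__(self, *exc):
        print("{} took {:.1f}s".format(self.label, time.time() - self.start))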
Ejemplo n.º 28
0
    def failed_pattern(self,
                       line,
                       config,
                       requirements,
                       pattern,
                       verbose,
                       buildtool=None):
        """Check against failed patterns to restart build as needed."""
        pat = re.compile(pattern)
        match = pat.search(line)
        if not match:
            return
        s = match.group(1)
        # standard configure cleanups
        s = cleanup_req(s)

        if s in config.ignored_commands:
            return

        try:
            if not buildtool:
                req = config.failed_commands[s]
                if req:
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(
                            req, cache=True)
                    else:
                        requirements.add_buildreq(req, cache=True)
            elif buildtool == 'pkgconfig':
                if self.short_circuit is None:
                    self.must_restart += requirements.add_pkgconfig_buildreq(
                        s, config.config_opts.get('32bit'), cache=True)
                else:
                    requirements.add_pkgconfig_buildreq(
                        s, config.config_opts.get('32bit'), cache=True)
            elif buildtool == 'R':
                if requirements.add_buildreq("R-" + s, cache=True) > 0:
                    if self.short_circuit is None:
                        self.must_restart += 1
            elif buildtool == 'perl':
                s = s.replace('inc::', '')
                if self.short_circuit is None:
                    self.must_restart += requirements.add_buildreq('perl(%s)' %
                                                                   s,
                                                                   cache=True)
                else:
                    requirements.add_buildreq('perl(%s)' % s, cache=True)
            elif buildtool == 'pypi':
                s = util.translate(s)
                if not s:
                    return
                if self.short_circuit is None:
                    self.must_restart += requirements.add_buildreq(
                        f"pypi({s.lower().replace('-', '_')})", cache=True)
                else:
                    requirements.add_buildreq(
                        f"pypi({s.lower().replace('-', '_')})", cache=True)
            elif buildtool == 'ruby':
                if s in config.gems:
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(
                            config.gems[s], cache=True)
                    else:
                        requirements.add_buildreq(config.gems[s], cache=True)
                else:
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(
                            'rubygem-%s' % s, cache=True)
                    else:
                        requirements.add_buildreq('rubygem-%s' % s, cache=True)
            elif buildtool == 'ruby table':
                if s in config.gems:
                    if self.short_circuit is None:
                        self.must_restart += requirements.add_buildreq(
                            config.gems[s], cache=True)
                    else:
                        requirements.add_buildreq(config.gems[s], cache=True)
                else:
                    print("Unknown ruby gem match", s)
            elif buildtool == 'catkin':
                if self.short_circuit is None:
                    self.must_restart += requirements.add_pkgconfig_buildreq(
                        s, config.config_opts.get('32bit'), cache=True)
                    self.must_restart += requirements.add_buildreq(s,
                                                                   cache=True)
                else:
                    requirements.add_pkgconfig_buildreq(
                        s, config.config_opts.get('32bit'), cache=True)
                    requirements.add_buildreq(s, cache=True)
        except Exception:
            if s.strip() and s not in self.warned_about and s[:2] != '--':
                util.print_warning(f"Unknown pattern match: {s}")
                self.warned_about.add(s)
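
failed_pattern first normalizes each captured requirement with cleanup_req, which is not shown. A minimal sketch under the assumption it strips quoting and trailing punctuation; the real autospec helper applies more rules:

def cleanup_req(s):
    """Hypothetical sketch: normalize a captured requirement string."""
    s = s.strip()
    # Drop surrounding quotes, then trailing punctuation left by configure
    # error messages.
    s = s.strip("'\"")
    return s.rstrip(".,;")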
Ejemplo n.º 29
0
def examine_abi_fallback(download_path, results_dir, name):
    """Missing abireport so fallback to internal scanning."""
    old_dir = os.getcwd()

    rpms = set()
    namelen = len(name)
    for item in os.listdir(results_dir):
        # Skip -extras- subpackages built from this package; the search
        # starts at namelen so a package whose own name happens to contain
        # "-extras-" is not filtered out.
        if item.find("-extras-", namelen) >= namelen:
            continue
        if item.endswith(".rpm") and not item.endswith(".src.rpm"):
            rpms.add(os.path.basename(item))

    if len(rpms) == 0:
        util.print_fatal("No usable rpms found, aborting")
        sys.exit(1)

    extract_dir = os.path.abspath(os.path.join(download_path, "__extraction"))
    purge_tree(extract_dir)

    try:
        os.makedirs(extract_dir)
    except Exception as e:
        util.print_fatal("Cannot create extraction tree: {}".format(e))
        sys.exit(1)

    os.chdir(extract_dir)

    # Extract all those rpms to our current directory
    try:
        for rpm in rpms:
            cmd = 'rpm2cpio "{}" | cpio -imd 2>/dev/null'.format(
                os.path.join(results_dir, rpm))
            subprocess.check_call(cmd, shell=True)
    except Exception as e:
        util.print_fatal("Error extracting RPMS: {}".format(e))

    os.chdir(download_path)
    collected_files = set()

    # Places we expect to find shared libraries
    for check_path in valid_dirs:
        if check_path[0] == "/":
            check_path = check_path[1:]

        dirn = os.path.join(extract_dir, check_path)
        if not os.path.isdir(dirn):
            continue

        for file in os.listdir(dirn):
            f = os.path.basename(file)

            clean_path = os.path.abspath(os.path.join(dirn, f))
            if not is_file_valid(clean_path):
                continue
            collected_files.add(clean_path)

    abi_report = dict()

    # Now examine these libraries
    for library in sorted(collected_files):
        soname = get_soname(library)
        if not soname:
            warn = "Failed to determine soname of: {}".format(library)
            util.print_warning(warn)
            soname = os.path.basename(library)
        symbols = dump_symbols(library)
        if symbols:
            abi_report.setdefault(soname, set()).update(symbols)

    report_file = os.path.join(download_path, "symbols")

    if len(abi_report) > 0:
        # Finally, write the report
        report = util.open_auto(report_file, "w")
        for soname in sorted(abi_report.keys()):
            for symbol in sorted(abi_report[soname]):
                report.write("{}:{}\n".format(soname, symbol))

        report.close()
    else:
        truncate_file(report_file)

    # Write the library report
    lib_deps = get_all_dependencies(extract_dir)
    report_file = os.path.join(download_path, "used_libs")
    if len(lib_deps) > 0:
        report = util.open_auto(report_file, "w")
        for soname in sorted(lib_deps):
            report.write("{}\n".format(soname))
        report.close()
    else:
        truncate_file(report_file)

    os.chdir(old_dir)
    purge_tree(extract_dir)
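
examine_abi_fallback depends on get_soname and dump_symbols helpers defined elsewhere. One plausible get_soname, sketched here as an assumption (the real helper may read the ELF headers directly instead of shelling out to readelf):

import re
import subprocess


def get_soname(path):
    """Return the SONAME from an ELF file's dynamic section, or None."""
    try:
        out = subprocess.check_output(
            ["readelf", "-d", path],
            stderr=subprocess.DEVNULL).decode("utf-8")
    except (subprocess.CalledProcessError, OSError):
        return None
    match = re.search(r"\(SONAME\)\s+Library soname: \[([^\]]+)\]", out)
    return match.group(1) if match else None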
Ejemplo n.º 30
0
def examine_abi(download_path):
    download_path = os.path.abspath(download_path)
    results_dir = os.path.abspath(os.path.join(download_path, "results"))

    if not os.path.exists(results_dir):
        util.print_fatal("Results directory does not exist, aborting")
        sys.exit(1)

    old_dir = os.getcwd()

    rpms = set()
    for item in os.listdir(results_dir):
        if item.endswith(".rpm") and not item.endswith(".src.rpm"):
            rpms.add(os.path.basename(item))

    if len(rpms) == 0:
        util.print_fatal("No usable rpms found, aborting")
        sys.exit(1)

    extract_dir = os.path.abspath(os.path.join(download_path, "__extraction"))
    purge_tree(extract_dir)

    try:
        os.makedirs(extract_dir)
    except Exception as e:
        util.print_fatal("Cannot create extraction tree: {}".format(e))
        sys.exit(1)

    os.chdir(extract_dir)

    # Extract all those rpms to our current directory
    try:
        for rpm in rpms:
            cmd = "rpm2cpio \"{}\" | cpio -imd 2>/dev/null".format(os.path.join(results_dir, rpm))
            subprocess.check_call(cmd, shell=True)
    except Exception as e:
        util.print_fatal("Error extracting RPMS: {}".format(e))

    os.chdir(download_path)
    collected_files = set()

    # Places we expect to find shared libraries
    for check_path in valid_dirs:
        if check_path[0] == '/':
            check_path = check_path[1:]

        dirn = os.path.join(extract_dir, check_path)
        if not os.path.isdir(dirn):
            continue

        for file in os.listdir(dirn):
            f = os.path.basename(file)

            clean_path = os.path.abspath(os.path.join(dirn, f))
            if not is_file_valid(clean_path):
                continue
            collected_files.add(clean_path)

    abi_report = dict()

    # Now examine these libraries
    for library in sorted(collected_files):
        soname = get_soname(library)
        if not soname:
            warn = "Failed to determine soname of: {}".format(library)
            util.print_warning(warn)
            soname = os.path.basename(library)
        symbols = dump_symbols(library)
        if symbols:
            abi_report.setdefault(soname, set()).update(symbols)

    report_file = os.path.join(download_path, "symbols")

    if len(abi_report) > 0:
        # Finally, write the report
        report = open(report_file, "w", encoding="utf-8")
        for soname in sorted(abi_report.keys()):
            for symbol in sorted(abi_report[soname]):
                report.write("{}:{}\n".format(soname, symbol))

        report.close()
    else:
        truncate_file(report_file)

    # Write the library report
    lib_deps = get_all_dependencies(extract_dir)
    report_file = os.path.join(download_path, "used_libs")
    if len(lib_deps) > 0:
        report = open(report_file, "w", encoding="utf-8")
        for soname in sorted(lib_deps):
            report.write("{}\n".format(soname))
        report.close()
    else:
        truncate_file(report_file)

    os.chdir(old_dir)
    purge_tree(extract_dir)
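
Both ABI routines share the purge_tree and truncate_file helpers. Minimal sketches under the assumption they are thin wrappers; the real utilities may do more:

import shutil


def purge_tree(tree):
    """Remove a directory tree, ignoring errors such as a missing path."""
    shutil.rmtree(tree, ignore_errors=True)


def truncate_file(path):
    """Create path if needed and truncate it to zero length."""
    with open(path, "w", encoding="utf-8"):
        pass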