Example #1
def ensure_npm(config, basepath, fixed_setup=False, check_exists=True):
    """
    Ensure that npm is available and either build it or show a
    reasonable error message
    """
    if check_exists:
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            rd = tinfoil.parse_recipe('nodejs-native')
            nativepath = rd.getVar('STAGING_BINDIR_NATIVE')
        finally:
            tinfoil.shutdown()
        npmpath = os.path.join(nativepath, 'npm')
        build_npm = not os.path.exists(npmpath)
    else:
        build_npm = True

    if build_npm:
        logger.info('Building nodejs-native')
        try:
            exec_build_env_command(config.init_path, basepath,
                                'bitbake -q nodejs-native -c addto_recipe_sysroot', watch=True)
        except bb.process.ExecutionError as e:
            if "Nothing PROVIDES 'nodejs-native'" in e.stdout:
                if fixed_setup:
                    msg = 'nodejs-native is required for npm but is not available within this SDK'
                else:
                    msg = 'nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs'
                raise DevtoolError(msg)
            else:
                raise
Example #2
File: __init__.py Project: lsandov1/poky
def ensure_npm(config, basepath, fixed_setup=False, check_exists=True):
    """
    Ensure that npm is available and either build it or show a
    reasonable error message
    """
    if check_exists:
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            rd = tinfoil.parse_recipe('nodejs-native')
            nativepath = rd.getVar('STAGING_BINDIR_NATIVE')
        finally:
            tinfoil.shutdown()
        npmpath = os.path.join(nativepath, 'npm')
        build_npm = not os.path.exists(npmpath)
    else:
        build_npm = True

    if build_npm:
        logger.info('Building nodejs-native')
        try:
            exec_build_env_command(
                config.init_path,
                basepath,
                'bitbake -q nodejs-native -c addto_recipe_sysroot',
                watch=True)
        except bb.process.ExecutionError as e:
            if "Nothing PROVIDES 'nodejs-native'" in e.stdout:
                if fixed_setup:
                    msg = 'nodejs-native is required for npm but is not available within this SDK'
                else:
                    msg = 'nodejs-native is required for npm but is not available - you will likely need to add a layer that provides nodejs'
                raise DevtoolError(msg)
            else:
                raise
Example #3
    def test_patch_recipe_singleappend(self):
        import oe.recipeutils
        rd = tinfoil.parse_recipe('recipeutils-test')
        val = rd.getVar('SRC_URI', False).split()
        del val[1]
        val = ' '.join(val)
        vals = {'SRC_URI': val}
        patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)

        expected_patch = """
--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
@@ -8,6 +8,4 @@
 
 BBCLASSEXTEND = "native nativesdk"
 
-SRC_URI += "file://somefile"
-
 SRC_URI_append = " file://anotherfile"
"""
        patchlines = []
        for f in patches:
            for line in f:
                patchlines.append(line)
        self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
Example #4
    def test_patch_recipe_appends(self):
        import oe.recipeutils
        rd = tinfoil.parse_recipe('recipeutils-test')
        val = rd.getVar('SRC_URI', False).split()
        vals = {'SRC_URI': val[0]}
        patches = oe.recipeutils.patch_recipe(rd,
                                              rd.getVar('FILE'),
                                              vals,
                                              patch=True,
                                              relpath=metaselftestpath)

        expected_patch = """
--- a/recipes-test/recipeutils/recipeutils-test_1.2.bb
+++ b/recipes-test/recipeutils/recipeutils-test_1.2.bb
@@ -8,6 +8,3 @@
 
 BBCLASSEXTEND = "native nativesdk"
 
-SRC_URI += "file://somefile"
-
-SRC_URI:append = " file://anotherfile"
"""
        patchlines = []
        for f in patches:
            for line in f:
                patchlines.append(line)
        self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
Example #5
File: tinfoil.py Project: hizukiayaka/poky
    def test_variable_history(self):
        # Basic test to ensure that variable history works when tracking=True
        with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            # Note that _tracking for any datastore we get will be
            # false here, that's currently expected - so we can't check
            # for that
            history = tinfoil.config_data.varhistory.variable('DL_DIR')
            for entry in history:
                if entry['file'].endswith('/bitbake.conf'):
                    if entry['op'] in ['set', 'set?']:
                        break
            else:
                self.fail('Did not find history entry setting DL_DIR in bitbake.conf. History: %s' % history)
            # Check it works for recipes as well
            testrecipe = 'zlib'
            rd = tinfoil.parse_recipe(testrecipe)
            history = rd.varhistory.variable('LICENSE')
            bbfound = -1
            recipefound = -1
            for i, entry in enumerate(history):
                if entry['file'].endswith('/bitbake.conf'):
                    if entry['detail'] == 'INVALID' and entry['op'] in ['set', 'set?']:
                        bbfound = i
                elif entry['file'].endswith('.bb'):
                    if entry['op'] == 'set':
                        recipefound = i
            if bbfound == -1:
                self.fail('Did not find history entry setting LICENSE in bitbake.conf parsing %s recipe. History: %s' % (testrecipe, history))
            if recipefound == -1:
                self.fail('Did not find history entry setting LICENSE in %s recipe. History: %s' % (testrecipe, history))
            if bbfound > recipefound:
                self.fail('History entry setting LICENSE in %s recipe and in bitbake.conf in wrong order. History: %s' % (testrecipe, history))
Example #6
File: tinfoil.py Project: 01org/luv-yocto
    def test_variable_history(self):
        # Basic test to ensure that variable history works when tracking=True
        with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
            tinfoil.prepare(config_only=False, quiet=2)
            # Note that _tracking for any datastore we get will be
            # false here, that's currently expected - so we can't check
            # for that
            history = tinfoil.config_data.varhistory.variable('DL_DIR')
            for entry in history:
                if entry['file'].endswith('/bitbake.conf'):
                    if entry['op'] in ['set', 'set?']:
                        break
            else:
                self.fail('Did not find history entry setting DL_DIR in bitbake.conf. History: %s' % history)
            # Check it works for recipes as well
            testrecipe = 'zlib'
            rd = tinfoil.parse_recipe(testrecipe)
            history = rd.varhistory.variable('LICENSE')
            bbfound = -1
            recipefound = -1
            for i, entry in enumerate(history):
                if entry['file'].endswith('/bitbake.conf'):
                    if entry['detail'] == 'INVALID' and entry['op'] in ['set', 'set?']:
                        bbfound = i
                elif entry['file'].endswith('.bb'):
                    if entry['op'] == 'set':
                        recipefound = i
            if bbfound == -1:
                self.fail('Did not find history entry setting LICENSE in bitbake.conf parsing %s recipe. History: %s' % (testrecipe, history))
            if recipefound == -1:
                self.fail('Did not find history entry setting LICENSE in %s recipe. History: %s' % (testrecipe, history))
            if bbfound > recipefound:
                self.fail('History entry setting LICENSE in %s recipe and in bitbake.conf in wrong order. History: %s' % (testrecipe, history))
Example #7
    def test_patch_recipe_varflag(self):
        import oe.recipeutils
        rd = tinfoil.parse_recipe('python3-async-test')
        vals = {'SRC_URI[md5sum]': 'aaaaaa', 'LICENSE': 'something'}
        patches = oe.recipeutils.patch_recipe(rd, rd.getVar('FILE'), vals, patch=True, relpath=metaselftestpath)

        expected_patch = """
--- a/recipes-devtools/python/python-async-test.inc
+++ b/recipes-devtools/python/python-async-test.inc
@@ -1,14 +1,14 @@
 SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
 HOMEPAGE = "http://github.com/gitpython-developers/async"
 SECTION = "devel/python"
-LICENSE = "BSD"
+LICENSE = "something"
 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
 
 inherit pypi
 
 PYPI_PACKAGE = "async"
 
-SRC_URI[md5sum] = "9b06b5997de2154f3bc0273f80bcef6b"
+SRC_URI[md5sum] = "aaaaaa"
 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
 
 RDEPENDS_${PN} += "${PYTHON_PN}-threading"
"""
        patchlines = []
        for f in patches:
            for line in f:
                patchlines.append(line)
        self.maxDiff = None
        self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
Example #8
def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
    """
    launch_cmd means run the command directly; there is no need to set up a rootfs or environment variables.
    """

    import bb.tinfoil
    import bb.build

    # Need a non-'BitBake' logger to capture the runner output
    targetlogger = logging.getLogger('TargetRunner')
    targetlogger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stdout)
    targetlogger.addHandler(handler)

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        # Tell QemuTarget() whether it needs to find the rootfs/kernel or not
        if launch_cmd:
            tinfoil.config_data.setVar("FIND_ROOTFS", '0')
        else:
            tinfoil.config_data.setVar("FIND_ROOTFS", '1')

        recipedata = tinfoil.parse_recipe(pn)
        for key, value in overrides.items():
            recipedata.setVar(key, value)

        logdir = recipedata.getVar("TEST_LOG_DIR")

        qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    try:
        qemu.deploy()
        try:
            qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes)
        except bb.build.FuncFailed:
            msg = 'Failed to start QEMU - see the logs in %s' % logdir
            if os.path.exists(qemu.qemurunnerlog):
                with open(qemu.qemurunnerlog, 'r') as f:
                    msg = msg + "Qemurunner log output from %s:\n%s" % (qemu.qemurunnerlog, f.read())
            raise Exception(msg)

        yield qemu

    finally:
        targetlogger.removeHandler(handler)
        try:
            qemu.stop()
        except:
            pass
Example #9
def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []
    data_copy_list = []
    copy_vars = (
        'SRC_URI',
        'PV',
        'GITDIR',
        'DL_DIR',
        'PN',
        'CACHE',
        'PERSISTENT_DIR',
        'BB_URI_HEADREVS',
        'UPSTREAM_CHECK_COMMITS',
        'UPSTREAM_CHECK_GITTAGREGEX',
        'UPSTREAM_CHECK_REGEX',
        'UPSTREAM_CHECK_URI',
        'UPSTREAM_VERSION_UNKNOWN',
        'RECIPE_MAINTAINER',
        'RECIPE_NO_UPDATE_REASON',
        'RECIPE_UPSTREAM_VERSION',
        'RECIPE_UPSTREAM_DATE',
        'CHECK_DATE',
    )

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                bb.note(" Skip package %s as upstream check unreliable" % pn)
                continue

            data_copy = bb.data.init()
            for var in copy_vars:
                data_copy.setVar(var, data.getVar(var))
            for k in data:
                if k.startswith('SRCREV'):
                    data_copy.setVar(k, data.getVar(k))

            data_copy_list.append(data_copy)

    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
        pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)

    return pkgs_list
Example #10
def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
    """
    launch_cmd means run the command directly; there is no need to set up a rootfs or environment variables.
    """

    import bb.tinfoil
    import bb.build

    # Need a non-'BitBake' logger to capture the runner output
    targetlogger = logging.getLogger('TargetRunner')
    targetlogger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stdout)
    targetlogger.addHandler(handler)

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        # Tell QemuTarget() whether it needs to find the rootfs/kernel or not
        if launch_cmd:
            tinfoil.config_data.setVar("FIND_ROOTFS", '0')
        else:
            tinfoil.config_data.setVar("FIND_ROOTFS", '1')

        recipedata = tinfoil.parse_recipe(pn)
        for key, value in overrides.items():
            recipedata.setVar(key, value)

        logdir = recipedata.getVar("TEST_LOG_DIR")

        qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    try:
        qemu.deploy()
        try:
            qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes)
        except bb.build.FuncFailed:
            msg = 'Failed to start QEMU - see the logs in %s' % logdir
            if os.path.exists(qemu.qemurunnerlog):
                with open(qemu.qemurunnerlog, 'r') as f:
                    msg = msg + "Qemurunner log output from %s:\n%s" % (qemu.qemurunnerlog, f.read())
            raise Exception(msg)

        yield qemu

    finally:
        targetlogger.removeHandler(handler)
        try:
            qemu.stop()
        except:
            pass
Example #11
def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []
    data_copy_list = []
    copy_vars = ('SRC_URI',
                 'PV',
                 'GITDIR',
                 'DL_DIR',
                 'PN',
                 'CACHE',
                 'PERSISTENT_DIR',
                 'BB_URI_HEADREVS',
                 'UPSTREAM_CHECK_COMMITS',
                 'UPSTREAM_CHECK_GITTAGREGEX',
                 'UPSTREAM_CHECK_REGEX',
                 'UPSTREAM_CHECK_URI',
                 'UPSTREAM_VERSION_UNKNOWN',
                 'RECIPE_MAINTAINER',
                 'RECIPE_NO_UPDATE_REASON',
                 'RECIPE_UPSTREAM_VERSION',
                 'RECIPE_UPSTREAM_DATE',
                 'CHECK_DATE',
            )

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                bb.note(" Skip package %s as upstream check unreliable" % pn)
                continue

            data_copy = bb.data.init()
            for var in copy_vars:
                data_copy.setVar(var, data.getVar(var))
            for k in data:
                if k.startswith('SRCREV'):
                    data_copy.setVar(k, data.getVar(k))

            data_copy_list.append(data_copy)

    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
        pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)

    return pkgs_list
Example #12
def harvest_data(machines, recipes):
    import bb.tinfoil
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        corepath = layer_path("core", tinfoil.config_data)
        sys.path.append(os.path.join(corepath, "lib"))
    import oe.recipeutils
    import oe.patch

    # Queue of recipes that we're still looking for upstream releases for
    to_check = list(recipes)

    # Upstream releases
    upstreams = {}
    # Machines to recipes to versions
    versions = {}

    for machine in machines:
        print(f"Gathering data for {machine}...")
        os.environ["MACHINE"] = machine
        with bb.tinfoil.Tinfoil() as tinfoil:
            versions[machine] = {}

            tinfoil.prepare(quiet=2)
            for recipe in recipes:
                try:
                    d = tinfoil.parse_recipe(recipe)
                except bb.providers.NoProvider:
                    continue

                if recipe in to_check:
                    try:
                        info = oe.recipeutils.get_recipe_upstream_version(d)
                        upstreams[recipe] = info["version"]
                        to_check.remove(recipe)
                    except (bb.providers.NoProvider, KeyError):
                        pass

                details = versions[machine][recipe] = {}
                details["recipe"] = d.getVar("PN")
                details["version"] = trim_pv(d.getVar("PV"))
                details["fullversion"] = d.getVar("PV")
                details["patches"] = [extract_patch_info(p, d) for p in oe.patch.src_patches(d)]
                details["patched"] = bool(details["patches"])
                details["patches_safe"] = safe_patches(details["patches"])

    # Now backfill the upstream versions
    for machine in versions:
        for recipe in versions[machine]:
            data = versions[machine][recipe]
            data["upstream"] = upstreams[recipe]
            data["needs_update"] = needs_update(data["version"], data["upstream"])
    return upstreams, versions
Example #13
def runqemu(pn, ssh=True):

    import bb.tinfoil
    import bb.build

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        recipedata = tinfoil.parse_recipe(pn)

        # The QemuRunner log is saved out, but we need to ensure it is at the right
        # log level (and then ensure that since it's a child of the BitBake logger,
        # we disable propagation so we don't then see the log events on the console)
        logger = logging.getLogger('BitBake.QemuRunner')
        logger.setLevel(logging.DEBUG)
        logger.propagate = False
        logdir = recipedata.getVar("TEST_LOG_DIR")

        qemu = oeqa.targetcontrol.QemuTarget(recipedata)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    # Setup bitbake logger as console handler is removed by tinfoil.shutdown
    bblogger = logging.getLogger('BitBake')
    bblogger.setLevel(logging.INFO)
    console = logging.StreamHandler(sys.stdout)
    bbformat = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if sys.stdout.isatty():
        bbformat.enable_color()
    console.setFormatter(bbformat)
    bblogger.addHandler(console)

    try:
        qemu.deploy()
        try:
            qemu.start(ssh=ssh)
        except bb.build.FuncFailed:
            raise Exception('Failed to start QEMU - see the logs in %s' % logdir)

        yield qemu

    finally:
        try:
            qemu.stop()
        except:
            pass
Example #14
def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None):

    import bb.tinfoil
    import bb.build

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        recipedata = tinfoil.parse_recipe(pn)

        # The QemuRunner log is saved out, but we need to ensure it is at the right
        # log level (and then ensure that since it's a child of the BitBake logger,
        # we disable propagation so we don't then see the log events on the console)
        logger = logging.getLogger('BitBake.QemuRunner')
        logger.setLevel(logging.DEBUG)
        logger.propagate = False
        logdir = recipedata.getVar("TEST_LOG_DIR")

        qemu = oeqa.targetcontrol.QemuTarget(recipedata, image_fstype)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    # Setup bitbake logger as console handler is removed by tinfoil.shutdown
    bblogger = logging.getLogger('BitBake')
    bblogger.setLevel(logging.INFO)
    console = logging.StreamHandler(sys.stdout)
    bbformat = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if sys.stdout.isatty():
        bbformat.enable_color()
    console.setFormatter(bbformat)
    bblogger.addHandler(console)

    try:
        qemu.deploy()
        try:
            qemu.start(ssh=ssh, runqemuparams=runqemuparams)
        except bb.build.FuncFailed:
            raise Exception('Failed to start QEMU - see the logs in %s' % logdir)

        yield qemu

    finally:
        try:
            qemu.stop()
        except:
            pass
Example #15
def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                bb.note(" Skip package %s as upstream check unreliable" % pn)
                continue

            uv = get_recipe_upstream_version(data)

            pn = data.getVar('PN')
            cur_ver = uv['current_version']

            upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN')
            if not uv['version']:
                status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
            else:
                cmp = vercmp_string(uv['current_version'], uv['version'])
                if cmp == -1:
                    status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
                elif cmp == 0:
                    status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
                else:
                    status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"

            next_ver = uv['version'] if uv['version'] else "N/A"
            revision = uv['revision'] if uv['revision'] else "N/A"
            maintainer = data.getVar('RECIPE_MAINTAINER')
            no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')

            pkgs_list.append((pn, status, cur_ver, next_ver, maintainer,
                              revision, no_upgrade_reason))

    return pkgs_list
Example #16
def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                bb.note(" Skip package %s as upstream check unreliable" % pn)
                continue

            uv = get_recipe_upstream_version(data)

            pn = data.getVar('PN')
            cur_ver = uv['current_version']

            upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN')
            if not uv['version']:
                status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
            else:
                cmp = vercmp_string(uv['current_version'], uv['version'])
                if cmp == -1:
                    status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
                elif cmp == 0:
                    status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
                else:
                    status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"

            next_ver = uv['version'] if uv['version'] else "N/A"
            revision = uv['revision'] if uv['revision'] else "N/A"
            maintainer = data.getVar('RECIPE_MAINTAINER')
            no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')

            pkgs_list.append((pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason))

    return pkgs_list
Example #17
    def test_patch_recipe_varflag(self):
        import oe.recipeutils
        rd = tinfoil.parse_recipe('python3-async-test')
        vals = {'SRC_URI[md5sum]': 'aaaaaa', 'LICENSE': 'something'}
        patches = oe.recipeutils.patch_recipe(rd,
                                              rd.getVar('FILE'),
                                              vals,
                                              patch=True,
                                              relpath=metaselftestpath)

        expected_patch = """
--- a/recipes-devtools/python/python-async-test.inc
+++ b/recipes-devtools/python/python-async-test.inc
@@ -1,14 +1,14 @@
 SUMMARY = "Python framework to process interdependent tasks in a pool of workers"
 HOMEPAGE = "http://github.com/gitpython-developers/async"
 SECTION = "devel/python"
-LICENSE = "BSD-3-Clause"
+LICENSE = "something"
 LIC_FILES_CHKSUM = "file://PKG-INFO;beginline=8;endline=8;md5=88df8e78b9edfd744953862179f2d14e"
 
 inherit pypi
 
 PYPI_PACKAGE = "async"
 
-SRC_URI[md5sum] = "9b06b5997de2154f3bc0273f80bcef6b"
+SRC_URI[md5sum] = "aaaaaa"
 SRC_URI[sha256sum] = "ac6894d876e45878faae493b0cf61d0e28ec417334448ac0a6ea2229d8343051"
 
 RDEPENDS:${PN} += "${PYTHON_PN}-threading"
"""
        patchlines = []
        for f in patches:
            for line in f:
                patchlines.append(line)
        self.maxDiff = None
        self.assertEqual(''.join(patchlines).strip(), expected_patch.strip())
Example #18
def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
    """
    launch_cmd means run the command directly; there is no need to set up a rootfs or environment variables.
    """

    import bb.tinfoil
    import bb.build

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        # Tell QemuTarget() whether it needs to find the rootfs/kernel or not
        if launch_cmd:
            tinfoil.config_data.setVar("FIND_ROOTFS", '0')
        else:
            tinfoil.config_data.setVar("FIND_ROOTFS", '1')

        recipedata = tinfoil.parse_recipe(pn)
        for key, value in overrides.items():
            recipedata.setVar(key, value)

        # The QemuRunner log is saved out, but we need to ensure it is at the right
        # log level (and then ensure that since it's a child of the BitBake logger,
        # we disable propagation so we don't then see the log events on the console)
        logger = logging.getLogger('BitBake.QemuRunner')
        logger.setLevel(logging.DEBUG)
        logger.propagate = False
        logdir = recipedata.getVar("TEST_LOG_DIR")

        qemu = oeqa.targetcontrol.QemuTarget(recipedata, image_fstype)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    # Setup bitbake logger as console handler is removed by tinfoil.shutdown
    bblogger = logging.getLogger('BitBake')
    bblogger.setLevel(logging.INFO)
    console = logging.StreamHandler(sys.stdout)
    bbformat = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if sys.stdout.isatty():
        bbformat.enable_color()
    console.setFormatter(bbformat)
    bblogger.addHandler(console)

    try:
        qemu.deploy()
        try:
            qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes)
        except bb.build.FuncFailed:
            raise Exception('Failed to start QEMU - see the logs in %s' % logdir)

        yield qemu

    finally:
        try:
            qemu.stop()
        except:
            pass
Example #19
#!/usr/bin/env python3

import os
import sys
import bb.tinfoil
import bb.fetch2

with bb.tinfoil.Tinfoil() as tinfoil:
    tinfoil.prepare(config_only=False)

    ret = []

    print("\nprocessing...\n")
    for recipe in tinfoil.all_recipes():
        if 'libretro' in recipe.inherits() or 'libretro-cmake' in recipe.inherits():
            parsed = tinfoil.parse_recipe(recipe.pn)
            src_uris = parsed.getVar('SRC_URI')
            src_uri = src_uris.split()[0]
            ud = bb.fetch2.FetchData(src_uri, parsed)
            revision = ud.method.latest_revision(ud, parsed, 'default')
            ret.append('LIBRETRO_GIT_REV:pn-{0} ?= "{1}"'.format(
                recipe.pn, revision))
            print(ret[-1])

    print("\nresults:\n")
    for value in sorted(ret):
        print(value)
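
All of the examples above share the same core flow: create a Tinfoil instance, call prepare() with config_only=False, obtain a recipe datastore with parse_recipe(), read variables from it with getVar(), and shut Tinfoil down (or let the context manager do it). The minimal sketch below distills that pattern; it assumes an initialized BitBake/OpenEmbedded build environment, and the recipe name 'zlib' is only an illustrative choice.

import bb.tinfoil

# Minimal sketch, assuming the build environment has been set up
# (e.g. oe-init-build-env has been sourced) so BitBake can find its
# configuration and layers.
with bb.tinfoil.Tinfoil() as tinfoil:
    # config_only=False enables full recipe parsing, not just the global
    # configuration; quiet=2 suppresses most console output.
    tinfoil.prepare(config_only=False, quiet=2)

    # 'zlib' is just an example; any recipe provided by the configured
    # layers can be parsed the same way.
    rd = tinfoil.parse_recipe('zlib')

    # The returned object is a recipe datastore; variables are read with
    # getVar(), exactly as in the examples above.
    print(rd.getVar('PN'), rd.getVar('PV'))
    print(rd.getVar('FILE'))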