Пример #1
0
def get_source_date_epoch_from_git(d, sourcedir):
    """Return the committer timestamp (epoch seconds) of HEAD in the
    fetched git repository, or None when SRC_URI has no git source, no
    .git folder can be found, or the repository has no usable HEAD.

    d         -- recipe datastore (SRC_URI is read from it)
    sourcedir -- unpacked source directory to search for the git folder
    """
    # Idiom fix: `"git://" not in` instead of `not "git://" in`.
    if "git://" not in d.getVar('SRC_URI'):
        return None

    gitpath = find_git_folder(d, sourcedir)
    if not gitpath:
        return None

    # Check that the repository has a valid HEAD; it may not if subdir is used
    # in SRC_URI
    p = subprocess.run(['git', 'rev-parse', 'HEAD'],
                       stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT,
                       cwd=gitpath)
    if p.returncode != 0:
        bb.debug(
            1, "%s does not have a valid HEAD: %s" %
            (gitpath, p.stdout.decode('utf-8')))
        return None

    bb.debug(1, "git repository: %s" % gitpath)
    # %ct is the committer date as a unix timestamp; check=True because a
    # repository with a valid HEAD must be able to report its last commit.
    p = subprocess.run(['git', 'log', '-1', '--pretty=%ct'],
                       check=True,
                       stdout=subprocess.PIPE,
                       cwd=gitpath)
    return int(p.stdout.decode('utf-8'))
Пример #2
0
 def download(self, Bucket, Key, Filename, quiet=True):
     """Download s3://Bucket/Key to Filename.

     Returns True on success, False on a client or OS error.  Retries up
     to 10 times when credentials are temporarily unavailable (e.g. when
     racing instance-metadata credential delivery).  When the object has
     a LastModified header, it is preserved as the local file mtime.
     """
     if self.s3client is None:
         self.makeclient()
     for attempt in range(10):
         try:
             info = self.s3client.head_object(Bucket=Bucket, Key=Key)
             self.s3client.download_file(Bucket=Bucket,
                                         Key=Key,
                                         Filename=Filename)
             if 'LastModified' in info:
                 mtime = int(time.mktime(info['LastModified'].timetuple()))
                 os.utime(Filename, (mtime, mtime))
         except botocore.exceptions.NoCredentialsError:
             # Credentials not available yet: back off and retry.
             s3retry_wait(attempt)
             continue
         except botocore.exceptions.ClientError as e:
             err = e.response['Error']
             if quiet and err['Code'] == "404":
                 bb.debug(2, "not found: {}/{}".format(Bucket, Key))
             else:
                 bb.warn("{}/{}: {} {}".format(Bucket, Key, err['Code'],
                                               err['Message']))
             return False
         except OSError as e:
             # Fix: the original `if quiet: pass` was a dead branch and the
             # warning fired unconditionally; only warn when not quiet.
             if not quiet:
                 bb.warn("os.utime({}): {} (errno {})".format(
                     Filename, e.strerror, e.errno))
             return False
         return True
     bb.warn("{}/{}: credentials error on download for 10 attempts".format(
         Bucket, Key))
     return False
Пример #3
0
    def execute(graph, item):
        """Run a single task `item` during the dependency-graph walk.

        Returns 1 when the item was handled (already in task_cache,
        delegated to exec_task for non-toplevel items, or executed here).
        Raises EventException when the task function fails.
        """
        if data.getVarFlag(item, 'task', d):
            if item in task_cache:
                # Already ran in this session; nothing to do.
                return 1

            if task != item:
                # deeper than toplevel, exec w/ deps
                exec_task(item, d)
                return 1

            try:
                debug(1, "Executing task %s" % item)
                # Run the task against a copy of the datastore with
                # OVERRIDES extended by task_<name>, so task-specific
                # overrides apply without mutating the original.
                old_overrides = data.getVar('OVERRIDES', d, 0)
                localdata = data.createCopy(d)
                data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata)
                data.update_data(localdata)
                event.fire(TaskStarted(item, localdata))
                exec_func(item, localdata)
                event.fire(TaskSucceeded(item, localdata))
                task_cache.append(item)
            except FuncFailed, reason:  # Python 2 except syntax
                note( "Task failed: %s" % reason )
                failedevent = TaskFailed(item, d)
                event.fire(failedevent)
                raise EventException(None, failedevent)
Пример #4
0
    def go(self, d, urls = []):
        """Fetch git urls: maintain a mirror tarball of each repository and
        a tarball of the requested checkout, skipping work when a stashed
        copy already exists.

        NOTE(review): `urls=[]` is a mutable default argument; it is only
        read here, so harmless in practice.
        """
        if not urls:
            urls = self.urls

        for loc in urls:
            (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, d))

            tag = gettag(parm)
            proto = getprotocol(parm)

            # Flatten host+path into a single token usable in file names.
            gitsrcname = '%s%s' % (host, path.replace('/', '.'))

            repofilename = 'git_%s.tar.gz' % (gitsrcname)
            repofile = os.path.join(data.getVar("DL_DIR", d, 1), repofilename)
            repodir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)

            coname = '%s' % (tag)
            codir = os.path.join(repodir, coname)

            cofile = self.localpath(loc, d)

            # tag=="master" must always update
            # NOTE(review): this uses localfile(loc, d) while cofile above
            # uses self.localpath(loc, d) -- presumably they should agree;
            # verify that localfile is defined and equivalent.
            if (tag != "master") and Fetch.try_mirror(d, localfile(loc, d)):
                bb.debug(1, "%s already exists (or was stashed). Skipping git checkout." % cofile)
                continue

            if not os.path.exists(repodir):
                # Prefer unpacking a stashed mirror tarball over cloning.
                if Fetch.try_mirror(d, repofilename):
                    bb.mkdirhier(repodir)
                    os.chdir(repodir)
                    rungitcmd("tar -xzf %s" % (repofile),d)
                else:
                    rungitcmd("git clone -n %s://%s%s %s" % (proto, host, path, repodir),d)

            os.chdir(repodir)
            # Refresh the mirror: pull branches and tags, then compact the
            # object store before re-tarring it.
            rungitcmd("git pull %s://%s%s" % (proto, host, path),d)
            rungitcmd("git pull --tags %s://%s%s" % (proto, host, path),d)
            rungitcmd("git prune-packed", d)
            rungitcmd("git pack-redundant --all | xargs -r rm", d)
            # Remove all but the .git directory
            rungitcmd("rm * -Rf", d)
            # old method of downloading tags
            #rungitcmd("rsync -a --verbose --stats --progress rsync://%s%s/ %s" % (host, path, os.path.join(repodir, ".git", "")),d)

            os.chdir(repodir)
            bb.note("Creating tarball of git repository")
            rungitcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ),d)

            if os.path.exists(codir):
                prunedir(codir)

            # Export the requested tag into codir/git and tar that up as
            # the per-checkout artifact.
            bb.mkdirhier(codir)
            os.chdir(repodir)
            rungitcmd("git read-tree %s" % (tag),d)
            rungitcmd("git checkout-index -q -f --prefix=%s -a" % (os.path.join(codir, "git", "")),d)

            os.chdir(codir)
            bb.note("Creating tarball of git checkout")
            rungitcmd("tar -czf %s %s" % (cofile, os.path.join(".", "*") ),d)
Пример #5
0
def exec_func_python(func, d, runfile, cwd=None):
    """Execute a python BB 'function'

    The function body from the datastore is formatted into runnable code,
    written to `runfile` (for inspection/debugging), compiled and executed
    with the datastore bound as `d`.  Runs in `cwd` when given, restoring
    the previous working directory afterwards.

    Raises FuncFailed on any error except SkipRecipe/FuncFailed, which
    propagate unchanged.
    """

    bbfile = d.getVar('FILE', True)
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        script.write(code)

    if cwd:
        try:
            olddir = os.getcwd()
        except OSError:
            # Current directory may no longer exist; nothing to restore.
            olddir = None
        os.chdir(cwd)

    bb.debug(2, "Executing python function %s" % func)

    try:
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
        # Control-flow exceptions callers expect; pass through untouched.
        raise
    except:
        # Anything else is reported uniformly as a failed function.
        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)

        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError:
                pass
Пример #6
0
def exec_func_python(func, d, runfile, cwd=None):
    """Execute a python BB 'function'

    Writes the generated function body to `runfile` for debugging,
    optionally runs in directory `cwd` (restoring the old cwd after),
    and wraps unexpected errors in FuncFailed.
    """

    bbfile = d.getVar('FILE', True)
    code = _functionfmt.format(function=func, body=d.getVar(func, True))
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        script.write(code)

    if cwd:
        try:
            olddir = os.getcwd()
        except OSError:
            # Current directory may no longer exist; nothing to restore.
            olddir = None
        os.chdir(cwd)

    bb.debug(2, "Executing python function %s" % func)

    try:
        comp = utils.better_compile(code, func, bbfile)
        utils.better_exec(comp, {"d": d}, code, bbfile)
    except (bb.parse.SkipPackage, bb.build.FuncFailed):
        # Fix: catch the pass-through exceptions directly instead of a
        # bare except that inspects sys.exc_info() -- same behavior,
        # clearer, and consistent with the sibling implementation.
        raise
    except:
        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)

        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError:
                pass
Пример #7
0
        def fetch_uri(uri, basename, dl, md5, d):
            """Fetch `uri` using FETCHCOMMAND (or RESUMECOMMAND when the
            partial download `dl` already exists) and record the md5sum of
            the download in the file named by `md5`.

            Returns True on success, False when the fetch command fails.
            """
            if os.path.exists(dl):
                # file exists, but we didnt complete it.. trying again..
                fetchcmd = bb.data.getVar("RESUMECOMMAND", d, 1)
            else:
                fetchcmd = bb.data.getVar("FETCHCOMMAND", d, 1)

            bb.note("fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            fetchcmd = fetchcmd.replace("${FILE}", basename)
            bb.debug(2, "executing " + fetchcmd)
            ret = os.system(fetchcmd)
            if ret != 0:
                return False

            # supposedly complete.. write out md5sum; an empty string is
            # written when md5sum is unavailable or fails.
            md5data = ""
            if bb.which(bb.data.getVar("PATH", d), "md5sum"):
                try:
                    md5pipe = os.popen("md5sum " + dl)
                    md5data = (md5pipe.readline().split() or [""])[0]
                    md5pipe.close()
                except OSError:
                    md5data = ""
            # Fix: use open() -- the file() builtin is Python-2-only --
            # and fold the duplicated writer branches into one.
            with open(md5, "w") as md5out:
                md5out.write(md5data)
            return True
Пример #8
0
 def readfifo(data):
     """Process NUL-separated log commands read from the worker FIFO.

     Each record is "<cmd> <value>"; the command selects which bb logging
     call forwards the value into the server's logging context.
     """
     for record in data.split('\0'):
         cmd, _, value = record.partition(' ')
         if cmd == 'bbplain':
             bb.plain(value)
         elif cmd == 'bbnote':
             bb.note(value)
         elif cmd == 'bbwarn':
             bb.warn(value)
         elif cmd == 'bberror':
             bb.error(value)
         elif cmd == 'bbfatal':
             # The caller will call exit themselves, so bb.error() is
             # what we want here rather than bb.fatal()
             bb.error(value)
         elif cmd == 'bbfatal_log':
             bb.error(value, forcelog=True)
         elif cmd == 'bbdebug':
             # bbdebug carries "<level> <message>".
             splitval = value.split(' ', 1)
             bb.debug(int(splitval[0]), splitval[1])
Пример #9
0
def run(d, cmd, *args):
    """Run `cmd` from the external toolchain's bin directory and return
    its combined stdout+stderr, or the string 'UNKNOWN' when the
    toolchain is not configured or the command fails."""
    import subprocess

    topdir = d.getVar('TMPDIR', True)
    toolchain = d.getVar('EXTERNAL_TOOLCHAIN', True)
    if not toolchain:
        return 'UNKNOWN'

    # Tools are named <target-sys>-<cmd> inside the toolchain.
    prefix = d.getVar('EXTERNAL_TARGET_SYS', True) + '-'
    argv = [os.path.join(toolchain, 'bin', prefix + cmd)] + list(args)

    try:
        return oe.path.check_output(argv,
                                    cwd=topdir,
                                    stderr=subprocess.STDOUT)
    except oe.path.CalledProcessError as exc:
        import pipes
        bb.debug(1, "{0} failed: {1}".format(
            ' '.join(pipes.quote(a) for a in argv), exc.output))
    except OSError as exc:
        import pipes
        bb.debug(1, "{0} failed: {1}".format(
            ' '.join(pipes.quote(a) for a in argv), str(exc)))
    return 'UNKNOWN'
Пример #10
0
    def try_mirror(d, tarfn):
        """
        Try to use a mirrored version of the sources. We do this
        to avoid massive loads on foreign cvs and svn servers.
        This method will be used by the different fetcher
        implementations.

        d Is a bb.data instance
        tarfn is the name of the tarball

        Returns True when the tarball already exists locally or was
        fetched from a stash mirror, False otherwise.
        """
        tarpath = os.path.join(data.getVar("DL_DIR", d, 1), tarfn)
        if os.access(tarpath, os.R_OK):
            bb.debug(1, "%s already exists, skipping checkout." % tarfn)
            return True

        pn = data.getVar('PN', d, True)
        # Fix: default to an empty list so the loop below does not raise
        # TypeError (iterating None) when PN is unset.
        src_tarball_stash = []
        if pn:
            src_tarball_stash = (data.getVar('SRC_TARBALL_STASH_%s' % pn, d, True) or data.getVar('CVS_TARBALL_STASH_%s' % pn, d, True) or data.getVar('SRC_TARBALL_STASH', d, True) or data.getVar('CVS_TARBALL_STASH', d, True) or "").split()

        for stash in src_tarball_stash:
            # FETCHCOMMAND_mirror may be undefined; fall back to wget.
            fetchcmd = data.getVar("FETCHCOMMAND_mirror", d, True) or data.getVar("FETCHCOMMAND_wget", d, True)
            uri = stash + tarfn
            bb.note("fetch " + uri)
            fetchcmd = fetchcmd.replace("${URI}", uri)
            ret = os.system(fetchcmd)
            if ret == 0:
                bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn)
                return True
        return False
Пример #11
0
 def readfifo(data):
     """Process NUL-separated log commands (bytes) from the worker FIFO.

     Each record is b"<cmd> <value>"; cmd selects the bb logging call
     that forwards value into the server's logging context.
     """
     lines = data.split(b"\0")
     for line in lines:
         splitval = line.split(b" ", 1)
         # Fix: decode the command name -- under Python 3 comparing raw
         # bytes against the str literals below is always False, so every
         # message was silently dropped.
         cmd = splitval[0].decode("utf-8")
         if len(splitval) > 1:
             value = splitval[1].decode("utf-8")
         else:
             value = ""
         if cmd == "bbplain":
             bb.plain(value)
         elif cmd == "bbnote":
             bb.note(value)
         elif cmd == "bbwarn":
             bb.warn(value)
         elif cmd == "bberror":
             bb.error(value)
         elif cmd == "bbfatal":
             # The caller will call exit themselves, so bb.error() is
             # what we want here rather than bb.fatal()
             bb.error(value)
         elif cmd == "bbfatal_log":
             bb.error(value, forcelog=True)
         elif cmd == "bbdebug":
             # bbdebug carries "<level> <message>".
             splitval = value.split(" ", 1)
             level = int(splitval[0])
             value = splitval[1]
             bb.debug(level, value)
Пример #12
0
def uri_replace(uri, uri_find, uri_replace, d=bb.data.init()):
    """Rewrite `uri` according to a find/replace pair of URI patterns.

    Each decoded string component of uri_find is treated as a regex and
    matched against the corresponding component of uri; on a match the
    component is substituted from uri_replace.  Returns the re-encoded
    URI, or the original `uri` unchanged when any component fails to
    match.

    NOTE(review): the default `d=bb.data.init()` is evaluated once at
    definition time and shared across calls -- verify this is intended.
    """
    #   bb.note("uri_replace: operating on %s" % uri)
    if not uri or not uri_find or not uri_replace:
        bb.debug(1, "uri_replace: passed an undefined value, not replacing")
    uri_decoded = list(bb.decodeurl(uri))
    uri_find_decoded = list(bb.decodeurl(uri_find))
    uri_replace_decoded = list(bb.decodeurl(uri_replace))
    result_decoded = ["", "", "", "", "", {}]
    for i in uri_find_decoded:
        loc = uri_find_decoded.index(i)
        result_decoded[loc] = uri_decoded[loc]
        import types

        # Only string components are pattern-matched; the trailing dict
        # of URI parameters is skipped (Python 2 types.StringType check).
        if type(i) == types.StringType:
            import re

            if re.match(i, uri_decoded[loc]):
                result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
                # Component index 2 is the path: when a local download of
                # the original URI exists, keep its basename.
                if uri_find_decoded.index(i) == 2:
                    if d:
                        localfn = bb.fetch.localpath(uri, d)
                        if localfn:
                            result_decoded[loc] = (
                                os.path.dirname(result_decoded[loc])
                                + "/"
                                + os.path.basename(bb.fetch.localpath(uri, d))
                            )
            #                       bb.note("uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc]))
            else:
                #               bb.note("uri_replace: no match")
                return uri
    #           else:
    #               for j in i.keys():
    #                   FIXME: apply replacements against options
    return bb.encodeurl(result_decoded)
Пример #13
0
    def _initialize_intercepts(self):
        """Create this rootfs instance's private copy of the postinst
        intercept scripts under WORKDIR."""
        bb.note("Initializing intercept dir for %s" % self.target_rootfs)
        # Multiple PackageManager instances may operate at the same time,
        # so each rootfs gets its own intercept_scripts directory, keyed
        # by a hash of the rootfs path.
        digest = hashlib.sha256(self.target_rootfs.encode()).hexdigest()
        self.intercepts_dir = os.path.join(self.d.getVar('WORKDIR'),
                                           "intercept_scripts-%s" % digest)

        scripts = (self.d.getVar("POSTINST_INTERCEPTS") or "").split()
        if not scripts:
            # No explicit list: scan a directory of intercept scripts.
            search_path = self.d.getVar("POSTINST_INTERCEPTS_PATH")
            if not search_path:
                search_path = (self.d.getVar("POSTINST_INTERCEPTS_DIR") or
                               self.d.expand("${COREBASE}/scripts/postinst-intercepts"))
            scripts = oe.path.which_wild('*', search_path)

        bb.debug(1, 'Collected intercepts:\n%s' %
                 ''.join('  %s\n' % i for i in scripts))
        bb.utils.remove(self.intercepts_dir, True)
        bb.utils.mkdirhier(self.intercepts_dir)
        for script in scripts:
            bb.utils.copyfile(
                script,
                os.path.join(self.intercepts_dir, os.path.basename(script)))
Пример #14
0
 def run_ostree(self, command, *args, **kwargs):
     """Run an ostree subcommand (formatted via self.format) and return
     its combined stdout+stderr output."""
     cmdline = 'ostree ' + self.format(command, *args, **kwargs)
     bb.debug(1, 'Running: {0}'.format(cmdline))
     return subprocess.check_output(cmdline,
                                    shell=True,
                                    stderr=subprocess.STDOUT)
Пример #15
0
def prserv_import_db(d, filter_version=None, filter_pkgarch=None, filter_checksum=None):
    """Import PRAUTO$version$pkgarch$checksum values from the datastore
    into the remote PR service.

    The optional filter_* arguments restrict which entries are imported.
    Returns the list of (version, pkgarch, checksum, value) tuples that
    were imported, or None when no PR service connection is available.
    """
    if d.getVar('USE_PR_SERV', True) != "1":
        bb.error("Not using network based PR service")
        return None

    conn = d.getVar("__PRSERV_CONN", True)
    if conn is None:
        conn = prserv_make_conn(d)
        if conn is None:
            bb.error("Making connection failed to remote PR service")
            return None
    #get the entry values
    imported = []
    prefix = "PRAUTO$"
    for v in d.keys():
        if v.startswith(prefix):
            # Peel off checksum, pkgarch and version from the right.
            (remain, sep, checksum) = v.rpartition('$')
            (remain, sep, pkgarch) = remain.rpartition('$')
            (remain, sep, version) = remain.rpartition('$')
            if (remain + '$' != prefix) or \
               (filter_version and filter_version != version) or \
               (filter_pkgarch and filter_pkgarch != pkgarch) or \
               (filter_checksum and filter_checksum != checksum):
                continue
            try:
                value = int(d.getVar(remain + '$' + version + '$' + pkgarch + '$' + checksum, True))
            except BaseException as exc:
                # Fix: bb.debug() requires a level as its first argument;
                # the original call omitted it and would itself fail.
                bb.debug(1, "Not valid value of %s:%s" % (v, str(exc)))
                continue
            ret = conn.importone(version, pkgarch, checksum, value)
            if ret != value:
                bb.error("importing(%s,%s,%s,%d) failed. DB may have larger value %d" % (version, pkgarch, checksum, value, ret))
            else:
                imported.append((version, pkgarch, checksum, value))
    return imported
Пример #16
0
def exec_func_python(func, d, runfile, cwd=None):
    """Execute a python BB 'function'

    Emits the function to `runfile` for debugging, registers its source
    with the method pool (so tracebacks map back to the original recipe
    file and line), then compiles and runs it with the datastore bound as
    `d`.  Runs in `cwd` when given, restoring the previous working
    directory afterwards.
    """

    code = _functionfmt.format(function=func)
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        bb.data.emit_func_python(func, script, d)

    if cwd:
        try:
            olddir = os.getcwd()
        except OSError as e:
            bb.warn("%s: Cannot get cwd: %s" % (func, e))
            olddir = None
        os.chdir(cwd)

    bb.debug(2, "Executing python function %s" % func)

    try:
        # Register the function text so error traces point at the recipe
        # (lineno - 1 accounts for the inserted "def" line).
        text = "def %s(d):\n%s" % (func, d.getVar(func, False))
        fn = d.getVarFlag(func, "filename", False)
        lineno = int(d.getVarFlag(func, "lineno", False))
        bb.methodpool.insert_method(func, text, fn, lineno - 1)

        comp = utils.better_compile(code, func, "exec_python_func() autogenerated")
        utils.better_exec(comp, {"d": d}, code, "exec_python_func() autogenerated")
    finally:
        bb.debug(2, "Python function %s finished" % func)

        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError as e:
                bb.warn("%s: Cannot restore cwd %s: %s" % (func, olddir, e))
Пример #17
0
 def readfifo(data):
     """Process NUL-separated log commands read from the worker FIFO.

     Each record is "<cmd> <value>"; cmd selects which bb logging call
     forwards the value into the server's logging context.
     """
     lines = data.split('\0')
     for line in lines:
         splitval = line.split(' ', 1)
         cmd = splitval[0]
         if len(splitval) > 1:
             value = splitval[1]
         else:
             value = ''
         if cmd == 'bbplain':
             bb.plain(value)
         elif cmd == 'bbnote':
             bb.note(value)
         elif cmd == 'bbwarn':
             bb.warn(value)
         elif cmd == 'bberror':
             bb.error(value)
         elif cmd == 'bbfatal':
             # The caller will call exit themselves, so bb.error() is
             # what we want here rather than bb.fatal()
             bb.error(value)
         elif cmd == 'bbfatal_log':
             bb.error(value, forcelog=True)
         elif cmd == 'bbdebug':
             # bbdebug carries "<level> <message>".
             splitval = value.split(' ', 1)
             level = int(splitval[0])
             value = splitval[1]
             bb.debug(level, value)
def fixed_source_date_epoch(d):
    """Fallback SOURCE_DATE_EPOCH: SOURCE_DATE_EPOCH_FALLBACK when set,
    otherwise 0 (the Unix epoch)."""
    bb.debug(1, "No tarball or git repo found to determine SOURCE_DATE_EPOCH")
    fallback = d.getVar('SOURCE_DATE_EPOCH_FALLBACK')
    if not fallback:
        return 0
    bb.debug(1, "Using SOURCE_DATE_EPOCH_FALLBACK")
    return int(fallback)
Пример #19
0
    def get_controller_modulenames(self, bbpath):
        """Collect oeqa.controllers.* module names found under each
        lib/oeqa/controllers directory on bbpath, deduplicated."""

        modules = []

        def scan(path):
            # A controllers directory must be a package, or importing the
            # modules will fail later.
            if not os.path.exists(os.path.join(path, '__init__.py')):
                bb.fatal(
                    'Controllers directory %s exists but is missing __init__.py'
                    % path)
            candidates = sorted(f for f in os.listdir(path)
                                if f.endswith('.py') and not f.startswith('_'))
            for fname in candidates:
                module = 'oeqa.controllers.' + fname[:-3]
                if module in modules:
                    bb.warn(
                        "Duplicate controller module found for %s, only one added. Layers should create unique controller module names"
                        % module)
                else:
                    modules.append(module)

        for layer in bbpath:
            cdir = os.path.join(layer, 'lib', 'oeqa', 'controllers')
            bb.debug(2, 'Searching for target controllers in %s' % cdir)
            if os.path.exists(cdir):
                scan(cdir)
        return modules
Пример #20
0
 def readfifo(data):
     """Process NUL-separated log commands (bytes) from the worker FIFO.

     Each non-empty record is b"<cmd> <value>"; cmd selects the bb
     logging call that forwards value, and unknown commands are reported
     with a warning.
     """
     for record in data.split(b'\0'):
         # Just skip empty commands
         if not record:
             continue
         parts = record.split(b' ', 1)
         cmd = parts[0].decode("utf-8")
         value = parts[1].decode("utf-8") if len(parts) > 1 else ''
         if cmd == 'bbplain':
             bb.plain(value)
         elif cmd == 'bbnote':
             bb.note(value)
         elif cmd == 'bbwarn':
             bb.warn(value)
         elif cmd == 'bberror':
             bb.error(value)
         elif cmd == 'bbfatal':
             # The caller will call exit themselves, so bb.error() is
             # what we want here rather than bb.fatal()
             bb.error(value)
         elif cmd == 'bbfatal_log':
             bb.error(value, forcelog=True)
         elif cmd == 'bbdebug':
             # bbdebug carries "<level> <message>".
             parts = value.split(' ', 1)
             bb.debug(int(parts[0]), parts[1])
         else:
             bb.warn("Unrecognised command '%s' on FIFO" % cmd)
Пример #21
0
 def makeclient(self):
     """Create the boto3 S3 client with IMDS retry/timeout tuning and a
     disk-backed cache for assume-role credentials."""
     # Instance-metadata credential fetches can be slow or flaky on EC2;
     # retry them and bound each attempt's timeout.
     os.environ['AWS_METADATA_SERVICE_NUM_ATTEMPTS'] = '{}'.format(self.maxtries)
     os.environ['AWS_METADATA_SERVICE_TIMEOUT'] = '{}'.format(self.metadata_timeout)
     session = botocore.session.get_session()
     self.s3client = boto3.Session(botocore_session=session).client('s3')
     # Cache assumed-role credentials on disk so successive invocations
     # do not re-assume the role every time.
     provider = session.get_component('credential_provider').get_provider('assume-role')
     provider.cache = botocore.credentials.JSONFileCache()
     # NOTE(review): _profile_name is a private botocore attribute and may
     # change between botocore versions -- verify on upgrade.
     bb.debug(1, "Using AWS profile: %s" % provider._profile_name)
Пример #22
0
def epochfile_write(source_date_epoch, epochfile, d):
    """Persist SOURCE_DATE_EPOCH to `epochfile` atomically: write to a
    sibling temp file, then rename it over the target."""
    bb.debug(1, "SOURCE_DATE_EPOCH: %d" % source_date_epoch)
    bb.utils.mkdirhier(os.path.dirname(epochfile))

    tmpname = "%s.new" % epochfile
    with open(tmpname, 'w') as out:
        out.write(str(source_date_epoch))
    # rename is atomic on the same filesystem, so readers never see a
    # partially written epoch file.
    os.rename(tmpname, epochfile)
Пример #23
0
def handle(fn, data = bb.data.init(), include = 0):
    """Parse the configuration file `fn` into datastore `data`.

    When `include` is nonzero the file is treated as an include (FILE is
    saved and restored and a parse dependency is recorded); otherwise OS
    environment inheritance is applied first.  Relative paths are
    resolved against BBPATH.  Returns the populated datastore.

    Raises IOError when the file cannot be located on BBPATH.

    NOTE(review): the file handle `f` is never closed, and the default
    `data=bb.data.init()` is evaluated once at definition time -- both
    look unintended; confirm before relying on this function.
    """
    if include:
        inc_string = "including"
    else:
        inc_string = "reading"
    init(data)

    if include == 0:
        bb.data.inheritFromOS(data)
        oldfile = None
    else:
        oldfile = bb.data.getVar('FILE', data)

    fn = obtain(fn, data)
    bbpath = []
    if not os.path.isabs(fn):
        # Relative path: search each BBPATH entry for a readable copy.
        f = None
        vbbpath = bb.data.getVar("BBPATH", data)
        if vbbpath:
            bbpath += vbbpath.split(":")
        for p in bbpath:
            currname = os.path.join(bb.data.expand(p, data), fn)
            if os.access(currname, os.R_OK):
                f = open(currname, 'r')
                abs_fn = currname
                debug(1, "CONF %s %s" % (inc_string, currname))
                break
        if f is None:
            raise IOError("file '%s' not found" % fn)
    else:
        f = open(fn,'r')
        debug(1, "CONF %s %s" % (inc_string,fn))
        abs_fn = fn

    if include:
        bb.parse.mark_dependency(data, abs_fn)

    lineno = 0
    bb.data.setVar('FILE', fn, data)
    while 1:
        lineno = lineno + 1
        s = f.readline()
        if not s: break
        w = s.strip()
        if not w: continue          # skip empty lines
        s = s.rstrip()
        if s[0] == '#': continue    # skip comments
        # Join backslash-continued lines into one logical line before
        # feeding it to the statement parser.
        while s[-1] == '\\':
            s2 = f.readline()[:-1].strip()
            lineno = lineno + 1
            s = s[:-1] + s2
        feeder(lineno, s, fn, data)

    if oldfile:
        bb.data.setVar('FILE', oldfile, data)
    return data
Пример #24
0
 def _parse_path(self, regex, s):
     """
     Find and group name, version and archive type in the given string s

     Returns a (name, version, type) tuple on a match, otherwise None.
     `regex` must define the named groups 'name', 'ver' and 'type'.
     """
     bb.debug(3, "parse_path(%s, %s)" % (regex.pattern, s))
     match = regex.search(s)
     if not match:
         return None
     groups = (match.group('name'), match.group('ver'), match.group('type'))
     bb.debug(3, "%s, %s, %s" % groups)
     return groups
Пример #25
0
File: wget.py Project: xleng/poky
 def _parse_path(self, regex, s):
     """
     Find and group name, version and archive type in the given string s

     Returns a (name, version, type) tuple on a match, otherwise None.
     `regex` must define the named groups 'name', 'ver' and 'type'.
     """
     bb.debug(3, "parse_path(%s, %s)" % (regex.pattern, s))
     m = regex.search(s)
     if m:
         bb.debug(3, "%s, %s, %s" % (m.group('name'), m.group('ver'), m.group('type')))
         return (m.group('name'), m.group('ver'), m.group('type'))
     return None
Пример #26
0
 def applyOverrides(var, d = _data):
     """Apply OVERRIDES to `var`: for each active override o where the
     name ends in _<o>, copy the value of var_<o> onto the base variable
     name, writing directly into the underlying dict `d`.
     """
     if not overrides:
         debug(1, "OVERRIDES not defined, nothing to do")
         return
     # Fix: dropped the dead `val = getVar(var, d)` -- the value was
     # never used.
     for o in overrides:
         if var.endswith("_" + o):
             l = len(o)+1
             name = var[:-l]
             d[name] = d[var]
Пример #27
0
    def _check_latest_version_by_dir(self, dirver, package, package_regex,
                                     current_version, ud, d):
        """
        Scan every directory in order to get upstream version.

        `dirver` is the version-bearing path component of ud.path.  Each
        sibling directory whose version looks at least as new is scanned
        via _check_latest_version; the newest upstream version string
        found is returned ('' when none).
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        # pfx: non-digit prefix (may contain path separators);
        # ver: dotted/dashed/underscored numeric version.
        dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group('ver')
        else:
            version_dir[1] = dirver

        # Index page of the parent directory holding the version dirs.
        dirs_uri = bb.fetch.encodeurl(
            [ud.type, ud.host,
             ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d),
                             "html.parser",
                             parse_only=SoupStrainer("a"))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                sver = s.group('ver')

                # When prefix is part of the version directory it need to
                # ensure that only version directory is used so remove previous
                # directories if exists.
                #
                # Example: pfx = '/dir1/dir2/v' and version = '2.5' the expected
                # result is v2.5.
                spfx = s.group('pfx').split('/')[-1]

                version_dir_new = ['', sver, '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = spfx + sver
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl(
                        [ud.type, ud.host, path, ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(
                        uri, package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    # Remember the newest directory version seen so far.
                    version_dir = version_dir_new

        return version[1]
Пример #28
0
def mkdirhier(dir):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """

    bb.debug(3, "mkdirhier(%s)" % dir)
    try:
        os.makedirs(dir)
        bb.debug(2, "created " + dir)
    except OSError, e:  # Python 2 except syntax
        # errno 17 is EEXIST: directory already present, which is fine.
        if e.errno != 17: raise e
Пример #29
0
    def _get_tests_list(self, bbpath, extrapath):
        """Resolve self.testsuites into fully-qualified oeqa module names.

        Named suites are located under <layer>/<extrapath>/<namespace> on
        bbpath; the special name "auto" additionally pulls in every test
        module found under lib/oeqa/<namespace>.  Unknown suite names are
        fatal.
        """
        testslist = []

        # NOTE(review): `type` shadows the builtin; it holds the test
        # namespace string (e.g. "runtime").
        type = self._get_test_namespace()

        # This relies on lib/ under each directory in BBPATH being added to sys.path
        # (as done by default in base.bbclass)
        for testname in self.testsuites:
            if testname != "auto":
                if testname.startswith("oeqa."):
                    # Already fully qualified; accept as-is.
                    testslist.append(testname)
                    continue
                found = False
                for p in bbpath:
                    # NOTE(review): both branches append the same module
                    # name; the elif additionally tolerates dotted names
                    # like "module.Class" by checking only the module file.
                    if os.path.exists(
                            os.path.join(p, extrapath, type,
                                         testname + ".py")):
                        testslist.append("oeqa." + type + "." + testname)
                        found = True
                        break
                    elif os.path.exists(
                            os.path.join(p, extrapath, type,
                                         testname.split(".")[0] + ".py")):
                        testslist.append("oeqa." + type + "." + testname)
                        found = True
                        break
                if not found:
                    bb.fatal(
                        'Test %s specified in TEST_SUITES could not be found in lib/oeqa/runtime under BBPATH'
                        % testname)

        if "auto" in self.testsuites:

            def add_auto_list(path):
                # Auto-discovered directories must be packages.
                if not os.path.exists(os.path.join(path, '__init__.py')):
                    bb.fatal(
                        'Tests directory %s exists but is missing __init__.py'
                        % path)
                files = sorted([
                    f for f in os.listdir(path)
                    if f.endswith('.py') and not f.startswith('_')
                ])
                for f in files:
                    module = 'oeqa.' + type + '.' + f[:-3]
                    if module not in testslist:
                        testslist.append(module)

            for p in bbpath:
                testpath = os.path.join(p, 'lib', 'oeqa', type)
                bb.debug(2, 'Searching for tests in %s' % testpath)
                if os.path.exists(testpath):
                    add_auto_list(testpath)

        return testslist
Пример #30
0
def mkdirhier(dir):
    """Create a directory like 'mkdir -p', but does not complain if
    directory already exists like os.makedirs
    """

    bb.debug(3, "mkdirhier(%s)" % dir)
    try:
        os.makedirs(dir)
        bb.debug(2, "created " + dir)
    except OSError, e:  # Python 2 except syntax
        # errno 17 is EEXIST: directory already present, which is fine.
        if e.errno != 17: raise e
Пример #31
0
def rungitcmd(cmd, d):
    """Run a git command, exporting PATH from the metadata first.

    Git is likely to live in metadata-provided paths rather than
    host-provided ones, so PATH is set explicitly in the shell command.
    Raises FetchError when the command exits non-zero.
    """
    bb.debug(1, "Running %s" % cmd)

    pathcmd = 'export PATH=%s; %s' % (data.expand('${PATH}', d), cmd)
    if os.system(pathcmd) != 0:
        raise FetchError("Git: %s failed" % pathcmd)
Пример #32
0
    def _check_latest_version_by_dir(self, dirver, package, package_regex,
            current_version, ud, d):
        """
            Scan every directory in order to get upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        # Raw string: '\D' and '\d' are invalid escape sequences in a plain
        # string literal (SyntaxWarning, and an error on newest Python).
        dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])+(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group('ver')
        else:
            version_dir[1] = dirver

        # URL of the parent directory that contains the version directories.
        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                sver = s.group('ver')

                # When prefix is part of the version directory it need to
                # ensure that only version directory is used so remove previous
                # directories if exists.
                #
                # Example: pfx = '/dir1/dir2/v' and version = '2.5' the expected
                # result is v2.5.
                spfx = s.group('pfx').split('/')[-1]

                version_dir_new = ['', sver, '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = spfx + sver
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
                        ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(uri,
                            package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]
Пример #33
0
 def get_object_info(self, Bucket, Key, quiet=True):
     """Return S3 HEAD metadata for Bucket/Key, or None on client error.

     A 404 is logged quietly (debug) when quiet=True; any other client
     error is surfaced as a warning.
     """
     if self.s3client is None:
         self.makeclient()
     try:
         return self.s3client.head_object(Bucket=Bucket, Key=Key)
     except botocore.exceptions.ClientError as exc:
         error = exc.response['Error']
         if quiet and error['Code'] == "404":
             bb.debug(2, "not found: {}/{}".format(Bucket, Key))
         else:
             bb.warn("{}/{}: {} {}".format(Bucket, Key, error['Code'], error['Message']))
         return None
Пример #34
0
def include(oldfn, fn, data=None):
    """Parse and include the configuration file *fn* into *data*.

    oldfn is the file doing the including; including a file into itself
    is a no-op. A missing file is only logged, not fatal.
    """
    if oldfn == fn:  # prevent infinite recursion
        return None

    import bb
    # A `data=bb.data.init()` default would be evaluated once at function
    # definition time and shared between every call; build a fresh
    # datastore per call instead.
    if data is None:
        data = bb.data.init()

    fn = bb.data.expand(fn, data)
    oldfn = bb.data.expand(oldfn, data)

    from bb.parse import handle
    try:
        handle(fn, data, 1)
    except IOError:
        debug(2, "CONF file '%s' not found" % fn)
Пример #35
0
def init(urls=None, d=None):
    """Reset every fetch method and register the urls it supports.

    d must be a datastore; calling with d=None is logged as a bug and
    ignored.
    """
    if d is None:
        bb.debug(2, "BUG init called with None as data object!!!")
        return

    # A mutable default argument ([]) is shared across calls; use a
    # None sentinel instead.
    if urls is None:
        urls = []

    for m in methods:
        m.urls = []

    for u in urls:
        for m in methods:
            m.data = d
            if m.supports(u, d):
                m.urls.append(u)
Пример #36
0
def exec_func_shell(func, d, runfile, cwd=None):
    """Execute a shell function from the metadata

    Note on directory behavior.  The 'dirs' varflag should contain a list
    of the directories you need created prior to execution.  The last
    item in the list is where we will chdir/cd to.
    """

    # Don't let the emitted shell script override PWD
    d.delVarFlag('PWD', 'export')

    with open(runfile, 'w') as script:
        script.write(shell_trap_code())

        bb.data.emit_func(func, script, d)

        if bb.msg.loggerVerboseLogs:
            script.write("set -x\n")
        if cwd:
            script.write("cd '%s'\n" % cwd)
        script.write("%s\n" % func)
        # Capture the function's exit status before clearing the trap;
        # 'exit $?' here would report the status of the 'trap' builtin
        # (always 0) instead of the function's own result.
        script.write('''
# cleanup
ret=$?
trap '' 0
exit $ret
''')

    # 0o775 (Python-2 literal 0775 is a syntax error on Python 3):
    # the generated script must be executable.
    os.chmod(runfile, 0o775)

    cmd = runfile
    if d.getVarFlag(func, 'fakeroot'):
        fakerootcmd = d.getVar('FAKEROOT', True)
        if fakerootcmd:
            cmd = [fakerootcmd, runfile]

    if bb.msg.loggerDefaultVerbose:
        logfile = LogTee(logger, sys.stdout)
    else:
        logfile = sys.stdout

    bb.debug(2, "Executing shell function %s" % func)

    try:
        with open(os.devnull, 'r+') as stdin:
            bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
    except bb.process.CmdError:
        logfn = d.getVar('BB_LOGFILE', True)
        raise FuncFailed(func, logfn)

    bb.debug(2, "Shell function %s finished" % func)
Пример #37
0
def exec_func_shell(func, d, runfile, cwd=None):
    """Execute a shell function from the metadata

    Note on directory behavior.  The 'dirs' varflag should contain a list
    of the directories you need created prior to execution.  The last
    item in the list is where we will chdir/cd to.
    """

    # Don't let the emitted shell script override PWD
    d.delVarFlag('PWD', 'export')

    with open(runfile, 'w') as script:
        script.write(shell_trap_code())

        bb.data.emit_func(func, script, d)

        if bb.msg.loggerVerboseLogs:
            script.write("set -x\n")
        if cwd:
            script.write("cd '%s'\n" % cwd)
        script.write("%s\n" % func)
        # Capture the function's exit status before clearing the trap;
        # 'exit $?' here would report the status of the 'trap' builtin
        # (always 0) instead of the function's own result.
        script.write('''
# cleanup
ret=$?
trap '' 0
exit $ret
''')

    # 0o775 (Python-2 literal 0775 is a syntax error on Python 3):
    # the generated script must be executable.
    os.chmod(runfile, 0o775)

    cmd = runfile
    if d.getVarFlag(func, 'fakeroot'):
        fakerootcmd = d.getVar('FAKEROOT', True)
        if fakerootcmd:
            cmd = [fakerootcmd, runfile]

    if bb.msg.loggerDefaultVerbose:
        logfile = LogTee(logger, sys.stdout)
    else:
        logfile = sys.stdout

    bb.debug(2, "Executing shell function %s" % func)

    try:
        with open(os.devnull, 'r+') as stdin:
            bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
    except bb.process.CmdError:
        logfn = d.getVar('BB_LOGFILE', True)
        raise FuncFailed(func, logfn)

    bb.debug(2, "Shell function %s finished" % func)
Пример #38
0
    def download(self, ud, d):
        """Fetch an svn url: update an existing checkout or check out a
        fresh working copy, then pack it into the tarball ud.localpath."""

        logger.debug2("Fetch: checking for module directory '" + ud.moddir + "'")

        # Serialise access to the shared svn working copy.
        lf = bb.utils.lockfile(ud.svnlock)

        try:
            if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
                svncmd = self._buildsvncommand(ud, d, "update")
                logger.info("Update " + ud.url)
                # We need to attempt to run svn upgrade first in case its an older working format
                try:
                    runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
                except FetchError:
                    # Best-effort: upgrade failing is expected on current formats.
                    pass
                logger.debug("Running %s", svncmd)
                bb.fetch2.check_network_access(d, svncmd, ud.url)
                runfetchcmd(svncmd, d, workdir=ud.moddir)
            else:
                svncmd = self._buildsvncommand(ud, d, "fetch")
                logger.info("Fetch " + ud.url)
                # check out sources there
                bb.utils.mkdirhier(ud.pkgdir)
                logger.debug("Running %s", svncmd)
                bb.fetch2.check_network_access(d, svncmd, ud.url)
                runfetchcmd(svncmd, d, workdir=ud.pkgdir)

            if not ("externals" in ud.parm and ud.parm["externals"] == "nowarn"):
                # Warn the user if this had externals (won't catch them all)
                output = runfetchcmd("svn propget svn:externals || true", d, workdir=ud.moddir)
                if output:
                    if "--ignore-externals" in svncmd.split():
                        bb.warn("%s contains svn:externals." % ud.url)
                        bb.warn("These should be added to the recipe SRC_URI as necessary.")
                        bb.warn("svn fetch has ignored externals:\n%s" % output)
                        bb.warn("To disable this warning add ';externals=nowarn' to the url.")
                    else:
                        bb.debug(1, "svn repository has externals:\n%s" % output)

            scmdata = ud.parm.get("scmdata", "")
            if scmdata == "keep":
                # scmdata=keep: include the .svn metadata in the tarball
                tar_flags = ""
            else:
                tar_flags = "--exclude='.svn'"

            # tar them up to a defined filename
            runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
                        cleanup=[ud.localpath], workdir=ud.pkgdir)
        finally:
            bb.utils.unlockfile(lf)
Пример #39
0
def obtain(fn, data=None):
    """Fetch *fn* if it is remote and return the local path to it.

    On any fetch problem the (possibly not yet existing) local path is
    still returned; failures are only logged.
    """
    import sys, bb
    # A `data=bb.data.init()` default would be evaluated once at function
    # definition time and shared between every call; build a fresh
    # datastore per call instead.
    if data is None:
        data = bb.data.init()

    fn = bb.data.expand(fn, data)
    localfn = bb.data.expand(localpath(fn, data), data)

    if localfn != fn:
        dldir = bb.data.getVar('DL_DIR', data, 1)
        if not dldir:
            debug(1, "obtain: DL_DIR not defined")
            return localfn
        bb.mkdirhier(dldir)
        try:
            bb.fetch.init([fn])
        except bb.fetch.NoMethodError as e:
            debug(1, "obtain: no method: %s" % e)
            return localfn

        try:
            bb.fetch.go(data)
        except bb.fetch.MissingParameterError as e:
            debug(1, "obtain: missing parameters: %s" % e)
            return localfn
        except bb.fetch.FetchError as e:
            debug(1, "obtain: failed: %s" % e)
            return localfn
    return localfn
Пример #40
0
def inherit(files, d):
    """Inherit each bbclass named in *files* into datastore *d*.

    Classes already recorded in __inherit_cache are skipped so that each
    class is only ever included once.
    """
    cache = data.getVar('__inherit_cache', d) or ""
    fn, lineno = "", 0
    for entry in files:
        classfile = data.expand(entry, d)
        if classfile[0] != "/" and classfile[-8:] != ".bbclass":
            classfile = os.path.join('classes', '%s.bbclass' % classfile)

        if classfile not in cache.split():
            debug(2, "BB %s:%d: inheriting %s" % (fn, lineno, classfile))
            cache += " %s" % classfile
            include(fn, classfile, d)
    data.setVar('__inherit_cache', cache, d)
Пример #41
0
    def latest_versionstring(self, ud, d):
        """
        Manipulate the URL and try to obtain the latest package version

        sanity check to ensure same name and type.

        Returns a (version, revision) tuple; revision is always '' here.
        """
        # The package is the last path component of the URL.
        package = ud.path.split("/")[-1]
        current_version = ['', d.getVar('PV'), '']
        """possible to have no version in pkg name, such as spectrum-fw"""
        if not re.search(r"\d+", package):
            # Normalise separators in PV and report it unchanged.
            current_version[1] = re.sub('_', '.', current_version[1])
            current_version[1] = re.sub('-', '.', current_version[1])
            return (current_version[1], '')

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s don't match pattern" %
                    (package))
            return ('', '')
        bb.debug(3,
                 "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        # UPSTREAM_CHECK_URI overrides the URI derived from SRC_URI.
        regex_uri = d.getVar("UPSTREAM_CHECK_URI")
        if not regex_uri:
            path = ud.path.split(package)[0]

            # search for version matches on folders inside the path, like:
            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
            dirver_regex = re.compile(
                r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
            m = dirver_regex.search(path)
            if m:
                pn = d.getVar('PN')
                dirver = m.group('dirver')

                # Only treat the directory as a version directory when it
                # is not simply the package name itself (e.g. "gtk2").
                dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
                if not dirver_pn_regex.search(dirver):
                    return (self._check_latest_version_by_dir(
                        dirver, package, package_regex, current_version, ud,
                        d), '')

            uri = bb.fetch.encodeurl(
                [ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return (self._check_latest_version(uri, package, package_regex,
                                           current_version, ud, d), '')
Пример #42
0
def get_source_date_epoch_from_known_files(d, sourcedir):
    """Derive SOURCE_DATE_EPOCH from well-known changelog-style files.

    Checks NEWS/ChangeLog/Changelog/CHANGES under *sourcedir* and returns
    the mtime (int) of the youngest one present, or None if none exist.
    """
    newest_mtime = None
    newest_file = None
    for candidate in ("NEWS", "ChangeLog", "Changelog", "CHANGES"):
        filepath = os.path.join(sourcedir, candidate)
        if not os.path.isfile(filepath):
            continue
        mtime = int(os.lstat(filepath).st_mtime)
        # More than one known file may be present; keep the youngest.
        if not newest_mtime or mtime > newest_mtime:
            newest_mtime = mtime
            newest_file = filepath
    if newest_file:
        bb.debug(1, "SOURCE_DATE_EPOCH taken from: %s" % newest_file)
    return newest_mtime
Пример #43
0
def copy_from_sysroots(pathnames, sysroots, mirrors, installdest):
    '''Copy the specified files from the specified sysroots, also checking the
    specified mirror patterns as alternate paths, to the specified destination.'''
    import subprocess

    candidates = expand_paths(pathnames, mirrors)
    for path, files in search_sysroots(candidates, sysroots):
        if not files:
            bb.debug(1, 'Failed to find `{}`'.format(path))
            continue
        destdir = oe.path.join(installdest, os.path.dirname(path))
        bb.utils.mkdirhier(destdir)
        # -pPR: preserve attributes, never follow symlinks, recurse.
        subprocess.check_call(['cp', '-pPR'] + list(files) + [destdir + '/'])
        bb.note('Copied `{}`  to `{}/`'.format(', '.join(files), destdir))
Пример #44
0
 def makeclient(self):
     """Create the shared boto3 S3 client with raised metadata-service
     timeout/retry settings and a cached assume-role credential provider."""
     session = botocore.session.get_session()
     if session.get_config_variable('metadata_service_timeout') < 10:
         bb.debug(1, "increasing metadata service timeout to 10 seconds")
         session.set_config_variable('metadata_service_timeout', 10)
     if session.get_config_variable('metadata_service_num_attempts') < self.maxtries:
         bb.debug(
             1, "increasing metadata service retries to %d" % self.maxtries)
         session.set_config_variable('metadata_service_num_attempts',
                                     self.maxtries)
     # Cache assumed-role credentials on disk between invocations.
     provider = session.get_component('credential_provider').get_provider('assume-role')
     provider.cache = botocore.credentials.JSONFileCache()
     self.s3client = boto3.Session(botocore_session=session).client('s3')
Пример #45
0
def run(d, cmd, *args):
    """Run a binary from the external toolchain and return its output.

    Returns the string 'UNKNOWN' when no toolchain is configured or the
    command fails.
    """
    topdir = d.getVar('TOPDIR', True)
    toolchain_path = d.getVar('EXTERNAL_TOOLCHAIN', True)
    if not toolchain_path:
        return 'UNKNOWN'

    prefix = d.getVar('EXTERNAL_TARGET_SYS', True) + '-'
    argv = [os.path.join(toolchain_path, 'bin', prefix + cmd)] + list(args)
    try:
        output, _ = bb.process.run(argv, cwd=topdir)
    except bb.process.CmdError as exc:
        bb.debug(1, str(exc))
        return 'UNKNOWN'
    return output
Пример #46
0
def run(d, cmd, *args):
    """Run a binary from the external toolchain and return its output.

    The command runs in TMPDIR. Returns the string 'UNKNOWN' when no
    toolchain is configured or the command fails.
    """
    workdir = d.getVar('TMPDIR', True)
    toolchain_path = d.getVar('EXTERNAL_TOOLCHAIN', True)
    if not toolchain_path:
        return 'UNKNOWN'

    prefix = d.getVar('EXTERNAL_TARGET_SYS', True) + '-'
    argv = [os.path.join(toolchain_path, 'bin', prefix + cmd)] + list(args)
    try:
        output, _ = bb.process.run(argv, cwd=workdir)
    except bb.process.CmdError as exc:
        bb.debug(1, str(exc))
        return 'UNKNOWN'
    return output
Пример #47
0
def copy_from_sysroots(pathnames, sysroots, mirrors, installdest):
    '''Copy the specified files from the specified sysroots, also checking the
    specified mirror patterns as alternate paths, to the specified destination.'''
    import subprocess

    for path, files in search_sysroots(expand_paths(pathnames, mirrors), sysroots):
        if not files:
            bb.debug(1, 'Failed to find `{}`'.format(path))
            continue
        destdir = oe.path.join(installdest, os.path.dirname(path))
        bb.utils.mkdirhier(destdir)
        # -pPR: preserve attributes, never follow symlinks, recurse.
        subprocess.check_call(['cp', '-pPR'] + list(files) + [destdir + '/'])
        bb.note('Copied `{}`  to `{}/`'.format(', '.join(files), destdir))
Пример #48
0
    def latest_versionstring(self, ud, d):
        """
        Manipulate the URL and try to obtain the latest package version

        sanity check to ensure same name and type.

        Returns a (version, revision) tuple; revision is always '' here.
        """
        package = ud.path.split("/")[-1]
        current_version = ["", d.getVar("PV", True), ""]

        # possible to have no version in pkg name, such as spectrum-fw
        # Raw strings throughout: '\d' is an invalid escape sequence in a
        # plain string literal (SyntaxWarning on modern Python).
        if not re.search(r"\d+", package):
            current_version[1] = re.sub("_", ".", current_version[1])
            current_version[1] = re.sub("-", ".", current_version[1])
            return (current_version[1], "")

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s don't match pattern" % (package))
            return ("", "")
        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        regex_uri = d.getVar("UPSTREAM_CHECK_URI", True)
        if not regex_uri:
            path = ud.path.split(package)[0]

            # search for version matches on folders inside the path, like:
            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
            dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
            m = dirver_regex.search(path)
            if m:
                pn = d.getVar("PN", True)
                dirver = m.group("dirver")

                # Only treat the directory as a version directory when it is
                # not simply the package name itself.
                dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
                if not dirver_pn_regex.search(dirver):
                    return (
                        self._check_latest_version_by_dir(dirver, package, package_regex, current_version, ud, d),
                        "",
                    )

            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return (self._check_latest_version(uri, package, package_regex, current_version, ud, d), "")
Пример #49
0
    def _check_latest_version_by_dir(self, dirver, package, package_regex,
                                     current_version, ud, d):
        """
            Scan every directory in order to get upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        # Raw string: '\D' and '\d' are invalid escape sequences in a
        # plain string literal (SyntaxWarning on modern Python).
        dirver_regex = re.compile(r"(\D*)((\d+[\.\-_])+(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group(2)
        else:
            version_dir[1] = dirver

        # URL of the parent directory that contains the version directories.
        dirs_uri = bb.fetch.encodeurl(
            [ud.type, ud.host,
             ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d),
                             "html.parser",
                             parse_only=SoupStrainer("a"))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                version_dir_new = ['', s.group(2), '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = s.group(1) + s.group(2)
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl(
                        [ud.type, ud.host, path, ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(
                        uri, package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]
Пример #50
0
 def get_object_info(self, Bucket, Key, quiet=True):
     """HEAD an S3 object, retrying transient credential failures.

     Returns the head_object response dict, or None on client error or
     after ten consecutive credential failures.
     """
     if self.s3client is None:
         self.makeclient()
     for attempt in range(10):
         try:
             return self.s3client.head_object(Bucket=Bucket, Key=Key)
         except botocore.exceptions.NoCredentialsError:
             # Credentials can be briefly unavailable (metadata service
             # hiccup); back off and try again.
             s3retry_wait(attempt)
         except botocore.exceptions.ClientError as exc:
             error = exc.response['Error']
             if quiet and error['Code'] == "404":
                 bb.debug(2, "not found: {}/{}".format(Bucket, Key))
             else:
                 bb.warn("{}/{}: {} {}".format(Bucket, Key, error['Code'], error['Message']))
             return None
     bb.warn("{}/{}: credentials error on get_object_info for 10 attempts".format(Bucket, Key))
     return None
Пример #51
0
def exec_func_python(func, d, runfile, cwd=None, pythonexception=False):
    """Execute a python BB 'function'

    The function body is emitted to *runfile* for logging/debugging, then
    compiled and executed in-process with the datastore bound as 'd'.
    When pythonexception is False, any unexpected error is re-raised as
    FuncFailed; SkipRecipe/FuncFailed always propagate unchanged.
    """

    code = _functionfmt.format(function=func)
    # Persist the generated script alongside the task logs.
    bb.utils.mkdirhier(os.path.dirname(runfile))
    with open(runfile, 'w') as script:
        bb.data.emit_func_python(func, script, d)

    if cwd:
        # Remember where we were so we can restore it afterwards; getcwd
        # can fail if the current directory no longer exists.
        try:
            olddir = os.getcwd()
        except OSError:
            olddir = None
        os.chdir(cwd)

    bb.debug(2, "Executing python function %s" % func)

    try:
        # Register the function body so tracebacks map to the original
        # metadata file and line number.
        text = "def %s(d):\n%s" % (func, d.getVar(func, False))
        fn = d.getVarFlag(func, "filename", False)
        lineno = int(d.getVarFlag(func, "lineno", False))
        bb.methodpool.insert_method(func, text, fn, lineno - 1)

        comp = utils.better_compile(code, func,
                                    "exec_python_func() autogenerated")
        utils.better_exec(comp, {"d": d},
                          code,
                          "exec_python_func() autogenerated",
                          pythonexception=pythonexception)
    except (bb.parse.SkipRecipe, bb.build.FuncFailed):
        raise
    except:
        # Wrap anything else as FuncFailed unless the caller asked for the
        # raw python exception.
        if pythonexception:
            raise
        raise FuncFailed(func, None)
    finally:
        bb.debug(2, "Python function %s finished" % func)

        # Restore the original working directory even on failure.
        if cwd and olddir:
            try:
                os.chdir(olddir)
            except OSError:
                pass
Пример #52
0
def arch_config_sub(d, arch):
    """Canonicalise *arch* with the config.sub script, caching results.

    Returns the canonical architecture string, or *arch* unchanged when
    running config.sub fails.
    """
    try:
        config_sub_cache = globals()['config_sub_cache']
    except KeyError:
        config_sub_cache = {}
        globals()['config_sub_cache'] = config_sub_cache

    try:
        canonical_arch = config_sub_cache[arch]

    except KeyError:
        script = arch_find_script(d, 'config.sub')
        try:
            # bb.debug requires a log level as its first argument.
            bb.debug(1, "%s %s"%(script, arch))
            canonical_arch = os.popen("%s %s"%(script, arch)).readline().strip()
            config_sub_cache[arch] = canonical_arch
        except OSError as e:
            # Python-2 "except OSError, e" is a syntax error on Python 3.
            bb.error("config.sub(%s) failed: %s"%(arch, e))
            return arch

    # Previously the computed/cached value was silently dropped (the
    # function returned None on success); return it to the caller.
    return canonical_arch
Пример #53
0
def prserv_import_db(d,
                     filter_version=None,
                     filter_pkgarch=None,
                     filter_checksum=None):
    """Import PRAUTO$version$pkgarch$checksum datastore entries into the
    remote PR service.

    The optional filter_* arguments restrict which entries are imported.
    Returns the list of imported (version, pkgarch, checksum, value)
    tuples, or None when no PR service connection is available.
    """
    if d.getVar('USE_PR_SERV', True) != "1":
        bb.error("Not using network based PR service")
        return None

    conn = d.getVar("__PRSERV_CONN", True)
    if conn is None:
        conn = prserv_make_conn(d)
        if conn is None:
            bb.error("Making connection failed to remote PR service")
            return None
    #get the entry values
    imported = []
    prefix = "PRAUTO$"
    for v in d.keys():
        if v.startswith(prefix):
            # Split the key back into its version/pkgarch/checksum parts.
            (remain, sep, checksum) = v.rpartition('$')
            (remain, sep, pkgarch) = remain.rpartition('$')
            (remain, sep, version) = remain.rpartition('$')
            if (remain + '$' != prefix) or \
               (filter_version and filter_version != version) or \
               (filter_pkgarch and filter_pkgarch != pkgarch) or \
               (filter_checksum and filter_checksum != checksum):
                continue
            try:
                value = int(
                    d.getVar(
                        remain + '$' + version + '$' + pkgarch + '$' +
                        checksum, True))
            except (ValueError, TypeError) as exc:
                # int() raises ValueError for malformed strings and
                # TypeError for an unset (None) value; catching
                # BaseException here also swallowed KeyboardInterrupt.
                # bb.debug also needs a log level as its first argument.
                bb.debug(1, "Not valid value of %s:%s" % (v, str(exc)))
                continue
            ret = conn.importone(version, pkgarch, checksum, value)
            if ret != value:
                bb.error(
                    "importing(%s,%s,%s,%d) failed. DB may have larger value %d"
                    % (version, pkgarch, checksum, value, ret))
            else:
                imported.append((version, pkgarch, checksum, value))
    return imported
Пример #54
0
def include(oldfn, fn, data=None, error_out=False):
    """Parse and include the file *fn* into *data*.

    error_out: if True a ParseError will be raised when the file to be
    included cannot be found; otherwise the failure is only logged.
    """
    if oldfn == fn:  # prevent infinite recursion
        return None

    import bb
    # A `data=bb.data.init()` default would be evaluated once at function
    # definition time and shared between every call; build a fresh
    # datastore per call instead.
    if data is None:
        data = bb.data.init()

    fn = bb.data.expand(fn, data)
    oldfn = bb.data.expand(oldfn, data)

    from bb.parse import handle
    try:
        handle(fn, data, True)
    except IOError:
        if error_out:
            raise ParseError("Could not include required file %(fn)s" % vars() )
        debug(2, "CONF file '%s' not found" % fn)
Пример #55
0
    def check_for_tarball(d, tarfn, dldir, date):
        """
        Check for a local copy then check the tarball stash.
        Both checks are skipped if date == 'now'.

        d Is a bb.data instance
        tarfn is the name of the tarball
        date is the SRCDATE
        """
        if date == "now":
            return False

        local = os.path.join(dldir, tarfn)
        if os.access(local, os.R_OK):
            bb.debug(1, "%s already exists, skipping checkout." % tarfn)
            return True

        # fall back to the tarball stash
        return bool(Fetch.try_mirror(d, tarfn))
Пример #56
0
def arch_config_sub(d, arch):
    """Canonicalise *arch* with the config.sub script, caching results.

    Returns the canonical architecture string, or *arch* unchanged when
    running config.sub fails.
    """
    try:
        config_sub_cache = globals()['config_sub_cache']
    except KeyError:
        config_sub_cache = {}
        globals()['config_sub_cache'] = config_sub_cache

    try:
        canonical_arch = config_sub_cache[arch]

    except KeyError:
        script = arch_find_script(d, 'config.sub')
        try:
            # bb.debug requires a log level as its first argument.
            bb.debug(1, "%s %s"%(script, arch))
            canonical_arch = os.popen("%s %s"%(script, arch)).readline().strip()
            config_sub_cache[arch] = canonical_arch
        except OSError as e:
            # Python-2 "except OSError, e" is a syntax error on Python 3.
            bb.error("config.sub(%s) failed: %s"%(arch, e))
            return arch

    # Previously the computed/cached value was silently dropped (the
    # function returned None on success); return it to the caller.
    return canonical_arch
Пример #57
0
    def _get_tests_list(self, bbpath, extrapath):
        """Resolve self.testsuites into a list of fully-qualified oeqa
        module names, searching each BBPATH entry; 'auto' pulls in every
        test module found under lib/oeqa/<namespace>."""
        testslist = []

        # Renamed from 'type', which shadowed the builtin. This is the
        # oeqa namespace the tests live under.
        namespace = self._get_test_namespace()

        # This relies on lib/ under each directory in BBPATH being added to sys.path
        # (as done by default in base.bbclass)
        for testname in self.testsuites:
            if testname != "auto":
                if testname.startswith("oeqa."):
                    # Already fully qualified.
                    testslist.append(testname)
                    continue
                found = False
                for p in bbpath:
                    # Try "name.py" first, then the first dotted component.
                    if os.path.exists(os.path.join(p, extrapath, namespace, testname + ".py")):
                        testslist.append("oeqa." + namespace + "." + testname)
                        found = True
                        break
                    elif os.path.exists(os.path.join(p, extrapath, namespace, testname.split(".")[0] + ".py")):
                        testslist.append("oeqa." + namespace + "." + testname)
                        found = True
                        break
                if not found:
                    bb.fatal('Test %s specified in TEST_SUITES could not be found in lib/oeqa/runtime under BBPATH' % testname)

        if "auto" in self.testsuites:
            def add_auto_list(path):
                # Every module in a valid test package, except private ones.
                if not os.path.exists(os.path.join(path, '__init__.py')):
                    bb.fatal('Tests directory %s exists but is missing __init__.py' % path)
                files = sorted([f for f in os.listdir(path) if f.endswith('.py') and not f.startswith('_')])
                for f in files:
                    module = 'oeqa.' + namespace + '.' + f[:-3]
                    if module not in testslist:
                        testslist.append(module)

            for p in bbpath:
                testpath = os.path.join(p, 'lib', 'oeqa', namespace)
                bb.debug(2, 'Searching for tests in %s' % testpath)
                if os.path.exists(testpath):
                    add_auto_list(testpath)

        return testslist
Пример #58
0
    def _check_latest_version_by_dir(self, dirver, package, package_regex,
            current_version, ud, d):
        """
            Scan every directory in order to get upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        # Fixed character class: the original '[\.-_]' formed a range from
        # '.' (0x2e) to '_' (0x5f) which wrongly matched digits and
        # uppercase letters as version separators. Also a raw string, as
        # '\D'/'\d' are invalid escapes in a plain literal.
        dirver_regex = re.compile(r"(\D*)((\d+[\.\-_])+(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group(2)
        else:
            version_dir[1] = dirver

        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))

        # Name the parser explicitly, matching the sibling helpers and
        # avoiding bs4's "no parser specified" warning.
        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser")
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                version_dir_new = ['', s.group(2), '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = s.group(1) + s.group(2)
                    path = ud.path.replace(dirver, dirver_new, True) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
                        ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(uri,
                            package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]
Пример #59
0
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns the list of variable names that were removed.
    """

    import bb

    removed_vars = []
    # Iterate over a snapshot: deleting from os.environ while iterating
    # its live keys() view raises RuntimeError on Python 3.
    for key in list(os.environ):
        if key in good_vars:
            continue

        removed_vars.append(key)
        os.unsetenv(key)
        del os.environ[key]

    if removed_vars:
        # bb.debug(level, msg) takes a single message; the extra positional
        # argument previously passed meant the variable list never made it
        # into the log output.
        bb.debug(1, "Removed the following variables from the environment: %s"
                 % ", ".join(removed_vars))

    return removed_vars
Пример #60
0
def epochfile_read(epochfile, d):
    """Return SOURCE_DATE_EPOCH (as a string) read from *epochfile*.

    The value is cached in the datastore keyed by the epoch file path.
    A missing file or an unparsable value falls back to
    SOURCE_DATE_EPOCH_FALLBACK.
    """
    cached, cached_file = d.getVar('__CACHED_SOURCE_DATE_EPOCH') or (None, None)
    if cached:
        if cached_file == epochfile:
            # Cache hit for the same epoch file.
            return cached
        bb.debug(1, "Epoch file changed from %s to %s" % (cached_file, epochfile))

    epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
    try:
        with open(epochfile, 'r') as f:
            contents = f.read()
            try:
                epoch = int(contents)
            except ValueError:
                bb.warn("SOURCE_DATE_EPOCH value '%s' is invalid. Reverting to SOURCE_DATE_EPOCH_FALLBACK" % contents)
                epoch = int(d.getVar('SOURCE_DATE_EPOCH_FALLBACK'))
        bb.debug(1, "SOURCE_DATE_EPOCH: %d" % epoch)
    except FileNotFoundError:
        bb.debug(1, "Cannot find %s. SOURCE_DATE_EPOCH will default to %d" % (epochfile, epoch))

    d.setVar('__CACHED_SOURCE_DATE_EPOCH', (str(epoch), epochfile))
    return str(epoch)