Example #1
 def checkstatus(self, uri, ud, d):
     fetchcmd = "%s ls-remote %s" % (ud.basecmd, uri)
     try:
         runfetchcmd(fetchcmd, d, quiet=True)
         return True
     except FetchError:
         return False
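Every example here leans on the same contract: runfetchcmd runs a shell command on BitBake's behalf and raises FetchError on a non-zero exit status, returning the command's output otherwise. A minimal sketch of that contract (a hypothetical simplification; the real bb.fetch2.runfetchcmd also handles logging, exported variables, progress handlers, and cleanup):

    import subprocess

    class FetchError(Exception):
        # Stand-in for bb.fetch2.FetchError, for this sketch only.
        pass

    def runfetchcmd_sketch(cmd, d, quiet=False, workdir=None):
        # Hypothetical simplification: run the shell command and raise
        # FetchError on failure, which is what checkstatus() above
        # converts into a True/False result.
        proc = subprocess.run(cmd, shell=True, cwd=workdir,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT, text=True)
        if proc.returncode != 0:
            raise FetchError("Fetch command failed: %s" % cmd)
        return proc.stdout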
Example #2
    def download(self, ud, d):
        """Fetch url"""
        jsondepobj = {}
        shrinkobj = {}
        lockdown = {}

        if not os.listdir(ud.pkgdatadir) and os.path.exists(ud.fullmirror):
            dest = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(dest)
            runfetchcmd("tar -xJf %s" % (ud.fullmirror), d, workdir=dest)
            return

        shwrf = d.getVar('NPM_SHRINKWRAP', True)
        logger.debug(2, "NPM shrinkwrap file is %s" % shwrf)
        try:
            with open(shwrf) as datafile:
                shrinkobj = json.load(datafile)
        except:
            logger.warning('Missing shrinkwrap file in NPM_SHRINKWRAP for %s, this will lead to unreliable builds!' % ud.pkgname)
        lckdf = d.getVar('NPM_LOCKDOWN', True)
        logger.debug(2, "NPM lockdown file is %s" % lckdf)
        try:
            with open(lckdf) as datafile:
                lockdown = json.load(datafile)
        except:
            logger.warning('Missing lockdown file in NPM_LOCKDOWN for %s, this will lead to unreproducible builds!' % ud.pkgname)

        if 'name' not in shrinkobj:
            self._getdependencies(ud.pkgname, jsondepobj, ud.version, d, ud)
        else:
            self._getshrinkeddependencies(ud.pkgname, shrinkobj, ud.version, d, ud, lockdown, jsondepobj)

        with open(ud.localpath, 'w') as outfile:
            json.dump(jsondepobj, outfile)
Example #3
File: gitsm.py Project: kacf/poky
 def unpack(self, ud, destdir, d):
     Git.unpack(self, ud, destdir, d)
     
     submodules = self.uses_submodules(ud, d, ud.destdir)
     if submodules:
         runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
         runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
Example #4
    def download(self, ud, d):
        """Fetch urls"""

        urlo = URI(ud.url)
        basecmd = 'sftp -oBatchMode=yes'
        port = ''
        if urlo.port:
            port = '-P %d' % urlo.port
            urlo.port = None

        dldir = data.getVar('DL_DIR', d, True)
        lpath = os.path.join(dldir, ud.localfile)

        user = ''
        if urlo.userinfo:
            user = urlo.userinfo + '@'

        path = urlo.path

        # Support URIs relative to the user's home directory, with
        # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
        if path[:3] == '/~/':
            path = path[3:]

        remote = '%s%s:%s' % (user, urlo.hostname, path)

        cmd = '%s %s %s %s' % (basecmd, port, remote, lpath)

        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)
        return True
Example #5
    def download(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        if checkonly:
            fetchcmd = data.getVar("CHECKCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget --spider -t 5 --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
        elif os.path.exists(ud.localpath):
            # file exists, but we didn't complete it... trying again
            fetchcmd = data.getVar("RESUMECOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -c -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")
        else:
            fetchcmd = data.getVar("FETCHCOMMAND_wget", d, True) or d.expand("/usr/bin/env wget -t 5 -nv --passive-ftp --no-check-certificate -P ${DL_DIR} '${URI}'")

        uri = uri.split(";")[0]
        uri_decoded = list(decodeurl(uri))
        uri_type = uri_decoded[0]
        uri_host = uri_decoded[1]

        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        if not checkonly:
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=checkonly)

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath) and not checkonly:
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        return True
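Note that ${URI} and ${FILE} in these commands are not expanded through the datastore; the method substitutes them with plain str.replace after stripping any ";key=value" suffix from the URL. A minimal illustration with a made-up URL:

    fetchcmd = "/usr/bin/env wget -t 5 -nv -P /downloads '${URI}'"
    uri = "https://example.com/foo.tar.gz;name=foo".split(";")[0]
    fetchcmd = fetchcmd.replace("${URI}", uri)
    # -> /usr/bin/env wget -t 5 -nv -P /downloads 'https://example.com/foo.tar.gz'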
Example #6
    def download(self, urldata, d):
        dldir = d.getVar('DL_DIR', True)

        m = __pattern__.match(urldata.url)
        path = m.group('path')
        host = m.group('host')
        port = m.group('port')
        user = m.group('user')
        password = m.group('pass')

        if port:
            portarg = '-P %s' % port
        else:
            portarg = ''

        if user:
            fr = user
            if password:
                fr += ':%s' % password
            fr += '@%s' % host
        else:
            fr = host
        fr += ':%s' % path


        import commands
        cmd = 'scp -B -r %s %s %s/' % (
            portarg,
            commands.mkarg(fr),
            commands.mkarg(dldir)
        )

        bb.fetch2.check_network_access(d, cmd, urldata.url)

        runfetchcmd(cmd, d)
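This version quotes its arguments with commands.mkarg, which exists only on Python 2 (the commands module was removed in Python 3). The Python 3 equivalent is shlex.quote; a hedged sketch with made-up values:

    import shlex

    portarg = ''
    fr = "user@example.com:/srv/files/foo.tar.gz"  # made-up remote spec
    dldir = "/downloads"                           # made-up DL_DIR
    cmd = 'scp -B -r %s %s %s/' % (portarg, shlex.quote(fr), shlex.quote(dldir))

Example #13 below is the Python 3 descendant of this method and simply drops the quoting.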
Example #7
        def fetch_uri(uri, ud, d):
            if checkonly:
                fetchcmd = data.getVar("CHECKCOMMAND", d, True)
            elif os.path.exists(ud.localpath):
                # file exists, but we didn't complete it... trying again
                fetchcmd = data.getVar("RESUMECOMMAND", d, True)
            else:
                fetchcmd = data.getVar("FETCHCOMMAND", d, True)

            uri = uri.split(";")[0]
            uri_decoded = list(decodeurl(uri))
            uri_type = uri_decoded[0]
            uri_host = uri_decoded[1]

            fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
            fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
            if not checkonly:
                logger.info("fetch " + uri)
                logger.debug(2, "executing " + fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd)
            runfetchcmd(fetchcmd, d, quiet=checkonly)

            # Sanity check since wget can pretend it succeeded when it didn't
            # Also, this used to happen if sourceforge sent us to the mirror page
            if not os.path.exists(ud.localpath) and not checkonly:
                raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
Example #8
 def _getdependencies(self, pkg, data, version, d, ud):
     pkgfullname = pkg
     if version:
         pkgfullname += "@%s" % version
     logger.debug(2, "Calling getdeps on %s" % pkg)
     fetchcmd = "npm view %s dist.tarball --registry %s" % (pkgfullname, ud.registry)
     output = runfetchcmd(fetchcmd, d, True)
     # npm may resolve multiple versions
     outputarray = output.strip().splitlines()
     # we just take the latest version npm resolved
     #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
     outputurl = outputarray[-1].rstrip()
     if len(outputarray) > 1:
         # remove the preceding version/name from npm output and then the
         # first and last quotes
         outputurl = outputurl.split(" ")[1][1:-1]
     data[pkg] = {}
     data[pkg]['tgz'] = os.path.basename(outputurl)
     self._runwget(ud, d, "%s %s" % (self.basecmd, outputurl), False)
     #fetchcmd = "npm view %s@%s dependencies --json" % (pkg, version)
     fetchcmd = "npm view %s dependencies --json --registry %s" % (pkgfullname, ud.registry)
     output = runfetchcmd(fetchcmd, d, True)
     try:
         depsfound = json.loads(output)
     except:
         # just assume there are no deps to be loaded here
         return
     data[pkg]['deps'] = {}
     for dep, version in depsfound.iteritems():
         self._getdependencies(dep, data[pkg]['deps'], version, d, ud)
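The URL handling above copes with `npm view` printing one line per resolved version, each prefixed by the package/version token with the tarball URL in quotes; only the last (newest) line is kept. An illustration of that slicing with made-up output:

    output = ("pkg@1.0.0 'https://registry.example.org/pkg/-/pkg-1.0.0.tgz'\n"
              "pkg@1.1.0 'https://registry.example.org/pkg/-/pkg-1.1.0.tgz'\n")
    outputarray = output.strip().splitlines()
    outputurl = outputarray[-1].rstrip()
    if len(outputarray) > 1:
        # drop the leading name/version token, then the surrounding quotes
        outputurl = outputurl.split(" ")[1][1:-1]
    assert outputurl == "https://registry.example.org/pkg/-/pkg-1.1.0.tgz"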
Example #9
    def download(self, ud, d):
        """Fetch urls"""

        urlo = URI(ud.url)
        basecmd = "sftp -oPasswordAuthentication=no"
        port = ""
        if urlo.port:
            port = "-P %d" % urlo.port
            urlo.port = None

        dldir = data.getVar("DL_DIR", d, True)
        lpath = os.path.join(dldir, ud.localfile)

        user = ""
        if urlo.userinfo:
            user = urlo.userinfo + "@"

        path = urlo.path

        # Support URIs relative to the user's home directory, with
        # the tilde syntax. (E.g. <sftp://example.com/~/foo.diff>).
        if path[:3] == "/~/":
            path = path[3:]

        remote = "%s%s:%s" % (user, urlo.hostname, path)

        cmd = "%s %s %s %s" % (basecmd, port, commands.mkarg(remote), commands.mkarg(lpath))

        bb.fetch2.check_network_access(d, cmd, ud.url)
        runfetchcmd(cmd, d)
        return True
Example #10
    def download(self, loc, ud, d):
        """Fetch urls"""

        svkroot = ud.host + ud.path

        svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)

        if ud.revision:
            svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)

        # create temp directory
        localdata = data.createCopy(d)
        data.update_data(localdata)
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
        tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmpfile.strip()
        if not tmpfile:
            logger.error("Fetch: unable to create temporary directory")
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

        # check out sources there
        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.debug(1, "Running %s", svkcmd)
        runfetchcmd(svkcmd, d, cleanup = [tmpfile])

        os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
        # tar them up to a defined filename
        runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])

        # cleanup
        bb.utils.prunedir(tmpfile)
Example #11
 def build_mirror_data(self, url, ud, d):
     # Generate a mirror tarball if needed
     if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
         os.chdir(ud.clonedir)
         logger.info("Creating tarball of git repository")
         runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
         runfetchcmd("touch %s.done" % (ud.fullmirror), d)
Example #12
    def copy_submodules(self, submodules, ud, destdir, d):
        if ud.bareclone:
            repo_conf = destdir
        else:
            repo_conf = os.path.join(destdir, '.git')

        if submodules and not os.path.exists(os.path.join(repo_conf, 'modules')):
            os.mkdir(os.path.join(repo_conf, 'modules'))

        for module, md in submodules.items():
            srcpath = os.path.join(ud.clonedir, 'modules', md['path'])
            modpath = os.path.join(repo_conf, 'modules', md['path'])

            if os.path.exists(srcpath):
                if os.path.exists(os.path.join(srcpath, '.git')):
                    srcpath = os.path.join(srcpath, '.git')

                target = modpath
                if os.path.exists(modpath):
                    target = os.path.dirname(modpath)

                os.makedirs(os.path.dirname(target), exist_ok=True)
                runfetchcmd("cp -fpLR %s %s" % (srcpath, target), d)
            elif os.path.exists(modpath):
                # Module already exists, likely unpacked from a shallow mirror clone
                pass
            else:
                # This is fatal, as we do NOT want git-submodule to hit the network
                raise bb.fetch2.FetchError('Submodule %s does not exist in %s or %s.' % (module, srcpath, modpath))
Example #13
File: ssh.py Project: kraj/bitbake
    def download(self, urldata, d):
        dldir = d.getVar("DL_DIR")

        m = __pattern__.match(urldata.url)
        path = m.group("path")
        host = m.group("host")
        port = m.group("port")
        user = m.group("user")
        password = m.group("pass")

        if port:
            portarg = "-P %s" % port
        else:
            portarg = ""

        if user:
            fr = user
            if password:
                fr += ":%s" % password
            fr += "@%s" % host
        else:
            fr = host
        fr += ":%s" % path

        cmd = "scp -B -r %s %s %s/" % (portarg, fr, dldir)

        bb.fetch2.check_network_access(d, cmd, urldata.url)

        runfetchcmd(cmd, d)
Example #14
    def _runwget(self, ud, d, command, quiet, workdir=None):

        progresshandler = WgetProgressHandler(d)

        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command, ud.url)
        runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)
Example #15
    def download(self, ud, d):
        """Fetch url"""

        # Make a fresh view
        bb.utils.mkdirhier(ud.ccasedir)
        self._write_configspec(ud, d)
        cmd = self._build_ccase_command(ud, 'mkview')
        logger.info("creating view [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        try:
            runfetchcmd(cmd, d)
        except FetchError as e:
            if "CRCLI2008E" in e.msg:
                raise FetchError("%s\n%s\n" % (e.msg, "Call `rcleartool login` in your console to authenticate to the clearcase server before running bitbake."))
            else:
                raise e

        # Set configspec: Setting the configspec effectively fetches the files as defined in the configspec
        cmd = self._build_ccase_command(ud, 'setcs')
        logger.info("fetching data [VOB=%s label=%s view=%s]", ud.vob, ud.label, ud.viewname)
        bb.fetch2.check_network_access(d, cmd, ud.url)
        output = runfetchcmd(cmd, d, workdir=ud.viewdir)
        logger.info("%s", output)

        # Copy the configspec to the viewdir so we have it in our source tarball later
        shutil.copyfile(ud.configspecfile, os.path.join(ud.viewdir, ud.csname))

        # Clean clearcase meta-data before tar

        runfetchcmd('tar -czf "%s" .' % (ud.localpath), d, cleanup = [ud.localpath])

        # Clean up so we can create a new view next time
        self.clean(ud, d)
Example #16
    def download(self, uri, ud, d, checkonly = False):
        """Fetch urls"""

        basecmd = d.getVar("FETCHCMD_wget", True) or "/usr/bin/env wget -t 2 -T 30 -nv --passive-ftp --no-check-certificate"

        if not checkonly and 'downloadfilename' in ud.parm:
            dldir = d.getVar("DL_DIR", True)
            bb.utils.mkdirhier(os.path.dirname(dldir + os.sep + ud.localfile))
            basecmd += " -O " + dldir + os.sep + ud.localfile

        if checkonly:
            fetchcmd = d.getVar("CHECKCOMMAND_wget", True) or d.expand(basecmd + " --spider '${URI}'")
        elif os.path.exists(ud.localpath):
            # file exists, but we didn't complete it... trying again
            fetchcmd = d.getVar("RESUMECOMMAND_wget", True) or d.expand(basecmd + " -c -P ${DL_DIR} '${URI}'")
        else:
            fetchcmd = d.getVar("FETCHCOMMAND_wget", True) or d.expand(basecmd + " -P ${DL_DIR} '${URI}'")

        uri = uri.split(";")[0]

        fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
        fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
        if not checkonly:
            logger.info("fetch " + uri)
            logger.debug(2, "executing " + fetchcmd)
        bb.fetch2.check_network_access(d, fetchcmd)
        runfetchcmd(fetchcmd, d, quiet=checkonly)

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(ud.localpath) and not checkonly:
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)

        return True
Example #17
 def uses_submodules(self, ud, d):
     for name in ud.names:
         try:
             runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True)
             return True
         except bb.fetch.FetchError:
             pass
     return False
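This works because `git show <rev>:.gitmodules` exits non-zero when the revision carries no .gitmodules file, which runfetchcmd surfaces as a FetchError. A standalone equivalent of the same probe (a sketch assuming a git checkout in repo_dir):

    import subprocess

    def has_gitmodules(repo_dir, rev="HEAD"):
        # True if <rev> in repo_dir carries a .gitmodules file;
        # `git show` exits non-zero otherwise.
        result = subprocess.run(
            ["git", "-C", repo_dir, "show", "%s:.gitmodules" % rev],
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        return result.returncode == 0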
Example #18
    def update_annex(self, ud, d):
        try:
            runfetchcmd("%s annex get --all" % (ud.basecmd), d, quiet=True)
        except bb.fetch.FetchError:
            return False
        runfetchcmd("chmod u+w -R %s/annex" % (ud.clonedir), d, quiet=True)

        return True
Example #19
    def uses_annex(self, ud, d):
        for name in ud.names:
            try:
                runfetchcmd("%s rev-list git-annex" % (ud.basecmd), d, quiet=True)
                return True
            except bb.fetch.FetchError:
                pass

        return False
Example #20
    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to a read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            logger.info("Creating tarball of git repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d, workdir=ud.clonedir)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=ud.clonedir)
Example #21
    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs == "1" and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to a read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            os.chdir(ud.pkgdir)
            logger.info("Creating tarball of hg repository")
            runfetchcmd("tar -czf %s %s" % (ud.fullmirror, ud.module), d)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d)
Example #22
    def build_mirror_data(self, ud, d):
        # Generate a mirror tarball if needed
        if ud.write_tarballs and not os.path.exists(ud.fullmirror):
            # it's possible that this symlink points to a read-only filesystem with PREMIRROR
            if os.path.islink(ud.fullmirror):
                os.unlink(ud.fullmirror)

            dldir = d.getVar("DL_DIR", True)
            logger.info("Creating tarball of npm data")
            runfetchcmd("tar -cJf %s npm/%s npm/%s" % (ud.fullmirror, ud.bbnpmmanifest, ud.pkgname), d,
                        workdir=dldir)
            runfetchcmd("touch %s.done" % (ud.fullmirror), d, workdir=dldir)
Example #23
    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        # Copy over the submodules' fetched histories too.
        if ud.bareclone:
            repo_conf = ud.destdir
        else:
            repo_conf = os.path.join(ud.destdir, '.git')

        update_submodules = False
        paths = {}
        uris = {}
        local_paths = {}
        for name in ud.names:
            try:
                gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
            except:
                # No submodules to update
                continue

            submodules = self.parse_gitmodules(gitmodules)
            self.copy_submodules(submodules, ud, ud.destdir, d)

            submodules_queue = [(module, os.path.join(repo_conf, 'modules', md['path'])) for module, md in submodules.items()]
            while len(submodules_queue) != 0:
                module, modpath = submodules_queue.pop()

                # add submodule children recursively
                try:
                    gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
                    for m, md in self.parse_gitmodules(gitmodules).items():
                        submodules_queue.append([m, os.path.join(modpath, 'modules', md['path'])])
                except:
                    # no children
                    pass


                # There are submodules to update
                update_submodules = True

                # Determine (from the submodule) the correct url to reference
                try:
                    output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
                except bb.fetch2.FetchError as e:
                    # No remote url defined in this submodule
                    continue

                local_paths[module] = output

                # Setup the local URL properly (like git submodule init or sync would do...)
                runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.destdir)

                # Ensure the submodule repository is NOT set to bare, since we're checking it out...
                runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)

        if update_submodules:
            # Run submodule update, this sets up the directories -- without touching the config
            runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
Example #24
File: git.py Project: zedian/poky
    def clone_shallow_local(self, ud, dest, d):
        """Clone the repo and make it shallow.

        The upstream url of the new clone isn't set at this time, as it'll be
        set correctly when unpacked."""
        runfetchcmd("%s clone %s %s %s" % (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)

        to_parse, shallow_branches = [], []
        for name in ud.names:
            revision = ud.revisions[name]
            depth = ud.shallow_depths[name]
            if depth:
                to_parse.append('%s~%d^{}' % (revision, depth - 1))

            # For nobranch, we need a ref, otherwise the commits will be
            # removed, and for non-nobranch, we truncate the branch to our
            # srcrev, to avoid keeping unnecessary history beyond that.
            branch = ud.branches[name]
            if ud.nobranch:
                ref = "refs/shallow/%s" % name
            elif ud.bareclone:
                ref = "refs/heads/%s" % branch
            else:
                ref = "refs/remotes/origin/%s" % branch

            shallow_branches.append(ref)
            runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision), d, workdir=dest)

        # Map srcrev+depths to revisions
        parsed_depths = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join(to_parse)), d, workdir=dest)

        # Resolve specified revisions
        parsed_revs = runfetchcmd("%s rev-parse %s" % (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)), d, workdir=dest)
        shallow_revisions = parsed_depths.splitlines() + parsed_revs.splitlines()

        # Apply extra ref wildcards
        all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' % ud.basecmd,
                               d, workdir=dest).splitlines()
        for r in ud.shallow_extra_refs:
            if not ud.bareclone:
                r = r.replace('refs/heads/', 'refs/remotes/origin/')

            if '*' in r:
                matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
                shallow_branches.extend(matches)
            else:
                shallow_branches.append(r)

        # Make the repository shallow
        shallow_cmd = [self.make_shallow_path, '-s']
        for b in shallow_branches:
            shallow_cmd.append('-r')
            shallow_cmd.append(b)
        shallow_cmd.extend(shallow_revisions)
        runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
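The `%s~%d^{}` spelling used for to_parse is plain git revision syntax: ~N walks N first-parent steps back from the srcrev, and ^{} peels the result to a commit object, so depth - 1 yields the oldest commit the shallow history keeps. For instance, with a shallow depth of 3 (made-up revision):

    revision, depth = "0123abc", 3  # made-up srcrev and shallow depth
    spec = '%s~%d^{}' % (revision, depth - 1)
    assert spec == "0123abc~2^{}"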
Example #25
File: svn.py Project: kacf/poky
    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
            svnupdatecmd = self._buildsvncommand(ud, d, "update")
            logger.info("Update " + ud.url)
            # We need to attempt to run svn upgrade first in case it's an older working format
            try:
                runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
            except FetchError:
                pass
            logger.debug(1, "Running %s", svnupdatecmd)
            bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
            runfetchcmd(svnupdatecmd, d, workdir=ud.moddir)
        else:
            svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            logger.debug(1, "Running %s", svnfetchcmd)
            bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
            runfetchcmd(svnfetchcmd, d, workdir=ud.pkgdir)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude='.svn'"

        # tar them up to a defined filename
        runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.path_spec), d,
                    cleanup=[ud.localpath], workdir=ud.pkgdir)
Example #26
    def download(self, ud, d):
        """Fetch url"""

        logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.pkgdir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
            # Found the source, check whether we need to pull
            updatecmd = self._buildhgcommand(ud, d, "update")
            os.chdir(ud.moddir)
            logger.debug(1, "Running %s", updatecmd)
            try:
                runfetchcmd(updatecmd, d)
            except bb.fetch2.FetchError:
                # Running pull in the repo
                pullcmd = self._buildhgcommand(ud, d, "pull")
                logger.info("Pulling " + ud.url)
                # update sources there
                os.chdir(ud.moddir)
                logger.debug(1, "Running %s", pullcmd)
                bb.fetch2.check_network_access(d, pullcmd, ud.url)
                runfetchcmd(pullcmd, d)
                try:
                    os.unlink(ud.fullmirror)
                except OSError as exc:
                    if exc.errno != errno.ENOENT:
                        raise

        # No source found, clone it.
        if not os.path.exists(ud.moddir):
            fetchcmd = self._buildhgcommand(ud, d, "fetch")
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(ud.pkgdir)
            os.chdir(ud.pkgdir)
            logger.debug(1, "Running %s", fetchcmd)
            bb.fetch2.check_network_access(d, fetchcmd, ud.url)
            runfetchcmd(fetchcmd, d)

        # Even when we clone (fetch), we still need to update as hg's clone
        # won't check out the specified revision if it's on a branch
        updatecmd = self._buildhgcommand(ud, d, "update")
        os.chdir(ud.moddir)
        logger.debug(1, "Running %s", updatecmd)
        runfetchcmd(updatecmd, d)
Example #27
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % (subdir)
            def_destsuffix = "%s/" % os.path.basename(subdir)
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        cloneflags = "-s -n"
        if ud.bareclone:
            cloneflags += " --mirror"

        runfetchcmd("git clone %s %s/ %s" % (cloneflags, ud.clonedir, destdir), d)
        if not ud.nocheckout:
            os.chdir(destdir)
            if subdir != "":
                runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            else:
                runfetchcmd("%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]), d)
        return True
Example #28
 def update_submodules(self, ud, d):
     # We have to convert bare -> full repo, do the submodule bit, then convert back
     tmpclonedir = ud.clonedir + ".tmp"
     gitdir = tmpclonedir + os.sep + ".git"
     bb.utils.remove(tmpclonedir, True)
     os.mkdir(tmpclonedir)
     os.rename(ud.clonedir, gitdir)
     runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'", d)
     os.chdir(tmpclonedir)
     runfetchcmd(ud.basecmd + " reset --hard", d)
     runfetchcmd(ud.basecmd + " submodule init", d)
     runfetchcmd(ud.basecmd + " submodule update", d)
     runfetchcmd("sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'", d)
     os.rename(gitdir, ud.clonedir)
     bb.utils.remove(tmpclonedir, True)
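The sed calls above flip the bare flag by editing the config file directly; Example #23 performs the same toggle with git's own config command, which is less fragile than pattern matching. A sketch of that alternative (assuming the same ud and d objects, on a fetcher whose runfetchcmd accepts workdir):

    # before the submodule work:
    runfetchcmd(ud.basecmd + " config core.bare false", d, workdir=tmpclonedir)
    # and after it:
    runfetchcmd(ud.basecmd + " config core.bare true", d, workdir=tmpclonedir)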
Example #29
        def download_submodule(ud, url, module, modpath, d):
            url += ";bareclone=1;nobranch=1"

            # Is the following still needed?
            #url += ";nocheckout=1"

            try:
                newfetch = Fetch([url], d, cache=False)
                newfetch.download()
                # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
                runfetchcmd("%s config --add bitbake.srcrev %s" % \
                            (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
            except Exception as e:
                logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
                raise
Example #30
    def download(self, ud, d):
        """ Get the list of files, fetch each one """
        filelist = self._p4listfiles(ud, d)
        if not filelist:
            raise FetchError('No files found in depot %s@%s' % (ud.host, ud.path))

        bb.utils.remove(ud.pkgdir, True)
        bb.utils.mkdirhier(ud.pkgdir)

        for afile in filelist:
            p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
            bb.fetch2.check_network_access(d, p4fetchcmd, ud.url)
            runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)

        runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)
Example #31
File: git.py Project: opencalc/poky
    def download(self, loc, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            clone_cmd = "%s clone --bare --mirror %s://%s%s%s %s" % \
                  (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir)
            bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud.revisions[name], d):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote prune origin" % ud.basecmd, d)
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd(
                "%s remote add --mirror origin %s://%s%s%s" %
                (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
            fetch_cmd = "%s fetch --all -t" % ud.basecmd
            bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd,
                        d)
            ud.repochanged = True
Example #32
    def download(self, ud, d):
        """Fetch url"""

        # A current clone is preferred to either tarball, a shallow tarball is
        # preferred to an out of date clone, and a missing clone will use
        # either tarball.
        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(
                ud, d):
            ud.localpath = ud.fullshallow
            return
        elif os.path.exists(ud.fullmirror) and self.need_update(ud, d):
            if not os.path.exists(ud.clonedir):
                bb.utils.mkdirhier(ud.clonedir)
                runfetchcmd("tar -xzf %s" % ud.fullmirror,
                            d,
                            workdir=ud.clonedir)
            else:
                tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
                runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=tmpdir)
                fetch_cmd = "LANG=C %s fetch -f --progress %s " % (
                    ud.basecmd, shlex.quote(tmpdir))
                runfetchcmd(fetch_cmd, d, workdir=ud.clonedir)
        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (
                ud.basecmd, shlex.quote(repourl), ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(clone_cmd, d, log=progresshandler)

        # Update the checkout if needed
        if self.clonedir_need_update(ud, d):
            output = runfetchcmd("%s remote" % ud.basecmd,
                                 d,
                                 quiet=True,
                                 workdir=ud.clonedir)
            if "origin" in output:
                runfetchcmd("%s remote rm origin" % ud.basecmd,
                            d,
                            workdir=ud.clonedir)

            runfetchcmd("%s remote add --mirror=fetch origin %s" %
                        (ud.basecmd, shlex.quote(repourl)),
                        d,
                        workdir=ud.clonedir)
            fetch_cmd = "LANG=C %s fetch -f --progress %s refs/*:refs/*" % (
                ud.basecmd, shlex.quote(repourl))
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-refs --all" % ud.basecmd,
                        d,
                        workdir=ud.clonedir)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd,
                        d,
                        workdir=ud.clonedir)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise

        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                raise bb.fetch2.FetchError(
                    "Unable to find revision %s in branch %s even from upstream"
                    % (ud.revisions[name], ud.branches[name]))

        if ud.shallow and ud.write_shallow_tarballs:
            missing_rev = self.clonedir_need_shallow_revs(ud, d)
            if missing_rev:
                raise bb.fetch2.FetchError(
                    "Unable to find revision %s even from upstream" %
                    missing_rev)

        if self._contains_lfs(ud, d, ud.clonedir) and self._need_lfs(ud):
            # Unpack temporary working copy, use it to run 'git checkout' to force pre-fetching
            # of all LFS blobs needed at the srcrev.
            #
            # It would be nice to just do this inline here by running 'git-lfs fetch'
            # on the bare clonedir, but that operation requires a working copy on some
            # releases of Git LFS.
            tmpdir = tempfile.mkdtemp(dir=d.getVar('DL_DIR'))
            try:
                # Do the checkout. This implicitly involves a Git LFS fetch.
                Git.unpack(self, ud, tmpdir, d)

                # Scoop up a copy of any stuff that Git LFS downloaded. Merge them into
                # the bare clonedir.
                #
                # As this procedure is invoked repeatedly on incremental fetches as
                # a recipe's SRCREV is bumped throughout its lifetime, this will
                # result in a gradual accumulation of LFS blobs in <ud.clonedir>/lfs
                # corresponding to all the blobs reachable from the different revs
                # fetched across time.
                #
                # Only do this if the unpack resulted in a .git/lfs directory being
                # created; this only happens if at least one blob needed to be
                # downloaded.
                if os.path.exists(os.path.join(tmpdir, "git", ".git", "lfs")):
                    runfetchcmd("tar -cf - lfs | tar -xf - -C %s" %
                                ud.clonedir,
                                d,
                                workdir="%s/git/.git" % tmpdir)
            finally:
                bb.utils.remove(tmpdir, recurse=True)
Example #33
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subdir")
        subpath = ud.parm.get("subpath")
        readpathspec = ""
        def_destsuffix = "git/"

        if subpath:
            readpathspec = ":%s" % subpath
            def_destsuffix = "%s/" % os.path.basename(subpath.rstrip('/'))

        if subdir:
            # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
            if os.path.isabs(subdir):
                if not os.path.realpath(subdir).startswith(
                        os.path.realpath(destdir)):
                    raise bb.fetch2.UnpackError(
                        "subdir argument isn't a subdirectory of unpack root %s"
                        % destdir, ud.url)
                destdir = subdir
            else:
                destdir = os.path.join(destdir, subdir)
            def_destsuffix = ""

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        need_lfs = self._need_lfs(ud)

        if not need_lfs:
            ud.basecmd = "GIT_LFS_SKIP_SMUDGE=1 " + ud.basecmd

        source_found = False
        source_error = []

        if not source_found:
            clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
            if clonedir_is_up_to_date:
                runfetchcmd(
                    "%s clone %s %s/ %s" %
                    (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
                source_found = True
            else:
                source_error.append(
                    "clone directory not available or not up to date: " +
                    ud.clonedir)

        if not source_found:
            if ud.shallow:
                if os.path.exists(ud.fullshallow):
                    bb.utils.mkdirhier(destdir)
                    runfetchcmd("tar -xzf %s" % ud.fullshallow,
                                d,
                                workdir=destdir)
                    source_found = True
                else:
                    source_error.append("shallow clone not available: " +
                                        ud.fullshallow)
            else:
                source_error.append("shallow clone not enabled")

        if not source_found:
            raise bb.fetch2.UnpackError(
                "No up to date source found: " + "; ".join(source_error),
                ud.url)

        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" %
                    (ud.basecmd, shlex.quote(repourl)),
                    d,
                    workdir=destdir)

        if self._contains_lfs(ud, d, destdir):
            if need_lfs and not self._find_git_lfs(d):
                raise bb.fetch2.FetchError(
                    "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)"
                    % (repourl))
            elif not need_lfs:
                bb.note(
                    "Repository %s has LFS content but it is not being fetched"
                    % (repourl))

        if not ud.nocheckout:
            if subpath:
                runfetchcmd(
                    "%s read-tree %s%s" %
                    (ud.basecmd, ud.revisions[ud.names[0]], readpathspec),
                    d,
                    workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd,
                            d,
                            workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" %
                            (ud.basecmd, ud.revisions[ud.names[0]]),
                            d,
                            workdir=destdir)

        return True
Example #34
    def download(self, ud, d):
        """Fetch url"""

        if ud.user:
            username = ud.user + '@'
        else:
            username = ""

        ud.repochanged = not os.path.exists(ud.fullmirror)

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = "%s://%s%s%s" % (ud.proto, username, ud.host, ud.path)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "%s clone --bare --mirror %s %s" % (
                ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud.revisions[name], ud.branches[name],
                                      d):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote rm origin" % ud.basecmd, d)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd(
                "%s remote add --mirror=fetch origin %s" %
                (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd,
                                                                  repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd,
                        d)
            ud.repochanged = True
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud.revisions[name], ud.branches[name],
                                      d):
                raise bb.fetch2.FetchError(
                    "Unable to find revision %s in branch %s even from upstream"
                    % (ud.revisions[name], ud.branches[name]))
Example #35
    def _populate_shallowclone(self, repourl, source, dest, gitcmd, branchinfo, nobranch, allbranches, bareclone, d):
        shallow_revisions = []
        for name, (shallow, revision, branch) in branchinfo.iteritems():
            if not shallow:
                continue

            try:
                shallow_revision = runfetchcmd("GIT_DIR=%s %s rev-parse %s^{}" % (source, gitcmd, shallow), d).rstrip()
            except bb.fetch2.FetchError:
                try:
                    shallow = int(shallow)
                except ValueError:
                    raise bb.fetch2.FetchError("Invalid BB_GIT_SHALLOW_%s: %s" % (name, shallow))
                else:
                    shallow_revision = runfetchcmd("GIT_DIR=%s %s rev-parse %s~%d^{}" % (source, gitcmd, revision, shallow - 1), d).rstrip()

            shallow_revisions.append(shallow_revision)

        cloneflags = "-s -n"
        if bareclone:
            cloneflags += " --mirror"
        runfetchcmd("%s clone %s %s %s" % (gitcmd, cloneflags, source, dest), d)

        os.chdir(dest)
        if allbranches:
            shallow_branches = None
        else:
            runfetchcmd("%s for-each-ref --format='%%(refname)' | xargs -n 1 %s update-ref -d" % (gitcmd, gitcmd), d)
            runfetchcmd('%s update-ref -d HEAD' % gitcmd, d)
            shallow_branches = []
            for name, (shallow, revision, branch) in branchinfo.iteritems():
                if nobranch:
                    runfetchcmd("%s update-ref refs/shallow/%s %s" % (gitcmd, name, revision), d)
                    shallow_branches.append('refs/shallow/%s' % name)
                else:
                    runfetchcmd("%s update-ref refs/remotes/origin/%s %s" % (gitcmd, branch, revision), d)
                    shallow_branches.append("origin/%s" % branch)

        git_dir = runfetchcmd('%s rev-parse --git-dir' % gitcmd, d).rstrip()
        self._make_repo_shallow(shallow_revisions, git_dir, gitcmd, d, branches=shallow_branches)

        alternates_file = os.path.join(git_dir, "objects", "info", "alternates")
        os.unlink(alternates_file)
Example #36
    def clone_shallow_local(self, ud, dest, d):
        """Clone the repo and make it shallow.

        The upstream url of the new clone isn't set at this time, as it'll be
        set correctly when unpacked."""
        runfetchcmd(
            "%s clone %s %s %s" %
            (ud.basecmd, ud.cloneflags, ud.clonedir, dest), d)

        to_parse, shallow_branches = [], []
        for name in ud.names:
            revision = ud.revisions[name]
            depth = ud.shallow_depths[name]
            if depth:
                to_parse.append('%s~%d^{}' % (revision, depth - 1))

            # For nobranch, we need a ref, otherwise the commits will be
            # removed, and for non-nobranch, we truncate the branch to our
            # srcrev, to avoid keeping unnecessary history beyond that.
            branch = ud.branches[name]
            if ud.nobranch:
                ref = "refs/shallow/%s" % name
            elif ud.bareclone:
                ref = "refs/heads/%s" % branch
            else:
                ref = "refs/remotes/origin/%s" % branch

            shallow_branches.append(ref)
            runfetchcmd("%s update-ref %s %s" % (ud.basecmd, ref, revision),
                        d,
                        workdir=dest)

        # Map srcrev+depths to revisions
        parsed_depths = runfetchcmd("%s rev-parse %s" %
                                    (ud.basecmd, " ".join(to_parse)),
                                    d,
                                    workdir=dest)

        # Resolve specified revisions
        parsed_revs = runfetchcmd(
            "%s rev-parse %s" %
            (ud.basecmd, " ".join('"%s^{}"' % r for r in ud.shallow_revs)),
            d,
            workdir=dest)
        shallow_revisions = parsed_depths.splitlines(
        ) + parsed_revs.splitlines()

        # Apply extra ref wildcards
        all_refs = runfetchcmd('%s for-each-ref "--format=%%(refname)"' %
                               ud.basecmd,
                               d,
                               workdir=dest).splitlines()
        for r in ud.shallow_extra_refs:
            if not ud.bareclone:
                r = r.replace('refs/heads/', 'refs/remotes/origin/')

            if '*' in r:
                matches = filter(lambda a: fnmatch.fnmatchcase(a, r), all_refs)
                shallow_branches.extend(matches)
            else:
                shallow_branches.append(r)

        # Make the repository shallow
        shallow_cmd = [self.make_shallow_path, '-s']
        for b in shallow_branches:
            shallow_cmd.append('-r')
            shallow_cmd.append(b)
        shallow_cmd.extend(shallow_revisions)
        runfetchcmd(subprocess.list2cmdline(shallow_cmd), d, workdir=dest)
Example #37
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % (subdir)
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        cloneflags = "-s -n"
        if ud.bareclone:
            cloneflags += " --mirror"

        runfetchcmd(
            "%s clone %s %s/ %s" %
            (ud.basecmd, cloneflags, ud.clonedir, destdir), d)
        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl),
                    d,
                    workdir=destdir)
        if not ud.nocheckout:
            if subdir != "":
                runfetchcmd(
                    "%s read-tree %s%s" %
                    (ud.basecmd, ud.revisions[ud.names[0]], readpathspec),
                    d,
                    workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd,
                            d,
                            workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" %
                            (ud.basecmd, ud.revisions[ud.names[0]]),
                            d,
                            workdir=destdir)

        return True
Example #38
    def download(self, loc, ud, d):
        """
        Fetch urls
        """

        (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot

        module = parm.get('module', os.path.basename(path))

        localdata = data.createCopy(d)
        data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
        data.update_data(localdata)

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = data.getVar('FETCHCOMMAND', localdata, True)

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(data.expand('${WORKDIR}', localdata))
        data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
        tmpfile, errors = bb.process.run(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
        tmpfile = tmpfile.strip()
        if not tmpfile:
            raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)

        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + loc)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s files %s" % (p4cmd, p4opt, depot))
        p4file = p4file.strip().splitlines()

        if not p4file:
            raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)

        count = 0

        for file in p4file:
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path)+1:]
            where = dest.find("#")

            subprocess.call("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]), shell=True)
            count = count + 1

        if count == 0:
            logger.error("Fetch: no files gathered from the P4 fetch")
            raise FetchError("Fetch: No files gathered from the P4 fetch", loc)

        runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
        # cleanup
        bb.utils.prunedir(tmpfile)
Example #39
    def process_submodules(self, ud, workdir, function, d):
        """
        Iterate over all of the submodules in this repository and execute
        the 'function' for each of them.
        """

        submodules = []
        paths = {}
        revision = {}
        uris = {}
        subrevision = {}

        def parse_gitmodules(gitmodules):
            modules = {}
            module = ""
            for line in gitmodules.splitlines():
                if line.startswith('[submodule'):
                    module = line.split('"')[1]
                    modules[module] = {}
                elif module and line.strip().startswith('path'):
                    path = line.split('=')[1].strip()
                    modules[module]['path'] = path
                elif module and line.strip().startswith('url'):
                    url = line.split('=')[1].strip()
                    modules[module]['url'] = url
            return modules

        # Collect the defined submodules, and their attributes
        for name in ud.names:
            try:
                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=workdir)
            except:
                # No submodules to update
                continue

            for m, md in parse_gitmodules(gitmodules).items():
                try:
                    module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], md['path']), d, quiet=True, workdir=workdir)
                except:
                    # If the command fails, we don't have a valid file to check.  If it doesn't
                    # fail -- it still might be a failure, see next check...
                    module_hash = ""

                if not module_hash:
                    logger.debug(1, "submodule %s is defined, but is not initialized in the repository. Skipping", m)
                    continue

                submodules.append(m)
                paths[m] = md['path']
                revision[m] = ud.revisions[name]
                uris[m] = md['url']
                subrevision[m] = module_hash.split()[2]

                # Convert relative to absolute uri based on parent uri
                if uris[m].startswith('..'):
                    newud = copy.copy(ud)
                    newud.path = os.path.realpath(os.path.join(newud.path, uris[m]))
                    uris[m] = Git._get_repo_url(self, newud)

        for module in submodules:
            # Translate the module url into a SRC_URI

            if "://" in uris[module]:
                # Properly formatted URL already
                proto = uris[module].split(':', 1)[0]
                url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
            else:
                if ":" in uris[module]:
                    # Most likely an SSH style reference
                    proto = "ssh"
                    if ":/" in uris[module]:
                        # Absolute reference, easy to convert..
                        url = "gitsm://" + uris[module].replace(':/', '/', 1)
                    else:
                        # Relative reference, no way to know if this is right!
                        logger.warning("Submodule included by %s refers to relative ssh reference %s.  References may fail if not absolute." % (ud.url, uris[module]))
                        url = "gitsm://" + uris[module].replace(':', '/', 1)
                else:
                    # This has to be a file reference
                    proto = "file"
                    url = "gitsm://" + uris[module]

            url += ';protocol=%s' % proto
            url += ";name=%s" % module
            url += ";subpath=%s" % module

            ld = d.createCopy()
            # Not necessary to set SRC_URI, since we're passing the URI to
            # Fetch.
            #ld.setVar('SRC_URI', url)
            ld.setVar('SRCREV_%s' % module, subrevision[module])

            # Workaround for issues with SRCPV/SRCREV_FORMAT errors; the
            # errors refer to 'multiple' repositories.  Only the repository
            # in the original SRC_URI actually matters...
            ld.setVar('SRCPV', d.getVar('SRCPV'))
            ld.setVar('SRCREV_FORMAT', module)

            function(ud, url, module, paths[module], workdir, ld)

        return submodules != []
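
For reference, parse_gitmodules above only consumes the path and url keys of each submodule stanza. A short sketch of its output for a hypothetical .gitmodules (module name and URL invented):

    gitmodules = '[submodule "libfoo"]\n\tpath = ext/libfoo\n\turl = ../libfoo.git\n'
    # parse_gitmodules(gitmodules) would return:
    #   {'libfoo': {'path': 'ext/libfoo', 'url': '../libfoo.git'}}
    # The leading '..' is what triggers the relative-to-absolute URL
    # conversion against the parent repository's URL further down.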
Example #40
 def _runwget(self, ud, d, command, quiet):
     logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
     bb.fetch2.check_network_access(d, command, ud.url)
     dldir = d.getVar("DL_DIR")
     runfetchcmd(command, d, quiet, workdir=dldir)
Example #41
 def unpack(self, ud, destdir, d):
     FetchMethod.unpack(self, ud, destdir, d)
     bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
     runfetchcmd("repo sync", d, workdir=ud.repodir)
Example #42
    def _getdependencies(self,
                         pkg,
                         data,
                         version,
                         d,
                         ud,
                         optional=False,
                         fetchedlist=None):
        if fetchedlist is None:
            fetchedlist = []
        pkgfullname = pkg
        if version != '*' and '/' not in version:
            pkgfullname += "@'%s'" % version
        logger.debug(2, "Calling getdeps on %s" % pkg)
        fetchcmd = "npm view %s --json --registry %s" % (pkgfullname,
                                                         ud.registry)
        output = runfetchcmd(fetchcmd, d, True)
        pdata = self._parse_view(output)
        if not pdata:
            raise FetchError("The command '%s' returned no output" % fetchcmd)
        if optional:
            pkg_os = pdata.get('os', None)
            if pkg_os:
                if not isinstance(pkg_os, list):
                    pkg_os = [pkg_os]
                blacklist = False
                for item in pkg_os:
                    if item.startswith('!'):
                        blacklist = True
                        break
                if (not blacklist
                        and 'linux' not in pkg_os) or '!linux' in pkg_os:
                    logger.debug(
                        2,
                        "Skipping %s since it's incompatible with Linux" % pkg)
                    return
        #logger.debug(2, "Output URL is %s - %s - %s" % (ud.basepath, ud.basename, ud.localfile))
        outputurl = pdata['dist']['tarball']
        data[pkg] = {}
        data[pkg]['tgz'] = os.path.basename(outputurl)
        if outputurl in fetchedlist:
            return

        self._runwget(
            ud, d, "%s --directory-prefix=%s %s" %
            (self.basecmd, ud.prefixdir, outputurl), False)
        fetchedlist.append(outputurl)

        dependencies = pdata.get('dependencies', {})
        optionalDependencies = pdata.get('optionalDependencies', {})
        dependencies.update(optionalDependencies)
        depsfound = {}
        optdepsfound = {}
        data[pkg]['deps'] = {}
        for dep in dependencies:
            if dep in optionalDependencies:
                optdepsfound[dep] = dependencies[dep]
            else:
                depsfound[dep] = dependencies[dep]
        for dep, version in optdepsfound.items():
            self._getdependencies(dep,
                                  data[pkg]['deps'],
                                  version,
                                  d,
                                  ud,
                                  optional=True,
                                  fetchedlist=fetchedlist)
        for dep, version in depsfound.items():
            self._getdependencies(dep,
                                  data[pkg]['deps'],
                                  version,
                                  d,
                                  ud,
                                  fetchedlist=fetchedlist)
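
The net result of _getdependencies is a nested dict keyed by package name, which the caller dumps as JSON. A sketch of the shape for a hypothetical package (names, versions and tarball names invented):

    data = {
        'express': {
            'tgz': 'express-4.17.1.tgz',    # basename of dist.tarball
            'deps': {
                'accepts': {'tgz': 'accepts-1.3.7.tgz', 'deps': {}},
            },
        },
    }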
Example #43
    def download(self, ud, d):
        """Fetch url"""

        # A current clone is preferred to either tarball, a shallow tarball is
        # preferred to an out of date clone, and a missing clone will use
        # either tarball.
        if ud.shallow and os.path.exists(ud.fullshallow) and self.need_update(
                ud, d):
            ud.localpath = ud.fullshallow
            return
        elif os.path.exists(ud.fullmirror) and not os.path.exists(ud.clonedir):
            bb.utils.mkdirhier(ud.clonedir)
            runfetchcmd("tar -xzf %s" % ud.fullmirror, d, workdir=ud.clonedir)

        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "LANG=C %s clone --bare --mirror \"%s\" %s --progress" % (
                ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(clone_cmd, d, log=progresshandler)

        # Update the checkout if needed
        if self.clonedir_need_update(ud, d):
            output = runfetchcmd("%s remote" % ud.basecmd,
                                 d,
                                 quiet=True,
                                 workdir=ud.clonedir)
            if "origin" in output:
                runfetchcmd("%s remote rm origin" % ud.basecmd,
                            d,
                            workdir=ud.clonedir)

            runfetchcmd("%s remote add --mirror=fetch origin \"%s\"" %
                        (ud.basecmd, repourl),
                        d,
                        workdir=ud.clonedir)
            fetch_cmd = "LANG=C %s fetch -f --progress \"%s\" refs/*:refs/*" % (
                ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-refs --all" % ud.basecmd,
                        d,
                        workdir=ud.clonedir)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd,
                        d,
                        workdir=ud.clonedir)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise

        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                raise bb.fetch2.FetchError(
                    "Unable to find revision %s in branch %s even from upstream"
                    % (ud.revisions[name], ud.branches[name]))

        if ud.shallow and ud.write_shallow_tarballs:
            missing_rev = self.clonedir_need_shallow_revs(ud, d)
            if missing_rev:
                raise bb.fetch2.FetchError(
                    "Unable to find revision %s even from upstream" %
                    missing_rev)
Example #44
    def download(self, ud, d):
        """Fetch url"""

        logger.debug2("Fetch: checking for module directory '" + ud.moddir +
                      "'")

        lf = bb.utils.lockfile(ud.svnlock)

        try:
            if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
                svncmd = self._buildsvncommand(ud, d, "update")
                logger.info("Update " + ud.url)
                # We need to attempt to run svn upgrade first in case it's an older working format
                try:
                    runfetchcmd(ud.basecmd + " upgrade", d, workdir=ud.moddir)
                except FetchError:
                    pass
                logger.debug("Running %s", svncmd)
                bb.fetch2.check_network_access(d, svncmd, ud.url)
                runfetchcmd(svncmd, d, workdir=ud.moddir)
            else:
                svncmd = self._buildsvncommand(ud, d, "fetch")
                logger.info("Fetch " + ud.url)
                # check out sources there
                bb.utils.mkdirhier(ud.pkgdir)
                logger.debug("Running %s", svncmd)
                bb.fetch2.check_network_access(d, svncmd, ud.url)
                runfetchcmd(svncmd, d, workdir=ud.pkgdir)

            if not ("externals" in ud.parm
                    and ud.parm["externals"] == "nowarn"):
                # Warn the user if this had externals (won't catch them all)
                output = runfetchcmd("svn propget svn:externals || true",
                                     d,
                                     workdir=ud.moddir)
                if output:
                    if "--ignore-externals" in svncmd.split():
                        bb.warn("%s contains svn:externals." % ud.url)
                        bb.warn(
                            "These should be added to the recipe SRC_URI as necessary."
                        )
                        bb.warn("svn fetch has ignored externals:\n%s" %
                                output)
                        bb.warn(
                            "To disable this warning add ';externals=nowarn' to the url."
                        )
                    else:
                        bb.debug(1,
                                 "svn repository has externals:\n%s" % output)

            scmdata = ud.parm.get("scmdata", "")
            if scmdata == "keep":
                tar_flags = ""
            else:
                tar_flags = "--exclude='.svn'"

            # tar them up to a defined filename
            runfetchcmd("tar %s -czf %s %s" %
                        (tar_flags, ud.localpath, ud.path_spec),
                        d,
                        cleanup=[ud.localpath],
                        workdir=ud.pkgdir)
        finally:
            bb.utils.unlockfile(lf)
Example #45
    def download(self, ud, d):
        """
        Fetch urls
        """

        (host, depot, user, pswd, parm) = Perforce.doparse(ud.url, d)

        if depot.find('/...') != -1:
            path = depot[:depot.find('/...')]
        else:
            path = depot[:depot.rfind('/')]

        module = parm.get('module', os.path.basename(path))

        # Get the p4 command
        p4opt = ""
        if user:
            p4opt += " -u %s" % (user)

        if pswd:
            p4opt += " -P %s" % (pswd)

        if host:
            p4opt += " -p %s" % (host)

        p4cmd = d.getVar('FETCHCMD_p4', True) or "p4"

        # create temp directory
        logger.debug(2, "Fetch: creating temporary directory")
        bb.utils.mkdirhier(d.expand('${WORKDIR}'))
        mktemp = d.getVar(
            "FETCHCMD_p4mktemp",
            True) or d.expand("mktemp -d -q '${WORKDIR}/oep4.XXXXXX'")
        tmpfile, errors = bb.process.run(mktemp)
        tmpfile = tmpfile.strip()
        if not tmpfile:
            raise FetchError(
                "Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.",
                ud.url)

        if "label" in parm:
            depot = "%s@%s" % (depot, parm["label"])
        else:
            cset = Perforce.getcset(d, depot, host, user, pswd, parm)
            depot = "%s@%s" % (depot, cset)

        os.chdir(tmpfile)
        logger.info("Fetch " + ud.url)
        logger.info("%s%s files %s", p4cmd, p4opt, depot)
        p4file, errors = bb.process.run("%s%s files %s" %
                                        (p4cmd, p4opt, depot))
        p4file = [f.rstrip() for f in p4file.splitlines()]

        if not p4file:
            raise FetchError(
                "Fetch: unable to get the P4 files from %s" % depot, ud.url)

        count = 0

        for file in p4file:
            list = file.split()

            if list[2] == "delete":
                continue

            dest = list[0][len(path) + 1:]
            where = dest.find("#")

            subprocess.call("%s%s print -o %s/%s %s" %
                            (p4cmd, p4opt, module, dest[:where], list[0]),
                            shell=True)
            count = count + 1

        if count == 0:
            logger.error("Fetch: No files gathered from the P4 fetch")
            raise FetchError("Fetch: No files gathered from the P4 fetch",
                             ud.url)

        runfetchcmd("tar -czf %s %s" % (ud.localpath, module),
                    d,
                    cleanup=[ud.localpath])
        # cleanup
        bb.utils.prunedir(tmpfile)
Example #46
 def update_submodules(self, ud, d):
     # We have to convert bare -> full repo, do the submodule bit, then convert back
     tmpclonedir = ud.clonedir + ".tmp"
     gitdir = tmpclonedir + os.sep + ".git"
     bb.utils.remove(tmpclonedir, True)
     os.mkdir(tmpclonedir)
     os.rename(ud.clonedir, gitdir)
     runfetchcmd(
         "sed " + gitdir + "/config -i -e 's/bare.*=.*true/bare = false/'",
         d)
     os.chdir(tmpclonedir)
     runfetchcmd(ud.basecmd + " reset --hard", d)
     runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
     runfetchcmd(ud.basecmd + " submodule init", d)
     runfetchcmd(ud.basecmd + " submodule update", d)
     self._set_relative_paths(tmpclonedir)
     runfetchcmd(
         "sed " + gitdir + "/config -i -e 's/bare.*=.*false/bare = true/'",
         d)
     os.rename(gitdir, ud.clonedir)
     bb.utils.remove(tmpclonedir, True)
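
The sed invocations above toggle the bare flag by editing .git/config in place. A sketch of the same effect expressed through git itself (not what this fetcher runs, shown only for clarity; assumes the working directory is tmpclonedir):

    runfetchcmd(ud.basecmd + " config core.bare false", d)
    # ... checkout and submodule update ...
    runfetchcmd(ud.basecmd + " config core.bare true", d)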
Example #47
    def _runwget(self, ud, d, command, quiet):

        logger.debug(2, "Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command)
        runfetchcmd(command, d, quiet)
Example #48
File: git.py Project: lynxbee/poky
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % (subdir)
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        cloneflags = "-s -n"
        if ud.bareclone:
            cloneflags += " --mirror"

        # Versions of git prior to 1.7.9.2 have issues where foo.git and foo get confused
        # and you end up with some horrible union of the two when you attempt to clone it
        # The least invasive workaround seems to be a symlink to the real directory to
        # fool git into ignoring any .git version that may also be present.
        #
        # The issue is fixed in more recent versions of git so we can drop this hack in future
        # when that version becomes common enough.
        clonedir = ud.clonedir
        if not ud.path.endswith(".git"):
            indirectiondir = destdir[:-1] + ".indirectionsymlink"
            if os.path.exists(indirectiondir):
                os.remove(indirectiondir)
            bb.utils.mkdirhier(os.path.dirname(indirectiondir))
            os.symlink(ud.clonedir, indirectiondir)
            clonedir = indirectiondir

        runfetchcmd(
            "%s clone %s %s/ %s" % (ud.basecmd, cloneflags, clonedir, destdir),
            d)
        os.chdir(destdir)
        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl), d)
        if not ud.nocheckout:
            if subdir != "":
                runfetchcmd(
                    "%s read-tree %s%s" %
                    (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d)
                runfetchcmd("%s branch --set-upstream %s origin/%s" % (ud.basecmd, branchname, \
                            branchname), d)
            else:
                runfetchcmd(
                    "%s checkout %s" % (ud.basecmd, ud.revisions[ud.names[0]]),
                    d)

        return True
Example #49
    def download(self, ud, d):
        """Fetch url"""

        no_clone = not os.path.exists(ud.clonedir)
        need_update = no_clone or self.need_update(ud, d)

        # A current clone is preferred to either tarball, a shallow tarball is
        # preferred to an out of date clone, and a missing clone will use
        # either tarball.
        if ud.shallow and os.path.exists(ud.fullshallow) and need_update:
            ud.localpath = ud.fullshallow
            return
        elif os.path.exists(ud.fullmirror) and no_clone:
            bb.utils.mkdirhier(ud.clonedir)
            os.chdir(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)

        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "%s clone --bare --mirror %s %s" % (ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd)
            runfetchcmd(clone_cmd, d)

        os.chdir(ud.clonedir)
        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                needupdate = True
        if needupdate:
            try: 
                runfetchcmd("%s remote rm origin" % ud.basecmd, d) 
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" % (ud.basecmd, repourl), d)
            fetch_cmd = "%s fetch -f --prune %s refs/*:refs/*" % (ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            runfetchcmd(fetch_cmd, d)
            runfetchcmd("%s prune-packed" % ud.basecmd, d)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
        os.chdir(ud.clonedir)
        for name in ud.names:
            if not self._contains_ref(ud, d, name):
                raise bb.fetch2.FetchError("Unable to find revision %s in branch %s even from upstream" % (ud.revisions[name], ud.branches[name]))
Example #50
    def download(self, ud, d):
        """Fetch url"""

        # If the checkout doesn't exist and the mirror tarball does, extract it
        if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
            bb.utils.mkdirhier(ud.clonedir)
            runfetchcmd("tar -xzf %s" % (ud.fullmirror),
                        d,
                        workdir=ud.clonedir)

        repourl = self._get_repo_url(ud)

        # If the repo still doesn't exist, fallback to cloning it
        if not os.path.exists(ud.clonedir):
            # We do this since git will use a "-l" option automatically for local urls where possible
            if repourl.startswith("file://"):
                repourl = repourl[7:]
            clone_cmd = "LANG=C %s clone --bare --mirror %s %s --progress" % (
                ud.basecmd, repourl, ud.clonedir)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, clone_cmd)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(clone_cmd, d, log=progresshandler)

        # Update the checkout if needed
        needupdate = False
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                needupdate = True
        if needupdate:
            try:
                runfetchcmd("%s remote rm origin" % ud.basecmd,
                            d,
                            workdir=ud.clonedir)
            except bb.fetch2.FetchError:
                logger.debug(1, "No Origin")

            runfetchcmd("%s remote add --mirror=fetch origin %s" %
                        (ud.basecmd, repourl),
                        d,
                        workdir=ud.clonedir)
            fetch_cmd = "LANG=C %s fetch -f --prune --progress %s refs/*:refs/*" % (
                ud.basecmd, repourl)
            if ud.proto.lower() != 'file':
                bb.fetch2.check_network_access(d, fetch_cmd, ud.url)
            progresshandler = GitProgressHandler(d)
            runfetchcmd(fetch_cmd, d, log=progresshandler, workdir=ud.clonedir)
            runfetchcmd("%s prune-packed" % ud.basecmd, d, workdir=ud.clonedir)
            runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd,
                        d,
                        workdir=ud.clonedir)
            try:
                os.unlink(ud.fullmirror)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
        for name in ud.names:
            if not self._contains_ref(ud, d, name, ud.clonedir):
                raise bb.fetch2.FetchError(
                    "Unable to find revision %s in branch %s even from upstream"
                    % (ud.revisions[name], ud.branches[name]))
Example #51
    def download(self, ud, d):

        method = ud.parm.get('method', 'pserver')
        localdir = ud.parm.get('localdir', ud.module)
        cvs_port = ud.parm.get('port', '')

        cvs_rsh = None
        if method == "ext":
            if "rsh" in ud.parm:
                cvs_rsh = ud.parm["rsh"]

        if method == "dir":
            cvsroot = ud.path
        else:
            cvsroot = ":" + method
            cvsproxyhost = d.getVar('CVS_PROXY_HOST', True)
            if cvsproxyhost:
                cvsroot += ";proxy=" + cvsproxyhost
            cvsproxyport = d.getVar('CVS_PROXY_PORT', True)
            if cvsproxyport:
                cvsroot += ";proxyport=" + cvsproxyport
            cvsroot += ":" + ud.user
            if ud.pswd:
                cvsroot += ":" + ud.pswd
            cvsroot += "@" + ud.host + ":" + cvs_port + ud.path

        options = []
        if 'norecurse' in ud.parm:
            options.append("-l")
        if ud.date:
            # treat YYYYMMDDHHMM specially for CVS
            if len(ud.date) == 12:
                options.append("-D \"%s %s:%s UTC\"" %
                               (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
            else:
                options.append("-D \"%s UTC\"" % ud.date)
        if ud.tag:
            options.append("-r %s" % ud.tag)

        cvsbasecmd = d.getVar("FETCHCMD_cvs", True)
        cvscmd = cvsbasecmd + " '-d" + cvsroot + "' co " + " ".join(
            options) + " " + ud.module
        cvsupdatecmd = cvsbasecmd + " '-d" + cvsroot + "' update -d -P " + " ".join(
            options)

        if cvs_rsh:
            cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
            cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)

        # create module directory
        logger.debug(2, "Fetch: checking for module directory")
        pkg = d.getVar('PN', True)
        pkgdir = os.path.join(d.getVar('CVSDIR', True), pkg)
        moddir = os.path.join(pkgdir, localdir)
        if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
            logger.info("Update " + ud.url)
            bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
            # update sources there
            os.chdir(moddir)
            cmd = cvsupdatecmd
        else:
            logger.info("Fetch " + ud.url)
            # check out sources there
            bb.utils.mkdirhier(pkgdir)
            os.chdir(pkgdir)
            logger.debug(1, "Running %s", cvscmd)
            bb.fetch2.check_network_access(d, cvscmd, ud.url)
            cmd = cvscmd

        runfetchcmd(cmd, d, cleanup=[moddir])

        if not os.access(moddir, os.R_OK):
            raise FetchError(
                "Directory %s was not readable despite sucessful fetch?!" %
                moddir, ud.url)

        scmdata = ud.parm.get("scmdata", "")
        if scmdata == "keep":
            tar_flags = ""
        else:
            tar_flags = "--exclude 'CVS'"

        # tar them up to a defined filename
        if 'fullpath' in ud.parm:
            os.chdir(pkgdir)
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
        else:
            os.chdir(moddir)
            os.chdir('..')
            cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath,
                                         os.path.basename(moddir))

        runfetchcmd(cmd, d, cleanup=[ud.localpath])
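
The date handling above expands a 12-digit YYYYMMDDHHMM stamp into a date plus hour:minute form for cvs. A worked example with a hypothetical SRCDATE value:

    date = "202001151230"
    if len(date) == 12:
        opt = '-D "%s %s:%s UTC"' % (date[0:8], date[8:10], date[10:12])
    else:
        opt = '-D "%s UTC"' % date
    print(opt)   # -> -D "20200115 12:30 UTC"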
Example #52
    def unpack(self, ud, destdir, d):
        def unpack_submodules(ud, url, module, modpath, d):
            url += ";bareclone=1;nobranch=1"

            # Figure out where we clone over the bare submodules...
            if ud.bareclone:
                repo_conf = ud.destdir
            else:
                repo_conf = os.path.join(ud.destdir, '.git')

            try:
                newfetch = Fetch([url], d, cache=False)
                newfetch.unpack(root=os.path.dirname(
                    os.path.join(repo_conf, 'modules', module)))
            except Exception as e:
                logger.error('gitsm: submodule unpack failed: %s %s' %
                             (type(e).__name__, str(e)))
                raise

            local_path = newfetch.localpath(url)

            # Correct the submodule references to the local download version...
            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" %
                        {
                            'basecmd': ud.basecmd,
                            'module': module,
                            'url': local_path
                        },
                        d,
                        workdir=ud.destdir)

            if ud.shallow:
                runfetchcmd(
                    "%(basecmd)s config submodule.%(module)s.shallow true" % {
                        'basecmd': ud.basecmd,
                        'module': module
                    },
                    d,
                    workdir=ud.destdir)

            # Ensure the submodule repository is NOT set to bare, since we're checking it out...
            try:
                runfetchcmd("%s config core.bare false" % (ud.basecmd),
                            d,
                            quiet=True,
                            workdir=os.path.join(repo_conf, 'modules', module))
            except:
                logger.error(
                    "Unable to set git config core.bare to false for %s" %
                    os.path.join(repo_conf, 'modules', module))
                raise

        Git.unpack(self, ud, destdir, d)

        ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)

        if not ud.bareclone and ret:
            # All submodules should already be downloaded and configured in the tree.  This simply sets
            # up the configuration and checks out the files.  The main project config should remain
            # unmodified, and no download from the internet should occur.
            runfetchcmd("%s submodule update --recursive --no-fetch" %
                        (ud.basecmd),
                        d,
                        quiet=True,
                        workdir=ud.destdir)
Example #53
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % subdir
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        if ud.shallow and (not os.path.exists(ud.clonedir)
                           or self.need_update(ud, d)):
            bb.utils.mkdirhier(destdir)
            runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=destdir)
        else:
            runfetchcmd(
                "%s clone %s %s/ %s" %
                (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)

        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin %s" % (ud.basecmd, repourl),
                    d,
                    workdir=destdir)
        if not ud.nocheckout:
            if subdir != "":
                runfetchcmd(
                    "%s read-tree %s%s" %
                    (ud.basecmd, ud.revisions[ud.names[0]], readpathspec),
                    d,
                    workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd,
                            d,
                            workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" %
                            (ud.basecmd, ud.revisions[ud.names[0]]),
                            d,
                            workdir=destdir)

        return True
Example #54
 def _run(cmd):
     cmd = "NPM_CONFIG_USERCONFIG=%s " % (
         self.user_config.name) + cmd
     cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % (
         self.global_config_name) + cmd
     return runfetchcmd(cmd, d, workdir=workdir)
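
The wrapper simply prepends the two npm config overrides to whatever command it is given. A sketch of the resulting command line (config paths hypothetical):

    user_config = "/tmp/user.cfg"
    global_config = "/tmp/global.cfg"
    cmd = "npm cache verify"
    cmd = "NPM_CONFIG_USERCONFIG=%s " % user_config + cmd
    cmd = "NPM_CONFIG_GLOBALCONFIG=%s " % global_config + cmd
    print(cmd)
    # -> NPM_CONFIG_GLOBALCONFIG=/tmp/global.cfg NPM_CONFIG_USERCONFIG=/tmp/user.cfg npm cache verify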
Example #55
    def unpack(self, ud, destdir, d):
        Git.unpack(self, ud, destdir, d)

        if self.uses_submodules(ud, d, ud.destdir):
            runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
            runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
Example #56
 def build_mirror_data(self, url, ud, d):
     # Generate a mirror tarball if needed
     if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
         os.chdir(ud.clonedir)
         logger.info("Creating tarball of git repository")
         runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
Example #57
    def clone_shallow_local(self, ud, dest, d):
        super(GitSM, self).clone_shallow_local(ud, dest, d)

        runfetchcmd('cp -fpPRH "%s/modules" "%s/"' % (ud.clonedir, os.path.join(dest, '.git')), d)
Example #58
    def update_submodules(self, ud, d):
        submodules = []
        paths = {}
        uris = {}
        local_paths = {}

        for name in ud.names:
            try:
                gitmodules = runfetchcmd("%s show %s:.gitmodules" % (ud.basecmd, ud.revisions[name]), d, quiet=True, workdir=ud.clonedir)
            except:
                # No submodules to update
                continue

            module = ""
            for line in gitmodules.splitlines():
                if line.startswith('[submodule'):
                    module = line.split('"')[1]
                    submodules.append(module)
                elif module and line.strip().startswith('path'):
                    path = line.split('=')[1].strip()
                    paths[module] = path
                elif module and line.strip().startswith('url'):
                    url = line.split('=')[1].strip()
                    uris[module] = url

        for module in submodules:
            module_hash = runfetchcmd("%s ls-tree -z -d %s %s" % (ud.basecmd, ud.revisions[name], paths[module]), d, quiet=True, workdir=ud.clonedir)
            module_hash = module_hash.split()[2]

            # Build new SRC_URI
            proto = uris[module].split(':', 1)[0]
            url = uris[module].replace('%s:' % proto, 'gitsm:', 1)
            url += ';protocol=%s' % proto
            url += ";name=%s" % module
            url += ";qbareclone=1;nocheckout=1"

            ld = d.createCopy()
            # Not necessary to set SRC_URI, since we're passing the URI to
            # Fetch.
            #ld.setVar('SRC_URI', url)
            ld.setVar('SRCREV_%s' % module, module_hash)

            # Workaround for issues with SRCPV/SRCREV_FORMAT errors; the
            # errors refer to 'multiple' repositories.  Only the repository
            # in the original SRC_URI actually matters...
            ld.setVar('SRCPV', d.getVar('SRCPV'))
            ld.setVar('SRCREV_FORMAT', module)

            newfetch = Fetch([url], ld, cache=False)
            newfetch.download()
            local_paths[module] = newfetch.localpath(url)

            # Correct the submodule references to the local download version...
            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url' : local_paths[module]}, d, workdir=ud.clonedir)
            try:
                os.mkdir(os.path.join(ud.clonedir, 'modules'))
            except OSError:
                pass
            if not os.path.exists(os.path.join(ud.clonedir, 'modules', paths[module])):
                os.symlink(local_paths[module], os.path.join(ud.clonedir, 'modules', paths[module]))

        return True
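
The SRC_URI rewriting above (and in Example #39) maps a plain submodule URL onto the gitsm fetcher. A worked example with a hypothetical submodule named libfoo:

    uri = "git://example.com/libfoo.git"
    proto = uri.split(':', 1)[0]                    # 'git'
    url = uri.replace('%s:' % proto, 'gitsm:', 1)   # 'gitsm://example.com/libfoo.git'
    url += ';protocol=%s' % proto
    url += ';name=%s' % 'libfoo'
    url += ';bareclone=1;nocheckout=1'
    print(url)
    # -> gitsm://example.com/libfoo.git;protocol=git;name=libfoo;bareclone=1;nocheckout=1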
Example #59
    def unpack(self, ud, destdir, d):
        """ unpack the downloaded src to destdir"""

        subdir = ud.parm.get("subpath", "")
        if subdir != "":
            readpathspec = ":%s" % subdir
            def_destsuffix = "%s/" % os.path.basename(subdir.rstrip('/'))
        else:
            readpathspec = ""
            def_destsuffix = "git/"

        destsuffix = ud.parm.get("destsuffix", def_destsuffix)
        destdir = ud.destdir = os.path.join(destdir, destsuffix)
        if os.path.exists(destdir):
            bb.utils.prunedir(destdir)

        need_lfs = ud.parm.get("lfs", "1") == "1"

        source_found = False
        source_error = []

        if not source_found:
            clonedir_is_up_to_date = not self.clonedir_need_update(ud, d)
            if clonedir_is_up_to_date:
                runfetchcmd(
                    "%s clone %s %s/ %s" %
                    (ud.basecmd, ud.cloneflags, ud.clonedir, destdir), d)
                source_found = True
            else:
                source_error.append(
                    "clone directory not available or not up to date: " +
                    ud.clonedir)

        if not source_found:
            if ud.shallow:
                if os.path.exists(ud.fullshallow):
                    bb.utils.mkdirhier(destdir)
                    runfetchcmd("tar -xzf %s" % ud.fullshallow,
                                d,
                                workdir=destdir)
                    source_found = True
                else:
                    source_error.append("shallow clone not available: " +
                                        ud.fullshallow)
            else:
                source_error.append("shallow clone not enabled")

        if not source_found:
            raise bb.fetch2.UnpackError(
                "No up to date source found: " + "; ".join(source_error),
                ud.url)

        repourl = self._get_repo_url(ud)
        runfetchcmd("%s remote set-url origin \"%s\"" % (ud.basecmd, repourl),
                    d,
                    workdir=destdir)

        if self._contains_lfs(ud, d, destdir):
            if need_lfs and not self._find_git_lfs(d):
                raise bb.fetch2.FetchError(
                    "Repository %s has LFS content, install git-lfs on host to download (or set lfs=0 to ignore it)"
                    % (repourl))
            elif not need_lfs:
                bb.note(
                    "Repository %s has LFS content but it is not being fetched"
                    % (repourl))

        if not ud.nocheckout:
            if subdir != "":
                runfetchcmd(
                    "%s read-tree %s%s" %
                    (ud.basecmd, ud.revisions[ud.names[0]], readpathspec),
                    d,
                    workdir=destdir)
                runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd,
                            d,
                            workdir=destdir)
            elif not ud.nobranch:
                branchname = ud.branches[ud.names[0]]
                runfetchcmd("%s checkout -B %s %s" % (ud.basecmd, branchname, \
                            ud.revisions[ud.names[0]]), d, workdir=destdir)
                runfetchcmd("%s branch %s --set-upstream-to origin/%s" % (ud.basecmd, branchname, \
                            branchname), d, workdir=destdir)
            else:
                runfetchcmd("%s checkout %s" %
                            (ud.basecmd, ud.revisions[ud.names[0]]),
                            d,
                            workdir=destdir)

        return True