def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree, then check out and populate submodules.

    ud: the FetchData for this URL; destdir: unpack destination; d: datastore.
    Delegates the main tree to Git.unpack, and only runs the submodule steps
    when the checked-out revision actually references submodules.
    """
    Git.unpack(self, ud, destdir, d)

    submodules = self.uses_submodules(ud, d, ud.destdir)
    if submodules:
        # Check out the pinned revision first so .gitmodules matches it.
        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
        runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree, then check out and populate submodules.

    Delegates the main tree to Git.unpack; if the revision uses submodules,
    checks out the pinned revision and initializes submodules recursively.
    """
    Git.unpack(self, ud, destdir, d)

    submodules = self.uses_submodules(ud, d, ud.destdir)
    if submodules:
        # Check out the pinned revision first so .gitmodules matches it.
        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d, workdir=ud.destdir)
        runfetchcmd(ud.basecmd + " submodule update --init --recursive", d, workdir=ud.destdir)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and wire up submodules from local clones.

    After Git.unpack, copies the submodules' fetched histories into the
    repository's config area, walks nested submodules breadth-first, points
    each submodule.<name>.url at its local download, and finally runs a
    no-fetch submodule update so no network access is needed.
    """
    Git.unpack(self, ud, destdir, d)

    # Copy over the submodules' fetched histories too.
    # For a bare clone the config area IS the destination; otherwise it is .git/.
    if ud.bareclone:
        repo_conf = ud.destdir
    else:
        repo_conf = os.path.join(ud.destdir, '.git')

    update_submodules = False
    local_paths = {}
    for name in ud.names:
        try:
            gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
        except Exception:
            # No submodules to update
            continue

        submodules = self.parse_gitmodules(gitmodules)
        self.copy_submodules(submodules, ud, ud.destdir, d)

        # Seed the work queue with the top-level submodules; nested ones are
        # appended as they are discovered.
        submodules_queue = [(module, os.path.join(repo_conf, 'modules', md['path']))
                            for module, md in submodules.items()]
        while len(submodules_queue) != 0:
            module, modpath = submodules_queue.pop()

            # Add submodule children recursively.
            try:
                gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
                for m, md in self.parse_gitmodules(gitmodules).items():
                    submodules_queue.append([m, os.path.join(modpath, 'modules', md['path'])])
            except Exception:
                # no children
                pass

            # There are submodules to update.
            update_submodules = True

            # Determine (from the submodule) the correct url to reference.
            try:
                output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
            except bb.fetch2.FetchError:
                # No remote url defined in this submodule
                continue

            local_paths[module] = output

            # Set up the local URL properly (like git submodule init or sync would do...).
            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_paths[module]}, d, workdir=ud.destdir)

            # Ensure the submodule repository is NOT set to bare, since we're checking it out...
            runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)

    if update_submodules:
        # Run submodule update; this sets up the directories -- without touching the config.
        runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and wire up submodules from local clones.

    After Git.unpack, copies the submodules' fetched histories into the
    repository's config area (keyed by submodule name), walks nested
    submodules via a work queue, points each submodule.<name>.url at its
    local download, and runs a no-fetch submodule update at the end.
    """
    Git.unpack(self, ud, destdir, d)

    # Copy over the submodules' fetched histories too.
    # For a bare clone the config area IS the destination; otherwise it is .git/.
    if ud.bareclone:
        repo_conf = ud.destdir
    else:
        repo_conf = os.path.join(ud.destdir, '.git')

    submodules = []
    paths = {}
    uris = {}
    local_paths = {}
    for name in ud.names:
        try:
            gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
        except Exception:
            # No submodules to update
            continue

        for m, md in self.parse_gitmodules(gitmodules).items():
            submodules.append(m)
            paths[m] = md['path']
            uris[m] = md['url']

        self.copy_submodules(submodules, ud, ud.destdir, d)

        # Seed the queue with the top-level submodules (path keyed on name here);
        # nested ones are appended as discovered.
        submodules_queue = [(module, os.path.join(repo_conf, 'modules', module)) for module in submodules]
        while len(submodules_queue) != 0:
            module, modpath = submodules_queue.pop()

            # Add submodule children recursively.
            try:
                gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=modpath)
                for m, md in self.parse_gitmodules(gitmodules).items():
                    submodules_queue.append([m, os.path.join(modpath, 'modules', m)])
            except Exception:
                # no children
                pass

            # Determine (from the submodule) the correct url to reference.
            try:
                output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
            except bb.fetch2.FetchError:
                # No remote url defined in this submodule
                continue

            local_paths[module] = output

            # Set up the local URL properly (like git submodule init or sync would do...).
            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_paths[module]}, d, workdir=ud.destdir)

            # Ensure the submodule repository is NOT set to bare, since we're checking it out...
            runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)

    if submodules:
        # Run submodule update; this sets up the directories -- without touching the config.
        runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree, then initialize submodules from the local clone.

    Copies the pre-fetched submodule objects from the clone directory into the
    working tree's .git/ before init/update, so update resolves locally.
    NOTE(review): relies on process-wide os.chdir; workdir= variants elsewhere
    in this file avoid that -- confirm which convention this class uses.
    """
    Git.unpack(self, ud, destdir, d)
    os.chdir(ud.destdir)

    submodules = self.uses_submodules(ud, d)
    if submodules:
        runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree, then initialize submodules from the local clone.

    Copies the pre-fetched submodule objects from the clone directory into the
    working tree's .git/ before init/update, so update resolves locally.
    NOTE(review): relies on process-wide os.chdir -- confirm against the
    workdir= style used by the newer variants.
    """
    Git.unpack(self, ud, destdir, d)
    os.chdir(ud.destdir)

    submodules = self.uses_submodules(ud, d)
    if submodules:
        runfetchcmd("cp -r " + ud.clonedir + "/modules " + ud.destdir + "/.git/", d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree, check out the pinned revision, then submodules.

    Checks out the first name's revision so .gitmodules matches it, then runs
    submodule init/update.
    NOTE(review): relies on process-wide os.chdir -- confirm against the
    workdir= style used by the newer variants.
    """
    Git.unpack(self, ud, destdir, d)
    os.chdir(ud.destdir)

    submodules = self.uses_submodules(ud, d)
    if submodules:
        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and wire up submodules from local clones.

    Parses .gitmodules by hand (section headers, path/url keys), copies the
    fetched submodule histories into the config area, points each
    submodule.<name>.url at its local download, then runs a no-fetch update.
    """
    Git.unpack(self, ud, destdir, d)

    # Copy over the submodules' fetched histories too.
    # For a bare clone the config area IS the destination; otherwise it is .git/.
    if ud.bareclone:
        repo_conf = ud.destdir
    else:
        repo_conf = os.path.join(ud.destdir, '.git')

    submodules = []
    paths = {}
    uris = {}
    local_paths = {}
    for name in ud.names:
        try:
            gitmodules = runfetchcmd("%s show HEAD:.gitmodules" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
        except Exception:
            # No submodules to update
            continue

        # Hand-rolled .gitmodules parse: a [submodule "name"] header opens a
        # section; path/url lines fill it in. paths/uris are collected but not
        # read below.
        module = ""
        for line in gitmodules.splitlines():
            if line.startswith('[submodule'):
                module = line.split('"')[1]
                submodules.append(module)
            elif module and line.strip().startswith('path'):
                path = line.split('=')[1].strip()
                paths[module] = path
            elif module and line.strip().startswith('url'):
                url = line.split('=')[1].strip()
                uris[module] = url

        self.copy_submodules(submodules, ud, ud.destdir, d)

        for module in submodules:
            modpath = os.path.join(repo_conf, 'modules', module)

            # Determine (from the submodule) the correct url to reference.
            try:
                output = runfetchcmd("%(basecmd)s config remote.origin.url" % {'basecmd': ud.basecmd}, d, workdir=modpath)
            except bb.fetch2.FetchError:
                # No remote url defined in this submodule
                continue

            local_paths[module] = output

            # Set up the local URL properly (like git submodule init or sync would do...).
            runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_paths[module]}, d, workdir=ud.destdir)

            # Ensure the submodule repository is NOT set to bare, since we're checking it out...
            runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=modpath)

    if submodules:
        # Run submodule update; this sets up the directories -- without touching the config.
        runfetchcmd("%s submodule update --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree, check out the pinned revision, then submodules.

    Checks out the first name's revision so .gitmodules matches it, then runs
    submodule init/update.
    NOTE(review): relies on process-wide os.chdir -- confirm against the
    workdir= style used by the newer variants.
    """
    Git.unpack(self, ud, destdir, d)
    os.chdir(ud.destdir)

    submodules = self.uses_submodules(ud, d)
    if submodules:
        runfetchcmd(ud.basecmd + " checkout " + ud.revisions[ud.names[0]], d)
        runfetchcmd(ud.basecmd + " submodule init", d)
        runfetchcmd(ud.basecmd + " submodule update", d)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and retrieve git-annex content.

    Runs `git annex init` best-effort (a FetchError here is deliberately
    ignored -- e.g. already initialized), then fetches annexed files and makes
    the annex store writable.
    """
    Git.unpack(self, ud, destdir, d)

    try:
        runfetchcmd("%s annex init" % (ud.basecmd), d, workdir=ud.destdir)
    except bb.fetch.FetchError:
        pass

    annex = self.uses_annex(ud, d, ud.destdir)
    if annex:
        runfetchcmd("%s annex get" % (ud.basecmd), d, workdir=ud.destdir)
        # Annexed objects arrive read-only; unpacked trees are expected writable.
        runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True, workdir=ud.destdir)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and retrieve git-annex content.

    Runs `git annex sync` best-effort (a FetchError is deliberately ignored),
    then fetches annexed files and makes the annex store writable.
    NOTE(review): relies on process-wide os.chdir -- confirm against the
    workdir= style used elsewhere.
    """
    Git.unpack(self, ud, destdir, d)
    os.chdir(ud.destdir)

    try:
        runfetchcmd("%s annex sync" % (ud.basecmd), d)
    except bb.fetch.FetchError:
        pass

    annex = self.uses_annex(ud, d)
    if annex:
        runfetchcmd("%s annex get" % (ud.basecmd), d)
        # Annexed objects arrive read-only; unpacked trees are expected writable.
        runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and retrieve git-annex content.

    Runs `git annex init` best-effort (a FetchError is deliberately ignored),
    then fetches annexed files and makes the annex store writable.
    NOTE(review): relies on process-wide os.chdir -- confirm against the
    workdir= style used elsewhere.
    """
    Git.unpack(self, ud, destdir, d)
    os.chdir(ud.destdir)

    try:
        runfetchcmd("%s annex init" % (ud.basecmd), d)
    except bb.fetch.FetchError:
        pass

    annex = self.uses_annex(ud, d)
    if annex:
        runfetchcmd("%s annex get" % (ud.basecmd), d)
        # Annexed objects arrive read-only; unpacked trees are expected writable.
        runfetchcmd("chmod u+w -R %s/.git/annex" % (ud.destdir), d, quiet=True)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and each submodule from local downloads.

    Uses process_submodules with a callback that unpacks each submodule as a
    bare clone into the config area (keyed by module name here), rewrites
    submodule.<name>.url to the local path, and finally runs a no-fetch
    submodule update for non-bare clones.
    """
    def unpack_submodules(ud, url, module, modpath, d):
        # One submodule: fetch/unpack it bare, then point config at the result.
        url += ";bareclone=1;nobranch=1"

        # Figure out where we clone over the bare submodules...
        if ud.bareclone:
            repo_conf = ud.destdir
        else:
            repo_conf = os.path.join(ud.destdir, '.git')

        try:
            newfetch = Fetch([url], d, cache=False)
            newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', module)))
        except Exception as e:
            logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
            raise

        local_path = newfetch.localpath(url)

        # Correct the submodule references to the local download version...
        runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_path}, d, workdir=ud.destdir)

        if ud.shallow:
            runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)

        # Ensure the submodule repository is NOT set to bare, since we're checking it out...
        try:
            runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', module))
        except Exception:
            logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', module))
            raise

    Git.unpack(self, ud, destdir, d)

    ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)

    if not ud.bareclone and ret:
        # All submodules should already be downloaded and configured in the tree.  This simply sets
        # up the configuration and checks out the files.  The main project config should remain
        # unmodified, and no download from the internet should occur.
        runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and each submodule from local downloads.

    Uses process_submodules with a callback that unpacks each submodule as a
    bare clone into the config area (keyed by the submodule *path* here),
    rewrites submodule.<name>.url to the local path, and finally runs a
    no-fetch submodule update for non-bare clones.
    """
    def unpack_submodules(ud, url, module, modpath, d):
        # One submodule: fetch/unpack it bare, then point config at the result.
        url += ";bareclone=1;nobranch=1"

        # Figure out where we clone over the bare submodules...
        if ud.bareclone:
            repo_conf = ud.destdir
        else:
            repo_conf = os.path.join(ud.destdir, '.git')

        try:
            newfetch = Fetch([url], d, cache=False)
            newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', modpath)))
        except Exception as e:
            logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
            raise

        local_path = newfetch.localpath(url)

        # Correct the submodule references to the local download version...
        runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_path}, d, workdir=ud.destdir)

        if ud.shallow:
            runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)

        # Ensure the submodule repository is NOT set to bare, since we're checking it out...
        try:
            runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', modpath))
        except Exception:
            logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', modpath))
            raise

    Git.unpack(self, ud, destdir, d)

    ret = self.process_submodules(ud, ud.destdir, unpack_submodules, d)

    if not ud.bareclone and ret:
        # Run submodule update; this sets up the directories -- without touching the config.
        runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)
def unpack(self, ud, destdir, d):
    """Unpack the fetched git tree and each submodule from local downloads.

    Same flow as the modpath-keyed variant: a process_submodules callback
    unpacks each submodule as a bare clone into the config area, rewrites
    submodule.<name>.url to the local path, then a no-fetch submodule update
    runs for non-bare clones.
    """
    def unpack_submodules(ud, url, module, modpath, d):
        # One submodule: fetch/unpack it bare, then point config at the result.
        url += ";bareclone=1;nobranch=1"

        # Figure out where we clone over the bare submodules...
        if ud.bareclone:
            repo_conf = ud.destdir
        else:
            repo_conf = os.path.join(ud.destdir, '.git')

        try:
            newfetch = Fetch([url], d, cache=False)
            newfetch.unpack(root=os.path.dirname(os.path.join(repo_conf, 'modules', modpath)))
        except Exception as e:
            logger.error('gitsm: submodule unpack failed: %s %s' % (type(e).__name__, str(e)))
            raise

        local_path = newfetch.localpath(url)

        # Correct the submodule references to the local download version...
        runfetchcmd("%(basecmd)s config submodule.%(module)s.url %(url)s" % {'basecmd': ud.basecmd, 'module': module, 'url': local_path}, d, workdir=ud.destdir)

        if ud.shallow:
            runfetchcmd("%(basecmd)s config submodule.%(module)s.shallow true" % {'basecmd': ud.basecmd, 'module': module}, d, workdir=ud.destdir)

        # Ensure the submodule repository is NOT set to bare, since we're checking it out...
        try:
            runfetchcmd("%s config core.bare false" % (ud.basecmd), d, quiet=True, workdir=os.path.join(repo_conf, 'modules', modpath))
        except Exception:
            logger.error("Unable to set git config core.bare to false for %s" % os.path.join(repo_conf, 'modules', modpath))
            raise

    Git.unpack(self, ud, destdir, d)

    if not ud.bareclone and self.process_submodules(ud, ud.destdir, unpack_submodules, d):
        # Run submodule update; this sets up the directories -- without touching the config.
        runfetchcmd("%s submodule update --recursive --no-fetch" % (ud.basecmd), d, quiet=True, workdir=ud.destdir)