def get_files(self):
    '''Cache a list of filenames in the checkout'''
    # Ask git for every tracked path; one path per output line.
    cmd = u'cd {}; git ls-files'.format(self.gitrepo.checkoutdir)
    (rc, so, se) = run_command(cmd)
    tracked = []
    for raw_line in to_text(so).split(u'\n'):
        name = raw_line.strip()
        if name:
            tracked.append(name)
    self.files = tracked
def get_files(self, force=False):
    '''Cache a list of filenames in the checkout'''
    # Rebuild the cache only when empty, or when explicitly forced.
    already_cached = bool(self._files)
    if force or not already_cached:
        cmd = u'cd {}; git ls-files'.format(self.checkoutdir)
        logging.debug(cmd)
        (rc, so, se) = run_command(cmd)
        stripped = (x.strip() for x in to_text(so).split(u'\n'))
        self._files = [x for x in stripped if x]
def get_files(self):
    '''Cache all paths under the checkout, minus the .git tree.'''
    # Enumerate everything (tracked or not) with find(1).
    cmd = u'find %s' % self.gitrepo.checkoutdir
    (rc, so, se) = run_command(cmd)
    prefix = self.gitrepo.checkoutdir + u'/'
    collected = []
    for raw in to_text(so).split(u'\n'):
        path = raw.strip()
        if not path:
            continue
        # Make paths relative to the checkout root.
        path = path.replace(prefix, u'')
        # Skip git's own metadata tree.
        if path.startswith(u'.git'):
            continue
        collected.append(path)
    self.files = collected
def create_checkout(self):
    """checkout ansible"""
    # cleanup: always start from a fresh clone; drop any stale checkout.
    if os.path.isdir(self.checkoutdir):
        shutil.rmtree(self.checkoutdir)
    clone_cmd = "git clone %s %s" % (self.repo, self.checkoutdir)
    (rc, so, se) = run_command(clone_cmd)
    print(to_text(so) + to_text(se))
def last_commit_for_file(self, filepath):
    '''Return the newest commit hash that touched ``filepath``.'''
    # Cached history is ordered newest-first.
    if filepath in self.commits:
        return self.commits[filepath][0][u'hash']
    # Cache miss: ask git directly, e.g.
    #   git log --pretty=format:'%H' -1 lib/ansible/modules/cloud/amazon/ec2_metric_alarm.py
    checkout = self.gitrepo.checkoutdir
    cmd = u'cd %s; git log --pretty=format:\'%%H\' -1 %s' % (checkout, filepath)
    rc, so, se = run_command(cmd)
    return to_text(so).strip()
def get_files_by_commit(self, commit):
    '''Return (and memoize) the file names touched by ``commit``.'''
    if commit in self.files_by_commit:
        return self.files_by_commit[commit]
    # --pretty="" suppresses the header; --name-only lists just paths.
    cmd = u'cd {}; git show --pretty="" --name-only {}'.format(self.checkoutdir, commit)
    (rc, so, se) = run_command(cmd)
    touched = []
    for raw in to_text(so).split(u'\n'):
        name = raw.strip()
        if name:
            touched.append(name)
    # Cache a copy so later mutation of the returned list is harmless.
    self.files_by_commit[commit] = touched[:]
    return touched
def extract_metafiles():
    '''Unpack METATAR into a fresh tempdir and return its metafile paths.

    Returns a sorted, de-duplicated list of ``metafiles/*.json`` paths
    inside the newly created temporary directory.  The tempdir is NOT
    removed here; callers are expected to read the files afterwards.
    '''
    # Named "tarpath" (not "tarfile") so the stdlib tarfile module
    # is not shadowed by a local variable.
    tarpath = os.path.abspath(METATAR)
    tdir = tempfile.mkdtemp()
    cmd = 'cd {} ; tar xzvf {}'.format(tdir, tarpath)
    (rc, so, se) = run_command(cmd)
    metafiles = glob.glob('{}/metafiles/*.json'.format(tdir))
    metafiles = sorted(set(metafiles))
    return metafiles
def update_checkout(self):
    """rebase + pull + update the checkout"""
    changed = False

    # get a specific commit or do a rebase
    if self.commit:
        # Probe the currently checked-out hash: `git log -1` prints
        # "commit <hash>" on its first line; awk grabs the second field.
        cmd = "cd %s; git log -1 | head -n1 | awk '{print $2}'" % self.checkoutdir
        (rc, so, se) = run_command(cmd)
        so = to_text(so).strip()

        if so != self.commit:
            # Move the working tree to the pinned commit.
            cmd = "cd %s; git checkout %s" % (self.checkoutdir, self.commit)
            (rc, so, se) = run_command(cmd)
            changed = True

        # NOTE(review): if the hashes already matched, rc here is still the
        # returncode of the probe command above — confirm that is intended.
        if rc != 0:
            # Probe or checkout failed: rebuild the clone from scratch.
            self.create_checkout()
            changed = True
    else:
        changed = False
        cmd = "cd %s ; git pull --rebase" % self.checkoutdir
        (rc, so, se) = run_command(cmd)
        so = to_text(so)
        print(so + to_text(se))

        # If rebase failed, recreate the checkout
        if rc != 0:
            self.create_checkout()
            return True
        else:
            # git reports this exact phrase when nothing changed.
            if u'current branch devel is up to date.' not in so.lower():
                changed = True

    # Any refresh invalidates the per-email commit cache.
    self.commits_by_email = None

    return changed
def commits_by_email(self, email):
    '''Return every commit hash authored by the given email(s).'''
    emails = email if isinstance(email, (list, tuple)) else [email]

    if not self.email_commits:
        # Build the email -> [hashes] index once from the full git log.
        cmd = u'cd {}; git log --format="%H %ae"'.format(self.gitrepo.checkoutdir)
        (rc, so, se) = run_command(cmd)
        for line in to_text(so).split(u'\n'):
            if not line:
                continue
            # "<hash> <email>" reversed -> [email, hash]
            pair = line.split(None, 1)[::-1]
            self.email_commits.setdefault(pair[0], []).append(pair[1])

    matched = []
    for addr in emails:
        matched += self.email_commits.get(addr, [])
    return matched
def get_commits_by_email(self, email):
    '''Map an email(s) to a total num of commits and total by file'''
    if self.commits_by_email is None:
        # Lazily build the author-email -> {short hashes} index.
        index = {}
        cmd = u'cd {}; git log --format="%h;%ae"'.format(self.checkoutdir)
        (rc, so, se) = run_command(cmd)
        for line in to_text(so).split(u'\n'):
            line = line.strip()
            if not line:
                continue
            parts = line.split(u';')
            index.setdefault(parts[1], set()).add(parts[0])
        self.commits_by_email = index

    # Normalize the argument to a list of addresses.
    if isinstance(email, (set, list)):
        emails = [x for x in email]
    else:
        emails = [email]

    email_map = {}
    for addr in emails:
        entry = email_map.setdefault(
            addr,
            {u'commit_count': 0, u'commit_count_byfile': {}}
        )
        if addr not in self.commits_by_email:
            continue
        hashes = self.commits_by_email[addr]
        entry[u'commit_count'] = len(hashes)
        byfile = entry[u'commit_count_byfile']
        for chash in hashes:
            for fn in self.get_files_by_commit(chash):
                byfile[fn] = byfile.get(fn, 0) + 1
    return email_map
def version_by_date(self, dateobj, devel=False):
    # Map a datetime to the ansible version in effect at that date.
    # NOTE(review): the ``devel`` parameter is unused here — confirm
    # whether callers rely on it elsewhere.
    if not self.DATEVERSIONS:
        # Build a newest-first list of [YYYY-MM-DD, commithash] pairs.
        self.DATEVERSIONS = []
        cmd = u'cd %s;' % self.checkoutdir
        cmd += u'git log --date=short --pretty=format:"%ad;%H"'
        (rc, so, se) = run_command(cmd)
        lines = (x.strip() for x in to_text(so).split(u'\n'))
        lines = filter(bool, lines)
        for x in lines:
            parts = x.split(u';')
            self.DATEVERSIONS.append(parts)

    last_commit_date = self.DATEVERSIONS[0][0]
    last_commit_date = datetime.datetime.strptime(
        last_commit_date,
        u'%Y-%m-%d'
    )

    # use last commit version if older than incoming date
    if dateobj >= last_commit_date:
        acommit = self.DATEVERSIONS[0][1]
    else:
        acommit = None
        datestr = to_text(dateobj).split()[0]
        # NOTE(review): this reversed scan's result is never used (the
        # matched ``dv`` is not read after the break) — looks like dead
        # code; confirm before relying on it.
        for dv in reversed(self.DATEVERSIONS):
            if dv[0] == datestr:
                break
        if not acommit:
            # Fall back to a year-month prefix match, scanning
            # newest-first and taking the first commit in that month.
            datestr = u'-'.join(datestr.split(u'-')[0:2])
            for dv in self.DATEVERSIONS:
                dvs = u'-'.join(dv[0].split(u'-')[0:2])
                if dvs == datestr:
                    acommit = dv[1]
                    break

    aversion = None
    if acommit:
        aversion = self.ansible_version_by_commit(acommit)
    return aversion
def update_checkout(self):
    """rebase + pull + update the checkout"""
    changed = False
    cmd = "cd %s ; git pull --rebase" % self.checkoutdir
    (rc, so, se) = run_command(cmd)
    so = to_text(so)
    print(so + to_text(se))

    # A failed rebase means the clone is broken: rebuild it.
    if rc != 0:
        self.create_checkout()
        return True

    # git prints this exact phrase when nothing new arrived.
    if u'current branch devel is up to date.' not in so.lower():
        changed = True

    self.commits_by_email = None
    return changed
def eval_pr_param(self, pr):
    '''PR/ID can be a number, numberlist, script, jsonfile, or url'''
    if isinstance(pr, list):
        # Already a list of numbers: pass through unchanged.
        pass
    elif pr.isdigit():
        pr = int(pr)
    elif pr.startswith('http'):
        # Remote JSON list of issue numbers.
        ids = requests.get(pr).json()
        pr = ids[:]
    elif os.path.isfile(pr) and not os.access(pr, os.X_OK):
        # Plain JSON file on disk.
        with open(pr) as f:
            ids = json.loads(f.read())
        pr = ids[:]
    elif os.path.isfile(pr) and os.access(pr, os.X_OK):
        # allow for scripts when trying to target spec issues
        logging.info('executing %s' % pr)
        (rc, so, se) = run_command(pr)
        ids = json.loads(to_text(so))
        if ids:
            # Accept either [{'number': N}, ...] or a bare number list.
            if isinstance(ids[0], dict) and 'number' in ids[0]:
                ids = [x['number'] for x in ids]
            else:
                ids = [int(x) for x in ids]
        logging.info('%s numbers after running script' % len(ids))
        pr = ids[:]
    elif ',' in pr:
        # Comma-separated number list.
        pr = [int(x) for x in pr.split(',')]

    if not isinstance(pr, list):
        pr = [pr]
    return pr
def version_by_date(self, dateobj, devel=False):
    # Resolve the ansible version that was current at ``dateobj``.
    # NOTE(review): ``devel`` is not referenced in this body — confirm.
    if not self.DATEVERSIONS:
        # One-time build: newest-first [YYYY-MM-DD, hash] pairs.
        self.DATEVERSIONS = []
        cmd = u'cd %s;' % self.checkoutdir
        cmd += u'git log --date=short --pretty=format:"%ad;%H"'
        (rc, so, se) = run_command(cmd)
        lines = (x.strip() for x in to_text(so).split(u'\n'))
        lines = filter(bool, lines)
        for x in lines:
            parts = x.split(u';')
            self.DATEVERSIONS.append(parts)

    last_commit_date = self.DATEVERSIONS[0][0]
    last_commit_date = datetime.datetime.strptime(last_commit_date, u'%Y-%m-%d')

    # use last commit version if older than incoming date
    if dateobj >= last_commit_date:
        acommit = self.DATEVERSIONS[0][1]
    else:
        acommit = None
        datestr = to_text(dateobj).split()[0]
        # NOTE(review): the matched ``dv`` is never read after this break;
        # the loop appears to be dead code — verify before depending on it.
        for dv in reversed(self.DATEVERSIONS):
            if dv[0] == datestr:
                break
        if not acommit:
            # Month-granularity fallback: first commit in that YYYY-MM.
            datestr = u'-'.join(datestr.split(u'-')[0:2])
            for dv in self.DATEVERSIONS:
                dvs = u'-'.join(dv[0].split(u'-')[0:2])
                if dvs == datestr:
                    acommit = dv[1]
                    break

    aversion = None
    if acommit:
        aversion = self.ansible_version_by_commit(acommit)
    return aversion
def get_module_commits(self):
    # Fill self.commits[<module path>] with parsed `git log --follow`
    # history for each known module, caching results in per-module
    # pickles keyed on the file's mtime.
    keys = self.modules.keys()
    keys = sorted(keys)
    for k in keys:
        self.commits[k] = []
        cpath = os.path.join(self.checkoutdir, k)
        if not os.path.isfile(cpath):
            continue

        mtime = os.path.getmtime(cpath)
        refresh = False
        pfile = os.path.join(self.scraper_cache, k.replace('/', '_') + '.commits.pickle')
        if not os.path.isfile(pfile):
            refresh = True
        else:
            with open(pfile, 'rb') as f:
                pdata = pickle.load(f)
            # Cached tuple is (mtime, commits); stale once mtime moves.
            if pdata[0] == mtime:
                self.commits[k] = pdata[1]
            else:
                refresh = True

        if refresh:
            logging.info('refresh commit cache for %s' % k)
            cmd = 'cd %s; git log --follow %s' % (self.checkoutdir, k)
            (rc, so, se) = run_command(cmd)
            # Stateful parse: each "commit" line starts a new dict which
            # later Author/Date lines fill in.
            for line in so.split('\n'):
                if line.startswith('commit '):
                    commit = {
                        'name': None,
                        'email': None,
                        'login': None,
                        'hash': line.split()[-1],
                        'date': None
                    }

                # Author: Matt Clay <*****@*****.**>
                if line.startswith('Author: '):
                    line = line.replace('Author: ', '')
                    line = line.replace('<', '')
                    line = line.replace('>', '')
                    lparts = line.split()

                    if '@' in lparts[-1]:
                        commit['email'] = lparts[-1]
                        commit['name'] = ' '.join(lparts[:-1])
                    else:
                        pass

                    # Github noreply addresses encode the login.
                    if commit['email'] and \
                            'noreply.github.com' in commit['email']:
                        commit['login'] = commit['email'].split('@')[0]

                # Date: Sat Jan 28 23:28:53 2017 -0800
                if line.startswith('Date:'):
                    dstr = line.split(':', 1)[1].strip()
                    # Drop the trailing timezone offset before parsing.
                    dstr = ' '.join(dstr.split(' ')[:-1])
                    ds = datetime.datetime.strptime(
                        dstr, '%a %b %d %H:%M:%S %Y')
                    commit['date'] = ds
                    # Date is the last header line, so append here.
                    self.commits[k].append(commit)

            with open(pfile, 'wb') as f:
                pickle.dump((mtime, self.commits[k]), f)
def create_checkout(self):
    """checkout ansible"""
    # A shallow single-branch clone is all the indexer needs.
    clone_cmd = u"git clone --depth=1 --single-branch %s %s" % (
        self.repo, self.checkoutdir)
    rc, so, se = run_command(clone_cmd)
    if rc:
        # Surface the full command and its output; callers treat as fatal.
        raise Exception(
            u"Fail to execute '{}: {} ({}, {})'".format(clone_cmd, rc, so, se))
def create_checkout(self):
    """checkout ansible"""
    # Shallow clone of a single branch keeps disk and network cost low.
    args = (self.repo, self.checkoutdir)
    cmd = u"git clone --depth=1 --single-branch %s %s" % args
    (returncode, out, err) = run_command(cmd)
    if returncode:
        msg = u"Fail to execute '{}: {} ({}, {})'".format(cmd, returncode, out, err)
        raise Exception(msg)
def get_module_commits(self):
    # Populate self.commits[<module path>] with parsed
    # `git log --follow` history, using per-module pickles keyed on the
    # module file's mtime as a cache.
    keys = self.modules.keys()
    keys = sorted(keys)
    for k in keys:
        self.commits[k] = []
        cpath = os.path.join(self.gitrepo.checkoutdir, k)
        if not os.path.isfile(cpath):
            continue

        mtime = os.path.getmtime(cpath)
        refresh = False
        pfile = os.path.join(self.scraper_cache, k.replace(u'/', u'_') + u'.commits.pickle')
        if not os.path.isfile(pfile):
            refresh = True
        else:
            # py3 needs encoding='bytes' to load pickles written by py2.
            pickle_kwargs = {'encoding': 'bytes'} if six.PY3 else {}
            print(pfile)
            with open(pfile, 'rb') as f:
                pdata = pickle_load(f, **pickle_kwargs)
            # Cached tuple is (mtime, commits); stale once mtime changes.
            if pdata[0] == mtime:
                self.commits[k] = pdata[1]
            else:
                refresh = True

        if refresh:
            logging.info(u'refresh commit cache for %s' % k)
            cmd = u'cd %s; git log --follow %s' % (
                self.gitrepo.checkoutdir, k)
            (rc, so, se) = run_command(cmd)
            # Stateful parse: a "commit" line opens a dict that the
            # following Author/Date header lines fill in.
            for line in to_text(so).split(u'\n'):
                if line.startswith(u'commit '):
                    commit = {
                        u'name': None,
                        u'email': None,
                        u'login': None,
                        u'hash': line.split()[-1],
                        u'date': None
                    }

                # Author: Matt Clay <*****@*****.**>
                if line.startswith(u'Author: '):
                    line = line.replace(u'Author: ', u'')
                    line = line.replace(u'<', u'')
                    line = line.replace(u'>', u'')
                    lparts = line.split()

                    if u'@' in lparts[-1]:
                        commit[u'email'] = lparts[-1]
                        commit[u'name'] = u' '.join(lparts[:-1])
                    else:
                        pass

                    # Github noreply addresses embed the login name.
                    if commit[u'email'] and \
                            u'noreply.github.com' in commit[u'email']:
                        commit[u'login'] = commit[u'email'].split(u'@')[0]

                # Date: Sat Jan 28 23:28:53 2017 -0800
                if line.startswith(u'Date:'):
                    dstr = line.split(u':', 1)[1].strip()
                    # Strip the trailing timezone offset before strptime.
                    dstr = u' '.join(dstr.split(u' ')[:-1])
                    ds = datetime.datetime.strptime(
                        to_text(dstr), u'%a %b %d %H:%M:%S %Y')
                    commit[u'date'] = ds
                    # Date is the last header line parsed, so append now.
                    self.commits[k].append(commit)

            with open(pfile, 'wb') as f:
                pickle_dump((mtime, self.commits[k]), f)
def get_module_commits(self):
    # Build per-module commit histories from `git log --follow`,
    # caching each parsed history in an mtime-stamped pickle under
    # self.scraper_cache.
    keys = self.modules.keys()
    keys = sorted(keys)
    for k in keys:
        self.commits[k] = []
        cpath = os.path.join(self.gitrepo.checkoutdir, k)
        if not os.path.isfile(cpath):
            continue

        mtime = os.path.getmtime(cpath)
        refresh = False
        pfile = os.path.join(
            self.scraper_cache,
            k.replace(u'/', u'_') + u'.commits.pickle'
        )
        if not os.path.isfile(pfile):
            refresh = True
        else:
            # Cross-version pickle compat: py3 reads py2 data as bytes.
            pickle_kwargs = {'encoding': 'bytes'} if six.PY3 else {}
            print(pfile)
            with open(pfile, 'rb') as f:
                pdata = pickle_load(f, **pickle_kwargs)
            # (mtime, commits) tuple; reuse only while mtime matches.
            if pdata[0] == mtime:
                self.commits[k] = pdata[1]
            else:
                refresh = True

        if refresh:
            logging.info(u'refresh commit cache for %s' % k)
            cmd = u'cd %s; git log --follow %s' % (self.gitrepo.checkoutdir, k)
            (rc, so, se) = run_command(cmd)
            # Each "commit" line starts a new dict; Author/Date lines
            # that follow fill it in; Date triggers the append.
            for line in to_text(so).split(u'\n'):
                if line.startswith(u'commit '):
                    commit = {
                        u'name': None,
                        u'email': None,
                        u'login': None,
                        u'hash': line.split()[-1],
                        u'date': None
                    }

                # Author: Matt Clay <*****@*****.**>
                if line.startswith(u'Author: '):
                    line = line.replace(u'Author: ', u'')
                    line = line.replace(u'<', u'')
                    line = line.replace(u'>', u'')
                    lparts = line.split()

                    if u'@' in lparts[-1]:
                        commit[u'email'] = lparts[-1]
                        commit[u'name'] = u' '.join(lparts[:-1])
                    else:
                        pass

                    # Derive the github login from noreply addresses.
                    if commit[u'email'] and \
                            u'noreply.github.com' in commit[u'email']:
                        commit[u'login'] = commit[u'email'].split(u'@')[0]

                # Date: Sat Jan 28 23:28:53 2017 -0800
                if line.startswith(u'Date:'):
                    dstr = line.split(u':', 1)[1].strip()
                    # Remove the timezone offset; strptime has no %z here.
                    dstr = u' '.join(dstr.split(u' ')[:-1])
                    ds = datetime.datetime.strptime(
                        to_text(dstr),
                        u'%a %b %d %H:%M:%S %Y'
                    )
                    commit[u'date'] = ds
                    self.commits[k].append(commit)

            with open(pfile, 'wb') as f:
                pickle_dump((mtime, self.commits[k]), f)
def get_component_matcher():
    # Build a ComponentMatcher wired to fake indexers and a fixture
    # checkout extracted from a pinned ansible tarball (2017-10-24).

    # Make indexers
    MI = FakeIndexer()
    FI = FakeIndexer()
    GR = FakeGitRepo()
    GR.checkoutdir = tempfile.mkdtemp()
    if not os.path.isdir(GR.checkoutdir):
        os.makedirs(GR.checkoutdir)

    tarname = 'ansible-2017-10-24.tar.gz'
    tarurl = 'http://tannerjc.net/ansible/{}'.format(tarname)
    tarfile = 'tests/fixtures/{}'.format(tarname)
    tarfile = os.path.abspath(tarfile)

    # Download the fixture tarball once; keep it in the fixtures dir.
    if not os.path.isfile(tarfile):
        cmd = 'cd {}; wget {}'.format(os.path.dirname(tarfile), tarurl)
        (rc, so, se) = run_command(cmd)
        print(so)
        print(se)
        assert rc == 0

    # Unpack into the tempdir; the tarball extracts an "ansible" subdir.
    cmd = 'cd {} ; tar xzvf {}'.format(GR.checkoutdir, tarfile)
    (rc, so, se) = run_command(cmd)
    GR.checkoutdir = GR.checkoutdir + '/ansible'

    # Load the files
    with open('tests/fixtures/filenames/2017-10-24.json', 'rb') as f:
        _files = json.loads(f.read())
    _files = sorted(set(_files))

    botmetafile = 'tests/fixtures/botmeta/BOTMETA-2017-11-01.yml'

    FI.files = _files
    GR.files = _files

    # Load the modules: every .py/.ps1 under lib/ansible/modules.
    mfiles = [x for x in FI.files if 'lib/ansible/modules' in x]
    mfiles = [x for x in mfiles if x.endswith('.py') or x.endswith('.ps1')]
    mfiles = [x for x in mfiles if x != '__init__.py']
    mnames = []
    for mfile in mfiles:
        mname = os.path.basename(mfile)
        mname = mname.replace('.py', '')
        mname = mname.replace('.ps1', '')
        mnames.append(mname)
        MI.modules[mfile] = {'name': mname, 'repo_filename': mfile}

    # Init the matcher
    CM = ComponentMatcher(botmetafile=botmetafile, email_cache={}, gitrepo=GR, file_indexer=FI, module_indexer=MI)

    return CM
def ansible_version_by_commit(self, commithash, config=None):
    '''Resolve a commit hash to the ansible release containing it.

    NOTE(review): the ``config`` parameter is unused in this body —
    confirm whether callers pass it.
    '''
    # $ git branch --contains e620fed755a9c7e07df846b7deb32bbbf3164ac7
    # * devel
    #$ git branch -r --contains 6d9949698bd6a5693ef64cfde845c029f0e02b91 | egrep -e 'release' -e 'stable' | head
    # origin/release1.5.0
    # origin/release1.5.1
    # origin/release1.5.2
    # origin/release1.5.3
    # origin/release1.5.4
    # origin/release1.5.5
    # origin/release1.6.0
    # origin/release1.6.1
    # origin/release1.6.10
    # origin/release1.6.2

    '''
    # make sure the checkout cache is still valid
    self.update_checkout()
    '''

    aversion = None

    if not self.COMMITVERSIONS:
        self.COMMITVERSIONS = {}

    if commithash in self.COMMITVERSIONS:
        # Memoized from an earlier lookup.
        aversion = self.COMMITVERSIONS[commithash]
    else:
        # get devel's version
        devel_version = self._get_devel_version()

        cmd = u'cd %s;' % self.checkoutdir
        cmd += u'git branch -r --contains %s' % commithash
        (rc, so, se) = run_command(cmd)
        lines = (x.strip() for x in to_text(so).split(u'\n'))
        lines = list(filter(bool, lines))

        # Keep only release/stable remote branches and reduce them to
        # bare version strings ("origin/stable-2.4" -> "2.4").
        rlines = (x for x in lines if x.startswith((u'origin/release', u'origin/stable')))
        rlines = (x.split(u'/')[-1] for x in rlines)
        rlines = (x.replace(u'release', u'') for x in rlines)
        rlines = [x.replace(u'stable-', u'') for x in rlines]

        if rc != 0:
            logging.error(u"rc != 0")
            if C.DEFAULT_BREAKPOINTS:
                logging.error(u'breakpoint!')
                import epdb; epdb.st()
            else:
                raise Exception(u'bad returncode')

        if len(rlines) > 0:
            # First matching release branch wins.
            aversion = rlines[0]
        else:
            # No release branch contains the commit: it is only on devel.
            if u'HEAD' in lines[0] or lines[0].endswith(u'/devel'):
                '''
                cmd = 'cd %s;' % self.checkoutdir
                cmd += 'git branch -a | fgrep -e release -e stable | tail -n 1'
                (rc, so, se) = run_command(cmd)
                cver = so.strip()
                cver = cver.replace('remotes/origin/stable-', '')
                cver = cver.replace('remotes/upstream/stable-', '')
                cver = cver.replace('remotes/origin/release', '')
                cver = cver.replace('remotes/upstream/release', '')
                assert cver, "cver is null"
                assert cver[0].isdigit(), "cver[0] is not digit: %s" % cver
                aversion = cver
                '''
                aversion = devel_version
            else:
                logging.error(u"WTF!? ...")
                if C.DEFAULT_BREAKPOINTS:
                    logging.error(u'breakpoint!')
                    import epdb; epdb.st()
                else:
                    raise Exception(u'HEAD not found')

        self.COMMITVERSIONS[commithash] = aversion

    return aversion
def index_files(self):
    # (Re)build BOTMETA, MODULES and the module-name indexes from the
    # checkout's module files plus `ansible-doc -F` output.
    self.BOTMETA = {}
    self.MODULES = {}
    self.MODULE_NAMES = []
    self.MODULE_NAMESPACE_DIRECTORIES = []

    self.load_meta()

    # Pass 1: every module file on disk becomes a MODULES entry.
    for fn in self.gitrepo.module_files:
        if os.path.isdir(fn):
            continue
        mname = os.path.basename(fn)
        mname = mname.replace('.py', '').replace('.ps1', '')
        if mname.startswith('__'):
            continue
        mdata = {
            'name': mname,
            'repo_filename': fn,
            'filename': fn
        }
        if fn not in self.MODULES:
            self.MODULES[fn] = mdata.copy()
        else:
            self.MODULES[fn].update(mdata)

    self.MODULE_NAMESPACE_DIRECTORIES = [os.path.dirname(x) for x in self.gitrepo.module_files]
    self.MODULE_NAMESPACE_DIRECTORIES = sorted(set(self.MODULE_NAMESPACE_DIRECTORIES))

    # make a list of names by enumerating the files
    self.MODULE_NAMES = [os.path.basename(x) for x in self.gitrepo.module_files]
    self.MODULE_NAMES = [x for x in self.MODULE_NAMES if x.endswith('.py') or x.endswith('.ps1')]
    self.MODULE_NAMES = [x.replace('.ps1', '').replace('.py', '') for x in self.MODULE_NAMES]
    self.MODULE_NAMES = [x for x in self.MODULE_NAMES if not x.startswith('__')]
    self.MODULE_NAMES = sorted(set(self.MODULE_NAMES))

    # make a list of names by calling ansible-doc
    checkoutdir = self.gitrepo.checkoutdir
    checkoutdir = os.path.abspath(checkoutdir)
    cmd = '. {}/hacking/env-setup; ansible-doc -t module -F'.format(checkoutdir)
    logging.debug(cmd)
    (rc, so, se) = run_command(cmd, cwd=checkoutdir)
    if rc:
        raise Exception("'ansible-doc' command failed (%s, %s %s)" % (rc, so, se))
    # Each useful output line is "<module name> <absolute path>".
    lines = so.split('\n')
    for line in lines:
        parts = line.split()
        parts = [x.strip() for x in parts]
        if len(parts) != 2 or checkoutdir not in line:
            continue
        mname = parts[0]
        if mname not in self.MODULE_NAMES:
            self.MODULE_NAMES.append(mname)
        fpath = parts[1]
        fpath = fpath.replace(checkoutdir + '/', '')
        if fpath not in self.MODULES:
            self.MODULES[fpath] = {
                'name': mname,
                'repo_filename': fpath,
                'filename': fpath
            }

    # Pass 2: extract authors/metadata per module and merge into BOTMETA.
    _modules = self.MODULES.copy()
    for k, v in _modules.items():
        kparts = os.path.splitext(k)
        # A .ps1 module's docs live in its sibling .py file when present.
        if kparts[-1] == '.ps1':
            _k = kparts[0] + '.py'
            checkpath = os.path.join(checkoutdir, _k)
            if not os.path.isfile(checkpath):
                _k = k
        else:
            _k = k
        ME = ModuleExtractor(os.path.join(checkoutdir, _k), email_cache=self.email_cache)
        if k not in self.BOTMETA['files']:
            # No BOTMETA entry yet: seed it entirely from the extractor.
            self.BOTMETA['files'][k] = {
                'deprecated': os.path.basename(k).startswith('_'),
                'labels': os.path.dirname(k).split('/'),
                'authors': ME.authors,
                'maintainers': ME.authors,
                'maintainers_keys': [],
                'notified': ME.authors,
                'ignored': [],
                'support': ME.metadata.get('supported_by', 'community'),
                'metadata': ME.metadata.copy()
            }
        else:
            # Merge extractor data into the existing BOTMETA entry
            # without dropping anything already recorded there.
            bmeta = self.BOTMETA['files'][k].copy()
            bmeta['metadata'] = ME.metadata.copy()
            if 'notified' not in bmeta:
                bmeta['notified'] = []
            if 'maintainers' not in bmeta:
                bmeta['maintainers'] = []
            if not bmeta.get('supported_by'):
                bmeta['supported_by'] = ME.metadata.get('supported_by', 'community')
            if 'authors' not in bmeta:
                bmeta['authors'] = []
            for x in ME.authors:
                if x not in bmeta['authors']:
                    bmeta['authors'].append(x)
                if x not in bmeta['maintainers']:
                    bmeta['maintainers'].append(x)
                if x not in bmeta['notified']:
                    bmeta['notified'].append(x)
            if not bmeta.get('labels'):
                bmeta['labels'] = os.path.dirname(k).split('/')
            # A leading underscore on the basename marks deprecation.
            bmeta['deprecated'] = os.path.basename(k).startswith('_')
            self.BOTMETA['files'][k].update(bmeta)

        # clean out the ignorees
        if 'ignored' in self.BOTMETA['files'][k]:
            for ignoree in self.BOTMETA['files'][k]['ignored']:
                for thiskey in ['maintainers', 'notified']:
                    while ignoree in self.BOTMETA['files'][k][thiskey]:
                        self.BOTMETA['files'][k][thiskey].remove(ignoree)

        # write back to the modules
        self.MODULES[k].update(self.BOTMETA['files'][k])
def ansible_version_by_commit(self, commithash):
    '''Resolve a commit hash to the ansible release containing it.'''
    # $ git branch --contains e620fed755a9c7e07df846b7deb32bbbf3164ac7
    # * devel
    #$ git branch -r --contains 6d9949698bd6a5693ef64cfde845c029f0e02b91 | egrep -e 'release' -e 'stable' | head
    # origin/release1.5.0
    # origin/release1.5.1
    # origin/release1.5.2
    # origin/release1.5.3
    # origin/release1.5.4
    # origin/release1.5.5
    # origin/release1.6.0
    # origin/release1.6.1
    # origin/release1.6.10
    # origin/release1.6.2

    '''
    # make sure the checkout cache is still valid
    self.update_checkout()
    '''

    aversion = None

    if not self.COMMITVERSIONS:
        self.COMMITVERSIONS = {}

    if commithash in self.COMMITVERSIONS:
        # Previously resolved; serve from the memo cache.
        aversion = self.COMMITVERSIONS[commithash]
    else:
        # get devel's version
        devel_version = self._get_devel_version()

        cmd = u'cd %s;' % self.checkoutdir
        cmd += u'git branch -r --contains %s' % commithash
        (rc, so, se) = run_command(cmd)
        lines = (x.strip() for x in to_text(so).split(u'\n'))
        lines = list(filter(bool, lines))

        # Reduce remote release/stable branch names to bare versions,
        # e.g. "origin/stable-2.4" -> "2.4", "origin/release1.5.0" -> "1.5.0".
        rlines = (x for x in lines if x.startswith((u'origin/release', u'origin/stable')))
        rlines = (x.split(u'/')[-1] for x in rlines)
        rlines = (x.replace(u'release', u'') for x in rlines)
        rlines = [x.replace(u'stable-', u'') for x in rlines]

        if rc != 0:
            logging.error(u"rc != 0")
            if C.DEFAULT_BREAKPOINTS:
                logging.error(u'breakpoint!')
                import epdb
                epdb.st()
            else:
                raise Exception(u'bad returncode')

        if len(rlines) > 0:
            aversion = rlines[0]
        else:
            # Only devel contains the commit: report devel's version.
            if u'HEAD' in lines[0] or lines[0].endswith(u'/devel'):
                '''
                cmd = 'cd %s;' % self.checkoutdir
                cmd += 'git branch -a | fgrep -e release -e stable | tail -n 1'
                (rc, so, se) = run_command(cmd)
                cver = so.strip()
                cver = cver.replace('remotes/origin/stable-', '')
                cver = cver.replace('remotes/upstream/stable-', '')
                cver = cver.replace('remotes/origin/release', '')
                cver = cver.replace('remotes/upstream/release', '')
                assert cver, "cver is null"
                assert cver[0].isdigit(), "cver[0] is not digit: %s" % cver
                aversion = cver
                '''
                aversion = devel_version
            else:
                logging.error(u"WTF!? ...")
                if C.DEFAULT_BREAKPOINTS:
                    logging.error(u'breakpoint!')
                    import epdb
                    epdb.st()
                else:
                    raise Exception(u'HEAD not found')

        self.COMMITVERSIONS[commithash] = aversion

    return aversion
def create_checkout(self):
    """Clone the upstream ansible/ansible repo into self.checkoutdir."""
    # https (not http) so the clone cannot be transparently tampered with
    # in transit.
    cmd = "git clone https://github.com/ansible/ansible --recursive %s" \
        % self.checkoutdir
    (rc, so, se) = run_command(cmd)
    # to_text matches the decoding used by the other checkout helpers in
    # this file and avoids printing b'...' reprs for bytes under py3.
    print(to_text(so) + to_text(se))