def last_commit_for_file(self, filepath):
    """Return the hash of the most recent commit touching filepath.

    Runs ``git log --pretty=format:'%H' -1 <filepath>`` inside the
    checkout and returns the stripped stdout (a single commit sha).
    ``filepath`` is relative to self.checkoutdir.
    """
    cmd = 'cd %s; git log --pretty=format:\'%%H\' -1 %s' % \
        (self.checkoutdir, filepath)
    # NOTE(review): rc/se are ignored; an error leaves so empty -- the
    # caller receives '' rather than an exception. Confirm that is intended.
    (rc, so, se) = run_command(cmd)
    return so.strip()
def update_checkout(self):
    """rebase + pull + update the checkout

    Attempts a ``git pull --rebase`` followed by a recursive submodule
    update. If either step fails (non-zero rc), the checkout is assumed
    to be wedged and is recreated from scratch via create_checkout().
    """
    print('# updating checkout for module indexer')

    cmd = "cd %s ; git pull --rebase" % self.checkoutdir
    (rc, so, se) = run_command(cmd)
    print(str(so) + str(se))

    # If rebase failed, recreate the checkout
    if rc != 0:
        self.create_checkout()
        return

    cmd = "cd %s ; git submodule update --recursive" % self.checkoutdir
    (rc, so, se) = run_command(cmd)
    print(str(so) + str(se))

    # if update fails, recreate the checkout
    if rc != 0:
        self.create_checkout()
def create_checkout(self):
    """checkout ansible

    Removes any existing checkout directory and clones the ansible
    repository (with submodules) into self.checkoutdir.
    """
    print('# creating checkout for module indexer')

    # cleanup: a stale/broken checkout is removed wholesale before cloning
    if os.path.isdir(self.checkoutdir):
        shutil.rmtree(self.checkoutdir)

    # use https -- github does not serve git over plain http, and an
    # unencrypted clone URL is spoofable
    cmd = "git clone https://github.com/ansible/ansible --recursive %s" \
        % self.checkoutdir
    (rc, so, se) = run_command(cmd)
    print(str(so) + str(se))
def get_files(self):
    """Populate self.files with repo-relative paths from the checkout."""

    # make sure a usable checkout exists before listing it
    if os.path.isdir(self.checkoutdir):
        self.update_checkout()
    else:
        self.create_checkout()

    cmd = 'find %s' % self.checkoutdir
    (rc, so, se) = run_command(cmd)

    prefix = self.checkoutdir + '/'
    filenames = []
    for rawline in so.split('\n'):
        fn = rawline.strip()
        if not fn:
            continue
        fn = fn.replace(prefix, '')
        if fn.startswith('.git'):
            continue
        filenames.append(fn)

    self.files = filenames
def get_module_commits(self):
    """Build self.commits: module path -> list of commit dicts.

    For each known module, parses ``git log --follow`` output into dicts
    with name/email/login/hash/date keys. Results are cached per-module
    in a pickle file keyed by the module file's mtime, so an unchanged
    file never re-runs git log.
    """
    # py2's iteritems() is gone in py3 and the value was never used;
    # iterating the mapping directly yields the same keys on both.
    for k in self.modules:
        self.commits[k] = []
        cpath = os.path.join(self.checkoutdir, k)
        if not os.path.isfile(cpath):
            continue

        mtime = os.path.getmtime(cpath)
        refresh = False
        pfile = os.path.join(
            self.scraper_cache,
            k.replace('/', '_') + '.commits.pickle'
        )

        if not os.path.isfile(pfile):
            refresh = True
        else:
            with open(pfile, 'rb') as f:
                pdata = pickle.load(f)
            # pdata is (mtime, commits); a stale mtime forces a refresh
            if pdata[0] == mtime:
                self.commits[k] = pdata[1]
            else:
                refresh = True

        if refresh:
            # lazy %-args: formatting only happens if the record is emitted
            logging.info('refresh commit cache for %s', k)
            cmd = 'cd %s; git log --follow %s' % (self.checkoutdir, k)
            (rc, so, se) = run_command(cmd)
            for line in so.split('\n'):
                # 'commit <sha>' starts a new record; the Author/Date
                # lines that follow fill in the same dict.
                if line.startswith('commit '):
                    commit = {
                        'name': None,
                        'email': None,
                        'login': None,
                        'hash': line.split()[-1],
                        'date': None
                    }

                # Author: Matt Clay <*****@*****.**>
                if line.startswith('Author: '):
                    line = line.replace('Author: ', '')
                    line = line.replace('<', '')
                    line = line.replace('>', '')
                    lparts = line.split()
                    if '@' in lparts[-1]:
                        commit['email'] = lparts[-1]
                        commit['name'] = ' '.join(lparts[:-1])

                    # github noreply addresses embed the login before the @
                    if commit['email'] and \
                            'noreply.github.com' in commit['email']:
                        commit['login'] = commit['email'].split('@')[0]

                # Date: Sat Jan 28 23:28:53 2017 -0800
                if line.startswith('Date:'):
                    dstr = line.split(':', 1)[1].strip()
                    # drop the trailing utc-offset field; strptime's %z
                    # support is unreliable on py2
                    dstr = ' '.join(dstr.split(' ')[:-1])
                    ds = datetime.datetime.strptime(
                        dstr, '%a %b %d %H:%M:%S %Y')
                    commit['date'] = ds
                    # Date is the last header line, so the record is
                    # complete here and can be appended.
                    self.commits[k].append(commit)

            with open(pfile, 'wb') as f:
                pickle.dump((mtime, self.commits[k]), f)