def to_python(self):
    """Return ``self.data`` as a native time value.

    ``None`` stays ``None``; :class:`datetime.datetime` values are
    passed through untouched; anything else is parsed as ISO 8601 and
    reduced to its time component.
    """
    raw = self.data
    if raw is None:
        return None
    # Already-native datetimes need no parsing.
    if isinstance(raw, datetime.datetime):
        return raw
    return PySO8601.parse_time(raw).time()
def to_python(self):
    """Return ``self.data`` as a :class:`datetime.time`.

    ``None`` is returned unchanged, as in the other field variants.
    Native :class:`datetime.datetime` values pass straight through.
    Otherwise the value is parsed as ISO 8601 when ``self.format`` is
    ``None``, or with :func:`datetime.datetime.strptime` using
    ``self.format``.
    """
    # Guard against unset data; the sibling to_python variants return
    # None here instead of letting the parsers raise on None.
    if self.data is None:
        return None
    # don't parse data that is already native
    if isinstance(self.data, datetime.datetime):
        return self.data
    elif self.format is None:
        # parse as iso8601
        return PySO8601.parse_time(self.data).time()
    else:
        return datetime.datetime.strptime(self.data, self.format).time()
def _to_python(self): # don't parse data that is already native if isinstance(self.data, datetime.datetime): return self.data elif self.format is None: # parse as iso8601 return PySO8601.parse_time(self.data).time() else: return datetime.datetime.strptime(self.data, self.format).time()
def _to_python(self): '''A :class:`datetime.datetime` object is returned.''' # don't parse data that is already native if isinstance(self.data, datetime.datetime): return self.data elif self.format is None: # parse as iso8601 return PySO8601.parse(self.data) else: return datetime.datetime.strptime(self.data, self.format)
def to_python(self):
    '''A :class:`datetime.datetime` object is returned.'''
    value = self.data
    # Nothing to convert.
    if value is None:
        return None
    # Native datetimes are returned untouched.
    if isinstance(value, datetime.datetime):
        return value
    if self.format is not None:
        return datetime.datetime.strptime(value, self.format)
    # Fall back to ISO 8601 parsing.
    return PySO8601.parse(value)
def create_file(args):
    """Write a Markdown thank-you list of first-time contributors.

    Collaborators and explicitly-skipped accounts are filtered out,
    each remaining contributor's oldest commit is fetched from the
    GitHub API (backed by a local JSON cache), and the result is
    written to ``args.output`` sorted from oldest to newest first
    commit.
    """
    # Keep only word characters so the repo name is safe as a path part.
    repository_safe = "".join(
        [c for c in args.repository if re.match(r'\w', c)])

    collaborators = get_collaborators(args)
    skippable = set(
        [collaborator['login'].lower() for collaborator in collaborators])
    # Remove people that are in CONTRIBUTORS for some reason or another
    skippable.discard('lefticus')
    skippable.discard('ubsan')
    # Added in the thanks to section of the readme
    skippable.update(
        ['filcab', 'voxelf', 'johanengelen', 'jsheard', 'dkm', 'andrewpardoe'])
    # Duplicated people under different accounts
    skippable.add('jaredadobe')

    all_contributors = get_contributors(args)
    # People already listed somewhere else. Use set diff?
    contributors = [
        contributor for contributor in all_contributors
        if contributor['login'].lower() not in skippable
    ]
    print('Found {} contributors. Skipping {} collaborators'.format(
        len(contributors), len(skippable)))

    # Create cache folder, which can be cleared at any moment.
    # makedirs(exist_ok=True) replaces the isdir-then-mkdir pattern,
    # which raced between the check and the creation.
    cache_dir_base = 'contributorer-cache-{}'.format(repository_safe)
    os.makedirs(cache_dir_base, exist_ok=True)
    dprint('Cache base dir: {}'.format(cache_dir_base), args)
    cache_dir_commits = '{}/commits'.format(cache_dir_base)
    os.makedirs(cache_dir_commits, exist_ok=True)
    dprint('Cache commits dir: {}'.format(cache_dir_commits), args)

    first_commits = []
    for contributor in contributors:
        commits = {}
        # Where should the commits for this contributor be?
        # This works even if outdated because we are looking for old
        # commits, not new
        contrib_file = '{}/{}-commits.json'.format(cache_dir_commits,
                                                   contributor['login'])
        dprint('Checking commits file: {}'.format(contrib_file), args)
        if os.path.isfile(contrib_file):
            dprint('File found, using as commit source', args)
            with open(contrib_file, 'r') as c:
                commits = json.load(c)
        else:
            dprint('None found, querying to GitHub', args)
            # TODO: Buffer them and send only 1 request?
            result = get_oauth(
                'https://api.github.com/repos/{}/commits'.format(
                    args.repository),
                args,
                params={'author': contributor['login']})
            if result.status_code == 200:
                commits = result.json()
                dprint('Writing results to file', args)
                with open(contrib_file, 'w') as c:
                    c.write(result.text)
        if len(commits) > 0:
            # The API returns newest first, so the oldest commit is last.
            first_commit = commits[-1]
            dprint(
                'First commit for {} was in {}'.format(
                    contributor['login'],
                    first_commit['commit']['author']['date']), args)
            first_commits.append({
                'date': first_commit['commit']['author']['date'],
                'name': first_commit['commit']['author']['name']
                or '"{}"'.format(first_commit['author']['login']),
                'url': first_commit['author']['html_url']
            })

    dprint('Sorting commits from oldest to newest', args)
    sorted_commits = sorted(first_commits,
                            key=lambda x: PySO8601.parse(x['date']))
    with open(args.output, 'w') as md:
        dprint('Output file: {}'.format(args.output), args)
        md.write(
            'From oldest to newest contributor, we would like to thank:\n\n')
        md.writelines([
            '- [{}]({})\n'.format(commit['name'], commit['url'])
            for commit in sorted_commits
        ])
def create_file(args):
    """Generate the Markdown contributors list, oldest contributor first."""
    # Strip anything that is not a word character from the repo name.
    safe_name = "".join(ch for ch in args.repository if re.match(r'\w', ch))

    skip_logins = {person['login'].lower() for person in get_collaborators(args)}
    # Remove people that are in CONTRIBUTORS for some reason or another
    skip_logins.discard('lefticus')
    skip_logins.discard('ubsan')
    # Added in the thanks to section of the readme
    skip_logins.update(['filcab', 'voxelf', 'johanengelen', 'jsheard', 'dkm',
                        'andrewpardoe'])
    # Duplicated people under different accounts
    skip_logins.add('jaredadobe')

    # People already listed somewhere else. Use set diff?
    wanted = [person for person in get_contributors(args)
              if person['login'].lower() not in skip_logins]
    print('Found {} contributors. Skipping {} collaborators'.format(
        len(wanted), len(skip_logins)))

    # Create cache folder, which can be cleared at any moment
    base_cache = 'contributorer-cache-{}'.format(safe_name)
    if not os.path.isdir(base_cache):
        os.mkdir(base_cache)
    dprint('Cache base dir: {}'.format(base_cache), args)
    commits_cache = '{}/commits'.format(base_cache)
    if not os.path.isdir(commits_cache):
        os.mkdir(commits_cache)
    dprint('Cache commits dir: {}'.format(commits_cache), args)

    oldest = []
    for person in wanted:
        commit_list = {}
        # Where should the commits for this contributor be?
        # This works even if outdated because we are looking for old commits, not new
        cache_path = '{}/{}-commits.json'.format(commits_cache, person['login'])
        dprint('Checking commits file: {}'.format(cache_path), args)
        if os.path.isfile(cache_path):
            dprint('File found, using as commit source', args)
            with open(cache_path, 'r') as fh:
                commit_list = json.load(fh)
        else:
            dprint('None found, querying to GitHub', args)
            # TODO: Buffer them and send only 1 request?
            result = get_oauth(
                'https://api.github.com/repos/{}/commits'.format(args.repository),
                args, params={'author': person['login']})
            if result.status_code == 200:
                commit_list = result.json()
                dprint('Writing results to file', args)
                with open(cache_path, 'w') as fh:
                    fh.write(result.text)
        if len(commit_list) > 0:
            earliest = commit_list[-1]
            dprint('First commit for {} was in {}'.format(
                person['login'], earliest['commit']['author']['date']), args)
            oldest.append({
                'date': earliest['commit']['author']['date'],
                'name': earliest['commit']['author']['name']
                or '"{}"'.format(earliest['author']['login']),
                'url': earliest['author']['html_url'],
            })

    dprint('Sorting commits from oldest to newest', args)
    ordered = sorted(oldest, key=lambda entry: PySO8601.parse(entry['date']))
    with open(args.output, 'w') as out:
        dprint('Output file: {}'.format(args.output), args)
        out.write('From oldest to newest contributor, we would like to thank:\n\n')
        out.writelines(['- [{}]({})\n'.format(item['name'], item['url'])
                        for item in ordered])
# NOTE(review): fragment of an older create_file variant — its enclosing def/for are not visible here; it reads a REPOSITORY global and calls dprint() without the args parameter used by the newer variants, and 'Writting' in the log string is a typo ('Writing' in the newer variants). Left byte-identical; confirm whether this stale variant is still needed.
# This works even if outdated because we are looking for old commits, not new contrib_file = '{}/{}-commits.json'.format(cache_dir_commits, contributor['login']) dprint('Checking commits file: {}'.format(contrib_file)) if os.path.isfile(contrib_file): dprint('File found, using as commit source') with open(contrib_file, 'r') as c: commits = json.load(c) else: dprint('None found, querying to GitHub') # TODO: Buffer them and send only 1 request? result = get_oauth('https://api.github.com/repos/{}/commits'.format(REPOSITORY), params={'author': contributor['login']}) if result.status_code == 200: commits = result.json() dprint('Writting results to file') with open(contrib_file, 'w') as c: c.write(result.text) if len(commits) > 0: first_commit = commits[-1] dprint('First commit for {} was in {}'.format(contributor['login'], first_commit['commit']['author']['date'])) first_commits.append({'date': first_commit['commit']['author']['date'], 'name': first_commit['commit']['author']['name'] or '"{}"'.format(first_commit['author']['login']), 'url': first_commit['author']['html_url']}) dprint('Sorting commits from oldest to newest') sorted_commits = sorted(first_commits, key=lambda x: PySO8601.parse(x['date'])) with open(args.output, 'w') as md: dprint('Output file: {}'.format(args.output)) md.write('From oldest to newest contributor, we would like to thank:\n\n') md.writelines(['- [{}]({})\n'.format(commit['name'], commit['url']) for commit in sorted_commits])
# NOTE(review): fragment of an older create_file variant — the enclosing loop and def are not visible, and dprint() is called without the args parameter used by the newer variants; 'Writting' in the log string is a typo ('Writing' in the newer variants). Left byte-identical; confirm whether this stale variant is still needed.
if result.status_code == 200: commits = result.json() dprint('Writting results to file') with open(contrib_file, 'w') as c: c.write(result.text) if len(commits) > 0: first_commit = commits[-1] dprint('First commit for {} was in {}'.format( contributor['login'], first_commit['commit']['author']['date'])) first_commits.append({ 'date': first_commit['commit']['author']['date'], 'name': first_commit['commit']['author']['name'] or '"{}"'.format(first_commit['author']['login']), 'url': first_commit['author']['html_url'] }) dprint('Sorting commits from oldest to newest') sorted_commits = sorted(first_commits, key=lambda x: PySO8601.parse(x['date'])) with open(args.output, 'w') as md: dprint('Output file: {}'.format(args.output)) md.write( 'From oldest to newest contributor, we would like to thank:\n\n') md.writelines([ '- [{}]({})\n'.format(commit['name'], commit['url']) for commit in sorted_commits ])