def commit_diff(self, commit):
    """Yield one change dict per file modified by `commit`.

    The diff is taken against the first parent, or against the empty
    tree for a root commit.
    """
    from klaus.utils import guess_is_binary, force_unicode
    if commit.parents:
        parent_tree = self[commit.parents[0]].tree
    else:
        # Root commit: diff against nothing (every file is an addition).
        parent_tree = None
    changes = self.object_store.tree_changes(parent_tree, commit.tree)
    for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
        try:
            # Binary blobs get a stub entry; no textual diff is produced.
            if newsha and guess_is_binary(self[newsha]) or oldsha and guess_is_binary(self[oldsha]):
                yield {
                    "is_binary": True,
                    "old_filename": oldpath or "/dev/null",
                    "new_filename": newpath or "/dev/null",
                    "chunks": None,
                }
                continue
        except KeyError:
            # newsha/oldsha are probably related to submodules.
            # Dulwich will handle that.
            pass
        stringio = cStringIO.StringIO()
        dulwich.patch.write_object_diff(
            stringio, self.object_store, (oldpath, oldmode, oldsha), (newpath, newmode, newsha)
        )
        files = prepare_udiff(force_unicode(stringio.getvalue()), want_header=False)
        if not files:
            # the diff module doesn't handle deletions/additions
            # of empty files correctly.
            yield {"old_filename": oldpath or "/dev/null", "new_filename": newpath or "/dev/null", "chunks": []}
        else:
            yield files[0]
def commit_diff(self, commit):
    """Return the list of changes introduced by `commit`."""
    from klaus.utils import guess_is_binary
    if commit.parents:
        parent_tree = self[commit.parents[0]].tree
    else:
        # Root commit: diff against the empty tree.
        parent_tree = None
    summary = {'nfiles': 0, 'nadditions': 0, 'ndeletions': 0}
    file_changes = []  # the changes in detail
    dulwich_changes = self.object_store.tree_changes(
        parent_tree, commit.tree)
    for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in dulwich_changes:
        summary['nfiles'] += 1
        try:
            # Check for binary files -- can't show diffs for these
            if newsha and guess_is_binary(self[newsha]) or \
                    oldsha and guess_is_binary(self[oldsha]):
                file_changes.append({
                    'is_binary': True,
                    'old_filename': oldpath or '/dev/null',
                    'new_filename': newpath or '/dev/null',
                    'chunks': None
                })
                continue
        except KeyError:
            # newsha/oldsha are probably related to submodules.
            # Dulwich will handle that.
            pass
        # dulwich writes raw bytes; decode_from_git turns them into text
        # for the udiff parser.
        bytesio = io.BytesIO()
        dulwich.patch.write_object_diff(bytesio, self.object_store,
                                        (oldpath, oldmode, oldsha),
                                        (newpath, newmode, newsha))
        files = prepare_udiff(decode_from_git(bytesio.getvalue()),
                              want_header=False)
        if not files:
            # the diff module doesn't handle deletions/additions
            # of empty files correctly.
            file_changes.append({
                'old_filename': oldpath or '/dev/null',
                'new_filename': newpath or '/dev/null',
                'chunks': [],
                'additions': 0,
                'deletions': 0,
            })
        else:
            change = files[0]
            summary['nadditions'] += change['additions']
            summary['ndeletions'] += change['deletions']
            file_changes.append(change)
    return summary, file_changes
def commit_diff(self, commit):
    """Return (summary, file_changes) describing what `commit` changed."""
    from klaus.utils import guess_is_binary, force_unicode
    if commit.parents:
        parent_tree = self[commit.parents[0]].tree
    else:
        # Root commit: diff against the empty tree.
        parent_tree = None
    summary = {'nfiles': 0, 'nadditions': 0, 'ndeletions': 0}
    file_changes = []  # the changes in detail
    dulwich_changes = self.object_store.tree_changes(parent_tree, commit.tree)
    for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in dulwich_changes:
        summary['nfiles'] += 1
        try:
            # Check for binary files -- can't show diffs for these
            if newsha and guess_is_binary(self[newsha]) or \
                    oldsha and guess_is_binary(self[oldsha]):
                file_changes.append({
                    'is_binary': True,
                    'old_filename': oldpath or '/dev/null',
                    'new_filename': newpath or '/dev/null',
                    'chunks': None
                })
                continue
        except KeyError:
            # newsha/oldsha are probably related to submodules.
            # Dulwich will handle that.
            pass
        stringio = cStringIO.StringIO()
        dulwich.patch.write_object_diff(stringio, self.object_store,
                                        (oldpath, oldmode, oldsha),
                                        (newpath, newmode, newsha))
        files = prepare_udiff(force_unicode(stringio.getvalue()),
                              want_header=False)
        if not files:
            # the diff module doesn't handle deletions/additions
            # of empty files correctly.
            file_changes.append({
                'old_filename': oldpath or '/dev/null',
                'new_filename': newpath or '/dev/null',
                'chunks': [],
                'additions': 0,
                'deletions': 0,
            })
        else:
            change = files[0]
            summary['nadditions'] += change['additions']
            summary['ndeletions'] += change['deletions']
            file_changes.append(change)
    return summary, file_changes
def commit_diff(self, commit):
    """Return (summary, file_changes) for the changes introduced by `commit`.

    `summary` counts files/additions/deletions across the commit;
    `file_changes` holds one dict per changed file.
    """
    from klaus.utils import guess_is_binary
    if commit.parents:
        parent_tree = self[commit.parents[0]].tree
    else:
        # Root commit: diff against the empty tree.
        parent_tree = None
    summary = {'nfiles': 0, 'nadditions': 0, 'ndeletions': 0}
    file_changes = []  # the changes in detail
    dulwich_changes = self.object_store.tree_changes(
        parent_tree, commit.tree)
    for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in dulwich_changes:
        summary['nfiles'] += 1
        try:
            oldblob = self.object_store[
                oldsha] if oldsha else Blob.from_string(b'')
            newblob = self.object_store[
                newsha] if newsha else Blob.from_string(b'')
        except KeyError:
            # newsha/oldsha are probably related to submodules; there is
            # no blob to diff.  Fix: `continue` instead of `pass` -- with
            # `pass`, the code below ran with `oldblob`/`newblob` unbound
            # (NameError on the first iteration) or stale from a previous
            # iteration (silently diffing the wrong blobs).
            continue
        # Check for binary files -- can't show diffs for these
        if guess_is_binary(newblob) or \
                guess_is_binary(oldblob):
            file_changes.append({
                'is_binary': True,
                'old_filename': oldpath or '/dev/null',
                'new_filename': newpath or '/dev/null',
                'chunks': None
            })
            continue
        additions, deletions, chunks = render_diff(
            oldblob.splitlines(), newblob.splitlines())
        change = {
            'is_binary': False,
            'old_filename': oldpath or '/dev/null',
            'new_filename': newpath or '/dev/null',
            'chunks': chunks,
            'additions': additions,
            'deletions': deletions,
        }
        summary['nadditions'] += additions
        summary['ndeletions'] += deletions
        file_changes.append(change)
    return summary, file_changes
def commit_diff(self, commit):
    """Return (summary, file_changes) for the changes introduced by `commit`.

    `summary` counts files/additions/deletions; `file_changes` holds one
    dict per changed file.
    """
    from klaus.utils import guess_is_binary
    if commit.parents:
        parent_tree = self[commit.parents[0]].tree
    else:
        # Root commit: diff against the empty tree.
        parent_tree = None
    summary = {'nfiles': 0, 'nadditions': 0, 'ndeletions': 0}
    file_changes = []  # the changes in detail
    dulwich_changes = self.object_store.tree_changes(parent_tree, commit.tree)
    for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in dulwich_changes:
        summary['nfiles'] += 1
        try:
            oldblob = self.object_store[oldsha] if oldsha else Blob.from_string(b'')
            newblob = self.object_store[newsha] if newsha else Blob.from_string(b'')
        except KeyError:
            # newsha/oldsha are probably related to submodules; there is
            # no blob to diff.  Fix: `continue` instead of `pass` -- with
            # `pass`, the code below ran with `oldblob`/`newblob` unbound
            # (NameError on the first iteration) or stale from a previous
            # iteration (silently diffing the wrong blobs).
            continue
        # Check for binary files -- can't show diffs for these
        if guess_is_binary(newblob) or \
                guess_is_binary(oldblob):
            file_changes.append({
                'is_binary': True,
                'old_filename': oldpath or '/dev/null',
                'new_filename': newpath or '/dev/null',
                'chunks': None
            })
            continue
        additions, deletions, chunks = render_diff(
            oldblob.splitlines(), newblob.splitlines())
        change = {
            'is_binary': False,
            'old_filename': oldpath or '/dev/null',
            'new_filename': newpath or '/dev/null',
            'chunks': chunks,
            'additions': additions,
            'deletions': deletions,
        }
        summary['nadditions'] += additions
        summary['ndeletions'] += deletions
        file_changes.append(change)
    return summary, file_changes
def make_context(self, *args):
    """Populate the template context for rendering a single blob."""
    super(BlobView, self).make_context(*args)
    ctx = self.context
    blob = ctx['blob_or_tree']
    filename = ctx['filename']
    if guess_is_binary(blob):
        # Binary data cannot be shown as highlighted text.
        ctx.update({
            'is_markup': False,
            'is_binary': True,
            'is_image': False,
        })
        if guess_is_image(filename):
            ctx['is_image'] = True
    else:
        # Render markup (e.g. README formats) unless the request carries
        # a `markup` query argument.
        want_markup = 'markup' not in request.args
        highlighted = pygmentize(
            force_unicode(blob.data),
            filename,
            want_markup
        )
        blob_size = sum(len(chunk) for chunk in blob.chunked)
        ctx.update({
            'too_large': blob_size > 100 * 1024,
            'is_markup': markup.can_render(filename),
            'render_markup': want_markup,
            'rendered_code': highlighted,
            'is_binary': False,
        })
def make_template_context(self, *args):
    """Set rendering flags; mark blobs we refuse to render inline."""
    super(BaseFileView, self).make_template_context(*args)
    ctx = self.context
    # Optimistic defaults -- overridden below when rendering is impossible.
    ctx["can_render"] = True
    ctx["is_binary"] = False
    ctx["too_large"] = False
    ctx["is_markup"] = False
    blob = ctx["blob_or_tree"]
    if guess_is_binary(blob):
        ctx["can_render"] = False
        ctx["is_binary"] = True
        ctx["is_image"] = guess_is_image(ctx["filename"])
    elif sum(len(chunk) for chunk in blob.chunked) > 100 * 1024:
        # 100 KiB cutoff for inline rendering.
        ctx["can_render"] = False
        ctx["too_large"] = True
def make_template_context(self, *args):
    """Set the flags the file templates use to decide how to render."""
    super(BaseFileView, self).make_template_context(*args)
    # Optimistic defaults; corrected below for binary/oversized blobs.
    self.context.update({
        'can_render': True,
        'is_binary': False,
        'too_large': False,
        'is_markup': False,
    })
    binary = guess_is_binary(self.context['blob_or_tree'])
    # 100 KiB cutoff for inline rendering.
    too_large = sum(map(len, self.context['blob_or_tree'].chunked)) > 100 * 1024
    if binary:
        self.context.update({
            'can_render': False,
            'is_binary': True,
            'is_image': guess_is_image(self.context['filename']),
        })
    elif too_large:
        self.context.update({
            'can_render': False,
            'too_large': True,
        })
def get_mimetype_and_encoding(self):
    """Return a (mimetype, encoding) pair for serving the blob raw.

    Binary blobs get a type guessed from the filename; text blobs are
    served as UTF-8 plain text.
    """
    if utils.guess_is_binary(self['blob'].chunked):
        mime, encoding = mimetypes.guess_type(self['filename'])
        if mime is None:
            # Fix: was misspelled as 'appliication/octet-stream', which is
            # not a valid MIME type.
            mime = 'application/octet-stream'
        return mime, encoding
    else:
        return 'text/plain', 'utf-8'
def commit_diff(self, commit):
    """Yield one change dict per file modified by `commit`.

    Diffs against the first parent, or the empty tree for root commits.
    """
    from klaus.utils import guess_is_binary, force_unicode
    if commit.parents:
        parent_tree = self[commit.parents[0]].tree
    else:
        # Root commit: diff against the empty tree.
        parent_tree = None
    changes = self.object_store.tree_changes(parent_tree, commit.tree)
    for (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) in changes:
        try:
            # Binary blobs get a stub entry; no textual diff is produced.
            if newsha and guess_is_binary(self[newsha]) or \
                    oldsha and guess_is_binary(self[oldsha]):
                yield {
                    'is_binary': True,
                    'old_filename': oldpath or '/dev/null',
                    'new_filename': newpath or '/dev/null',
                    'chunks': None
                }
                continue
        except KeyError:
            # newsha/oldsha are probably related to submodules.
            # Dulwich will handle that.
            pass
        stringio = StringIO.StringIO()
        dulwich.patch.write_object_diff(stringio, self.object_store,
                                        (oldpath, oldmode, oldsha),
                                        (newpath, newmode, newsha))
        files = prepare_udiff(force_unicode(stringio.getvalue()),
                              want_header=False)
        if not files:
            # the diff module doesn't handle deletions/additions
            # of empty files correctly.
            yield {
                'old_filename': oldpath or '/dev/null',
                'new_filename': newpath or '/dev/null',
                'chunks': []
            }
        else:
            yield files[0]
def make_template_context(self, *args):
    """Decide whether the blob can be rendered inline and record why not."""
    super(BaseFileView, self).make_template_context(*args)
    blob = self.context["blob_or_tree"]
    total_size = sum(len(part) for part in blob.chunked)
    # Start from "renderable" and downgrade as needed.
    flags = {"can_render": True, "is_binary": False, "too_large": False, "is_markup": False}
    if guess_is_binary(blob):
        flags.update(can_render=False, is_binary=True,
                     is_image=guess_is_image(self.context["filename"]))
    elif total_size > 100 * 1024:
        # 100 KiB cutoff for inline rendering.
        flags.update(can_render=False, too_large=True)
    self.context.update(flags)
def make_context(self, *args):
    """Fill the context with everything the blob template needs."""
    super(BlobView, self).make_context(*args)
    blob = self.context['blob']
    filename = self.context['filename']
    # Render markup unless the request carries a `markup` query argument.
    markup_wanted = 'markup' not in request.args
    highlighted = pygmentize(force_unicode(blob.data), filename, markup_wanted)
    blob_size = sum(len(chunk) for chunk in blob.chunked)
    self.context.update({
        'too_large': blob_size > 100 * 1024,
        'is_markup': markup.can_render(filename),
        'render_markup': markup_wanted,
        'rendered_code': highlighted,
        'is_binary': guess_is_binary(blob),
        'is_image': guess_is_image(filename),
    })
def make_template_context(self, *args):
    """Prepare the blob-display context; 404 when the path is not a blob."""
    super(BlobView, self).make_template_context(*args)
    if not isinstance(self.context['blob_or_tree'], Blob):
        raise NotFound("Not a blob")
    binary = guess_is_binary(self.context['blob_or_tree'])
    # 100 KiB cutoff for inline highlighting.
    too_large = sum(map(len, self.context['blob_or_tree'].chunked)) > 100 * 1024
    if binary:
        self.context.update({
            'is_markup': False,
            'is_binary': True,
            'is_image': False,
        })
        if guess_is_image(self.context['filename']):
            self.context.update({
                'is_image': True,
            })
    elif too_large:
        self.context.update({
            'too_large': True,
            'is_markup': False,
            'is_binary': False,
        })
    else:
        # Markup is rendered unless a `markup` query argument is present.
        render_markup = 'markup' not in request.args
        rendered_code = pygmentize(
            force_unicode(self.context['blob_or_tree'].data),
            self.context['filename'],
            render_markup)
        self.context.update({
            'too_large': False,
            'is_markup': markup.can_render(self.context['filename']),
            'render_markup': render_markup,
            'rendered_code': rendered_code,
            'is_binary': False,
        })
def make_context(self, *args):
    """Build the blob view context; redirect to history if the path is not a blob."""
    super(BlobView, self).make_context(*args)
    # Fix: compare against None with `is`, not `==` (PEP 8) -- `==` can be
    # overridden by the operand's class and is not the identity test intended.
    if self.context['blob'] is None or not hasattr(self.context['blob'], 'chunked'):
        # Not a usable blob -- send the user to the history view instead.
        raise RedirectException(url_for('history', repo=self.context['repo'].name,
                                        commit_id='master', path=self.context['path']))
    # Markup is rendered unless a `markup` query argument is present.
    render_markup = 'markup' not in request.args
    rendered_code = pygmentize(
        force_unicode(self.context['blob'].data),
        self.context['filename'],
        render_markup
    )
    self.context.update({
        'too_large': sum(map(len, self.context['blob'].chunked)) > 100*1024,
        'is_markup': markup.can_render(self.context['filename']),
        'render_markup': render_markup,
        'rendered_code': rendered_code,
        'is_binary': guess_is_binary(self.context['blob']),
        'is_image': guess_is_image(self.context['filename']),
    })
def get_context_data(self, **ctx):
    """Build the blob template context; raise when the path is not a blob."""
    context = super(BlobView, self).get_context_data(**ctx)
    if not isinstance(context['blob_or_tree'], Blob):
        raise RepoException("Not a blob")
    binary = guess_is_binary(context['blob_or_tree'])
    # 100 KiB cutoff for inline highlighting.
    too_large = sum(map(len, context['blob_or_tree'].chunked)) > 100 * 1024
    if binary:
        context.update({
            'is_markup': False,
            'is_binary': True,
            'is_image': False,
        })
        if guess_is_image(context['filename']):
            context.update({
                'is_image': True,
            })
    elif too_large:
        context.update({
            'too_large': True,
            'is_markup': False,
            'is_binary': False,
        })
    else:
        # Markup is rendered unless the request passes a `markup` parameter.
        render_markup = 'markup' not in self.request.GET
        rendered_code = pygmentize(
            force_unicode(context['blob_or_tree'].data),
            context['filename'],
            render_markup
        )
        context.update({
            'too_large': False,
            'is_markup': markup.can_render(context['filename']),
            'render_markup': render_markup,
            'rendered_code': rendered_code,
            'is_binary': False,
        })
    return context
def make_template_context(self, *args):
    """Prepare context for showing a blob; reject non-blob paths with 404."""
    super(BlobView, self).make_template_context(*args)
    ctx = self.context
    blob = ctx['blob_or_tree']
    if not isinstance(blob, Blob):
        raise NotFound("Not a blob")
    filename = ctx['filename']
    if guess_is_binary(blob):
        # No textual rendering possible; flag images so the template can
        # show them inline instead.
        ctx.update({
            'is_markup': False,
            'is_binary': True,
            'is_image': False,
        })
        if guess_is_image(filename):
            ctx['is_image'] = True
        return
    if sum(len(part) for part in blob.chunked) > 100 * 1024:
        # 100 KiB cutoff for inline highlighting.
        ctx.update({
            'too_large': True,
            'is_markup': False,
            'is_binary': False,
        })
        return
    # Markup is rendered unless a `markup` query argument is present.
    do_render_markup = 'markup' not in request.args
    code = pygmentize(
        force_unicode(blob.data),
        filename,
        do_render_markup
    )
    ctx.update({
        'too_large': False,
        'is_markup': markup.can_render(filename),
        'render_markup': do_render_markup,
        'rendered_code': code,
        'is_binary': False,
    })
def make_template_context(self, *args):
    """Classify the blob and record whether it can be shown inline."""
    super(BaseFileView, self).make_template_context(*args)
    context = self.context
    # Assume renderable until proven otherwise.
    context.update({
        'can_render': True,
        'is_binary': False,
        'too_large': False,
        'is_markup': False,
    })
    blob = context['blob_or_tree']
    nbytes = sum(len(piece) for piece in blob.chunked)
    if guess_is_binary(blob):
        context['can_render'] = False
        context['is_binary'] = True
        context['is_image'] = guess_is_image(context['filename'])
    elif nbytes > 100 * 1024:
        # 100 KiB cutoff for inline rendering.
        context['can_render'] = False
        context['too_large'] = True
def get_context_data(self, **ctx):
    """Build the blob template context; raise when the path is not a blob."""
    context = super(BlobView, self).get_context_data(**ctx)
    if not isinstance(context['blob_or_tree'], Blob):
        raise RepoException("Not a blob")
    binary = guess_is_binary(context['blob_or_tree'])
    # 100 KiB cutoff for inline highlighting.
    too_large = sum(map(len, context['blob_or_tree'].chunked)) > 100 * 1024
    if binary:
        context.update({
            'is_markup': False,
            'is_binary': True,
            'is_image': False,
        })
        if guess_is_image(context['filename']):
            context.update({
                'is_image': True,
            })
    elif too_large:
        context.update({
            'too_large': True,
            'is_markup': False,
            'is_binary': False,
        })
    else:
        # Markup is rendered unless the request passes a `markup` parameter.
        render_markup = 'markup' not in self.request.GET
        rendered_code = pygmentize(
            force_unicode(context['blob_or_tree'].data),
            context['filename'],
            render_markup)
        context.update({
            'too_large': False,
            'is_markup': markup.can_render(context['filename']),
            'render_markup': render_markup,
            'rendered_code': rendered_code,
            'is_binary': False,
        })
    return context
def make_template_context(self, *args):
    """Prepare the blame-view context; 404 when the path is not a blob."""
    super(BlameView, self).make_template_context(*args)
    if not isinstance(self.context['blob_or_tree'], Blob):
        raise NotFound("Not a blob")
    binary = guess_is_binary(self.context['blob_or_tree'])
    # 100 KiB cutoff for inline highlighting.
    too_large = sum(map(len, self.context['blob_or_tree'].chunked)) > 100*1024
    if binary:
        self.context.update({
            'is_markup': False,
            'is_binary': True,
            'is_image': False,
        })
        if guess_is_image(self.context['filename']):
            self.context.update({
                'is_image': True,
            })
    elif too_large:
        self.context.update({
            'too_large': True,
            'is_markup': False,
            'is_binary': False,
        })
    else:
        self.context.update({
            'too_large': False,
            'is_markup': markup.can_render(self.context['filename']),
            'is_binary': False,
            # Blame output is highlighted as plain source (no markup
            # rendering) and without line numbers.
            'rendered_code': pygmentize(
                force_unicode(self.context['blob_or_tree'].data),
                self.context['filename'],
                render_markup=False,
                linenos=False),
            # One author entry per blame hunk for the current commit/path.
            'authors': list(self.context["repo"].blame(self.context["commit"],
                                                       self.context["path"]))
        })