def generate(patch):
    '''
    This function generates ...
    '''
    mapping = {}

    download_mapping()

    paths = []
    for diff in whatthepatch.parse_patch(patch.decode('utf-8')):
        # Get old and new path, for files that have been renamed.
        path = diff.header.new_path[2:] if diff.header.new_path.startswith(
            'b/') else diff.header.new_path

        # If the diff doesn't contain any changes, we skip it.
        if diff.changes is None:
            continue

        # If the file is not a source file, we skip it (as we already know
        # we have no coverage information for it).
        if not coverage_supported(path):
            continue

        paths.append(path)

    with sqlite3.connect('chunk_mapping.db') as conn:
        c = conn.cursor()
        for path in paths:
            c.execute('SELECT chunk FROM files WHERE path=?', (path.encode('utf-8'),))
            mapping[path] = c.fetchall()

    return mapping
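# A minimal usage sketch (an assumption, not part of the original snippet): generate()
# expects the raw bytes of a patch, and relies on download_mapping() having fetched the
# chunk_mapping.db file. The URL below is only illustrative of where a patch could come from.
import requests

patch = requests.get('https://hg.mozilla.org/mozilla-central/raw-rev/tip').content
chunk_mapping = generate(patch)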
async def retrieve_coverage(path):
    # If the file is not a source file, we skip it (as we already know
    # we have no coverage information for it).
    if not coverage_supported(path):
        return None

    # Retrieve annotate data.
    async with aiohttp.request(
            'GET',
            'https://hg.mozilla.org/mozilla-central/json-annotate/{}/{}'.format(build_changeset, path)) as r:
        annotate_future = r.json()

        # Retrieve coverage data.
        coverage_future = coverage_service.get_file_coverage(
            build_changeset, path)

        # Use hg annotate to report lines in their correct positions and to avoid
        # reporting lines that have been modified by a successive patch in the same push.
        data = await annotate_future

    if 'not found in manifest' in data:
        # The file was removed.
        return None

    annotate = data['annotate']

    coverage = await coverage_future

    # If we don't have coverage for this file, we skip it.
    if coverage is None:
        return None

    changes = []
    for data in annotate:
        # Skip lines that were not added by this changeset or were overwritten by
        # another changeset.
        if data['node'][:len(changeset)] != changeset:
            continue

        new_line = data['lineno']

        if new_line not in coverage or coverage[new_line] is None:
            # We have no coverage information for this line (e.g. a definition, like
            # a variable in a header file).
            covered = '?'
        elif coverage[new_line] > 0:
            covered = 'Y'
        else:
            covered = 'N'

        changes.append({
            'coverage': covered,
            'line': data['targetline'],
        })

    return {
        'name': path,
        'changes': changes,
    }
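# A minimal sketch (an assumption, not part of the original snippet) of how the async
# retrieve_coverage() can be fanned out over all the paths touched by a patch, so the
# annotate and coverage requests for different files overlap. The wrapper name and the
# example path are hypothetical.
import asyncio

async def retrieve_all(paths):
    # One task per file; files that were skipped (None) are filtered out of the report.
    results = await asyncio.gather(*(retrieve_coverage(path) for path in paths))
    return [result for result in results if result is not None]

# files = asyncio.get_event_loop().run_until_complete(retrieve_all(['dom/base/nsDocument.cpp']))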
def parse_diff(diff):
    # Get old and new path, for files that have been renamed.
    new_path = diff.header.new_path[2:] if diff.header.new_path.startswith('b/') else diff.header.new_path

    # If the diff doesn't contain any changes, we skip it.
    if diff.changes is None:
        return None

    # If the file is not a source file, we skip it (as we already know
    # we have no coverage information for it).
    if not coverage_supported(new_path):
        return None

    # Retrieve coverage of added lines.
    coverage = coverage_service.get_file_coverage(build_changeset, new_path)

    # If we don't have coverage for this file, we skip it.
    if coverage is None:
        return None

    # Use hg annotate to report lines in their correct positions and to avoid
    # reporting lines that have been modified by a successive patch in the same push.
    r = requests.get('https://hg.mozilla.org/mozilla-central/json-annotate/%s/%s' % (build_changeset, new_path))
    annotate = r.json()['annotate']

    changes = []
    for data in annotate:
        # Skip lines that were not added by this changeset or were overwritten by
        # another changeset.
        if data['node'][:len(changeset)] != changeset:
            continue

        new_line = data['lineno']

        if new_line not in coverage or coverage[new_line] is None:
            # We have no coverage information for this line (e.g. a definition, like
            # a variable in a header file).
            covered = '?'
        elif coverage[new_line] > 0:
            covered = 'Y'
        else:
            covered = 'N'

        changes.append({
            'coverage': covered,
            'line': data['targetline'],
        })

    return {
        'name': new_path,
        'changes': changes,
    }
def generate(changeset, path):
    '''
    This function generates a report containing the coverage information
    for a given file at a given revision.
    '''
    # If the file is not a source file, we can return early (as we already know
    # we have no coverage information for it).
    if not coverage_supported(path):
        return {}

    _, build_changeset, _ = get_coverage_build(changeset)

    coverage = coverage_service.get_file_coverage(build_changeset, path)

    return coverage if coverage is not None else {}
def parse_diff(diff):
    # Get old and new path, for files that have been renamed.
    new_path = diff.header.new_path[2:] if diff.header.new_path.startswith('b/') else diff.header.new_path

    # If the diff doesn't contain any changes, we skip it.
    if diff.changes is None:
        return None

    # If the file is not a source file, we skip it (as we already know
    # we have no coverage information for it).
    if not coverage_supported(new_path):
        return None

    # Retrieve coverage of added lines.
    coverage = coverage_service.get_file_coverage(build_changeset, new_path)

    # If we don't have coverage for this file, we skip it.
    if coverage is None:
        return None

    changes = []
    for old_line, new_line, _ in diff.changes:
        # Only consider added lines.
        if old_line is not None or new_line is None:
            continue

        if new_line not in coverage or coverage[new_line] is None:
            # We have no coverage information for this line (e.g. a definition, like
            # a variable in a header file).
            covered = '?'
        elif coverage[new_line] > 0:
            covered = 'Y'
        else:
            covered = 'N'

        changes.append({
            'coverage': covered,
            'line': new_line,
        })

    return {
        'name': new_path,
        'changes': changes,
    }
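# A minimal sketch (an assumption, not part of the original snippet) of how parse_diff()
# can be mapped over every diff in a patch, mirroring the whatthepatch loop used in the
# chunk-mapping generate() above. The wrapper name is hypothetical.
import whatthepatch

def generate_report(patch):
    diffs = whatthepatch.parse_patch(patch.decode('utf-8'))
    return [f for f in (parse_diff(diff) for diff in diffs) if f is not None]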
async def generate(changeset, path):
    '''
    This function generates a report containing the coverage information
    for a given file at a given revision.
    '''
    # If the file is not a source file, we can return early (as we already know
    # we have no coverage information for it).
    if not coverage_supported(path):
        return {}

    _, build_changeset, _ = await get_coverage_build(changeset)

    coverage = await coverage_service.get_file_coverage(build_changeset, path)
    if coverage is None:
        return {}

    return {
        'git_build_changeset': await get_github_commit(changeset),
        'build_changeset': build_changeset,
        'data': coverage,
    }
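# A minimal sketch (an assumption, not part of the original snippet) of driving the async
# report generator from synchronous code; the changeset and path values are hypothetical.
import asyncio

report = asyncio.get_event_loop().run_until_complete(
    generate('abcdef012345', 'netwerk/base/nsNetUtil.cpp'))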