def report(self, morfs):
    """Render an HTML report covering `morfs`.

    `morfs` is a list of modules or file names.

    Returns the total percentage covered, or 0 when there were no
    statements at all.
    """
    assert self.config.html_dir, "must give a directory for html reporting"

    # Pull in the status data left behind by the previous run.
    self.status.read(self.config.html_dir)

    # If the configuration changed since last time, the incremental data
    # is stale: throw it away and record the new settings digest.
    hasher = Hasher()
    hasher.update(self.config)
    settings_digest = hasher.hexdigest()
    if settings_digest != self.status.settings_hash():
        self.status.reset()
        self.status.set_settings_hash(settings_digest)

    # Remember the basename of any user-supplied CSS so it can be copied.
    if self.config.extra_css:
        self.extra_css = os.path.basename(self.config.extra_css)

    # Generate one page per measured file.
    self.report_files(self.html_file, morfs, self.config.html_dir)

    if not self.files:
        raise CoverageException("No data to report.")

    # Finish with the index page and the static support files.
    self.index_file()
    self.make_local_static_report_files()
    return self.totals.n_statements and self.totals.pc_covered
def report(self, morfs):
    """Generate an HTML report for `morfs`.

    `morfs` is a list of modules or filenames.

    Returns the total percentage covered.
    """
    assert self.config.html_dir, "must give a directory for html reporting"

    # Load whatever status information survives from the last run.
    self.status.read(self.config.html_dir)

    # Incremental reporting is only valid if the settings are unchanged;
    # otherwise start from scratch under the new settings digest.
    digester = Hasher()
    digester.update(self.config)
    current_settings = digester.hexdigest()
    if self.status.settings_hash() != current_settings:
        self.status.reset()
        self.status.set_settings_hash(current_settings)

    # The user may have extra CSS they want copied alongside the report.
    if self.config.extra_css:
        self.extra_css = os.path.basename(self.config.extra_css)

    # Write one HTML page for each measured file.
    self.report_files(self.html_file, morfs, self.config.html_dir)

    if not self.files:
        raise CoverageException("No data to report.")

    # Emit the index and the shared static assets.
    self.index_file()
    self.make_local_static_report_files()
    return self.totals.pc_covered
def report(self, morfs, config=None):
    """Generate an HTML report for `morfs`.

    `morfs` is a list of modules or filenames.  `config` is a
    CoverageConfig instance.

    NOTE(review): the `config=None` default would fail on the attribute
    access below; callers appear to always pass a config — confirm.
    """
    assert config.html_dir, "must provide a directory for html reporting"

    # Bring in the status data saved by a previous report.
    self.status.read(config.html_dir)

    # A settings change invalidates the incremental status data.
    hasher = Hasher()
    hasher.update(config)
    settings = hasher.digest()
    if settings != self.status.settings_hash():
        self.status.reset()
        self.status.set_settings_hash(settings)

    # Produce a page for every file.
    self.report_files(self.html_file, morfs, config, config.html_dir)

    if not self.files:
        raise CoverageException("No data to report.")

    # Finally, the index page and the local static support files.
    self.index_file()
    self.make_local_static_report_files()
def report(self, morfs, config=None):
    """Generate an HTML report for `morfs`.

    `morfs` is a list of modules or filenames.  `config` is a
    CoverageConfig instance.
    """
    assert config.html_dir, "must provide a directory for html reporting"

    # Read the status data from the previous run.
    self.status.read(config.html_dir)

    # If the settings changed since last time, discard the stale
    # incremental data and remember the new settings digest.
    hasher = Hasher()
    hasher.update(config)
    settings = hasher.digest()
    if settings != self.status.settings_hash():
        self.status.reset()
        self.status.set_settings_hash(settings)

    # One HTML page per measured file.
    self.report_files(self.html_file, morfs, config, config.html_dir)

    if not self.files:
        raise CoverageException("No data to report.")

    # The index page ties the per-file pages together.
    self.index_file()

    # Copy the once-per-directory static assets next to the report.
    for static in self.STATIC_FILES:
        source_path = data_filename("htmlfiles/" + static)
        shutil.copyfile(source_path, os.path.join(self.directory, static))
def report(self, morfs, config=None):
    """Generate an HTML report for `morfs`.

    `morfs` is a list of modules or filenames.  `config` is a
    CoverageConfig instance.
    """
    assert config.html_dir, "must provide a directory for html reporting"

    # Restore status information from any previous report.
    self.status.read(config.html_dir)

    # Changed settings invalidate the saved incremental state.
    hasher = Hasher()
    hasher.update(config)
    settings = hasher.digest()
    if self.status.settings_hash() != settings:
        self.status.reset()
        self.status.set_settings_hash(settings)

    # Write a page for each file.
    self.report_files(self.html_file, morfs, config, config.html_dir)

    if not self.files:
        raise CoverageException("No data to report.")

    # Then the index page.
    self.index_file()

    # Install the once-per-directory static files beside the report.
    for static in self.STATIC_FILES:
        shutil.copyfile(
            data_filename("htmlfiles/" + static),
            os.path.join(self.directory, static),
        )
def check_global_data(self, *data):
    """Check the global data that can affect incremental reporting."""
    hasher = Hasher()
    for item in data:
        hasher.update(item)
    digest = hasher.hexdigest()
    # A changed digest means the saved per-file data can't be trusted.
    if digest != self.globals:
        self.reset()
        self.globals = digest
def report(self, morfs):
    """Produce the HTML report for `morfs`; return the percent covered."""
    # Status data from the previous run, if any.
    self.status.read(self.config.html_dir)

    # Reset the saved status if the settings have changed.
    hasher = Hasher()
    hasher.update(self.config)
    settings = hasher.digest()
    if settings != self.status.settings_hash():
        self.status.reset()
        self.status.set_settings_hash(settings)

    # Note any user-provided extra CSS file.
    if self.config.extra_css:
        self.extra_css = os.path.basename(self.config.extra_css)

    # Per-file pages first, then the index and the static assets.
    self.report_files(self.html_file, morfs, self.config.html_dir)
    if not self.files:
        raise CoverageException('No data to report.')
    self.index_file()
    self.make_local_static_report_files()
    return self.totals.pc_covered
def can_skip_file(self, data, fr, rootname):
    """Can we skip reporting this file?

    `data` is a CoverageData object, `fr` is a `FileReporter`, and
    `rootname` is the name being used for the file.
    """
    # Hash the source text together with its coverage data.
    hasher = Hasher()
    hasher.update(fr.source().encode('utf-8'))
    add_data_to_hash(data, fr.filename, hasher)
    this_hash = hasher.hexdigest()

    if this_hash == self.file_hash(rootname):
        # Nothing has changed to require the file to be reported again.
        return True

    # Record the new hash so an unchanged file can be skipped next time.
    self.set_file_hash(rootname, this_hash)
    return False
def file_hash(self, source, fr):
    """Compute a hash that changes if the file needs to be re-reported."""
    hasher = Hasher()
    hasher.update(source)
    # Fold in the coverage data for this file as well as its source.
    self.coverage.data.add_to_hash(fr.filename, hasher)
    return hasher.hexdigest()
def test_unicode_hashing(self):
    # Distinct unicode inputs must produce distinct digests.
    one = Hasher()
    one.update(u"Hello, world! \N{SNOWMAN}")
    two = Hasher()
    two.update(u"Goodbye!")
    self.assertNotEqual(one.hexdigest(), two.hexdigest())
def test_dict_hashing(self):
    # Key order must not affect a dict's digest.
    first = Hasher()
    first.update({'a': 17, 'b': 23})
    second = Hasher()
    second.update({'b': 23, 'a': 17})
    self.assertEqual(first.hexdigest(), second.hexdigest())
def test_dict_collision(self):
    # Nesting structure must be part of the digest, not just the leaves.
    nested = Hasher()
    nested.update({'a': 17, 'b': {'c': 1, 'd': 2}})
    flat = Hasher()
    flat.update({'a': 17, 'b': {'c': 1}, 'd': 2})
    self.assertNotEqual(nested.hexdigest(), flat.hexdigest())
def test_bytes_hashing(self):
    # Different byte strings must produce different digests.
    one = Hasher()
    one.update(b"Hello, world!")
    two = Hasher()
    two.update(b"Goodbye!")
    self.assertNotEqual(one.hexdigest(), two.hexdigest())
def test_dict_collision(self):
    # A nested dict must not collide with a flattened near-equivalent.
    nested = Hasher()
    nested.update({'a': 17, 'b': {'c': 1, 'd': 2}})
    flat = Hasher()
    flat.update({'a': 17, 'b': {'c': 1}, 'd': 2})
    assert nested.hexdigest() != flat.hexdigest()
def file_hash(self, source, cu):
    """Hash `source` together with the coverage data for `cu`."""
    hasher = Hasher()
    hasher.update(source)
    self.coverage.data.add_to_hash(cu.filename, hasher)
    return hasher.digest()
def test_dict_hashing(self):
    # Dicts with the same items hash identically regardless of order.
    first = Hasher()
    first.update({'a': 17, 'b': 23})
    second = Hasher()
    second.update({'b': 23, 'a': 17})
    assert first.hexdigest() == second.hexdigest()
def test_bytes_hashing(self):
    # Distinct byte strings yield distinct digests.
    one = Hasher()
    one.update(b"Hello, world!")
    two = Hasher()
    two.update(b"Goodbye!")
    assert one.hexdigest() != two.hexdigest()
def test_string_hashing(self):
    # Equal strings hash equal; different strings hash different.
    one = Hasher()
    one.update("Hello, world!")
    two = Hasher()
    two.update("Goodbye!")
    three = Hasher()
    three.update("Hello, world!")
    assert one.hexdigest() != two.hexdigest()
    assert one.hexdigest() == three.hexdigest()
def test_string_hashing(self):
    # Hashing is deterministic and input-sensitive for plain strings.
    one = Hasher()
    one.update("Hello, world!")
    two = Hasher()
    two.update("Goodbye!")
    three = Hasher()
    three.update("Hello, world!")
    self.assertNotEqual(one.hexdigest(), two.hexdigest())
    self.assertEqual(one.hexdigest(), three.hexdigest())
def test_unicode_hashing(self):
    # Non-ASCII text hashes without error and distinguishes inputs.
    one = Hasher()
    one.update("Hello, world! \N{SNOWMAN}")
    two = Hasher()
    two.update("Goodbye!")
    assert one.hexdigest() != two.hexdigest()