def test_bin_echo(self):
    """Parse a real binary with the un-mocked pyelftools logic.

    Uses ``sys.executable`` (the running Python interpreter) because it is
    guaranteed to exist and to be a dynamically linked ELF executable.
    """
    # NOTE(review): the name mentions bin_echo but the test actually
    # parses sys.executable — consider renaming.
    # Try parsing a file without the pyelftools logic mocked out
    elf_file = elf.ElfFile(path=sys.executable)
    self.assertThat(elf_file.path, Equals(sys.executable))
    # The arch attribute will be a tuple of three strings
    self.assertTrue(isinstance(elf_file.arch, tuple))
    self.assertThat(len(elf_file.arch), Equals(3))
    self.assertThat(elf_file.arch[0], StartsWith('ELFCLASS'))
    self.assertThat(elf_file.arch[1], StartsWith('ELFDATA'))
    self.assertThat(elf_file.arch[2], StartsWith('EM_'))
    # We expect Python to be a dynamic linked executable with an
    # ELF interpreter.
    self.assertTrue(isinstance(elf_file.interp, str))
    self.assertThat(elf_file.interp, NotEquals(''))
    # Python is not a shared library, so has no soname
    self.assertThat(elf_file.soname, Equals(''))
    # We expect that Python will be linked to libc
    for lib in elf_file.needed.values():
        if lib.name.startswith('libc.so'):
            break
    else:
        # for/else: only reached when no needed library matched libc.
        self.fail("Expected to find libc in needed library list")
    # `lib` is the libc entry found by the loop above.
    self.assertTrue(isinstance(lib.name, str))
    for version in lib.versions:
        self.assertTrue(
            isinstance(version, str),
            "expected {!r} to be a string".format(version))
def test_render(self):
    """Rendering a PXE config for an install produces a valid PXELINUX file."""
    # Given the right configuration options, the PXE configuration is
    # correctly rendered.
    params = make_kernel_parameters(purpose="install")
    output = render_pxe_config(kernel_params=params)
    # The output is always a Unicode string.  (`unicode` here means this
    # module targets Python 2.)
    self.assertThat(output, IsInstance(unicode))
    # The template has rendered without error. PXELINUX configurations
    # typically start with a DEFAULT line.
    self.assertThat(output, StartsWith("DEFAULT "))
    # The PXE parameters are all set according to the options.
    image_dir = compose_image_path(
        arch=params.arch, subarch=params.subarch,
        release=params.release, purpose=params.purpose)
    # re.DOTALL lets the leading `.*` span lines; re.MULTILINE anchors
    # `^`/`$` per line so each directive is matched on its own line.
    self.assertThat(
        output, MatchesAll(
            MatchesRegex(
                r'.*^\s+KERNEL %s/linux$' % re.escape(image_dir),
                re.MULTILINE | re.DOTALL),
            MatchesRegex(
                r'.*^\s+INITRD %s/initrd[.]gz$' % re.escape(image_dir),
                re.MULTILINE | re.DOTALL),
            MatchesRegex(
                r'.*^\s+APPEND .+?$',
                re.MULTILINE | re.DOTALL)))
def test_describe_returns_json(self):
    """The `describe` view responds with a JSON document."""
    response = self.client.get(reverse('describe'))
    # response.content appears twice on purpose: the tuple must be the
    # same length as the matcher tuple below, and the content is checked
    # both for a JSON-object prefix and for containing 'name'.
    self.assertThat(
        (response.status_code,
         response['Content-Type'],
         response.content,
         response.content),
        MatchesListwise(
            (Equals(httplib.OK),
             Equals("application/json"),
             StartsWith(b'{'),
             Contains('name'))),
        response)
def test_classic_confinement_patchelf_disabled(self):
    """With the no-patchelf build attribute, the binary keeps the host
    interpreter instead of being re-pointed at the core snap.
    """
    if os.environ.get('ADT_TEST') and self.deb_arch == 'armhf':
        self.skipTest("The autopkgtest armhf runners can't install snaps")
    project_dir = 'classic-build'
    # Now we set the required environment variable
    self.useFixture(
        fixtures.EnvironmentVariable('SNAPCRAFT_SETUP_CORE', '1'))
    self.copy_project_to_cwd(project_dir)
    # Create a new snapcraft.yaml
    snapcraft_yaml = fixture_setup.SnapcraftYaml(
        self.path, confinement='classic')
    snapcraft_yaml.update_part('hello', {
        'source': '.',
        'plugin': 'make',
        'build-attributes': ['no-patchelf']
    })
    self.useFixture(snapcraft_yaml)
    self.run_snapcraft('prime')
    bin_path = os.path.join(self.prime_dir, 'bin', 'hello-classic')
    self.assertThat(bin_path, FileExists())
    # A system interpreter path (e.g. /lib/...) means patchelf did not
    # rewrite the binary.
    interpreter = subprocess.check_output(
        [self.patchelf_command, '--print-interpreter', bin_path]).decode()
    self.assertThat(interpreter, StartsWith('/lib'))
def test_load_config(self):
    """
    Correctly loads existing configuration: after parsing options against
    a real on-disk config, the command exposes a config database, a
    client, no parent, and a usable str() form.
    """
    basedir = FilePath(self.mktemp())
    # The endpoint strings ("tcp:-1", "tcp:127.0.0.1:-1") are presumably
    # dummy values never actually listened on — TODO confirm against
    # create_global_configuration.
    create_global_configuration(
        basedir, "tcp:-1", FilePath("/dev/null"), "tcp:127.0.0.1:-1")
    options = MagicFolderApiCommand()
    options.parseOptions([
        "--config", basedir.path,
        "add-snapshot",
        "--file", "foo",
        "--folder", "asdf",
    ])
    self.assertThat(
        options.config,
        IsInstance(GlobalConfigDatabase),
    )
    self.assertThat(
        options.client,
        IsInstance(MagicFolderClient),
    )
    self.assertThat(
        options.parent,
        Equals(None),
    )
    self.assertThat(
        str(options),
        StartsWith("Usage: magic-folder-api"),
    )
def assertRemoved(self, account_id, person_id):
    """Assert that the account/person pair has been properly anonymised.

    :param account_id: id of the Account expected to be closed.
    :param person_id: id of the Person expected to be anonymised.
    """
    # The Account row still exists, but has been anonymised, leaving
    # only a minimal audit trail.
    account = getUtility(IAccountSet).get(account_id)
    self.assertEqual('Removed by request', account.displayname)
    self.assertEqual(AccountStatus.CLOSED, account.status)
    self.assertIn('Closed using close-account.', account.status_history)
    # The Person row still exists to maintain links with information
    # that won't be removed, such as bug comments, but has been
    # anonymised.
    person = getUtility(IPersonSet).get(person_id)
    self.assertThat(person.name, StartsWith('removed'))
    self.assertEqual('Removed by request', person.display_name)
    self.assertEqual(account, person.account)
    # The corresponding PersonSettings row has been reset to the
    # defaults.
    self.assertFalse(person.selfgenerated_bugnotifications)
    self.assertFalse(person.expanded_notification_footers)
    self.assertFalse(person.require_strong_email_authentication)
    # EmailAddress and OpenIdIdentifier rows have been removed.
    self.assertEqual([], list(
        getUtility(IEmailAddressSet).getByPerson(person)))
    self.assertEqual([], list(account.openid_identifiers))
def test_encode_multipart_data_multiple_params(self):
    """Repeated parameter/file names survive a multipart round trip."""
    # Sequences of parameters and files can be passed to
    # encode_multipart_data() so that multiple parameters/files with the
    # same name can be provided.
    params_in = [
        ("one", "ABC"),
        ("one", "XYZ"),
        ("two", "DEF"),
        ("two", "UVW"),
    ]
    files_in = [
        ("f-one", BytesIO(urandom(32))),
        ("f-two", BytesIO(urandom(32))),
    ]
    body, headers = encode_multipart_data(params_in, files_in)
    self.assertEqual("%s" % len(body), headers["Content-Length"])
    self.assertThat(
        headers["Content-Type"],
        StartsWith("multipart/form-data; boundary="))
    # Round-trip through Django's multipart code.
    params_out, files_out = (parse_headers_and_body_with_django(
        headers, body))
    params_out_expected = MultiValueDict()
    for name, value in params_in:
        params_out_expected.appendlist(name, value)
    self.assertEqual(params_out_expected, params_out, ahem_django_ahem)
    self.assertSetEqual({"f-one", "f-two"}, set(files_out))
    # getvalue() works even though encoding consumed the BytesIO streams.
    files_expected = {name: buf.getvalue() for name, buf in files_in}
    files_observed = {name: buf.read() for name, buf in files_out.items()}
    self.assertEqual(files_expected, files_observed, ahem_django_ahem)
def test_checksumming_tree_signed_options_tarball(self):
    """A tarball-option archive gets a GPG-signed SHA256SUMS, and the
    checksum/signature files are excluded from the signed tarball itself.
    """
    # Specifying no options should leave us with an open tree,
    # confirm it is checksummed. Supply an archive signing key
    # which should trigger signing of the checksum file.
    yield self.setUpArchiveKey()
    self.setUpUefiKeys()
    self.setUpKmodKeys()
    self.setUpOpalKeys()
    self.openArchive("test", "1.0", "amd64")
    # "tarball" in control/options asks for signed.tar.gz output.
    self.tarfile.add_file("1.0/control/options", b"tarball")
    self.tarfile.add_file("1.0/empty.efi", b"")
    self.tarfile.add_file("1.0/empty.ko", b"")
    self.tarfile.add_file("1.0/empty.opal", b"")
    self.process_emulate()
    sha256file = os.path.join(self.getSignedPath("test", "amd64"),
                              "1.0", "SHA256SUMS")
    self.assertTrue(os.path.exists(sha256file))
    # The detached signature must be ASCII-armoured GPG output.
    self.assertThat(
        sha256file + '.gpg',
        FileContains(
            matcher=StartsWith('-----BEGIN PGP SIGNATURE-----\n')))
    tarfilename = os.path.join(self.getSignedPath("test", "amd64"),
                               "1.0", "signed.tar.gz")
    with tarfile.open(tarfilename) as tarball:
        self.assertThat(
            tarball.getnames(), MatchesAll(*[
                Not(Contains(name)) for name in [
                    "1.0/SHA256SUMS", "1.0/SHA256SUMS.gpg",
                    "1.0/signed.tar.gz",
                ]
            ]))
def test_render_pxe_config_scenarios(self):
    """Commissioning-style configs branch on CPU width via ifcpu64.

    ``self.purpose`` is supplied by the scenario framework — one run per
    purpose value.
    """
    # The commissioning config uses an extra PXELINUX module to auto
    # select between i386 and amd64.
    get_ephemeral_name = self.patch(kernel_opts, "get_ephemeral_name")
    get_ephemeral_name.return_value = factory.make_name("ephemeral")
    options = {
        "kernel_params": make_kernel_parameters(purpose=self.purpose),
    }
    output = render_pxe_config(**options)
    config = parse_pxe_config(output)
    # The default section is defined.
    default_section_label = config.header["DEFAULT"]
    self.assertThat(config, Contains(default_section_label))
    default_section = config[default_section_label]
    # The default section uses the ifcpu64 module, branching to the "i386"
    # or "amd64" labels accordingly.
    self.assertEqual("ifcpu64.c32", default_section["KERNEL"])
    self.assertEqual(
        ["amd64", "--", "i386"],
        default_section["APPEND"].split())
    # Both "i386" and "amd64" sections exist.
    self.assertThat(config, ContainsAll(("i386", "amd64")))
    # Each section defines KERNEL, INITRD, and APPEND settings. The
    # KERNEL and INITRD ones contain paths referring to their
    # architectures.
    for section_label in ("i386", "amd64"):
        section = config[section_label]
        self.assertThat(
            section, ContainsAll(("KERNEL", "INITRD", "APPEND")))
        contains_arch_path = StartsWith("%s/" % section_label)
        self.assertThat(section["KERNEL"], contains_arch_path)
        self.assertThat(section["INITRD"], contains_arch_path)
        self.assertIn("APPEND", section)
def test_requestProxyToken(self):
    """Building args issues a well-formed proxy-token POST.

    A generator-based (inlineCallbacks-style) test: `yield` drives the
    deferred returned by extraBuildArgs, then the captured request is
    checked field by field.
    """
    branch = self.factory.makeBranch()
    job = self.makeJob(branch=branch)
    yield job.extraBuildArgs()
    self.assertThat(
        self.proxy_api.tokens.requests,
        MatchesListwise([
            MatchesDict({
                "method": Equals("POST"),
                # Only the path portion of the configured endpoint is
                # recorded on the fake proxy API.
                "uri": Equals(
                    urlsplit(
                        config.snappy.builder_proxy_auth_api_endpoint
                        ).path),
                # Header names/values are bytes on this request object.
                "headers": ContainsDict({
                    b"Authorization": MatchesListwise([
                        Equals(b"Basic " + base64.b64encode(
                            b"admin-launchpad.dev:admin-secret"))
                    ]),
                    b"Content-Type": MatchesListwise([
                        Equals(b"application/json; charset=UTF-8"),
                    ]),
                }),
                # The JSON body requests a username derived from the
                # build cookie plus a random suffix.
                "content": AfterPreprocessing(
                    json.loads, MatchesDict({
                        "username": StartsWith(
                            job.build.build_cookie + "-"),
                    })),
            }),
        ]))
def match(self, request):
    """Match an HTTP request against the configured expectations.

    Checks are applied in order — structure (url + kwargs), then the
    Authorization header (scheme and parameters), then a JSON body, then
    form data — and the first mismatch is returned immediately.  Returns
    None (implicitly) when everything matches, per the testtools
    matcher protocol.
    """
    mismatch = MatchesStructure(url=self.url, **self.kwargs).match(request)
    if mismatch is not None:
        return mismatch
    if self.auth is not None:
        mismatch = Contains("Authorization").match(request.headers)
        if mismatch is not None:
            return mismatch
        auth_value = request.headers["Authorization"]
        # self.auth is a (scheme, params-matcher) pair.
        auth_scheme, auth_params_matcher = self.auth
        mismatch = StartsWith(auth_scheme + " ").match(auth_value)
        if mismatch is not None:
            return mismatch
        # Strip "<scheme> " and parse the remainder as key=value params.
        mismatch = auth_params_matcher.match(
            parse_dict_header(auth_value[len(auth_scheme + " "):]))
        if mismatch is not None:
            return mismatch
    if self.json_data is not None:
        mismatch = Equals(self.json_data).match(json.loads(request.body))
        if mismatch is not None:
            return mismatch
    if self.form_data is not None:
        # The body may be a file-like object or raw bytes.
        if hasattr(request.body, "read"):
            body = request.body.read()
        else:
            body = request.body
        # Re-parse the multipart/form body with cgi.FieldStorage so the
        # form-data matcher can inspect individual fields.
        fs = FieldStorage(
            fp=io.BytesIO(body),
            environ={"REQUEST_METHOD": request.method},
            headers=request.headers)
        mismatch = MatchesDict(self.form_data).match(fs)
        if mismatch is not None:
            return mismatch
def test_get_reader(self):
    """UEFI boot config for xinstall renders with correct paths."""
    # Given the right configuration options, the UEFI configuration is
    # correctly rendered.
    method = UEFIAMD64BootMethod()
    params = make_kernel_parameters(purpose="xinstall")
    output = method.get_reader(backend=None, kernel_params=params)
    # The output is a BytesReader.
    self.assertThat(output, IsInstance(BytesReader))
    output = output.read(10000).decode("utf-8")
    # The template has rendered without error.  These GRUB-style UEFI
    # configurations start with a `set default` line.
    self.assertThat(output, StartsWith("set default=\"0\""))
    # The UEFI parameters are all set according to the options.
    image_dir = compose_image_path(
        osystem=params.osystem, arch=params.arch,
        subarch=params.subarch, release=params.release,
        label=params.label)
    self.assertThat(
        output, MatchesAll(
            # The kernel command line embeds a cloud-init datasource
            # override inside cc:...end_cc markers.
            MatchesRegex(
                r".*\s+lin.*cc:\\{\'datasource_list\':"
                r" \[\'MAAS\'\]\\}end_cc.*",
                re.MULTILINE | re.DOTALL),
            MatchesRegex(
                r'.*^\s+linux %s/%s .+?$' % (
                    re.escape(image_dir), params.kernel),
                re.MULTILINE | re.DOTALL),
            MatchesRegex(
                r'.*^\s+initrd %s/%s$' % (
                    re.escape(image_dir), params.initrd),
                re.MULTILINE | re.DOTALL)))
def test_error(self):
    """Storing an entry under an invalid key raises EnvironmentError
    whose message begins with the scenario's expected text."""
    cache = elf.SonameCache()
    error = self.assertRaises(
        EnvironmentError, cache.__setitem__, self.key, "/soname.so")
    self.assertThat(str(error), StartsWith(self.partial_message))
def test_renderRealRequest(self):
    """
    The request managed by L{WebSocketsResource.render} doesn't contain
    unnecessary HTTP headers like I{Content-Type}.
    """
    # Wire a real Request onto a string transport so we can inspect the
    # raw bytes the handshake writes.
    channel = DummyChannel()
    channel.transport = StringTransportWithDisconnection()
    channel.transport.protocol = channel
    request = Request(channel, False)
    # Minimal set of headers for a valid RFC 6455 client handshake.
    headers = {
        b"upgrade": b"Websocket",
        b"connection": b"Upgrade",
        b"sec-websocket-key": b"secure",
        b"sec-websocket-version": b"13"
    }
    for key, value in headers.items():
        request.requestHeaders.setRawHeaders(key, [value])
    request.method = b"GET"
    request.clientproto = b"HTTP/1.1"
    result = self.resource.render(request)
    self.assertEqual(NOT_DONE_YET, result)
    # Exactly these response headers and no others (e.g. no
    # Content-Type).
    self.assertEqual(
        [(b"Connection", [b"Upgrade"]),
         (b"Sec-Websocket-Accept", [b"oYBv54i42V5dw6KnZqOFroecUTc="]),
         (b"Upgrade", [b"WebSocket"])],
        sorted(request.responseHeaders.getAllRawHeaders()))
    self.assertThat(
        channel.transport.value(),
        StartsWith(b"HTTP/1.1 101 Switching Protocols\r\n"
                   b"Transfer-Encoding: chunked\r\n"))
    self.assertEqual(101, request.code)
    # The transport has been handed over to the WebSocket protocol.
    self.assertIdentical(None, request.transport)
def test_setup(self):
    """
    The fixture passes port and dbpath as extra arguments, and configures
    the output format to match mongodb's one.
    """
    self.reactor.process.data = b"\n".join(OUT)
    # Records MongoClient lifecycle events so we can assert connect and
    # close ordering at the end.
    client = []

    class FakeMongoClient(object):
        def __init__(self, endpoint):
            client.append(endpoint)

        def close(self):
            client.append("close")

    # Pin the allocated port so the expected arguments are deterministic.
    self.patch(self.fixture, "allocatePort", lambda: 666)
    self.patch(pymongo, "MongoClient", FakeMongoClient)
    self.fixture.setUp()
    executable, arg1, arg2, arg3 = self.reactor.process.args
    self.assertEqual(b"mongod", executable)
    self.assertEqual(b"--port=666", arg1)
    self.assertEqual(["mongodb://localhost:666"], client)
    # --dbpath points at a freshly created directory.
    self.assertThat(arg2, StartsWith(b"--dbpath="))
    self.assertThat(arg2.split(b"=")[1], DirExists())
    self.assertEqual(b"--nojournal", arg3)
    self.assertIn(
        "waiting for connections on port 666",
        self.logger.output.split("\n"))
    self.fixture.cleanUp()
    # cleanUp must close the client it opened during setUp.
    self.assertEqual(["mongodb://localhost:666", "close"], client)
def test_get_reader_install(self):
    """PXE boot config for xinstall serves kernel/initrd over HTTP."""
    # Given the right configuration options, the PXE configuration is
    # correctly rendered.
    method = PXEBootMethod()
    params = make_kernel_parameters(self, purpose="xinstall")
    # Images are fetched from the rack controller's HTTP image service.
    fs_host = 'http://%s:5248/images' % (convert_host_to_uri_str(
        params.fs_host))
    output = method.get_reader(backend=None, kernel_params=params)
    # The output is a BytesReader.
    self.assertThat(output, IsInstance(BytesReader))
    output = output.read(10000).decode("utf-8")
    # The template has rendered without error. PXELINUX configurations
    # typically start with a DEFAULT line.
    self.assertThat(output, StartsWith("DEFAULT "))
    # The PXE parameters are all set according to the options.
    image_dir = compose_image_path(
        osystem=params.osystem, arch=params.arch,
        subarch=params.subarch, release=params.release,
        label=params.label)
    self.assertThat(
        output, MatchesAll(
            MatchesRegex(
                r'.*^\s+KERNEL %s/%s/%s$' % (
                    re.escape(fs_host), re.escape(image_dir),
                    params.kernel),
                re.MULTILINE | re.DOTALL),
            MatchesRegex(
                r'.*^\s+INITRD %s/%s/%s$' % (
                    re.escape(fs_host), re.escape(image_dir),
                    params.initrd),
                re.MULTILINE | re.DOTALL),
            MatchesRegex(
                r'.*^\s+APPEND .+?$', re.MULTILINE | re.DOTALL)))
def test_get_reader_scenarios(self):
    """The default PXE section points at the scenario's arch path.

    ``self.purpose`` is supplied by the scenario framework — one run per
    purpose value.
    """
    method = PXEBootMethod()
    get_ephemeral_name = self.patch(kernel_opts, "get_ephemeral_name")
    get_ephemeral_name.return_value = factory.make_name("ephemeral")
    osystem = factory.make_name('osystem')
    arch = factory.make_name('arch')
    subarch = factory.make_name('subarch')
    options = {
        "backend": None,
        "kernel_params": make_kernel_parameters(
            testcase=self, osystem=osystem, subarch=subarch,
            arch=arch, purpose=self.purpose),
    }
    # Images are fetched from the rack controller's HTTP image service.
    fs_host = 'http://%s:5248/images' % (convert_host_to_uri_str(
        options['kernel_params'].fs_host))
    output = method.get_reader(**options).read(10000).decode("utf-8")
    config = parse_pxe_config(output)
    # The default section is defined.
    default_section_label = config.header["DEFAULT"]
    self.assertThat(config, Contains(default_section_label))
    default_section = dict(config[default_section_label])
    # KERNEL and INITRD must live under <host>/<osystem>/<arch>/<subarch>.
    contains_arch_path = StartsWith(
        "%s/%s/%s/%s" % (fs_host, osystem, arch, subarch))
    self.assertThat(default_section["KERNEL"], contains_arch_path)
    self.assertThat(default_section["INITRD"], contains_arch_path)
    self.assertEqual("2", default_section["IPAPPEND"])
def test_encode_multipart_data_multiple_params(self):
    """Duplicate names, list values, and varied file sources round-trip."""
    # Sequences of parameters and files passed to
    # encode_multipart_data() permit use of the same name for
    # multiple parameters and/or files. See `make_payloads` to
    # understand how it processes different types of parameter
    # values.
    params_in = [
        ("one", "ABC"),
        ("one", "XYZ"),
        ("two", ["DEF", "UVW"]),
    ]
    # Files may be provided as a BytesIO, an open file object, or a
    # callable returning an open file object.
    files_in = [
        ("f-one", BytesIO(b"f1")),
        ("f-two", open(self.make_file(contents=b"f2"), "rb")),
        ("f-three", lambda: open(self.make_file(contents=b"f3"), "rb")),
    ]
    body, headers = encode_multipart_data(params_in, files_in)
    self.assertEqual("%s" % len(body), headers["Content-Length"])
    self.assertThat(
        headers["Content-Type"],
        StartsWith("multipart/form-data; boundary="))
    # Round-trip through Django's multipart code.
    params_out, files_out = (parse_headers_and_body_with_django(
        headers, body))
    # The list value for "two" is expected to have been flattened into
    # two separate values.
    params_out_expected = MultiValueDict()
    params_out_expected.appendlist("one", "ABC")
    params_out_expected.appendlist("one", "XYZ")
    params_out_expected.appendlist("two", "DEF")
    params_out_expected.appendlist("two", "UVW")
    self.assertEqual(params_out_expected, params_out, ahem_django_ahem)
    files_expected = {"f-one": b"f1", "f-two": b"f2", "f-three": b"f3"}
    files_observed = {name: buf.read() for name, buf in files_out.items()}
    self.assertEqual(files_expected, files_observed, ahem_django_ahem)
def test_classic_confinement_with_existing_rpath(self):
    """A classic build prepends the core-snap paths to an existing rpath."""
    if os.environ.get('ADT_TEST') and self.deb_arch == 'armhf':
        self.skipTest("The autopkgtest armhf runners can't install snaps")
    project_dir = 'classic-build-existing-rpath'
    # The first run should fail as the environment variable is not
    # set but we can only test this on clean systems.
    if not os.path.exists(os.path.join(
            os.path.sep, 'snap', 'core', 'current')):
        try:
            self.run_snapcraft(['prime'], project_dir)
        except subprocess.CalledProcessError:
            pass
        else:
            self.fail(
                'This should fail as SNAPCRAFT_SETUP_CORE is not set')
    # Now we set the required environment variable
    self.useFixture(fixtures.EnvironmentVariable(
        'SNAPCRAFT_SETUP_CORE', '1'))
    self.run_snapcraft(['prime'], project_dir)
    bin_path = os.path.join(self.prime_dir, 'bin', 'hello-classic')
    self.assertThat(bin_path, FileExists())
    # The original $ORIGIN-relative rpath must be preserved ahead of the
    # core snap path.
    rpath = subprocess.check_output([
        self.patchelf_command, '--print-rpath',
        bin_path]).decode().strip()
    expected_rpath = '$ORIGIN/../fake-lib:/snap/core/current/'
    self.assertThat(rpath, StartsWith(expected_rpath))
def test_classic_confinement_patchelf_disabled(self):
    """With the no-patchelf build attribute, the binary keeps the host
    interpreter instead of being re-pointed at the core snap.
    """
    if os.environ.get("ADT_TEST") and self.deb_arch == "armhf":
        self.skipTest("The autopkgtest armhf runners can't install snaps")
    project_dir = "classic-build"
    # Now we set the required environment variable
    self.useFixture(
        fixtures.EnvironmentVariable("SNAPCRAFT_SETUP_CORE", "1"))
    self.copy_project_to_cwd(project_dir)
    # Create a new snapcraft.yaml
    snapcraft_yaml = fixture_setup.SnapcraftYaml(
        self.path, confinement="classic")
    snapcraft_yaml.update_part(
        "hello",
        {
            "source": ".",
            "plugin": "make",
            "build-attributes": ["no-patchelf"]
        },
    )
    self.useFixture(snapcraft_yaml)
    self.run_snapcraft("prime")
    bin_path = os.path.join(self.prime_dir, "bin", "hello-classic")
    self.assertThat(bin_path, FileExists())
    # A system interpreter path (e.g. /lib/...) means patchelf did not
    # rewrite the binary.
    interpreter = subprocess.check_output(
        [self.patchelf_command, "--print-interpreter", bin_path]).decode()
    self.assertThat(interpreter, StartsWith("/lib"))
def test_openid_adapter_openid_urls_obey_settings(self):
    """The identity URL is rooted at the configured provider root."""
    new_root = 'https://some.new.provider.com'
    self.set_launchpad_section_setings(openid_provider_root=new_root)
    identity = IOpenIDPersistentIdentity(self.factory.makeAccount())
    self.assertThat(identity.openid_identity_url, StartsWith(new_root))
def test_classic_confinement_with_existing_rpath(self):
    """A classic build prepends the core-snap paths to an existing rpath."""
    if os.environ.get("ADT_TEST") and self.deb_arch == "armhf":
        self.skipTest("The autopkgtest armhf runners can't install snaps")
    project_dir = "classic-build-existing-rpath"
    # The first run should fail as the environment variable is not
    # set but we can only test this on clean systems.
    if not os.path.exists(
            os.path.join(os.path.sep, "snap", "core", "current")):
        try:
            self.run_snapcraft(["prime"], project_dir)
        except subprocess.CalledProcessError:
            pass
        else:
            self.fail(
                "This should fail as SNAPCRAFT_SETUP_CORE is not set")
    # Now we set the required environment variable
    self.useFixture(
        fixtures.EnvironmentVariable("SNAPCRAFT_SETUP_CORE", "1"))
    self.run_snapcraft(["prime"], project_dir)
    bin_path = os.path.join(self.prime_dir, "bin", "hello-classic")
    self.assertThat(bin_path, FileExists())
    # The original $ORIGIN-relative rpath must be preserved ahead of the
    # core snap path.
    rpath = (subprocess.check_output(
        [self.patchelf_command, "--print-rpath",
         bin_path]).decode().strip())
    expected_rpath = "$ORIGIN/../fake-lib:/snap/core/current/"
    self.assertThat(rpath, StartsWith(expected_rpath))
def test_handler_uris_are_absolute(self):
    """Every handler URI in the API description is an absolute URL."""
    params = self.make_params()
    server = params["SERVER_NAME"]
    # Without this, the test wouldn't be able to detect accidental
    # duplication of the script_name portion of the URL path:
    # /MAAS/MAAS/api/...
    self.patch_script_prefix(self.script_name)
    description = self.get_description(params)
    expected_uri = AfterPreprocessing(
        urlparse, MatchesStructure(
            scheme=Equals(self.scheme),
            hostname=Equals(server),
            # The path is always the script name followed by "api/"
            # because all API calls are within the "api" tree.
            path=StartsWith(self.script_name + "/api/"),
        ),
    )
    # A handler entry may be absent (None) or carry a matching "uri".
    expected_handler = MatchesAny(
        Is(None), AfterPreprocessing(itemgetter("uri"), expected_handler))
    expected_resource = MatchesAll(
        AfterPreprocessing(itemgetter("anon"), expected_handler),
        AfterPreprocessing(itemgetter("auth"), expected_handler),
    )
    resources = description["resources"]
    self.assertNotEqual([], resources)
    self.assertThat(resources, AllMatch(expected_resource))
def test_wadl(self):
    """generate_wadl() yields an XML document for every active version."""
    saved_cache = WebServiceApplication.cached_wadl.copy()
    web_config = getUtility(IWebServiceConfiguration)
    for active_version in web_config.active_versions:
        document = generate_wadl(active_version)
        # Only the prefix matters; slice keeps failure output short.
        self.assertThat(document[:40], StartsWith('<?xml '))
    # Put back the WADL cache we may have populated.
    WebServiceApplication.cached_wadl = saved_cache
def test_uses_given_url(self):
    """run() issues its API query under the server URL it was given."""
    server_url = make_url('region')
    self.patch(start_cluster_controller, 'start_up')
    self.prepare_success_response()
    start_cluster_controller.run(make_args(server_url=server_url))
    call = MAASDispatcher.dispatch_query.call_args
    queried_url = call[0][0]
    self.assertThat(
        queried_url, StartsWith(server_url + 'api/1.0/nodegroups/'))
def test_action_type(self):
    """
    Action types include their type name.
    """
    task = next(tasks_from_iterable([action_task]))
    start = task.root().start_message
    expected_prefix = colors.parent(start.contents.action_type)
    rendered = message_name(colors, no_formatting, start)
    self.assertThat(rendered, StartsWith(expected_prefix))
def test_message_type(self):
    """
    Message types include their type name.
    """
    written = WrittenMessage.from_dict(message_task)
    expected_prefix = colors.parent(written.contents.message_type)
    rendered = message_name(colors, no_formatting, written)
    self.assertThat(rendered, StartsWith(expected_prefix))
def test_canonical_url(self):
    """A snap build's canonical URL lives under its owner and snap name."""
    person = self.factory.makePerson(name="person")
    snap = self.factory.makeSnap(
        registrant=person, owner=person, name="snap")
    build = self.factory.makeSnapBuild(requester=person, snap=snap)
    expected_prefix = "http://launchpad.dev/~person/+snap/snap/+build/"
    self.assertThat(canonical_url(build), StartsWith(expected_prefix))
def test_canonical_url(self):
    """A recipe build's canonical URL is rooted at its target archive."""
    archive_owner = self.factory.makePerson(name='ppa-owner')
    archive = self.factory.makeArchive(owner=archive_owner, name='ppa')
    build = self.factory.makeSourcePackageRecipeBuild(archive=archive)
    expected_prefix = (
        'http://launchpad.dev/~ppa-owner/+archive/ubuntu/ppa/'
        '+recipebuild/')
    self.assertThat(canonical_url(build), StartsWith(expected_prefix))
def test_compose_enlistment_preseed_has_header(self):
    """An enlistment preseed begins with the cloud-config marker line."""
    rack = factory.make_RackController()
    endpoint = factory.make_simple_http_url()
    http_request = make_HttpRequest()
    preseed_options = {
        'metadata_enlist_url': endpoint,
        'syslog_host_port': endpoint,
    }
    preseed = compose_enlistment_preseed(
        http_request, rack, preseed_options)
    self.assertThat(preseed, StartsWith("#cloud-config\n"))
def test_match(self):
    """match() returns None when the matchee carries the prefix."""
    self.assertIs(None, StartsWith("bar").match("barf"))
def test_mismatch_sets_expected(self):
    """A failed match records the expected prefix on the mismatch."""
    failure = StartsWith("bar").match("foo")
    self.assertEqual("bar", failure.expected)
def test_mismatch_sets_matchee(self):
    """A failed match records the matched-against value on the mismatch."""
    failure = StartsWith("bar").match("foo")
    self.assertEqual("foo", failure.matchee)
def test_mismatch_returns_does_not_start_with(self):
    """A failed match produces a DoesNotStartWith mismatch object."""
    self.assertIsInstance(
        StartsWith("bar").match("foo"), DoesNotStartWith)