def test_read_only(self):
    """Opening the db with flag='r' requires it to exist and forbids all mutation."""
    # Read mode expects db to already exist
    testify.assert_raises(
        sqlite3dbm.dbm.error,
        lambda: sqlite3dbm.dbm.SqliteMap(self.path, flag='r'),
    )

    # Create the db then re-open read-only
    smap = sqlite3dbm.dbm.SqliteMap(self.path, flag='c')
    smap = sqlite3dbm.dbm.SqliteMap(self.path, flag='r')

    # Check that all mutators raise exceptions
    mutator_raises = lambda callable_method: testify.assert_raises(
        sqlite3dbm.dbm.error,
        callable_method
    )

    def do_setitem():
        smap['foo'] = 'bar'
    mutator_raises(do_setitem)

    def do_delitem():
        del smap['foo']
    mutator_raises(do_delitem)

    mutator_raises(lambda: smap.clear())
    mutator_raises(lambda: smap.pop('foo'))
    mutator_raises(lambda: smap.popitem())
    mutator_raises(lambda: smap.update({'baz': 'qux'}))
def test_getitem(self):
    """Round-trip a stored value; a missing key raises KeyError."""
    self.smap['jugglers'] = 'awesomesauce'
    testify.assert_equal(self.smap['jugglers'], 'awesomesauce')

    def read_missing():
        return self.smap['unicyclers']
    testify.assert_raises(KeyError, read_missing)
def test_output(self):
    """Plugin output under the length limit sends; over the limit raises."""
    limit = send_nsca.nsca.MAX_PLUGINOUTPUT_LENGTH
    # host packets
    self.sr.send_host("test_host", 0, util.get_chrs(limit - 1))
    assert_raises(
        ValueError,
        self.sr.send_host, "test_host", 0, util.get_chrs(limit + 1))
    # service packets
    self.sr.send_service("test_host", "test_service", 0,
                         util.get_chrs(limit - 1))
    assert_raises(
        ValueError,
        self.sr.send_service, "test_host", "test_service", 0,
        util.get_chrs(limit + 1))
def test_failed_job(self):
    # A job flow with a simulated step failure should end up FAILED, and the
    # runner's cleanup (on context-manager exit) should then terminate it.
    mr_job = MRTwoStepJob(['-r', 'emr', '-v',
                           '-c', self.mrjob_conf_path])
    mr_job.sandbox()

    self.add_mock_s3_data({'walrus': {}})
    # fail step 0 of the first mock job flow
    self.mock_emr_failures = {('j-MOCKJOBFLOW0', 0): None}

    with mr_job.make_runner() as runner:
        assert isinstance(runner, EMRJobRunner)

        with logger_disabled('mrjob.emr'):
            assert_raises(Exception, runner.run)

        emr_conn = botoemr.EmrConnection()
        job_flow_id = runner.get_emr_job_flow_id()
        # advance the mock EMR state machine until it settles
        for i in range(10):
            emr_conn.simulate_progress(job_flow_id)

        job_flow = emr_conn.describe_jobflow(job_flow_id)
        assert_equal(job_flow.state, 'FAILED')

    # job should get terminated on cleanup
    emr_conn = runner.make_emr_conn()
    job_flow_id = runner.get_emr_job_flow_id()
    for i in range(10):
        emr_conn.simulate_progress(job_flow_id)

    job_flow = emr_conn.describe_jobflow(job_flow_id)
    assert_equal(job_flow.state, 'TERMINATED')
def test_unescape(self):
    """counter_unescape handles both escape styles; a lone backslash raises."""
    cases = [
        # covered by string_escape
        (r'\n', '\n'),
        (r'\\', '\\'),
        # covered by the manual unescape pass
        (r'\.', '.'),
    ]
    for escaped, expected in cases:
        assert_equal(counter_unescape(escaped), expected)

    # a trailing bare backslash is malformed
    assert_raises(ValueError, counter_unescape, '\\')
def test_get_encryption_method(self):
    """Supported crypter ids parse cleanly; unsupported ones raise ConfigParseError."""
    supported = frozenset([0, 1, 2, 3, 4, 8, 14, 15, 16])
    unsupported = frozenset([5, 6, 7, 9, 10, 255])
    for crypter in sorted(supported | unsupported):
        stream = cStringIO.StringIO()
        stream.write("encryption_method = %d\n" % crypter)
        if crypter in supported:
            self.sender.parse_config(stream)
            assert_equal(self.sender.encryption_method_i, crypter)
        else:
            assert_raises(
                send_nsca.nsca.ConfigParseError,
                self.sender.parse_config, stream)
def test_deprecated_mapper_final_positional_arg(self):
    """mapper_final as a third positional arg works but warns; giving it both ways raises."""
    def mapper(k, v):
        pass

    def reducer(k, v):
        pass

    def mapper_final():
        pass

    stderr = StringIO()
    with no_handlers_for_logger():
        log_to_stream('mrjob.job', stderr)
        step = MRJob.mr(mapper, reducer, mapper_final)

    # positional mapper_final is accepted, but a deprecation warning is logged
    expected_step = MRJob.mr(
        mapper=mapper, reducer=reducer, mapper_final=mapper_final)
    assert_equal(step, expected_step)
    assert_in('mapper_final should be specified', stderr.getvalue())

    # can't specify mapper_final as a positional and keyword arg
    assert_raises(
        TypeError,
        MRJob.mr, mapper, reducer, mapper_final,
        mapper_final=mapper_final)
def test_deprecated_mapper_final_positional_arg(self):
    """A positional mapper_final is tolerated (with a warning) but may not be repeated as a keyword."""
    def mapper(k, v):
        pass

    def reducer(k, v):
        pass

    def mapper_final():
        pass

    stderr = StringIO()
    with no_handlers_for_logger():
        log_to_stream('mrjob.job', stderr)
        step = MRJob.mr(mapper, reducer, mapper_final)

    # the positional spelling is equivalent to the keyword spelling...
    assert_equal(
        step,
        MRJob.mr(mapper=mapper, reducer=reducer,
                 mapper_final=mapper_final))
    # ...but a warning was emitted
    assert_in('mapper_final should be specified', stderr.getvalue())

    # positional + keyword together is a TypeError
    assert_raises(TypeError, MRJob.mr,
                  mapper, reducer, mapper_final,
                  mapper_final=mapper_final)
def test_historical_data_append_arms_with_variance_invalid(self):
    """Test that adding arms with variance causes a ValueError."""
    test_case = self.three_arms_with_variance_no_unsampled_arm_test_case
    historical_info = copy.deepcopy(test_case)
    T.assert_raises(
        ValueError,
        historical_info.append_sample_arms,
        test_case.arms_sampled,
    )
def test_mapper_and_reducer_as_positional_args(self):
    """Positional mapper/reducer args equal their keyword forms; duplicates raise."""
    def mapper(k, v):
        pass

    def reducer(k, v):
        pass

    def combiner(k, v):
        pass

    by_keyword = MRJob.mr(mapper=mapper, reducer=reducer)
    assert_equal(MRJob.mr(mapper), MRJob.mr(mapper=mapper))
    assert_equal(MRJob.mr(mapper, reducer), by_keyword)
    assert_equal(MRJob.mr(mapper, reducer=reducer), by_keyword)
    assert_equal(
        MRJob.mr(mapper, reducer, combiner=combiner),
        MRJob.mr(mapper=mapper, reducer=reducer, combiner=combiner))

    # can't specify something as a positional and keyword arg
    assert_raises(TypeError, MRJob.mr, mapper, mapper=mapper)
    assert_raises(TypeError, MRJob.mr, mapper, reducer, reducer=reducer)
def test_mapper_and_reducer_as_positional_args(self):
    """Each positional spelling of mr() must match the keyword spelling exactly."""
    def mapper(k, v):
        pass

    def reducer(k, v):
        pass

    def combiner(k, v):
        pass

    equivalent_calls = [
        (MRJob.mr(mapper), MRJob.mr(mapper=mapper)),
        (MRJob.mr(mapper, reducer),
         MRJob.mr(mapper=mapper, reducer=reducer)),
        (MRJob.mr(mapper, reducer=reducer),
         MRJob.mr(mapper=mapper, reducer=reducer)),
        (MRJob.mr(mapper, reducer, combiner=combiner),
         MRJob.mr(mapper=mapper, reducer=reducer, combiner=combiner)),
    ]
    for positional, keyword in equivalent_calls:
        assert_equal(positional, keyword)

    # a component may not be given both positionally and by keyword
    assert_raises(TypeError, MRJob.mr, mapper, mapper=mapper)
    assert_raises(TypeError, MRJob.mr, mapper, reducer, reducer=reducer)
def test_s3_ls(self):
    """_s3_ls lists keys under a prefix; a nonexistent bucket raises on iteration."""
    runner = EMRJobRunner(s3_scratch_uri='s3://walrus/tmp',
                          conf_path=False)
    self.add_mock_s3_data({'walrus': {'one': '', 'two': '', 'three': ''}})

    every_key = set(['s3://walrus/one',
                     's3://walrus/two',
                     's3://walrus/three'])
    assert_equal(set(runner._s3_ls('s3://walrus/')), every_key)

    # prefix matching is on the key string, not path components
    assert_equal(set(runner._s3_ls('s3://walrus/t')),
                 set(['s3://walrus/two', 's3://walrus/three']))
    assert_equal(set(runner._s3_ls('s3://walrus/t/')), set())

    # if we ask for a nonexistent bucket, we should get some sort
    # of exception (in practice, buckets with random names will
    # probably be owned by other people, and we'll get some sort
    # of permissions error)
    assert_raises(Exception, set, runner._s3_ls('s3://lolcat/'))
def test_hostname(self):
    """Hostnames up to the max length send fine; longer or non-ASCII names raise."""
    limit = send_nsca.nsca.MAX_HOSTNAME_LENGTH
    # valid packets: just under and exactly at the limit
    self.sr.send_host(util.get_chrs(limit - 1), 0, 'ok')
    self.sr.send_host(util.get_chrs(limit), 0, 'ok')
    # one past the limit must be rejected
    assert_raises(ValueError, self.sr.send_host,
                  util.get_chrs(limit + 1), 0, 'ok')
    # ascii only
    assert_raises(ValueError, self.sr.send_host, u"\xff\xf302", 0, 'ok')
def test_historical_data_append_arms_with_variance_invalid(self):
    """Test that adding arms with variance causes a ValueError."""
    source = self.three_arms_with_variance_no_unsampled_arm_test_case
    info_copy = copy.deepcopy(source)
    T.assert_raises(ValueError,
                    info_copy.append_sample_arms,
                    source.arms_sampled)
def test_undecodable_output_strict(self):
    """With strict protocols, unencodable raw input makes the mapper raise."""
    undecodable_input = StringIO('foo\n' + '\xaa\n' + 'bar\n')
    job = MRBoringJob(args=['--mapper', '--strict-protocols'])
    job.sandbox(stdin=undecodable_input)

    # the bad byte must surface as an exception
    assert_raises(Exception, job.run_mapper)
def test_mock_configuration_context_manager(self):
    # Proxies resolved before mocking should observe mocked values inside the
    # context manager; after it exits, re-reading 'one' should fail.
    one = self.getters.get('one')
    three = self.getters.get_int('three', default=3)
    with testing.MockConfiguration(dict(one=7), namespace=self.namespace):
        assert_equal(one, 7)
        assert_equal(three, 3)
    # NOTE(review): assert_raises is handed the *result* of get('one') rather
    # than the usual (callable, *args) form -- presumably get() returns a lazy
    # value proxy that raises ConfigurationError when evaluated/invoked;
    # confirm, otherwise this should read assert_raises(..., self.getters.get, 'one').
    assert_raises(errors.ConfigurationError, self.getters.get('one'))
def test_mock_configuration_context_manager(self):
    # Proxies created before mocking should see mocked values inside the
    # MockConfiguration block; after it exits, 'one' is undefined again.
    one = staticconf.get("one")
    three = staticconf.get_int("three", default=3)
    with testing.MockConfiguration(dict(one=7)):
        assert_equal(one, 7)
        assert_equal(three, 3)
    # NOTE(review): assert_raises receives the proxy returned by
    # staticconf.get("one"), not a (callable, *args) pair -- presumably the
    # proxy raises ConfigurationError when invoked/evaluated; confirm against
    # staticconf's ValueProxy semantics.
    assert_raises(errors.ConfigurationError, staticconf.get("one"))
def test_sample_arm_add_arm_with_variance_invalid(self):
    """Adding two SampleArms raises ValueError when either operand has a
    non-None variance."""
    # variance on the left-hand operand
    with_variance = SampleArm(win=2, loss=1, total=500, variance=0.1)
    T.assert_raises(
        ValueError, with_variance.__add__,
        SampleArm(win=2, loss=1, total=500, variance=None))

    # variance on the right-hand operand
    without_variance = SampleArm(win=2, loss=1, total=500, variance=None)
    T.assert_raises(
        ValueError, without_variance.__add__,
        SampleArm(win=2, loss=1, total=500, variance=0.1))
def test_mock_configuration_context_manager(self):
    # Same scenario as the double-quoted variant: mocked values visible inside
    # the context manager, ConfigurationError once it has exited.
    one = staticconf.get('one')
    three = staticconf.get_int('three', default=3)
    with testing.MockConfiguration(dict(one=7)):
        assert_equal(one, 7)
        assert_equal(three, 3)
    # NOTE(review): assert_raises is given the return value of
    # staticconf.get('one') rather than a callable plus args -- presumably the
    # returned proxy raises ConfigurationError when used; verify.
    assert_raises(errors.ConfigurationError, staticconf.get('one'))
def test_percent_escaping(self):
    """Doubled percents escape to literal '%'; a dangling '%' is rejected."""
    year = y(2011)
    assert_equal(strftime(year, '110%%'), ['110%'])
    # don't incorrectly grab % out of %% to do globbing
    assert_equal(strftime(year, '%m %%m %%%m'), ['* %m %*'])
    # catch invalid strftime string
    assert_raises(ValueError, strftime, year, '110%')
def test_stale_module_check(self):
    # Build a repo plus a submodule, create two branches in the main repo
    # that pin the submodule to incompatible branches, then expect
    # _stale_submodule_check to raise GitException.
    test_settings = copy.deepcopy(Settings)
    repo_path = tempfile.mkdtemp(prefix="pushmanager")
    submodule_path = tempfile.mkdtemp(prefix="pushmanager")
    self.temp_git_dirs.append(repo_path)
    self.temp_git_dirs.append(submodule_path)
    test_settings['git']['local_repo_path'] = repo_path

    # Create main repo
    GitCommand('init', repo_path, cwd=repo_path).run()
    # Prevent Git complaints about names
    GitCommand('config', 'user.email', 'test@pushmanager', cwd=repo_path).run()
    GitCommand('config', 'user.name', 'pushmanager tester', cwd=repo_path).run()
    with open(os.path.join(repo_path, "code.py"), 'w') as f:
        f.write('#!/usr/bin/env python\n\nprint("Hello World!")\nPrint("Goodbye!")\n')
    GitCommand('add', repo_path, cwd=repo_path).run()
    GitCommand('commit', '-a', '-m', 'Master Commit', cwd=repo_path).run()

    # Create repo to use as submodule
    GitCommand('init', submodule_path, cwd=submodule_path).run()
    # Prevent Git complaints about names
    GitCommand('config', 'user.email', 'test@pushmanager', cwd=submodule_path).run()
    GitCommand('config', 'user.name', 'pushmanager tester', cwd=submodule_path).run()
    with open(os.path.join(submodule_path, "codemodule.py"), 'w') as f:
        f.write('#!/usr/bin/env python\n\nprint("Hello World!")\nPrint("Goodbye!")\n')
    GitCommand('add', submodule_path, cwd=submodule_path).run()
    GitCommand('commit', '-a', '-m', 'Master Commit', cwd=submodule_path).run()

    # Make two incompatible branches in the submodule
    GitCommand('checkout', '-b', 'change_german', cwd=submodule_path).run()
    with open(os.path.join(submodule_path, "codemodule.py"), 'w') as f:
        f.write('#!/usr/bin/env python\n\nprint("Hallo Welt!")\nPrint("Goodbye!")\n')
    GitCommand('commit', '-a', '-m', 'verpflichten', cwd=submodule_path).run()
    GitCommand('checkout', 'master', cwd=submodule_path).run()
    GitCommand('checkout', '-b', 'change_welsh', cwd=submodule_path).run()
    with open(os.path.join(submodule_path, "codemodule.py"), 'w') as f:
        f.write('#!/usr/bin/env python\n\nprint("Helo Byd!")\nPrint("Goodbye!")\n')
    GitCommand('commit', '-a', '-m', 'ymrwymo', cwd=submodule_path).run()
    GitCommand('checkout', 'master', cwd=submodule_path).run()

    # Add submodule at master to main repo
    GitCommand('submodule', 'add', submodule_path, cwd=repo_path).run()
    GitCommand('commit', '-a', '-m', 'Add submodule', cwd=repo_path).run()

    # Create branches in main repo, have each switch submodule to different branch
    internal_submodule_path = os.path.join(repo_path, submodule_path.split("/")[-1:][0])
    GitCommand('checkout', '-b', 'change_german', cwd=repo_path).run()
    GitCommand('checkout', 'change_german', cwd=internal_submodule_path).run()
    GitCommand('commit', '-a', '-m', 'verpflichten', cwd=repo_path).run()
    GitCommand('checkout', 'master', cwd=repo_path).run()
    GitCommand('checkout', '-b', 'change_welsh', cwd=repo_path).run()
    GitCommand('commit', '-a', '-m', 'ymrwymo', cwd=repo_path).run()
    GitCommand('checkout', 'change_welsh', cwd=internal_submodule_path).run()
    GitCommand('checkout', 'master', cwd=repo_path).run()

    # The two branches now disagree about the submodule revision
    T.assert_raises(GitException, pushmanager.core.git._stale_submodule_check, repo_path)
def test_stale_module_check(self):
    # Duplicate of the sibling test: set up a repo + submodule with two
    # branches pinning the submodule differently, then expect GitException
    # from _stale_submodule_check.
    test_settings = copy.deepcopy(Settings)
    repo_path = tempfile.mkdtemp(prefix="pushmanager")
    submodule_path = tempfile.mkdtemp(prefix="pushmanager")
    self.temp_git_dirs.append(repo_path)
    self.temp_git_dirs.append(submodule_path)
    test_settings['git']['local_repo_path'] = repo_path

    # Create main repo
    GitCommand('init', repo_path, cwd=repo_path).run()
    # Prevent Git complaints about names
    GitCommand('config', 'user.email', 'test@pushmanager', cwd=repo_path).run()
    GitCommand('config', 'user.name', 'pushmanager tester', cwd=repo_path).run()
    with open(os.path.join(repo_path, "code.py"), 'w') as f:
        f.write('#!/usr/bin/env python\n\nprint("Hello World!")\nPrint("Goodbye!")\n')
    GitCommand('add', repo_path, cwd=repo_path).run()
    GitCommand('commit', '-a', '-m', 'Master Commit', cwd=repo_path).run()

    # Create repo to use as submodule
    GitCommand('init', submodule_path, cwd=submodule_path).run()
    # Prevent Git complaints about names
    GitCommand('config', 'user.email', 'test@pushmanager', cwd=submodule_path).run()
    GitCommand('config', 'user.name', 'pushmanager tester', cwd=submodule_path).run()
    with open(os.path.join(submodule_path, "codemodule.py"), 'w') as f:
        f.write('#!/usr/bin/env python\n\nprint("Hello World!")\nPrint("Goodbye!")\n')
    GitCommand('add', submodule_path, cwd=submodule_path).run()
    GitCommand('commit', '-a', '-m', 'Master Commit', cwd=submodule_path).run()

    # Make two incompatible branches in the submodule
    GitCommand('checkout', '-b', 'change_german', cwd=submodule_path).run()
    with open(os.path.join(submodule_path, "codemodule.py"), 'w') as f:
        f.write('#!/usr/bin/env python\n\nprint("Hallo Welt!")\nPrint("Goodbye!")\n')
    GitCommand('commit', '-a', '-m', 'verpflichten', cwd=submodule_path).run()
    GitCommand('checkout', 'master', cwd=submodule_path).run()
    GitCommand('checkout', '-b', 'change_welsh', cwd=submodule_path).run()
    with open(os.path.join(submodule_path, "codemodule.py"), 'w') as f:
        f.write('#!/usr/bin/env python\n\nprint("Helo Byd!")\nPrint("Goodbye!")\n')
    GitCommand('commit', '-a', '-m', 'ymrwymo', cwd=submodule_path).run()
    GitCommand('checkout', 'master', cwd=submodule_path).run()

    # Add submodule at master to main repo
    GitCommand('submodule', 'add', submodule_path, cwd=repo_path).run()
    GitCommand('commit', '-a', '-m', 'Add submodule', cwd=repo_path).run()

    # Create branches in main repo, have each switch submodule to different branch
    internal_submodule_path = os.path.join(repo_path, submodule_path.split("/")[-1:][0])
    GitCommand('checkout', '-b', 'change_german', cwd=repo_path).run()
    GitCommand('checkout', 'change_german', cwd=internal_submodule_path).run()
    GitCommand('commit', '-a', '-m', 'verpflichten', cwd=repo_path).run()
    GitCommand('checkout', 'master', cwd=repo_path).run()
    GitCommand('checkout', '-b', 'change_welsh', cwd=repo_path).run()
    GitCommand('commit', '-a', '-m', 'ymrwymo', cwd=repo_path).run()
    GitCommand('checkout', 'change_welsh', cwd=internal_submodule_path).run()
    GitCommand('checkout', 'master', cwd=repo_path).run()

    # The two branches now disagree about the submodule revision
    T.assert_raises(GitException, pushmanager.core.git._stale_submodule_check, repo_path)
def test_bad_option_types(self):
    """Unsupported option types and actions make add_passthrough_option raise."""
    job = MRJob()

    # 'set' is not a supported option type
    assert_raises(OptionError, job.add_passthrough_option,
                  '--stop-words', dest='stop_words', type='set',
                  default=None)

    # 'callback' is not a supported action
    assert_raises(OptionError, job.add_passthrough_option,
                  '--leave-a-msg', dest='leave_a_msg', action='callback',
                  default=None)
def test_case_sensitive(self):
    """AWS region names are case-sensitive; wrong-case or unknown regions raise."""
    for bad_region in ('eu', 'US-WEST-1'):
        assert_raises(Exception, EMRJobRunner,
                      conf_path=False, aws_region=bad_region)
def test_undecodable_input_strict(self):
    """With strict protocols, malformed JSON reducer input raises."""
    bad_json_input = StringIO('BAD\tJSON\n' +
                              '"foo"\t"bar"\n' +
                              '"too"\t"many"\t"tabs"\n' +
                              '"notabs"\n')
    job = MRBoringJob(args=['--reducer', '--strict-protocols'])
    job.sandbox(stdin=bad_json_input)

    # the first unparseable line must surface as an exception
    assert_raises(Exception, job.run_reducer)
def test_no_result_fails(self):
    """An empty getaddrinfo result makes both connect paths raise socket.error."""
    mock_getaddrinfo = mock.Mock(return_value=[])
    # use a non-standard port so we can ensure that the calling worked right
    test_port = 3770
    with mock.patch('socket.getaddrinfo', mock_getaddrinfo):
        assert_raises(socket.error,
                      self.sender._sock_connect, 'test_host', test_port)
        assert_raises(socket.error, self.sender.connect)
        # both code paths must have asked the resolver with the same options
        resolver_opts = (socket.AF_UNSPEC, socket.SOCK_STREAM, 0, 0)
        mock_getaddrinfo.assert_any_call('test_host', test_port,
                                         *resolver_opts)
        mock_getaddrinfo.assert_any_call('test_host', DEFAULT_PORT,
                                         *resolver_opts)
def test_mock_configuration(self):
    # Proxies created before setup() should see mocked values between
    # setup() and teardown(); afterwards 'two' is undefined again.
    two = staticconf.get_string('two')
    stars = staticconf.get_bool('stars')
    mock_config = testing.MockConfiguration(dict(two=2, stars=False))
    mock_config.setup()
    assert_equal(two, '2')
    assert not stars
    mock_config.teardown()
    # NOTE(review): assert_raises receives the value proxy returned by
    # staticconf.get('two'), not the usual (callable, *args) form --
    # presumably the proxy raises ConfigurationError when invoked/evaluated;
    # confirm against staticconf's ValueProxy semantics.
    assert_raises(errors.ConfigurationError, staticconf.get('two'))
def test_port_range_list(self):
    """Port specs expand singles, comma lists and colon ranges; bad specs raise."""
    expectations = [
        ('1234', [1234]),
        ('123,456,789', [123, 456, 789]),
        ('1234,5678', [1234, 5678]),
        ('1234:1236', [1234, 1235, 1236]),
        ('123:125,456', [123, 124, 125, 456]),
        ('123:125,456:458', [123, 124, 125, 456, 457, 458]),
        # leading zeros are tolerated
        ('0123', [123]),
    ]
    for spec, expected in expectations:
        assert_equal(parse_port_range_list(spec), expected)

    # non-numeric input is rejected
    assert_raises(ValueError, parse_port_range_list, 'Alexandria')
    assert_raises(ValueError, parse_port_range_list, 'Athens:Alexandria')
def test_gp_construction_singular_covariance_matrix(self):
    """Test that the GaussianProcess ctor indicates a singular covariance
    matrix when points_sampled contains duplicates (0 noise)."""
    index = numpy.argmax(numpy.greater_equal(self.num_sampled_list, 1))
    domain, gaussian_process = self.gp_test_environments[index]

    point_one = SamplePoint([0.0] * domain.dim, 1.0, 0.0)
    # points two and three share coordinates, and noise_variance is 0.0,
    # so the covariance matrix has two identical rows
    point_two = SamplePoint([1.0] * domain.dim, 1.0, 0.0)
    point_three = point_two

    sampled = [point_one, point_two, point_three]
    historical_data = HistoricalData(len(point_one.point), sampled)
    T.assert_raises(
        C_GP.SingularMatrixException,
        GaussianProcess,
        gaussian_process.get_covariance_copy(),
        historical_data,
    )
def test_delitem(self):
    """Deleting removes the key; deleting it again raises KeyError."""
    self.smap['boo'] = 'ahhh!'
    testify.assert_equal(self.smap['boo'], 'ahhh!')

    del self.smap['boo']
    testify.assert_not_in('boo', self.smap)
    testify.assert_raises(KeyError, lambda: self.smap['boo'])

    # a second delete of the same key must also raise
    def delete_again():
        del self.smap['boo']
    testify.assert_raises(KeyError, delete_again)
def test_pop(self):
    """pop removes and returns a value; defaults apply; a bare miss raises."""
    self.smap['jason'] = 'fennell'
    testify.assert_equal(self.smap.pop('jason'), 'fennell')
    testify.assert_not_in('jason', self.smap)

    # with a default, a missing key yields the default
    assert self.smap.pop('jason', None) is None

    # without a default, a missing key raises
    def pop_missing():
        return self.smap.pop('jason')
    testify.assert_raises(KeyError, pop_missing)
def test_kill_processes_os_error(self):
    # os.kill raising EPERM should propagate out of kill_processes.
    def side_effect(*args, **kwargs):
        raise OSError(errno.EPERM, "access denied")
    with contextlib.nested(
        self.mock_method('%s.pid.os.kill' % __name__, None, side_effect),
        self.mock_method('%s.pid.is_process_alive' % __name__, True, None)
    ):
        # This will fail with access denied; we cannot kill the
        # process.
        pids = [1, 2, 3, 4, 5]
        T.assert_raises(OSError, pid.kill_processes, pids)
def test_port_range_list(self):
    """Port specs parse into explicit port lists; non-numeric specs raise."""
    def check(spec, ports):
        assert_equal(parse_port_range_list(spec), ports)

    check('1234', [1234])
    check('123,456,789', [123, 456, 789])
    check('1234,5678', [1234, 5678])
    check('1234:1236', [1234, 1235, 1236])
    check('123:125,456', [123, 124, 125, 456])
    check('123:125,456:458', [123, 124, 125, 456, 457, 458])
    # leading zeros are tolerated
    check('0123', [123])

    assert_raises(ValueError, parse_port_range_list, 'Alexandria')
    assert_raises(ValueError, parse_port_range_list, 'Athens:Alexandria')
def test_custom_key_value_option_parsing(self):
    """--cmdenv KEY=VALUE parses into a dict; later keys win; bare keys raise."""
    # simple example
    job = MRBoringJob(['--cmdenv', 'FOO=bar'])
    assert_equal(job.options.cmdenv, {'FOO': 'bar'})

    # repeated keys overwrite, and values may themselves contain '='
    job = MRBoringJob(['--cmdenv', 'FOO=bar',
                       '--cmdenv', 'FOO=baz',
                       '--cmdenv', 'BAZ=qux=quux'])
    assert_equal(job.options.cmdenv, {'FOO': 'baz', 'BAZ': 'qux=quux'})

    # must have KEY=VALUE
    assert_raises(ValueError, MRBoringJob, ['--cmdenv', 'FOO'])
def test_bad_option_types(self):
    """add_passthrough_option rejects unsupported types and actions with OptionError."""
    mr_job = MRJob()
    # an unknown option type...
    assert_raises(
        OptionError, mr_job.add_passthrough_option,
        '--stop-words', dest='stop_words', type='set', default=None)
    # ...and an unknown action are both rejected
    assert_raises(
        OptionError, mr_job.add_passthrough_option,
        '--leave-a-msg', dest='leave_a_msg', action='callback', default=None)
def test_select(self):
    """select returns values in requested-key order; any miss raises KeyError."""
    droid = ['R2-D2', 'C-3P0']
    contents = {'jason': 'fennell', 'droid': droid, 'pi': 3.14}
    self.smap_shelf.update(contents)

    selected = self.smap_shelf.select('jason', 'droid', 'pi')
    testify.assert_equal(selected, ['fennell', droid, 3.14])

    # one missing key poisons the whole select
    testify.assert_raises(
        KeyError,
        lambda: self.smap_shelf.select('jason', 'droid', 'brandon'),
    )
def test_custom_key_value_option_parsing(self):
    """KEY=VALUE pairs accumulate into a dict; a value-less key is an error."""
    # simple example
    assert_equal(
        MRBoringJob(['--cmdenv', 'FOO=bar']).options.cmdenv,
        {'FOO': 'bar'})

    # trickier example: duplicate keys and '=' inside values
    tricky_args = ['--cmdenv', 'FOO=bar',
                   '--cmdenv', 'FOO=baz',
                   '--cmdenv', 'BAZ=qux=quux']
    assert_equal(
        MRBoringJob(tricky_args).options.cmdenv,
        {'FOO': 'baz', 'BAZ': 'qux=quux'})

    # must have KEY=VALUE
    assert_raises(ValueError, MRBoringJob, ['--cmdenv', 'FOO'])
def test_missing_service_node(self):
    # A service whose 'node' names an unknown node/node-pool must be rejected
    # by update_config with a ConfigError.
    # NOTE(review): the embedded config literal appears to have lost its line
    # breaks in this view -- the YAML-style content presumably spans multiple
    # lines in the original; confirm against the repository copy.
    test_config = self.BASE_CONFIG + """ services: - name: "test_job0" node: bogusssss schedule: "interval 20s" actions: - name: "action0_0" command: "test_command0.0" cleanup_action: command: "test_command0.1" """
    assert_raises(ConfigError, update_config, self.filename, test_config)
def test_getitem_tuple(self):
    """Multi-key lookups accept tuples and generators; any miss raises KeyError."""
    self.smap.update({'jason': 'fennell', 'dave': 'marin'})

    # order of the requested keys is preserved in the result
    testify.assert_equal(self.smap['jason', 'dave'], ['fennell', 'marin'])
    testify.assert_equal(self.smap['dave', 'jason'], ['marin', 'fennell'])
    testify.assert_equal(self.smap[('jason', 'dave')], ['fennell', 'marin'])

    # arbitrary iterables work too
    key_gen = (name for name in ['jason', 'dave'])
    testify.assert_equal(self.smap[key_gen], ['fennell', 'marin'])

    testify.assert_raises(KeyError, lambda: self.smap['jason', 'brandon'])
def test_popitem(self):
    """popitem yields every pair exactly once, then raises KeyError when empty."""
    expected = {'1': 'a', '2': 'b'}
    self.prepopulate_map_test(expected, self.smap)

    drained = {}
    for _ in range(len(expected)):
        key, value = self.smap.popitem()
        drained[key] = value
    testify.assert_equal(drained, expected)

    # nothing left to pop
    testify.assert_raises(KeyError, lambda: self.smap.popitem())
def test_no_mapper(self):
    """mr() works without a mapper as long as some other step part is given."""
    def mapper_init():
        pass

    def mapper_final():
        pass

    def reducer(k, vs):
        pass

    # a completely empty step is an error
    assert_raises(Exception, MRJob.mr)

    assert_equal(MRJob.mr(reducer=reducer), stepdict(reducer=reducer))
    assert_equal(
        MRJob.mr(reducer=reducer, mapper_final=mapper_final),
        stepdict(reducer=reducer, mapper_final=mapper_final))
    assert_equal(
        MRJob.mr(reducer=reducer, mapper_init=mapper_init),
        stepdict(reducer=reducer, mapper_init=mapper_init))
def test_gp_add_sampled_points_singular_covariance_matrix(self):
    """Test that GaussianProcess.add_sampled_points indicates a singular
    covariance matrix when points_sampled contains duplicates (0 noise)."""
    env_input = copy.copy(self.gp_test_environment_input)
    env_input.num_sampled = 1
    env_input.gaussian_process_class = GaussianProcess
    _, gaussian_process = self._build_gaussian_process_test_data(env_input)

    dim = gaussian_process.dim
    point_one = SamplePoint([0.5] * dim, 1.0, 0.0)
    point_two = SamplePoint([1.0] * dim, -1.0, 0.0)
    # identical coordinates to point_one with zero noise: this duplicates a
    # covariance row
    point_three = point_one

    # points one and two are distinct, so this add is safe
    gaussian_process.add_sampled_points([point_one, point_two])
    # the duplicate must produce a singular covariance matrix
    T.assert_raises(
        C_GP.SingularMatrixException,
        gaussian_process.add_sampled_points,
        [point_three])
def test_raises_on_labels_wrong_length(self):
    """A labels list shorter than the enum must raise ValueError."""
    schema = {
        'type': 'string',
        'enum': ['a', 'b', 'c'],
        'labels': ['Letter A', 'Letter B'],
    }
    with T.assert_raises(ValueError):
        self.get_elements(schema)
def test_not_in_enum(self):
    """A default value outside the enum fails schema validation."""
    schema = {
        'type': 'integer',
        'enum': [1, 2, 3],
        'default': 4,
    }
    with T.assert_raises(jsonschema.ValidationError):
        validate_default_value(schema)
def test_non_decorator_with_not_secure(self):
    """Calling require_secure() directly on an insecure request raises Forbidden."""
    insecure_request = self._get_fake_request(False)
    with contextlib.nested(
        mock.patch.object(flask, 'request', insecure_request),
        T.assert_raises(werkzeug.exceptions.Forbidden),
    ):
        require_secure()
def test_categorization_response_error(self):
    """Tests whether the ResponseError is raised when the response
    returned from the actual API call is empty.
    """
    domains = ['yosemite.gov', 'joushuatree.gov', 'deathvalley.gov']
    # empty responses should raise an error
    all_responses = [{}]
    # mock cache file
    mock_read = mock_open(read_data="{}")
    with nested(
        patch('__builtin__.open', mock_read, create=True),
        patch.object(ApiCache, 'bulk_lookup', autospec=True,
                     return_value={}),
        patch.object(MultiRequest, 'multi_post', autospec=True,
                     return_value=all_responses),
    ) as (__, __, patched_multi_post):
        i = InvestigateApi('hocus pocus', 'cache.json')
        with T.assert_raises(ResponseError):
            i.categorization(domains)
def test_encode_local_ascii_fails(self):
    """encode_local raises when the filesystem encoding can't represent the text."""
    ascii_fs = mock.patch.object(
        sys, 'getfilesystemencoding', return_value='ascii')
    with assert_raises(UnicodeDecodeError):
        with ascii_fs:
            encode_local(u'日本語')