def test_services_that_run_on_should_properly_read_configuration(self):
    expected = ['fake_service1', 'fake_service2']
    fake_hostname = 'fake_hostname2'
    fake_service_configuration = self.fake_service_configuration
    actual = service_configuration_lib.services_that_run_on(
        fake_hostname, fake_service_configuration)
    T.assert_sorted_equal(expected, actual)
def test_exception_before_iteration(self, print_exc_mock, print_warning_mock):
    """For workers that raise exceptions before they even look at their
    input generators, make sure we return exceptions appropriately.

    NOTE: For the overridden ChunkedExceptionsTest, there will be only 1
    in-flight job, containing all of the inputs. So, we only expect to see
    one exception from the pool, and the others may be printed when the
    pool is shut down.
    """
    res_to_compare = [
        (inp, serialize_error(ec.value), typ)
        for inp, ec, typ in self.run_test_pool(worker_raise_exc_immediately)
    ]
    # Each worker will stop processing once an exception makes it to
    # the top so we only get that number of exceptions back out.
    expected_res_to_compare = [
        (vimap.pool.NO_INPUT, serialize_error(ValueError("hello")), 'exception'),
    ] * self.chunk_adjusted_num_output_exceptions(3)
    T.assert_sorted_equal(res_to_compare, expected_res_to_compare)
    self.check_died_prematurely_warning(print_warning_mock)
    # No matter how many exceptions are returned (3 in the default case, 1
    # in the chunked case), there should always be an exception printed for
    # each worker before the pool is shut down.
    self.check_printed_exceptions(print_exc_mock, ValueError("hello"), 3)
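# The vimap tests above and below compare exceptions by value through a
# serialize_error helper that is not shown in this section. A minimal sketch
# of what such a helper could look like, assuming it only needs to reduce an
# exception to something hashable and comparable (hypothetical; the real
# vimap test utility may differ):
def serialize_error(error):
    """Reduce an exception to a comparable (type, message) pair."""
    return (type(error), str(error))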
def test_exception_with_curleys(self, print_exc_mock, print_warning_mock):
    '''Dumb test ... I aim to write tests for most every bug that had
    existed, but this is kinda 1-off ... (.format() got a curley brace).
    '''
    res_to_compare = [
        (serialize_error(ec.value), typ)
        for _, ec, typ in self.run_test_pool(worker_raise_exc_with_curleys)
    ]
    # Each worker will stop processing once an exception makes it to
    # the top so we only get that number of exceptions back out.
    expected_res_to_compare = [
        # We're not sure which inputs will get picked, but all
        # should return this exception.
        (serialize_error(ValueError("{0} curley braces!")), 'exception'),
    ] * self.chunk_adjusted_num_output_exceptions(3)
    T.assert_sorted_equal(res_to_compare, expected_res_to_compare)
    self.check_died_prematurely_warning(print_warning_mock)
def test_exception_before_iteration(self, print_exc_mock, print_warning_mock):
    """For workers that raise exceptions before they even look at their
    input generators, make sure we return exceptions appropriately.

    NOTE: For the overridden ChunkedExceptionsTest, there will be only 1
    in-flight job, containing all of the inputs. So, we only expect to see
    one exception from the pool, and the others may be printed when the
    pool is shut down.
    """
    res_to_compare = [
        (inp, serialize_error(ec.value), typ)
        for inp, ec, typ in self.run_test_pool(worker_raise_exc_immediately)
    ]
    # Each worker will stop processing once an exception makes it to
    # the top so we only get that number of exceptions back out.
    expected_res_to_compare = [
        (vimap.pool.NO_INPUT, serialize_error(ValueError("hello")), 'exception'),
    ] * self.chunk_adjusted_num_output_exceptions(3)
    T.assert_sorted_equal(res_to_compare, expected_res_to_compare)
    self.check_died_prematurely_warning(print_warning_mock)
    # No matter how many exceptions are returned (3 in the default case, 1
    # in the chunked case), there should always be an exception printed for
    # each worker before the pool is shut down.
    self.check_printed_exceptions(print_exc_mock, ValueError("hello"), 3)
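# chunk_adjusted_num_output_exceptions captures the behavior described in the
# docstring above: without chunking every failing worker surfaces its own
# exception, while the overridden ChunkedExceptionsTest sends all inputs in a
# single in-flight job and therefore surfaces only one. A minimal sketch of
# how the base class and the chunked override might implement it (assumed
# class name and bodies, not the actual vimap test code):
class ExceptionsTest(object):
    def chunk_adjusted_num_output_exceptions(self, num_exceptions):
        # One exception per failing worker when inputs are not chunked.
        return num_exceptions


class ChunkedExceptionsTest(ExceptionsTest):
    def chunk_adjusted_num_output_exceptions(self, num_exceptions):
        # All inputs ride in one in-flight job, so only one exception surfaces.
        return 1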
def test_fail_after_a_while(self, print_exc_mock, print_warning_mock):
    processes = vimap.pool.fork(
        (worker_raise_exc_with_curleys.init_args(init=i) for i in xrange(100)),
        in_queue_size_factor=2
    )
    processes.imap([-1] * 3000 + list(range(50)))

    # Check yielded output.
    res_to_compare = []
    for inp, out, typ in processes.zip_in_out_typ():
        if typ == "exception":
            res_to_compare.append((inp, serialize_error(out.value), typ))
        else:
            res_to_compare.append((inp, out, typ))
    # All the -1s will produce None output.
    expected_res_to_compare = [(-1, None, "output")] * 3000
    # Once we get to the positive numbers, we start causing 50 of
    # the 100 workers to throw exceptions.
    expected_res_to_compare.extend(
        [(i, serialize_error(ValueError("{0} curley braces!")), "exception")
         for i in range(50)]
    )
    T.assert_sorted_equal(res_to_compare, expected_res_to_compare)

    # Check out exception logging.
    calls = print_exc_mock.call_args_list
    errors = [serialize_error(call_args[0].value) for call_args, _ in calls]
    T.assert_equal(
        errors,
        [serialize_error(ValueError("{0} curley braces!"))] * 50)

    # NOTE: Sometimes, the weakref in the pool is deleted, so 'has_exceptions'
    # is not set, and the pool prints warnings we don't actually care about.
    # Make sure that this is the only warning printed.
    if print_warning_mock.call_args_list:
        T.assert_equal(len(print_warning_mock.call_args_list), 1)
        [warning] = print_warning_mock.call_args_list
        T.assert_in("Pool disposed before input was consumed", warning[0][0])
def test_exception_after_iteration_not_returned(self, print_exc_mock):
    res_to_compare = [
        (inp, out, typ)
        for inp, out, typ in self.run_test_pool(worker_raise_exc_after_iteration)
    ]
    # The pool notices that all output has been returned, so doesn't
    # wait for any more responses. We shouldn't see exceptions.
    expected_res_to_compare = [(inp, inp, 'output') for inp in range(1, 10)]
    T.assert_sorted_equal(res_to_compare, expected_res_to_compare)
def test_services_that_run_on_should_return_an_empty_array_when_the_hostname_isnt_anywhere(
        self):
    expected = []
    fake_hostname = 'non_existent_fake_hostname2'
    fake_service_configuration = self.fake_service_configuration
    actual = service_configuration_lib.services_that_run_on(
        fake_hostname, fake_service_configuration)
    T.assert_sorted_equal(expected, actual)
def test_exception_before_iteration(self, print_exc_mock, print_warning_mock):
    res_to_compare = [
        (inp, serialize_error(ec.value), typ)
        for inp, ec, typ in run_test_pool(worker_raise_exc_immediately)
    ]
    # Each worker will stop processing once an exception makes it to
    # the top so we only get that number of exceptions back out.
    expected_res_to_compare = [
        (vimap.pool.NO_INPUT, serialize_error(ValueError("hello")), "exception")
    ] * 3
    T.assert_sorted_equal(res_to_compare, expected_res_to_compare)
    self.check_died_prematurely_warning(print_warning_mock)
def test_request_quicktags(self):
    tree = self.render_etree(self.newrequest_page)
    found_tags = []
    for span in tree.iter('span'):
        if span.attrib['class'] == 'tag-suggestion':
            found_tags.append(span.text)
    T.assert_sorted_equal(self.tags, found_tags)
def verify_database_state(self, data, success, db_results):
    self.check_db_results(success, db_results)
    # id, user, state, repo, branch, *tags*, created, modified, etc...
    data_tags = [d[5] for d in data]
    # id, user, state, repo, branch, revision, *tags*, created, etc...
    tags = [result[6] for result in db_results.fetchall()]
    T.assert_sorted_equal(data_tags, tags)
def verify_database_state(self, data, success, db_results):
    self.check_db_results(success, db_results)
    # id, push, *type*, status, target
    data_types = [d[2] for d in data]
    # id, push, *type*, status, target
    types = [result[2] for result in db_results.fetchall()]
    T.assert_sorted_equal(data_types, types)
def verify_database_state(self, data, success, db_results):
    self.check_db_results(success, db_results)
    # id, user, state, repo, branch, *tags*, created, modified, etc...
    data_tags = [d[5] for d in data]
    # id, user, state, repo, branch, revision, *tags*, created, etc...
    tags = [result[6] for result in db_results.fetchall()]
    T.assert_sorted_equal(data_tags, tags)
def test_review_mapper(self):
    """Test the review_mapper function to make sure that, given a mock
    input, it produces the correct calculated output.
    """
    biz_review_positivity = (BIZ_NAME, (TEXT, 3))
    job = WeightedPositiveWords()
    review_results = list(job.review_mapper(CATEGORY, biz_review_positivity))
    results = [((CATEGORY, u'world'), (BIZ_NAME, 3)),
               ((CATEGORY, u'hello'), (BIZ_NAME, 3))]
    T.assert_sorted_equal(review_results, results)
def test_review_mapper(self):
    """Test the review_mapper function to make sure that, given a mock
    input, it produces the correct calculated output.
    """
    biz_review_positivity = (BIZ_NAME, (TEXT, 3))
    job = WeightedPositiveWords()
    review_results = list(
        job.review_mapper(CATEGORY, biz_review_positivity))
    results = [((CATEGORY, u'world'), (BIZ_NAME, 3)),
               ((CATEGORY, u'hello'), (BIZ_NAME, 3))]
    T.assert_sorted_equal(review_results, results)
def test_request_form_labels(self):
    tree = self.render_etree(self.newrequest_page)
    form_attr = ['request-form-%s' % elem for elem in self.form_elements]
    form_attr_with_id = ['takeover']
    found_labels = []
    for label in tree.iter('label'):
        found_labels.append(label.attrib['for'])
        if label.attrib['for'] in form_attr_with_id:
            T.assert_equal(label.attrib['id'],
                           '%s-label' % label.attrib['for'])
    T.assert_sorted_equal(form_attr, found_labels)
def test_fail_after_a_while(self, print_exc_mock, print_warning_mock):
    processes = self.fork_pool(
        (worker_raise_exc_with_curleys.init_args(init=i) for i in xrange(100)),
        max_real_in_flight_factor=2
    )
    processes.imap([-1] * 3000 + list(range(50)))

    # Check yielded output.
    res_to_compare = []
    for inp, out, typ in processes.zip_in_out_typ():
        if typ == 'exception':
            res_to_compare.append((inp, serialize_error(out.value), typ))
        else:
            res_to_compare.append((inp, out, typ))
    # All the -1s will produce None output.
    expected_res_to_compare = [(-1, None, 'output')] * 3000
    # Once we get to the positive numbers, we start causing 50 of
    # the 100 workers to throw exceptions.
    expected_res_to_compare.extend([
        (i, serialize_error(ValueError("{0} curley braces!")), 'exception')
        for i in range(self.chunk_adjusted_num_output_exceptions(50))
    ])
    T.assert_sorted_equal(res_to_compare, expected_res_to_compare)

    # Check out exception logging.
    calls = print_exc_mock.call_args_list
    errors = [serialize_error(call_args[0].value) for call_args, _ in calls]
    T.assert_equal(errors, (
        [serialize_error(ValueError("{0} curley braces!"))] *
        self.chunk_adjusted_num_output_exceptions(50)))

    # NOTE: Sometimes, the weakref in the pool is deleted, so 'has_exceptions'
    # is not set, and the pool prints warnings we don't actually care about.
    # Make sure that this is the only warning printed.
    if print_warning_mock.call_args_list:
        T.assert_equal(len(print_warning_mock.call_args_list), 1)
        [warning] = print_warning_mock.call_args_list
        T.assert_in('Pool disposed before input was consumed', warning[0][0])
def test_exception_after_iteration_not_returned(self, print_exc_mock):
    res_to_compare = [
        (inp, out, typ)
        for inp, out, typ in self.run_test_pool(worker_raise_exc_after_iteration)
    ]
    # The pool notices that all output has been returned, so doesn't
    # wait for any more responses. We shouldn't see exceptions.
    expected_res_to_compare = [
        (inp, inp, 'output') for inp in range(1, 10)
    ]
    T.assert_sorted_equal(res_to_compare, expected_res_to_compare)
def test_generation(self, write_file_mock, shutil_mock, fileinput_mock):
    """The closest thing to a unit test right now."""
    main.main(["basic", "-b"])

    written_files = dict(
        (os.path.basename(filename), writer)
        for ((filename, writer), _) in write_file_mock.call_args_list
    )
    testify.assert_sorted_equal(written_files.keys(),
                                ["data.json", "index.html"])
    testify.assert_subset(["copytree", "copy"],
                          [c[0] for c in shutil_mock.method_calls])

    # hacky stuff to make sure data.json is written correctly
    stringio = cStringIO.StringIO()
    written_files["data.json"](stringio)
    data_reread = simplejson.loads(stringio.getvalue())
    testify.assert_equal(data_reread, [33])
def test_generation(self, write_file_mock, shutil_mock, fileinput_mock):
    """The closest thing to a unit test right now."""
    main.main(['basic', '-b'])

    written_files = dict(
        (os.path.basename(filename), writer)
        for ((filename, writer), _) in write_file_mock.call_args_list)
    testify.assert_sorted_equal(written_files.keys(),
                                ['data.json', 'index.html'])
    testify.assert_subset(['copytree', 'copy'],
                          [c[0] for c in shutil_mock.method_calls])

    # hacky stuff to make sure data.json is written correctly
    stringio = cStringIO.StringIO()
    written_files['data.json'](stringio)
    data_reread = simplejson.loads(stringio.getvalue())
    testify.assert_equal(data_reread, [33])
def test_request_form_input(self):
    tree = self.render_etree(self.newrequest_page)
    id_attr = ['request-form-%s' % elem for elem in self.form_elements]
    name_attr = ['request-%s' % elem for elem in self.form_elements]
    found_id = []
    found_name = []
    for field in tree.iter('input'):
        # ignore hidden/submit
        if 'type' not in field.attrib or field.attrib['type'] in ['checkbox']:
            found_id.append(field.attrib['id'])
            found_name.append(field.attrib['name'])
    for textarea in tree.iter('textarea'):
        found_id.append(textarea.attrib['id'])
        found_name.append(textarea.attrib['name'])
    T.assert_sorted_equal(id_attr, found_id)
    T.assert_sorted_equal(name_attr, found_name)