def test_scribe_tail_log_everything():
    env = "fake_env"
    stream_name = "fake_stream"
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    components = ["build", "deploy"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    queue = Queue()
    filter_fn = mock.Mock(return_value=True)
    tailer = iter(
        [
            format_log_line(levels[0], clusters, instance, "build", "level: first. component: build."),
            format_log_line(levels[1], clusters, instance, "deploy", "level: second. component: deploy."),
        ]
    )
    with contextlib.nested(mock.patch("paasta_tools.cli.cmds.logs.scribereader", autospec=True)) as (
        mock_scribereader,
    ):
        mock_scribereader.get_env_scribe_host.return_value = {"host": "fake_host", "port": "fake_port"}
        mock_scribereader.get_stream_tailer.return_value = tailer
        logs.scribe_tail(env, stream_name, service, levels, components, clusters, queue, filter_fn)
        assert mock_scribereader.get_env_scribe_host.call_count == 1
        mock_scribereader.get_stream_tailer.assert_called_once_with(stream_name, "fake_host", "fake_port")
        assert queue.qsize() == 2
        # Sadly, fetching with a timeout seems to be needed with
        # multiprocessing.Queue (this was not the case with Queue.Queue). It
        # failed 8/10 times with a get_nowait() vs 0/10 times with a 0.1s
        # timeout.
        first_line = queue.get(True, 0.1)
        assert "level: first. component: build." in first_line
        second_line = queue.get(True, 0.1)
        assert "level: second. component: deploy." in second_line

def test_format_log_line_rejects_invalid_components():
    with raises(utils.NoSuchLogComponent):
        utils.format_log_line(
            level="debug",
            cluster="fake_cluster",
            service="fake_service",
            instance="fake_instance",
            line="fake_line",
            component="BOGUS_COMPONENT",
        )

def test_format_log_line_rejects_invalid_components():
    with raises(utils.NoSuchLogComponent):
        utils.format_log_line(
            level='debug',
            cluster='fake_cluster',
            service='fake_service',
            instance='fake_instance',
            line='fake_line',
            component='BOGUS_COMPONENT',
        )

def test_scribe_tail_log_nothing():
    env = 'fake_env'
    stream_name = 'fake_stream'
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    components = ['build', 'deploy']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    queue = Queue()
    filter_fn = mock.Mock(return_value=False)
    tailer = iter([
        format_log_line(
            levels[0],
            clusters,
            instance,
            'build',
            'level: first. component: build.',
        ),
        format_log_line(
            levels[1],
            clusters,
            instance,
            'deploy',
            'level: second. component: deploy.',
        ),
    ])
    with contextlib.nested(
        mock.patch('paasta_tools.cli.cmds.logs.scribereader', autospec=True),
    ) as (
        mock_scribereader,
    ):
        mock_scribereader.get_env_scribe_host.return_value = {
            'host': 'fake_host',
            'port': 'fake_port',
        }
        mock_scribereader.get_stream_tailer.return_value = tailer
        logs.scribe_tail(
            env,
            stream_name,
            service,
            levels,
            components,
            clusters,
            queue,
            filter_fn,
        )
        assert queue.qsize() == 0

def test_format_log_line():
    input_line = 'foo'
    fake_cluster = 'fake_cluster'
    fake_service = 'fake_service'
    fake_instance = 'fake_instance'
    fake_component = 'build'
    fake_level = 'debug'
    fake_now = 'fake_now'
    expected = json.dumps({
        'timestamp': fake_now,
        'level': fake_level,
        'cluster': fake_cluster,
        'service': fake_service,
        'instance': fake_instance,
        'component': fake_component,
        'message': input_line,
    }, sort_keys=True)
    with mock.patch('paasta_tools.utils._now', autospec=True) as mock_now:
        mock_now.return_value = fake_now
        actual = utils.format_log_line(
            level=fake_level,
            cluster=fake_cluster,
            service=fake_service,
            instance=fake_instance,
            component=fake_component,
            line=input_line,
        )
        assert actual == expected

def test_format_log_line():
    input_line = "foo"
    fake_cluster = "fake_cluster"
    fake_service = "fake_service"
    fake_instance = "fake_instance"
    fake_component = "build"
    fake_level = "debug"
    fake_now = "fake_now"
    expected = json.dumps(
        {
            "timestamp": fake_now,
            "level": fake_level,
            "cluster": fake_cluster,
            "service": fake_service,
            "instance": fake_instance,
            "component": fake_component,
            "message": input_line,
        },
        sort_keys=True,
    )
    with mock.patch("paasta_tools.utils._now", autospec=True) as mock_now:
        mock_now.return_value = fake_now
        actual = utils.format_log_line(
            level=fake_level,
            cluster=fake_cluster,
            service=fake_service,
            instance=fake_instance,
            component=fake_component,
            line=input_line,
        )
        assert actual == expected

def test_format_log_line_with_timestamp():
    input_line = 'foo'
    fake_cluster = 'fake_cluster'
    fake_service = 'fake_service'
    fake_instance = 'fake_instance'
    fake_component = 'build'
    fake_level = 'debug'
    fake_timestamp = 'fake_timestamp'
    expected = json.dumps({
        'timestamp': fake_timestamp,
        'level': fake_level,
        'cluster': fake_cluster,
        'service': fake_service,
        'instance': fake_instance,
        'component': fake_component,
        'message': input_line,
    }, sort_keys=True)
    actual = utils.format_log_line(
        fake_level,
        fake_cluster,
        fake_service,
        fake_instance,
        fake_component,
        input_line,
        timestamp=fake_timestamp,
    )
    assert actual == expected

def test_paasta_log_line_passes_filter_true_when_default_cluster():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    components = ['build', 'deploy']
    line = 'fake_line'
    formatted_line = format_log_line(levels[0], ANY_CLUSTER, service, instance, components[0], line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters) is True

def test_paasta_log_line_passes_filter_false_when_wrong_level():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    components = ['build', 'deploy']
    line = 'fake_line'
    formatted_line = format_log_line('BOGUS_LEVEL', clusters[0], service, instance, components[0], line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters) is False

def test_paasta_log_line_passes_filter_false_when_wrong_level():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    components = ["build", "deploy"]
    line = "fake_line"
    formatted_line = format_log_line("BOGUS_LEVEL", clusters[0], service, instance, components[0], line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters) is False

def test_paasta_log_line_passes_filter_true_when_default_cluster():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    components = ["build", "deploy"]
    line = "fake_line"
    formatted_line = format_log_line(levels[0], ANY_CLUSTER, service, instance, components[0], line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters) is True

def test_paasta_log_line_passes_filter_true():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    instances = [instance]
    components = ['build', 'deploy']
    line = 'fake_line'
    formatted_line = format_log_line(levels[0], clusters[0], service, instance, components[0], line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters, instances) is True

def test_paasta_log_line_passes_filter_false_when_wrong_cluster():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    components = ['build', 'deploy']
    line = 'fake_line'
    # the line's cluster must not be in the list of requested clusters
    formatted_line = format_log_line(levels[0], 'BOGUS_CLUSTER', service, instance, components[0], line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters) is False

def test_chronos_log_line_passes_filter_true_when_service_name_in_string():
    service = "fake_service"
    levels = []
    components = []
    clusters = []
    line = format_log_line(
        "fake_level", clusters, service, "fake_instance", "chronos", "fake message with service name %s" % service
    )
    with mock.patch("paasta_tools.chronos_tools.compose_job_id", autospec=True) as format_job_id_patch:
        format_job_id_patch.return_value = service
        assert logs.chronos_log_line_passes_filter(line, levels, service, components, clusters)

def test_paasta_log_line_passes_filter_false_when_wrong_component():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    components = ["build", "deploy"]
    line = "fake_line"
    # component must be legit as well as not in the list of requested
    # components
    formatted_line = format_log_line(levels[0], clusters[0], service, instance, "monitoring", line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters) is False

def test_paasta_log_line_passes_filter_false_when_wrong_component():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    components = ['build', 'deploy']
    line = 'fake_line'
    # component must be legit as well as not in the list of requested
    # components
    formatted_line = format_log_line(levels[0], clusters[0], service, instance, 'monitoring', line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters) is False

def test_scribe_tail_log_nothing():
    env = "fake_env"
    stream_name = "fake_stream"
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    components = ["build", "deploy"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    queue = Queue()
    filter_fn = mock.Mock(return_value=False)
    tailer = iter(
        [
            format_log_line(
                levels[0],
                clusters,
                instance,
                "build",
                "level: first. component: build.",
            ),
            format_log_line(
                levels[1],
                clusters,
                instance,
                "deploy",
                "level: second. component: deploy.",
            ),
        ]
    )
    with mock.patch(
        "paasta_tools.cli.cmds.logs.scribereader", autospec=True
    ) as mock_scribereader:
        mock_scribereader.get_env_scribe_host.return_value = {
            "host": "fake_host",
            "port": "fake_port",
        }
        mock_scribereader.get_stream_tailer.return_value = tailer
        logs.scribe_tail(
            env, stream_name, service, levels, components, clusters, queue, filter_fn
        )
        assert queue.qsize() == 0

def test_marathon_log_line_passes_filter_false_when_service_name_missing():
    service = "fake_service"
    levels = []
    components = []
    clusters = []
    line = format_log_line(
        "fake_level", clusters, service, "fake_instance", "marathon", "fake message without service name"
    )
    with mock.patch("paasta_tools.cli.cmds.logs.format_job_id", autospec=True) as format_job_id_patch:
        format_job_id_patch.return_value = service
        assert not logs.marathon_log_line_passes_filter(line, levels, service, components, clusters)

def parse_marathon_log_line(line, clusters):
    utc_timestamp = extract_utc_timestamp_from_log_line(line)
    if not utc_timestamp:
        return ""
    else:
        return format_log_line(
            level="event",
            cluster=clusters[0],
            instance="ALL",
            component="marathon",
            line=line.strip(),
            timestamp=utc_timestamp.strftime("%Y-%m-%dT%H:%M:%S.%f"),
        )

def parse_chronos_log_line(line, clusters):
    utc_timestamp = extract_utc_timestamp_from_log_line(line)
    if not utc_timestamp:
        return ''
    else:
        return format_log_line(
            level='event',
            cluster=clusters[0],
            instance='ALL',
            component='chronos',
            line=line.strip(),
            timestamp=utc_timestamp.strftime("%Y-%m-%dT%H:%M:%S.%f"),
        )

def test_paasta_log_line_passes_filter_true():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    instances = [instance]
    components = ["build", "deploy"]
    line = "fake_line"
    formatted_line = format_log_line(levels[0], clusters[0], service, instance, components[0], line)
    assert (logs.paasta_log_line_passes_filter(
        formatted_line, levels, service, components, clusters, instances) is True)

def test_paasta_log_line_passes_filter_false_when_wrong_level():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    instances = [instance]
    components = ["build", "deploy"]
    line = "fake_line"
    formatted_line = format_log_line("BOGUS_LEVEL", clusters[0], service, instance, components[0], line)
    assert (logs.paasta_log_line_passes_filter(
        formatted_line, levels, service, components, clusters, instances) is False)

def test_paasta_log_line_passes_filter_false_when_wrong_level():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    instances = [instance]
    components = ['build', 'deploy']
    line = 'fake_line'
    formatted_line = format_log_line('BOGUS_LEVEL', clusters[0], service, instance, components[0], line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters, instances) is False

def test_paasta_log_line_passes_filter_false_when_invalid_time():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    components = ['build', 'deploy']
    line = 'fake_line'
    formatted_line = format_log_line(levels[0], clusters[0], service, instance, components[0], line,
                                     timestamp=isodate.datetime_isoformat(datetime.datetime.utcnow()))
    start_time, end_time = logs.generate_start_end_time(from_string="5m", to_string="3m")
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters,
                                              start_time=start_time, end_time=end_time) is False

def parse_marathon_log_line(line, clusters, service):
    utc_timestamp = extract_utc_timestamp_from_log_line(line)
    if not utc_timestamp:
        return ''
    else:
        return format_log_line(
            level='event',
            cluster=clusters[0],
            service=service,
            instance='ALL',
            component='marathon',
            line=line.strip(),
            timestamp=utc_timestamp.strftime("%Y-%m-%dT%H:%M:%S.%f"),
        )

def parse_chronos_log_line(line, clusters, service):
    utc_timestamp = extract_utc_timestamp_from_log_line(line)
    if not utc_timestamp:
        return ""
    else:
        return format_log_line(
            level="event",
            cluster=clusters[0],
            service=service,
            instance="ALL",
            component="chronos",
            line=line.strip(),
            timestamp=utc_timestamp.strftime("%Y-%m-%dT%H:%M:%S.%f"),
        )

def test_paasta_log_line_passes_filter_true_when_valid_time():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    components = ['build', 'deploy']
    line = 'fake_line'
    formatted_line = format_log_line(levels[0], clusters[0], service, instance, components[0], line,
                                     timestamp="2016-06-07T23:46:03+00:00")
    start_time = isodate.parse_datetime("2016-06-07T23:40:03+00:00")
    end_time = isodate.parse_datetime("2016-06-07T23:50:03+00:00")
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters,
                                              start_time=start_time, end_time=end_time) is True

def test_chronos_log_line_passes_filter_true_when_service_name_in_string():
    service = 'fake_service'
    levels = []
    components = []
    clusters = []
    line = format_log_line(
        'fake_level',
        clusters,
        'fake_instance',
        'chronos',
        'fake message with service name %s' % service,
    )
    with mock.patch('paasta_tools.chronos_tools.compose_job_id', autospec=True) as format_job_id_patch:
        format_job_id_patch.return_value = service
        assert logs.chronos_log_line_passes_filter(line, levels, service, components, clusters)

def test_paasta_log_line_passes_filter_false_when_wrong_instance():
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'non-existent_instance'
    instances = ['fake_instance']
    components = ['build', 'deploy']
    line = 'fake_line'
    # the line's instance must not be in the list of requested instances
    formatted_line = format_log_line(levels[0], 'BOGUS_CLUSTER', service, instance, components[0], line)
    assert logs.paasta_log_line_passes_filter(formatted_line, levels, service, components, clusters, instances) is False

def test_paasta_log_line_passes_filter_false_when_wrong_instance():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "non-existent_instance"
    instances = ["fake_instance"]
    components = ["build", "deploy"]
    line = "fake_line"
    # the line's instance must not be in the list of requested instances
    formatted_line = format_log_line(levels[0], "BOGUS_CLUSTER", service, instance, components[0], line)
    assert (logs.paasta_log_line_passes_filter(
        formatted_line, levels, service, components, clusters, instances) is False)

def test_scribe_tail_log_nothing():
    env = "fake_env"
    stream_name = "fake_stream"
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    components = ["build", "deploy"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    queue = Queue()
    filter_fn = mock.Mock(return_value=False)
    tailer = iter(
        [
            format_log_line(levels[0], clusters, instance, "build", "level: first. component: build."),
            format_log_line(levels[1], clusters, instance, "deploy", "level: second. component: deploy."),
        ]
    )
    with contextlib.nested(mock.patch("paasta_tools.cli.cmds.logs.scribereader", autospec=True)) as (
        mock_scribereader,
    ):
        mock_scribereader.get_env_scribe_host.return_value = {"host": "fake_host", "port": "fake_port"}
        mock_scribereader.get_stream_tailer.return_value = tailer
        logs.scribe_tail(env, stream_name, service, levels, components, clusters, queue, filter_fn)
        assert queue.qsize() == 0

def test_marathon_log_line_passes_filter_false_when_service_name_missing():
    service = 'fake_service'
    levels = []
    components = []
    clusters = []
    line = format_log_line(
        'fake_level',
        clusters,
        'fake_instance',
        'marathon',
        'fake message without service name',
    )
    with mock.patch('paasta_tools.cli.cmds.logs.format_job_id', autospec=True) as format_job_id_patch:
        format_job_id_patch.return_value = service
        assert not logs.marathon_log_line_passes_filter(line, levels, service, components, clusters)

def test_marathon_log_line_passes_filter_false_when_service_name_missing():
    service = 'fake_service'
    levels = []
    components = []
    clusters = []
    instances = []
    line = format_log_line(
        'fake_level',
        clusters,
        service,
        'fake_instance',
        'marathon',
        'fake message without service name',
    )
    with mock.patch('paasta_tools.cli.cmds.logs.format_job_id', autospec=True) as format_job_id_patch:
        format_job_id_patch.return_value = service
        assert not logs.marathon_log_line_passes_filter(line, levels, service, components, clusters, instances)

def test_format_log_line():
    input_line = 'foo'
    fake_cluster = 'fake_cluster'
    fake_instance = 'fake_instance'
    fake_component = 'build'
    fake_level = 'debug'
    fake_now = 'fake_now'
    expected = json.dumps({
        'timestamp': fake_now,
        'level': fake_level,
        'cluster': fake_cluster,
        'instance': fake_instance,
        'component': fake_component,
        'message': input_line,
    }, sort_keys=True)
    with mock.patch('paasta_tools.utils._now', autospec=True) as mock_now:
        mock_now.return_value = fake_now
        actual = utils.format_log_line(fake_level, fake_cluster, fake_instance, fake_component, input_line)
        assert actual == expected

def test_paasta_log_line_passes_filter_true_when_default_instance():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    instances = None
    pods = None
    components = ["build", "deploy"]
    line = "fake_line"
    formatted_line = format_log_line(
        levels[0],
        ANY_CLUSTER,
        service,
        instance,
        components[0],
        line,
    )
    assert (
        logs.paasta_log_line_passes_filter(
            formatted_line, levels, service, components, clusters, instances, pods
        )
        is True
    )

def test_chronos_log_line_passes_filter_false_when_service_name_missing():
    service = "fake_service"
    levels = []
    components = []
    clusters = []
    instances = []
    line = format_log_line(
        "fake_level",
        clusters,
        service,
        "fake_instance",
        "chronos",
        "fake message without service name",
    )
    with mock.patch("paasta_tools.chronos_tools.compose_job_id", autospec=True) as format_job_id_patch:
        format_job_id_patch.return_value = service
        assert not logs.chronos_log_line_passes_filter(
            line, levels, service, components, clusters, instances)

def test_marathon_log_line_passes_filter_true_when_service_name_in_string():
    service = "fake_service"
    levels = []
    components = []
    clusters = []
    instances = []
    line = format_log_line(
        "fake_level",
        clusters,
        service,
        "fake_instance",
        "marathon",
        "fake message with service name %s" % service,
    )
    with mock.patch("paasta_tools.cli.cmds.logs.format_job_id", autospec=True) as format_job_id_patch:
        format_job_id_patch.return_value = service
        assert logs.marathon_log_line_passes_filter(line, levels, service, components, clusters, instances)

def test_paasta_log_line_passes_filter_false_when_wrong_component():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    instances = [instance]
    pods = None
    components = ["build", "deploy"]
    line = "fake_line"
    # component must be legit as well as not in the list of requested
    # components
    formatted_line = format_log_line(
        levels[0],
        clusters[0],
        service,
        instance,
        "monitoring",
        line,
    )
    assert (
        logs.paasta_log_line_passes_filter(
            formatted_line, levels, service, components, clusters, instances, pods
        )
        is False
    )

def test_paasta_log_line_passes_filter_false_when_invalid_time():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    instances = [instance]
    pods = None
    components = ["build", "deploy"]
    line = "fake_line"
    formatted_line = format_log_line(
        levels[0],
        clusters[0],
        service,
        instance,
        components[0],
        line,
        timestamp=isodate.datetime_isoformat(datetime.datetime.utcnow()),
    )
    start_time, end_time = logs.generate_start_end_time(
        from_string="5m", to_string="3m"
    )
    assert (
        logs.paasta_log_line_passes_filter(
            formatted_line,
            levels,
            service,
            components,
            clusters,
            instances,
            pods,
            start_time=start_time,
            end_time=end_time,
        )
        is False
    )

def test_format_log_line_with_timestamp():
    input_line = "foo"
    fake_cluster = "fake_cluster"
    fake_service = "fake_service"
    fake_instance = "fake_instance"
    fake_component = "build"
    fake_level = "debug"
    fake_timestamp = "fake_timestamp"
    expected = json.dumps(
        {
            "timestamp": fake_timestamp,
            "level": fake_level,
            "cluster": fake_cluster,
            "service": fake_service,
            "instance": fake_instance,
            "component": fake_component,
            "message": input_line,
        },
        sort_keys=True,
    )
    actual = utils.format_log_line(
        fake_level,
        fake_cluster,
        fake_service,
        fake_instance,
        fake_component,
        input_line,
        timestamp=fake_timestamp,
    )
    assert actual == expected

def test_paasta_log_line_passes_filter_true_when_valid_time():
    service = "fake_service"
    levels = ["fake_level1", "fake_level2"]
    clusters = ["fake_cluster1", "fake_cluster2"]
    instance = "fake_instance"
    instances = [instance]
    pods = None
    components = ["build", "deploy"]
    line = "fake_line"
    formatted_line = format_log_line(
        levels[0],
        clusters[0],
        service,
        instance,
        components[0],
        line,
        timestamp="2016-06-07T23:46:03+00:00",
    )
    start_time = isodate.parse_datetime("2016-06-07T23:40:03+00:00")
    end_time = isodate.parse_datetime("2016-06-07T23:50:03+00:00")
    assert (
        logs.paasta_log_line_passes_filter(
            formatted_line,
            levels,
            service,
            components,
            clusters,
            instances,
            pods,
            start_time=start_time,
            end_time=end_time,
        )
        is True
    )

def test_format_log_line_with_timestamp():
    input_line = 'foo'
    fake_cluster = 'fake_cluster'
    fake_instance = 'fake_instance'
    fake_component = 'build'
    fake_level = 'debug'
    fake_timestamp = 'fake_timestamp'
    expected = json.dumps({
        'timestamp': fake_timestamp,
        'level': fake_level,
        'cluster': fake_cluster,
        'instance': fake_instance,
        'component': fake_component,
        'message': input_line,
    }, sort_keys=True)
    actual = utils.format_log_line(
        fake_level,
        fake_cluster,
        fake_instance,
        fake_component,
        input_line,
        timestamp=fake_timestamp,
    )
    assert actual == expected

def test_format_log_line_rejects_invalid_components():
    with raises(utils.NoSuchLogComponent):
        utils.format_log_line('fake_service', 'fake_line', 'BOGUS_COMPONENT', 'debug', 'fake_input')

def test_scribe_tail_log_everything():
    env = 'fake_env'
    stream_name = 'fake_stream'
    service = 'fake_service'
    levels = ['fake_level1', 'fake_level2']
    components = ['build', 'deploy']
    clusters = ['fake_cluster1', 'fake_cluster2']
    instance = 'fake_instance'
    queue = Queue()
    filter_fn = mock.Mock(return_value=True)
    tailer = iter([
        format_log_line(
            levels[0],
            clusters,
            instance,
            'build',
            'level: first. component: build.',
        ),
        format_log_line(
            levels[1],
            clusters,
            instance,
            'deploy',
            'level: second. component: deploy.',
        ),
    ])
    with contextlib.nested(
        mock.patch('paasta_tools.cli.cmds.logs.scribereader', autospec=True),
    ) as (
        mock_scribereader,
    ):
        mock_scribereader.get_env_scribe_host.return_value = {
            'host': 'fake_host',
            'port': 'fake_port',
        }
        mock_scribereader.get_stream_tailer.return_value = tailer
        logs.scribe_tail(
            env, stream_name, service, levels, components, clusters, queue, filter_fn
        )
        assert mock_scribereader.get_env_scribe_host.call_count == 1
        mock_scribereader.get_stream_tailer.assert_called_once_with(
            stream_name,
            'fake_host',
            'fake_port',
        )
        assert queue.qsize() == 2
        # Sadly, fetching with a timeout seems to be needed with
        # multiprocessing.Queue (this was not the case with Queue.Queue). It
        # failed 8/10 times with a get_nowait() vs 0/10 times with a 0.1s
        # timeout.
        first_line = queue.get(True, 0.1)
        assert 'level: first. component: build.' in first_line
        second_line = queue.get(True, 0.1)
        assert 'level: second. component: deploy.' in second_line