Example #1
	def test_main(self):
		def smear(l):
			"given a list of dictionary deltas, return a list of dictionaries"
			cur = {}
			out = []
			for delta in l:
				cur.update(delta)
				out.append(dict(cur))
			return out
		json.dump(smear([
			{'started': 1234, 'number': 1, 'tests_failed': 1, 'tests_run': 2, 'elapsed': 4, 'path': 'gs://logs/some-job/1', 'job': 'some-job', 'result': 'SUCCESS'},
			{'number': 2, 'path': 'gs://logs/some-job/2'},
			{'number': 3, 'path': 'gs://logs/some-job/3'},
			{'number': 4, 'path': 'gs://logs/some-job/4'},
			{'number': 5, 'path': 'gs://logs/other-job/5', 'job': 'other-job', 'elapsed': 8},
			{'number': 7, 'path': 'gs://logs/other-job/7', 'result': 'FAILURE'},
		]), open('builds.json', 'w'))
		json.dump(smear([
			{'name': 'example test', 'build': 'gs://logs/some-job/1', 'failure_text': 'some awful stack trace exit 1'},
			{'build': 'gs://logs/some-job/2'},
			{'build': 'gs://logs/some-job/3'},
			{'build': 'gs://logs/some-job/4'},
			{'failure_text': 'some other error message'},
			{'name': 'unrelated test', 'build': 'gs://logs/other-job/5'},
			{'build': 'gs://logs/other-job/7'},
		]), open('tests.json', 'w'))
		summarize.main(summarize.parse_args(['builds.json', 'tests.json']))
		output = json_load_byteified(open('failure_data.json'))

		# uncomment when output changes
		# import pprint; pprint.pprint(output)

		self.assertEqual(output['builds'],
			{'cols': {'elapsed': [8, 8, 4, 4, 4, 4],
                      'executor': [None, None, None, None, None, None],
                      'pr': [None, None, None, None, None, None],
                      'result': ['SUCCESS',
                                 'FAILURE',
                                 'SUCCESS',
                                 'SUCCESS',
                                 'SUCCESS',
                                 'SUCCESS'],
                      'started': [1234, 1234, 1234, 1234, 1234, 1234],
                      'tests_failed': [1, 1, 1, 1, 1, 1],
                      'tests_run': [2, 2, 2, 2, 2, 2]},
             'job_paths': {'other-job': 'gs://logs/other-job',
                            'some-job': 'gs://logs/some-job'},
             'jobs': {'other-job': {'5': 0, '7': 1},
                       'some-job': [1, 4, 2]}})

		random_hash_1 = output['clustered'][0][1]
		random_hash_2 = output['clustered'][1][1]
		self.assertEqual(output['clustered'],
			[['some awful stack trace exit 1', random_hash_1, 'some awful stack trace exit 1',
              [['example test', [['some-job', [1, 2, 3, 4]]]]]],
             ['some other error message', random_hash_2, 'some other error message',
              [['unrelated test', [['other-job', [5, 7]]]],
               ['example test', [['some-job', [4]]]]]]]
        )
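A minimal standalone sketch of the smear helper used in these tests, with a made-up input to show what it produces (the helper body is copied from the example above; the sample data is not from the original file): each delta is merged into a running dict, and a snapshot of the accumulated state is appended after every step.

def smear(deltas):
    """Fold a list of dictionary deltas into cumulative snapshots."""
    cur = {}
    out = []
    for delta in deltas:
        cur.update(delta)
        out.append(dict(cur))
    return out

# Each snapshot carries forward every key seen so far:
# prints [{'a': 1}, {'a': 1, 'b': 2}, {'a': 3, 'b': 2}]
print(smear([{'a': 1}, {'b': 2}, {'a': 3}]))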
Example #2
    def test_main(self):
        def smear(l):
            "given a list of dictionary deltas, return a list of dictionaries"
            cur = {}
            out = []
            for delta in l:
                cur.update(delta)
                out.append(dict(cur))
            return out

        json.dump(
            smear([
                {
                    'started': 1234,
                    'number': 1,
                    'tests_failed': 1,
                    'tests_run': 2,
                    'elapsed': 4,
                    'path': 'gs://logs/some-job/1',
                    'job': 'some-job',
                    'result': 'SUCCESS'
                },
                {
                    'number': 2,
                    'path': 'gs://logs/some-job/2'
                },
                {
                    'number': 3,
                    'path': 'gs://logs/some-job/3'
                },
                {
                    'number': 4,
                    'path': 'gs://logs/some-job/4'
                },
                {
                    'number': 5,
                    'path': 'gs://logs/other-job/5',
                    'job': 'other-job',
                    'elapsed': 8
                },
                {
                    'number': 7,
                    'path': 'gs://logs/other-job/7',
                    'result': 'FAILURE'
                },
            ]), open('builds.json', 'w'))
        tests = smear([
            {
                'name': 'example test',
                'build': 'gs://logs/some-job/1',
                'failure_text': 'some awful stack trace exit 1'
            },
            {
                'build': 'gs://logs/some-job/2'
            },
            {
                'build': 'gs://logs/some-job/3'
            },
            {
                'build': 'gs://logs/some-job/4'
            },
            {
                'name': 'another test',
                'failure_text': 'some other error message'
            },
            {
                'name': 'unrelated test',
                'build': 'gs://logs/other-job/5'
            },
            {},  # intentional dupe
            {
                'build': 'gs://logs/other-job/7'
            },
        ])
        with open('tests.json', 'w') as f:
            for t in tests:
                f.write(json.dumps(t) + '\n')
        json.dump({'node': ['example']}, open('owners.json', 'w'))
        summarize.main(
            summarize.parse_args([
                'builds.json', 'tests.json',
                '--output_slices=failure_data_PREFIX.json',
                '--owners=owners.json'
            ]))
        output = json_load_byteified(open('failure_data.json'))

        # uncomment when output changes
        # import pprint; pprint.pprint(output)

        self.assertEqual(
            output['builds'], {
                'cols': {
                    'elapsed': [8, 8, 4, 4, 4, 4],
                    'executor': [None, None, None, None, None, None],
                    'pr': [None, None, None, None, None, None],
                    'result': [
                        'SUCCESS', 'FAILURE', 'SUCCESS', 'SUCCESS', 'SUCCESS',
                        'SUCCESS'
                    ],
                    'started': [1234, 1234, 1234, 1234, 1234, 1234],
                    'tests_failed': [1, 1, 1, 1, 1, 1],
                    'tests_run': [2, 2, 2, 2, 2, 2]
                },
                'job_paths': {
                    'other-job': 'gs://logs/other-job',
                    'some-job': 'gs://logs/some-job'
                },
                'jobs': {
                    'other-job': {
                        '5': 0,
                        '7': 1
                    },
                    'some-job': [1, 4, 2]
                }
            })

        random_hash_1 = output['clustered'][0]['id']
        random_hash_2 = output['clustered'][1]['id']

        self.assertEqual(output['clustered'], [{
            'id': random_hash_1,
            'key': 'some awful stack trace exit 1',
            'tests': [{
                'jobs': [{
                    'builds': [4, 3, 2, 1],
                    'name': 'some-job'
                }],
                'name': 'example test'
            }],
            'spans': [29],
            'owner': 'node',
            'text': 'some awful stack trace exit 1'
        }, {
            'id': random_hash_2,
            'key': 'some other error message',
            'tests': [{
                'jobs': [{
                    'builds': [7, 5],
                    'name': 'other-job'
                }],
                'name': 'unrelated test'
            }, {
                'jobs': [{
                    'builds': [4],
                    'name': 'some-job'
                }],
                'name': 'another test'
            }],
            'spans': [24],
            'owner': 'testing',
            'text': 'some other error message'
        }])

        slice_output = json_load_byteified(
            open('failure_data_%s.json' % random_hash_1[:2]))

        self.assertEqual(slice_output['clustered'], [output['clustered'][0]])
        self.assertEqual(slice_output['builds']['cols']['started'],
                         [1234, 1234, 1234, 1234])
Example #3
def main():
    args = parse_input_arguments()

    acceptable_modules = [
        "phaseblock", "summarize", "visualize", "extend", "somatic", "ancestry"
    ]
    if args.module in acceptable_modules:
        no_error = True
        error_message = []

        if args.module == "phaseblock":
            if args.bam is None:
                no_error = False
                error_message.append("The phaseblock module requires a --bam.")
            if args.vcf is None:
                no_error = False
                error_message.append("The phaseblock module requires a --vcf.")
            if args.vcf_id is None:
                no_error = False
                error_message.append(
                    "The phaseblock module requires a --vcf_id.")
            if args.range is None:
                no_error = False
                error_message.append(
                    "The phaseblock module requires a --range.")
            if no_error:
                import phaseblock
                x = phaseblock.main(args)
            else:
                sys.exit("\n".join(error_message))
        elif args.module == "extend":
            if args.sum is None:
                no_error = False
                error_message.append("The extend module requires a --sum.")
            if args.pb1 is None:
                no_error = False
                error_message.append("The extend module requires a --pb1.")
            if args.pb2 is None:
                no_error = False
                error_message.append("The extend module requires a --pb2.")
            if args.range is None:
                no_error = False
                error_message.append("The extend module requires a --range.")
            if no_error:
                import extend
                x = extend.main(args)
            else:
                sys.exit("\n".join(error_message))
        elif args.module == "summarize":
            if args.pb1 is None:
                no_error = False
                error_message.append(
                    "The summarize module requires a --pb1 (phase block file)."
                )
            if no_error:
                import summarize
                x = summarize.main(args)
            else:
                sys.exit("\n".join(error_message))
        elif args.module == "somatic":
            if args.pb1 is None:
                no_error = False
                error_message.append(
                    "The somatic module requires a --pb1 (phase block file).")
            if args.range is None:
                no_error = False
                error_message.append(
                    "The somatic module requires a --range (genomic range).")
            if args.maf is None and args.variant is None:
                no_error = False
                error_message.append(
                    "The somatic module requires a --maf (MAF) or --variant (variant IDs)."
                )
            if args.maf is not None and args.variant is not None:
                no_error = False
                error_message.append(
                    "The somatic module can only have a --maf (MAF) or --variant (variant IDs), not both."
                )
            if args.sum is None:
                no_error = False
                error_message.append(
                    "The somatic module requires a --sum (phase block summary file)."
                )
            if no_error:
                import somatic
                x = somatic.main(args)
            else:
                sys.exit("\n".join(error_message))
        elif args.module == "ancestry":
            if args.pb1 is None:
                no_error = False
                error_message.append("The ancestry module requires a --pb1.")
            if args.vcf is None:
                no_error = False
                error_message.append("The ancestry module requires a --vcf.")
            if args.vcf_id is None:
                no_error = False
                error_message.append(
                    "The ancestry module requires a --vcf_id.")
            if args.range is None:
                no_error = False
                error_message.append("The ancestry module requires a --range.")
            if args.ibd is None:
                no_error = False
                error_message.append("The ancestry module requires a --ibd.")
            if args.hbd is None:
                no_error = False
                error_message.append("The ancestry module requires a --hbd.")
            if args.dem is None:
                no_error = False
                error_message.append("The ancestry module requires a --dem.")
            if no_error:
                import ancestry
                x = ancestry.main(args)
            else:
                sys.exit("\n".join(error_message))
        else:
            # Modules listed in acceptable_modules but without a dedicated
            # branch above (currently "visualize") fall through to here.
            print("Not X")

    else:
        sys.exit("Module must be one of " + ', '.join(acceptable_modules) +
                 ".")
Example #4
def search():
    term = request.form['input_text']
    return summarize.main(term)
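The search view in Example #4 reads request.form, which points at a Flask-style handler. A minimal sketch of how such a view might be wired up (the app setup and route path are assumptions for illustration, not taken from the original project; summarize.main(term) is assumed to return a response string):

from flask import Flask, request

import summarize  # assumed to expose main(term) returning a string

app = Flask(__name__)

@app.route('/search', methods=['POST'])  # hypothetical route
def search():
    term = request.form['input_text']
    return summarize.main(term)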
Example #5
    def test_main(self):
        def smear(l):
            "given a list of dictionary deltas, return a list of dictionaries"
            cur = {}
            out = []
            for delta in l:
                cur.update(delta)
                out.append(dict(cur))
            return out
        json.dump(smear([
            {'started': 1234, 'number': 1, 'tests_failed': 1, 'tests_run': 2,
             'elapsed': 4, 'path': 'gs://logs/some-job/1', 'job': 'some-job', 'result': 'SUCCESS'},
            {'number': 2, 'path': 'gs://logs/some-job/2'},
            {'number': 3, 'path': 'gs://logs/some-job/3'},
            {'number': 4, 'path': 'gs://logs/some-job/4'},
            {'number': 5, 'path': 'gs://logs/other-job/5', 'job': 'other-job', 'elapsed': 8},
            {'number': 7, 'path': 'gs://logs/other-job/7', 'result': 'FAILURE'},
        ]), open('builds.json', 'w'))
        tests = smear([
            {'name': 'example test', 'build': 'gs://logs/some-job/1',
             'failure_text': 'some awful stack trace exit 1'},
            {'build': 'gs://logs/some-job/2'},
            {'build': 'gs://logs/some-job/3'},
            {'build': 'gs://logs/some-job/4'},
            {'name': 'another test', 'failure_text': 'some other error message'},
            {'name': 'unrelated test', 'build': 'gs://logs/other-job/5'},
            {},  # intentional dupe
            {'build': 'gs://logs/other-job/7'},
        ])
        with open('tests.json', 'w') as f:
            for t in tests:
                f.write(json.dumps(t) + '\n')
        json.dump({
            'node': ['example']
        }, open('owners.json', 'w'))
        summarize.main(summarize.parse_args(
            ['builds.json', 'tests.json',
             '--output_slices=failure_data_PREFIX.json',
             '--owners=owners.json']))
        output = json_load_byteified(open('failure_data.json'))

        # uncomment when output changes
        # import pprint; pprint.pprint(output)

        self.assertEqual(
            output['builds'],
            {'cols': {'elapsed': [8, 8, 4, 4, 4, 4],
                      'executor': [None, None, None, None, None, None],
                      'pr': [None, None, None, None, None, None],
                      'result': ['SUCCESS',
                                 'FAILURE',
                                 'SUCCESS',
                                 'SUCCESS',
                                 'SUCCESS',
                                 'SUCCESS'],
                      'started': [1234, 1234, 1234, 1234, 1234, 1234],
                      'tests_failed': [1, 1, 1, 1, 1, 1],
                      'tests_run': [2, 2, 2, 2, 2, 2]},
             'job_paths': {'other-job': 'gs://logs/other-job',
                           'some-job': 'gs://logs/some-job'},
             'jobs': {'other-job': {'5': 0, '7': 1}, 'some-job': [1, 4, 2]}})

        random_hash_1 = output['clustered'][0]['id']
        random_hash_2 = output['clustered'][1]['id']

        self.assertEqual(
            output['clustered'],
            [{'id': random_hash_1,
              'key': 'some awful stack trace exit 1',
              'tests': [{'jobs': [{'builds': [4, 3, 2, 1],
                                   'name': 'some-job'}],
                         'name': 'example test'}],
              'spans': [29],
              'owner': 'node',
              'text': 'some awful stack trace exit 1'},
             {'id': random_hash_2,
              'key': 'some other error message',
              'tests': [{'jobs': [{'builds': [7, 5],
                                   'name': 'other-job'}],
                         'name': 'unrelated test'},
                        {'jobs': [{'builds': [4], 'name': 'some-job'}],
                         'name': 'another test'}],
              'spans': [24],
              'owner': 'testing',
              'text': 'some other error message'}]
        )

        slice_output = json_load_byteified(open('failure_data_%s.json' % random_hash_1[:2]))

        self.assertEqual(slice_output['clustered'], [output['clustered'][0]])
        self.assertEqual(slice_output['builds']['cols']['started'], [1234, 1234, 1234, 1234])