Example No. 1
 def test_failures_by_type_added(self):
   aggregated_results = self._make_test_json({
       "builds": ["2", "1"],
       "tests": {
           "001.html": {
               "results": [[100, TEXT], [100, FAIL]],
               "times": [[200, 0]],
           }
       }
   }, json_string=JSON_RESULTS_OLD_TEMPLATE)
   incremental_results = self._make_test_json({
       "builds": ["3"],
       "tests": {
           "001.html": {
               "results": [[1, TEXT]],
               "times": [[1, 0]],
           }
       }
   }, json_string=JSON_RESULTS_OLD_TEMPLATE)
   incremental_json, _ = JsonResults._get_incremental_json(self._builder,
       JsonResults.load_json(incremental_results),
       is_full_results_format=False)
   merged_results, _ = JsonResults.merge(self._builder, aggregated_results,
       incremental_json, num_runs=201, sort_keys=True)
   self.assert_json_equal(merged_results, self._make_test_json({
       "builds": ["3", "2", "1"],
       "tests": {
           "001.html": {
               "results": [[101, TEXT], [100, FAIL]],
               "times": [[201, 0]],
           }
       }
   }))
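The assertion above relies on the run-length encoding used throughout these aggregated files: every "results" and "times" entry is a [count, value] pair with the most recent runs first, so prepending the incremental run [1, TEXT] coalesces with the existing [100, TEXT] head entry into [101, TEXT]. The helper below is only a minimal sketch of that coalescing step under those assumptions; it is not the actual JsonResults.merge code, and the name prepend_run_length is made up for illustration.

def prepend_run_length(incremental, aggregated):
    # Entries are [count, value] pairs, newest first. Start with the
    # incremental runs and fold the aggregated runs in behind them,
    # merging adjacent runs that share the same value.
    merged = [list(entry) for entry in incremental]
    for count, value in aggregated:
        if merged and merged[-1][1] == value:
            merged[-1][0] += count
        else:
            merged.append([count, value])
    return merged

# prepend_run_length([[1, TEXT]], [[100, TEXT], [100, FAIL]])
# -> [[101, TEXT], [100, FAIL]]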
Example No. 2
 def test_normalize_results_with_top_level_results_key_does_not_crash():
   aggregated_json = {
       'Linux Tests': {
           'results': {'foo': {'results': [(1, 'P')],
                               'times': [(1, 1)]}},
       }
   }
   JsonResults._normalize_results(aggregated_json, 1, 2)
Example No. 3
    def get(self):  # pragma: no cover
        key = self.request.get(PARAM_KEY)
        master = self.request.get(PARAM_MASTER)
        builder = self.request.get(PARAM_BUILDER)
        test_type = self.request.get(PARAM_TEST_TYPE)
        build_number = self.request.get(PARAM_BUILD_NUMBER, default_value=None)
        name = self.request.get(PARAM_NAME)
        before = self.request.get(PARAM_BEFORE)
        num_files = self.request.get(PARAM_NUM_FILES)
        test_list_json = self.request.get(PARAM_TEST_LIST_JSON)
        callback_name = self.request.get(PARAM_CALLBACK)

        logging.debug(("Getting files, master %s, builder: %s, test_type: %s, "
                       "build_number: %s, name: %s, before: %s."), master,
                      builder, test_type, build_number, name, before)

        if key:
            json, date = self._get_file_content_from_key(key)
        elif (num_files or not master or not builder or not test_type
              or (not build_number and not JsonResults.is_aggregate_file(name))
              or not name):
            limit = int(num_files) if num_files else 100
            self._get_file_list(master, builder, test_type, build_number, name,
                                before, limit, callback_name)
            return
        else:
            # FIXME: Stop using the old master name style after all files have been
            # updated.
            master_data = master_config.getMaster(master)
            if not master_data:
                master_data = master_config.getMasterByMasterName(master)
            if not master_data:
                self.response.headers["Access-Control-Allow-Origin"] = "*"
                self.response.set_status(404)
                return

            json, date = self._get_file_content(master_data['url_name'],
                                                builder, test_type,
                                                build_number, name)
            if json is None:
                json, date = self._get_file_content(master_data['name'],
                                                    builder, test_type,
                                                    build_number, name)

            if json and test_list_json:
                json = JsonResults.get_test_list(builder, json)

        if json:
            json = _replace_jsonp_callback(json, callback_name)

        self._serve_json(json, date)
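The handler ends by passing the stored payload through _replace_jsonp_callback, which is defined outside this snippet. The sketch below captures what this call site appears to need (drop any JSONP wrapper already baked into the stored file, then re-wrap with the requested callback, or return bare JSON when none was asked for). Treat it as an assumption about the behavior, not the project's implementation.

import re

def replace_jsonp_callback_sketch(json_string, callback_name):
    # Drop an existing "someCallback(...);" wrapper, if present.
    match = re.match(r'^\w+\((.*)\);?$', json_string, re.DOTALL)
    if match:
        json_string = match.group(1)
    # Re-wrap with the caller-supplied callback, or serve plain JSON.
    if callback_name:
        return '%s(%s);' % (callback_name, json_string)
    return json_string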
Example No. 4
  def get(self):  # pragma: no cover
    key = self.request.get(PARAM_KEY)
    master = self.request.get(PARAM_MASTER)
    builder = self.request.get(PARAM_BUILDER)
    test_type = self.request.get(PARAM_TEST_TYPE)
    build_number = self.request.get(PARAM_BUILD_NUMBER, default_value=None)
    name = self.request.get(PARAM_NAME)
    before = self.request.get(PARAM_BEFORE)
    num_files = self.request.get(PARAM_NUM_FILES)
    test_list_json = self.request.get(PARAM_TEST_LIST_JSON)
    callback_name = self.request.get(PARAM_CALLBACK)

    logging.debug(("Getting files, master %s, builder: %s, test_type: %s, "
                   "build_number: %s, name: %s, before: %s."),
        master, builder, test_type, build_number, name, before)

    if key:
      json, date = self._get_file_content_from_key(key)
    elif (num_files or not master or not builder or not test_type
          or (not build_number and not JsonResults.is_aggregate_file(name))
          or not name):
      limit = int(num_files) if num_files else 100
      self._get_file_list(master, builder, test_type, build_number, name,
          before, limit, callback_name)
      return
    else:
      # FIXME: Stop using the old master name style after all files have been
      # updated.
      master_data = master_config.getMaster(master)
      if not master_data:
        master_data = master_config.getMasterByMasterName(master)
      if not master_data:
        self.response.headers["Access-Control-Allow-Origin"] = "*"
        self.response.set_status(404)
        return

      json, date = self._get_file_content(
          master_data['url_name'], builder, test_type, build_number, name)
      if json is None:
        json, date = self._get_file_content(
            master_data['name'], builder, test_type, build_number, name)

      if json and test_list_json:
        json = JsonResults.get_test_list(builder, json)

    if json:
      json = _replace_jsonp_callback(json, callback_name)

    self._serve_json(json, date)
Example No. 5
  def test_update_files_empty_incremental_data(self):
    small_file = MockFile(name='results-small.json')
    large_file = MockFile(name='results.json')

    aggregated_data = {
        "builds": ["2", "1"],
        "tests": {
            "001.html": {
                "results": [[200, jsonresults.TEXT]],
                "times": [[200, 0]],
            }
        }
    }
    aggregated_string = self._make_test_json(
        aggregated_data, builder_name=small_file.builder)

    small_file.data = large_file.data = aggregated_string

    incremental_data = {
        "builds": [],
        "tests": {}
    }
    incremental_string = self._make_test_json(
        incremental_data, builder_name=small_file.builder)

    results_tuple = JsonResults.update_files(small_file.builder,
        incremental_string, small_file, large_file,
        is_full_results_format=False)
    self.assertEqual(results_tuple, ('No incremental JSON data to merge.', 403))
    self.assert_json_equal(small_file.data, aggregated_string)
    self.assert_json_equal(large_file.data, aggregated_string)
Example No. 6
 def test_is_invalid_full_results_json_incorrect_failure_type_count(self):
   self.assertFalse(JsonResults.is_valid_full_results_json({
     'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
     'build_number': '12345',
     'version': '5',
     'builder_name': 'foobar',
     'seconds_since_epoch': '12345',
     'num_failures_by_type': {'FAIL': 'foobar'},
     'tests': {},
   }))
Example No. 7
 def test_merge_with_empty_aggregated_results(self):
   incremental_data = {
       "builds": ["2", "1"],
       "tests": {
           "001.html": {
               "results": [[200, jsonresults.TEXT]],
               "times": [[200, 0]],
           }
       }
   }
   incremental_json = JsonResults.load_json(
       self._make_test_json(incremental_data))
   incremental_results, _ = JsonResults._get_incremental_json(
       self._builder, incremental_json, is_full_results_format=False)
   aggregated_results = ""
   merged_results, _ = JsonResults.merge(self._builder, aggregated_results,
       incremental_results, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS,
       sort_keys=True)
   self.assert_json_equal(merged_results, incremental_results)
Example No. 8
 def test_is_invalid_full_results_json_missing_required_fields_in_test(self):
   self.assertFalse(JsonResults.is_valid_full_results_json({
     'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
     'build_number': '12345',
     'version': '5',
     'builder_name': 'foobar',
     'seconds_since_epoch': '12345',
     'num_failures_by_type': {'FAIL': '123'},
     'tests': {'test': {'actual': '10'}},
   }))
Example No. 9
 def test_is_valid_full_results_json(self):
   self.assertTrue(JsonResults.is_valid_full_results_json({
     'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
     'build_number': '12345',
     'version': '5',
     'builder_name': 'foobar',
     'seconds_since_epoch': '12345',
     'num_failures_by_type': {'FAIL': '123'},
     'tests': {'test': {'actual': 'FAIL', 'expected': 'PASS', 'time': '10'}},
   }))
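Taken together, the is_valid_full_results_json tests in this set pin down a handful of checks: the payload must be a dict, a fixed list of top-level fields must be present, the per-type failure counts must be castable to int, and every test entry needs at least 'actual' and 'expected'. The sketch below only restates the checks these fixtures imply; the real validator may do more (for example, some of the later variants also carry an optional 'blink_revision').

REQUIRED_TOP_LEVEL_FIELDS = (
    'chromium_revision', 'build_number', 'version', 'builder_name',
    'seconds_since_epoch', 'num_failures_by_type', 'tests')

def is_valid_full_results_json_sketch(results):
    if not isinstance(results, dict):
        return False
    if any(field not in results for field in REQUIRED_TOP_LEVEL_FIELDS):
        return False
    counts = results['num_failures_by_type']
    if not isinstance(counts, dict):
        return False
    try:
        for count in counts.values():
            int(count)  # 'foobar'-style counts are rejected.
    except (TypeError, ValueError):
        return False
    tests = results['tests']
    if not isinstance(tests, dict):
        return False
    for test in tests.values():
        if 'actual' not in test or 'expected' not in test:
            return False
    return True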
Example No. 10
 def test_is_invalid_full_results_json_incorrect_int_fields(self):
   self.assertFalse(JsonResults.is_valid_full_results_json({
     'chromium_revision': 'foobar',
     'build_number': 'foobar',
     'version': 'foobar',
     'builder_name': 'foobar',
     'seconds_since_epoch': 'foobar',
     'num_failures_by_type': 'foobar',
     'tests': 'foobar',
   }))
Example No. 11
 def test_is_invalid_full_results_json_incorrect_int_fields(self):
   self.assertFalse(JsonResults.is_valid_full_results_json({
     'chromium_revision': 'foobar',
     'blink_revision': 'foobar',
     'build_number': 'foobar',
     'version': 'foobar',
     'builder_name': 'foobar',
     'seconds_since_epoch': 'foobar',
     'num_failures_by_type': 'foobar',
     'tests': 'foobar',
   }))
Example No. 12
 def test_is_invalid_full_results_json_incorrect_failure_type_count(self):
   self.assertFalse(JsonResults.is_valid_full_results_json({
     'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
     'blink_revision': '12345',
     'build_number': '12345',
     'version': '5',
     'builder_name': 'foobar',
     'seconds_since_epoch': '12345',
     'num_failures_by_type': {'FAIL': 'foobar'},
     'tests': {},
   }))
Example No. 13
 def test_is_valid_full_results_json(self):
   self.assertTrue(JsonResults.is_valid_full_results_json({
     'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
     'blink_revision': '12345',
     'build_number': '12345',
     'version': '5',
     'builder_name': 'foobar',
     'seconds_since_epoch': '12345',
     'num_failures_by_type': {'FAIL': '123'},
     'tests': {'test': {'actual': 'FAIL', 'expected': 'PASS', 'time': '10'}},
   }))
Example No. 14
 def test_is_invalid_full_results_json_missing_required_fields_in_test(self):
   self.assertFalse(JsonResults.is_valid_full_results_json({
     'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
     'blink_revision': '12345',
     'build_number': '12345',
     'version': '5',
     'builder_name': 'foobar',
     'seconds_since_epoch': '12345',
     'num_failures_by_type': {'FAIL': '123'},
     'tests': {'test': {'actual': '10'}},
   }))
Example No. 15
  def _test_merge(self, aggregated_data, incremental_data, expected_data,
        max_builds=jsonresults.JSON_RESULTS_MAX_BUILDS):
    aggregated_results = self._make_test_json(
        aggregated_data, builder_name=self._builder)
    incremental_results = self._make_test_json(
        incremental_data, builder_name=self._builder)
    # FIXME: Why is this called if we ignore the result?
    JsonResults._get_incremental_json(self._builder,
        JsonResults.load_json(aggregated_results),
        is_full_results_format=False)
    merged_results, status_code = JsonResults.merge(self._builder,
        aggregated_results, JsonResults.load_json(incremental_results),
        num_runs=max_builds, sort_keys=True)

    if expected_data:
      expected_results = self._make_test_json(
          expected_data, builder_name=self._builder)
      self.assert_json_equal(merged_results, expected_results)
      self.assertEqual(status_code, 200)
    else:
      self.assertTrue(status_code != 200)
Example No. 16
 def test_is_valid_full_results_json_numeric_chromium_revision(self):
   self.assertTrue(JsonResults.is_valid_full_results_json({
     'chromium_revision': '12345',
     'blink_revision': '12345',
     'build_number': '12345',
     'version': '5',
     'builder_name': 'foobar',
     'seconds_since_epoch': '12345',
     'num_failures_by_type': {'FAIL': '123'},
     'tests': {'test': {'actual': 'FAIL', 'expected': 'PASS',
                        'time': '10'}},
   }))
Example No. 17
  def test_update_files_empty_aggregate_data(self):
    small_file = MockFile(name='results-small.json')
    large_file = MockFile(name='results.json')

    incremental_data = {
        "builds": ["2", "1"],
        "tests": {
            "001.html": {
                "results": [[200, jsonresults.TEXT]],
                "times": [[200, 0]],
            }
        }
    }
    incremental_string = self._make_test_json(
        incremental_data, builder_name=small_file.builder)
    incremental_json = JsonResults.load_json(incremental_string)

    self.assertTrue(JsonResults.update_files(small_file.builder,
        incremental_json, small_file, large_file, is_full_results_format=False))
    self.assert_json_equal(small_file.data, incremental_string)
    self.assert_json_equal(large_file.data, incremental_string)
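MockFile is a test double defined elsewhere in the test module; the two update_files tests above only depend on its name, builder and data attributes, plus some way for JsonResults.update_files to write merged data back. A minimal stand-in consistent with that usage might look like the following; the save method and the default builder name are assumptions, not the project's actual helper.

class MockFileSketch(object):
    def __init__(self, name='results.json', builder='test-builder', data=''):
        self.name = name
        self.builder = builder
        self.data = data

    def save(self, data):
        # Assumed hook used by JsonResults.update_files to persist the
        # merged JSON; the tests then inspect .data directly.
        self.data = data
        return True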
Example No. 18
 def test_is_invalid_full_results_json_not_dict(self):
   self.assertFalse(JsonResults.is_valid_full_results_json([]))
   self.assertFalse(JsonResults.is_valid_full_results_json("foo"))
Example No. 19
 def test_strip_prefix_suffix(self):
   json_string = "['contents']"
   stripped = jsonresults.JsonResults._strip_prefix_suffix(
       "ADD_RESULTS(" + json_string + ");")
   self.assertEqual(stripped, json_string)
   self.assertEqual(JsonResults._strip_prefix_suffix(json_string), json_string)
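This test documents the whole contract of _strip_prefix_suffix: remove the ADD_RESULTS(...); JSONP wrapper when both ends are present, and pass any other string through untouched. A sketch that satisfies exactly those two assertions is shown below; the constant names are illustrative and the production code may differ.

JSON_RESULTS_PREFIX = 'ADD_RESULTS('
JSON_RESULTS_SUFFIX = ');'

def strip_prefix_suffix_sketch(json_string):
    # Only strip when both the prefix and the suffix are present.
    if (json_string.startswith(JSON_RESULTS_PREFIX)
            and json_string.endswith(JSON_RESULTS_SUFFIX)):
        return json_string[len(JSON_RESULTS_PREFIX):-len(JSON_RESULTS_SUFFIX)]
    return json_string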
Example No. 20
 def _test_get_test_list(self, input_data, expected_data):
   input_results = self._make_test_json(input_data)
   expected_results = JSON_RESULTS_TEST_LIST_TEMPLATE.replace(
       "{[TESTDATA_TESTS]}", json.dumps(expected_data, separators=(',', ':')))
   actual_results = JsonResults.get_test_list(self._builder, input_results)
   self.assert_json_equal(actual_results, expected_results)
Example No. 21
 def test_is_invalid_full_results_json_missing_required_fields(self):
   self.assertFalse(JsonResults.is_valid_full_results_json({}))
Example No. 22
  def test_merge_full_results_format(self):
    expected_incremental_results = {
        "Webkit": {
            "blinkRevision": ["1234"],
            "buildNumbers": ["3"],
            "chromeRevision": ["5678"],
            "failure_map": jsonresults.CHAR_TO_FAILURE,
            "num_failures_by_type": {
                "AUDIO": [0],
                "CRASH": [3],
                "FAIL": [2],
                "IMAGE": [1],
                "IMAGE+TEXT": [0],
                "MISSING": [0],
                "PASS": [10],
                "SKIP": [2],
                "TEXT": [3],
                "TIMEOUT": [16],
                "LEAK": [1]
            },
            "secondsSinceEpoch": [1368146629],
            "tests": {
                "media": {
                    "W3C": {
                        "audio": {
                            "src": {
                                "src_removal_does_not_trigger_loadstart.html": {
                                    "results": [[1, PASS]],
                                    "times": [[1, 4]],
                                }
                            }
                        }
                    },
                    "encrypted-media": {
                        "encrypted-media-v2-events.html": {
                            "bugs": ["crbug.com/1234"],
                            "expected": "TIMEOUT",
                            "results": [[1, TIMEOUT]],
                            "times": [[1, 6]],
                        },
                        "encrypted-media-v2-syntax.html": {
                            "expected": "TIMEOUT",
                            "results": [[1, TIMEOUT]],
                            "times": [[1, 0]],
                        }
                    },
                    "media-document-audio-repaint.html": {
                        "expected": "IMAGE",
                        "results": [[1, IMAGE]],
                        "times": [[1, 0]],
                    },
                    "progress-events-generated-correctly.html": {
                        "expected": "PASS FAIL IMAGE TIMEOUT CRASH MISSING",
                        "results": [[1, TIMEOUT]],
                        "times": [[1, 6]],
                    },
                    "flaky-failed.html": {
                        "expected": "PASS FAIL",
                        "results": [[1, FAIL]],
                        "times": [[1, 0]],
                    },
                    "unexpected-fail.html": {
                        "results": [[1, FAIL]],
                        "times": [[1, 0]],
                    },
                    "unexpected-leak.html": {
                        "results": [[1, LEAK]],
                        "times": [[1, 0]],
                    },
                    "unexpected-flake.html": {
                        "results": [[1, FAIL + PASS]],
                        "times": [[1, 0]],
                    },
                    "unexpected-unexpected.html": {
                        "results": [[1, UNKNOWN]],
                        "times": [[1, 0]],
                    },
                }
            }
        },
        "version": 4
    }

    aggregated_results = ""
    incremental_json, _ = JsonResults._get_incremental_json(self._builder,
        JsonResults.load_json(FULL_RESULT_EXAMPLE),
        is_full_results_format=True)
    merged_results, _ = JsonResults.merge("Webkit", aggregated_results,
        incremental_json, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS,
        sort_keys=True)
    self.assert_json_equal(merged_results, expected_incremental_results)
Example No. 23
  def test_merge_full_results_format(self):
    expected_incremental_results = {
        "Webkit": {
            "buildNumbers": ["3"],
            "chromeRevision": ["5678"],
            "failure_map": jsonresults.CHAR_TO_FAILURE,
            "num_failures_by_type": {
                "AUDIO": [0],
                "CRASH": [3],
                "FAIL": [2],
                "IMAGE": [1],
                "IMAGE+TEXT": [0],
                "MISSING": [0],
                "PASS": [10],
                "SKIP": [2],
                "TEXT": [3],
                "TIMEOUT": [16],
                "LEAK": [1]
            },
            "secondsSinceEpoch": [1368146629],
            "tests": {
                "media": {
                    "W3C": {
                        "audio": {
                            "src": {
                                "src_removal_does_not_trigger_loadstart.html": {
                                    "results": [[1, PASS]],
                                    "times": [[1, 4]],
                                }
                            }
                        }
                    },
                    "encrypted-media": {
                        "random-test-1.html": {
                            "bugs": ["crbug.com/1234"],
                            "expected": "TIMEOUT",
                            "results": [[1, TIMEOUT]],
                            "times": [[1, 6]],
                        },
                        "random-test-2.html": {
                            "expected": "TIMEOUT",
                            "results": [[1, TIMEOUT]],
                            "times": [[1, 0]],
                        }
                    },
                    "media-document-audio-repaint.html": {
                        "expected": "IMAGE",
                        "results": [[1, IMAGE]],
                        "times": [[1, 0]],
                    },
                    "progress-events-generated-correctly.html": {
                        "expected": "PASS FAIL IMAGE TIMEOUT CRASH MISSING",
                        "results": [[1, TIMEOUT]],
                        "times": [[1, 6]],
                    },
                    "flaky-failed.html": {
                        "expected": "PASS FAIL",
                        "results": [[1, FAIL]],
                        "times": [[1, 0]],
                    },
                    "unexpected-fail.html": {
                        "results": [[1, FAIL]],
                        "times": [[1, 0]],
                    },
                    "unexpected-leak.html": {
                        "results": [[1, LEAK]],
                        "times": [[1, 0]],
                    },
                    "unexpected-flake.html": {
                        "results": [[1, FAIL + PASS]],
                        "times": [[1, 0]],
                    },
                    "unexpected-unexpected.html": {
                        "results": [[1, UNKNOWN]],
                        "times": [[1, 0]],
                    },
                }
            }
        },
        "version": 4
    }

    aggregated_results = ""
    incremental_json, _ = JsonResults._get_incremental_json(self._builder,
        JsonResults.load_json(FULL_RESULT_EXAMPLE),
        is_full_results_format=True)
    merged_results, _ = JsonResults.merge("Webkit", aggregated_results,
        incremental_json, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS,
        sort_keys=True)
    self.assert_json_equal(merged_results, expected_incremental_results)
Example No. 24
    def post(self):  # pragma: no cover
        file_params = self.request.POST.getall(PARAM_FILE)
        if not file_params:
            self.response.out.write("FAIL: missing upload file field.")
            return

        builder = self.request.get(PARAM_BUILDER)
        if not builder:
            self.response.out.write("FAIL: missing builder parameter.")
            return

        master_parameter = self.request.get(PARAM_MASTER)

        master_data = master_config.getMasterByMasterName(master_parameter)
        if master_data:
            deprecated_master = master_parameter
            master = master_data['url_name']
        else:
            deprecated_master = None
            master = master_parameter

        test_type = self.request.get(PARAM_TEST_TYPE)

        logging.debug(
            "Processing upload request, master: %s, builder: %s, test_type: %s.",
            master, builder, test_type)

        # There are two possible types of each file_params in the request:
        # one file item or a list of file items.
        # Normalize file_params to a file item list.
        files = []
        logging.debug("test: %s, type:%s", file_params, type(file_params))
        for item in file_params:
            if not isinstance(item, list) and not isinstance(item, tuple):
                item = [item]
            files.extend(item)

        errors = []
        final_status_code = 200
        for record in files:
            file_json = JsonResults.load_json(record.value)
            if record.filename == "incremental_results.json":
                # FIXME: Ferret out and eliminate remaining incremental_results.json
                # producers.
                logging.info(
                    ("incremental_results.json received from master: %s, "
                     "builder: %s, test_type: %s."), master, builder,
                    test_type)
                status_string, status_code = JsonResults.update(
                    master,
                    builder,
                    test_type,
                    file_json,
                    deprecated_master=deprecated_master,
                    is_full_results_format=False)
            else:
                try:
                    build_number = int(file_json.get('build_number', 0))
                    status_string, status_code = TestFile.add_file(
                        master, builder, test_type, build_number,
                        record.filename, record.value)
                except (ValueError, TypeError):
                    status_code = 403
                    status_string = (
                        'Could not cast the build_number field in the '
                        'json to an integer.')

                if status_code == 200:
                    logging.info(status_string)
                else:
                    logging.error(status_string)
                    errors.append(status_string)
                    final_status_code = status_code

            if status_code == 200 and record.filename == "full_results.json":
                status_string, status_code = JsonResults.update(
                    master,
                    builder,
                    test_type,
                    file_json,
                    deprecated_master=deprecated_master,
                    is_full_results_format=True)
                BuilderState.incremental_update(master, builder, test_type,
                                                datetime.now())

            if status_code == 200:
                logging.info(status_string)
            else:
                logging.error(status_string)
                errors.append(status_string)
                final_status_code = status_code

        if errors:
            messages = "FAIL: " + "; ".join(errors)
            self.response.set_status(final_status_code, messages)
            self.response.out.write(messages)
        else:
            self.response.set_status(200)
            self.response.out.write("OK")
Example No. 25
  def post(self):  # pragma: no cover
    file_params = self.request.POST.getall(PARAM_FILE)
    if not file_params:
      self.response.out.write("FAIL: missing upload file field.")
      return

    builder = self.request.get(PARAM_BUILDER)
    if not builder:
      self.response.out.write("FAIL: missing builder parameter.")
      return

    master_parameter = self.request.get(PARAM_MASTER)

    master_data = master_config.getMasterByMasterName(master_parameter)
    if master_data:
      deprecated_master = master_parameter
      master = master_data['url_name']
    else:
      deprecated_master = None
      master = master_parameter

    test_type = self.request.get(PARAM_TEST_TYPE)
    test_type = util.normalize_test_type(test_type)

    logging.debug(
        "Processing upload request, master: %s, builder: %s, test_type: %s.",
        master, builder, test_type)

    # There are two possible types of each file_params in the request:
    # one file item or a list of file items.
    # Normalize file_params to a file item list.
    files = []
    logging.debug("test: %s, type:%s", file_params, type(file_params))
    for item in file_params:
      if not isinstance(item, list) and not isinstance(item, tuple):
        item = [item]
      files.extend(item)

    errors = []
    final_status_code = 200
    for record in files:
      file_json = JsonResults.load_json(record.value)
      if record.filename == "incremental_results.json":
        # FIXME: Ferret out and eliminate remaining incremental_results.json
        # producers.
        logging.info(("incremental_results.json received from master: %s, "
                      "builder: %s, test_type: %s."),
            master, builder, test_type)
        status_string, status_code = JsonResults.update(master, builder,
            test_type, file_json, deprecated_master=deprecated_master,
            is_full_results_format=False)
      else:
        try:
          build_number = int(file_json.get('build_number', 0))
          status_string, status_code = TestFile.add_file(master, builder,
              test_type, build_number, record.filename, record.value)
        except (ValueError, TypeError):
          status_code = 403
          status_string = ('Could not cast the build_number field in the '
                           'json to an integer.')

        if status_code == 200:
          logging.info(status_string)
        else:
          logging.error(status_string)
          errors.append(status_string)
          final_status_code = status_code

      if status_code == 200 and record.filename == "full_results.json":
        status_string, status_code = JsonResults.update(master, builder,
            test_type, file_json, deprecated_master=deprecated_master,
            is_full_results_format=True)
        BuilderState.incremental_update(master, builder, test_type,
            datetime.now())
        EventMonUploader.upload(master, builder, build_number, test_type,
            file_json)

      if status_code == 200:
        logging.info(status_string)
      else:
        logging.error(status_string)
        errors.append(status_string)
        final_status_code = status_code

    if errors:
      messages = "FAIL: " + "; ".join(errors)
      self.response.set_status(final_status_code, messages)
      self.response.out.write(messages)
    else:
      self.response.set_status(200)
      self.response.out.write("OK")
Example No. 26
  def test_deprecated_master_name(self):
    tb = testbed.Testbed()
    tb.activate()
    tb.init_datastore_v3_stub()
    tb.init_blobstore_stub()

    master = master_config.getMaster('chromium.chromiumos')
    builder = 'test-builder'
    test_type = 'test-type'

    test_data = [
        {
            'tests': {
                'Test1.testproc1': {
                    'expected': 'PASS',
                    'actual': 'PASS',
                    'time': 1,
                }
            },
            'build_number': '123',
            'version': jsonresults.JSON_RESULTS_HIERARCHICAL_VERSION,
            'builder_name': builder,
            'seconds_since_epoch': 1406123456,
            'num_failures_by_type': {
                'FAIL': 0,
                'SKIP': 0,
                'PASS': 1
            },
            'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
        },
        {
            'tests': {
                'Test2.testproc2': {
                    'expected': 'PASS',
                    'actual': 'FAIL',
                    'time': 2,
                }
            },
            'build_number': '456',
            'version': jsonresults.JSON_RESULTS_HIERARCHICAL_VERSION,
            'builder_name': builder,
            'seconds_since_epoch': 1406654321,
            'num_failures_by_type': {
                'FAIL': 1,
                'SKIP': 0,
                'PASS': 0
            },
            'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d5',
        },
    ]

    # Upload a file using old master name

    # Seed results files using the old name.
    JsonResults.update(
        master['name'], builder, test_type, test_data[0], None, True)
    # Update results files using the new name.
    JsonResults.update(master['url_name'], builder, test_type, test_data[1],
        master['name'], True)
    # Verify that the file keyed by url_name contains both sets of results.
    files = TestFile.get_files(
        master['url_name'], builder, test_type, None, None, limit=3)
    self.assertEqual(len(files), 2)
    for f in files:
      j = json.loads(f.data)
      self.assertItemsEqual(j[builder]['chromeRevision'],
                            ['761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
                             '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d5'])

    tb.deactivate()
Example No. 27
  def post(self):
    if not self.request.body:
      logging.error('Missing request payload')
      self.response.set_status(400)
      return

    try:
      payload = json.loads(self.request.body)
    except ValueError:
      logging.error('Failed to parse request payload as JSON')
      self.response.set_status(400)
      return

    # Retrieve test json from datastore based on task parameters.
    master = payload.get('master')
    builder = payload.get('builder')
    build_number = payload.get('build_number')
    test_type = payload.get('test_type')
    step_name = payload.get('step_name')
    if (not master or not builder or build_number is None or not test_type or
        not step_name):
      logging.error(
          'Missing required parameters: (master=%s, builder=%s, '
          'build_number=%s, test_type=%s, step_name=%s)' %
          (master, builder, build_number, test_type, step_name))
      self.response.set_status(400)
      return

    files = TestFile.get_files(
        master, builder, test_type, build_number, 'full_results.json',
        load_data=True, limit=1)
    if not files:
      logging.error('Failed to find full_results.json for (%s, %s, %s, %s)' % (
                    master, builder, build_number, test_type))
      self.response.set_status(404)
      return
    file_json = JsonResults.load_json(files[0].data)

    # Create a proto event and send it to event_mon.
    event = event_mon.Event('POINT')
    test_results = event.proto.test_results
    test_results.master_name = master
    test_results.builder_name = builder
    test_results.build_number = int(build_number)
    test_results.test_type = test_type
    test_results.step_name = step_name
    if 'interrupted' in file_json:
      test_results.interrupted = file_json['interrupted']
    if 'version' in file_json:
      test_results.version = file_json['version']
    if 'seconds_since_epoch' in file_json:
      test_results.usec_since_epoch = long(
          float(file_json['seconds_since_epoch']) * 1000 * 1000)

    def convert_test_result_type(json_val):
      self.num_test_results.increment({
          'result_type': json_val, 'master': master, 'builder': builder,
          'test_type': test_type})
      try:
        return (event_mon.protos.chrome_infra_log_pb2.TestResultsEvent.
                TestResultType.Value(json_val.upper().replace('+', '_')))
      except ValueError:
        return event_mon.protos.chrome_infra_log_pb2.TestResultsEvent.UNKNOWN

    tests = util.flatten_tests_trie(
        file_json.get('tests', {}), file_json.get('path_delimiter', '/'))
    for name, test in tests.iteritems():
      test_result = test_results.tests.add()
      test_result.test_name = name
      test_result.actual.extend(
          convert_test_result_type(res) for res in test['actual'])
      test_result.expected.extend(
          convert_test_result_type(res) for res in test['expected'])

    event.send()
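The loop at the end assumes util.flatten_tests_trie turns the nested 'tests' trie into a flat mapping from full test path to a dict whose 'actual' and 'expected' values are lists of result strings. The recursive sketch below matches that usage under those assumptions; the real helper (and its handling of leaves versus interior nodes) may differ.

def flatten_tests_trie_sketch(trie, delimiter='/', prefix=''):
    flat = {}
    for name, node in trie.items():
        path = prefix + delimiter + name if prefix else name
        if 'actual' in node and 'expected' in node:
            # Leaf test entry: split the space-separated result strings.
            flat[path] = {
                'actual': node.get('actual', '').split(),
                'expected': node.get('expected', '').split(),
            }
        else:
            # Interior node: recurse into the sub-trie.
            flat.update(flatten_tests_trie_sketch(node, delimiter, path))
    return flat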
Example No. 28
  def test_deprecated_master_name(self):
    tb = testbed.Testbed()
    tb.activate()
    tb.init_datastore_v3_stub()
    tb.init_blobstore_stub()

    master = master_config.getMaster('chromium.chromiumos')
    builder = 'test-builder'
    test_type = 'test-type'

    test_data = [
        {
            'tests': {
                'Test1.testproc1': {
                    'expected': 'PASS',
                    'actual': 'PASS',
                    'time': 1,
                }
            },
            'build_number': '123',
            'version': jsonresults.JSON_RESULTS_HIERARCHICAL_VERSION,
            'builder_name': builder,
            'blink_revision': '12345',
            'seconds_since_epoch': 1406123456,
            'num_failures_by_type': {
                'FAIL': 0,
                'SKIP': 0,
                'PASS': 1
            },
            'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d4',
        },
        {
            'tests': {
                'Test2.testproc2': {
                    'expected': 'PASS',
                    'actual': 'FAIL',
                    'time': 2,
                }
            },
            'build_number': '456',
            'version': jsonresults.JSON_RESULTS_HIERARCHICAL_VERSION,
            'builder_name': builder,
            'blink_revision': '54321',
            'seconds_since_epoch': 1406654321,
            'num_failures_by_type': {
                'FAIL': 1,
                'SKIP': 0,
                'PASS': 0
            },
            'chromium_revision': '761b2a4cbc3103ef5e48cc7e77184f57eb50f6d5',
        },
    ]

    # Upload a file using old master name

    # Seed results files using the old name.
    JsonResults.update(
        master['name'], builder, test_type, test_data[0], None, True)
    # Update results files using the new name.
    JsonResults.update(master['url_name'], builder, test_type, test_data[1],
        master['name'], True)
    # Verify that the file keyed by url_name contains both sets of results.
    files = TestFile.get_files(
        master['url_name'], builder, test_type, None, None, limit=3)
    self.assertEqual(len(files), 2)
    for f in files:
      j = json.loads(f.data)
      self.assertItemsEqual(j[builder]['blinkRevision'], ['12345', '54321'])

    tb.deactivate()