Example #1
    def run(self, platform, productName, appVersion, version, build_number,
            locale, hashFunction, extVersion, buildID, schemaVersion,
            **updateKwargs):
        assert schemaVersion in (
            3, 4), 'Unhandled schema version %s' % schemaVersion
        targets = buildbot2updatePlatforms(platform)
        # Some platforms may have aliases, but those are set up elsewhere
        # for release blobs.
        build_target = targets[0]

        name = get_release_blob_name(productName, version, build_number,
                                     self.dummy)
        data = {
            'buildID': buildID,
            'appVersion': appVersion,
            'platformVersion': extVersion,
            'displayVersion': getPrettyVersion(version)
        }

        data.update(
            self._get_update_data(productName, version, build_number,
                                  **updateKwargs))

        api = SingleLocale(name=name,
                           build_target=build_target,
                           locale=locale,
                           auth=self.auth,
                           api_root=self.api_root)
        current_data, data_version = api.get_data()
        api.update_build(data_version=data_version,
                         product=productName,
                         hashFunction=hashFunction,
                         buildData=json.dumps(
                             merge_partial_updates(current_data, data)),
                         schemaVersion=schemaVersion)
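
Both this example and the next follow a read-merge-write pattern: api.get_data() returns the current blob together with its data_version, and api.update_build() submits the merged blob against that same data_version. Assuming data_version acts as an optimistic-concurrency token (the examples suggest this but do not show conflict handling), a hypothetical retry wrapper might look like the sketch below; submit_with_retry and its parameter names are illustrative, while merge_partial_updates is the helper used throughout these examples.

    import json

    def submit_with_retry(api, product, hash_function, schema_version, data,
                          retries=3):
        # Hypothetical helper: re-read the blob and retry if another
        # submitter bumped data_version between our read and our write.
        for attempt in range(retries):
            current_data, data_version = api.get_data()
            try:
                api.update_build(
                    data_version=data_version,
                    product=product,
                    hashFunction=hash_function,
                    buildData=json.dumps(
                        merge_partial_updates(current_data, data)),
                    schemaVersion=schema_version)
                return
            except Exception:
                # A real caller would catch the API's specific conflict
                # error rather than bare Exception.
                if attempt == retries - 1:
                    raise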
Example #2
    def run(self, platform, productName, appVersion, version, build_number, locale,
            hashFunction, extVersion, buildID, schemaVersion, **updateKwargs):
        assert schemaVersion in (3, 4), 'Unhandled schema version %s' % schemaVersion
        targets = buildbot2updatePlatforms(platform)
        # Some platforms may have aliases, but those are set up elsewhere
        # for release blobs.
        build_target = targets[0]

        name = get_release_blob_name(productName, version, build_number,
                                     self.dummy)
        data = {
            'buildID': buildID,
            'appVersion': appVersion,
            'platformVersion': extVersion,
            'displayVersion': getPrettyVersion(version)
        }

        data.update(self._get_update_data(productName, version, build_number,
                                          **updateKwargs))

        api = SingleLocale(name=name, build_target=build_target, locale=locale,
                           auth=self.auth, api_root=self.api_root)
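        # Note: unlike Example #1, schemaVersion is JSON-serialized to a
        # string here before being passed to update_build.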
        schemaVersion = json.dumps(schemaVersion)
        current_data, data_version = api.get_data()
        api.update_build(
            data_version=data_version,
            product=productName, hashFunction=hashFunction,
            buildData=json.dumps(merge_partial_updates(current_data, data)),
            schemaVersion=schemaVersion)
Example #3
 def update_dated():
     current_data, data_version = api.get_data()
     # explicitly pass data version
     api.update_build(
         product=productName, version=appVersion,
         hashFunction=hashFunction,
         buildData=json.dumps(merge_partial_updates(current_data,
                                                    data)),
         alias=json.dumps(alias),
         schemaVersion=schemaVersion, data_version=data_version)
Example #4
 def update_dated():
     current_data, data_version = api.get_data()
     # If the partials are already a subset of the blob and the
     # complete MAR is the same, skip the submission
     skip_submission = bool(
         current_data and
         current_data.get("completes") == data.get("completes") and
         all(p in current_data.get("partials", [])
             for p in data.get("partials", [])))
     if skip_submission:
         log.warn("Dated data didn't change, skipping update")
         return
     # explicitly pass data version
     api.update_build(
         product=productName, version=appVersion,
         hashFunction=hashFunction,
         buildData=json.dumps(merge_partial_updates(current_data,
                                                    data)),
         alias=json.dumps(alias),
         schemaVersion=schemaVersion, data_version=data_version)
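
Note what the skip check above requires: the new 'completes' must match the existing ones exactly, while the new 'partials' only need to already be present in the blob. A self-contained toy illustration of both branches (the data here is made up):

    current_data = {"completes": [{"from": "*", "hashValue": "abc"}],
                    "partials": [{"from": "111"}, {"from": "112"}]}
    data = {"completes": [{"from": "*", "hashValue": "abc"}],
            "partials": [{"from": "112"}]}

    def should_skip(current_data, data):
        # Same condition as in the example above.
        return bool(
            current_data and
            current_data.get("completes") == data.get("completes") and
            all(p in current_data.get("partials", [])
                for p in data.get("partials", [])))

    assert should_skip(current_data, data)       # nothing new to submit
    data["partials"].append({"from": "113"})     # an unseen partial...
    assert not should_skip(current_data, data)   # ...forces a submission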
Example #5
 def update_dated():
     current_data, data_version = api.get_data()
     # If the partials are already a subset of the blob and the
     # complete MAR is the same, skip the submission
     skip_submission = bool(
         current_data
         and current_data.get("completes") == data.get("completes")
         and all(p in current_data.get("partials", [])
                 for p in data.get("partials", [])))
     if skip_submission:
         log.warn("Dated data didn't change, skipping update")
         return
     # explicitly pass data version
     api.update_build(product=productName,
                      hashFunction=hashFunction,
                      buildData=json.dumps(
                          merge_partial_updates(current_data, data)),
                      alias=json.dumps(alias),
                      schemaVersion=schemaVersion,
                      data_version=data_version)
Example #6
 def test_merge_updates(self):
     old_data = {
         'some_other_field': "123",
         'some_other_field2': {"a": "b", "c": 1},
         'some_other_list': [1, 2, 3],
         'completes': [
             {
                 'fileUrl': 'https://complete1',
                 'filesize': 123,
                 'from': '*',
                 'hashValue': '123abcdef'
             },
         ],
         'partials': [
             {
                 'fileUrl': 'https://partial1',
                 'filesize': 111,
                 'from': '111',
                 'hashValue': '123abc'
             },
             {
                 'fileUrl': 'https://partial2',
                 'filesize': 112,
                 'from': '112',
                 'hashValue': '223abc'
             },
         ]
     }
     new_data = {
         'completes': [
             {
                 'fileUrl': 'https://complete2',
                 'filesize': 122,
                 'from': '*',
                 'hashValue': '122abcdef'
             },
         ],
         'partials': [
             {
                 'fileUrl': 'https://partial2/differenturl',
                 'filesize': 112,
                 'from': '112',
                 'hashValue': '223abcd'
             },
             {
                 'fileUrl': 'https://partial3',
                 'filesize': 113,
                 'from': '113',
                 'hashValue': '323abc'
             },
         ]
     }
     merged = merge_partial_updates(old_data, new_data)
     expected_merged = {
         'some_other_field': "123",
         'some_other_field2': {"a": "b", "c": 1},
         'some_other_list': [1, 2, 3],
         'completes': [
             {
                 'fileUrl': 'https://complete2',
                 'filesize': 122,
                 'from': '*',
                 'hashValue': '122abcdef'
             },
         ],
         'partials': [
             {
                 'fileUrl': 'https://partial1',
                 'filesize': 111,
                 'from': '111',
                 'hashValue': '123abc'
             },
             {
                 'fileUrl': 'https://partial2/differenturl',
                 'filesize': 112,
                 'from': '112',
                 'hashValue': '223abcd'
             },
             {
                 'fileUrl': 'https://partial3',
                 'filesize': 113,
                 'from': '113',
                 'hashValue': '323abc'
             },
         ]
     }
     self.assertDictEqual(merged, expected_merged)
Example #7
 def test_merge_updates(self):
     old_data = {
          'some_other_field': "123",
         'some_other_field2': {
             "a": "b",
             "c": 1
         },
         'some_other_list': [1, 2, 3],
         'completes': [
             {
                 'fileUrl': 'https://complete1',
                 'filesize': 123,
                 'from': '*',
                 'hashValue': '123abcdef'
             },
         ],
         'partials': [
             {
                 'fileUrl': 'https://partial1',
                 'filesize': 111,
                 'from': '111',
                 'hashValue': '123abc'
             },
             {
                 'fileUrl': 'https://partial2',
                 'filesize': 112,
                 'from': '112',
                 'hashValue': '223abc'
             },
         ]
     }
     new_data = {
         'completes': [
             {
                 'fileUrl': 'https://complete2',
                 'filesize': 122,
                 'from': '*',
                 'hashValue': '122abcdef'
             },
         ],
         'partials': [
             {
                 'fileUrl': 'https://partial2/differenturl',
                 'filesize': 112,
                 'from': '112',
                 'hashValue': '223abcd'
             },
             {
                 'fileUrl': 'https://partial3',
                 'filesize': 113,
                 'from': '113',
                 'hashValue': '323abc'
             },
         ]
     }
     merged = merge_partial_updates(old_data, new_data)
     expected_merged = {
          'some_other_field': "123",
         'some_other_field2': {
             "a": "b",
             "c": 1
         },
         'some_other_list': [1, 2, 3],
         'completes': [
             {
                 'fileUrl': 'https://complete2',
                 'filesize': 122,
                 'from': '*',
                 'hashValue': '122abcdef'
             },
         ],
         'partials': [
             {
                 'fileUrl': 'https://partial1',
                 'filesize': 111,
                 'from': '111',
                 'hashValue': '123abc'
             },
             {
                 'fileUrl': 'https://partial2/differenturl',
                 'filesize': 112,
                 'from': '112',
                 'hashValue': '223abcd'
             },
             {
                 'fileUrl': 'https://partial3',
                 'filesize': 113,
                 'from': '113',
                 'hashValue': '323abc'
             },
         ]
     }
     self.assertDictEqual(merged, expected_merged)
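
Taken together, the two tests pin down the behaviour of merge_partial_updates: top-level fields unrelated to updates are carried over from the old blob, the new 'completes' replace the old ones wholesale, and 'partials' are unioned by their 'from' version with new entries winning. A minimal sketch consistent with these tests follows; the real helper may differ in details such as ordering or handling of missing keys.

    def merge_partial_updates(old_data, new_data):
        # Sketch only: semantics reverse-engineered from the tests above.
        merged = dict(old_data)  # unrelated fields carry over untouched
        # New completes replace the old ones wholesale.
        merged['completes'] = new_data.get('completes', [])
        # Partials are unioned by their 'from' version; new entries win.
        by_from = {p['from']: p for p in old_data.get('partials', [])}
        by_from.update((p['from'], p) for p in new_data.get('partials', []))
        merged['partials'] = sorted(by_from.values(), key=lambda p: p['from'])
        return merged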