def test_merge_multiple_by(self):
    """When source and dest use different by-* keys, source's subtree wins outright."""
    # NOTE(review): a double-quoted duplicate of this test follows it in the
    # file — confirm which definition is intended to survive.
    src = {'x': {'by-foo': {'quick': 'fox', 'default': ['a', 'b', 'c']}}}
    dst = {'x': {'by-bar': {'purple': 'rain', 'default': ['x', 'y', 'z']}}}
    # source wins
    want = {'x': {'by-foo': {'quick': 'fox', 'default': ['a', 'b', 'c']}}}
    self.assertEqual(merge_to(src, dst), want)
def test_merge_multiple_by(self):
    """Merging dicts keyed by different by-* selectors keeps only the source's."""
    # NOTE(review): a single-quoted duplicate of this test precedes it in the
    # file — confirm which definition is intended to survive.
    by_foo = {"quick": "fox", "default": ["a", "b", "c"]}
    source = {"x": {"by-foo": by_foo}}
    dest = {"x": {"by-bar": {"purple": "rain", "default": ["x", "y", "z"]}}}
    # source wins: the result carries source's by-foo subtree unchanged
    self.assertEqual(
        merge_to(source, dest),
        {"x": {"by-foo": {"quick": "fox", "default": ["a", "b", "c"]}}},
    )
def test_merge_to_dicts(self):
    """Flat dict merge: source overrides dest, and dest is mutated in place."""
    # NOTE(review): a double-quoted duplicate of this test follows it in the
    # file — confirm which definition is intended to survive.
    src = {'a': 1, 'b': 2}
    dst = {'b': '20', 'c': 30}
    merged = {
        'a': 1,   # source only
        'b': 2,   # source overrides dest
        'c': 30,  # dest only
    }
    self.assertEqual(merge_to(src, dst), merged)
    # merge_to also mutates its dest argument
    self.assertEqual(dst, merged)
def test_merge_to_dicts(self):
    """merge_to of two flat dicts returns the union with source taking priority."""
    # NOTE(review): a single-quoted duplicate of this test precedes it in the
    # file — confirm which definition is intended to survive.
    source = {"a": 1, "b": 2}
    dest = {"b": "20", "c": 30}
    result = merge_to(source, dest)
    expected = {
        "a": 1,   # source only
        "b": 2,   # source overrides dest
        "c": 30,  # dest only
    }
    self.assertEqual(result, expected)
    self.assertEqual(dest, expected)
def test_merge_diff_types(self):
    """If source and dest values disagree on type, the source value replaces dest's."""
    # NOTE(review): a double-quoted duplicate of this test appears later in
    # the file — confirm which definition is intended to survive.
    src = {'x': [1, 2]}
    dst = {'x': 'abc'}
    winner = {'x': [1, 2]}  # source wins
    self.assertEqual(merge_to(src, dst), winner)
    self.assertEqual(dst, winner)
def test_merge_to_lists(self):
    """List values are concatenated, dest's elements first, mutating dest."""
    # NOTE(review): a double-quoted duplicate of this test appears later in
    # the file — confirm which definition is intended to survive.
    src = {'x': [3, 4]}
    dst = {'x': [1, 2]}
    combined = {'x': [1, 2, 3, 4]}  # dest first
    self.assertEqual(merge_to(src, dst), combined)
    self.assertEqual(dst, combined)
def test_merge_diff_types(self):
    """A type mismatch between source and dest resolves in favor of source."""
    # NOTE(review): a single-quoted duplicate of this test appears earlier in
    # the file — confirm which definition is intended to survive.
    source = {"x": [1, 2]}
    dest = {"x": "abc"}
    result = merge_to(source, dest)
    # source wins
    self.assertEqual(result, {"x": [1, 2]})
    self.assertEqual(dest, {"x": [1, 2]})
def test_merge_to_lists(self):
    """Merging list values extends dest with source's items (dest first)."""
    # NOTE(review): a single-quoted duplicate of this test appears earlier in
    # the file — confirm which definition is intended to survive.
    source = {"x": [3, 4]}
    dest = {"x": [1, 2]}
    result = merge_to(source, dest)
    expected = {"x": [1, 2, 3, 4]}  # dest first
    self.assertEqual(result, expected)
    self.assertEqual(dest, expected)
def process_thirdparty_build(config, jobs):
    """
    Set up a thirdparty library build, caching the built artifacts.

    :param config: the kind's transform config (provides ``params`` and ``kind``).
    :param jobs: iterable of job descriptions; each must carry ``name`` and a
        ``thirdparty`` dict with ``artifact``, ``script`` and optional ``args``.
    :yields: fully-populated task definitions.
    """
    for job in jobs:
        name = job['name']
        thirdparty = job['thirdparty']
        artifact_name = thirdparty['artifact']
        script = os.path.join(COMM_SCRIPTS, thirdparty['script'])
        args = thirdparty.get('args', [])
        command = [script] + args

        task = make_base_task(config, name, job, script, command)
        merge_to(job['index'], task['index'])
        merge_to(job['treeherder'], task['treeherder'])
        merge_to(job['worker'], task['worker'])
        if 'run' in job:
            merge_to(job['run'], task['run'])
        if 'toolchain' in job:
            task['fetches']['toolchain'] = job['toolchain']

        # Pop 'when' off the task and merge any job-level 'when' into it
        # exactly once. The original code merged job['when'] both into
        # task['when'] before the pop and into the popped dict afterwards;
        # since those are the same object and merge_to extends lists in
        # place, that appended the job's files-changed entries twice.
        when = task.pop('when')
        if 'when' in job:
            merge_to(job['when'], when)
        # The files-changed optimization is not actually used because it
        # conflicts with the indexing optimization, but the same list of files
        # is used to look up the revision with the most recent changes in
        # order to calculate a hash for the index.
        files_changed = when['files-changed']

        task['worker'].setdefault('artifacts', []).append({
            'name': 'public/build',
            'path': '/builds/worker/artifacts',
            'type': 'directory',
        })

        if not taskgraph.fast:
            project = config.params['project']
            # Get the most recent revision with changes. files-changed paths
            # are relative to GECKO, so strip 'comm/' off first.
            files_changed = frozenset(map(strip_comm_prefix, files_changed))
            last_changed_rev = get_last_modified_revision(COMM, files_changed)
            logger.info("Using artifact from rev {}.".format(last_changed_rev))

            cache_name = task['label'].replace('{}-'.format(config.kind), '', 1)
            # This adds the level to the index path automatically.
            add_optimization(
                config,
                task,
                cache_type=CACHE_TYPE,
                cache_name=cache_name,
                # Digest is based on the repo name and revision
                digest_data=command + [project, last_changed_rev, artifact_name],
            )
        yield task