def context(tmpdir):
    """Yield a Context with the default config and fresh work/artifact dirs under tmpdir."""
    ctx = Context()
    ctx.config = get_default_config()
    for subdir in ('work', 'artifact'):
        path = os.path.join(tmpdir, subdir)
        ctx.config[subdir + '_dir'] = path
        mkdir(path)
    yield ctx
def context(tmpdir_factory):
    """Return a Context whose work_dir points at a fresh numbered temp directory."""
    work_dir = tmpdir_factory.mktemp("context", numbered=True)
    print(work_dir)  # echoed so failing tests show which temp dir was used
    ctx = Context()
    ctx.config = {"work_dir": str(work_dir)}
    return ctx
def get_context(config_override):
    """Yield a configured Context with credentials and an aiohttp session attached.

    Args:
        config_override (dict): overrides applied when building the config.

    Yields:
        Context: with ``config``, ``session`` and ``credentials`` populated.
    """
    context = Context()
    context.config, credentials = build_config(config_override)
    # Route logging through the freshly built config.
    swlog.update_logging_config(context)
    # Clear out stale work/artifact state before the test runs.
    utils.cleanup(context)
    # NOTE(review): using ClientSession as a *synchronous* context manager was
    # deprecated and later removed in aiohttp; on modern aiohttp this raises
    # TypeError. Confirm the pinned aiohttp version, or convert this into an
    # ``async def`` generator using ``async with``.
    with aiohttp.ClientSession() as session:
        context.session = session
        context.credentials = credentials
        yield context
def context(tmpdir_factory):
    """Return a Context with log/artifact/work dirs rooted in one temp directory."""
    base = str(tmpdir_factory.mktemp("context", numbered=True))
    ctx = Context()
    ctx.config = {
        name + '_dir': os.path.join(base, name)
        for name in ('log', 'artifact', 'work')
    }
    return ctx
async def get_context(config_override=None):
    """Async-yield a fully initialised Context backed by a temp dir and live session.

    The temp directory and the aiohttp session are both torn down when the
    consumer finishes iterating.
    """
    ctx = Context()
    with tempfile.TemporaryDirectory() as tmp:
        ctx.config, credentials = build_config(config_override, basedir=tmp)
        swlog.update_logging_config(ctx)
        utils.cleanup(ctx)
        async with aiohttp.ClientSession() as session:
            ctx.session = session
            ctx.credentials = credentials
            yield ctx
def test_load_signing_server_config():
    """load_signing_server_config parses the example JSON into server records."""
    ctx = Context()
    ctx.config = {
        'signing_server_config': os.path.join(
            os.path.dirname(__file__), "example_server_config.json"
        )
    }
    cfg = load_signing_server_config(ctx)
    dep, notdep = cfg["dep"], cfg["notdep"]
    assert dep[0].server == "server1:9000"
    assert dep[1].user == "user2"
    assert notdep[0].password == "pass1"
    assert notdep[1].formats == ["f2", "f3"]
def context(tmpdir_factory):
    """Return a Context configured for verbose rotating-file logging."""
    log_dir = tmpdir_factory.mktemp("context", numbered=True)
    ctx = Context()
    ctx.config = dict(
        log_fmt="%(message)s",
        log_datefmt="%H:%M:%S",
        log_dir=str(log_dir),
        log_max_bytes=100,   # tiny cap so rotation triggers quickly in tests
        log_num_backups=1,
        verbose=True,
    )
    return ctx
def context():
    """Return a Context wired to a fake dev AMO instance, with a matching task scope."""
    scope = "project:releng:addons.mozilla.org:server:dev"
    ctx = Context()
    ctx.config = {
        "amo_instances": {
            scope: {
                "amo_server": "http://some-amo-it.url",
                "jwt_user": "******",
                "jwt_secret": "secret",
            },
        },
    }
    ctx.task = {"scopes": [scope]}
    return ctx
def test_setup_config():
    """setup_config exits without a path, and loads the fake config otherwise."""
    expected = Context()
    expected.config = get_fake_valid_config()
    # No path and no argv argument -> usage error.
    with pytest.raises(SystemExit):
        setup_config(None)
    # Explicit path argument.
    assert setup_config("beetmoverscript/test/fake_config.json").config == expected.config
    # Path taken from argv when no explicit argument is given.
    argv = ['beetmoverscript', "beetmoverscript/test/fake_config.json"]
    with mock.patch.object(sys, 'argv', argv):
        assert setup_config(None).config == expected.config
def test_async_main(event_loop):
    """async_main completes when beet moving and prop fetching are stubbed out."""
    ctx = Context()
    ctx.config = get_fake_valid_config()

    async def noop_move_beets(context, manifest):
        pass

    async def stub_props(context):
        return get_fake_balrog_props()['properties']

    with mock.patch('beetmoverscript.script.move_beets', new=noop_move_beets), \
            mock.patch('beetmoverscript.script.get_props', new=stub_props):
        event_loop.run_until_complete(async_main(ctx))
def test_put_success(event_loop, fake_session):
    """put() uploads the file and hands back the fake 200 response."""
    ctx = Context()
    ctx.config = get_fake_valid_config()
    ctx.session = fake_session
    resp = event_loop.run_until_complete(put(
        ctx,
        url='https://foo.com/packages/fake.package',
        headers={},
        abs_filename='beetmoverscript/test/fake_artifact.json',
        session=fake_session,
    ))
    assert resp.status == 200
    assert resp.resp == [b'asdf', b'asdf']
    assert resp.content.url == "https://foo.com/packages/fake.package"
    assert resp.content.method == "PUT"
def context():
    """Fixture: Context pointing at a fake dev AMO instance (single-quote variant)."""
    ctx = Context()
    amo_scope = 'project:releng:addons.mozilla.org:server:dev'
    ctx.config = {
        'amo_instances': {
            amo_scope: {
                'amo_server': 'http://some-amo-it.url',
                'jwt_user': '******',
                'jwt_secret': 'secret',
            },
        },
    }
    ctx.task = {'scopes': [amo_scope]}
    return ctx
def context():
    """Fixture: Context with worker identity and two signed queue URL pairs."""
    ctx = Context()
    ctx.config = {'worker_group': 'worker_group', 'worker_id': 'worker_id'}
    queues = [
        {"signedPollUrl": "poll{}".format(i), "signedDeleteUrl": "delete{}".format(i)}
        for i in range(2)
    ]
    ctx.poll_task_urls = {'queues': queues}
    return ctx
def test_download(event_loop, fake_session):
    """retry_download delegates to scriptworker's download_file, passing the
    context, url, path and the context's session through unchanged.

    Fix: ``fake_session`` was referenced in the body without being declared
    as a pytest fixture parameter, which raised ``NameError`` at run time
    (the sibling tests in this file request it as a fixture).
    """
    context = Context()
    context.config = get_fake_valid_config()
    context.session = fake_session
    url = 'https://fake.com'
    path = '/fake/path'

    async def fake_download(context, url, path, session):
        # Echo the arguments so the assertion below can verify pass-through.
        return context, url, path, session

    # just make sure retry_download ends up calling scriptworker's download_file
    # and passes the right args, kwargs
    with mock.patch('beetmoverscript.script.download_file', fake_download):
        result = event_loop.run_until_complete(
            retry_download(context, url, path))
    assert result == (context, url, path, context.session)
def context():
    """Yield a fully-populated nightly-push Context for beetmover tests."""
    ctx = Context()
    ctx.task = get_fake_valid_task()
    ctx.config = get_fake_valid_config()
    props = get_fake_balrog_props()['properties']
    props['platform'] = props['stage_platform']
    ctx.release_props = props
    ctx.bucket = 'nightly'
    ctx.action = 'push-to-nightly'
    yield ctx
def _craft_rw_context(tmp, event_loop, cot_product, session):
    """Build a read/write signing Context whose dirs, key paths and GPG home
    all live under ``tmp``."""
    base = get_unfrozen_copy(DEFAULT_CONFIG)
    base['cot_product'] = cot_product
    rw_context = Context()
    rw_context.session = session
    rw_context.config = apply_product_config(base)
    rw_context.config['gpg_lockfile'] = os.path.join(tmp, 'gpg_lockfile')
    rw_context.config['cot_job_type'] = "signing"
    for key in list(rw_context.config):
        if key.endswith("_dir"):
            # Redirect every configured directory into the temp root.
            rw_context.config[key] = os.path.join(tmp, key)
            makedirs(rw_context.config[key])
        elif key.endswith("key_path") or key in ("gpg_home", ):
            rw_context.config[key] = os.path.join(tmp, key)
    rw_context.config['verbose'] = VERBOSE
    rw_context.event_loop = event_loop
    return rw_context
def _init_context(config_path=None, default_config=None):
    """Create a Context from a config file path (or ``sys.argv[1]``).

    Note: ``default_config``, when given, is used as the config object and
    mutated in place by the file overlay — callers rely on that aliasing.
    """
    context = Context()
    # This prevents *script from pasting the whole context.
    context.write_json = noop_sync
    if config_path is None:
        if len(sys.argv) != 2:
            _usage()
        config_path = sys.argv[1]
    context.config = default_config if default_config is not None else {}
    context.config.update(load_json_or_yaml(config_path, is_path=True))
    context.task = get_task(context.config)
    return context
def test_checksums_manifest_generation():
    """generate_checksums_manifest serialises context.checksums as expected."""
    mar_name = "firefox-53.0a1.en-US.linux-i686.complete.mar"
    ctx = Context()
    ctx.task = get_fake_valid_task()
    ctx.config = get_fake_valid_config()
    ctx.checksums = {
        mar_name: {
            "sha512": "14f2d1cb999a8b42a3b6b671f7376c3e246daa65d108e2b8fe880f069601dc2b26afa155b52001235db059",
            "size": 618149,
            "sha256": "293975734953874539475",
        }
    }
    assert generate_checksums_manifest(ctx) == get_fake_checksums_manifest()
def context():
    """Yield a nightly-push Context whose release props come from the task payload."""
    ctx = Context()
    ctx.task = get_fake_valid_task()
    ctx.config = get_fake_valid_config()
    props = ctx.task["payload"]["releaseProperties"]
    props["stage_platform"] = props["platform"]
    ctx.release_props = props
    ctx.bucket = "nightly"
    ctx.action = "push-to-nightly"
    yield ctx
def test_beetmover_template_args_generation():
    """generate_beetmover_template_args fills the nightly args, and switches the
    template key when the payload carries a repack locale."""
    ctx = Context()
    ctx.task = get_fake_valid_task()
    ctx.config = get_fake_valid_config()
    props = get_fake_balrog_props()["properties"]
    props['platform'] = props['stage_platform']
    ctx.properties = props
    assert generate_beetmover_template_args(ctx.task, ctx.properties) == {
        'branch': 'mozilla-central',
        'platform': 'android-api-15',
        'product': 'Fake',
        'stage_platform': 'android-api-15',
        'template_key': 'fennec_nightly',
        'upload_date': '2016/09/2016-09-01-16-26-14',
        'version': '99.0a1',
    }
    # A locale in the payload selects the repacks template.
    ctx.task['payload']['locale'] = 'ro'
    repack_args = generate_beetmover_template_args(ctx.task, ctx.properties)
    assert repack_args['template_key'] == 'fake_nightly_repacks'
def get_context_from_cmdln(args, desc="Run scriptworker"):
    """Create a Context object from args.

    Args:
        args (list): the commandline args. Generally sys.argv

    Returns:
        tuple: ``scriptworker.context.Context`` with populated config, and
            credentials immutabledict

    """
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument(
        "config_path",
        type=str,
        nargs="?",
        default="scriptworker.yaml",
        help="the path to the config file",
    )
    parsed_args = parser.parse_args(args)
    context = Context()
    context.config, credentials = create_config(config_path=parsed_args.config_path)
    update_logging_config(context)
    return context, credentials
def test_main(event_loop, fake_session):
    """main() runs to completion on success and exits 1 when async_main raises."""
    ctx = Context()
    ctx.config = get_fake_valid_config()
    config_path = 'beetmoverscript/test/fake_config.json'

    async def passing_main(context):
        pass

    async def failing_main(context):
        raise ScriptWorkerTaskException("This is wrong, the answer is 42")

    with mock.patch('beetmoverscript.script.async_main', new=passing_main):
        main(name='__main__', config_path=config_path)
    with mock.patch('beetmoverscript.script.async_main', new=failing_main):
        try:
            main(name='__main__', config_path=config_path)
        except SystemExit as exc:
            assert exc.code == 1
def test_main(fake_session):
    """main() succeeds with a stubbed async_main and exits 1 on exception."""
    ctx = Context()
    ctx.config = get_fake_valid_config()

    async def passing_main(context):
        pass

    async def failing_main(context):
        raise ScriptWorkerTaskException("This is wrong, the answer is 42")

    with mock.patch("beetmoverscript.script.async_main", new=passing_main):
        main(config_path="tests/fake_config.json")
    with mock.patch("beetmoverscript.script.async_main", new=failing_main):
        try:
            main(config_path="tests/fake_config.json")
        except SystemExit as exc:
            assert exc.code == 1
def context(tmpdir):
    """Yield a signing Context with the server config loaded and dirs created."""
    ctx = Context()
    cfg = get_default_config()
    cfg["signing_server_config"] = SERVER_CONFIG_PATH
    cfg["work_dir"] = os.path.join(tmpdir, "work")
    cfg["artifact_dir"] = os.path.join(tmpdir, "artifact")
    cfg["taskcluster_scope_prefixes"] = [DEFAULT_SCOPE_PREFIX]
    ctx.config = cfg
    ctx.signing_servers = load_signing_server_config(ctx)
    mkdir(cfg["work_dir"])
    mkdir(cfg["artifact_dir"])
    yield ctx
def context(tmpdir):
    """Yield a Context configured for autograph signing under ``tmpdir``."""
    ctx = Context()
    ctx.config = get_default_config()
    ctx.config.update({
        "autograph_configs": SERVER_CONFIG_PATH,
        "work_dir": os.path.join(tmpdir, "work"),
        "artifact_dir": os.path.join(tmpdir, "artifact"),
        "taskcluster_scope_prefixes": [DEFAULT_SCOPE_PREFIX],
    })
    ctx.autograph_configs = load_autograph_configs(SERVER_CONFIG_PATH)
    for key in ("work_dir", "artifact_dir"):
        mkdir(ctx.config[key])
    yield ctx
def context(tmpdir):
    """Yield a signing Context rooted in ``tmpdir`` with signing servers loaded."""
    ctx = Context()
    ctx.config = get_default_config()
    work_dir = os.path.join(tmpdir, 'work')
    artifact_dir = os.path.join(tmpdir, 'artifact')
    ctx.config['signing_server_config'] = SERVER_CONFIG_PATH
    ctx.config['work_dir'] = work_dir
    ctx.config['artifact_dir'] = artifact_dir
    ctx.config['taskcluster_scope_prefixes'] = [DEFAULT_SCOPE_PREFIX]
    ctx.signing_servers = load_signing_server_config(ctx)
    mkdir(work_dir)
    mkdir(artifact_dir)
    yield ctx
def _craft_rw_context(tmp, event_loop, cot_product, session):
    """Return a scriptworker Context whose dirs and key paths live under ``tmp``."""
    base_config = get_unfrozen_copy(DEFAULT_CONFIG)
    base_config["cot_product"] = cot_product
    rw_context = Context()
    rw_context.session = session
    rw_context.config = apply_product_config(base_config)
    rw_context.config["cot_job_type"] = "scriptworker"
    for key in list(rw_context.config):
        if key.endswith("_dir"):
            # Every configured directory is recreated inside the temp root.
            rw_context.config[key] = os.path.join(tmp, key)
            makedirs(rw_context.config[key])
        elif key.endswith("key_path"):
            rw_context.config[key] = os.path.join(tmp, key)
    rw_context.config["verbose"] = VERBOSE
    rw_context.event_loop = event_loop
    return rw_context
def context(tmpdir):
    """Return a Context for task-runner tests: short timeouts, deliberately
    failing task script, and a minimal claim."""
    ctx = Context()
    ctx.config = {
        'log_dir': os.path.join(tmpdir, "log"),
        'artifact_dir': os.path.join(tmpdir, "artifact"),
        'task_log_dir': os.path.join(tmpdir, "artifact", "public", "logs"),
        'work_dir': os.path.join(tmpdir, "work"),
        'artifact_upload_timeout': 200,
        'artifact_expiration_hours': 1,
        'reclaim_interval': 0.001,
        # Emits on stderr and stdout, then exits non-zero, to exercise log
        # capture and failure handling.
        'task_script': ('bash', '-c', '>&2 echo bar && echo foo && exit 1'),
        'task_max_timeout': .1,
    }
    ctx.claim_task = {
        'credentials': {'a': 'b'},
        'status': {'taskId': 'taskId'},
        'task': {
            'dependencies': ['dependency1', 'dependency2'],
            'taskGroupId': 'dependency0',
        },
        'runId': 'runId',
    }
    return ctx
def test_move_beet(event_loop):
    """move_beet downloads the artifact once, then uploads it to both destinations."""
    ctx = Context()
    ctx.config = get_fake_valid_config()
    ctx.task = get_fake_valid_task()
    locale = "sample-locale"
    source = 'https://queue.taskcluster.net/v1/task/VALID_TASK_ID/artifacts/public/build/target.package'
    destinations = (
        'pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/fake-99.0a1.multi.fake.package',
        'pub/mobile/nightly/latest-mozilla-central-fake/fake-99.0a1.multi.fake.package'
    )
    local_path = 'beetmoverscript/test/test_work_dir/public/build/target.package'
    expected_download_args = [source, local_path]
    expected_upload_args = [destinations, local_path]
    recorded_download_args = []
    recorded_upload_args = []

    async def record_download(context, url, path):
        recorded_download_args.extend([url, path])

    async def record_upload(context, dests, path):
        recorded_upload_args.extend([dests, path])

    with mock.patch('beetmoverscript.script.retry_download', record_download), \
            mock.patch('beetmoverscript.script.retry_upload', record_upload):
        event_loop.run_until_complete(
            move_beet(ctx, source, destinations, locale,
                      update_balrog_manifest=False))

    assert sorted(expected_download_args) == sorted(recorded_download_args)
    assert expected_upload_args == recorded_upload_args
def context(tmpdir):
    """Fixture: Context for an AMO dev-instance push of Firefox-59.0b3-build1."""
    amo_scope = 'project:releng:addons.mozilla.org:server:dev'
    ctx = Context()
    ctx.config = {
        'amo_instances': {
            amo_scope: {
                'amo_server': 'http://some-amo-it.url',
                'jwt_user': '******',
                'jwt_secret': 'some-secret',
            },
        },
        'work_dir': tmpdir,
    }
    ctx.task = {
        'dependencies': ['someTaskId'],
        'payload': {'release_name': 'Firefox-59.0b3-build1'},
        'scopes': [amo_scope],
    }
    return ctx
def context(tmpdir):
    """Return a Context for add-on submission tests against the dev AMO instance."""
    ctx = Context()
    scope = "project:releng:addons.mozilla.org:server:dev"
    instance = {
        "amo_server": "http://some-amo-it.url",
        "jwt_user": "******",
        "jwt_secret": "some-secret",
    }
    ctx.config = {"amo_instances": {scope: instance}, "work_dir": tmpdir}
    ctx.task = {
        "dependencies": ["someTaskId"],
        "payload": {"release_name": "Firefox-59.0b3-build1"},
        "scopes": [scope],
    }
    return ctx
def test_exception_get_upstream_artifacts():
    """get_upstream_artifacts raises when a listed artifact path is missing on disk."""
    ctx = Context()
    ctx.config = get_fake_valid_config()
    ctx.task = get_fake_valid_task()
    ctx.properties = get_fake_balrog_props()["properties"]
    ctx.properties['platform'] = ctx.properties['stage_platform']
    # Reference a file that does not exist in the work dir.
    ctx.task['payload']['upstreamArtifacts'][0]['paths'].append('fake_file')
    with pytest.raises(ScriptWorkerTaskException):
        ctx.artifacts_to_beetmove = get_upstream_artifacts(ctx)
def get_context_from_cmdln(args, desc="Run scriptworker"):
    """Create a Context object from args.

    Args:
        args (list): the commandline args. Generally sys.argv

    Returns:
        tuple: ``scriptworker.context.Context`` with populated config, and
            credentials frozendict

    """
    arg_parser = argparse.ArgumentParser(description=desc)
    arg_parser.add_argument("config_path", type=str, nargs="?",
                            default="scriptworker.yaml",
                            help="the path to the config file")
    opts = arg_parser.parse_args(args)
    context = Context()
    context.config, credentials = create_config(config_path=opts.config_path)
    update_logging_config(context)
    return context, credentials
def test_balrog_manifest_to_artifacts():
    """add_balrog_manifest_to_artifacts writes ``context.balrog_manifest`` to
    ``<artifact_dir>/public/manifest.json``; verify the round-tripped JSON.

    Fix: replace the racy ``exists()``-then-``makedirs()`` check with
    ``os.makedirs(..., exist_ok=True)``.
    """
    context = Context()
    context.task = get_fake_valid_task()
    context.config = get_fake_valid_config()
    fake_balrog_manifest = context.task["payload"]["releaseProperties"]
    context.balrog_manifest = fake_balrog_manifest
    # fake the path to be able to check the contents written later on
    with tempfile.TemporaryDirectory() as tmpdirname:
        context.config["artifact_dir"] = tmpdirname
        file_path = os.path.join(context.config["artifact_dir"], "public/manifest.json")
        # <temp-dir>/public doesn't exist yet and it's not automatically
        # created, so ensure it exists (idempotent, no TOCTOU window).
        os.makedirs(os.path.join(tmpdirname, "public"), exist_ok=True)
        add_balrog_manifest_to_artifacts(context)
        with open(file_path, "r") as fread:
            retrieved_data = json.load(fread)
        assert fake_balrog_manifest == retrieved_data
def context():
    """Fixture: Context configured for a dev Ship-It instance plus a fake task."""
    data_dir = os.path.join(os.getcwd(), 'shipitscript', 'data')
    ship_it_scope = 'project:releng:ship-it:server:dev'
    ctx = Context()
    ctx.config = {
        'mark_as_shipped_schema_file': os.path.join(data_dir, 'mark_as_shipped_task_schema.json'),
        'mark_as_started_schema_file': os.path.join(data_dir, 'mark_as_started_task_schema.json'),
        'ship_it_instances': {
            ship_it_scope: {
                'api_root': 'http://some-ship-it.url',
                'api_root_v2': 'http://some-ship-it.url/v2',
                'timeout_in_seconds': 1,
                'taskcluster_client_id': 'some-id',
                'taskcluster_access_token': 'some-token',
                'username': '******',
                'password': '******',
            }
        },
        'taskcluster_scope_prefix': "project:releng:ship-it:",
    }
    ctx.task = {
        'dependencies': ['someTaskId'],
        'payload': {'release_name': 'Firefox-59.0b3-build1'},
        'scopes': [ship_it_scope],
    }
    return ctx
def test_move_beets(event_loop):
    """move_beets fans out one move_beet call per artifact in the manifest."""
    ctx = Context()
    ctx.config = get_fake_valid_config()
    ctx.task = get_fake_valid_task()
    ctx.properties = get_fake_balrog_props()["properties"]
    ctx.properties['platform'] = ctx.properties['stage_platform']
    manifest = generate_candidates_manifest(ctx)

    artifact_base = 'https://queue.taskcluster.net/v1/task/VALID_TASK_ID/artifacts/public/build'
    expected_sources = [
        artifact_base + '/target.package',
        artifact_base + '/en-US/target.package',
    ]
    expected_destinations = [
        ('pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/fake-99.0a1.multi.fake.package',
         'pub/mobile/nightly/latest-mozilla-central-fake/fake-99.0a1.multi.fake.package'),
        ('pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/en-US/fake-99.0a1.en-US.fake.package',
         'pub/mobile/nightly/latest-mozilla-central-fake/en-US/fake-99.0a1.en-US.fake.package'),
    ]
    seen_sources = []
    seen_destinations = []

    async def record_move_beet(context, source, destinations, locale,
                               update_balrog_manifest):
        seen_sources.append(source)
        seen_destinations.append(destinations)

    with mock.patch('beetmoverscript.script.move_beet', record_move_beet):
        event_loop.run_until_complete(move_beets(ctx, manifest))

    assert sorted(expected_sources) == sorted(seen_sources)
    assert sorted(expected_destinations) == sorted(seen_destinations)
def context(tmpdir):
    """Yield a Context wired for chain-of-trust signing tests.

    GPG settings point at the checked-in test keyrings under ``data/gpg``;
    work and log dirs live under ``tmpdir``.
    """
    gpg_home = os.path.join(os.path.dirname(__file__), "data", "gpg")
    ctx = Context()
    ctx.config = {
        "artifact_dir": ARTIFACT_DIR,
        "work_dir": os.path.join(tmpdir, "work"),
        "log_dir": os.path.join(tmpdir, "log"),
        "chain_of_trust_hash_algorithm": "sha256",
        "cot_schema_path": DEFAULT_CONFIG['cot_schema_path'],
        "gpg_home": gpg_home,
        "gpg_encoding": 'utf-8',
        "gpg_options": None,
        # Fall back to whatever gpg binary is on PATH when GPG_PATH is unset.
        "gpg_path": os.environ.get("GPG_PATH", None),
        "gpg_public_keyring": "%(gpg_home)s/pubring.gpg",
        "gpg_secret_keyring": "%(gpg_home)s/secring.gpg",
        "gpg_use_agent": None,
        "sign_chain_of_trust": True,
        "worker_id": "worker_id",
        "worker_type": "worker_type",
    }
    ctx.claim_task = {
        "runId": 2,
        "status": {"taskId": "taskId"},
        "task": {
            'dependencies': [],
            "payload": {},
            "scopes": ["foo"],
            "taskGroupId": "taskGroupId",
            "workerType": "workerType",
        },
        "workerGroup": "worker_group",
        "credentials": {'c': 'd'},
    }
    yield ctx
def _craft_rw_context(tmp, event_loop, cot_product, session, private=False):
    """Build a scriptworker Context under ``tmp``; with ``private=True`` every
    trusted VCS rule is marked as requiring a secret."""
    cfg = get_unfrozen_copy(DEFAULT_CONFIG)
    cfg["cot_product"] = cot_product
    rw_context = Context()
    rw_context.session = session
    rw_context.config = apply_product_config(cfg)
    rw_context.config["cot_job_type"] = "scriptworker"
    for key in list(rw_context.config):
        if key.endswith("_dir"):
            rw_context.config[key] = os.path.join(tmp, key)
            makedirs(rw_context.config[key])
        elif key.endswith("key_path"):
            rw_context.config[key] = os.path.join(tmp, key)
    if private:
        for rule in rw_context.config["trusted_vcs_rules"]:
            rule["require_secret"] = True
    rw_context.config["verbose"] = VERBOSE
    rw_context.event_loop = event_loop
    return rw_context
def _craft_rw_context(tmp, event_loop, cot_product, session):
    """Assemble a signing Context whose dirs and key paths live in ``tmp``."""
    unfrozen = get_unfrozen_copy(DEFAULT_CONFIG)
    unfrozen['cot_product'] = cot_product
    ctx = Context()
    ctx.session = session
    ctx.config = apply_product_config(unfrozen)
    ctx.config['cot_job_type'] = "signing"
    for key in list(ctx.config.keys()):
        if key.endswith("_dir"):
            ctx.config[key] = os.path.join(tmp, key)
            makedirs(ctx.config[key])
        elif key.endswith("key_path"):
            ctx.config[key] = os.path.join(tmp, key)
    ctx.config['verbose'] = VERBOSE
    ctx.event_loop = event_loop
    return ctx
def main():
    """Scriptworker entry point: get everything set up, then enter the main loop
    """
    context = Context()
    kwargs = {}
    # Optional single positional argument: path to the config file.
    if len(sys.argv) > 1:
        if len(sys.argv) > 2:
            print("Usage: {} [configfile]".format(sys.argv[0]), file=sys.stderr)
            sys.exit(1)
        kwargs['path'] = sys.argv[1]
    context.config, credentials = create_config(**kwargs)
    update_logging_config(context)
    # Remove stale work/artifact state from previous runs.
    cleanup(context)
    # Cap concurrent outbound connections via the shared connector.
    conn = aiohttp.TCPConnector(limit=context.config["max_connections"])
    loop = asyncio.get_event_loop()
    # NOTE(review): using ClientSession as a *sync* context manager was
    # deprecated and later removed in aiohttp — confirm the pinned aiohttp
    # version still supports this before upgrading.
    with aiohttp.ClientSession(connector=conn) as session:
        context.session = session
        context.credentials = credentials
        # Restart async_main forever; a stopped loop raises RuntimeError from
        # run_forever/create_task, which we swallow and retry.
        while True:
            try:
                loop.create_task(async_main(context))
                loop.run_forever()
            except RuntimeError:
                pass
def context(tmpdir_factory):
    """Return a polling Context with fast intervals and already-stale credentials."""
    temp_dir = tmpdir_factory.mktemp("context", numbered=True)
    context = Context()
    context.config = deepcopy(DEFAULT_CONFIG)
    context.config['log_dir'] = os.path.join(str(temp_dir), "log")
    context.config['work_dir'] = os.path.join(str(temp_dir), "work")
    context.config['artifact_dir'] = os.path.join(str(temp_dir), "artifact")
    # Very short intervals so polling tests don't block.
    context.config['poll_interval'] = .1
    context.config['credential_update_interval'] = .1
    # Timestamp 10 minutes in the past, to force a credential refresh.
    # NOTE(review): ``replace(minutes=-10)`` / ``replace(hours=10)`` rely on
    # arrow's pre-0.15 shifting behaviour, and ``.timestamp`` as an attribute
    # also changed in later arrow releases (``shift(...)`` / ``timestamp()``)
    # — confirm the pinned arrow version before upgrading.
    context.credentials_timestamp = arrow.utcnow().replace(minutes=-10).timestamp
    context.poll_task_urls = {
        'queues': [{
            "signedPollUrl": "poll0",
            "signedDeleteUrl": "delete0",
        }, {
            "signedPollUrl": "poll1",
            "signedDeleteUrl": "delete1",
        }],
        # Far-future expiry so the URLs stay valid for the whole test.
        'expires': arrow.utcnow().replace(hours=10).isoformat(),
    }
    return context
def _init_context(config_path=None, default_config=None):
    """Build a Context from ``config_path`` (or ``sys.argv[1]`` when omitted).

    Args:
        config_path (str, optional): path to the JSON/YAML config file.
        default_config (dict, optional): base config the file is overlaid on.
            Note: when given, it is mutated in place by ``update``.

    Returns:
        Context: with ``config`` and ``task`` populated.
    """
    context = Context()
    # This prevents *script from overwriting json on disk.
    # Fix: accept keyword arguments too — the old ``lambda *args`` stub raised
    # TypeError for any caller that passed kwargs to write_json.
    context.write_json = lambda *args, **kwargs: None
    context.write_json()  # for coverage
    if config_path is None:
        if len(sys.argv) != 2:
            _usage()
        config_path = sys.argv[1]
    context.config = {} if default_config is None else default_config
    context.config.update(load_json_or_yaml(config_path, is_path=True))
    context.task = get_task(context.config)
    return context
def context():
    """Return a bare Context carrying only the default config."""
    default_context = Context()
    default_context.config = get_default_config()
    return default_context
def context(tmpdir):
    """Yield a Context with default config, a temp work dir, and an empty task."""
    ctx = Context()
    ctx.config = get_default_config()
    ctx.config['work_dir'] = os.path.join(tmpdir, 'work')
    ctx.task = {}
    yield ctx