def main(name=None, config_path=None, close_loop=True):
    """Script entry point: load config, configure logging, run async_main.

    Args:
        name (str, optional): only proceed when None or '__main__', so the
            module can be imported without side effects.
        config_path (str, optional): path to a JSON config file; falls back
            to sys.argv[1] when omitted.
        close_loop (bool, optional): close the event loop on exit.
    """
    if name not in (None, '__main__'):
        return
    context = Context()
    context.config = dict()
    if config_path is None:
        # exactly one CLI argument (the config path) is required
        if len(sys.argv) != 2:
            usage()
        config_path = sys.argv[1]
    context.config.update(load_json(path=config_path))
    logging.basicConfig(**craft_logging_config(context))
    # taskcluster client is chatty below WARNING
    logging.getLogger('taskcluster').setLevel(logging.WARNING)
    loop = asyncio.get_event_loop()
    # NOTE(review): synchronous `with` on aiohttp.ClientSession is deprecated
    # in newer aiohttp (use `async with`) — confirm the pinned version allows it
    with aiohttp.ClientSession() as session:
        context.session = session
        try:
            loop.run_until_complete(async_main(context))
        except ScriptWorkerTaskException as exc:
            traceback.print_exc()
            # propagate the task-specific exit code to the shell
            sys.exit(exc.exit_code)
    if close_loop:
        # Loop cannot be reopen once closed. Not closing it allows to run several tests on main()
        loop.close()
async def test_put_success(fake_session):
    """A put() through the fake session should yield a 200 with the fake body."""
    context = Context()
    context.config = get_fake_valid_config()
    context.session = fake_session
    response = await put(
        context,
        url=URL("https://foo.com/packages/fake.package"),
        headers={},
        abs_filename="tests/fake_artifact.json",
        session=fake_session,
    )
    assert response.status == 200
    assert response.resp == [b"asdf", b"asdf"]
def test_beetmover_template_args_generation():
    """Template args should reflect the task/props; a locale flips the key to repacks."""
    context = Context()
    context.task = get_fake_valid_task()
    context.config = get_fake_valid_config()
    context.properties = get_fake_balrog_props()["properties"]
    context.properties['platform'] = context.properties['stage_platform']

    assert generate_beetmover_template_args(context.task, context.properties) == {
        'branch': 'mozilla-central',
        'platform': 'android-api-15',
        'product': 'Fake',
        'stage_platform': 'android-api-15',
        'template_key': 'fennec_nightly',
        'upload_date': '2016/09/2016-09-01-16-26-14',
        'version': '99.0a1',
    }

    # adding a locale to the payload must switch to the repack template
    context.task['payload']['locale'] = 'ro'
    localized_args = generate_beetmover_template_args(context.task, context.properties)
    assert localized_args['template_key'] == 'fake_nightly_repacks'
def test_get_schema_key_by_action(scopes, expected):
    """The schema key should be derived from whatever scopes the task carries."""
    context = Context()
    context.config = get_fake_valid_config()
    context.task = get_fake_valid_task()
    context.task["scopes"] = scopes
    actual = get_schema_key_by_action(context)
    assert actual == expected
def main():
    """Scriptworker entry point: get everything set up, then enter the main loop
    """
    context = Context()
    kwargs = {}
    if len(sys.argv) > 1:
        # at most one positional argument (the config file) is accepted
        if len(sys.argv) > 2:
            print("Usage: {} [configfile]".format(sys.argv[0]), file=sys.stderr)
            sys.exit(1)
        kwargs['path'] = sys.argv[1]
    context.config, credentials = create_config(**kwargs)
    update_logging_config(context)
    cleanup(context)
    # cap concurrent connections per config
    conn = aiohttp.TCPConnector(limit=context.config["max_connections"])
    loop = asyncio.get_event_loop()
    # NOTE(review): synchronous `with` on aiohttp.ClientSession is deprecated
    # in newer aiohttp — confirm the pinned version still supports it
    with aiohttp.ClientSession(connector=conn) as session:
        context.session = session
        context.credentials = credentials
        while True:
            try:
                loop.create_task(async_main(context))
                loop.run_forever()
            except RuntimeError:
                # re-enter the loop if run_forever is interrupted
                pass
def test_balrog_manifest_to_artifacts():
    """The balrog manifest written to the artifact dir should round-trip via JSON."""
    context = Context()
    context.task = get_fake_valid_task()
    context.config = get_fake_valid_config()
    fake_balrog_manifest = context.task["payload"]["releaseProperties"]
    context.balrog_manifest = fake_balrog_manifest

    # fake the path to be able to check the contents written later on
    with tempfile.TemporaryDirectory() as tmpdirname:
        context.config["artifact_dir"] = tmpdirname
        file_path = os.path.join(context.config["artifact_dir"], "public/manifest.json")

        # <temp-dir>/public doesn't exist yet and it's not automatically
        # created; exist_ok=True replaces the racy exists()-then-makedirs check
        os.makedirs(os.path.join(tmpdirname, "public"), exist_ok=True)

        add_balrog_manifest_to_artifacts(context)

        with open(file_path, "r") as fread:
            retrieved_data = json.load(fread)

        assert fake_balrog_manifest == retrieved_data
async def context():
    """Async fixture: a Context configured with fake JWT credentials."""
    ctx = Context()
    ctx.config = {'jwt_user': '******', 'jwt_secret': 'secret'}
    return ctx
def get_context_from_cmdln(args, desc="Run scriptworker"):
    """Create a Context object from args.

    This was originally part of main(), but we use it in
    `scriptworker.gpg.create_initial_gpg_homedirs` too.

    Args:
        args (list): the commandline args. Generally sys.argv

    Returns:
        tuple: `scriptworker.context.Context` with populated config, and
            credentials frozendict
    """
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument(
        "config_path",
        type=str,
        nargs="?",
        default="scriptworker.json",
        help="the path to the config file",
    )
    parser.add_argument(
        "cot_config_path",
        type=str,
        nargs="?",
        help="the path to the chain of trust config file",
    )
    parsed_args = parser.parse_args(args)

    context = Context()
    context.config, credentials = create_config(config_path=parsed_args.config_path)
    update_logging_config(context)
    context.cot_config = create_cot_config(context, cot_config_path=parsed_args.cot_config_path)
    return context, credentials
def test_validate_scopes():
    """Scope validation must SystemExit on empty, malformed, or mismatched scopes."""
    context = Context()
    context.config = get_fake_valid_config()
    context.task = get_fake_valid_task()
    context.properties = get_fake_balrog_props()["properties"]
    context.properties['platform'] = context.properties['stage_platform']
    context.artifacts_to_beetmove = get_upstream_artifacts(context)
    manifest = generate_beetmover_manifest(context.config, context.task, context.properties)

    # each of these scope sets must be rejected outright
    rejected_scope_sets = (
        [],
        ["project:releng:beetmover:!@#nightly_(@#$"],
        ["project:releng:beetmover:mightly"],
    )
    for bad_scopes in rejected_scope_sets:
        context.task['scopes'] = bad_scopes
        with pytest.raises(SystemExit):
            validate_task_scopes(context, manifest)

    # a dep scope combined with nightly-style prefixes must also be rejected
    context.task['scopes'] = ["project:releng:beetmover:dep"]
    manifest['s3_prefix_dated'] = "pub/mobile/nightly/2017/01/2017-01-dep.."
    manifest['s3_prefix_latest'] = "pub/mobile/nightly/2017/01/2017-01-dep.."
    with pytest.raises(SystemExit):
        validate_task_scopes(context, manifest)
def test_move_beets(event_loop):
    """move_beets should hand every manifest source/destination pair to move_beet."""
    context = Context()
    context.config = get_fake_valid_config()
    context.task = get_fake_valid_task()
    context.properties = get_fake_balrog_props()["properties"]
    context.properties['platform'] = context.properties['stage_platform']
    manifest = generate_candidates_manifest(context)

    expected_sources = [
        'https://queue.taskcluster.net/v1/task/VALID_TASK_ID/artifacts/public/build/target.package',
        'https://queue.taskcluster.net/v1/task/VALID_TASK_ID/artifacts/public/build/en-US/target.package'
    ]
    expected_destinations = [
        ('pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/fake-99.0a1.multi.fake.package',
         'pub/mobile/nightly/latest-mozilla-central-fake/fake-99.0a1.multi.fake.package'
         ),
        ('pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/en-US/fake-99.0a1.en-US.fake.package',
         'pub/mobile/nightly/latest-mozilla-central-fake/en-US/fake-99.0a1.en-US.fake.package'
         )
    ]

    seen_sources = []
    seen_destinations = []

    async def record_beet(context, source, destinations, locale, update_balrog_manifest):
        # record the call instead of performing any real move
        seen_sources.append(source)
        seen_destinations.append(destinations)

    with mock.patch('beetmoverscript.script.move_beet', record_beet):
        event_loop.run_until_complete(move_beets(context, manifest))

    assert sorted(seen_sources) == sorted(expected_sources)
    assert sorted(seen_destinations) == sorted(expected_destinations)
def context():
    """Fixture: a shipit Context with schema files, one dev instance, and a task."""
    ctx = Context()
    data_dir = os.path.join(os.getcwd(), 'shipitscript', 'data')
    ctx.config = {
        'mark_as_shipped_schema_file': os.path.join(data_dir, 'mark_as_shipped_task_schema.json'),
        'mark_as_started_schema_file': os.path.join(data_dir, 'mark_as_started_task_schema.json'),
        'ship_it_instances': {
            'project:releng:ship-it:server:dev': {
                'api_root': 'http://some-ship-it.url',
                'api_root_v2': 'http://some-ship-it.url/v2',
                'timeout_in_seconds': 1,
                'taskcluster_client_id': 'some-id',
                'taskcluster_access_token': 'some-token',
                'username': '******',
                'password': '******',
            },
        },
        'taskcluster_scope_prefix': "project:releng:ship-it:",
    }
    ctx.task = {
        'dependencies': ['someTaskId'],
        'payload': {'release_name': 'Firefox-59.0b3-build1'},
        'scopes': ['project:releng:ship-it:server:dev'],
    }
    return ctx
def context(tmpdir_factory):
    """Fixture: a scriptworker Context with per-test temp dirs and a fake claim."""
    base = str(tmpdir_factory.mktemp("context", numbered=True))
    ctx = Context()
    ctx.config = {
        'log_dir': os.path.join(base, "log"),
        'artifact_dir': os.path.join(base, "artifact"),
        'work_dir': os.path.join(base, "work"),
        'artifact_upload_timeout': 200,
        'artifact_expiration_hours': 1,
        'reclaim_interval': 0.001,
        # task_script deliberately writes to stderr+stdout and fails
        'task_script': ('bash', '-c', '>&2 echo bar && echo foo && exit 1'),
        'task_max_timeout': .1,
    }
    ctx.claim_task = {
        'credentials': {'a': 'b'},
        'status': {'taskId': 'taskId'},
        'task': {'task_defn': True},
        'runId': 'runId',
    }
    return ctx
def aliases_context():
    """Fixture: a bouncer Context for the "aliases" task flavour."""
    ctx = Context()
    ctx.task = get_fake_valid_task("aliases")
    ctx.config = get_fake_valid_config()
    ctx.server = "project:releng:bouncer:server:production"
    yield ctx
def test_get_initial_release_props_file():
    """An upstreamArtifacts entry without paths should raise ScriptWorkerTaskException."""
    context = Context()
    context.task = get_fake_valid_task()
    context.config = get_fake_valid_config()
    context.task['payload']['upstreamArtifacts'] = [{'paths': []}]
    with pytest.raises(ScriptWorkerTaskException):
        get_initial_release_props_file(context)
def context(tmpdir_factory):
    """Fixture: a Context whose work_dir is a fresh per-test temp directory."""
    # removed a leftover debug print(temp_dir) that polluted test output
    temp_dir = tmpdir_factory.mktemp("context", numbered=True)
    context = Context()
    context.config = {
        "work_dir": str(temp_dir),
    }
    return context
def context(tmpdir):
    """Fixture: default config with work/artifact dirs created under tmpdir."""
    ctx = Context()
    ctx.config = get_default_config()
    # create and register both scratch directories
    for name in ('work', 'artifact'):
        path = os.path.join(tmpdir, name)
        ctx.config[name + '_dir'] = path
        mkdir(path)
    yield ctx
def context():
    """Fixture: a pushapk Context pointing at the bundled task schema."""
    result = Context()
    schema_path = os.path.join(
        os.path.dirname(pushapkscript.__file__), "data/pushapk_task_schema.json"
    )
    result.config = {"schema_file": schema_path}
    return result
def context():
    """Fixture: a pushapk Context configured with the packaged task schema."""
    ctx = Context()
    package_dir = os.path.dirname(pushapkscript.__file__)
    ctx.config = {
        'schema_file': os.path.join(package_dir, 'data/pushapk_task_schema.json'),
    }
    return ctx
def test_get_product_config():
    """_get_product_config should return the product entry matching the name."""
    context = Context()
    context.config = {"products": [{"product_names": ["fenix"], "foo": "bar"}]}
    expected = {"product_names": ["fenix"], "foo": "bar"}
    assert _get_product_config(context, "fenix") == expected
def get_context(gpg_home):
    """Build a Context whose config holds only the gpg_* defaults plus gpg_home."""
    context = Context()
    # keep just the gpg-related defaults, then point gpg_home at the caller's dir
    context.config = {k: v for k, v in DEFAULT_CONFIG.items() if k.startswith("gpg_")}
    context.config["gpg_home"] = gpg_home
    return context
def test_move_beets(event_loop):
    """move_beets should visit every upstream artifact with its dated+latest destinations."""
    context = Context()
    context.config = get_fake_valid_config()
    context.task = get_fake_valid_task()
    context.release_props = get_fake_balrog_props()["properties"]
    context.release_props['platform'] = context.release_props['stage_platform']
    context.bucket = 'nightly'
    context.action = 'push-to-nightly'
    context.artifacts_to_beetmove = get_upstream_artifacts(context)
    manifest = generate_beetmover_manifest(context)
    # absolute paths into the checked-in fake chain-of-trust work dir
    expected_sources = [
        os.path.abspath(
            'beetmoverscript/test/test_work_dir/cot/eSzfNqMZT_mSiQQXu8hyqg/public/build/target.mozinfo.json'
        ),
        os.path.abspath(
            'beetmoverscript/test/test_work_dir/cot/eSzfNqMZT_mSiQQXu8hyqg/public/build/target.txt',
        ),
        os.path.abspath(
            'beetmoverscript/test/test_work_dir/cot/eSzfNqMZT_mSiQQXu8hyqg/public/build/target_info.txt'
        ),
        os.path.abspath(
            'beetmoverscript/test/test_work_dir/cot/eSzfNqMZT_mSiQQXu8hyqg/public/build/target.test_packages.json'
        ),
    ]
    # each artifact maps to a [dated-dir, latest-dir] destination pair
    expected_destinations = [
        [
            'pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/en-US/fake-99.0a1.en-US.target_info.txt',
            'pub/mobile/nightly/latest-mozilla-central-fake/en-US/fake-99.0a1.en-US.target_info.txt'
        ],
        [
            'pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/en-US/fake-99.0a1.en-US.target.mozinfo.json',
            'pub/mobile/nightly/latest-mozilla-central-fake/en-US/fake-99.0a1.en-US.target.mozinfo.json'
        ],
        [
            'pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/en-US/fake-99.0a1.en-US.target.txt',
            'pub/mobile/nightly/latest-mozilla-central-fake/en-US/fake-99.0a1.en-US.target.txt'
        ],
        [
            'pub/mobile/nightly/2016/09/2016-09-01-16-26-14-mozilla-central-fake/en-US/fake-99.0a1.en-US.target.test_packages.json',
            'pub/mobile/nightly/latest-mozilla-central-fake/en-US/fake-99.0a1.en-US.target.test_packages.json'
        ],
    ]
    actual_sources = []
    actual_destinations = []

    # stub that records calls instead of uploading anything
    async def fake_move_beet(context, source, destinations, locale, update_balrog_manifest, artifact_pretty_name):
        actual_sources.append(source)
        actual_destinations.append(destinations)

    with mock.patch('beetmoverscript.script.move_beet', fake_move_beet):
        event_loop.run_until_complete(
            move_beets(context, context.artifacts_to_beetmove, manifest))
    # order is not guaranteed, so compare sorted
    assert sorted(expected_sources) == sorted(actual_sources)
    assert sorted(expected_destinations) == sorted(actual_destinations)
def get_context(config_override):
    """Fixture-style generator: yield a Context wired with a live ClientSession.

    Args:
        config_override (dict): overrides merged into the built config.
    """
    context = Context()
    context.config, credentials = build_config(config_override)
    swlog.update_logging_config(context)
    utils.cleanup(context)
    # NOTE(review): synchronous `with` on aiohttp.ClientSession is deprecated
    # in newer aiohttp (use `async with`) — confirm the pinned version allows it
    with aiohttp.ClientSession() as session:
        context.session = session
        context.credentials = credentials
        yield context
def test_get_upstream_artifacts(expected, preserve):
    """en-US upstream artifacts should match, honouring preserve_full_paths."""
    context = Context()
    context.config = get_fake_valid_config()
    context.task = get_fake_valid_task()
    context.properties = context.task["payload"]["releaseProperties"]
    beetmove_artifacts = get_upstream_artifacts(context, preserve_full_paths=preserve)
    assert sorted(beetmove_artifacts["en-US"]) == sorted(expected)
def setup_config(config_path):
    """Build a Context from a JSON config file, defaulting to sys.argv[1]."""
    if config_path is None:
        # exactly one CLI argument (the config path) is required
        if len(sys.argv) != 2:
            usage()
        config_path = sys.argv[1]
    context = Context()
    context.config = {}
    context.config.update(load_json(path=config_path))
    return context
def context(tmpdir):
    """Fixture: a Context with log/task-log/artifact/work dirs under tmpdir."""
    ctx = Context()
    ctx.config = dict(
        log_dir=os.path.join(tmpdir, 'log'),
        task_log_dir=os.path.join(tmpdir, 'artifact', 'public', 'logs'),
        artifact_dir=os.path.join(tmpdir, 'artifact'),
        work_dir=os.path.join(tmpdir, 'work'),
    )
    return ctx
async def test_put_failure(fake_session_500):
    """A 500 response from put() should surface as ScriptWorkerRetryException."""
    context = Context()
    context.config = get_fake_valid_config()
    context.session = fake_session_500
    with pytest.raises(ScriptWorkerRetryException):
        await put(context,
                  url=URL('https://foo.com/packages/fake.package'),
                  headers={},
                  abs_filename='beetmoverscript/test/fake_artifact.json',
                  session=fake_session_500)
def context():
    """Fixture: an addonscript Context with one dev AMO instance and matching scope."""
    dev_scope = "project:releng:addons.mozilla.org:server:dev"
    ctx = Context()
    ctx.config = {
        "amo_instances": {
            dev_scope: {
                "amo_server": "http://some-amo-it.url",
                "jwt_user": "******",
                "jwt_secret": "secret",
            },
        },
    }
    ctx.task = {"scopes": [dev_scope]}
    return ctx
async def test_craft_aiohttp_connector():
    """Connector has no SSL context by default, and gets one once ssl_cert is set."""
    context = Context()
    context.config = {}
    assert script._craft_aiohttp_connector(context)._ssl is None
    context.config["ssl_cert"] = SSL_CERT
    assert script._craft_aiohttp_connector(context)._ssl
def test_get_upstream_artifacts(expected, preserve):
    """Upstream en-US artifacts should match, with or without full paths preserved."""
    context = Context()
    context.config = get_fake_valid_config()
    context.task = get_fake_valid_task()
    context.properties = get_fake_balrog_props()["properties"]
    context.properties['platform'] = context.properties['stage_platform']
    found = get_upstream_artifacts(context, preserve_full_paths=preserve)
    assert sorted(found['en-US']) == sorted(expected)
def test_async_main(event_loop):
    """async_main should run to completion when move_beets is stubbed out."""
    context = Context()
    context.config = get_fake_valid_config()

    async def noop_move_beets(context, artifacts_to_beetmove, manifest):
        pass

    with mock.patch('beetmoverscript.script.move_beets', new=noop_move_beets):
        event_loop.run_until_complete(async_main(context))
async def get_context(config_override=None):
    """Async fixture: yield a Context with temp dirs and a live ClientSession.

    Args:
        config_override (dict, optional): overrides merged into the built config.
    """
    context = Context()
    # the base dir only lives for the duration of the fixture
    with tempfile.TemporaryDirectory() as tmp:
        context.config, credentials = build_config(config_override, basedir=tmp)
        swlog.update_logging_config(context)
        utils.cleanup(context)
        async with aiohttp.ClientSession() as session:
            context.session = session
            context.credentials = credentials
            yield context
def context(tmpdir_factory):
    """Fixture: a Context whose log/artifact/work dirs live in a numbered temp dir."""
    base = str(tmpdir_factory.mktemp("context", numbered=True))
    ctx = Context()
    # build log_dir/artifact_dir/work_dir from their shared naming pattern
    ctx.config = {
        '{}_dir'.format(name): os.path.join(base, name)
        for name in ('log', 'artifact', 'work')
    }
    return ctx
def test_load_signing_server_config():
    """The example server config should parse into dep/notdep server entries."""
    context = Context()
    context.config = {
        'signing_server_config': os.path.join(
            os.path.dirname(__file__), "example_server_config.json"
        ),
    }
    cfg = load_signing_server_config(context)
    dep_servers = cfg["dep"]
    other_servers = cfg["notdep"]
    assert dep_servers[0].server == "server1:9000"
    assert dep_servers[1].user == "user2"
    assert other_servers[0].password == "pass1"
    assert other_servers[1].formats == ["f2", "f3"]
def context(tmpdir_factory):
    """Fixture: a Context with compact verbose logging into a per-test temp dir."""
    log_dir = tmpdir_factory.mktemp("context", numbered=True)
    ctx = Context()
    ctx.config = {
        "log_fmt": "%(message)s",
        "log_datefmt": "%H:%M:%S",
        "log_dir": str(log_dir),
        # tiny rotation settings so rollover paths get exercised quickly
        "log_max_bytes": 100,
        "log_num_backups": 1,
        "verbose": True,
    }
    return ctx
def _craft_rw_context(tmp, event_loop, cot_product, session):
    """Build a read-write signing Context rooted at tmp for the given cot product."""
    base_config = get_unfrozen_copy(DEFAULT_CONFIG)
    base_config['cot_product'] = cot_product

    context = Context()
    context.session = session
    context.config = apply_product_config(base_config)
    context.config['cot_job_type'] = "signing"
    # redirect every *_dir (created) and *key_path (path only) into the temp tree
    for key in list(context.config.keys()):
        if key.endswith("_dir"):
            context.config[key] = os.path.join(tmp, key)
            makedirs(context.config[key])
        if key.endswith("key_path"):
            context.config[key] = os.path.join(tmp, key)
    context.config['verbose'] = VERBOSE
    context.event_loop = event_loop
    return context
def context():
    """Fixture: a Context with worker identity and two signed poll queues."""
    ctx = Context()
    ctx.config = {
        'worker_group': 'worker_group',
        'worker_id': 'worker_id',
    }
    # two numbered queues, each with a poll and a delete URL
    queues = [
        {"signedPollUrl": "poll{}".format(n), "signedDeleteUrl": "delete{}".format(n)}
        for n in range(2)
    ]
    ctx.poll_task_urls = {'queues': queues}
    return ctx
def _init_context(config_path=None, default_config=None): context = Context() # This prevents *script from overwriting json on disk context.write_json = lambda *args: None context.write_json() # for coverage if config_path is None: if len(sys.argv) != 2: _usage() config_path = sys.argv[1] context.config = {} if default_config is None else default_config context.config.update(load_json_or_yaml(config_path, is_path=True)) context.task = get_task(context.config) return context
def context(tmpdir_factory):
    """Fixture: a full scriptworker Context with fast poll/credential intervals."""
    temp_dir = tmpdir_factory.mktemp("context", numbered=True)
    context = Context()
    context.config = deepcopy(DEFAULT_CONFIG)
    context.config['log_dir'] = os.path.join(str(temp_dir), "log")
    context.config['work_dir'] = os.path.join(str(temp_dir), "work")
    context.config['artifact_dir'] = os.path.join(str(temp_dir), "artifact")
    # tiny intervals keep the polling tests fast
    context.config['poll_interval'] = .1
    context.config['credential_update_interval'] = .1
    # NOTE(review): arrow's plural replace(minutes=-10)/replace(hours=10) shift
    # semantics and the bare `.timestamp` attribute were deprecated/removed in
    # newer arrow (use shift() / timestamp()) — confirm the pinned version
    context.credentials_timestamp = arrow.utcnow().replace(minutes=-10).timestamp
    context.poll_task_urls = {
        'queues': [{
            "signedPollUrl": "poll0",
            "signedDeleteUrl": "delete0",
        }, {
            "signedPollUrl": "poll1",
            "signedDeleteUrl": "delete1",
        }],
        'expires': arrow.utcnow().replace(hours=10).isoformat(),
    }
    return context
def get_context_from_cmdln(args, desc="Run scriptworker"):
    """Create a Context object from args.

    Args:
        args (list): the commandline args. Generally sys.argv

    Returns:
        tuple: ``scriptworker.context.Context`` with populated config, and
            credentials frozendict
    """
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument(
        "config_path",
        type=str,
        nargs="?",
        default="scriptworker.yaml",
        help="the path to the config file",
    )
    opts = parser.parse_args(args)

    context = Context()
    context.config, credentials = create_config(config_path=opts.config_path)
    update_logging_config(context)
    return context, credentials
def context():
    """Fixture: a Context carrying the default config."""
    ctx = Context()
    ctx.config = get_default_config()
    return ctx
def context(tmpdir):
    """Fixture: default-config Context with a tmpdir work_dir and an empty task."""
    ctx = Context()
    ctx.config = get_default_config()
    ctx.config['work_dir'] = os.path.join(tmpdir, 'work')
    ctx.task = {}
    yield ctx