def test_filter_schema():
  """Re-parsing a job dict containing an unknown attribute must fail."""
  base_env = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG))
  raw_job = base_env['jobs'][0].get()
  raw_job['unknown_attribute'] = 'foo bar baz'
  serialized = json.dumps(raw_job)
  # The unknown key should surface as an AttributeError during schema binding.
  with pytest.raises(AttributeError):
    AuroraConfigLoader.loads_json(serialized)
def test_memoized_load_json_cache_hit(mock_gen_content_key):
  """A pre-seeded CACHED_JSON entry is returned without reading the path."""
  reference_env = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG))
  reference_json = json.dumps(reference_env['jobs'][0].get())
  # Force the content-key computation to resolve to our seeded cache entry.
  mock_gen_content_key.return_value = MESOS_CONFIG_MD5
  AuroraConfigLoader.CACHED_JSON = {MESOS_CONFIG_MD5: reference_json}
  result = AuroraConfigLoader.load_json('a/path', is_memoized=True)
  assert result == reference_json, "Test cache hit load_json"
def test_memoized_load():
  """load() populates CACHED_ENV only when is_memoized=True.

  Exercises both a file path and an open file object as the config source.
  """
  AuroraConfigLoader.CACHED_ENV = {}

  def check_env(env, config):
    # Each config variant should expose exactly the single hello_world job.
    assert 'jobs' in env and len(env['jobs']) == 1, (
        "Match expected jobs for config=%s" % config)
    assert env['jobs'][0].name().get() == 'hello_world'

  with temporary_dir() as d:
    with open(os.path.join(d, 'config.aurora'), 'w+') as fp:
      fp.write(MESOS_CONFIG)
      fp.flush()
      fp.seek(0)
      for config in (fp.name, fp):
        AuroraConfigLoader.CACHED_ENV = {}
        env = AuroraConfigLoader.load(config, is_memoized=False)
        check_env(env, config)
        # Bug fix: this message previously had an unfilled %s placeholder
        # (no "% config"), unlike its is_memoized=True counterpart below.
        assert MESOS_CONFIG_MD5 not in AuroraConfigLoader.CACHED_ENV.keys(), (
            "No key is cached when config=%s and is_memoized=False" % config)
        fp.seek(0)  # previous load results in filepointer at eof
        env = AuroraConfigLoader.load(config, is_memoized=True)
        check_env(env, config)
        assert MESOS_CONFIG_MD5 in AuroraConfigLoader.CACHED_ENV.keys(), (
            "Key is cached when config=%s and is_memoized=True" % config)
def test_memoized_load():
  """Verify that load() caches by content key only when memoization is requested."""
  AuroraConfigLoader.CACHED_ENV = {}

  def check_env(env, config):
    # Both source variants must yield the single hello_world job.
    assert 'jobs' in env and len(env['jobs']) == 1, (
        "Match expected jobs for config=%s" % config)
    assert env['jobs'][0].name().get() == 'hello_world'

  with temporary_dir() as d:
    with open(os.path.join(d, 'config.aurora'), 'w+') as fp:
      fp.write(MESOS_CONFIG)
      fp.flush()
      fp.seek(0)
      for config in (fp.name, fp):
        AuroraConfigLoader.CACHED_ENV = {}
        env = AuroraConfigLoader.load(config, is_memoized=False)
        check_env(env, config)
        # Bug fix: the message format string had a %s with no argument applied;
        # "% config" now matches the assert in the memoized branch below.
        assert MESOS_CONFIG_MD5 not in AuroraConfigLoader.CACHED_ENV.keys(), (
            "No key is cached when config=%s and is_memoized=False" % config)
        fp.seek(0)  # previous load results in filepointer at eof
        env = AuroraConfigLoader.load(config, is_memoized=True)
        check_env(env, config)
        assert MESOS_CONFIG_MD5 in AuroraConfigLoader.CACHED_ENV.keys(), (
            "Key is cached when config=%s and is_memoized=True" % config)
def test_filter_schema():
  """Unknown job attributes are filtered, so re-loading the JSON succeeds."""
  loaded = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG))
  job_data = loaded['jobs'][0].get()
  job_data['unknown_attribute'] = 'foo bar baz'
  payload = json.dumps(job_data)
  # If this fails, will raise an InvalidConfigError or other exception and fail the test.
  AuroraConfigLoader.loads_json(payload)
def execute(self, context):
  """List the job keys defined in a config file, applying optional bindings.

  Returns EXIT_OK on success, EXIT_COMMAND_FAILURE if the config cannot load.
  """
  opts = context.options
  try:
    loader = (AuroraConfigLoader.load_json if opts.read_json
              else AuroraConfigLoader.load)
    env = loader(opts.config_file)
  except (AuroraConfig.Error, AuroraConfigLoader.Error, ValueError) as e:
    context.print_err("Error loading configuration file: %s" % e)
    return EXIT_COMMAND_FAILURE

  bindings = opts.bindings
  jobs = env.get("jobs", [])
  if not jobs:
    context.print_out("jobs=[]")
  else:
    keys = []
    for job in jobs:
      # Bindings are optional; unbound jobs are printed as-is.
      bound = job.bind(*bindings) if bindings else job
      keys.append("/".join([bound.cluster().get(), bound.role().get(),
                            bound.environment().get(), bound.name().get()]))
    context.print_out("jobs=[%s]" % (", ".join(keys)))
  return EXIT_OK
def test_load_json_multi():
  """A multi-job environment must round-trip through JSON serialization."""
  original_env = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG_MULTI))
  original_jobs = original_env['jobs']
  payload = json.dumps({'jobs': [j.get() for j in original_jobs]})
  reparsed_jobs = AuroraConfigLoader.loads_json(payload)['jobs']
  assert original_jobs == reparsed_jobs
def test_load_json():
  """A job serialized to a JSON file loads back equal to the original."""
  with temporary_file() as config_fp:
    config_fp.write(MESOS_CONFIG)
    config_fp.flush()
    original_job = AuroraConfigLoader.load(config_fp.name)['jobs'][0]
  with temporary_file() as json_fp:
    json_fp.write(json.dumps(original_job.get()))
    json_fp.flush()
    reloaded = AuroraConfigLoader.load_json(json_fp.name)
    assert reloaded == original_job
def before_execution(self, context):
  """Wire up the sacker plugin before the command runs.

  Registers the sacker ledger/store backends, the sacker schema, and the
  sacker binding helper with their respective global registries.
  NOTE(review): registration ordering is preserved as-is; whether the
  registries are order-sensitive is not visible from this file — confirm
  before reordering.
  """
  # register usable backends
  sacker_ledger.register_ledger('s3', S3Ledger)
  sacker_ledger.register_ledger('dynamo', DynamoLedger)
  sacker_store.register_store('s3', S3Store)
  # register schema
  AuroraConfigLoader.register_schema(sacker_schema)
  # register binding helper
  BindingHelper.register(SackerBindingHelper())
def test_pick():
  """AuroraConfig.pick selects a job by name, resolving bindings first."""
  env = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG))
  expected = env['jobs'][0]
  assert AuroraConfig.pick(env, 'hello_world', None) == expected
  # Rename the job with a mustache template and resolve it via a binding.
  env['jobs'][0] = env['jobs'][0](name='something_{{else}}')
  picked = AuroraConfig.pick(env, 'something_else', [{'else': 'else'}])
  assert str(picked.name()) == 'something_else'
def test_gen_content_key():
  """gen_content_key yields the md5 of file contents, or None for bad inputs."""
  body = "one two three"
  want = hashlib.md5(body).hexdigest()
  assert AuroraConfigLoader.gen_content_key(1) is None, (
      "Non filetype results in None")
  with temporary_dir() as tmp:
    path = os.path.join(tmp, 'file')
    # Path exists as a string but no file is there yet.
    assert AuroraConfigLoader.gen_content_key(path) is None, (
        "non existant file results in key=None")
    with open(path, 'w+') as fp:
      fp.write(body)
      fp.flush()
      fp.seek(0)
      for config in (fp.name, fp):
        assert want == AuroraConfigLoader.gen_content_key(config), (
            "check hexdigest for %s" % config)
def test_gen_content_key():
  """Content keys: md5 hexdigest for readable files, None otherwise."""
  payload = "one two three"
  expected_digest = hashlib.md5(payload).hexdigest()
  assert AuroraConfigLoader.gen_content_key(1) is None, (
      "Non filetype results in None")
  with temporary_dir() as workdir:
    target = os.path.join(workdir, 'file')
    assert AuroraConfigLoader.gen_content_key(target) is None, (
        "non existant file results in key=None")
    with open(target, 'w+') as handle:
      handle.write(payload)
      handle.flush()
      handle.seek(0)
      # Both a path string and an open file object must hash identically.
      for config in (handle.name, handle):
        assert expected_digest == AuroraConfigLoader.gen_content_key(config), (
            "check hexdigest for %s" % config)
def test_pick():
  """pick() finds jobs by their post-binding name."""
  environment = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG))
  first_job = environment['jobs'][0]
  assert AuroraConfig.pick(environment, 'hello_world', None) == first_job
  # Template the name, then bind {{else}} so pick() can match the result.
  environment['jobs'][0] = environment['jobs'][0](name='something_{{else}}')
  chosen = AuroraConfig.pick(environment, 'something_else', [{'else': 'else'}])
  assert str(chosen.name()) == 'something_else'
def test_load():
  """load() accepts both a file path and an open file object."""
  with temporary_file() as fp:
    fp.write(MESOS_CONFIG)
    fp.flush()
    fp.seek(0)
    for source in (fp.name, fp):
      env = AuroraConfigLoader.load(source)
      assert 'jobs' in env and len(env['jobs']) == 1
      assert env['jobs'][0].name().get() == 'hello_world'
def test_load_json_memoized():
  """load_json caches by content md5 only when is_memoized=True."""
  AuroraConfigLoader.CACHED_JSON = {}
  env = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG_MULTI))
  first_job = env['jobs'][0]
  serialized = json.dumps(first_job.get())
  digest = hashlib.md5(serialized).hexdigest()
  with temporary_dir() as tmp:
    json_path = os.path.join(tmp, 'config.json')
    with open(json_path, 'w+') as fp:
      fp.write(json.dumps(first_job.get()))
      fp.close()
      # Non-memoized load: result correct, nothing cached.
      assert AuroraConfigLoader.load_json(fp.name, is_memoized=False)['jobs'][0] == first_job
      assert digest not in AuroraConfigLoader.CACHED_JSON, (
          "No key is cached when is_memoized=False")
      # Memoized load: result correct, digest now present in the cache.
      assert AuroraConfigLoader.load_json(fp.name, is_memoized=True)['jobs'][0] == first_job
      assert digest in AuroraConfigLoader.CACHED_JSON, (
          "Key is cached when is_memoized=True")
def test_pick():
  """pick() works against an env loaded from an on-disk config file."""
  with temporary_file() as config_fp:
    config_fp.write(MESOS_CONFIG)
    config_fp.flush()
    env = AuroraConfigLoader.load(config_fp.name)
    baseline_job = env['jobs'][0]
    assert AuroraConfig.pick(env, 'hello_world', None) == baseline_job
    # Re-template the job name and confirm pick() resolves the binding.
    env['jobs'][0] = env['jobs'][0](name='something_{{else}}')
    resolved = AuroraConfig.pick(env, 'something_else', [{'else': 'else'}])
    assert str(resolved.name()) == 'something_else'
def test_load_json_memoized():
  """CACHED_JSON gains a content-md5 key only for memoized load_json calls."""
  AuroraConfigLoader.CACHED_JSON = {}
  multi_env = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG_MULTI))
  target_job = multi_env['jobs'][0]
  key = hashlib.md5(json.dumps(target_job.get())).hexdigest()
  with temporary_dir() as workdir:
    out_path = os.path.join(workdir, 'config.json')
    with open(out_path, 'w+') as handle:
      handle.write(json.dumps(target_job.get()))
      handle.close()
      reloaded = AuroraConfigLoader.load_json(handle.name, is_memoized=False)['jobs'][0]
      assert reloaded == target_job
      assert key not in AuroraConfigLoader.CACHED_JSON, (
          "No key is cached when is_memoized=False")
      reloaded = AuroraConfigLoader.load_json(handle.name, is_memoized=True)['jobs'][0]
      assert reloaded == target_job
      assert key in AuroraConfigLoader.CACHED_JSON, (
          "Key is cached when is_memoized=True")
def test_pick():
  """File-backed env: pick() matches both literal and bound job names."""
  with temporary_file() as handle:
    handle.write(MESOS_CONFIG)
    handle.flush()
    loaded_env = AuroraConfigLoader.load(handle.name)
    original = loaded_env['jobs'][0]
    assert AuroraConfig.pick(loaded_env, 'hello_world', None) == original
    loaded_env['jobs'][0] = loaded_env['jobs'][0](name='something_{{else}}')
    result = AuroraConfig.pick(loaded_env, 'something_else', [{'else': 'else'}])
    assert str(result.name()) == 'something_else'
def execute(self, context):
  """Print the job keys contained in the given config file.

  Returns EXIT_OK on success, EXIT_COMMAND_FAILURE if loading fails.
  """
  try:
    if context.options.read_json:
      env = AuroraConfigLoader.load_json(context.options.config_file)
    else:
      env = AuroraConfigLoader.load(context.options.config_file)
  except (AuroraConfig.Error, AuroraConfigLoader.Error, ValueError) as e:
    context.print_err("Error loading configuration file: %s" % e)
    return EXIT_COMMAND_FAILURE

  bindings = context.options.bindings

  def bind_if_needed(job):
    # Leave the job untouched when no bindings were supplied.
    return job.bind(*bindings) if bindings else job

  def jobkey_of(job):
    return "/".join([job.cluster().get(), job.role().get(),
                     job.environment().get(), job.name().get()])

  job_list = env.get("jobs", [])
  if not job_list:
    context.print_out("jobs=[]")
  else:
    names = [jobkey_of(bind_if_needed(j)) for j in job_list]
    context.print_out("jobs=[%s]" % (", ".join(names)))
  return EXIT_OK
def test_load_with_includes():
  """An include directive pulls jobs from the referenced file into the env."""
  with temporary_dir() as tmp:
    inner_name = 'f1.aurora'
    outer_name = 'f2.aurora'
    with open(os.path.join(tmp, inner_name), 'w+') as inner:
      inner.write(MESOS_CONFIG)
      inner.flush()
      inner.seek(0)
      with open(os.path.join(tmp, outer_name), 'w+') as outer:
        # The outer config includes the inner one by relative filename.
        outer.write(MESOS_CONFIG_WITH_INCLUDE_TEMPLATE % inner_name)
        outer.flush()
        outer.seek(0)
        env = AuroraConfigLoader.load(outer.name, is_memoized=True)
        assert 'jobs' in env and len(env['jobs']) == 2
        assert env['jobs'][0].name().get() == 'hello_world'
        assert env['jobs'][1].name().get() == 'otherjob'
def test_bad_config():
  """A syntactically invalid config file raises InvalidConfigError."""
  with temporary_file() as config_fp:
    config_fp.write(BAD_MESOS_CONFIG)
    config_fp.flush()
    with pytest.raises(AuroraConfigLoader.InvalidConfigError):
      AuroraConfigLoader.load(config_fp.name)
def test_bad_config():
  """An invalid in-memory config is rejected with InvalidConfigError."""
  bad_stream = BytesIO(BAD_MESOS_CONFIG)
  with pytest.raises(AuroraConfigLoader.InvalidConfigError):
    AuroraConfigLoader.load(bad_stream)
def test_enoent():
  """Loading a nonexistent path raises AuroraConfigLoader.NotFound."""
  missing_path = tempfile.mktemp()
  with pytest.raises(AuroraConfigLoader.NotFound):
    AuroraConfigLoader.load(missing_path)
def test_empty_config():
  """An empty config file loads without raising."""
  with temporary_file() as empty_fp:
    empty_fp.flush()
    AuroraConfigLoader.load(empty_fp.name)
def test_empty_config():
  """An empty in-memory config loads without raising."""
  empty_stream = BytesIO()
  AuroraConfigLoader.load(empty_stream)
def test_memoized_load_cache_hit(mock_gen_content_key):
  """A pre-seeded CACHED_ENV entry is returned without touching the path."""
  reference_env = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG))
  # Force the content key so the bogus path resolves to our cache entry.
  mock_gen_content_key.return_value = MESOS_CONFIG_MD5
  AuroraConfigLoader.CACHED_ENV = {MESOS_CONFIG_MD5: reference_env}
  result = AuroraConfigLoader.load('a/path', is_memoized=True)
  assert result == reference_env, "Test cache hit"
def test_load_json_single():
  """A single job must round-trip through JSON serialization."""
  source_env = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG))
  source_job = source_env['jobs'][0]
  round_tripped = AuroraConfigLoader.loads_json(
      json.dumps(source_job.get()))['jobs'][0]
  assert round_tripped == source_job
def test_load_json_multi():
  """All jobs in a multi-job config survive a JSON round trip."""
  loaded = AuroraConfigLoader.load(BytesIO(MESOS_CONFIG_MULTI))
  source_jobs = loaded['jobs']
  blob = json.dumps({'jobs': [job.get() for job in source_jobs]})
  assert source_jobs == AuroraConfigLoader.loads_json(blob)['jobs']