def test_control_failing_its_initialization_must_not_be_registered():
    """A control that blows up during its init is dropped; others keep working."""
    exp = deepcopy(experiments.ExperimentNoControls)
    settings = {
        "dummy-key": "hello there",
        "controls": {
            "dummy-failed": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy_with_failing_init"
                }
            },
            "dummy": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy"
                }
            }
        }
    }

    load_global_controls(settings)
    run_experiment(exp, settings)

    # the failing control never got a chance to touch the experiment
    assert "should_never_been_called" not in exp

    # while the healthy control decorated every single activity
    for a in get_all_activities(exp):
        assert "before_activity_control" in a
        assert "after_activity_control" in a
        assert a["before_activity_control"] is True
        assert a["after_activity_control"] is True
def test_controls_on_loaded_experiment():
    """A loading-time control hook may rewrite the experiment (here: its title)."""
    settings = {
        "controls": {
            "dummy": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy_retitle_experiment_on_loading"
                }
            }
        }
    }

    load_global_controls(settings)
    initialize_global_controls({}, {}, {}, settings)

    with tempfile.NamedTemporaryFile(suffix=".json") as f:
        try:
            payload = json.dumps(experiments.ExperimentNoControls)
            f.write(payload.encode('utf-8'))
            f.seek(0)
            loaded = load_experiment(f.name)
            assert loaded["title"] == "BOOM I changed it"
        finally:
            # always unregister globals so other tests start clean
            cleanup_global_controls()
def test_load_global_controls_from_settings_configured_via_exp_config():
    """Settings-declared controls apply to an experiment configured via its config."""
    exp = deepcopy(experiments.ExperimentUsingConfigToConfigureControls)
    activities = get_all_activities(exp)

    # pristine experiment: no control markers anywhere yet
    for a in activities:
        assert "before_activity_control" not in a
        assert "after_activity_control" not in a
    assert get_global_controls() == []

    settings = {
        "controls": {
            "dummy": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy"
                }
            }
        }
    }

    load_global_controls(settings)
    run_experiment(exp, settings)

    # the run cleaned the global registry up behind itself
    assert get_global_controls() == []
    assert exp["control-value"] == "blah blah"
    for a in activities:
        assert "before_activity_control" in a
        assert "after_activity_control" in a
        assert a["before_activity_control"] is True
        assert a["after_activity_control"] is True
def test_apply_controls_even_on_background_activity():
    """Controls must wrap activities even when those run in the background."""
    exp = deepcopy(experiments.ExperimentNoControls)
    exp["method"][0]["background"] = True
    exp["method"][0]["pauses"] = {"after": 1}

    activities = get_all_activities(exp)
    for a in activities:
        assert "before_activity_control" not in a
        assert "after_activity_control" not in a
    assert get_global_controls() == []

    settings = {
        "dummy-key": "hello there",
        "controls": {
            "dummy": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy"
                }
            }
        }
    }

    load_global_controls(settings)
    run_experiment(exp, settings)

    # globals cleaned after the run; the control did see the settings value
    assert get_global_controls() == []
    assert exp["control-value"] == "hello there"
    for a in activities:
        assert "before_activity_control" in a
        assert "after_activity_control" in a
        assert a["before_activity_control"] is True
        assert a["after_activity_control"] is True
def test_get_globally_loaded_controls_from_settings():
    """Globally loaded controls are queryable until explicitly cleaned up."""
    assert get_global_controls() == []

    settings = {
        "controls": {
            "dummy": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy"
                }
            }
        }
    }

    load_global_controls(settings)
    initialize_global_controls({}, {}, {}, settings)
    try:
        registered = get_global_controls()
        assert len(registered) == 1
        control = registered[0]
        assert control["name"] == "dummy"
        provider = control["provider"]
        assert provider["type"] == "python"
        assert provider["module"] == "fixtures.controls.dummy"
    finally:
        cleanup_global_controls()

    # after cleanup the registry is empty again
    assert get_global_controls() == []
def run(ctx: click.Context, source: str, journal_path: str = "./journal.json",
        dry: bool = False, no_validation: bool = False, no_exit: bool = False,
        no_verify_tls: bool = False,
        rollback_strategy: str = "default") -> Journal:
    """Run the experiment loaded from SOURCE, either a local file or a HTTP
    resource. SOURCE can be formatted as JSON or YAML.

    Loads settings, registers global controls, loads and (optionally)
    validates the experiment, runs it, writes the journal to
    ``journal_path`` and notifies the run-flow events. Exits the process
    with status 1 on load/validation error, or on failure/deviation unless
    ``no_exit`` is set.
    """
    settings = load_settings(ctx.obj["settings_path"]) or {}
    has_deviated = False
    has_failed = False
    # controls must be registered before the experiment is loaded so that
    # loading-time control hooks can fire
    load_global_controls(settings)
    try:
        experiment = load_experiment(
            source, settings, verify_tls=not no_verify_tls)
    except InvalidSource as x:
        logger.error(str(x))
        logger.debug(x)
        ctx.exit(1)
    notify(settings, RunFlowEvent.RunStarted, experiment)
    if not no_validation:
        try:
            ensure_experiment_is_valid(experiment)
        except ChaosException as x:
            logger.error(str(x))
            logger.debug(x)
            ctx.exit(1)
    experiment["dry"] = dry
    # inject the rollback strategy into the nested runtime settings,
    # creating the intermediate dicts on demand
    settings.setdefault("runtime", {}).setdefault(
        "rollbacks", {}).setdefault("strategy", rollback_strategy)
    journal = run_experiment(experiment, settings=settings)
    has_deviated = journal.get("deviated", False)
    has_failed = journal["status"] != "completed"
    with io.open(journal_path, "w") as r:
        json.dump(journal, r, indent=2, ensure_ascii=False, default=encoder)
    if journal["status"] == "completed":
        notify(settings, RunFlowEvent.RunCompleted, journal)
    elif has_failed:
        notify(settings, RunFlowEvent.RunFailed, journal)
    # a run can deviate and still complete, so this is not an elif
    if has_deviated:
        notify(settings, RunFlowEvent.RunDeviated, journal)
    if (has_failed or has_deviated) and not no_exit:
        ctx.exit(1)
    return journal
def test_load_global_controls_from_settings():
    """Controls declared only in settings apply to every activity of a run."""
    exp = deepcopy(experiments.ExperimentNoControls)
    activities = get_all_activities(exp)

    # before the run, no control markers exist anywhere
    for a in activities:
        assert "before_activity_control" not in a
        assert "after_activity_control" not in a
    assert get_global_controls() == []

    settings = {
        "dummy-key": "hello there",
        "controls": {
            "dummy": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy"
                }
            }
        },
    }

    load_global_controls(settings)
    run_experiment(exp, settings)

    # the run cleaned the registry and the control saw the settings value
    assert get_global_controls() == []
    assert exp["control-value"] == "hello there"
    for a in activities:
        assert "before_activity_control" in a
        assert "after_activity_control" in a
        assert a["before_activity_control"] is True
        assert a["after_activity_control"] is True
def run_chaos_engine(self, file, env_params: dict, report: str,
                     report_endpoint: str) -> bool:
    """Run a chaos experiment programmatically instead of via the chaos binary.

    :param file: experiment file name; looked up in the current directory
        first, then inside the first site-packages directory.
    :param env_params: environment parameters (currently unused here;
        ``os.environ`` is passed to report creation instead — TODO confirm).
    :param report: the string ``'true'`` triggers report creation.
    :param report_endpoint: endpoint the report is sent to.
    :return: True when the run completed without failure or deviation,
        False otherwise.
    :raises FileNotFoundError: when the experiment file cannot be located.
    """
    settings_path = os.environ.get("settings_path")
    # NOTE(review): when the env var is set, the raw *path string* is used as
    # the settings object (preserving original behaviour); it likely ought to
    # be loaded into a settings dict — TODO confirm against the settings loader.
    settings = settings_path if settings_path is not None else {}
    has_deviated = False
    has_failed = False
    load_global_controls(settings)
    journal_file_suffix = file
    try:
        try:
            with open(file, "r"):
                logger.info("File exists in local")
        except FileNotFoundError:
            logger.info(
                "File is not available in the current directory, looking inside site packages"
            )
            file = self._find_experiment_in_site_packages(file)
        experiment = load_experiment(click.format_filename(file), settings)
    except InvalidSource as x:
        logger.error(str(x))
        logger.debug(x)
        sys.exit(1)

    logger.info("chaos json file found, proceeding with test")
    journal = run_experiment(experiment, settings=settings)
    has_deviated = journal.get("deviated", False)
    has_failed = journal["status"] != "completed"

    # persist the journal alongside the process, suffixed by the file name
    json_file_name = "journal" + "-" + journal_file_suffix
    with open(json_file_name, "w") as r:
        json.dump(journal, r, indent=2, ensure_ascii=False, default=encoder)

    if report == 'true':
        self.create_report(os.environ, journal, report_endpoint)

    if has_failed or has_deviated:
        logger.error("Test Failed")
        # bug fix: the original returned `has_failed and has_deviated`, which
        # evaluates to True (i.e. success) when the run both failed AND
        # deviated; a failed run must always report False.
        return False
    logger.info("Test Passed")
    return True

@staticmethod
def _find_experiment_in_site_packages(file: str) -> str:
    """Locate *file* under the first site-packages directory.

    :return: the absolute path of the file when found.
    :raises FileNotFoundError: when the file is not present there either.
    """
    location = site.getsitepackages()[0]
    for root, dirs, files in os.walk(location):
        if file in files:
            return os.path.join(root, file)
    logger.error("File " + file + " not found in site packages too, quitting")
    raise FileNotFoundError("Chaos file is not found")
def verify(ctx: click.Context, source: str,
           journal_path: str = "./journal.json", dry: bool = False,
           no_validation: bool = False, no_exit: bool = False,
           no_verify_tls: bool = False):
    """Run the verification loaded from SOURCE, either a local file or a HTTP
    resource. SOURCE can be formatted as JSON or YAML.

    Registers global controls, optionally switches team context for the
    verification, loads and (optionally) validates it, runs it with the
    continuous strategy and writes the journal to ``journal_path``. Exits
    the process with status 1 on load/validation error.
    """
    settings = load_settings(ctx.obj["settings_path"]) or {}
    load_global_controls(settings)
    try:
        # a verification run may need to switch team context first; abort
        # if that switch is refused
        if not switch_team_during_verification_run(source, settings):
            ctx.exit(1)
        verification = load_experiment(
            source, settings, verify_tls=not no_verify_tls)
    except InvalidSource as x:
        logger.error(str(x))
        logger.debug(x)
        ctx.exit(1)
    if not no_validation:
        try:
            ensure_verification_is_valid(verification)
        except ChaosException as v:
            logger.error(str(v))
            logger.debug(v)
            ctx.exit(1)
    verification["dry"] = dry
    # NOTE(review): "CONTINOUS" appears to be the enum member's actual
    # spelling in the project — do not "fix" without checking Strategy
    journal = run_verification(
        verification, settings=settings, strategy=Strategy.CONTINOUS)
    with io.open(journal_path, "w") as r:
        json.dump(journal, r, indent=2, ensure_ascii=False, default=encoder)
    return journal
def test_controls_on_loading_experiment():
    """A loading-time control may interrupt the execution entirely."""
    settings = {
        "controls": {
            "dummy": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy_fail_loading_experiment"
                }
            }
        }
    }

    load_global_controls(settings)
    initialize_global_controls({}, {}, {}, settings)

    with tempfile.NamedTemporaryFile(suffix=".json") as f:
        try:
            # the control raises while the experiment file is being loaded
            with pytest.raises(InterruptExecution):
                load_experiment(f.name)
        finally:
            cleanup_global_controls()
def test_control_must_not_rest_state_before_calling_the_after_side():
    """The end state must still be visible to the after-side of a control."""
    exp = deepcopy(experiments.ExperimentNoControlsWithDeviation)
    settings = {
        "controls": {
            "dummy": {
                "provider": {
                    "type": "python",
                    "module": "fixtures.controls.dummy_need_access_to_end_state"
                }
            }
        }
    }

    load_global_controls(settings)
    journal = run_experiment(exp, settings)

    before_hypo_result = journal["steady_states"]["before"]
    assert "after_hypothesis_control" in before_hypo_result
    # idiom fix: compare singletons with `is`, not `==` (flake8 E712),
    # matching the other tests in this module
    assert before_hypo_result["after_hypothesis_control"] is True

    assert "after_experiment_control" in journal
    assert journal["after_experiment_control"] is True
def run(
    ctx: click.Context,
    source: str,
    journal_path: str = "./journal.json",
    dry: Optional[str] = None,
    no_validation: bool = False,
    no_exit: bool = False,
    no_verify_tls: bool = False,
    rollback_strategy: str = "default",
    var: Dict[str, Any] = None,
    var_file: List[str] = None,
    hypothesis_strategy: str = "default",
    hypothesis_frequency: float = 1.0,
    fail_fast: bool = False,
) -> Journal:
    """Run the experiment loaded from SOURCE, either a local file or a HTTP
    resource. SOURCE can be formatted as JSON or YAML.

    Loads settings, merges CLI/file-provided experiment variables,
    registers global controls, loads and (optionally) validates the
    experiment, then runs it with the requested hypothesis strategy and
    schedule. The journal is written to ``journal_path`` and run-flow
    events are notified. Exits with status 1 on load/validation error, or
    on failure/deviation unless ``no_exit`` is set.
    """
    settings = load_settings(ctx.obj["settings_path"]) or {}
    has_deviated = False
    has_failed = False
    # variables given via --var/--var-file, merged into one mapping
    experiment_vars = merge_vars(var, var_file)
    # controls must be registered before loading so loading-time hooks fire
    load_global_controls(settings)
    try:
        experiment = load_experiment(
            source, settings, verify_tls=not no_verify_tls)
    except InvalidSource as x:
        logger.error(str(x))
        logger.debug(x)
        ctx.exit(1)
    notify(settings, RunFlowEvent.RunStarted, experiment)
    if not no_validation:
        try:
            ensure_experiment_is_valid(experiment)
        except ChaosException as x:
            logger.error(str(x))
            logger.debug(x)
            ctx.exit(1)
    # the CLI dry flag is a string; the runner wants the Dry enum
    experiment["dry"] = Dry.from_string(dry)
    # inject the rollback strategy into nested runtime settings, creating
    # the intermediate dicts on demand
    settings.setdefault("runtime", {}).setdefault("rollbacks", {}).setdefault(
        "strategy", rollback_strategy
    )
    hypothesis_strategy = check_hypothesis_strategy_spelling(
        hypothesis_strategy)
    schedule = Schedule(
        continuous_hypothesis_frequency=hypothesis_frequency,
        fail_fast=fail_fast
    )
    journal = run_experiment(
        experiment,
        settings=settings,
        strategy=hypothesis_strategy,
        schedule=schedule,
        experiment_vars=experiment_vars,
    )
    has_deviated = journal.get("deviated", False)
    has_failed = journal["status"] != "completed"
    # restore the original (string) dry value so the journal serializes
    if "dry" in journal["experiment"]:
        journal["experiment"]["dry"] = dry
    with open(journal_path, "w") as r:
        json.dump(journal, r, indent=2, ensure_ascii=False, default=encoder)
    if journal["status"] == "completed":
        notify(settings, RunFlowEvent.RunCompleted, journal)
    elif has_failed:
        notify(settings, RunFlowEvent.RunFailed, journal)
    # a run can deviate and still complete, so this is not an elif
    if has_deviated:
        notify(settings, RunFlowEvent.RunDeviated, journal)
    if (has_failed or has_deviated) and not no_exit:
        ctx.exit(1)
    return journal