import os
from argparse import Namespace

import pytest

# Import paths follow the datarobot-user-models layout; adjust if they differ.
from datarobot_drum.drum.common import MODEL_CONFIG_FILENAME, read_model_metadata_yaml
from datarobot_drum.drum.exceptions import DrumCommonException
from datarobot_drum.drum.push import drum_push


def test_push_no_target_name_in_yaml(request, config_yaml, tmp_path):
    # Resolve the parametrized fixture name into the actual YAML string,
    # then append the module-level modelID.
    config_yaml = request.getfixturevalue(config_yaml)
    config_yaml = config_yaml + "\nmodelID: {}".format(modelID)

    # Write the metadata into the temporary code directory and parse it back.
    with open(os.path.join(tmp_path, MODEL_CONFIG_FILENAME), mode="w") as f:
        f.write(config_yaml)
    config = read_model_metadata_yaml(tmp_path)

    options = Namespace(code_dir=tmp_path, model_config=config)
    # The metadata lacks targetName, so drum_push must reject it.
    with pytest.raises(DrumCommonException, match=r"Missing keys: \['targetName'\]"):
        drum_push(options)
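# For the test above to run, modelID and a parametrized config_yaml fixture
# must exist at module scope. A minimal sketch of that scaffolding follows;
# the fixture body and modelID value are illustrative assumptions, not copied
# from the DRUM test suite.
modelID = "5f1f15a4d6111f01cb7f91f"  # placeholder DataRobot model ID


@pytest.fixture
def inference_metadata_yaml():
    # Metadata that deliberately omits targetName so push validation fails.
    return "name: my-model\ntype: inference\ntargetType: regression"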
def run(self):
    try:
        # Docker execution wraps every mode except the ones that talk to
        # DataRobot or validate locally (PUSH, PERF_TEST, VALIDATION).
        if self.options.docker and (
            self.run_mode not in (RunMode.PUSH, RunMode.PERF_TEST, RunMode.VALIDATION)
        ):
            ret = self._run_inside_docker(self.options, self.run_mode, self.raw_arguments)
            if ret:
                raise DrumCommonException("Error from docker process: {}".format(ret))
            return
    except DrumCommonException as e:
        self.logger.error(e)
        raise
    except AttributeError as e:
        # Some parsers do not define options.docker; anything else is a real error.
        if "docker" not in str(e):
            raise e

    self._print_welcome_header()

    if self.run_mode in [RunMode.SERVER, RunMode.SCORE]:
        self._run_fit_and_predictions_pipelines_in_mlpiper()
    elif self.run_mode == RunMode.FIT:
        self.run_fit()
    elif self.run_mode == RunMode.PERF_TEST:
        CMRunTests(self.options, self.run_mode).performance_test()
    elif self.run_mode == RunMode.VALIDATION:
        CMRunTests(self.options, self.run_mode, self.target_type).validation_test()
    elif self.run_mode == RunMode.NEW:
        self._generate_template()
    elif self.run_mode == RunMode.PUSH:
        # Validate locally first: clone the parsed options, rewrite them for
        # validation mode, and run a fresh CMRunner before pushing.
        options, run_mode, raw_arguments = setup_validation_options(copy.deepcopy(self.options))
        validation_runner = CMRunner(self.runtime)
        validation_runner.options = options
        validation_runner.run_mode = run_mode
        validation_runner.raw_arguments = raw_arguments
        validation_runner.run()
        print(
            "Your model was successfully validated locally! Now we will add it to DataRobot."
        )
        drum_push(self.options)
    else:
        error_message = "{} mode is not implemented".format(self.run_mode)
        print(error_message)
        raise DrumCommonException(error_message)
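# For reference, a minimal sketch of the RunMode enum that run() dispatches on.
# Only the member names appear in the code above; the string values here are
# illustrative assumptions, not the DRUM definitions.
from enum import Enum


class RunMode(Enum):
    NEW = "new"
    SCORE = "score"
    FIT = "fit"
    PERF_TEST = "perf-test"
    VALIDATION = "validation"
    SERVER = "server"
    PUSH = "push"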