def test_blueprint_validate_definitions_version_false(self):
    with open(utils.get_configuration_path()) as f:
        config = yaml.safe_load(f.read())
    with open(utils.get_configuration_path(), 'w') as f:
        config['validate_definitions_version'] = False
        f.write(yaml.safe_dump(config))
    cli_runner.run_cli(
        'cfy blueprints validate '
        '-p {0}/local/blueprint_validate_definitions_version.yaml'.format(
            BLUEPRINTS_DIR))

def test_validate_definitions_version(self):
    blueprint = 'blueprint_validate_definitions_version'
    self._init()
    self.assertRaises(
        parser_exceptions.DSLParsingLogicException,
        self._local_init,
        blueprint=blueprint)
    with open(utils.get_configuration_path()) as f:
        config = yaml.safe_load(f.read())
    with open(utils.get_configuration_path(), 'w') as f:
        config['validate_definitions_version'] = False
        f.write(yaml.safe_dump(config))
    # Parsing occurs during init
    self._local_init(blueprint=blueprint)

def test_local_provider_context(self):
    self._init()
    with open(utils.get_configuration_path()) as f:
        config = yaml.safe_load(f.read())
    with open(utils.get_configuration_path(), 'w') as f:
        config['local_provider_context'] = {'stub1': 'value1'}
        f.write(yaml.safe_dump(config))
    self._local_init()
    self._local_execute()
    output = cli_runner.run_cli('cfy local outputs')
    self.assertIn('"provider_context":', output)
    self.assertIn('stub1', output)
    self.assertIn('value1', output)

def bootstrap(self, blueprint_path, inputs_file=None, install_plugins=True,
              keep_up_on_failure=False, validate_only=False,
              reset_config=False, task_retries=5, task_retry_interval=90,
              subgraph_retries=2, verbose=False):
    with self.workdir:
        cfy.init(reset_config=reset_config).wait()
        with YamlPatcher(get_configuration_path()) as patch:
            prop_path = ('local_provider_context.'
                         'cloudify.workflows.subgraph_retries')
            patch.set_value(prop_path, subgraph_retries)
        if not inputs_file:
            inputs_file = self._get_inputs_in_temp_file({}, 'manager')
        cfy.bootstrap(
            blueprint_path=blueprint_path,
            inputs=inputs_file,
            install_plugins=install_plugins,
            keep_up_on_failure=keep_up_on_failure,
            validate_only=validate_only,
            task_retries=task_retries,
            task_retry_interval=task_retry_interval,
            verbose=verbose).wait()

def _configure_from_file():
    from cloudify_cli import utils
    config_path = utils.get_configuration_path()
    with open(config_path) as f:
        logging_config = yaml.safe_load(f.read())['logging']
    loggers_config = logging_config['loggers']
    logfile = logging_config['filename']

    # set filename on file handler
    logger_dict = copy.deepcopy(logger_config.LOGGER)
    logger_dict['handlers']['file']['filename'] = logfile
    logfile_dir = os.path.dirname(logfile)
    if not os.path.exists(logfile_dir):
        os.makedirs(logfile_dir)

    # add handlers to every logger specified in the file
    loggers = {}
    for logger_name in loggers_config:
        loggers[logger_name] = {
            'handlers': list(logger_dict['handlers'].keys())
        }
    logger_dict['loggers'] = loggers

    # set level for each logger
    for logger_name, logging_level in loggers_config.items():
        log = logging.getLogger(logger_name)
        level = logging.getLevelName(logging_level.upper())
        log.setLevel(level)
        _all_loggers.add(logger_name)

    logging.config.dictConfig(logger_dict)

def update_config_file(resolver_configuration):
    config_path = utils.get_configuration_path()
    with open(config_path, 'a') as f:
        yaml.dump(resolver_configuration, f)
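
A minimal usage sketch for `update_config_file`: it appends whatever mapping it is given to the CLI configuration file, so the caller supplies the resolver section. The `import_resolver` key and its `implementation`/`parameters` fields below are illustrative assumptions, not taken from this code; check the actual configuration schema before relying on them.

```python
# Hypothetical resolver section appended to the CLI config file.
# Key names ('import_resolver', 'implementation', 'parameters') are
# assumptions for illustration only.
resolver_configuration = {
    'import_resolver': {
        'implementation': 'my_package.my_module:MyCustomResolver',
        'parameters': {
            'rules': [{'http://old.host': 'http://new.host'}],
        },
    },
}
update_config_file(resolver_configuration)
```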