def set_remote_conf_from_file(config_yml):
    """Merge remote configuration listed under REMOTE_URL in a YAML file.

    Each URL is parsed; 'file' endpoints are loaded from the local path and
    'http'/'https' endpoints are fetched over the network. Any other scheme
    is silently skipped.
    """
    remote_urls: list = utils.load_yaml_from_file(config_yml).get('REMOTE_URL', [])
    for remote_url in remote_urls:
        parsed = utils.parse_endpoint(remote_url)
        scheme = parsed['scheme']
        if scheme == 'file':
            set_global(**utils.load_yaml_from_file(f'{parsed["path"]}'))
        elif scheme in ('http', 'https'):
            set_global(**utils.load_yaml_from_url(remote_url))
def setUpClass(cls):
    """Load Azure credentials from the YAML file named by AZURE_CRED."""
    conf = utils.load_yaml_from_file(os.environ.get('AZURE_CRED'))
    cls.schema = 'azure_client_secret'
    cls.azure_credentials = conf.get('AZURE_CREDENTIALS', {})
    super().setUpClass()
def setUpClass(cls):
    """Initialize statistics config and an in-memory mongomock connection."""
    config.init_conf(package='spaceone.statistics')
    connect('test', host='mongomock://localhost')
    conf_file = os.environ.get('SPACEONE_CONFIG_FILE')
    # NOTE(review): the loaded config is unused — presumably kept so a bad
    # config file fails setup early; confirm before removing.
    test_config = utils.load_yaml_from_file(conf_file)
    super().setUpClass()
def setUpClass(cls):
    """Build a gRPC client per configured endpoint, then run the scenario.

    Reads cls.config_uri / cls.scenario_uri / cls.scenario_params, which are
    expected to be set outside this view (e.g. by a subclass).
    """
    try:
        # The 'GLOBAL' section of the config file holds the test settings.
        cls.config = utils.load_yaml_from_file(cls.config_uri).get(
            'GLOBAL', {})
        endpoints = cls.config.get('ENDPOINTS', {})
        # version = 'v1'
        version = _guess_version(endpoints)
        # Create client of endpoints
        cls.client = {}
    except Exception as e:
        # NOTE(review): if this branch runs, 'version' and 'cls.client' are
        # never bound, so the loop below would raise NameError/AttributeError
        # as soon as 'endpoints' is non-empty — here it is reset to {}, so
        # the loop body is skipped and only the scenario try/except runs.
        print(e)
        _LOGGER.warning(f'SKIP Load config')
        endpoints = {}
    for (endpoint, v) in endpoints.items():
        try:
            # Each endpoint gets a client plus a same-named class attribute
            # so tests can use e.g. cls.inventory directly.
            cls.client[endpoint] = pygrpc.client(endpoint=v.get(version),
                                                 version=version)
            setattr(cls, endpoint, cls.client[endpoint])
            _LOGGER.debug(f"Initialize {endpoint}")
        except Exception as e:
            # Best effort: a single unreachable endpoint does not abort setup.
            _LOGGER.error(f"Fail to connector: {endpoint}")
            _LOGGER.error(e)
    try:
        # Run the scenario against the connected clients; failures are
        # reported but do not raise out of setUpClass.
        cls.scenario_obj = Scenario(cls.scenario_uri, cls.client,
                                    cls.scenario_params)
        cls.domain = cls.scenario_obj.run_scenario()
        cls.meta = cls.scenario_obj.get_meta()
    except Exception as e:
        traceback.print_exc()
def init_var(var_file: str, var: tuple):
    """Seed the variable store from an optional YAML file, then CLI inputs."""
    if var_file:
        loaded = utils.load_yaml_from_file(var_file)
        # Only a mapping carries a usable 'var' section.
        if isinstance(loaded, dict):
            set_var(loaded.get('var', {}))
    set_var(_parse_inputs(var))
def _load_template(service, resource, columns, template_path):
    """Resolve a display template.

    Priority: an explicit comma-separated column string, then a template
    file path, then the stored template for service/resource.
    """
    if columns:
        return {'template': {'list': columns.split(',')}}
    if template_path:
        return load_yaml_from_file(template_path)
    return get_template(service, resource)
def setUpClass(cls):
    """Prepare Azure VM connector test fixtures from TEST_CONFIG."""
    config.init_conf(package='spaceone.inventory')
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    cls.schema = 'azure_client_secret'
    cls.azure_credentials = conf.get('AZURE_CREDENTIALS', {})
    cls.azure_connector = AzureVMConnector(Transaction(), {})
    super().setUpClass()
def _get_default_template(service, resource):
    """Load the bundled default template for service.resource, or None.

    Best effort: any failure (missing file, unparsable YAML, non-dict
    content) yields None rather than an exception.
    """
    try:
        template = utils.load_yaml_from_file(
            os.path.join(DEFAULT_TEMPLATE_DIR, f'{service}.{resource}.yml'))
        template['type'] = 'default'
        return template
    except Exception:
        return None
def _get_my_template(service, resource):
    """Load the user's custom template for service.resource, or None.

    Best effort: any failure (missing file, unparsable YAML, non-dict
    content) yields None rather than an exception.
    """
    try:
        template = utils.load_yaml_from_file(
            os.path.join(TEMPLATE_DIR, f'{service}.{resource}.yml'))
        template['type'] = 'custom'
        return template
    except Exception:
        return None
def apply_input(env, var, var_file):
    """Apply env/var inputs to the store: file values first, CLI overrides last."""
    file_data = {} if var_file is None else utils.load_yaml_from_file(var_file)
    set_var(file_data.get("var", {}))
    set_var(parse_key_value(var))
    set_env(file_data.get("env", {}))
    set_env(parse_key_value(env))
def load_template(service, resource, columns, template_path=None):
    """Resolve a template: explicit column list, else a file, else the stored one."""
    if columns:
        return {'template': {'list': columns}}
    if template_path is not None:
        return load_yaml_from_file(template_path)
    return get_template(service, resource)
def setUpClass(cls):
    """Set up a MonitoringPluginConnector using TEST_CONFIG settings."""
    config.init_conf(package='spaceone.monitoring')
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    cls.transaction = Transaction()
    cls.connector_conf = conf.get('MonitoringPluginConnector', {})
    cls.mp_connector = MonitoringPluginConnector(cls.transaction, {})
    super().setUpClass()
def set(resource, file_path):
    """Set resource template"""
    service, resource = _get_service_and_resource(resource)
    # Guard clause: a template file is mandatory for this command.
    if not file_path:
        raise Exception("'--file' option is required.")
    set_template(service, resource, load_yaml_from_file(file_path))
def setUpClass(cls):
    """Set up a DataSourcePluginConnector in mock mode."""
    config.init_conf(package='spaceone.monitoring')
    config.set_service_config()
    config.set_global(MOCK_MODE=True)
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    cls.transaction = Transaction()
    cls.connector_conf = conf.get('MonitoringPluginConnector', {})
    cls.dsp_connector = DataSourcePluginConnector(cls.transaction, {})
    super().setUpClass()
def import_config(import_file_path, environment=None):
    """Import a YAML config file into the given (or current) environment.

    Args:
        import_file_path: Path of the YAML file to import.
        environment: Target environment name; defaults to the active one.

    Raises:
        Exception: if the import file cannot be read/parsed or saved.
    """
    if environment is None:
        environment = get_environment()
    environment_path = os.path.join(ENVIRONMENT_DIR, f'{environment}.yml')
    try:
        data = utils.load_yaml_from_file(import_file_path)
        utils.save_yaml_to_file(data, environment_path)
    except Exception as e:
        # BUG FIX: chain the original error so the real cause (YAML parse
        # error, permission error, ...) is not silently discarded.
        raise Exception(
            f'Import file format is invalid. (file = {import_file_path})') from e
def setUpClass(cls):
    """Build VM scale set connector/manager with Azure credentials."""
    config.init_conf(package='spaceone.power_scheduler')
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    cls.schema = 'azure_client_secret'
    cls.azure_credentials = conf.get('AZURE_CREDENTIALS', {})
    cls.vmss_connector = AzureVmScaleSetConnector(
        transaction=Transaction(), config={},
        secret_data=cls.azure_credentials)
    cls.vmss_manager = AzureVmScaleSetManager(Transaction())
    super().setUpClass()
def setUpClass(cls):
    """Load AWS credentials plus resource/metric fixtures for boto tests."""
    config.init_conf(package='spaceone.monitoring')
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    cls.schema = 'aws_access_key'
    cls.aws_credentials = conf.get('AWS_CREDENTIALS', {})
    cls.resource = conf.get('RESOURCE')
    cls.metric = conf.get('METRIC')
    cls.aws_connector = AWSBotoConnector(Transaction(), {})
    super().setUpClass()
def setUpClass(cls):
    """Create an IdentityConnector bound to the test token and domain."""
    config.init_conf(package='spaceone.notification')
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    cls.transaction = Transaction({'token': conf.get('access_token')})
    cls.domain_id = conf.get('domain_id')
    cls.connector_conf = conf.get('IdentityConnector')
    cls.identity_connector = IdentityConnector(cls.transaction,
                                               cls.connector_conf)
    super().setUpClass()
def load(self, file_path):
    """Load a task YAML file, recursively following its 'import' list first."""
    data = utils.load_yaml_from_file(file_path)
    if "import" in data:
        # Imported paths are resolved relative to the current file.
        base_dir = Path(file_path).parent
        for import_file in data["import"]:
            self.load(os.path.join(base_dir, import_file))
    store.set_var(data.get('var', {}))
    store.set_env(data.get('env', {}))
    for task in data.get("tasks", []):
        self.task_queue.append(task)
def setUpClass(cls):
    """Build application gateway connector/manager from Azure credentials."""
    config.init_conf(package='spaceone.inventory')
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    cls.schema = 'azure_client_secret'
    cls.azure_credentials = conf.get('AZURE_CREDENTIALS', {})
    cls.application_gateway_connector = ApplicationGatewayConnector(
        transaction=Transaction(), config={},
        secret_data=cls.azure_credentials)
    cls.application_gateway_manager = ApplicationGatewayManager(Transaction())
    super().setUpClass()
def load(self, file_path: str):
    """Load a task file, recursively pulling in any files it imports.

    Tasks are validated with _check_task before being queued.
    """
    data: dict = utils.load_yaml_from_file(file_path)
    # Import paths are relative to the file currently being loaded.
    base_dir = Path(file_path).parent
    for import_file in data.get('import', []):
        self.load(os.path.join(base_dir, import_file))
    store.set_var(data.get('var', {}))
    store.set_env(data.get('env', {}))
    for task in data.get('tasks', []):
        self._check_task(task)
        self.task_queue.append(task)
def get_environment():
    """Return the currently selected environment name from the config file.

    Raises:
        Exception: if the config file is missing/unreadable, or no
            environment has been selected yet.
    """
    try:
        conf = utils.load_yaml_from_file(ENVIRONMENT_CONF_PATH)
    except Exception:
        raise Exception(
            'spaceconfig is undefined. (Use "spacectl config init")')
    env = conf.get('environment')
    if env:
        return env
    raise Exception(
        'The environment is not set. Switch the environment. (Use "spacectl config environment --help")'
    )
def import_remote_conf(uri):
    """Load configuration from a file/http(s)/consul URI and merge it globally.

    Args:
        uri: Endpoint URI; the scheme selects the loader. Unknown schemes
            are ignored.
    """
    endpoint = utils.parse_endpoint(uri)
    scheme = endpoint.get('scheme')
    # BUG FIX: 'remote_conf' was unbound for unrecognized schemes, so the
    # isinstance check below raised NameError instead of skipping quietly.
    remote_conf = None
    if scheme == 'file':
        remote_conf = utils.load_yaml_from_file(endpoint['path'])
    elif scheme in ['http', 'https']:
        remote_conf = utils.load_yaml_from_url(uri)
    elif scheme == 'consul':
        remote_conf = load_consul_config(endpoint)
    if isinstance(remote_conf, dict):
        set_global(**remote_conf)
class TestCloudServiceAPIs(TestCase):
    """Integration tests for the inventory Collector API.

    NOTE(review): the config load below runs at class-definition time, so a
    missing/bad config file fails module import, not an individual test.
    'self.inventory' is expected to be provided by setup outside this view
    — confirm against the base class.
    """

    config = utils.load_yaml_from_file(
        os.environ.get('SPACEONE_TEST_CONFIG_FILE', './config.yml'))
    endpoints = config.get('ENDPOINTS', {})
    secret_data = {
        'aws_access_key_id': AKI,
        'aws_secret_access_key': SAK,
    }
    # Optional credentials: only included when the environment provided them.
    if ROLE_ARN is not None:
        secret_data.update({
            'role_arn': ROLE_ARN
        })
    if REGION_NAME is not None:
        secret_data.update({
            'region_name': REGION_NAME
        })

    def test_init(self):
        # Collector.init with empty options; result is printed, not asserted.
        v_info = self.inventory.Collector.init({'options': {}})
        print_json(v_info)

    def test_verify(self):
        options = {
            'domain': 'mz.co.kr'
        }
        v_info = self.inventory.Collector.verify({'options': options, 'secret_data': self.secret_data})
        print_json(v_info)

    def test_collect(self):
        # Restrict collection to the Lightsail cloud service type.
        options = {
            'cloud_service_types': [
                'Lightsail'
            ]
        }
        # options = {}
        filter = {}
        res_stream = self.inventory.Collector.collect(
            {'options': options, 'secret_data': self.secret_data, 'filter': filter}
        )
        # Streamed results: only assert each item exists.
        for res in res_stream:
            self.assertIsNotNone(res)
            print_json(res)
            # self.assertEqual('CLOUD_SERVICE', res.resource_type)
def get_config(key=None, default=None, environment=None):
    """Return the whole config dict for an environment, or one key's value.

    Raises:
        Exception: if the environment's config file cannot be loaded.
    """
    if environment is None:
        environment = get_environment()
    try:
        data = utils.load_yaml_from_file(
            os.path.join(ENVIRONMENT_DIR, f'{environment}.yml'))
    except Exception:
        raise Exception(
            'spaceconfig is undefined. (Use "spacectl config init")')
    return data.get(key, default) if key else data
def set_file_conf(config_yml: str):
    """Apply GLOBAL settings from a YAML file and import remote configs.

    Remote URIs are read from 'IMPORT'; 'REMOTE_URL' is still honored for
    backward compatibility (DEPRECATED: REMOTE_URL setting changed to IMPORT).
    """
    file_conf: dict = utils.load_yaml_from_file(config_yml)
    set_global(**file_conf.get('GLOBAL', {}))
    for section in ('IMPORT', 'REMOTE_URL'):
        uris = file_conf.get(section, [])
        if isinstance(uris, list):
            for uri in uris:
                import_remote_conf(uri)
def setUpClass(cls):
    """Create a SecretConnector in mock mode using TEST_CONFIG."""
    config.init_conf(package='spaceone.monitoring')
    config.set_service_config()
    config.set_global(MOCK_MODE=True)
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    cls.transaction = Transaction({'token': conf.get('access_token')})
    cls.domain_id = conf.get('domain_id')
    cls.connector_conf = conf.get('SecretConnector')
    cls.secret_connector = SecretConnector(cls.transaction,
                                           cls.connector_conf)
    super().setUpClass()
class TestCloudServiceAPIs(TestCase):
    """Integration tests for the inventory Collector API (flagged resources).

    NOTE(review): the config load below runs at class-definition time, so a
    missing/bad config file fails module import, not an individual test.
    'self.inventory' is expected to be provided by setup outside this view.
    """

    config = utils.load_yaml_from_file(
        os.environ.get('SPACEONE_TEST_CONFIG_FILE', './config.yml'))
    endpoints = config.get('ENDPOINTS', {})
    secret_data = {
        'aws_access_key_id': AKI,
        'aws_secret_access_key': SAK,
    }
    # Optional credentials: only included when the environment provided them.
    if ROLE_ARN is not None:
        secret_data.update({'role_arn': ROLE_ARN})
    if REGION_NAME is not None:
        secret_data.update({'region_name': REGION_NAME})

    def test_init(self):
        # Collector.init with empty options; result is printed, not asserted.
        v_info = self.inventory.Collector.init({'options': {}})
        print_json(v_info)

    def test_verify(self):
        options = {'domain': 'mz.co.kr'}
        v_info = self.inventory.Collector.verify({
            'options': options,
            'secret_data': self.secret_data
        })
        print_json(v_info)

    def test_collect(self):
        options = {}
        filter = {}
        res_stream = self.inventory.Collector.collect({
            'options': options,
            'secret_data': self.secret_data,
            'filter': filter
        })
        for res in res_stream:
            res_json = to_json(res)
            try:
                # Only print resources that actually have flagged entries.
                if res_json['resource']['data']['resources_summary'][
                        'resources_flagged'] > 0:
                    print_json(res)
            # BUG FIX: was a bare 'except: pass', which also swallowed
            # KeyboardInterrupt/SystemExit; only missing or mis-shaped
            # keys in the result should be ignored.
            except (KeyError, TypeError):
                pass
def setUpClass(cls):
    """Create a SnapshotConnector with credentials from TEST_CONFIG."""
    config.init_conf(package='spaceone.inventory')
    conf = utils.load_yaml_from_file(os.environ.get('TEST_CONFIG'))
    secret = conf.get('AZURE_CREDENTIALS', {})
    cls.schema = 'azure_client_secret'
    cls.azure_credentials = {'secret_data': secret}
    cls.azure_connector = SnapshotConnector(transaction=Transaction(),
                                            config={}, secret_data=secret)
    super().setUpClass()
def load_config(config_yaml_file, service=None):
    """Initialize global config from a YAML file's GLOBAL section.

    Returns:
        The resulting global configuration (from get_global()).
    """
    global_conf = utils.load_yaml_from_file(config_yaml_file).get('GLOBAL', {})
    # Fall back to the SERVICE declared in the file when not given explicitly.
    if service is None:
        service = global_conf.get('SERVICE', None)
    init_conf(service=service,
              server_type=global_conf.get('SERVER_TYPE', None),
              port=global_conf.get('PORT', None))
    set_default_conf()
    set_global(**global_conf)
    return get_global()