    def test_from_invalid_mongodb_yamls(self):
        """Test creating Config object using invalid YAML configuration directory"""

        # Initialising a Config object from an invalid MongoDB tap YAML should raise InvalidConfigException
        yaml_config_dir = '{}/resources/test_invalid_tap_mongo_yaml_config'.format(
            os.path.dirname(__file__))
        vault_secret = '{}/resources/vault-secret.txt'.format(
            os.path.dirname(__file__))
        print(yaml_config_dir)
        with pytest.raises(InvalidConfigException):
            Config.from_yamls(PIPELINEWISE_TEST_HOME, yaml_config_dir,
                              vault_secret)

    # Example 2
    def test_constructor(self):
        """Test Config construction functions"""
        config = Config(PIPELINEWISE_TEST_HOME)

        # config dir and path should be generated automatically
        assert config.config_dir == PIPELINEWISE_TEST_HOME
        assert config.config_path == '{}/config.json'.format(
            PIPELINEWISE_TEST_HOME)
        assert config.targets == {}

    def test_save_config_with_optional_slack_channel_for_alerts(self):
        """Test config target and tap JSON save functionality when the optional Slack channel setting is present"""

        # Load a full configuration set from YAML files
        yaml_config_dir = '{}/resources/test_yaml_config_with_slack_channel'.format(
            os.path.dirname(__file__))
        vault_secret = '{}/resources/vault-secret.txt'.format(
            os.path.dirname(__file__))

        json_config_dir = './pipelinewise-test-config'
        config = Config.from_yamls(json_config_dir, yaml_config_dir,
                                   vault_secret)

        # Save the config as singer compatible JSON files
        config.save()

        # Build the path to the generated main config JSON file
        main_config_json = '{}/config.json'.format(json_config_dir)

        # Check the content of the generated main config JSON
        actual_config_json = cli.utils.load_json(main_config_json)

        expected_taps_config = [{
            'id': 'mysql_sample_1',
            'type': 'tap-mysql',
            'name': 'Sample MySQL Database',
            'owner': '*****@*****.**',
            'stream_buffer_size': None,
            'send_alert': True,
            'enabled': True,
            'slack_alert_channel': '#test-channel_1'
        }, {
            'id': 'mysql_sample_2',
            'type': 'tap-mysql',
            'name': 'Sample MySQL Database',
            'owner': '*****@*****.**',
            'stream_buffer_size': None,
            'send_alert': True,
            'enabled': True,
            'slack_alert_channel': '#test-channel_2'
        }, {
            'id': 'mysql_sample_3',
            'type': 'tap-mysql',
            'name': 'Sample MySQL Database',
            'owner': '*****@*****.**',
            'stream_buffer_size': None,
            'send_alert': True,
            'enabled': True,
        }]

        assert len(actual_config_json['targets'][0]['taps']) == 3

        for tap_config in expected_taps_config:
            assert tap_config in actual_config_json['targets'][0]['taps']

        # Delete the generated JSON config directory
        shutil.rmtree(json_config_dir)

    # Example 4
    def test_get_target(self):
        """Selecting target by ID should append connector files"""
        # Get target definitions from JSON file
        targets = cli.utils.load_json('{}/config.json'.format(CONFIG_DIR)).get(
            'targets', [])
        exp_target_one = next(
            (item for item in targets if item['id'] == 'target_one'), False)
        exp_target_two = next(
            (item for item in targets if item['id'] == 'target_two'), False)

        # Append the connector file paths to the expected targets
        exp_target_one['files'] = Config.get_connector_files(
            '{}/target_one'.format(CONFIG_DIR))
        exp_target_two['files'] = Config.get_connector_files(
            '{}/target_two'.format(CONFIG_DIR))

        # Getting a target by ID should match the original JSON and should contain the connector files list
        assert self.pipelinewise.get_target('target_one') == exp_target_one
        assert self.pipelinewise.get_target('target_two') == exp_target_two

    # Example 5
    def test_connector_files(self):
        """Every singer connector must have a list of JSON files at certain locations"""
        assert Config.get_connector_files('/var/singer-connector') == {
            'config': '/var/singer-connector/config.json',
            'inheritable_config': '/var/singer-connector/inheritable_config.json',
            'properties': '/var/singer-connector/properties.json',
            'state': '/var/singer-connector/state.json',
            'transformation': '/var/singer-connector/transformation.json',
            'selection': '/var/singer-connector/selection.json',
            'pidfile': '/var/singer-connector/pipelinewise.pid',
        }
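
    # A minimal sketch of what Config.get_connector_files presumably returns, inferred
    # only from the assertion above; the real implementation in pipelinewise.cli.config
    # may differ, and the helper name below is hypothetical.
    @staticmethod
    def _connector_files_sketch(connector_dir):
        """Hypothetical helper: map the well-known singer file names to paths under connector_dir"""
        return {
            'config': os.path.join(connector_dir, 'config.json'),
            'inheritable_config': os.path.join(connector_dir, 'inheritable_config.json'),
            'properties': os.path.join(connector_dir, 'properties.json'),
            'state': os.path.join(connector_dir, 'state.json'),
            'transformation': os.path.join(connector_dir, 'transformation.json'),
            'selection': os.path.join(connector_dir, 'selection.json'),
            'pidfile': os.path.join(connector_dir, 'pipelinewise.pid'),
        }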

    # Example 6
    def test_from_invalid_yamls(self):
        """Test creating Config object using invalid YAML configuration directory"""

        # TODO: Make behaviours consistent.
        #   In some cases it raises an exception, in other cases it exits

        # Initialising a Config object with a non-existent directory should raise an exception
        with pytest.raises(Exception):
            Config.from_yamls(PIPELINEWISE_TEST_HOME,
                              'not-existing-yaml-config-directory')

        # Initialising a Config object with a tap that references an unknown target should exit
        yaml_config_dir = '{}/resources/test_invalid_yaml_config'.format(
            os.path.dirname(__file__))
        vault_secret = '{}/resources/vault-secret.txt'.format(
            os.path.dirname(__file__))

        with pytest.raises(SystemExit) as pytest_wrapped_e:
            Config.from_yamls(PIPELINEWISE_TEST_HOME, yaml_config_dir,
                              vault_secret)
        assert pytest_wrapped_e.type == SystemExit
        assert pytest_wrapped_e.value.code == 1

    # Example 7
    def test_get_tap(self):
        """Getting tap by ID should return status, connector and target props as well"""
        # Get target definitions from JSON file
        targets = cli.utils.load_json('{}/config.json'.format(CONFIG_DIR)).get(
            'targets', [])
        target_one = next(
            (item for item in targets if item['id'] == 'target_one'), False)

        # Append the tap status, files and target keys to the tap
        exp_tap_one = target_one['taps'][0]
        exp_tap_one['status'] = self.pipelinewise.detect_tap_status(
            'target_one', exp_tap_one['id'])
        exp_tap_one['files'] = Config.get_connector_files(
            '{}/target_one/tap_one'.format(CONFIG_DIR))
        exp_tap_one['target'] = self.pipelinewise.get_target('target_one')

        # Getting a tap by ID should match the original JSON and should contain status, connector files and target props
        assert self.pipelinewise.get_tap('target_one',
                                         'tap_one') == exp_tap_one

    # Example 8
    def test_getters(self):
        """Test Config getter functions"""
        config = Config(PIPELINEWISE_TEST_HOME)

        # Temp, target and tap directories should be generated automatically under the config dir
        assert config.get_temp_dir() == '{}/tmp'.format(PIPELINEWISE_TEST_HOME)
        assert config.get_target_dir(
            'test-target-id') == '{}/test-target-id'.format(
                PIPELINEWISE_TEST_HOME)
        assert config.get_tap_dir(
            'test-target-id',
            'test-tap-id') == '{}/test-target-id/test-tap-id'.format(
                PIPELINEWISE_TEST_HOME)

        assert config.get_connector_files('/var/singer-connector') == {
            'config': '/var/singer-connector/config.json',
            'inheritable_config': '/var/singer-connector/inheritable_config.json',
            'properties': '/var/singer-connector/properties.json',
            'state': '/var/singer-connector/state.json',
            'transformation': '/var/singer-connector/transformation.json',
            'selection': '/var/singer-connector/selection.json',
            'pidfile': '/var/singer-connector/pipelinewise.pid',
        }

    # Example 9
    def test_from_yamls(self):
        """Test creating Config object using YAML configuration directory as the input"""

        # Create Config object by parsing target and tap YAMLs in a directory
        yaml_config_dir = f'{os.path.dirname(__file__)}/resources/test_yaml_config'

        vault_secret = f'{os.path.dirname(__file__)}/resources/vault-secret.txt'

        # Parse YAML files and create the config object
        config = Config.from_yamls(PIPELINEWISE_TEST_HOME, yaml_config_dir,
                                   vault_secret)

        # config dir and path should be generated automatically
        assert config.config_dir == PIPELINEWISE_TEST_HOME
        assert config.config_path == f'{PIPELINEWISE_TEST_HOME}/config.json'

        # Vault encrypted alert handlers should be loaded into global config
        assert config.global_config == {
            'alert_handlers': {
                'slack': {
                    'token': 'Vault Encrypted Secret Fruit',
                    'channel': '#slack-channel',
                }
            }
        }

        # The target dictionary should contain every target and tap parsed from YAML files
        assert config.targets == {
            'test_snowflake_target': {
                'id': 'test_snowflake_target',
                'name': 'Test Target Connector',
                'type': 'target-snowflake',
                'db_conn': {
                    'account': 'account',
                    'aws_access_key_id': 'access_key_id',
                    'aws_secret_access_key': 'secret_access_key',
                    'client_side_encryption_master_key': 'master_key',
                    'dbname': 'foo_db',
                    'file_format': 'foo_file_format',
                    'password': '******',
                    's3_bucket': 's3_bucket',
                    's3_key_prefix': 's3_prefix/',
                    'stage': 'foo_stage',
                    'user': '******',
                    'warehouse': 'MY_WAREHOUSE',
                },
                'files': {
                    'config':
                    f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/config.json',
                    'inheritable_config':
                    f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/inheritable_config.json',
                    'properties':
                    f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/properties.json',
                    'selection':
                    f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/selection.json',
                    'state':
                    f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/state.json',
                    'transformation':
                    f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/transformation.json',
                    'pidfile':
                    f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/pipelinewise.pid',
                },
                'taps': [{
                    'id': 'mysql_sample',
                    'name': 'Sample MySQL Database',
                    'type': 'tap-mysql',
                    'owner': '*****@*****.**',
                    'target': 'test_snowflake_target',
                    'batch_size_rows': 20000,
                    'batch_wait_limit_seconds': 3600,
                    'split_large_files': True,
                    'split_file_chunk_size_mb': 500,
                    'split_file_max_chunks': 25,
                    'db_conn': {
                        'dbname': '<DB_NAME>',
                        'host': '<HOST>',
                        'password': '******',
                        'port': 3306,
                        'user': '******',
                    },
                    'files': {
                        'config':
                        f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/mysql_sample/config.json',
                        'inheritable_config':
                        f'{PIPELINEWISE_TEST_HOME}'
                        f'/test_snowflake_target/mysql_sample/inheritable_config.json',
                        'properties':
                        f'{PIPELINEWISE_TEST_HOME}/'
                        f'test_snowflake_target/mysql_sample/properties.json',
                        'selection':
                        f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/mysql_sample/selection.json',
                        'state':
                        f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/mysql_sample/state.json',
                        'transformation':
                        f'{PIPELINEWISE_TEST_HOME}'
                        f'/test_snowflake_target/mysql_sample/transformation.json',
                        'pidfile':
                        f'{PIPELINEWISE_TEST_HOME}/test_snowflake_target/mysql_sample/pipelinewise.pid',
                    },
                    'schemas': [{
                        'source_schema': 'my_db',
                        'target_schema': 'repl_my_db',
                        'target_schema_select_permissions': ['grp_stats'],
                        'tables': [
                            {
                                'table_name': 'table_one',
                                'replication_method': 'INCREMENTAL',
                                'replication_key': 'last_update',
                            },
                            {
                                'table_name': 'table_two',
                                'replication_method': 'LOG_BASED',
                            },
                        ],
                    }],
                }],
            }
        }

    # Example 10
    def test_save_config(self):
        """Test config target and tap JSON save functionalities"""

        # Load a full configuration set from YAML files
        yaml_config_dir = '{}/resources/test_yaml_config'.format(
            os.path.dirname(__file__))
        vault_secret = '{}/resources/vault-secret.txt'.format(
            os.path.dirname(__file__))

        json_config_dir = './pipelinewise-test-config'
        config = Config.from_yamls(json_config_dir, yaml_config_dir,
                                   vault_secret)

        # Save the config as singer compatible JSON files
        config.save()

        # Check that every required JSON file is created, both for target and tap
        main_config_json = '{}/config.json'.format(json_config_dir)
        target_config_json = '{}/test_snowflake_target/config.json'.format(
            json_config_dir)
        tap_config_json = '{}/test_snowflake_target/mysql_sample/config.json'.format(
            json_config_dir)
        tap_inheritable_config_json = (
            '{}/test_snowflake_target/mysql_sample/inheritable_config.json'.
            format(json_config_dir))
        tap_selection_json = (
            '{}/test_snowflake_target/mysql_sample/selection.json'.format(
                json_config_dir))
        tap_transformation_json = (
            '{}/test_snowflake_target/mysql_sample/transformation.json'.format(
                json_config_dir))

        # Check content of the generated JSON files
        assert cli.utils.load_json(main_config_json) == {
            'alert_handlers': {
                'slack': {
                    'token': 'Vault Encrypted Secret Fruit',
                    'channel': '#slack-channel',
                }
            },
            'targets': [{
                'id': 'test_snowflake_target',
                'type': 'target-snowflake',
                'name': 'Test Target Connector',
                'status': 'ready',
                'taps': [{
                    'id': 'mysql_sample',
                    'type': 'tap-mysql',
                    'name': 'Sample MySQL Database',
                    'owner': '*****@*****.**',
                    'stream_buffer_size': None,
                    'send_alert': True,
                    'enabled': True,
                }],
            }],
        }
        assert cli.utils.load_json(target_config_json) == {
            'account': 'account',
            'aws_access_key_id': 'access_key_id',
            'aws_secret_access_key': 'secret_access_key',
            'client_side_encryption_master_key': 'master_key',
            'dbname': 'foo_db',
            'file_format': 'foo_file_format',
            'password': '******',
            's3_bucket': 's3_bucket',
            's3_key_prefix': 's3_prefix/',
            'stage': 'foo_stage',
            'user': '******',
            'warehouse': 'MY_WAREHOUSE',
        }
        assert cli.utils.load_json(tap_config_json) == {
            'dbname': '<DB_NAME>',
            'host': '<HOST>',
            'port': 3306,
            'user': '******',
            'password': '******',
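            # server_id is not defined in the YAML (it is generated), so accept whatever
            # value was written by reading it back from the generated file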
            'server_id': cli.utils.load_json(tap_config_json)['server_id'],
        }
        assert cli.utils.load_json(tap_selection_json) == {
            'selection': [
                {
                    'replication_key': 'last_update',
                    'replication_method': 'INCREMENTAL',
                    'tap_stream_id': 'my_db-table_one',
                },
                {
                    'replication_method': 'LOG_BASED',
                    'tap_stream_id': 'my_db-table_two'
                },
            ]
        }
        assert cli.utils.load_json(tap_transformation_json) == {
            'transformations': []
        }
        assert cli.utils.load_json(tap_inheritable_config_json) == {
            'batch_size_rows': 20000,
            'batch_wait_limit_seconds': 3600,
            'data_flattening_max_level': 0,
            'flush_all_streams': False,
            'hard_delete': True,
            'parallelism': 0,
            'parallelism_max': 4,
            'primary_key_required': True,
            'schema_mapping': {
                'my_db': {
                    'target_schema': 'repl_my_db',
                    'target_schema_select_permissions': ['grp_stats'],
                }
            },
            'temp_dir': './pipelinewise-test-config/tmp',
            'tap_id': 'mysql_sample',
            'query_tag': '{"ppw_component": "tap-mysql", "tap_id": "mysql_sample", '
                         '"database": "{{database}}", "schema": "{{schema}}", "table": "{{table}}"}',
            'validate_records': False,
            'add_metadata_columns': False,
            'split_large_files': True,
            'split_file_chunk_size_mb': 500,
            'split_file_max_chunks': 25,
            'archive_load_files': False,
        }

        # Delete the generated JSON config directory
        shutil.rmtree(json_config_dir)
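
    # Note: the shutil.rmtree() cleanup above only runs if every assertion passes.
    # A minimal sketch of a variant that avoids manual cleanup, assuming the surrounding
    # class is a plain pytest test class (not unittest.TestCase) so the built-in tmp_path
    # fixture can be injected; the test name below is hypothetical and the assertion is
    # deliberately reduced to an existence check.
    def test_save_config_into_tmp_path_sketch(self, tmp_path):
        """Hypothetical variant of test_save_config using a pytest-managed temp directory"""
        yaml_config_dir = '{}/resources/test_yaml_config'.format(os.path.dirname(__file__))
        vault_secret = '{}/resources/vault-secret.txt'.format(os.path.dirname(__file__))

        # tmp_path is a per-test directory managed (and eventually pruned) by pytest,
        # so no shutil.rmtree() is needed even when an assertion fails
        json_config_dir = str(tmp_path / 'pipelinewise-test-config')
        config = Config.from_yamls(json_config_dir, yaml_config_dir, vault_secret)
        config.save()

        assert os.path.isfile('{}/config.json'.format(json_config_dir))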