Beispiel #1
0
def test_oneof_usage(connector_spec, should_fail):
    """Verify oneOf-usage validation: specs flagged as invalid must raise AssertionError."""
    tester = _TestSpec()

    def run_check():
        # Construct the spec inside the call so any constructor failure is
        # observed in the same context as the original inline expression.
        tester.test_oneof_usage(
            actual_connector_spec=ConnectorSpecification(
                connectionSpecification=connector_spec))

    if should_fail is True:
        with pytest.raises(AssertionError):
            run_check()
    else:
        run_check()
    def test_run_check(self, mocker, destination: Destination, tmp_path):
        """Check command: run_cmd must validate the config, call check(), and emit the wrapped result.

        Fix: the original invoked destination.run_cmd(parsed_args) once BEFORE any
        mocks were installed and discarded the result — dead code at best, a call
        into the real (unmocked) connector at worst. That premature call is removed.
        """
        file_path = tmp_path / "config.json"
        dummy_config = {"user": "******"}
        write_file(file_path, dummy_config)
        args = {"command": "check", "config": file_path}
        parsed_args = argparse.Namespace(**args)

        # Stub spec/check and the config-validation helper before running the command.
        spec_msg = ConnectorSpecification(connectionSpecification={})
        mocker.patch.object(destination, "spec", return_value=spec_msg)
        validate_mock = mocker.patch(
            "airbyte_cdk.destinations.destination.check_config_against_spec_or_exit"
        )
        expected_check_result = AirbyteConnectionStatus(
            status=Status.SUCCEEDED)
        mocker.patch.object(destination,
                            "check",
                            return_value=expected_check_result,
                            autospec=True)

        returned_check_result = next(iter(destination.run_cmd(parsed_args)))

        # verify check() was invoked exactly once with the parsed config
        destination.check.assert_called_once()  # type: ignore
        destination.check.assert_called_with(
            logger=ANY, config=dummy_config)  # type: ignore
        # config validation must have been run against the mocked spec
        validate_mock.assert_called_with(dummy_config, spec_msg,
                                         destination.logger)

        # verify output was correct
        assert _wrapped(expected_check_result) == returned_check_result
Beispiel #3
0
def test_airbyte_secret_is_masked_on_logger_output(source_spec, mocker, config,
                                                   caplog):
    """Values of airbyte_secret properties must never appear in logs; plain values must."""
    caplog.set_level(logging.DEBUG, logger="airbyte.test")
    caplog.handler.setFormatter(AirbyteLogFormatter())
    entrypoint = AirbyteEntrypoint(MockSource())
    args = Namespace(command="read", config="", state="", catalog="")
    mocker.patch.object(
        MockSource,
        "spec",
        return_value=ConnectorSpecification(
            connectionSpecification=source_spec),
    )
    mocker.patch.object(MockSource, "configure", return_value=config)
    mocker.patch.object(MockSource, "read_config", return_value=None)
    mocker.patch.object(MockSource, "read_state", return_value={})
    mocker.patch.object(MockSource, "read_catalog", return_value={})
    list(entrypoint.run(args))

    logged = caplog.text
    properties = source_spec["properties"].items()
    secret_values = [
        config[name] for name, prop in properties if prop.get("airbyte_secret")
    ]
    plain_values = [
        config[name] for name, prop in properties
        if not prop.get("airbyte_secret")
    ]
    # every secret value is masked; every plain value survives unmasked
    assert not any(str(value) in logged for value in secret_values)
    assert all(str(value) in logged for value in plain_values)
Beispiel #4
0
    def spec(self, logger: AirbyteLogger) -> ConnectorSpecification:
        """Tries to find and remove a spec data about local storage settings.

        Loads the parent connector's spec.json, converts each provider's
        "storage" enum to a const, and drops the local-storage provider option.

        Fix: the original removed the local-storage provider by calling
        ``.pop(i)`` inside ``for i in range(len(...))`` — mutating the list
        while iterating it by index, which skips elements and can raise
        IndexError once the list shrinks. Rebuild the list by filtering instead.
        """
        parent_code_dir = os.path.dirname(source_file.source.__file__)
        parent_spec_file = os.path.join(parent_code_dir, "spec.json")
        with open(parent_spec_file, "r") as f:
            spec = ConnectorSpecification.parse_obj(json.load(f))

        providers = spec.connectionSpecification["properties"]["provider"][
            "oneOf"]

        # correction of the "storage" property to const type
        for provider in providers:
            storage = provider["properties"]["storage"]
            if "enum" in storage:
                storage.pop("enum")
                storage["const"] = storage.pop("default")

        # drop the local-storage provider option without mutating while iterating
        spec.connectionSpecification["properties"]["provider"]["oneOf"] = [
            provider for provider in providers
            if provider["properties"]["storage"]["const"] != LOCAL_STORAGE_NAME
        ]
        return spec
Beispiel #5
0
 def spec(self, *args, **kwargs) -> ConnectorSpecification:
     """Returns the spec for this integration."""
     sync_modes = [
         DestinationSyncMode.overwrite,
         DestinationSyncMode.append,
         DestinationSyncMode.append_dedup,
     ]
     # OAuth flow wiring: all three output/server specifications come from
     # their pydantic models' generated JSON schemas.
     oauth_config = OAuthConfigSpecification(
         complete_oauth_output_specification=CompleteOauthOutputSpecification.schema(),
         complete_oauth_server_input_specification=CompleteOauthServerInputSpecification.schema(),
         complete_oauth_server_output_specification=CompleteOauthServerOutputSpecification.schema(),
     )
     return ConnectorSpecification(
         documentationUrl=DOCUMENTATION_URL,
         changelogUrl=DOCUMENTATION_URL,
         supportsIncremental=True,
         supported_destination_sync_modes=sync_modes,
         connectionSpecification=SourceTiktokMarketingSpec.schema(),
         additionalProperties=True,
         advanced_auth=AdvancedAuth(
             auth_flow_type=AuthFlowType.oauth2_0,
             predicate_key=["credentials", "auth_type"],
             predicate_value="oauth2.0",
             oauth_config_specification=oauth_config,
         ),
     )
Beispiel #6
0
    def spec(self, logger: logging.Logger) -> ConnectorSpecification:
        """
        Returns the spec for this integration. The spec is a JSON-Schema object describing the required configurations (e.g: username and password)
        required to run this integration. By default, this will be loaded from a "spec.yaml" or a "spec.json" in the package root.
        """
        root_package = self.__class__.__module__.split(".")[0]

        spec_yaml_data = load_optional_package_file(root_package, "spec.yaml")
        spec_json_data = load_optional_package_file(root_package, "spec.json")

        # Exactly one spec file may exist; both present is an authoring error.
        if spec_yaml_data and spec_json_data:
            raise RuntimeError(
                "Found multiple spec files in the package. Only one of spec.yaml or spec.json should be provided."
            )

        if spec_yaml_data:
            parsed_spec = yaml.load(spec_yaml_data, Loader=yaml.SafeLoader)
        elif spec_json_data:
            parsed_spec = json.loads(spec_json_data)
        else:
            raise FileNotFoundError(
                "Unable to find spec.yaml or spec.json in the package.")

        return ConnectorSpecification.parse_obj(parsed_spec)
Beispiel #7
0
 def spec(self, logger: AirbyteLogger) -> ConnectorSpecification:
     """
     Returns the spec for this integration. The spec is a JSON-Schema object describing the required configurations (e.g: username and password)
     required to run this integration.

     Raises:
         ValueError: if spec.json cannot be found in the package.
     """
     raw_spec = pkgutil.get_data(
         self.__class__.__module__.split(".")[0], "spec.json")
     # pkgutil.get_data returns None when the resource is missing; fail fast
     # with a clear message instead of letting json.loads raise an opaque
     # TypeError (matches the guarded variant used elsewhere in this codebase).
     if not raw_spec:
         raise ValueError("Unable to find spec.json.")
     return ConnectorSpecification.parse_obj(json.loads(raw_spec))
Beispiel #8
0
def test_non_airbyte_secrets_are_not_masked_on_uncaught_exceptions(
        mocker, caplog, capsys):
    """Non-secret values in an uncaught exception must survive unmasked in stdout and logs.

    Fix: the original's try/except ran its assertions only inside the except
    block — if entrypoint.run() ever stopped raising, the test passed vacuously
    without checking anything. An ``else: pytest.fail(...)`` now guards that.
    """
    caplog.set_level(logging.DEBUG, logger="airbyte.test")
    caplog.handler.setFormatter(AirbyteLogFormatter())

    class BrokenSource(MockSource):
        def read(
            self,
            logger: logging.Logger,
            config: Mapping[str, Any],
            catalog: ConfiguredAirbyteCatalog,
            state: MutableMapping[str, Any] = None,
        ):
            raise Exception("Exception:" + NOT_A_SECRET_VALUE)

    entrypoint = AirbyteEntrypoint(BrokenSource())
    parsed_args = Namespace(command="read", config="", state="", catalog="")
    source_spec = {
        "type": "object",
        "required": ["api_token"],
        "additionalProperties": False,
        "properties": {
            SECRET_PROPERTY: {
                "type": "string",
                "airbyte_secret": True
            },
            NOT_SECRET_PROPERTY: {
                "type": "string",
                "airbyte_secret": False
            },
        },
    }
    simple_config = {
        SECRET_PROPERTY: I_AM_A_SECRET_VALUE,
        NOT_SECRET_PROPERTY: NOT_A_SECRET_VALUE,
    }
    mocker.patch.object(
        MockSource,
        "spec",
        return_value=ConnectorSpecification(
            connectionSpecification=source_spec),
    )
    mocker.patch.object(MockSource, "configure", return_value=simple_config)
    mocker.patch.object(MockSource, "read_config", return_value=None)
    mocker.patch.object(MockSource, "read_state", return_value={})
    mocker.patch.object(MockSource, "read_catalog", return_value={})
    # NOTE: BrokenSource overrides read() itself; this patch is belt-and-braces
    # and raises the same exception either way.
    mocker.patch.object(MockSource,
                        "read",
                        side_effect=Exception("Exception:" +
                                              NOT_A_SECRET_VALUE))

    try:
        list(entrypoint.run(parsed_args))
    except Exception:
        sys.excepthook(*sys.exc_info())
        assert NOT_A_SECRET_VALUE in capsys.readouterr(
        ).out, "Should not have filtered non-secret value from exception trace message"
        assert NOT_A_SECRET_VALUE in caplog.text, "Should not have filtered non-secret value from exception log message"
    else:
        # Without this, a non-raising run() would skip every assertion above.
        pytest.fail("entrypoint.run was expected to raise an uncaught exception")
Beispiel #9
0
 def spec(self, *args, **kwargs) -> ConnectorSpecification:
     """Returns the spec for this integration."""
     supported_modes = [
         DestinationSyncMode.overwrite,
         DestinationSyncMode.append,
         DestinationSyncMode.append_dedup,
     ]
     return ConnectorSpecification(
         documentationUrl=DOCUMENTATION_URL,
         changelogUrl=DOCUMENTATION_URL,
         supportsIncremental=True,
         supported_destination_sync_modes=supported_modes,
         connectionSpecification=SourceTiktokMarketingSpec.schema(),
     )
Beispiel #10
0
 def spec(self, logger: logging.Logger) -> ConnectorSpecification:
     """
     Returns the spec for this integration. The spec is a JSON-Schema object describing the required configurations (e.g: username and password)
     required to run this integration.
     """
     root_package = self.__class__.__module__.split(".")[0]
     raw_spec: Optional[bytes] = pkgutil.get_data(root_package, "spec.json")
     # get_data returns None when the resource is missing
     if not raw_spec:
         raise ValueError("Unable to find spec.json.")
     return ConnectorSpecification.parse_obj(json.loads(raw_spec))
Beispiel #11
0
 def spec(self, *args, **kwargs) -> ConnectorSpecification:
     """
     Returns the spec for this integration. The spec is a JSON-Schema object describing the required configurations (e.g: username and password)
     required to run this integration.
     """
     docs_url = "https://docs.airbyte.io/integrations/sources/facebook-marketing"
     return ConnectorSpecification(
         documentationUrl=docs_url,
         changelogUrl=docs_url,
         supportsIncremental=True,
         supported_destination_sync_modes=[DestinationSyncMode.append],
         connectionSpecification=ConnectorConfig.schema(),
     )
Beispiel #12
0
 def spec(self, *args, **kwargs) -> ConnectorSpecification:
     """Returns the spec for this integration, including its OAuth2 auth specification."""
     oauth2_spec = OAuth2Specification(
         oauthFlowInitParameters=[["client_id"], ["client_secret"],
                                  ["refresh_token"]])
     return ConnectorSpecification(
         documentationUrl="https://docs.airbyte.io/integrations/sources/lever-hiring",
         changelogUrl="https://docs.airbyte.io/integrations/sources/lever-hiring#changelog",
         connectionSpecification=ConnectorConfig.schema(),
         authSpecification=AuthSpecification(
             auth_type="oauth2.0",
             oauth2Specification=oauth2_spec,
         ),
     )
Beispiel #13
0
    def spec(self, *args, **kwargs) -> ConnectorSpecification:
        """
        Returns the spec for this integration. The spec is a JSON-Schema object describing the required
        configurations (e.g: username and password) required to run this integration.
        """
        # FIXME: airbyte-cdk does not parse pydantic $ref correctly. This override won't be needed after the fix
        schema = ConnectorConfig.schema()
        definitions = schema["definitions"]
        schema["properties"]["aws_environment"] = definitions["AWSEnvironment"]
        schema["properties"]["region"] = definitions["AWSRegion"]

        docs_url = "https://docs.airbyte.io/integrations/sources/amazon-seller-partner"
        return ConnectorSpecification(
            documentationUrl=docs_url,
            changelogUrl=docs_url,
            connectionSpecification=schema,
        )
Beispiel #14
0
def test_config_validate(entrypoint: AirbyteEntrypoint, mocker, config_mock,
                         schema, config_valid):
    """Valid configs yield the check result; invalid configs raise a config-validation error."""
    parsed_args = Namespace(command="check", config="config_path")
    expected_status = AirbyteConnectionStatus(status=Status.SUCCEEDED)
    mocker.patch.object(MockSource, "check", return_value=expected_status)
    mocker.patch.object(
        MockSource,
        "spec",
        return_value=ConnectorSpecification(connectionSpecification=schema))

    if not config_valid:
        with pytest.raises(Exception, match=r"(?i)Config Validation Error:.*"):
            list(entrypoint.run(parsed_args))
    else:
        emitted = list(entrypoint.run(parsed_args))
        assert emitted == [_wrap_message(expected_status)]
Beispiel #15
0
 def spec(self, *args, **kwargs) -> ConnectorSpecification:
     """
     Returns the spec for this integration. The spec is a JSON-Schema object describing the required configurations (e.g: username and password)
     required to run this integration.
     """
     docs_url = "https://docs.airbyte.io/integrations/sources/facebook-marketing"
     auth_spec = AuthSpecification(
         auth_type="oauth2.0",
         oauth2Specification=OAuth2Specification(
             rootObject=[],
             oauthFlowInitParameters=[],
             oauthFlowOutputParameters=[["access_token"]],
         ),
     )
     return ConnectorSpecification(
         documentationUrl=docs_url,
         changelogUrl=docs_url,
         supportsIncremental=True,
         supported_destination_sync_modes=[DestinationSyncMode.append],
         connectionSpecification=expand_local_ref(ConnectorConfig.schema()),
         authSpecification=auth_spec,
     )
Beispiel #16
0
    def test_run_spec(self, mocker, destination: Destination):
        """The spec command should call Destination.spec and emit its wrapped result."""
        parsed_args = argparse.Namespace(command="spec")

        expected_spec = ConnectorSpecification(
            connectionSpecification={"json_schema": {"prop": "value"}})
        mocker.patch.object(destination,
                            "spec",
                            return_value=expected_spec,
                            autospec=True)

        emitted_message = next(iter(destination.run_cmd(parsed_args)))

        # Mypy doesn't understand magicmock so it thinks spec doesn't have assert_called_once attr
        destination.spec.assert_called_once()  # type: ignore

        # verify the output of spec was returned
        assert _wrapped(expected_spec) == emitted_message
Beispiel #17
0
    def spec(self, *args, **kwargs) -> ConnectorSpecification:
        """
        Returns the spec for this integration. The spec is a JSON-Schema object describing the required configurations (e.g: username and password)
        required to run this integration.
        """
        # A throwaway stream instance is used purely to read 'supports_incremental'.
        dummy_stream = self.stream_class(dataset="",
                                         provider="",
                                         format="",
                                         path_pattern="")
        supports_incremental = dummy_stream.supports_incremental

        sync_modes = [DestinationSyncMode.overwrite]
        if supports_incremental:
            sync_modes += [
                DestinationSyncMode.append, DestinationSyncMode.append_dedup
            ]

        return ConnectorSpecification(
            documentationUrl=self.documentation_url,
            changelogUrl=self.documentation_url,
            supportsIncremental=supports_incremental,
            supported_destination_sync_modes=sync_modes,
            connectionSpecification=self.spec_class.schema(),
        )
Beispiel #18
0
def connector_spec_fixture(connector_spec_path) -> ConnectorSpecification:
    """Load the connector spec (YAML or JSON) from the given path and parse it."""
    raw_spec = load_yaml_or_json_path(connector_spec_path)
    return ConnectorSpecification.parse_obj(raw_spec)
Beispiel #19
0
 def spec(self, *args) -> ConnectorSpecification:
     """Returns the spec for this integration."""
     return ConnectorSpecification(
         connectionSpecification=AmazonAdsConfig.schema(),
         documentationUrl="https://docs.airbyte.io/integrations/sources/amazon-ads",
     )
Beispiel #20
0
def spec_mock(mocker):
    """Patch MockSource.spec with a MagicMock that returns an empty spec; return the mock."""
    stub = MagicMock(
        return_value=ConnectorSpecification(connectionSpecification={}))
    mocker.patch.object(MockSource, "spec", stub)
    return stub
Beispiel #21
0
 def spec(self, logger: AirbyteLogger) -> ConnectorSpecification:
     """Returns the spec for this integration."""
     docs_url = "https://docs.airbyte.io/integrations/sources/braintree"
     return ConnectorSpecification(
         connectionSpecification=BraintreeConfig.schema(),
         documentationUrl=docs_url)
Beispiel #22
0
def test_run_spec(entrypoint: AirbyteEntrypoint, mocker):
    """The spec command should emit exactly the mocked specification, wrapped in a message."""
    args = Namespace(command="spec")
    stub_spec = ConnectorSpecification(connectionSpecification={"hi": "hi"})
    mocker.patch.object(MockSource, "spec", return_value=stub_spec)
    emitted = list(entrypoint.run(args))
    assert emitted == [_wrap_message(stub_spec)]
Beispiel #23
0
def connector_spec_fixture(connector_spec_path) -> ConnectorSpecification:
    """Parse the connector specification directly from the file at the given path."""
    parsed = ConnectorSpecification.parse_file(connector_spec_path)
    return parsed
Beispiel #24
0
def _spec(schema: Dict[str, Any]) -> ConnectorSpecification:
    """Wrap a raw JSON-schema dict in a ConnectorSpecification."""
    wrapped = ConnectorSpecification(connectionSpecification=schema)
    return wrapped
Beispiel #25
0
        # NOTE(review): scrape fragment — the enclosing test function's signature
        # (and the branch condition above this `with`) are not visible here.
        with pytest.raises(
                AssertionError,
                match="stream should have some fields mentioned by json schema"
        ):
            t.test_read(None, catalog, input_config, [], docker_runner_mock,
                        MagicMock())
    else:
        t.test_read(None, catalog, input_config, [], docker_runner_mock,
                    MagicMock())


# Parametrized cases pairing a ConnectorSpecification with the expected
# validation error ("" means success). NOTE(review): the case list is cut off
# below this view — the closing bracket and remaining cases are not visible.
@pytest.mark.parametrize(
    "connector_spec, expected_error",
    [
        # SUCCESS: no authSpecification specified
        (ConnectorSpecification(connectionSpecification={}), ""),
        # FAIL: Field specified in root object does not exist
        (
            ConnectorSpecification(
                connectionSpecification={"type": "object"},
                authSpecification={
                    "auth_type": "oauth2.0",
                    "oauth2Specification": {
                        "rootObject": ["credentials", 0],
                        "oauthFlowInitParameters": [["client_id"],
                                                    ["client_secret"]],
                        "oauthFlowOutputParameters": [["access_token"],
                                                      ["refresh_token"]],
                    },
                },
            ),
Beispiel #26
0
    def test_run_write(self, mocker, destination: Destination, tmp_path,
                       monkeypatch):
        """End-to-end write command: run_cmd must validate the config, forward
        the stdin messages to write(), and re-emit the state messages that
        write() yields — ignoring trailing malformed input."""
        # Dummy config persisted to disk so run_cmd can read it back.
        config_path, dummy_config = tmp_path / "config.json", {
            "user": "******"
        }
        write_file(config_path, dummy_config)

        # Minimal single-stream catalog, also persisted for run_cmd to load.
        dummy_catalog = ConfiguredAirbyteCatalog(streams=[
            ConfiguredAirbyteStream(
                stream=AirbyteStream(name="mystream",
                                     json_schema={"type": "object"}),
                sync_mode=SyncMode.full_refresh,
                destination_sync_mode=DestinationSyncMode.overwrite,
            )
        ])
        catalog_path = tmp_path / "catalog.json"
        write_file(catalog_path, dummy_catalog.json(exclude_unset=True))

        args = {
            "command": "write",
            "config": config_path,
            "catalog": catalog_path
        }
        parsed_args = argparse.Namespace(**args)

        # write() is mocked to yield two state messages; run_cmd is expected
        # to pass them straight through to its own output.
        expected_write_result = [
            _wrapped(_state({"k1": "v1"})),
            _wrapped(_state({"k2": "v2"}))
        ]
        mocker.patch.object(
            destination,
            "write",
            return_value=iter(expected_write_result),
            autospec=True  # convert to iterator to mimic real usage
        )
        spec_msg = ConnectorSpecification(connectionSpecification={})
        mocker.patch.object(destination, "spec", return_value=spec_msg)
        validate_mock = mocker.patch(
            "airbyte_cdk.destinations.destination.check_config_against_spec_or_exit"
        )
        # mock input is a record followed by some state messages
        mocked_input: List[AirbyteMessage] = [
            _wrapped(_record("s1", {"k1": "v1"})), *expected_write_result
        ]
        # Serialize the messages newline-delimited, as the real stdin protocol does.
        mocked_stdin_string = "\n".join(
            [record.json(exclude_unset=True) for record in mocked_input])
        mocked_stdin_string += "\n add this non-serializable string to verify the destination does not break on malformed input"
        mocked_stdin = io.TextIOWrapper(
            io.BytesIO(bytes(mocked_stdin_string, "utf-8")))

        monkeypatch.setattr("sys.stdin", mocked_stdin)

        returned_write_result = list(destination.run_cmd(parsed_args))
        # verify method call with the correct params
        # Affirm to Mypy that call_count is indeed a method on this mock
        destination.write.assert_called_once()  # type: ignore
        # Affirm to Mypy that call_count is indeed a method on this mock
        destination.write.assert_called_with(  # type: ignore
            config=dummy_config,
            configured_catalog=dummy_catalog,
            # Stdin is internally consumed as a generator so we use a custom matcher
            # that iterates over two iterables to check equality
            input_messages=OrderedIterableMatcher(mocked_input),
        )
        # Check if config validation has been called
        validate_mock.assert_called_with(dummy_config, spec_msg,
                                         destination.logger)

        # verify output was correct
        assert expected_write_result == returned_write_result