def test_run_bad2():
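    """A node whose run() deliberately raises should surface its exception through DagRunner.run()."""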
    class TestWriterTmp(AbstractWriter):
        @staticmethod
        def necessary_config(node_config):
            return set()

        def run(self, data_object):
            raise Exception("Deliberate error")
            # return data_object, False

    NodeFactory().register("TestWriterTmp", TestWriterTmp)

    config = {
        "implementation_config": {
            "writer_config": {
                "mywriter": {
                    "class": "TestWriterTmp",
                    "destinations": []
                }
            }
        }
    }
    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)

    runner = DagRunner(configuration)

    with pytest.raises(Exception) as e:
        runner.run()
    assert "Deliberate error" in str(e)
def test_run2():
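    """Dry run of a single-reader DAG logs the planned node instead of executing it."""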
    config = {
        "implementation_config": {
            "reader_config": {
                "csv_reader": {
                    "class": "CsvReader",
                    "filename": "test/minimal.csv",
                    "destinations": [],
                }
            }
        }
    }
    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)
    runner = DagRunner(configuration)
    with LogCapture() as l:
        runner.run(dry_run=True)
    l.check(
        ("root", "INFO", "Taking nodes to run from default"),
        (
            "root",
            "INFO",
            "DRY RUN 0: would run node csv_reader of type reader_config and class CsvReader",
        ),
        ("root", "INFO", "All done. Bye bye!"),
    )


def test_run_bad():
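    """run() should report an instantiation error when a node's class was unregistered from the NodeFactory."""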
    class TestWriterTmp(AbstractWriter):
        @staticmethod
        def necessary_config(node_config):
            return set()

        def run(self, data_object):
            return data_object, False

    NodeFactory().register("TestWriterTmp", TestWriterTmp)

    config = {
        "implementation_config": {
            "writer_config": {
                "mywriter": {
                    "class": "TestWriterTmp",
                    "destinations": []
                }
            }
        }
    }
    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)

    runner = DagRunner(configuration)

    # unregister this class
    del NodeFactory().name_dict["TestWriterTmp"]

    with pytest.raises(Exception) as e:
        runner.run()
    assert "Issue instantiating mywriter and class TestWriterTmp" in str(e)
def test_run_notification_error():
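    """A failing run with notify_on_error configured should post an error notification via the client."""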
    config = {
        "metadata": {
            "section_registry": ["phase1"],
            "notify_on_error": {
                "client": "SlackClient",
                "channel": "some-channel",
                "token": "slack-api-token",
                "member_id": "optional-key",
            },
        },
        "implementation_config": {
            "phase1": {
                "csv_reader": {
                    "class": "CsvReader",
                    "filename": "bad/path.csv"
                }
            }
        },
    }
    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)
    runner = DagRunner(configuration)

    mock_client = mock.Mock()
    mock_client.post_message = mock.Mock()

    # Patch the client factory so the error notification goes to our mock.
    # This assumes the runner resolves get_notification_client through
    # primrose.notification_utils at call time.
    path = "primrose.notification_utils.get_notification_client"
    with mock.patch(path, return_value=mock_client):
        with pytest.raises(Exception):
            runner.run()
        # the assertion must sit outside the raises block, otherwise the
        # exception from runner.run() would prevent it from ever executing
        assert mock_client.post_message.call_count == 1


def test_run4():
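    """A node returning terminate=True should stop the DAG early, as reflected in the logs."""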
    class TestWriter(AbstractFileWriter):
        def __init__(self, configuration, instance_name):
            pass

        @staticmethod
        def necessary_config(node_config):
            return set()

        def run(self, data_object):
            terminate = True
            return data_object, terminate

    NodeFactory().register("TestWriter", TestWriter)

    config = {
        "implementation_config": {
            "reader_config": {
                "csv_reader": {
                    "class": "CsvReader",
                    "filename": "test/minimal.csv",
                    "destinations": ["csv_writer"],
                }
            },
            "writer_config": {
                "csv_writer": {
                    "class": "TestWriter"
                }
            },
        }
    }

    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)
    runner = DagRunner(configuration)

    with LogCapture() as l:
        runner.run(dry_run=False)
    l.check(
        ("root", "INFO", "Taking nodes to run from default"),
        (
            "root",
            "INFO",
            "received node csv_reader of type reader_config and class CsvReader",
        ),
        ("root", "INFO", "Reading test/minimal.csv from CSV"),
        (
            "root",
            "INFO",
            "received node csv_writer of type writer_config and class TestWriter",
        ),
        ("root", "INFO", "Terminating early due to signal from csv_writer"),
        ("root", "INFO", "All done. Bye bye!"),
    )


def test_run6():
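    """Dry run with a section_registry walks the nodes of every registered section in order."""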
    config = {
        "metadata": {
            "section_registry": ["phase1", "cleanup_config"],
            "notify_on_error": {
                "client": "SlackClient",
                "channel": "some-channel",
                "token": "slack-api-token",
                "member_id": "optional-key",
            },
        },
        "implementation_config": {
            "phase1": {
                "csv_reader": {
                    "class": "CsvReader",
                    "filename": "test/minimal.csv",
                    "destinations": ["notification"],
                }
            },
            "cleanup_config": {
                "notification": {
                    "class": "ClientNotification",
                    "client": "SlackClient",
                    "channel": "some-channel",
                    "token": "slack-api-token",
                    "member_id": "optional-key",
                    "message": "Yay! Sucess",
                }
            },
        },
    }
    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)
    runner = DagRunner(configuration)

    with LogCapture() as l:
        runner.run(dry_run=True)
    l.check(
        ("root", "INFO", "Taking nodes to run from section_registry"),
        (
            "root",
            "INFO",
            "DRY RUN 0: would run node csv_reader of type phase1 and class CsvReader",
        ),
        (
            "root",
            "INFO",
            "DRY RUN 1: would run node notification of type cleanup_config and class ClientNotification",
        ),
        ("root", "INFO", "All done. Bye bye!"),
    )


def test_run():
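    """Smoke test: a minimal single-reader DAG runs end to end without error."""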
    config = {
        "implementation_config": {
            "reader_config": {
                "csv_reader": {
                    "class": "CsvReader",
                    "filename": "test/minimal.csv",
                    "destinations": [],
                }
            }
        }
    }
    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)
    runner = DagRunner(configuration)
    runner.run()


def test_run5():
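    """Dry run takes node ordering from the section_registry metadata across two phases."""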
    config = {
        "metadata": {
            "section_registry": ["phase1", "phase2"]
        },
        "implementation_config": {
            "phase1": {
                "csv_reader": {
                    "class": "CsvReader",
                    "filename": "test/minimal.csv",
                    "destinations": ["csv_writer"],
                }
            },
            "phase2": {
                "csv_writer": {
                    "class": "CsvWriter",
                    "key": "test_data",
                    "dir": "cache",
                    "filename": "test/unittest_similar_recipes.csv",
                }
            },
        },
    }
    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)
    runner = DagRunner(configuration)

    with LogCapture() as l:
        runner.run(dry_run=True)
    l.check(
        ("root", "INFO", "Taking nodes to run from section_registry"),
        (
            "root",
            "INFO",
            "DRY RUN 0: would run node csv_reader of type phase1 and class CsvReader",
        ),
        (
            "root",
            "INFO",
            "DRY RUN 1: would run node csv_writer of type phase2 and class CsvWriter",
        ),
        ("root", "INFO", "All done. Bye bye!"),
    )


def test_run_pruned():
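    """A SimpleSwitch selecting the 'left' path prunes the right branch; pruned nodes are skipped and logged."""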
    config = {
        "implementation_config": {
            "reader_config": {
                "read_data": {
                    "class": "CsvReader",
                    "filename": "data/tennis.csv",
                    "destinations": ["conditional_node"],
                },
                "conditional_node": {
                    "class": "SimpleSwitch",
                    "path_to_travel": "left",
                    "destinations": ["left", "right"],
                },
            },
            "writer_config": {
                "left": {
                    "class": "LoggingSuccess",
                    "msg": "left node!",
                    "level": "INFO",
                },
                "right": {
                    "class": "LoggingSuccess",
                    "msg": "right node!",
                    "level": "INFO",
                    "destinations": ["right2"],
                },
                "right2": {
                    "class": "LoggingSuccess",
                    "msg": "right node2!",
                    "level": "INFO",
                },
            },
        }
    }
    configuration = Configuration(None,
                                  is_dict_config=True,
                                  dict_config=config)
    runner = DagRunner(configuration)
    with LogCapture() as l:
        runner.run()
    l.check(
        ("root", "INFO", "Taking nodes to run from default"),
        (
            "root",
            "INFO",
            "received node read_data of type reader_config and class CsvReader",
        ),
        ("root", "INFO", "Reading data/tennis.csv from CSV"),
        (
            "root",
            "INFO",
            "received node conditional_node of type reader_config and class SimpleSwitch",
        ),
        ("root", "INFO", "Skipping pruned node right"),
        ("root", "INFO", "Skipping pruned node right2"),
        (
            "root",
            "INFO",
            "received node left of type writer_config and class LoggingSuccess",
        ),
        ("root", "INFO", "left node!"),
        ("root", "INFO", "All done. Bye bye!"),
    )