Example #1
def test_alert_t2(logger, first_pass_config):
    """
    With live_history disabled, T2s run on alert history only
    """
    template = ZTFLegacyChannelTemplate(
        **{
            "channel": "EXAMPLE_TNS_MSIP",
            "contact": "*****@*****.**",
            "version": 0,
            "active": True,
            "auto_complete": False,
            "template": "ztf_uw_public",
            "t0_filter": {
                "unit": "BasicMultiFilter",
                "config": {
                    "filters": []
                }
            },
            "t2_compute": {
                "unit": "DemoLightCurveT2Unit",
            },
            "live_history": False,
        })
    process = template.get_processes(logger=logger,
                                     first_pass_config=first_pass_config)[0]
    assert process["tier"] == 0
    directive = IngestDirective(
        **process["processor"]["config"]["directives"][0])
    assert directive.ingest.mux is None
    assert len(directive.ingest.combine) == 1
    assert len(units := directive.ingest.combine[0].state_t2) == 2
    assert {u.unit
            for u in units} == {"DemoLightCurveT2Unit", "T2LightCurveSummary"}
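
For reference, the assertions above correspond to a directive of roughly the following shape. This is a sketch inferred from the test's expectations and the dict-style directives in Example #3, not the template's verbatim output: only the two state_t2 units and the absence of a muxer are actually asserted, while the filter block and the ZiT1Combiner entry are assumptions.

inferred_directive = {
    "channel": "EXAMPLE_TNS_MSIP",
    "filter": {"unit": "BasicMultiFilter", "config": {"filters": []}},  # assumed to carry over from t0_filter
    "ingest": {
        # live_history is disabled, so there is no "mux" block (mux is None)
        "combine": [
            {
                "unit": "ZiT1Combiner",  # assumed combiner; the test only inspects state_t2
                "state_t2": [
                    {"unit": "DemoLightCurveT2Unit"},
                    {"unit": "T2LightCurveSummary"},
                ],
            }
        ],
    },
}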
Example #2
def test_alert_only(logger, first_pass_config, unit_loader: UnitLoader):
    template = ZTFLegacyChannelTemplate(
        **{
            "channel": "EXAMPLE_TNS_MSIP",
            "version": 0,
            "contact": "*****@*****.**",
            "active": True,
            "auto_complete": False,
            "template": "ztf_uw_public",
            "t0_filter": {
                "unit": "BasicMultiFilter",
                "config": {
                    "filters": []
                }
            },
        })
    process = template.get_processes(logger=logger,
                                     first_pass_config=first_pass_config)[0]
    assert process["tier"] == 0
    with unit_loader.validate_unit_models():
        directive = IngestDirective(
            **process["processor"]["config"]["directives"][0])
    assert isinstance(directive.filter, FilterModel)
    assert isinstance(directive.ingest.mux, MuxModel)
    assert directive.ingest.mux.combine
    assert len(directive.ingest.mux.combine) == 1
    assert (isinstance((combine := directive.ingest.mux.combine[0]), T1Combine)
            and isinstance(
                (units := combine.state_t2), list) and len(units) == 1)
    assert units[0].unit == "T2LightCurveSummary"
    assert directive.ingest.combine is None

    with unit_loader.validate_unit_models():
        process["processor"]["config"]["process_name"] = "foo"
        ProcessModel(**(process | {"version": 0}))
Example #3
def test_integration(patch_mongo, dev_context, mock_get_photopoints, alerts):
    directive = {
        "channel": "EXAMPLE_TNS_MSIP",
        "ingest": {
            "combine": [
                {"unit": "ZiT1Combiner", "state_t2": [{"unit": "DemoLightCurveT2Unit"}]}
            ],
            "mux": {
                "unit": "ZiArchiveMuxer",
                "config": {"history_days": 30},
                "combine": [
                    {
                        "unit": "ZiT1Combiner",
                        "state_t2": [{"unit": "DemoLightCurveT2Unit"}],
                    }
                ],
            },
        },
    }

    handler = get_handler(dev_context, [IngestDirective(**directive)])

    t0 = dev_context.db.get_collection("t0")
    t1 = dev_context.db.get_collection("t1")
    t2 = dev_context.db.get_collection("t2")
    assert t0.count_documents({}) == 0

    alert_list = list(alerts())

    handler.ingest(
        alert_list[1].datapoints, stock_id=alert_list[1].stock, filter_results=[(0, True)]
    )
    handler.updates_buffer.push_updates()

    ZiArchiveMuxer.get_photopoints.assert_called_once()

    # no further handler.updates_buffer.push_updates() is needed before checking
    # t0: ZiAlertContentIngester has to be synchronous to deal with superseded
    # photopoints
    assert t0.count_documents({}) == len(alert_list[1].datapoints) + len(
        alert_list[0].datapoints
    ), "datapoints ingested for archival alert"

    assert t1.count_documents({}) == 2, "two compounds produced"
    assert t2.count_documents({}) == 2, "two t2 docs produced"

    assert t2.find_one(
        {"link": t1.find_one({"dps": {"$size": len(alert_list[1].datapoints)}})["link"]}
    )
    assert t2.find_one(
        {
            "link": t1.find_one(
                {"dps": {"$size": len(alert_list[1].datapoints) + len(alert_list[0].datapoints)}}
            )["link"]
        }
    )
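
The two t1 compounds correspond to two states of the same stock: one built from the ingested alert's own datapoints, and one that also includes the archival history pulled in by ZiArchiveMuxer (history_days=30). A sketch of lines that could be appended to the test body to make that explicit, under the same fixtures and assumptions as above:

    # compound sizes: the bare alert, and the alert plus its archived history
    sizes = sorted(len(doc["dps"]) for doc in t1.find({}))
    assert sizes == [
        len(alert_list[1].datapoints),
        len(alert_list[0].datapoints) + len(alert_list[1].datapoints),
    ]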
Example #4
def ingestion_handler_with_mongomuxer(mock_context):
    directive = {
        "channel": "EXAMPLE_TNS_MSIP",
        "ingest": {
            "mux": {
                "unit": "ZiMongoMuxer",
                "combine": [
                    {
                        "unit": "ZiT1Combiner",
                    }
                ],
            },
        },
    }

    return get_handler(mock_context, [IngestDirective(**directive)])
Example #5
def single_source_directive(dev_context: DevAmpelContext,
                            dummy_units) -> IngestDirective:

    return IngestDirective(
        channel="TEST_CHANNEL",
        ingest=IngestBody(
            stock_t2=[T2Compute(unit="DummyStockT2Unit")],
            point_t2=[T2Compute(unit="DummyPointT2Unit")],
            combine=[
                T1Combine(
                    unit="T1SimpleCombiner",
                    state_t2=[T2Compute(unit="DummyStateT2Unit")],
                )
            ],
        ),
    )
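
For comparison with the dict-style directives in Examples #3, #4, and #6, the same directive could plausibly be built from a plain mapping and validated by IngestDirective, assuming the nested models coerce dict input as they do in those examples:

equivalent_directive = IngestDirective(
    **{
        "channel": "TEST_CHANNEL",
        "ingest": {
            "stock_t2": [{"unit": "DummyStockT2Unit"}],
            "point_t2": [{"unit": "DummyPointT2Unit"}],
            "combine": [
                {
                    "unit": "T1SimpleCombiner",
                    "state_t2": [{"unit": "DummyStateT2Unit"}],
                }
            ],
        },
    }
)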
Example #6
def test_superseded_candidates_concurrent(mock_context, superseded_alerts, ordering):
    directive = {
        "channel": "EXAMPLE_TNS_MSIP",
        "ingest": {
            "mux": {
                "unit": "ZiMongoMuxer",
                "combine": [
                    {
                        "unit": "ZiT1Combiner",
                    }
                ],
            },
        },
    }

    alerts = list(reversed(list(superseded_alerts())))
    assert alerts[0].datapoints[0]["jd"] == alerts[1].datapoints[0]["jd"]
    candids = [alert.datapoints[0]["candid"] for alert in alerts]

    ingesters = [
        get_handler(mock_context, [IngestDirective(**directive)], i)
        for i in range(len(alerts))
    ]

    assert len(alerts) == 3

    def _ingest(indexes: list[int]):
        for i in indexes:
            next(iter(ingesters[i]._mux_cache.values())).index = i
            ingesters[i].ingest(
                alerts[i].datapoints, filter_results=[(0, True)], stock_id=alerts[i].stock
            )
            ingesters[i].updates_buffer.push_updates()

    # simulate real-world race conditions by running an entire ingestion immediately after
    # one ingester retrieves existing datapoints, but before it pushes any updates
    #
    # this creates sequences like the following:
    # 0 begin 1391345455815015017
    # 2 begin 1391345455815015019
    # 1 begin 1391345455815015018
    # 1 end  1391345455815015018
    # 2 end  1391345455815015019
    # 0 end  1391345455815015017
    def ingest(indexes: list[int], interleave=True):
        if interleave and len(indexes) > 1:
            with before_after.after(
                "ampel.ztf.ingest.ZiMongoMuxer.ZiMongoMuxer._get_dps",
                lambda *args: ingest(indexes[1:], interleave),
            ):
                _ingest(indexes[:1])
        else:
            _ingest(indexes)

    ingest(ordering)

    t0 = mock_context.db.get_collection("t0")

    def assert_superseded(old, new):
        doc = t0.find_one({"id": old})
        meta = doc.get("meta", [])
        assert (
            "SUPERSEDED" in doc["tag"]
            and len(
                [
                    m
                    for m in meta
                    if m.get("tag") == "SUPERSEDED"
                    and m.get("extra", {}).get("newId") == new
                ]
            )
            == 1
        ), f"candid {old} superseded by {new}"

    assert_superseded(candids[0], candids[1])
    assert_superseded(candids[0], candids[2])
    assert_superseded(candids[1], candids[2])
    assert (
        "SUPERSEDED" not in t0.find_one({"id": candids[2]})["tag"]
    ), f"candid {candids[2]} not superseded"