Example #1
def test_audit_cleanup_disabled_export(
    app: LinOTPApp,
    runner: FlaskCliRunner,
    freezer: FrozenDateTimeFactory,
    export_dir: Path,
    setup_audit_table: None,
):
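    """Cleanup with --no-export must not write an export file anywhere."""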
    freezer.move_to("2020-01-01 09:50:00")
    formatted_time = datetime.now().strftime(
        app.config["BACKUP_FILE_TIME_FORMAT"])

    runner.invoke(
        cli_main,
        [
            "-vv",
            "audit",
            "cleanup",
            "--max",
            "10",
            "--min",
            "10",
            "--no-export",
            "--exportdir",
            str(export_dir),
        ],
    )

    deleted = AUDIT_AMOUNT_ENTRIES - 10

    filename = f"SQLAuditExport.{formated_time}.{deleted}.csv"
    export_file_backup_dir = Path(app.config["BACKUP_DIR"]) / filename

    assert not export_file_backup_dir.is_file()
    assert len(list(export_dir.iterdir())) == 0
Example #2
def test_audit_cleanup_parameters(
    app: LinOTPApp,
    runner: FlaskCliRunner,
    setup_audit_table: None,
    freezer: FrozenDateTimeFactory,
    options: List,
    deleted: int,
    remaining: int,
    cleaned: bool,
):
    """Run audit cleanup with different max and min values"""

    freezer.move_to("2020-01-01 09:50:00")
    formatted_time = datetime.now().strftime(
        app.config["BACKUP_FILE_TIME_FORMAT"])

    result = runner.invoke(cli_main, ["-vv", "audit", "cleanup"] + options)

    assert result.exit_code == 0

    filename = f"SQLAuditExport.{formated_time}.{deleted}.csv"
    export_file = Path(app.config["BACKUP_DIR"]) / filename
    if cleaned:
        num_lines = sum(1 for _ in export_file.open())
        # expected: Number of deleted lines + header row
        assert num_lines == deleted + 1
        assert f"{remaining} entries left in database" in result.stderr
        assert f"Exported into {export_file}" in result.stderr
    else:
        assert not export_file.is_file()
        assert f"{remaining} entries in database" in result.stderr
        assert "Exported" not in result.stderr

    assert db.session.query(AuditTable).count() == remaining
Example #3
    async def test_channel_record(
        self,
        client: ClientSession,
        snapshot: SnapshotAssertion,
        factories: Factories,
        freezer: FrozenDateTimeFactory,
    ):
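        """Channel record page renders and matches the stored snapshot."""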
        freezer.move_to(datetime(2020, 1, 1))
        user1 = factories.user.create(xid=101, name="user1")
        user2 = factories.user.create(xid=102, name="user2")
        guild = factories.guild.create(xid=201, name="guild")
        channel = factories.channel.create(xid=301, name="channel", guild=guild)
        game1 = factories.game.create(
            id=1,
            seats=2,
            status=GameStatus.STARTED.value,
            format=GameFormat.MODERN.value,
            guild=guild,
            channel=channel,
            message_xid=901,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
        )
        game2 = factories.game.create(
            id=2,
            seats=2,
            status=GameStatus.STARTED.value,
            format=GameFormat.STANDARD.value,
            guild=guild,
            channel=channel,
            message_xid=902,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
        )
        game3 = factories.game.create(
            id=3,
            seats=2,
            status=GameStatus.STARTED.value,
            format=GameFormat.LEGACY.value,
            guild=guild,
            channel=channel,
            message_xid=903,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
        )
        factories.play.create(game_id=game1.id, user_xid=user1.xid, points=3)
        factories.play.create(game_id=game1.id, user_xid=user2.xid, points=1)
        factories.play.create(game_id=game2.id, user_xid=user1.xid, points=0)
        factories.play.create(game_id=game2.id, user_xid=user2.xid, points=5)
        factories.play.create(game_id=game3.id, user_xid=user1.xid, points=0)
        factories.play.create(game_id=game3.id, user_xid=user2.xid, points=10)

        resp = await client.get(f"/g/{guild.xid}/c/{channel.xid}")
        assert resp.status == 200
        text = await resp.text()
        assert text == snapshot
Example #4
async def test_full_flow_yaml_creds(
    hass: HomeAssistant,
    mock_code_flow: Mock,
    mock_exchange: Mock,
    component_setup: ComponentSetup,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test successful creds setup."""
    assert await component_setup()

    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER})
    assert result.get("type") == "progress"
    assert result.get("step_id") == "auth"
    assert "description_placeholders" in result
    assert "url" in result["description_placeholders"]

    with patch("homeassistant.components.google.async_setup_entry",
               return_value=True) as mock_setup:
        # Run one tick to invoke the credential exchange check
        freezer.tick(CODE_CHECK_ALARM_TIMEDELTA)
        await fire_alarm(hass, datetime.datetime.utcnow())
        await hass.async_block_till_done()
        result = await hass.config_entries.flow.async_configure(
            flow_id=result["flow_id"])

    assert result.get("type") == "create_entry"
    assert result.get("title") == EMAIL_ADDRESS
    assert "data" in result
    data = result["data"]
    assert "token" in data
    assert (data["token"]["expires_in"] == 60 * 60 -
            CODE_CHECK_ALARM_TIMEDELTA.total_seconds())
    assert data["token"]["expires_at"] == 1654273199.0
    data["token"].pop("expires_at")
    data["token"].pop("expires_in")
    assert data == {
        "auth_implementation": "device_auth",
        "token": {
            "access_token": "ACCESS_TOKEN",
            "refresh_token": "REFRESH_TOKEN",
            "scope": "https://www.googleapis.com/auth/calendar",
            "token_type": "Bearer",
        },
    }
    assert result.get("options") == {"calendar_access": "read_write"}

    assert len(mock_setup.mock_calls) == 1
    entries = hass.config_entries.async_entries(DOMAIN)
    assert len(entries) == 1
Example #5
def test_parse_extracts_due(faker: Faker,
                            freezer: FrozenDateTimeFactory) -> None:
    """Test parsing of due dates."""
    description = faker.sentence()
    freezer.move_to("2017-05-20")
    due = "1d"
    task_arguments = [
        description,
        f"due:{due}",
    ]

    result = _parse_changes(task_arguments)

    assert result.task_attributes == {
        "description": description,
        "due": datetime(2017, 5, 21),
    }
Example #6
async def test_coordinator_next_departuredate(freezer: FrozenDateTimeFactory) -> None:
    """Test the Trafikverket Ferry next_departuredate calculation."""
    freezer.move_to("2022-05-15")
    today = date.today()
    day_list = ["wed", "thu", "fri", "sat"]
    test = next_departuredate(day_list)
    assert test == today + timedelta(days=3)
    day_list = WEEKDAYS
    test = next_departuredate(day_list)
    assert test == today + timedelta(days=0)
    day_list = ["sun"]
    test = next_departuredate(day_list)
    assert test == today + timedelta(days=0)
    freezer.move_to("2022-05-16")
    today = date.today()
    day_list = ["wed", "thu", "fri", "sat", "sun"]
    test = next_departuredate(day_list)
    assert test == today + timedelta(days=2)
Example #7
def test_orchestrator_dummy_model_handles_disabled_database(
    freezer: FrozenDateTimeFactory, monkeypatch: MonkeyPatch
) -> None:
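    """With the internal database disabled, the run still completes and no forecast run is recorded."""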
    monkeypatch.setattr(orchestrator_module, "forecast_model", dummy_forecast_model)

    freezer.move_to("2010-12-31-15-00")
    orchestrator_result = setup_orchestrator_result(use_real_database=True)

    orchestrator_result.orchestrator._internal_database._is_disabled = True

    freezer.move_to("2015-12-31-15-00")
    orchestrator_result.runtime_config.model_configs = [ModelConfigAccountTestDummy]
    orchestrator_result.orchestrator.run()
    orchestrator_result.assert_results([ModelConfigAccountTestDummy])

    orchestrator_result.orchestrator._internal_database._is_disabled = False

    forecast_run = get_forecast_run(orchestrator_result.orchestrator)
    assert forecast_run is None
Example #8
def test_orchestrator_dummy_model_updates_database_status_on_keyboard_interrupt(
    freezer: FrozenDateTimeFactory, with_cleaned_data_in_database: OrchestratorResult
) -> None:
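    """A KeyboardInterrupt during model execution marks the forecast run as CANCELLED."""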
    with_cleaned_data_in_database.orchestrator._import_cleaned_data = lambda: None  # type: ignore
    with_cleaned_data_in_database.runtime_config.engine_run_type = (
        EngineRunType.development
    )  # Allow incomplete cleaning run in test

    freezer.move_to("2015-12-31-15-00")
    with_cleaned_data_in_database.runtime_config.model_configs = [ModelConfigAccountTestDummy]

    with patch("forecasting_platform.services.orchestrator.execute_models", side_effect=KeyboardInterrupt):
        with pytest.raises(KeyboardInterrupt):
            with_cleaned_data_in_database.orchestrator.run()

    forecast_run = get_forecast_run(with_cleaned_data_in_database.orchestrator)
    assert forecast_run
    assert forecast_run.status == ForecastRunStatus.CANCELLED
    assert forecast_run.start == datetime(2010, 12, 31, 15, 0)
    assert forecast_run.end == datetime(2015, 12, 31, 15, 0)
Example #9
def test_orchestrator_dummy_model_updates_database_status(
    freezer: FrozenDateTimeFactory, with_cleaned_data_in_database: OrchestratorResult, monkeypatch: MonkeyPatch
) -> None:
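    """A successful run marks the forecast run as COMPLETED and records its start and end times."""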
    monkeypatch.setattr(orchestrator_module, "forecast_model", dummy_forecast_model)

    with_cleaned_data_in_database.orchestrator._import_cleaned_data = lambda: None  # type: ignore
    with_cleaned_data_in_database.runtime_config.engine_run_type = (
        EngineRunType.development
    )  # Allow incomplete cleaning run in test

    freezer.move_to("2015-12-31-15-00")

    with_cleaned_data_in_database.runtime_config.model_configs = [ModelConfigAccountTestDummy]
    with_cleaned_data_in_database.orchestrator.run()
    with_cleaned_data_in_database.assert_results([ModelConfigAccountTestDummy])

    forecast_run = get_forecast_run(with_cleaned_data_in_database.orchestrator)
    assert forecast_run
    assert forecast_run.status == ForecastRunStatus.COMPLETED
    assert forecast_run.start == datetime(2010, 12, 31, 15, 0)
    assert forecast_run.end == datetime(2015, 12, 31, 15, 0)
Example #10
def test_orchestrator_dummy_model_handles_disabled_db_on_keyboard_interrupt(
    freezer: FrozenDateTimeFactory, caplog: LogCaptureFixture
) -> None:
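    """With a disabled database, a KeyboardInterrupt leaves no forecast run recorded and the skip is logged."""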
    freezer.move_to("2010-12-31-15-00")
    orchestrator_result = setup_orchestrator_result(use_real_database=True)

    caplog.set_level(logging.INFO)
    orchestrator_result.orchestrator._internal_database._is_disabled = True

    freezer.move_to("2015-12-31-15-00")
    orchestrator_result.runtime_config.model_configs = [ModelConfigAccountTestDummy]

    with patch("forecasting_platform.services.orchestrator.execute_models", side_effect=KeyboardInterrupt):
        with pytest.raises(KeyboardInterrupt):
            orchestrator_result.orchestrator.run()

    orchestrator_result.orchestrator._internal_database._is_disabled = False

    forecast_run = get_forecast_run(orchestrator_result.orchestrator)
    assert forecast_run is None

    assert "Skip cancelling forecast model run in database because of disabled internal database" in caplog.messages
Example #11
def test_audit_cleanup_custom_export_dir(
    app: LinOTPApp,
    runner: FlaskCliRunner,
    freezer: FrozenDateTimeFactory,
    export_dir: Path,
    setup_audit_table: None,
):
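    """With --exportdir, the export file is written there instead of BACKUP_DIR."""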
    freezer.move_to("2020-01-01 09:50:00")
    formatted_time = datetime.now().strftime(
        app.config["BACKUP_FILE_TIME_FORMAT"])

    runner.invoke(
        cli_main,
        [
            "-vvv",
            "audit",
            "cleanup",
            "--max",
            "10",
            "--min",
            "10",
            "--exportdir",
            str(export_dir),
        ],
    )

    deleted = AUDIT_AMOUNT_ENTRIES - 10

    filename = f"SQLAuditExport.{formated_time}.{deleted}.csv"
    export_file_backup_dir = Path(app.config["BACKUP_DIR"]) / filename
    export_file_export_dir = export_dir / filename

    assert not export_file_backup_dir.is_file()
    assert export_file_export_dir.is_file()

    num_lines = sum(1 for _ in export_file_export_dir.open())
    # expected: Number of deleted lines + header row
    assert num_lines == deleted + 1
Example #12
    def freeze(time_to_freeze_str, tz_offset=0):
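        """Freeze time at the given datetime, date, or parseable string.

        Normalizes the value to a naive datetime, wraps it in a
        FrozenDateTimeFactory, registers it with FakeDate/FakeDatetime,
        patches the stdlib classes, and adds unfreeze as a finalizer.
        """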
        if isinstance(time_to_freeze_str, datetime.datetime):
            time_to_freeze = time_to_freeze_str
        elif isinstance(time_to_freeze_str, datetime.date):
            time_to_freeze = datetime.datetime.combine(time_to_freeze_str,
                                                       datetime.time())
        else:
            time_to_freeze = dateutil.parser.parse(time_to_freeze_str)

        time_to_freeze = convert_to_timezone_naive(time_to_freeze)
        time_to_freeze = FrozenDateTimeFactory(time_to_freeze)

        FakeDate.dates_to_freeze.append(time_to_freeze)
        FakeDate.tz_offsets.append(tz_offset)

        FakeDatetime.times_to_freeze.append(time_to_freeze)
        FakeDatetime.tz_offsets.append(tz_offset)

        mocker.patch('datetime.date', FakeDate)
        mocker.patch('datetime.datetime', FakeDatetime)

        request.addfinalizer(unfreeze)
Example #13
async def test_button(
    hass: HomeAssistant,
    load_int: ConfigEntry,
    monkeypatch: MonkeyPatch,
    get_data: SensiboData,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test the Sensibo button."""

    state_button = hass.states.get("button.hallway_reset_filter")
    state_filter_clean = hass.states.get("binary_sensor.hallway_filter_clean_required")
    state_filter_last_reset = hass.states.get("sensor.hallway_filter_last_reset")

    assert state_button.state is STATE_UNKNOWN
    assert state_filter_clean.state is STATE_ON
    assert state_filter_last_reset.state == "2022-03-12T15:24:26+00:00"

    freezer.move_to(datetime(2022, 6, 19, 20, 0, 0))

    with patch(
        "homeassistant.components.sensibo.util.SensiboClient.async_get_devices_data",
        return_value=get_data,
    ), patch(
        "homeassistant.components.sensibo.util.SensiboClient.async_reset_filter",
        return_value={"status": "success"},
    ):
        await hass.services.async_call(
            BUTTON_DOMAIN,
            SERVICE_PRESS,
            {
                ATTR_ENTITY_ID: state_button.entity_id,
            },
            blocking=True,
        )
    await hass.async_block_till_done()

    monkeypatch.setattr(get_data.parsed["ABC999111"], "filter_clean", False)
    monkeypatch.setattr(
        get_data.parsed["ABC999111"],
        "filter_last_reset",
        datetime(2022, 6, 19, 20, 0, 0, tzinfo=dt.UTC),
    )

    with patch(
        "homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data",
        return_value=get_data,
    ):
        async_fire_time_changed(
            hass,
            dt.utcnow() + timedelta(minutes=5),
        )
        await hass.async_block_till_done()

    state_button = hass.states.get("button.hallway_reset_filter")
    state_filter_clean = hass.states.get("binary_sensor.hallway_filter_clean_required")
    state_filter_last_reset = hass.states.get("sensor.hallway_filter_last_reset")
    assert (
        state_button.state == datetime(2022, 6, 19, 20, 0, 0, tzinfo=dt.UTC).isoformat()
    )
    assert state_filter_clean.state is STATE_OFF
    assert state_filter_last_reset.state == "2022-06-19T20:00:00+00:00"
Example #14
    async def test_user_record_with_cookies(
        self,
        client: ClientSession,
        snapshot: SnapshotAssertion,
        factories: Factories,
        freezer: FrozenDateTimeFactory,
    ):
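        """User record page with timezone cookies set matches the stored snapshot."""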
        freezer.move_to(datetime(2020, 1, 1))
        user1 = factories.user.create(xid=101, name="user:1")
        user2 = factories.user.create(xid=102, name="user@2")
        guild = factories.guild.create(xid=201, name="guild")
        channel = factories.channel.create(xid=301, name="channel", guild=guild)
        game1 = factories.game.create(
            id=1,
            seats=2,
            status=GameStatus.STARTED.value,
            format=GameFormat.MODERN.value,
            guild=guild,
            channel=channel,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
            message_xid=901,
        )
        game2 = factories.game.create(
            id=2,
            seats=2,
            status=GameStatus.STARTED.value,
            format=GameFormat.STANDARD.value,
            guild=guild,
            channel=channel,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
            message_xid=902,
        )
        game3 = factories.game.create(
            id=3,
            seats=2,
            status=GameStatus.STARTED.value,
            format=GameFormat.LEGACY.value,
            guild=guild,
            channel=channel,
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
            message_xid=903,
        )
        factories.play.create(game_id=game1.id, user_xid=user1.xid, points=3)
        factories.play.create(game_id=game1.id, user_xid=user2.xid, points=1)
        factories.play.create(game_id=game2.id, user_xid=user1.xid, points=None)
        factories.play.create(game_id=game2.id, user_xid=user2.xid, points=5)
        factories.play.create(game_id=game3.id, user_xid=user1.xid, points=None)
        factories.play.create(game_id=game3.id, user_xid=user2.xid, points=10)

        resp = await client.get(
            f"/g/{guild.xid}/u/{user1.xid}",
            cookies={
                "timezone_offset": "480",
                "timezone_name": "America/Los_Angeles",
            },
        )
        assert resp.status == 200
        text = await resp.text()
        assert text == snapshot