Example #1
import glob
from os import path

import pytest

from swh.core.db.pytest_plugin import postgresql_fact


@pytest.fixture()
def mock_package_sql(mocker, datadir):
    """This bypasses the module manipulation to only returns the data test files.

    """
    from swh.core.utils import numfile_sortkey as sortkey

    mock_sql_files = mocker.patch("swh.core.cli.db.get_sql_for_package")
    sql_files = sorted(glob.glob(path.join(datadir, "cli", "*.sql")), key=sortkey)
    mock_sql_files.return_value = sql_files
    return mock_sql_files


# We do not want the truncate behavior for those tests
test_db = postgresql_fact(
    "postgresql_proc", db_name="clidb", no_truncate_tables={"dbversion", "origin"}
)


@pytest.fixture
def swh_db_cli(cli_runner, monkeypatch, test_db):
    """This initializes a cli_runner and sets the correct environment variable expected by
       the cli to run appropriately (when not specifying the --db-name flag)

    """
    db_params = test_db.get_dsn_parameters()
    monkeypatch.setenv("PGHOST", db_params["host"])
    monkeypatch.setenv("PGUSER", db_params["user"])
    monkeypatch.setenv("PGPORT", db_params["port"])

    return cli_runner, db_params
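

# Hypothetical usage sketch (not part of the original excerpt): a test can
# combine the two fixtures above; swh_db_cli yields a (runner, params) pair,
# and the PGHOST/PGUSER/PGPORT environment it sets lets the cli find the test
# database without an explicit --db-name flag.
def test_cli_db_environment(swh_db_cli, mock_package_sql):
    cli_runner, db_params = swh_db_cli
    # the database created by the postgresql_fact fixture above is the one
    # the PG* environment points at
    assert db_params["dbname"] == "clidb"
    # the patched get_sql_for_package returns the test data .sql files
    assert mock_package_sql.return_value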
Example #2
from datetime import timedelta
from os import path
from typing import List, Tuple

import pytest

from swh.core.db.pytest_plugin import postgresql_fact
import swh.indexer
from swh.storage import get_storage

from .utils import fill_obj_storage, fill_storage

TASK_NAMES: List[Tuple[str, str]] = [
    # (scheduler-task-type, task-class-test-name)
    ("index-revision-metadata", "revision_intrinsic_metadata"),
    ("index-origin-metadata", "origin_intrinsic_metadata"),
]


SQL_FILES = path.join(path.dirname(swh.indexer.__file__), "sql", "*.sql")


idx_storage_postgresql = postgresql_fact(
    "postgresql_proc", dbname="indexer_storage", dump_files=SQL_FILES,
)


@pytest.fixture
def indexer_scheduler(swh_scheduler):
    # Insert the expected task types within the scheduler
    for task_name, task_class_name in TASK_NAMES:
        swh_scheduler.create_task_type(
            {
                "type": task_name,
                "description": f"The {task_class_name} indexer testing task",
                "backend_name": f"swh.indexer.tests.tasks.{task_class_name}",
                "default_interval": timedelta(days=1),
                "min_interval": timedelta(hours=6),
                "max_interval": timedelta(days=12),
Example #3
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import glob
import os

from swh.core.db import BaseDb
from swh.core.db.pytest_plugin import postgresql_fact

SQL_DIR = os.path.join(os.path.dirname(__file__), "data")

# db with special policy for tables dbversion and people
postgres_fun = postgresql_fact(
    "postgresql_proc",
    db_name="fun",
    dump_files=f"{SQL_DIR}/*.sql",
    no_truncate_tables={"dbversion", "people"},
)

postgres_fun2 = postgresql_fact(
    "postgresql_proc",
    db_name="fun2",
    dump_files=sorted(glob.glob(f"{SQL_DIR}/*.sql")),
    no_truncate_tables={"dbversion", "people"},
)


def test_smoke_test_fun_db_is_up(postgres_fun):
    """This ensures the db is created and configured according to its dumps files.

    """
Example #4
STATIC_ROW_IN = tuple(field.in_wrapper(field.example) for field in FIELDS)
EXPECTED_ROW_OUT = tuple(field.example for field in FIELDS)

db_rows = strategies.lists(
    strategies.tuples(*(field.strategy for field in FIELDS)))


def convert_lines(cur):
    return [
        tuple(field.out_converter(x) for x, field in zip(line, FIELDS))
        for line in cur
    ]


test_db = postgresql_fact("postgresql_proc", db_name="test-db2")


@pytest.fixture
def db_with_data(test_db, request):
    """Fixture to initialize a db with some data out of the "INIT_SQL above

    """
    db = BaseDb.connect(test_db.dsn)
    with db.cursor() as cur:
        psycopg2.extras.register_default_jsonb(cur)
        cur.execute(INIT_SQL)
    yield db
    db.conn.rollback()
    db.conn.close()
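

# Hypothetical usage sketch (not part of the original excerpt): insert the
# static example row and check that reading it back through convert_lines
# yields the expected Python-side values.  The table name "tmp_table" is an
# assumption; the real schema comes from INIT_SQL, which is not shown here.
def test_static_row_roundtrip(db_with_data):
    with db_with_data.cursor() as cur:
        placeholders = ", ".join(["%s"] * len(FIELDS))
        cur.execute(f"insert into tmp_table values ({placeholders})", STATIC_ROW_IN)
        cur.execute("select * from tmp_table")
        assert convert_lines(cur) == [EXPECTED_ROW_OUT]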
Example #5
import os

import pkg_resources
import pytest

from swh.core.db.pytest_plugin import postgresql_fact
import swh.scheduler
from swh.scheduler import get_scheduler

SQL_DIR = os.path.join(os.path.dirname(swh.scheduler.__file__), "sql")

# celery tasks for testing purposes; the tasks themselves should be
# in swh/scheduler/tests/tasks.py
TASK_NAMES = ["ping", "multiping", "add", "error", "echo"]

postgresql_scheduler = postgresql_fact(
    "postgresql_proc",
    db_name="scheduler",
    dump_files=os.path.join(SQL_DIR, "*.sql"),
    no_truncate_tables={"dbversion", "priority_ratio"},
)


@pytest.fixture
def swh_scheduler_config(request, postgresql_scheduler):
    return {
        "db": postgresql_scheduler.dsn,
    }


@pytest.fixture
def swh_scheduler(swh_scheduler_config):
    scheduler = get_scheduler("local", **swh_scheduler_config)
    for taskname in TASK_NAMES:
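        # The excerpt stops inside this loop; by analogy with the
        # indexer_scheduler fixture in example #2, the body presumably
        # registers one task type per name.  The sketch below is an
        # assumption, not the original code (timedelta would come from
        # datetime in the elided imports).
        scheduler.create_task_type(
            {
                "type": f"swh-test-{taskname}",
                "description": f"The {taskname} task for tests",
                "backend_name": f"swh.scheduler.tests.tasks.{taskname}",
                "default_interval": timedelta(days=1),
                "min_interval": timedelta(hours=6),
                "max_interval": timedelta(days=12),
            }
        )
    return scheduler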
Example #6
    dbname="storage",
    load=[
        partial(initialize_database_for_module, "storage",
                StorageDb.current_version)
    ],
)

vault_postgresql_proc = factories.postgresql_proc(
    dbname="vault",
    load=[
        partial(initialize_database_for_module, "vault",
                VaultBackend.current_version)
    ],
)

postgres_vault = postgresql_fact("vault_postgresql_proc")
postgres_storage = postgresql_fact(
    "storage_postgresql_proc",
    no_db_drop=True,  # keep the db for performance reasons
)


@pytest.fixture
def swh_vault_config(postgres_vault, postgres_storage,
                     tmp_path) -> Dict[str, Any]:
    tmp_path = str(tmp_path)
    return {
        "db": postgres_vault.dsn,
        "storage": {
            "cls": "postgresql",
            "db": postgres_storage.dsn,