Code example #1
File: fixtures.py Project: vitodb/decisionengine
import os

import pytest
from pytest_postgresql import factories

DE_DB_HOST = '127.0.0.1'
DE_DB_USER = '******'
DE_DB_PASS = None
DE_DB_NAME = 'decisionengine'
DE_SCHEMA = [
    os.path.dirname(os.path.abspath(__file__)) + "/../postgresql.sql",
]

# DE_DB_PORT assigned at random
PG_PROG = factories.postgresql_proc(user=DE_DB_USER,
                                    password=DE_DB_PASS,
                                    host=DE_DB_HOST,
                                    port=None)
DE_DB = factories.postgresql('PG_PROG', db_name=DE_DB_NAME, load=DE_SCHEMA)


@pytest.fixture
def mock_data_block():
    '''
    This fixture replaces the standard datablock implementation.

    The current DataBlock implementation does not own any data
    products but forwards them immediately to a backend datasource.
    The only implemented datasource requires Postgres, which is
    overkill when needing to test simple data-product communication
    between modules.

    This mock datablock class directly owns the data products, thus
    avoiding the need for a datasource backend.  It is anticipated
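# --- Illustrative sketch (added, not part of the excerpt above): the cut-off
# docstring describes a datablock stand-in that owns its data products directly
# instead of forwarding them to a Postgres-backed datasource.  The class and
# method names below are assumptions for illustration, not decisionengine's API.
class InMemoryDataBlockSketch:
    def __init__(self):
        self._products = {}              # data products held directly in memory

    def put(self, key, product, header=None):
        self._products[key] = product    # no datasource round-trip required

    def get(self, key, default=None):
        return self._products.get(key, default)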
Code example #2
File: conftest.py Project: Stranger6667/xdump
)
SELECT * FROM employees_cte
"""

ALL = {"postgres", "sqlite"}
DATABASE = os.environ["DB"]
IS_POSTGRES = DATABASE == "postgres"
IS_SQLITE = DATABASE == "sqlite"

# Travis has only PostgreSQL 9.6
if IS_POSTGRES and "TRAVIS" in os.environ:
    from pytest_postgresql import factories

    postgresql_proc = factories.postgresql_proc(
        executable="/usr/lib/postgresql/9.6/bin/pg_ctl")
    postgresql = factories.postgresql("postgresql_proc")


def is_search_path_fixed(connection):
    """Check if a security issue with `search_path` is fixed in the current PG version.

    Ref: CVE-2018-1058.
    """
    version = connection.server_version
    return (version >= 100003 or 90608 <= version < 100000
            or 90512 <= version < 90600 or 90417 <= version < 90500
            or 90322 <= version < 90400)
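# --- Illustrative sketch (added): one way a check like the above is typically
# consumed: skip search_path-sensitive tests on servers that predate the
# CVE-2018-1058 fix.  The fixture name and skip message are assumptions, not
# part of the xdump project.
@pytest.fixture
def patched_search_path_connection(postgresql):
    if not is_search_path_fixed(postgresql):
        pytest.skip("PostgreSQL server lacks the CVE-2018-1058 search_path fix")
    return postgresql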


@pytest.fixture
def dbname(tmpdir):
Code example #3
File: conftest.py Project: adixsyukri/jslcrud
import os
import pytest
import shutil
import mirakuru
from elasticsearch import Elasticsearch
from pytest_postgresql import factories

pgsql_proc = factories.postgresql_proc(executable='/usr/bin/pg_ctl',
                                       host='localhost',
                                       port=45678,
                                       user='******')
pgsql_db = factories.postgresql('pgsql_proc', db='jslcrud_tests')


@pytest.fixture(scope='session')
def es_proc(request):
    port = 9085
    home_dir = '/tmp/elasticsearch_%s' % port
    os.environ['ES_HOME'] = home_dir
    command = [
        os.environ['ELASTICSEARCH_EXECUTABLE'], '-p',
        '/tmp/elasticsearch.%s.pid' % port, '-E',
        'http.port=%s' % port, '-E',
        'default.path.logs=/tmp/elasticsearch_%s_logs' % port, '-E',
        'cluster.name=elasticsearch_cluster_%s' % port, '-E',
        "network.publish_host='127.0.0.1'", '-E', 'index.store.type=mmapfs'
    ]
    es_proc = mirakuru.HTTPExecutor(command,
                                    shell=True,
                                    url='http://127.0.0.1:%s' % port)
    es_proc.start()
Code example #4
                    f.write(chunk)
        # extract the tarball
        t = tarfile.open(tarball, "r")
        t.extractall(path=cache_dir)
        # server_root should be created by extraction
        assert os.path.isdir(server_root)

    return server_root


_php_ini = os.path.join(os.path.dirname(__file__), "../../misc/php.ini")
mw_nginx_proc = factories.nginx_php_proc(
    "mw_server_root", php_fpm_params="--php-ini {}".format(_php_ini))

# direct connection to MediaWiki's database
mwpg_conn = postgresql("postgresql_proc", db_name=_mw_db_name)


class MediaWikiFixtureInstance:
    def __init__(self, mw_nginx_proc, postgresql_proc):
        self._mw_nginx_proc = mw_nginx_proc
        self._postgresql_proc = postgresql_proc

        # trivial aliases, usable also in tests
        self.hostname = mw_nginx_proc.host
        self.port = mw_nginx_proc.port

        # always write the config to reflect its possible updates
        self._init_local_settings()

        # init the database and users
Code example #5
    # linux
    return find_pg_ctl(f'/usr/lib/postgresql/{ver}/')


TEST_SQL_DIR = os.path.dirname(os.path.abspath(__file__)) + '/test_sql/'

# pylint:disable=invalid-name
postgresql95 = factories.postgresql_proc(create_version(ver='9.5'), port=None)
postgresql96 = factories.postgresql_proc(create_version(ver='9.6'), port=None)
postgresql10 = factories.postgresql_proc(create_version(ver='10'), port=None)
postgresql11 = factories.postgresql_proc(create_version(ver='11'), port=None)
postgresql12 = factories.postgresql_proc(create_version(ver='12'), port=None)
postgresql13 = factories.postgresql_proc(create_version(ver='13'), port=None)

postgresql_proc2 = factories.postgresql_proc(port=9876)
postgres10 = factories.postgresql('postgresql10')
postgresql2 = factories.postgresql('postgresql_proc2', db_name='test-db')
postgresql_load_1 = factories.postgresql('postgresql_proc2',
                                         db_name='test-db',
                                         load=[
                                             TEST_SQL_DIR + 'test.sql',
                                         ])
postgresql_load_2 = factories.postgresql(
    'postgresql_proc2',
    db_name='test-db',
    load=[TEST_SQL_DIR + 'test.sql', TEST_SQL_DIR + 'test2.sql'])

postgresql_rand_proc = factories.postgresql_proc(port=None)
postgresql_rand = factories.postgresql('postgresql_rand_proc')

postgresql_max_conns_proc = factories.postgresql_proc(postgres_options='-N 11')
Code example #6
def test_postgresql_proc(request, postgres):
    """Test different postgresql versions."""
    postgresql_proc = request.getfixturevalue(postgres)
    assert postgresql_proc.running() is True


def test_main_postgres(postgresql):
    """Check main postgresql fixture."""
    cur = postgresql.cursor()
    cur.execute(query)
    postgresql.commit()
    cur.close()


postgresql_proc2 = factories.postgresql_proc(port=9876)
postgresql2 = factories.postgresql('postgresql_proc2')


def test_two_postgreses(postgresql, postgresql2):
    """Check two postgresql fixtures on one test."""
    cur = postgresql.cursor()
    cur.execute(query)
    postgresql.commit()
    cur.close()

    cur = postgresql2.cursor()
    cur.execute(query)
    postgresql2.commit()
    cur.close()

Code example #7
"""Template database tests."""
import pytest

from pytest_postgresql.factories import postgresql, postgresql_proc
from pytest_postgresql.compat import connection
from tests.loader import load_database

postgresql_proc_with_template = postgresql_proc(
    port=21987,
    dbname="stories_templated",
    load=[load_database],
)

postgresql_template = postgresql(
    "postgresql_proc_with_template",
    dbname="stories_templated",
)


@pytest.mark.parametrize("_", range(5))
def test_template_database(postgresql_template: connection, _: int) -> None:
    """Check that the database struture gets recreated out of a template."""
    with postgresql_template.cursor() as cur:
        cur.execute("SELECT * FROM stories")
        res = cur.fetchall()
        assert len(res) == 4
        cur.execute("TRUNCATE stories")
        cur.execute("SELECT * FROM stories")
        res = cur.fetchall()
        assert len(res) == 0
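# --- Illustrative sketch (added): roughly what a callable loader such as
# tests.loader.load_database might look like.  pytest-postgresql passes database
# connection parameters to `load=` callables (the exact keywords can vary by
# version, hence **kwargs), and the rows below are assumptions chosen only to
# satisfy the 4-row assertion in the test above.
import psycopg


def load_database_sketch(**connection_kwargs):
    with psycopg.connect(**connection_kwargs) as conn:      # commits on clean exit
        with conn.cursor() as cur:
            cur.execute("CREATE TABLE stories (id serial PRIMARY KEY, name varchar)")
            cur.execute(
                "INSERT INTO stories (name) "
                "VALUES ('Silmarillion'), ('The Hobbit'), ('Dune'), ('Foundation')"
            )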
Code example #8
"""Tests main conftest file."""
import os

from pytest_postgresql import factories

pytest_plugins = ["pytester"]
POSTGRESQL_VERSION = os.environ.get("POSTGRES", "13")


TEST_SQL_DIR = os.path.dirname(os.path.abspath(__file__)) + "/test_sql/"

postgresql_proc2 = factories.postgresql_proc(port=None)
postgresql2 = factories.postgresql("postgresql_proc2", dbname="test-db")
postgresql_load_1 = factories.postgresql(
    "postgresql_proc2",
    dbname="test-load-db",
    load=[
        TEST_SQL_DIR + "test.sql",
    ],
)
postgresql_load_2 = factories.postgresql(
    "postgresql_proc2",
    dbname="test-load-moredb",
    load=[TEST_SQL_DIR + "test.sql", TEST_SQL_DIR + "test2.sql"],
)
Code example #9
from swh.scheduler import get_scheduler
from swh.scheduler.backend import SchedulerBackend

# celery tasks for testing purpose; tasks themselves should be
# in swh/scheduler/tests/tasks.py
TASK_NAMES = ["ping", "multiping", "add", "error", "echo"]

scheduler_postgresql_proc = factories.postgresql_proc(load=[
    partial(
        initialize_database_for_module,
        modname="scheduler",
        version=SchedulerBackend.current_version,
    )
], )

postgresql_scheduler = factories.postgresql("scheduler_postgresql_proc")


@pytest.fixture
def swh_scheduler_config(request, postgresql_scheduler):
    return {
        "db": postgresql_scheduler.dsn,
    }


@pytest.fixture
def swh_scheduler(swh_scheduler_config):
    scheduler = get_scheduler("postgresql", **swh_scheduler_config)
    for taskname in TASK_NAMES:
        scheduler.create_task_type({
            "type":
Code example #10
File: conftest.py Project: PHMark/ports-automation
            "owner": "test",
            "start_date": datetime.datetime(2020, 1, 1)
        },
        schedule_interval="@daily",
    )


@pytest.fixture(autouse=True)
def ports_collection():
    client = mongomock.MongoClient()
    collection = client['unece_test']['ports']
    collection.insert_one({
        'countryName': 'Philippines',
        'portName': 'Aleran/Ozamis',
        'unlocode': 'PH ALE',
        'coordinates': '4234N 00135E'
    })
    return collection


postgresql_proc = postgresql_noproc(
    host='postgresqldb_test',
    port=5432,
    user=os.environ.get('POSTGRES_USER'),
    password=os.environ.get('POSTGRES_PASSWORD'))

postgresql = postgresql(
    'postgresql_proc',
    load=[os.path.join(COMMON_BASE_DIR, 'db', 'postgres-init.sql')],
    db_name='unece_test')
Code example #11
import subprocess

import pytest
import pytest_postgresql
from pytest_postgresql import factories

from rdbms_subsetter.subsetter import Db

try:
    subprocess.check_output('command -v pg_ctl', shell=True)
    PG_CTL_MISSING = False  # sorry for the double-negative, but it's convenient later
except subprocess.CalledProcessError:
    PG_CTL_MISSING = True

postgresql_dest_proc = factories.postgresql_proc()
postgresql_dest = factories.postgresql('postgresql_dest_proc')


class DummyArgs(object):
    logarithmic = False
    fraction = 0.25
    force_rows = {}
    children = 25
    config = {}
    tables = []
    schema = []
    exclude_tables = []
    full_tables = []
    buffer = 1000

Code example #12
import pandas as pd
from pytest_postgresql import factories
from postgres_pandas import db_utils
import pytest


pginst = factories.postgresql_proc('/usr/local/bin/pg_ctl')
dbinst = factories.postgresql('pginst', 'test_db')


@pytest.fixture
def dsn(pginst):
    return {'host': pginst.host,
            'port': pginst.port,
            'user': pginst.user}


@pytest.fixture(autouse=True)
def use_test_db(monkeypatch, dsn):

    def mock_config():
        return {**dsn, 'dbname': 'test_db'}

    monkeypatch.setattr(db_utils, 'get_db_config', mock_config)


@pytest.fixture
def test_data():
    df = pd.DataFrame({'plant_name': ['p1', 'p2', 'p3'],
                       'ph_6_code': ['0815', '0815', '0815'],
                       'date': ['2019-01-01', '2019-01-01', '2019-01-01'],
Code example #13
        ),
        possible_exception=psycopg.OperationalError,
    )

    with pytest.raises(psycopg.OperationalError):
        psycopg.connect(
            dbname=postgres_with_password.user,
            user=postgres_with_password.user,
            password="******",
            host=postgres_with_password.host,
            port=postgres_with_password.port,
        )


postgresql_max_conns_proc = postgresql_proc(postgres_options="-N 42")
postgres_max_conns = postgresql("postgresql_max_conns_proc")


def test_postgres_options(postgres_max_conns: connection) -> None:
    """Check that max connections (-N 42) is honored."""
    cur = postgres_max_conns.cursor()
    cur.execute("SHOW max_connections")
    assert cur.fetchone() == ("42", )


postgres_isolation_level = postgresql(
    "postgresql_proc", isolation_level=psycopg.IsolationLevel.SERIALIZABLE)


def test_custom_isolation_level(postgres_isolation_level: connection) -> None:
    """Check that a client fixture with a custom isolation level works."""
Code example #14
File: fixtures.py Project: shreyb/decisionengine
import os

import pytest
from pytest_postgresql import factories

DE_DB_HOST = "127.0.0.1"
DE_DB_USER = "******"
DE_DB_PASS = None
DE_DB_NAME = "decisionengine"
DE_SCHEMA = [
    os.path.dirname(os.path.abspath(__file__)) + "/../postgresql.sql",
]

# DE_DB_PORT assigned at random
PG_PROG = factories.postgresql_proc(user=DE_DB_USER,
                                    password=DE_DB_PASS,
                                    host=DE_DB_HOST,
                                    port=None)
DE_DB = factories.postgresql("PG_PROG", dbname=DE_DB_NAME, load=DE_SCHEMA)


@pytest.fixture
def mock_data_block():
    """
    This fixture replaces the standard datablock implementation.

    The current DataBlock implementation does not own any data
    products but forwards them immediately to a backend datasource.
    The only implemented datasource requires Postgres, which is
    overkill when needing to test simple data-product communication
    between modules.

    This mock datablock class directly owns the data products, thus
    avoiding the need for a datasource backend.  It is anticipated
Code example #15
import pytest
from pytest_postgresql import factories

from pgantomizer.anonymize import (InvalidAnonymizationSchemaError,
                                   MissingAnonymizationRuleError,
                                   load_anonymize_remove,
                                   load_db_to_new_instance)
from pgantomizer.dump import dump_db
from pgantomizer.dump import main as dump_main
from pgantomizer.anonymize import main as anonymize_main

from .asserts import assert_db_anonymized, assert_db_empty

anonymized_proc = factories.postgresql_proc(port='8765', logsdir='/tmp')
anonymized = factories.postgresql('anonymized_proc')

DUMP_PATH = 'test_dump.sql'
SCHEMA_PATH = 'example_schema.yaml'
ORIGINAL_DB_ARGS = {
    'password': '',
    'dbname': 'tests',
    'user': '******',
    'host': '127.0.0.1',
    'port': '9876'
}
ANONYMIZED_DB_ARGS = {**ORIGINAL_DB_ARGS, **{'port': '8765'}}
DUMP_DB_ARGS = [
    ORIGINAL_DB_ARGS[arg] for arg in ('dbname', 'user', 'host', 'port')
]
Code example #16
File: postgresql.py Project: lahwaacz/wiki-scripts
#! /usr/bin/env python3

import pytest
from pytest_postgresql import factories
import sqlalchemy

pg_executable = "/usr/bin/pg_ctl"
db_name = "wiki_scripts"

# postgresql process fixture
postgresql_proc = factories.postgresql_proc(logs_prefix="pytest-", executable=pg_executable)
# fixture holding an instance of a psycopg2 connection
postgresql = factories.postgresql("postgresql_proc", db=db_name)

@pytest.fixture(scope="function")
def pg_engine(postgresql):
    return sqlalchemy.create_engine("postgresql+psycopg2://", poolclass=sqlalchemy.pool.StaticPool, creator=lambda: postgresql)

__all__ = ("postgresql_proc", "postgresql", "pg_engine")
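# --- Illustrative usage sketch (added, not part of wiki-scripts): StaticPool plus
# a creator callable ties the SQLAlchemy engine to the single psycopg2 connection
# owned by the `postgresql` fixture, so ORM calls and raw cursors share one
# session.  The test below is hypothetical.
def test_pg_engine_roundtrip(pg_engine):
    with pg_engine.connect() as conn:
        assert conn.execute(sqlalchemy.text("SELECT 1")).scalar() == 1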
Code example #17
from pytest_rabbitmq.factories import rabbitmq_proc
import pytest
import pika
import os
import shutil
import mirakuru
from elasticsearch import Elasticsearch
from pytest_postgresql import factories
import time

pgsql_proc = factories.postgresql_proc(executable='/usr/bin/pg_ctl',
                                       host='localhost',
                                       port=45678,
                                       user='******')
pgsql_db = factories.postgresql('pgsql_proc', db='morp_tests')

rabbitmq_pika_proc = rabbitmq_proc(
    server='/usr/lib/rabbitmq/bin/rabbitmq-server', port=34567)


@pytest.fixture
def pika_connection_channel(rabbitmq_pika_proc):
    connection = pika.BlockingConnection(
        pika.ConnectionParameters(host='localhost',
                                  port=34567,
                                  credentials=pika.PlainCredentials(
                                      'guest', 'guest')))
    channel = connection.channel()
    return [connection, channel]
Code example #18
    parser.addoption('--postgresql-options',
                     action='store',
                     dest='postgresql_options',
                     help=_help_options)

    parser.addoption('--postgresql-startparams',
                     action='store',
                     dest='postgresql_startparams',
                     help=_help_startparams)

    parser.addoption('--postgresql-logsprefix',
                     action='store',
                     dest='postgresql_logsprefix',
                     help=_help_logsprefix)

    parser.addoption('--postgresql-unixsocketdir',
                     action='store',
                     dest='postgresql_unixsocketdir',
                     help=_help_unixsocketdir)

    parser.addoption('--postgresql-dbname',
                     action='store',
                     dest='postgresql_dbname',
                     help=_help_dbname)


postgresql_proc = factories.postgresql_proc()
postgresql_nooproc = factories.postgresql_noproc()
postgresql = factories.postgresql('postgresql_proc')
Code example #19
File: test_pg_types.py Project: 18F/rdbms-subsetter
import subprocess

import pytest
import pytest_postgresql
from pytest_postgresql import factories

from rdbms_subsetter.subsetter import Db

try:
    subprocess.check_output('command -v pg_ctl', shell=True)
    PG_CTL_MISSING = False  # sorry for the double-negative, but it's convenient later
except subprocess.CalledProcessError:
    PG_CTL_MISSING = True

postgresql_dest_proc = factories.postgresql_proc()
postgresql_dest = factories.postgresql('postgresql_dest_proc')


class DummyArgs(object):
    logarithmic = False
    fraction = 0.25
    force_rows = {}
    children = 25
    config = {}
    tables = []
    schema = []
    exclude_tables = []
    full_tables = []
    buffer = 1000

Code example #20
    Initializes contracts to be used with the `app` fixture
    """
    return init_contracts(app.config)


@pytest.fixture
def celery_app_contracts(celery_app):  # pylint: disable=redefined-outer-name
    """
    Initializes contracts to be used with the `celery_app` fixture
    """
    # Pull singletons off of the default first task
    task = celery_app.celery.tasks["update_discovery_provider"]
    return init_contracts(task.shared_config)


postgresql_my = factories.postgresql("postgresql_nooproc")


# Returns Postgres DB session, and configures
# SQLAlchemy to use said connection.
# This fixture is primarily used by the
# `postgres_mock_db` fixture, and probably shouldn't
# be consumed directly by a test.
#
# More or less follows steps here:
# https://medium.com/@geoffreykoh/fun-with-fixtures-for-database-applications-8253eaf1a6d
# pylint: disable=W0621
@pytest.fixture(scope="function")
def setup_database(postgresql_my):
    def dbcreator():
        return postgresql_my.cursor().connection
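    # --- Illustrative continuation sketch (added): the excerpt stops mid-fixture.
    # Following the pattern the comments describe (an engine bound to the fixture's
    # connection via a creator callable), the remainder typically looks roughly
    # like this; names and session handling are assumptions, and it presumes
    # `from sqlalchemy import create_engine` and
    # `from sqlalchemy.orm import sessionmaker` earlier in the file.
    engine = create_engine("postgresql+psycopg2://", creator=dbcreator)
    Session = sessionmaker(bind=engine)
    session = Session()
    yield session            # hand the session to dependent fixtures and tests
    session.close()
    engine.dispose()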
Code example #21
from swh.core.db.pytest_plugin import initialize_database_for_module
from swh.storage import get_storage
from swh.storage.postgresql.storage import Storage as StorageDatastore
from swh.storage.tests.storage_data import StorageData

environ["LC_ALL"] = "C.UTF-8"

swh_storage_postgresql_proc = factories.postgresql_proc(load=[
    partial(
        initialize_database_for_module,
        modname="storage",
        version=StorageDatastore.current_version,
    )
], )

swh_storage_postgresql = factories.postgresql("swh_storage_postgresql_proc", )


@pytest.fixture
def swh_storage_backend_config(swh_storage_postgresql):
    """Basic pg storage configuration with no journal collaborator
    (to avoid pulling optional dependency on clients of this fixture)

    """
    yield {
        "cls": "postgresql",
        "db": swh_storage_postgresql.dsn,
        "objstorage": {
            "cls": "memory"
        },
        "check_config": {
Code example #22
    "SQLALCHEMY_PG_WITH_SCHEMA",
    "SQLALCHEMY_TEMPFILE_SQLITE",
    "datasource",
    "mock_data_block",
]

# DE_DB_PORT assigned at random
PG_PROG = factories.postgresql_proc(
    user="******",
    password=None,
    host="127.0.0.1",
    port=None,
    postgres_options="-N 1000",
)
PG_DE_DB_WITHOUT_SCHEMA = factories.postgresql(
    "PG_PROG",
    dbname="decisionengine",
)

if (sys.version_info.major == 3 and sys.version_info.minor > 6
        and platform.python_implementation() == "CPython"):
    # sqlite on EL7 is too old for efficient testing
    # sqlite on pypy is unnecessary and really slow
    DATABASES_TO_TEST = ("SQLALCHEMY_PG_WITH_SCHEMA",
                         "SQLALCHEMY_TEMPFILE_SQLITE")
else:
    DATABASES_TO_TEST = ("SQLALCHEMY_PG_WITH_SCHEMA", )


@pytest.fixture()
def SQLALCHEMY_PG_WITH_SCHEMA(PG_DE_DB_WITHOUT_SCHEMA):
    """
Code example #23
#! /usr/bin/env python3

import pytest
from pytest_postgresql import factories
import sqlalchemy

pg_executable = "/usr/bin/pg_ctl"
db_name = "wiki_scripts"

# postgresql process fixture
postgresql_proc = factories.postgresql_proc(logs_prefix="pytest-", executable=pg_executable)
# fixture holding an instance of a psycopg2 connection
postgresql = factories.postgresql("postgresql_proc", db_name=db_name)

@pytest.fixture(scope="function")
def pg_engine(postgresql):
    return sqlalchemy.create_engine("postgresql+psycopg2://", poolclass=sqlalchemy.pool.StaticPool, creator=lambda: postgresql)

__all__ = ("postgresql_proc", "postgresql", "pg_engine")
Code example #24
import pytest
from pytest_postgresql import factories as psql_factories
from pytest_redis import factories as redis_factories
from sanauth.core import sanauth

postgresql_my_proc = psql_factories.postgresql_proc()
postgresql_my = psql_factories.postgresql('postgresql_my_proc')
redis_my_proc = redis_factories.redis_proc()
redis_my = redis_factories.redisdb('redis_my_proc')


@pytest.fixture
def db_settings(request):
    return dict(
        user='******',
        password='',
        host='127.0.0.1',
        port=5433,
    ), dict(address=('127.0.0.1', 6380), minsize=1, maxsize=10)


@pytest.fixture
def app(db_settings, postgresql_my_proc, postgresql_my, redis_my_proc,
        redis_my):
    pg_settings, redis_config = db_settings
    sanauth_app = sanauth(pg_cfg=pg_settings, r_cfg=redis_config)
    yield sanauth_app


@pytest.fixture
def app_fixture(loop, app, test_server):
Code example #25
File: fixtures.py Project: goodenou/decisionengine
    "PG_DE_DB_WITHOUT_SCHEMA",
    "SQLALCHEMY_PG_WITH_SCHEMA",
    "SQLALCHEMY_IN_MEMORY_SQLITE",
    "datasource",
    "mock_data_block",
]

# DE_DB_PORT assigned at random
PG_PROG = factories.postgresql_proc(user="******",
                                    password=None,
                                    host="127.0.0.1",
                                    port=None)
PG_DE_DB_WITH_SCHEMA = factories.postgresql(
    "PG_PROG",
    dbname="decisionengine",
    load=[
        os.path.dirname(os.path.abspath(__file__)) + "/../postgresql.sql",
    ],
)
PG_DE_DB_WITHOUT_SCHEMA = factories.postgresql(
    "PG_PROG",
    dbname="decisionengine",
)

DATABASES_TO_TEST = ("PG_DE_DB_WITH_SCHEMA", )
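# --- Illustrative sketch (added): how a tuple of fixture names like
# DATABASES_TO_TEST is commonly consumed: a parametrized fixture resolves each
# entry with request.getfixturevalue.  This is an assumption for illustration,
# not necessarily how decisionengine defines its `datasource` fixture.
@pytest.fixture(params=DATABASES_TO_TEST)
def datasource_sketch(request):
    return request.getfixturevalue(request.param)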


@pytest.fixture
def SQLALCHEMY_PG_WITH_SCHEMA(request):
    """
    Get a blank database from pytest_postgresql.
Code example #26
File: conftest.py Project: morpframework/morpcc
# Copyright (c) 2019 Mohd Izhar Firdaus Bin Ismail
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT

from pytest_postgresql import factories

pgsql_proc = factories.postgresql_proc(
    executable="/usr/bin/pg_ctl",
    host="localhost",
    port=45678,
    user="******",
)
pgsql_db = factories.postgresql("pgsql_proc", db_name="morpcc_tests")
pgsql_db_warehouse = factories.postgresql("pgsql_proc",
                                          db_name="morpcc_warehouse")
pgsql_db_cache = factories.postgresql("pgsql_proc", db_name="morpcc_cache")
Code example #27
    track_factory_contract = web3.eth.contract(
        address=track_factory_address, abi=abi_values["TrackFactory"]["abi"])

    return {
        "abi_values": abi_values,
        "registry_address": registry_address,
        "user_factory_address": user_factory_address,
        "user_factory_contract": user_factory_contract,
        "track_factory_address": track_factory_address,
        "track_factory_contract": track_factory_contract,
        "web3": web3,
    }


postgresql_my = factories.postgresql('postgresql_nooproc')


# Returns Postgres DB session, and configures
# SQLAlchemy to use said connection.
# This fixture is primarily used by the
# `postgres_mock_db` fixture, and probably shouldn't
# be consumed directly by a test.
#
# More or less follows steps here:
# https://medium.com/@geoffreykoh/fun-with-fixtures-for-database-applications-8253eaf1a6d
# pylint: disable=W0621
@pytest.fixture(scope='function')
def setup_database(postgresql_my):
    def dbcreator():
        return postgresql_my.cursor().connection
Code example #28
"""Tests main conftest file."""
from pytest_postgresql import factories

PG_CTL = '/usr/lib/postgresql/{ver}/bin/pg_ctl'

# pylint:disable=invalid-name
postgresql92 = factories.postgresql_proc(PG_CTL.format(ver='9.2'), port=None)
postgresql93 = factories.postgresql_proc(PG_CTL.format(ver='9.3'), port=None)
postgresql94 = factories.postgresql_proc(PG_CTL.format(ver='9.4'), port=None)
postgresql95 = factories.postgresql_proc(PG_CTL.format(ver='9.5'), port=None)
postgresql96 = factories.postgresql_proc(PG_CTL.format(ver='9.6'), port=None)
postgresql10 = factories.postgresql_proc(PG_CTL.format(ver='10'), port=None)
postgresql11 = factories.postgresql_proc(PG_CTL.format(ver='11'), port=None)

postgresql_proc2 = factories.postgresql_proc(port=9876)
postgresql2 = factories.postgresql('postgresql_proc2', db_name='test-db')

postgresql_rand_proc = factories.postgresql_proc(port=None)
postgresql_rand = factories.postgresql('postgresql_rand_proc')
# pylint:enable=invalid-name
Code example #29
import pytest
import datetime
from django.urls import reverse
from django.conf import settings
from pytest_postgresql import factories
postgresql_external = factories.postgresql('postgresql_nooproc')


# @pytest.fixture(scope='session')
@pytest.fixture
def api_client():
    from rest_framework.test import APIClient
    return APIClient()

@pytest.mark.django_db
def test_part_list(api_client):
    url = reverse('parturation_list')
    res = api_client.get(url)
    assert res.status_code == 200
Code example #30
    def connection(self):
        """Simple connection instead of TMDB's pool."""
        return self._connection

    def add_test_unit(self, source, target, slang=None, tlang=None):
        lang_config = tmdb.lang_to_config('en')
        po = pofile()
        u = po.addsourceunit(source)
        u.target = target
        self.add_store(po, slang or 'en', tlang or 'af')
        # avoid cached language lists:
        self._available_langs = {}


pg_server = factories.postgresql_proc(port=None, logsdir='/tmp')
pg_connection = factories.postgresql('pg_server', db_name='amagama_test')


@pytest.fixture
def amagama(pg_connection):
    """Returns an amagama app already connected to a database."""
    app = amagama_server_factory()
    app.testing = True
    app.tmdb = TempTMDB(connection=pg_connection, app=app)
    app.tmdb.init_db(['en'])
    from flask_caching import Cache
    cache = Cache(app,
                  config={
                      'CACHE_TYPE': 'simple',
                      'CACHE_THRESHOLD': 100000,
                  })
Code example #31
import os
import pytest
import pickle
from google.cloud import bigquery
from pytest_postgresql import factories

from coinblas.bitcoin import Chain, Block, Tx, Address
from pygraphblas import *

postgresql_my_proc = factories.postgresql_noproc(host="db",
                                                 user="******",
                                                 password="******")

postgresql = factories.postgresql("postgresql_my_proc",
                                  db_name="test",
                                  load=["/docker-entrypoint-initdb.d/01.sql"])

@pytest.fixture
def btc(postgresql, mocker, datadir, tmp_path):
    q = mocker.patch("google.cloud.bigquery.Client.query", autospec=True)
    q.return_value = pickle.load(
        open(datadir / "initialize_blocks.pickle", "rb"))
    c = Chain(
        "host=db dbname=test user=postgres password=postgres",
        tmp_path / "blocks",
        pool_size=1,
    )
    c.initialize_blocks()
Code example #32
import json
import os
import shutil
from pathlib import Path

import dodoo_init.cache as cache
import pytest
from psycopg2.extensions import make_dsn
from pytest_postgresql import factories

postgres = factories.postgresql_proc()
pg_conn_main = factories.postgresql("postgres")
pg_conn_test = factories.postgresql("postgres", db_name="test")


@pytest.fixture(autouse=True, scope="package")
def environ(tmp_path_factory) -> None:
    secrets = tmp_path_factory.mktemp("secrets")
    admin_passwd = secrets / "admin"
    admin_passwd.write_text("admin-pwd")
    admin_passwd.chmod(0o500)
    session_encryption_key = secrets / "sessionencryptionkey"
    session_encryption_key.write_text("secret-key")
    session_encryption_key.chmod(0o500)
    smtpuser = secrets / "smtpuser"
    smtpuser.write_text("smtp-user")
    smtpuser.chmod(0o500)
    smtppwd = secrets / "smtppwd"
    smtppwd.write_text("smtp-pwd")
    smtppwd.chmod(0o500)
    os.environ.update(ODOOADMINPASSWORD_FILE=str(admin_passwd))
Code example #33
File: mediawiki.py Project: lahwaacz/wiki-scripts
                for chunk in r.iter_content(chunk_size=4096):
                    f.write(chunk)
        # extract the tarball
        t = tarfile.open(tarball, "r")
        t.extractall(path=cache_dir)
        # server_root should be created by extraction
        assert os.path.isdir(server_root)

    return server_root

_php_ini = os.path.join(os.path.dirname(__file__), "../../misc/php.ini")
mw_nginx_proc = factories.nginx_php_proc("mw_server_root",
                                         php_fpm_params="--php-ini {}".format(_php_ini))

# direct connection to MediaWiki's database
mwpg_conn = postgresql("postgresql_proc", db=_mw_db_name)

class MediaWikiFixtureInstance:
    def __init__(self, mw_nginx_proc, postgresql_proc):
        self._mw_nginx_proc = mw_nginx_proc
        self._postgresql_proc = postgresql_proc

        # trivial aliases, usable also in tests
        self.hostname = mw_nginx_proc.host
        self.port = mw_nginx_proc.port

        # always write the config to reflect its possible updates
        self._init_local_settings()

        # init the database and users
        self._init_mw_database()