Example #1
    def setUpClass(cls):
        cls.Session = sessionmaker()
        cls.engine = create_engine(
            'postgresql+psycopg2://ostf:ostf@localhost/ostf'
        )

        cls.Session.configure(bind=cls.engine, autocommit=True)
        session = cls.Session()
        discovery(path=TEST_PATH, session=session)

        cls.ext_id = 'fuel_plugin.tests.functional.dummy_tests.'
        cls.expected = {
            'cluster': {
                'id': 1,
                'deployment_tags': set(['ha', 'rhel', 'nova_network'])
            },
            'test_sets': ['general_test',
                          'stopped_test', 'ha_deployment_test'],
            'tests': [cls.ext_id + test for test in [
                ('deployment_types_tests.ha_deployment_test.'
                 'HATest.test_ha_depl'),
                ('deployment_types_tests.ha_deployment_test.'
                 'HATest.test_ha_rhel_depl'),
                'general_test.Dummy_test.test_fast_pass',
                'general_test.Dummy_test.test_long_pass',
                'general_test.Dummy_test.test_fast_fail',
                'general_test.Dummy_test.test_fast_error',
                'general_test.Dummy_test.test_fail_with_step',
                'stopped_test.dummy_tests_stopped.test_really_long',
                'stopped_test.dummy_tests_stopped.test_one_no_so_long',
                'stopped_test.dummy_tests_stopped.test_not_long_at_all'
            ]]
        }
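
A class-level fixture like this usually has a matching tearDownClass that releases the resources it created; a minimal sketch of what that could look like (the body is an assumption, not taken from the project):

    @classmethod
    def tearDownClass(cls):
        # drop the connection pool opened in setUpClass; the discovered
        # test data stays in the database for the test methods to query
        cls.engine.dispose()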
def main():

    cli_args = cli_config.parse_cli()

    config = {
        "server": {"host": cli_args.host, "port": cli_args.port},
        "dbpath": cli_args.dbpath,
        "debug": cli_args.debug,
        "debug_tests": cli_args.debug_tests,
    }

    logger.setup(log_file=cli_args.log_file)

    log = logging.getLogger(__name__)

    root = app.setup_app(config=config)

    if getattr(cli_args, "after_init_hook"):
        return nailgun_hooks.after_initialization_environment_hook()
    nose_discovery.discovery(cli_args.debug_tests)
    host, port = pecan.conf.server.host, pecan.conf.server.port
    srv = pywsgi.WSGIServer((host, int(port)), root)

    log.info("Starting server in PID %s", os.getpid())
    log.info("serving on http://%s:%s", host, port)

    try:
        signal.signal(signal.SIGCHLD, signal.SIG_IGN)
        srv.serve_forever()
    except KeyboardInterrupt:
        pass
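
When the adapter module is executed directly rather than through a console entry point, a main() like this is normally invoked from a module-level guard; a minimal sketch (assumed, not shown above):

if __name__ == '__main__':
    main()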
Example #3
    def setUp(self):
        # orm session wrapping
        config.init_config([])
        self.connection = self.engine.connect()
        self.trans = self.connection.begin()

        self.Session.configure(
            bind=self.connection
        )
        self.session = self.Session()

        test_sets = self.session.query(models.TestSet).all()

        # needed if unit tests are started in conjunction with integration tests
        if not test_sets:
            discovery(path=TEST_PATH, session=self.session)

        mixins.cache_test_repository(self.session)

        # mocking
        # request mocking
        self.request_mock = MagicMock()

        self.request_patcher = patch(
            'fuel_plugin.ostf_adapter.wsgi.controllers.request',
            self.request_mock
        )
        self.request_patcher.start()

        # engine.get_session mocking
        self.request_mock.session = self.session
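
A setUp that opens a connection, begins a transaction and starts a patcher normally has a mirror-image tearDown; a minimal sketch of what such a clean-up could look like (assumed, not copied from the project):

    def tearDown(self):
        # stop the request patching and discard everything written
        # inside the per-test transaction
        self.request_patcher.stop()
        self.session.close()
        self.trans.rollback()
        self.connection.close()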
    def test_get_proper_description(self):
        expected = {
            'title': 'fake empty test',
            'name': ('fuel_plugin.tests.functional.'
                     'dummy_tests.deployment_types_tests.'
                     'ha_deployment_test.HATest.test_ha_rhel_depl'),
            'duration': '0sec',
            'test_set_id': 'ha_deployment_test',
            'cluster_id': self.fixtures['ha_deployment_test']['cluster_id'],
            'deployment_tags': ['ha', 'rhel']

        }

        nose_discovery.discovery(
            path='fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test',
            deployment_info=self.fixtures['ha_deployment_test']
        )

        test = self.session.query(models.Test)\
            .filter_by(name=expected['name'])\
            .filter_by(cluster_id=expected['cluster_id'])\
            .filter_by(test_set_id=expected['test_set_id'])\
            .one()

        self.assertTrue(
            all(
                [
                    expected[key] == getattr(test, key)
                    for key in expected.keys()
                ]
            )
        )
    def test_discovery_tests(self):
        expected = {
            'test_set_id': 'ha_deployment_test',
            'cluster_id': 1,
            'results_count': 2,
            'results_data': {
                'names': [
                    'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_rhel_depl',
                    'fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test.HATest.test_ha_depl'
                ]
            }
        }
        nose_discovery.discovery(
            path='fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test',
            deployment_info=self.fixtures['ha_deployment_test']
        )

        tests = self.session.query(models.Test)\
            .filter_by(test_set_id=expected['test_set_id'])\
            .filter_by(cluster_id=expected['cluster_id'])\
            .all()

        self.assertTrue(len(tests) == expected['results_count'])

        for test in tests:
            self.assertTrue(test.name in expected['results_data']['names'])
            self.assertTrue(
                set(test.deployment_tags)
                .issubset(self.fixtures['ha_deployment_test']['deployment_tags'])
            )
Example #6
    def test_discovery(self, engine):
        engine.get_session().merge.side_effect = \
            lambda *args, **kwargs: self.fixtures_iter.next()

        nose_discovery.discovery(
            path='fuel_plugin/tests/functional/dummy_tests')

        self.assertEqual(engine.get_session().merge.call_count, 2)
    def test_get_proper_description(self, engine):
        '''
        Checks that the docstrings retrieved from tests
        are correct (in this case -- complete).

        The check relies on data that is stored deep
        inside the mock object passed to the test method.
        '''
        # etalon data: the expected attributes (derived from the docstring)
        # of one test from the test set
        expected = {
            'title': 'fast pass test',
            'name':
                'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
            'duration': '1sec',
            'description':
                '        This is a simple always pass test\n        '
        }

        # mock the behaviour of the afterImport hook of DiscoveryPlugin
        # so that another hook -- addSuccess -- can process data properly
        engine.get_session().merge = lambda arg: arg

        # the following code mocks the addSuccess hook of
        # DiscoveryPlugin, which in turn allows us to
        # capture the test objects being processed
        engine.get_session()\
              .query()\
              .filter_by()\
              .update\
              .return_value = None

        nose_discovery.discovery(
            path='fuel_plugin/tests/functional/dummy_tests'
        )

        # now we can inspect the captured test objects (not test_sets)
        # and compare them against the etalon data
        test_obj_to_compare = [
            call[0][0] for call in engine.get_session().add.call_args_list
            if (
                isinstance(call[0][0], models.Test)
                and
                call[0][0].name.rsplit('.')[-1] == 'test_fast_pass'
            )
        ][0]

        self.assertTrue(
            all(
                [
                    expected[key] == test_obj_to_compare.__dict__[key]
                    for key in expected.keys()
                ]
            )
        )
    def test_discovery(self, engine):
        engine.get_session().merge.side_effect = \
            lambda *args, **kwargs: self.fixtures_iter.next()

        nose_discovery.discovery(
            path='fuel_plugin/tests/functional/dummy_tests'
        )

        self.assertEqual(engine.get_session().merge.call_count, 2)
Example #9
def main():

    settings = Ostf_Config()

    cli_args = cli_config.parse_cli()

    config = {
        'server': {
            'host': settings.adapter.server_host or cli_args.host,
            'port': settings.adapter.server_port or cli_args.port
        },
        'dbpath': settings.adapter.dbpath or cli_args.dbpath,
        'debug': cli_args.debug,
        'debug_tests': cli_args.debug_tests,
        'lock_dir': settings.adapter.lock_dir or cli_args.lock_dir,
        'nailgun': {
            'host': settings.adapter.nailgun_host or cli_args.nailgun_host,
            'port': settings.adapter.nailgun_port or cli_args.nailgun_port
        }
    }
    logger.setup(log_file=(
        settings.adapter.log_file or cli_args.log_file))

    log = logging.getLogger(__name__)

    root = app.setup_app(config=config)

    if settings.adapter.after_init_hook or\
            getattr(cli_args, 'after_init_hook'):
        return nailgun_hooks.after_initialization_environment_hook()

    with engine.contexted_session(pecan.conf.dbpath) as session:
        # clean up expired data (if any) in the db
        mixins.clean_db(session)

        # discover testsets and their tests
        CORE_PATH = pecan.conf.debug_tests if \
            pecan.conf.get('debug_tests') else 'fuel_health'

        nose_discovery.discovery(path=CORE_PATH, session=session)

        # cache needed data from test repository
        mixins.cache_test_repository(session)

    host, port = pecan.conf.server.host, pecan.conf.server.port
    srv = pywsgi.WSGIServer((host, int(port)), root)

    log.info('Starting server in PID %s', os.getpid())
    log.info("serving on http://%s:%s", host, port)

    try:
        signal.signal(signal.SIGCHLD, signal.SIG_IGN)
        srv.serve_forever()
    except KeyboardInterrupt:
        pass
Example #10
    def test_get_proper_description(self, engine):
        '''
        Checks that the docstrings retrieved from tests
        are correct (in this case -- complete).

        The check relies on data that is stored deep
        inside the mock object passed to the test method.
        '''
        # etalon data: the expected attributes (derived from the docstring)
        # of one test from the test set
        expected = {
            'title': 'fast pass test',
            'name':
            'fuel_plugin.tests.functional.dummy_tests.general_test.Dummy_test.test_fast_pass',
            'duration': '1sec',
            'description':
            '        This is a simple always pass test\n        '
        }

        # mock the behaviour of the afterImport hook of DiscoveryPlugin
        # so that another hook -- addSuccess -- can process data properly
        engine.get_session().merge = lambda arg: arg

        # the following code mocks the addSuccess hook of
        # DiscoveryPlugin, which in turn allows us to
        # capture the test objects being processed
        engine.get_session()\
              .query()\
              .filter_by()\
              .update\
              .return_value = None

        nose_discovery.discovery(
            path='fuel_plugin/tests/functional/dummy_tests')

        # now we can inspect the captured test objects (not test_sets)
        # and compare them against the etalon data
        test_obj_to_compare = [
            call[0][0] for call in engine.get_session().add.call_args_list
            if (isinstance(call[0][0], models.Test)
                and call[0][0].name.rsplit('.')[-1] == 'test_fast_pass')
        ][0]

        self.assertTrue(
            all([
                expected[key] == test_obj_to_compare.__dict__[key]
                for key in expected.keys()
            ]))
Example #11
def main():

    cli_args = cli_config.parse_cli()

    config = {
        'server': {
            'host': cli_args.host,
            'port': cli_args.port
        },
        'dbpath': cli_args.dbpath,
        'debug': cli_args.debug,
        'debug_tests': cli_args.debug_tests,
        'nailgun': {
            'host': cli_args.nailgun_host,
            'port': cli_args.nailgun_port
        }
    }

    logger.setup(log_file=cli_args.log_file)

    log = logging.getLogger(__name__)

    root = app.setup_app(config=config)

    if getattr(cli_args, 'after_init_hook'):
        return nailgun_hooks.after_initialization_environment_hook()

    # clean up expired data (if any) in the db
    clean_db()

    #discover testsets and their tests
    CORE_PATH = pecan.conf.debug_tests if \
        pecan.conf.get('debug_tests') else 'fuel_health'

    discovery(path=CORE_PATH, session=engine.get_session())

    #cache needed data from test repository
    cache_data()

    host, port = pecan.conf.server.host, pecan.conf.server.port
    srv = pywsgi.WSGIServer((host, int(port)), root)

    log.info('Starting server in PID %s', os.getpid())
    log.info("serving on http://%s:%s", host, port)

    try:
        signal.signal(signal.SIGCHLD, signal.SIG_IGN)
        srv.serve_forever()
    except KeyboardInterrupt:
        pass
Example #12
    def setUpClass(cls):
        session_mock = Mock()
        session_mock.begin = TransactionBeginMock

        nose_discovery.discovery(path=TEST_PATH, session=session_mock)

        cls.test_sets = [
            el[0][0] for el in session_mock.merge.call_args_list
            if isinstance(el[0][0], models.TestSet)
        ]

        cls.tests = [
            el[0][0] for el in session_mock.merge.call_args_list
            if isinstance(el[0][0], models.Test)
        ]
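
The snippet assigns a TransactionBeginMock class to session_mock.begin but does not show its definition; a minimal sketch of what such a context-manager stub could look like (the body here is an assumption):

class TransactionBeginMock(object):
    """A stand-in for session.begin(subtransactions=True) used as a context manager."""

    def __init__(self, subtransactions=True):
        self.subtransactions = subtransactions

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # do not swallow exceptions raised inside the block
        return False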
Example #13
def main():

    ostf_config.init_config(sys.argv[1:])

    logger.setup(log_file=CONF.adapter.log_file)

    log = logging.getLogger(__name__)
    log.info('Start app configuration')

    root = app.setup_app({})

    # completely clean db (drop tables, constraints and types)
    # plus drop alembic_version table (needed if, for example, head migration
    # script was changed after applying)
    if CONF.clear_db:
        return nailgun_hooks.clear_db(CONF.adapter.dbpath)

    if CONF.after_initialization_environment_hook:
        return nailgun_hooks.after_initialization_environment_hook()

    with engine.contexted_session(CONF.adapter.dbpath) as session:
        # clean up expired data (if any) in the db
        mixins.delete_db_data(session)
        log.info('Cleaned up database.')
        # discover testsets and their tests
        CORE_PATH = CONF.debug_tests or 'fuel_health'

        log.info('Performing nose discovery with {0}.'.format(CORE_PATH))

        nose_discovery.discovery(path=CORE_PATH, session=session)

        # cache needed data from test repository
        mixins.cache_test_repository(session)

    log.info('Discovery is completed')
    host, port = CONF.adapter.server_host, CONF.adapter.server_port
    srv = pywsgi.WSGIServer((host, port), root)

    log.info('Starting server in PID %s', os.getpid())
    log.info("serving on http://%s:%s", host, port)

    try:
        signal.signal(signal.SIGCHLD, signal.SIG_IGN)
        srv.serve_forever()
    except KeyboardInterrupt:
        pass
Example #14
def main():

    ostf_config.init_config(sys.argv[1:])

    logger.setup(log_file=CONF.adapter.log_file)

    log = logging.getLogger(__name__)
    log.info('Start app configuration')

    root = app.setup_app({})

    # completely clean db (drop tables, constraints and types)
    # plus drop alembic_version table (needed if, for example, head migration
    # script was changed after applying)
    if CONF.clear_db:
        return nailgun_hooks.clear_db(CONF.adapter.dbpath)

    if CONF.after_initialization_environment_hook:
        return nailgun_hooks.after_initialization_environment_hook()

    with engine.contexted_session(CONF.adapter.dbpath) as session:
        # clean up expired data (if any) in the db
        mixins.delete_db_data(session)
        log.info('Cleaned up database.')
        # discover testsets and their tests
        CORE_PATH = CONF.debug_tests or 'fuel_health'

        log.info('Performing nose discovery with {0}.'.format(CORE_PATH))

        nose_discovery.discovery(path=CORE_PATH, session=session)

        # cache needed data from test repository
        mixins.cache_test_repository(session)

    log.info('Discovery is completed')
    host, port = CONF.adapter.server_host, CONF.adapter.server_port
    srv = pywsgi.WSGIServer((host, port), root)

    log.info('Starting server in PID %s', os.getpid())
    log.info("serving on http://%s:%s", host, port)

    try:
        signal.signal(signal.SIGCHLD, signal.SIG_IGN)
        srv.serve_forever()
    except KeyboardInterrupt:
        pass
    def setUpClass(cls):
        session_mock = Mock()
        session_mock.begin = TransactionBeginMock

        nose_discovery.discovery(
            path=TEST_PATH,
            session=session_mock
        )

        cls.test_sets = [
            el[0][0] for el in session_mock.merge.call_args_list
            if isinstance(el[0][0], models.TestSet)
        ]

        cls.tests = [
            el[0][0] for el in session_mock.merge.call_args_list
            if isinstance(el[0][0], models.Test)
        ]
Example #16
def discovery_check(cluster):
    # get the needed information from nailgun via a series of
    # requests to the nailgun api. At this point we need
    # info about the deployment type (ha, non-ha), the type of network
    # management (nova-network, quantum) and the attributes that
    # indicate whether savanna/murano is installed
    cluster_deployment_args = _get_cluster_depl_tags(cluster)

    cluster_data = {
        'cluster_id': cluster,
        'deployment_tags': cluster_deployment_args
    }

    session = engine.get_session()
    with session.begin(subtransactions=True):
        test_sets = session.query(models.TestSet)\
            .filter_by(cluster_id=cluster)\
            .all()

        if not test_sets:
            nose_discovery.discovery(
                path=CORE_PATH,
                deployment_info=cluster_data
            )
        else:
            for testset in test_sets:
                deployment_tags = testset.deployment_tags
                deployment_tags = deployment_tags if deployment_tags else []
                if not set(deployment_tags).issubset(
                    cluster_data['deployment_tags']
                ):
                    # perform cascade deletion of the testset
                    # together with its tests and the testruns
                    # (and their tests) that correspond to it
                    session.query(models.TestSet)\
                        .filter_by(id=testset.id)\
                        .filter_by(cluster_id=testset.cluster_id)\
                        .delete()

            # perform the final discovery of tests
            nose_discovery.discovery(
                path=CORE_PATH,
                deployment_info=cluster_data
            )
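
The deletion rule above hinges on a simple set test; an illustration with assumed tag values:

# a test set tagged ['ha'] survives for a cluster tagged ['ha', 'rhel'],
# while one tagged ['multinode'] fails the check, is deleted and then re-discovered
set(['ha']).issubset(set(['ha', 'rhel']))          # True  -> keep
set(['multinode']).issubset(set(['ha', 'rhel']))   # False -> delete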
Example #17
File: base.py  Project: loles/fuel-ostf
    def setUp(self):
        #orm session wrapping
        self.connection = self.engine.connect()
        self.trans = self.connection.begin()

        self.Session.configure(
            bind=self.connection
        )
        self.session = self.Session(autocommit=True)

        with self.session.begin(subtransactions=True):
            test_sets = self.session.query(models.TestSet).all()

        # needed if unit tests are started in conjunction with integration tests
        if not test_sets:
            discovery(path=TEST_PATH, session=self.session)

        mixins.cache_test_repository(self.session)

        #mocking
        #request mocking
        self.request_mock = MagicMock()

        self.request_patcher = patch(
            'fuel_plugin.ostf_adapter.wsgi.controllers.request',
            self.request_mock
        )
        self.request_patcher.start()

        #pecan conf mocking
        self.pecan_conf_mock = MagicMock()
        self.pecan_conf_mock.nailgun.host = '127.0.0.1'
        self.pecan_conf_mock.nailgun.port = 8888

        self.pecan_conf_patcher = patch(
            'fuel_plugin.ostf_adapter.mixins.conf',
            self.pecan_conf_mock
        )
        self.pecan_conf_patcher.start()

        #engine.get_session mocking
        self.request_mock.session = self.session
    def test_discovery_testsets(self):
        expected = {
            'id': 'ha_deployment_test',
            'cluster_id': 1,
            'deployment_tags': ['ha']
        }

        nose_discovery.discovery(
            path='fuel_plugin.tests.functional.dummy_tests.deployment_types_tests.ha_deployment_test',
            deployment_info=self.fixtures['ha_deployment_test']
        )

        test_set = self.session.query(models.TestSet)\
            .filter_by(id=expected['id'])\
            .filter_by(cluster_id=expected['cluster_id'])\
            .one()

        self.assertEqual(
            test_set.deployment_tags,
            expected['deployment_tags']
        )
    def setUp(self):
        super(TestTestRunsController, self).setUp()

        # test_runs depend on tests and test_sets data
        # in the database, so we must run the discovery function
        # in setUp in order to provide this data
        depl_info = {
            'cluster_id': 1,
            'deployment_tags': set([
                'ha',
                'rhel'
            ])
        }

        discovery(deployment_info=depl_info, path=TEST_PATH)

        self.testruns = [
            {
                'testset': 'ha_deployment_test',
                'metadata': {'cluster_id': 1}
            }
        ]

        self.controller = controllers.TestrunsController()
def discovery_mock(**kwargs):
    kwargs['path'] = TEST_PATH
    return discovery(**kwargs)
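
A wrapper like discovery_mock is typically installed with mock.patch so that production code calling discovery ends up scanning the dummy test path; a hypothetical usage sketch (the patch target is an assumption):

from mock import patch

with patch(
    'fuel_plugin.ostf_adapter.nose_plugin.nose_discovery.discovery',
    discovery_mock
):
    # any discovery performed here picks up tests from TEST_PATH
    pass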
Example #21
File: base.py  Project: openstack/fuel-ostf
    def discovery(self):
        """Discover dummy tests used for testing."""
        mixins.TEST_REPOSITORY = []
        nose_discovery.discovery(path=TEST_PATH, session=self.session)
        mixins.cache_test_repository(self.session)
        self.session.flush()
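
A helper like this is usually invoked from each test's setUp so the cached test repository is rebuilt before every run; an assumed usage sketch:

    def setUp(self):
        super(BaseWSGITest, self).setUp()  # the base class name is an assumption
        self.discovery()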
Example #22
    def discovery(self):
        """Discover dummy tests used for testing."""
        mixins.TEST_REPOSITORY = []
        nose_discovery.discovery(path=TEST_PATH, session=self.session)
        mixins.cache_test_repository(self.session)
        self.session.flush()