Beispiel #1
0
def test_for(inp):
    """Return a class decorator wiring a test case to ``inp`` ("module.function").

    The named module is imported once and cached in the module-level
    ``_modules`` dict, then the function is looked up on it.  If the module
    cannot be imported, the function is missing, or ``not_implemented(f)``
    is true, a ``unittest.skip`` decorator is returned instead so the whole
    test case is skipped with an explanatory reason.
    """
    mod, fn = inp.split('.')

    # module: import once and cache
    if mod not in _modules:
        try:
            _modules[mod] = __import__(mod)
        except ImportError:
            return unittest.skip("No such module '%s'" % mod)

    # function must exist on the module
    f = getattr(_modules[mod], fn, None)
    if f is None:
        return unittest.skip("No such method '%s.%s'" % (mod, fn))

    # make sure function is implemented
    if not_implemented(f):
        return unittest.skip("'%s.%s' is not implemented" % (mod, fn))

    # everything checks out: return the real decorator
    def deco(cls):
        # expose the imported module in the test case's own module namespace
        module = sys.modules[cls.__module__]
        setattr(module, mod, _modules[mod])
        cls.__testing__ = inp
        return cls

    return deco
Beispiel #2
0
def test_for(inp):
    """Create a test-case decorator for the dotted name ``inp``.

    The "module.function" target is resolved via the module-level
    ``_modules`` cache; when any part is missing (module not importable,
    attribute absent, or not implemented) a ``unittest.skip`` decorator
    carrying the reason is returned in place of the real decorator.
    """
    module_name, func_name = inp.split('.')

    # Lazily import and cache the target module.
    if not module_name in _modules:
        try:
            _modules[module_name] = __import__(module_name)
        except ImportError:
            return unittest.skip("No such module '%s'" % module_name)

    # Resolve the target function on the cached module.
    target = getattr(_modules[module_name], func_name, None)
    if target is None:
        return unittest.skip("No such method '%s.%s'" % (module_name, func_name))

    # Stub implementations are skipped rather than run.
    if not_implemented(target):
        return unittest.skip("'%s.%s' is not implemented" % (module_name, func_name))

    def deco(cls):
        # Inject the imported module into the test class's home module.
        owning_module = sys.modules[cls.__module__]
        setattr(owning_module, module_name, _modules[module_name])
        cls.__testing__ = inp
        return cls

    return deco
Beispiel #3
0
    def setUpClass(cls):
        # Resolve the MongoEngine middleware once for the whole test case.
        try:
            from web.db.me import MongoEngineMiddleware

        except ImportError:
            # NOTE(review): calling skip() here only builds a decorator and
            # discards it -- it does not abort setUpClass, so the assignment
            # below raises NameError when MongoEngine is missing.  Raising
            # unittest.SkipTest would be the usual fix; confirm intent.
            skip("MongoEngine not available; skipping MongoEngine tests.")

        # Cached on the class so every test can instantiate the middleware.
        cls.middleware = MongoEngineMiddleware
Beispiel #4
0
    def setUpClass(cls):
        # Resolve the PyMongo-backed middleware once for the whole test case.
        try:
            from web.db.mongo import MongoMiddleware

        except ImportError:
            # NOTE(review): skip() here returns a decorator that is thrown
            # away -- setUpClass continues and the assignment below raises
            # NameError when PyMongo is missing.  Raising unittest.SkipTest
            # would abort properly; confirm intent.
            skip("PyMongo not available; skipping MongoDB tests.")

        # Cached on the class so every test can instantiate the middleware.
        cls.middleware = MongoMiddleware
Beispiel #5
0
 def test_positive_update_my_account_4(self):
     """
     @Feature: My Account - Positive Update
     @Test: Update Language in My Account
     @Steps:
     1. Update current User with all different Language options
     @Assert: Current User is updated
     @Status: Manual
     """
     # Bug fix: the original bare unittest.skip(NOT_IMPLEMENTED) call built
     # a decorator and discarded it, so this manual stub silently passed.
     # self.skipTest() raises SkipTest and records a real skip.
     self.skipTest(NOT_IMPLEMENTED)
Beispiel #6
0
 def test_positive_update_my_account_3(self):
     """
     @Feature: My Account - Positive Update
     @Test: Update Email Address in My Account
     @Steps:
     1. Update current User with all variations of Email Address in [1]
     @Assert: Current User is updated
     @Status: Manual
     """
     # Bug fix: unittest.skip(...) as a bare expression skips nothing; the
     # returned decorator was discarded and the test passed silently.
     self.skipTest(NOT_IMPLEMENTED)
Beispiel #7
0
 def test_positive_update_my_account_5(self):
     """
     @Feature: My Account - Positive Update
     @Test: Update Password/Verify fields in My Account
     @Steps:
     1. Update Password/Verify fields with all variations in [1]
     @Assert: User is updated
     @Status: Manual
     """
     # Bug fix: the bare unittest.skip(NOT_IMPLEMENTED) call did not skip
     # anything -- its decorator result was dropped.  skipTest() raises.
     self.skipTest(NOT_IMPLEMENTED)
Beispiel #8
0
 def test_negative_update_my_account_2(self):
     """
     @Feature: My Account - Negative Update
     @Test: Update My Account with invalid Surname
     @Steps:
     1. Update Current user with all variations of Surname in [2]
     @Assert: User is not updated. Appropriate error shown.
     @Status: Manual
     """
     # Bug fix: a bare unittest.skip(...) expression discards the decorator
     # it returns; use skipTest() so the stub is reported as skipped.
     self.skipTest(NOT_IMPLEMENTED)
Beispiel #9
0
 def test_negative_update_my_account_5(self):
     """
     @Feature: My Account - Negative Update
     @Test: Update My Account with non-matching values in Password and
     verify fields
     @Steps:
     1. Update Current user with non-matching values in Password and verify
     fields
     @Assert: User is not updated. Appropriate error shown.
     @Status: Manual
     """
     # Bug fix: the original unittest.skip(NOT_IMPLEMENTED) call was a
     # no-op (decorator discarded); skipTest() actually skips.
     self.skipTest(NOT_IMPLEMENTED)
Beispiel #10
0
def skip_open_issue(type, bug_id):
    """ Skips the test if there is an open issue for that test.

    @param type: The issue tracker type (e.g., Launchpad, GitHub).
    @param bug_id: ID of the issue for the test.
    """
    # NOTE: ``type`` shadows the builtin but is kept for caller compatibility.
    tracker = type.lower()
    if tracker == 'launchpad' and LaunchpadTracker.is_bug_open(bug_id=bug_id):
        return skip('Launchpad Bug #{0}'.format(bug_id))
    if tracker == 'github' and GitHubTracker.is_bug_open(issue_id=bug_id):
        return skip('GitHub Issue #{0}'.format(bug_id))
    # No open issue found: identity decorator leaves the test untouched.
    return lambda obj: obj
Beispiel #11
0
def skip_open_issue(type, bug_id):
    """ Skips the test if there is an open issue for that test.

    @param type: The issue tracker type (e.g., Launchpad, GitHub).
    @param bug_id: ID of the issue for the test.
    """
    # ``type`` shadows the builtin of the same name; renaming it would break
    # callers that pass it as a keyword argument.
    if type.lower() == 'launchpad' and LaunchpadTracker.is_bug_open(
            bug_id=bug_id):
        return skip('Launchpad Bug #{0}'.format(bug_id))
    elif type.lower() == 'github' and GitHubTracker.is_bug_open(
            issue_id=bug_id):
        return skip('GitHub Issue #{0}'.format(bug_id))
    # No tracker matched or the bug is closed: identity decorator.
    return lambda obj: obj
Beispiel #12
0
 def test_negative_update_my_account_6(self):
     """
     @Feature: My Account - Negative Update
     @Test: [UI ONLY] Attempt to update all info in My Accounts page and
     Cancel
     @Steps:
     1. Update Current user with valid Firstname, Surname, Email Address,
     Language, Password/Verify fields
     2. Click Cancel
     @Assert: User is not updated.
     @Status: Manual
     """
     # Bug fix: unittest.skip(...) called as a bare statement skips nothing;
     # skipTest() raises SkipTest and marks the stub as skipped.
     self.skipTest(NOT_IMPLEMENTED)
Beispiel #13
0
    def test_datetime(self):
        """If DATETIME is set to a tuple, it should be used to override LOCALE
        """
        from datetime import datetime
        from sys import platform
        dt = datetime(2015, 9, 13)
        # make a deep copy of page_kawgs
        page_kwargs = dict([(key, self.page_kwargs[key]) for key in
                            self.page_kwargs])
        for key in page_kwargs:
            if not isinstance(page_kwargs[key], dict):
                break
            # copy nested dicts one level deep so mutations stay local
            page_kwargs[key] = dict([(subkey, page_kwargs[key][subkey])
                                     for subkey in page_kwargs[key]])
        # set its date to dt
        page_kwargs['metadata']['date'] = dt
        page = Page(**page_kwargs)

        # ``unicode`` marks this as Python 2 code; the default locale date
        # is formatted with DEFAULT_DATE_FORMAT from _DEFAULT_CONFIG.
        self.assertEqual(page.locale_date,
            unicode(dt.strftime(_DEFAULT_CONFIG['DEFAULT_DATE_FORMAT']),
                                'utf-8'))

        page_kwargs['settings'] = dict([(x, _DEFAULT_CONFIG[x]) for x in
                                        _DEFAULT_CONFIG])

        # I doubt this can work on all platforms ...
        if platform == "win32":
            locale = 'jpn'
        else:
            locale = 'ja_JP.utf8'
        page_kwargs['settings']['DATE_FORMATS'] = {'jp': (locale,
                                                          '%Y-%m-%d(%a)')}
        page_kwargs['metadata']['lang'] = 'jp'

        import locale as locale_module
        try:
            page = Page(**page_kwargs)
            self.assertEqual(page.locale_date, u'2015-09-13(\u65e5)')
            # above is unicode in Japanese: 2015-09-13(“ú)
        except locale_module.Error:
            # The constructor of ``Page`` will try to set the locale to
            # ``ja_JP.utf8``. But this attempt will failed when there is no
            # such locale in the system. You can see which locales there are
            # in your system with ``locale -a`` command.
            #
            # Until we find some other method to test this functionality, we
            # will simply skip this test.
            # NOTE(review): skip() here returns a decorator that is then
            # discarded, so the test is reported as *passed*, not skipped;
            # consider ``self.skipTest(...)`` instead.
            skip("There is no locale %s in this system." % locale)
Beispiel #14
0
def platform_skip(platform_list):
    """Skip the decorated test when the current platform is in ``platform_list``."""
    if platform in platform_list:
        return unittest2.skip("Test disabled in the current platform")

    def _noop(obj):
        # Current platform is not listed: leave the test object untouched.
        return obj

    return _noop
Beispiel #15
0
def all_drivers(testcase):
    """Decorator for test classes so that the tests are run against all drivers.
    """

    module = sys.modules[testcase.__module__]
    # (class-name postfix, test layer, driver constant) per supported driver.
    drivers = (
        ('Mechanize', MECHANIZE_TESTING, LIB_MECHANIZE),
        ('Requests', REQUESTS_TESTING, LIB_REQUESTS),
        ('Traversal', TRAVERSAL_TESTING, LIB_TRAVERSAL),
        ('TraversalIntegration', TRAVERSAL_INTEGRATION_TESTING, LIB_TRAVERSAL),
    )
    # The decorated class itself is marked abstract; only the generated
    # per-driver subclasses are collected and run.
    testcase._testbrowser_abstract_testclass = True

    for postfix, layer, constant in drivers:
        name = testcase.__name__ + postfix
        custom = {'layer': layer,
                  '__module__': testcase.__module__,
                  '_testbrowser_abstract_testclass': False}

        # One concrete subclass per driver, e.g. MyTestMechanize.
        subclass = type(name, (testcase,), custom)
        for attrname in dir(subclass):
            method = getattr(subclass, attrname, None)
            # ``im_func`` is Python 2 only: unbound methods expose the
            # underlying function there.
            func = getattr(method, 'im_func', None)
            # Methods tagged (by another decorator) as unsupported for this
            # driver get wrapped with skip() carrying the recorded reason.
            if constant in getattr(func, '_testbrowser_skip_driver', {}):
                reason = func._testbrowser_skip_driver[constant]
                setattr(subclass, attrname, skip(reason)(method))

        # Publish the generated subclass in the defining module.
        setattr(module, name, subclass)

    setattr(module, 'load_tests', load_tests)
    return testcase
Beispiel #16
0
    def add_test_methods(test_class):
        """Attach one generated test method per JSON suite case to ``test_class``.

        ``basedir``, ``tests_glob``, ``ignore_glob``, ``make_case`` and
        ``skip`` are captured from the enclosing factory's scope.
        """
        ignored = set(glob.iglob(ignore_glob))

        for filename in glob.iglob(os.path.join(basedir, tests_glob)):
            if filename in ignored:
                continue

            # e.g. ".../type.json" -> "type"; used in the method name below.
            validating, _ = os.path.splitext(os.path.basename(filename))

            with open(filename) as test_file:
                data = json.load(test_file)

                for case in data:
                    for test in case["tests"]:
                        a_test = make_case(
                            case["schema"],
                            test["data"],
                            test["valid"],
                        )

                        # Build an identifier-safe method name from the
                        # human-readable description.
                        test_name = "test_%s_%s" % (
                            validating,
                            re.sub(r"[\W ]+", "_", test["description"]),
                        )

                        # Python 2 requires byte-string method names.
                        if not PY3:
                            test_name = test_name.encode("utf-8")
                        a_test.__name__ = test_name

                        # ``skip`` is an optional per-case predicate from the
                        # enclosing scope.
                        if skip is not None and skip(case):
                            a_test = unittest.skip("Checker not present.")(
                                a_test
                            )

                        setattr(test_class, test_name, a_test)
Beispiel #17
0
def platform_skip(platform_list):
    """Return a decorator that skips tests on the platforms in ``platform_list``.

    ``platform`` is a module-level value (presumably ``sys.platform`` --
    confirm against the imports); when it is listed, a unittest2 skip
    decorator is returned, otherwise an identity decorator.
    """
    def _noop(obj):
        # Identity decorator for platforms that are not excluded.
        return obj

    if platform in platform_list:
        return unittest2.skip("Test disabled in the current platform")
    return _noop
Beispiel #18
0
def slow_test(unused=None):
    """Skip the decorated test when SKIP_SLOW_TESTS is set in the environment."""
    if 'SKIP_SLOW_TESTS' in os.environ:
        return unittest.skip(u'test is slow')

    def _id(obj):
        # Slow tests enabled: hand the test back unchanged.
        return obj

    return _id
Beispiel #19
0
    def add_test_methods(test_class):
        """Attach one generated test method per JSON suite case, then return the class.

        ``basedir``, ``tests_glob``, ``make_case`` and ``skip`` come from
        the enclosing factory's scope.
        """
        for filename in glob.iglob(os.path.join(basedir, tests_glob)):
            # e.g. ".../type.json" -> "type"; used in the method name below.
            validating, _ = os.path.splitext(os.path.basename(filename))

            with open(filename) as test_file:
                data = json.load(test_file)

                for case in data:
                    for test in case["tests"]:
                        a_test = make_case(
                            case["schema"],
                            test["data"],
                            test["valid"],
                        )

                        # Identifier-safe method name from the description.
                        test_name = "test_%s_%s" % (
                            validating,
                            re.sub(r"[\W ]+", "_", test["description"]),
                        )

                        # Python 2 requires byte-string method names.
                        if not PY3:
                            test_name = test_name.encode("utf-8")
                        a_test.__name__ = test_name

                        # Optional per-case skip predicate from the closure.
                        if skip is not None and skip(case):
                            a_test = unittest.skip("Checker not present.")(
                                a_test
                            )

                        setattr(test_class, test_name, a_test)

        # Returned so the helper can be used as a class decorator.
        return test_class
    def test_old_testresult_class(self):
        """Run a skipped class against an old-style TestResult; expect 0 warnings."""
        class Test(unittest2.TestCase):

            def testFoo(self):
                pass
        # skip() is applied by call rather than @-decorator syntax.
        Test = unittest2.skip('no reason')(Test)
        # 0 is presumably the expected warning count -- confirm against
        # assertOldResultWarning's definition.
        self.assertOldResultWarning(Test('testFoo'), 0)
Beispiel #21
0
def slowTest(obj):
	'''Decorator for slow tests

	Tests wrapped with this decorator are ignored when you run
	C{test.py --fast}. You can either wrap whole test classes::

		@tests.slowTest
		class MyTest(tests.TestCase):
			...

	or individual test functions::

		class MyTest(tests.TestCase):

			@tests.slowTest
			def testFoo(self):
				...

			def testBar(self):
				...
	'''
	# FAST_TEST is a module-level flag -- presumably set by the --fast
	# option of the test runner; confirm against the harness.
	if FAST_TEST:
		wrapper = skip('Slow test')
		return wrapper(obj)
	else:
		return obj
Beispiel #22
0
def skipIfSingleNode():
    """
    Skip a test if its a single node install.
    """
    # The second element of get_host_list() holds the non-master hosts;
    # an empty collection means a single-node install.
    other_hosts = get_host_list()[1]
    if len(other_hosts) == 0:
        return unittest.skip('requires multiple nodes')

    def _identity(o):
        return o

    return _identity
Beispiel #23
0
def create_backend_case(base, name, module="passlib.tests.test_drivers"):
    "create a test case for specific backend of a multi-backend handler"
    #get handler, figure out if backend should be tested
    handler = base.handler
    assert hasattr(handler, "backends"), "handler must support uh.HasManyBackends protocol"
    enable, reason = _enable_backend_case(handler, name)

    #UT1 doesn't support skipping whole test cases,
    #so we just return None.
    if not enable and ut_version < 2:
        return None

    #make classname match what it's stored under, to be tidy
    cname = name.title().replace("_","") + "_" + base.__name__.lstrip("_")

    #create subclass of 'base' which uses correct backend
    subcase = type(
        cname,
        (base,),
        dict(
            case_prefix = "%s (%s backend)" % (handler.name, name),
            backend = name,
            __module__=module,
        )
    )

    # under unittest2+ a disabled backend still yields a case, just skipped
    # with the reason reported by _enable_backend_case().
    if not enable:
        subcase = unittest.skip(reason)(subcase)

    return subcase
Beispiel #24
0
def stubbed(reason=None):
    """Skip a test due to non-implementation or some other reason.

    Defaults to the generic NOT_IMPLEMENTED reason when none is given.
    """
    return unittest2.skip(NOT_IMPLEMENTED if reason is None else reason)
Beispiel #25
0
 def decorator(func):
     # Skip ``func`` when unittest provides skip(); otherwise drop it.
     if hasattr(unittest, 'skip'):
         # If we don't have discovery, we probably don't skip, but we'll
         # try anyways...
         return unittest.skip('Discovery not supported.')(func)
     else:
         # NOTE(review): returning None replaces the decorated function
         # with None on old unittest versions -- confirm callers expect
         # the attribute to disappear rather than run.
         return None
Beispiel #26
0
def slowTest(obj):
    '''Decorator for slow tests

	Tests wrapped with this decorator are ignored when you run
	C{test.py --fast}. You can either wrap whole test classes::

		@tests.slowTest
		class MyTest(tests.TestCase):
			...

	or individual test functions::

		class MyTest(tests.TestCase):

			@tests.slowTest
			def testFoo(self):
				...

			def testBar(self):
				...
	'''
    # FAST_TEST is a module-level flag -- presumably toggled by a --fast
    # runner option; confirm with the surrounding test harness.
    if FAST_TEST:
        wrapper = skip('Slow test')
        return wrapper(obj)
    else:
        return obj
Beispiel #27
0
    def test_old_testresult_class(self):
        """Run a class-level skip against an old-style TestResult; expect 0 warnings."""
        class Test(unittest2.TestCase):
            def testFoo(self):
                pass

        # skip() applied by call rather than @-decorator syntax.
        Test = unittest2.skip('no reason')(Test)
        # 0 is presumably the expected warning count -- confirm against
        # assertOldResultWarning's definition.
        self.assertOldResultWarning(Test('testFoo'), 0)
Beispiel #28
0
    def init_plugin(self, config_content=None):
        """Build and start an AdvPlugin instance for a test.

        Uses ``config_content`` when given, otherwise falls back to the
        default plugin config file; skips the test when neither is available.
        """
        conf = None
        if config_content:
            conf = XmlConfigParser()
            conf.setXml(config_content)
        elif os.path.exists(default_plugin_file):
            conf = default_plugin_file
        else:
            # Bug fix: the original bare unittest.skip(...) call built a
            # decorator and threw it away, then continued with conf=None.
            # Raising SkipTest aborts the test as intended.
            raise unittest.SkipTest("cannot get default plugin config file at %s" % default_plugin_file)

        self.p = AdvPlugin(self.console, conf)
        self.conf = self.p.config
        self.log.setLevel(logging.DEBUG)
        self.log.info("============================= Adv plugin: loading config ============================")
        self.p.onLoadConfig()
        self.log.info("============================= Adv plugin: starting  =================================")
        self.p.onStartup()
def local_decorator_creator():
    """Return a decorator tagging tests as local-only.

    When the configured Cassandra host is not local the whole test is
    skipped; otherwise the decorator sets ``f.local = True`` so test
    selection can filter on it.
    """
    if not CASSANDRA_IP.startswith("127.0.0."):
        return unittest.skip('Tests only runs against local C*')

    def _id_and_mark(f):
        f.local = True
        # Bug fix: the original returned None here, silently replacing
        # every decorated test function with None.
        return f

    return _id_and_mark
Beispiel #30
0
def local_decorator_creator():
    """Return a decorator tagging tests as local-only.

    When the configured Cassandra host is not local the whole test is
    skipped; otherwise the decorator sets ``f.local = True`` so test
    selection can filter on it.
    """
    if not CASSANDRA_IP.startswith("127.0.0."):
        return unittest.skip('Tests only runs against local C*')

    def _id_and_mark(f):
        f.local = True
        # Bug fix: the original returned None, so every decorated test
        # function was replaced by None.
        return f

    return _id_and_mark
Beispiel #31
0
 def wrapper(func):
     # Replicate the same behavior as doing:
     #
     # @unittest2.skip(reason)
     # @pytest.mark.stubbed
     # def func(...):
     #     ...
     #
     # ``reason`` is captured from the enclosing decorator factory.
     return unittest2.skip(reason)(pytest.mark.stubbed(func))
Beispiel #32
0
 def newTestMethod(*args, **kwargs):
     """Run ``testMethod`` (closure) only when long tests are enabled."""
     if TestOptionParser.__long__ is None:
         # Bug fix: the implicit string-literal concatenation produced
         # "...@longTestdecorator." -- a space was missing.
         raise Exception("TestOptionParser must be used in order to use @longTest decorator.")
     if TestOptionParser.__long__:
         return testMethod(*args, **kwargs)
     else:
         msg = "Skipping long test: %s" % testMethod.__name__
         # Wrap in a skip decorator and call it so SkipTest is raised now.
         return unittest.skip(msg)(testMethod)(*args, **kwargs)
Beispiel #33
0
 def test_old_reader_name_mapping(self):
     """Test that requesting old reader names raises a warning."""
     from satpy.readers import configs_for_reader, OLD_READER_NAMES
     if not OLD_READER_NAMES:
         # Bug fix: ``return unittest.skip(...)`` only returned an unused
         # decorator, so the test passed silently; skipTest() records a
         # real skip.
         self.skipTest("Skipping deprecated reader tests because "
                       "no deprecated readers.")
     test_reader = sorted(OLD_READER_NAMES.keys())[0]
     self.assertRaises(ValueError, list, configs_for_reader(test_reader))
Beispiel #34
0
 def wrapper(func):
     # Replicate the same behaviour as doing:
     #
     # @unittest2.skip(reason)
     # @pytest.mark.stubbed
     # def func(...):
     #     ...
     #
     # ``reason`` comes from the enclosing decorator factory's scope.
     return unittest2.skip(reason)(pytest.mark.stubbed(func))
Beispiel #35
0
 def newTestMethod(*args, **kwargs):
   """Run ``testMethod`` (closure) only when long tests are enabled."""
   if TestOptionParser.__long__ is None:
     # Bug fix: the implicit string concatenation rendered as
     # "...@longTestdecorator." -- a trailing space was missing.
     raise Exception('TestOptionParser must be used in order to use @longTest '
                     'decorator.')
   if TestOptionParser.__long__:
     return testMethod(*args, **kwargs)
   else:
     msg = 'Skipping long test: %s' % testMethod.__name__
     # Wrap in a skip decorator and call it so SkipTest is raised now.
     return unittest.skip(msg)(testMethod)(*args, **kwargs)
Beispiel #36
0
def skip_unless_module(module):
    """Skip the decorated test unless ``module`` can be imported."""
    try:
        __import__(module)
    except ImportError:
        # Import failed: replace the test with a skip carrying the reason.
        return skip("Module %s could not be loaded, dependent test skipped." % module)
    # Module is available: identity decorator.
    return lambda func: func
Beispiel #37
0
    def test_areas_rasterio(self):
        """Test all areas have valid projections with rasterio."""
        try:
            from rasterio.crs import CRS
        except ImportError:
            # Bug fix: ``return unittest.skip(...)`` returned an unused
            # decorator and let the test pass silently; skipTest() raises
            # SkipTest and records a real skip.
            self.skipTest("Missing rasterio dependency")
        if not hasattr(CRS, 'from_dict'):
            self.skipTest("RasterIO 1.0+ required")

        from pyresample import parse_area_file
        from satpy.resample import get_area_file
        all_areas = parse_area_file(get_area_file())
        for area_obj in all_areas:
            if getattr(area_obj, 'optimize_projection', False):
                # the PROJ.4 is known to not be valid on this DynamicAreaDef
                continue
            # Every remaining area's PROJ dict must build a valid CRS.
            proj_dict = area_obj.proj_dict
            _ = CRS.from_dict(proj_dict)
Beispiel #38
0
 def newTestMethod(*args, **kwargs):
   """Run ``testMethod`` (closure) only when --long testing is enabled."""
   if TestOptionParser.__long__ is None:
     # Bug fix: the implicit string concatenation produced
     # "...@longTestdecorator." -- a trailing space was missing.
     raise Exception('TestOptionParser must be used in order to use @longTest '
                     'decorator.')
   if TestOptionParser.__long__:
     return testMethod(*args, **kwargs)
   else:
     msg = 'Skipping long test: {0!s}'.format(testMethod.__name__)
     # Wrap in a skip decorator and call it so SkipTest is raised now.
     return unittest.skip(msg)(testMethod)(*args, **kwargs)
Beispiel #39
0
def skip_if_binaries_missing(binaries, check_all=False):
    """Skip the decorated test when required binaries are not on PATH.

    With ``check_all`` every binary in ``binaries`` must be present;
    otherwise any single one suffices.
    """
    # While there's no new release of salt-testing
    def _id(obj):
        return obj

    if sys.version_info < (2, 7):
        from unittest2 import skip  # pylint: disable=F0401
    else:
        from unittest import skip  # pylint: disable=E0611

    if check_all:
        for binary in binaries:
            if salt.utils.which(binary) is None:
                # Bug fix: the {0!r} placeholder was never formatted, so the
                # skip reason showed the literal template, not the binary.
                return skip('The {0!r} binary was not found'.format(binary))
    elif salt.utils.which_bin(binaries) is None:
        return skip('None of the following binaries was found: {0}'.format(
            ', '.join(binaries)))
    return _id
Beispiel #40
0
def skipIfNoStandby():
    """
    A decorator which skips a unit test if a standby
    is not already present in the cluster.
    """
    # The first element of get_host_list() is the standby host (or None).
    standby = get_host_list()[0]
    if standby is None:
        return unittest.skip('requires standby')

    def _identity(o):
        return o

    return _identity
Beispiel #41
0
def skip_unless_module(module):
    """Return an identity decorator if ``module`` imports, else a skip decorator."""
    # Probe the import; any ImportError means the dependency is absent.
    available = True
    try:
        __import__(module)
    except ImportError:
        available = False
    if available:
        return lambda func: func
    return skip("Module %s could not be loaded, dependent test skipped." % module)
Beispiel #42
0
    def init_plugin(self, config_content=None):
        """Build and start an AdvPlugin instance for a test.

        Uses ``config_content`` when given, then the module-level default
        plugin content; skips the test when neither is available.  The
        plugin's ``save`` is mocked so tests cannot write config to disk.
        """
        conf = None
        if config_content:
            conf = XmlConfigParser()
            conf.setXml(config_content)
        elif default_plugin_content:
            conf = XmlConfigParser()
            conf.setXml(default_plugin_content)
        else:
            # Bug fix: the original bare unittest.skip(...) call discarded
            # the decorator it returned and continued with conf=None.
            # Raising SkipTest aborts the test as intended.
            raise unittest.SkipTest("cannot get default plugin config file at %s" % default_plugin_file)

        self.p = AdvPlugin(self.console, conf)
        self.p.save = Mock()
        self.conf = self.p.config
        self.log.setLevel(logging.DEBUG)
        self.log.info("============================= Adv plugin: loading config ============================")
        self.p.onLoadConfig()
        self.log.info("============================= Adv plugin: starting  =================================")
        self.p.onStartup()
Beispiel #43
0
        class SkipWithLt(unittest.TestCase):
            # Probe case: runTest fails unless unittest.skip wraps it below.
            def runTest(self):
                self.fail("version < 2.7")

            # ``self`` below is the *enclosing* test method's instance,
            # captured from the surrounding scope -- this class body executes
            # inside that method.
            try:
                runTest = unittest.skip("2.7 <= version")(runTest)
            except AttributeError:
                # unittest.skip is absent before Python 2.7; record that.
                self.has_skip = False
            else:
                self.has_skip = True
def testcase_generator(basepath, mainfunc, files, options):
    """Yield (function, args...) tuples covering every source file and format."""
    fontpath = get_fontpath(basepath)
    # Pass -f only when the font file actually exists.
    if os.path.exists(fontpath):
        options = options + ['-f', fontpath]

    for source in files:
        yield generate, mainfunc, 'svg', source, options

        # PNG output needs Pillow plus the font; otherwise yield skipped
        # stand-ins so the run still reports the missing cases.
        if supported_pil() and os.path.exists(fontpath):
            yield generate, mainfunc, 'png', source, options
            yield generate, mainfunc, 'png', source, options + ['--antialias']
        else:
            yield unittest.skip("Pillow is not available")(generate)
            yield unittest.skip("Pillow is not available")(generate)

        # PDF output needs reportlab plus the font.
        if supported_pdf() and os.path.exists(fontpath):
            yield generate, mainfunc, 'pdf', source, options
        else:
            yield unittest.skip("reportlab is not available")(generate)
    def init_plugin(self, config_content=None):
        """Build and start an AdvPlugin instance for a test.

        Uses ``config_content`` when given, then the module-level
        ADV_CONFIG_CONTENT; skips the test when neither is available.  The
        plugin's ``save`` is mocked so tests cannot write config to disk.
        """
        conf = None
        if config_content:
            conf = XmlConfigParser()
            conf.setXml(config_content)
        elif ADV_CONFIG_CONTENT:
            conf = XmlConfigParser()
            conf.setXml(ADV_CONFIG_CONTENT)
        else:
            # Bug fix: the original bare unittest.skip(...) call discarded
            # the decorator it returned and continued with conf=None.
            # Raising SkipTest aborts the test as intended.
            raise unittest.SkipTest("cannot get default plugin config file at %s" % ADV_CONFIG_FILE)

        self.p = AdvPlugin(self.console, conf)
        self.p.save = Mock()
        self.conf = self.p.config
        self.log.setLevel(logging.DEBUG)
        self.log.info("============================= Adv plugin: loading config ============================")
        self.p.onLoadConfig()
        self.log.info("============================= Adv plugin: starting  =================================")
        self.p.onStartup()
Beispiel #46
0
def skip_unless_has_memory_collection(cls):
    """Class decorator to skip tests that require memory collection.

    Any test that uses memory collection (such as the resource leak tests)
    can decorate their class with skip_unless_has_memory_collection to
    indicate that if the platform does not support memory collection
    the tests should be skipped.
    """
    supported = ('Darwin', 'Linux')
    if platform.system() in supported:
        return cls
    return unittest.skip('Memory tests only supported on mac/linux.')(cls)
Beispiel #47
0
 def wrap(func):
     """Record ``flags`` for ``func`` and skip it when flag filtering applies."""
     # Accumulate this function's flags in the module-level registry,
     # keyed by the function's id.
     func_flags = _ALL_FLAGS.setdefault(id(func), [])
     func_flags.extend(flags)
     # Flags that are both on the function and globally selected.
     fl = set(func_flags) & _FLAGS
     # OR-tagged functions run unless they are work-in-progress --
     # presumably _TAG_OR lists opt-in tags; confirm against its definition.
     if any([f in _TAG_OR for f in func_flags]) and "wip" not in fl:
         func.__unittest_skip__ = False
         return func
     if len(fl):
         return unittest.skip("skipping tests flagged with %s" %
                              (" ".join(fl)))(func)
     return func
Beispiel #48
0
def skip_without_setting(setting):
    """Skip the decorated test unless ``integration_settings`` defines ``setting``."""
    try:
        from . import integration_settings
    except ImportError:
        # Settings module absent entirely: fall through to the skip below.
        integration_settings = None
    if integration_settings is not None and hasattr(integration_settings, setting):
        return lambda f: f

    message = 'integration_settings.{0} was not found'.format(setting)
    return unittest.skip(message)
Beispiel #49
0
def incompatible_2_6(test):
    """
    Test won't work in Python 2.6
    """
    major = sys.version_info[0]
    minor = sys.version_info[1]
    # Python 2.6 or older (major <= 2 and minor <= 6) gets a skip wrapper.
    is_py26_or_older = major <= 2 and minor <= 6
    if is_py26_or_older:
        return unittest.skip('error on Python 2.6')(test)
    return test
Beispiel #50
0
def skip_unless_any_module(modules):
    """Skip the decorated test unless at least one of ``modules`` imports."""
    available = False
    # Probe every module (no early exit: imports may have needed side effects).
    for name in modules:
        try:
            __import__(name)
        except ImportError:
            continue
        available = True
    if not available:
        return skip("None of the modules %s could be loaded, dependent test skipped." % modules)
    return lambda func: func
Beispiel #51
0
def skip_unless_has_memory_collection(cls):
    """Class decorator to skip tests that require memory collection.

    Any test that uses memory collection (such as the resource leak tests)
    can decorate their class with skip_unless_has_memory_collection to
    indicate that if the platform does not support memory collection
    the tests should be skipped.
    """
    # Only macOS (Darwin) and Linux support the collection mechanism.
    if platform.system() not in ['Darwin', 'Linux']:
        return unittest.skip('Memory tests only supported on mac/linux.')(cls)
    return cls
Beispiel #52
0
def skip(reason):
    """
    A decorator for test skipping.

    Uses the stdlib unittest.skip on Python >= 2.7 and the unittest2
    backport otherwise.
    """
    # NOTE(review): getPythonVersion() is compared numerically; if it returns
    # a float (e.g. 2.7), versions like 3.10 would compare as 3.1 and still
    # pass this check, but the scheme is fragile -- confirm its return type.
    version = getPythonVersion()
    if version >= 2.7:
        import unittest
        return unittest.skip(reason)
    else:
        import unittest2
        return unittest2.skip(reason)
Beispiel #53
0
def longtest(description):
    """
    Used as a function decorator, for example, @wttest.longtest("description").
    The decorator indicates that this test function should only be included
    when running the test suite with the --long option.
    """
    if WiredTigerTestCase._longtest:
        # Long tests enabled: pass the function through untouched.
        def runit_decorator(func):
            return func
        return runit_decorator
    return unittest.skip(description + ' (enable with --long)')
class CLAClassifierDiffTest(cla_classifier_test.CLAClassifierTest):
    """CLAClassifierDiff unit tests."""

    def setUp(self):
        # Swap in the diff classifier; the inherited tests run against it.
        self._classifier = CLAClassifierDiff

    # Bug fix: the skip decorator was previously created as a bare
    # expression in the class body and never applied, so the empty override
    # below ran as a silent pass instead of being skipped.
    @unittest.skip(
        "The classifier diff fails for this test for some reason. "
        "Should be fixed but the diff classifier is just for testing "
        "anyway.")
    def testComputeCategory2(self):
        pass
Beispiel #55
0
def testcase_generator(basepath, mainfunc, files, options):
    """Yield (function, args...) tuples covering every source file and format."""
    fontpath = get_fontpath(basepath)
    options = options + ['-f', fontpath]

    for source in files:
        yield generate, mainfunc, 'svg', source, options

        # PNG needs Pillow, and is additionally gated behind $ALL_TESTS;
        # skipped stand-ins keep the missing cases visible in the report.
        if not supported_pil():
            yield unittest.skip("Pillow is not available")(generate)
            yield unittest.skip("Pillow is not available")(generate)
        elif os.environ.get('ALL_TESTS') is None:
            message = "Skipped by default. To enable it, specify $ALL_TESTS=1"
            yield unittest.skip(message)(generate)
            yield unittest.skip(message)(generate)
        else:
            yield generate, mainfunc, 'png', source, options
            yield generate, mainfunc, 'png', source, options + ['--antialias']

        # PDF needs reportlab, with the same $ALL_TESTS gate.
        if not supported_pdf():
            yield unittest.skip("reportlab is not available")(generate)
        elif os.environ.get('ALL_TESTS') is None:
            message = "Skipped by default. To enable it, specify $ALL_TESTS=1"
            yield unittest.skip(message)(generate)
        else:
            yield generate, mainfunc, 'pdf', source, options
Beispiel #56
0
def incompatible_pandas_0131(test):
    """
    Test won't work on pandas 0.13.1 due to pandas/numpy issue with
    np.round.
    """
    # Doc fix: the docstring previously said 0.18.0, but the function name
    # and the check below target pandas 0.13.1.
    if pd.__version__ == '0.13.1':
        out = unittest.skip(
            'error on pandas 0.13.1 due to pandas/numpy')(test)
    else:
        out = test

    return out
Beispiel #57
0
 def test_skip_class(self):
     """A class-level skip must skip its tests and run none of their bodies."""
     class Foo(unittest2.TestCase):
         def test_1(self):
             # Appends only if the body actually runs (it must not).
             record.append(1)
     
     # was originally a class decorator...
     Foo = unittest2.skip("testing")(Foo)
     # ``record`` is resolved lazily by test_1, so binding it here is fine.
     record = []
     result = unittest2.TestResult()
     test = Foo("test_1")
     suite = unittest2.TestSuite([test])
     suite.run(result)
     self.assertEqual(result.skipped, [(test, "testing")])
     self.assertEqual(record, [])