def test_monotonic_can_be_ignored():
    """time.monotonic() keeps advancing normally when listed in ``ignore``."""
    if sys.version_info[0] != 3:
        raise skip.SkipTest("test target is Python3")
    start = datetime.datetime(year=1, month=7, day=12,
                              hour=15, minute=6, second=3)
    later = datetime.datetime(year=2, month=8, day=13,
                              hour=14, minute=5, second=0)
    # Because 'time.monotonic' is ignored, freeze_time must not pin it:
    # it should strictly increase across every call, regardless of moves.
    with freeze_time(start, ignore=['time.monotonic']) as frozen:
        first = time.monotonic()
        frozen.move_to(later)
        second = time.monotonic()
        assert second > first
        frozen.move_to(start)
        assert time.monotonic() > second
def test_insert_document(self):
    """Insert a catalog into Google Translator Toolkit, then download it."""
    catalog = self.pod.catalogs.get('de')
    content = catalog.content
    # Real network test: bail out when no OAuth credentials are cached.
    credentials, _ = oauth.get_credentials_and_storage(
        scope=google_translator_toolkit.OAUTH_SCOPE,
        storage_key=google_translator_toolkit.STORAGE_KEY)
    if not credentials:
        raise skip.SkipTest(
            'Skipping Google Translator Toolkit test'
            ' because we don\'t have auth keys. Run'
            ' `grow upload_translations` or `grow download_translations`'
            ' to acquire auth keys and re-run the test.')
    gtt = google_translator_toolkit.Gtt()
    insert_resp = gtt.insert_document(
        name='Test Display Name',
        content=content,
        source_lang='en',
        lang=str(catalog.locale),
        mimetype='text/x-gettext-translation')
    time.sleep(2)  # Wait for the document to be ready in GTT.
    download_resp = gtt.download_document(insert_resp['id'])
def test_move_to_monotonic():
    """move_to() shifts time.monotonic() by exactly the datetime delta."""
    if sys.version_info[0] != 3:
        raise skip.SkipTest("test target is Python3")
    start = datetime.datetime(year=1, month=7, day=12,
                              hour=15, minute=6, second=3)
    target = datetime.datetime(year=2, month=8, day=13,
                               hour=14, minute=5, second=0)
    with freeze_time(start) as frozen:
        baseline = time.monotonic()
        frozen.move_to(target)
        expected_delta = (target - start).total_seconds()
        assert time.monotonic() - baseline == expected_delta
        # Moving back restores the original monotonic reading.
        frozen.move_to(start)
        assert time.monotonic() == baseline
def test_correct_values_medium(self):
    """End-to-end run of COG_phylum_table.py against the medium fixtures.

    The fixture data is currently missing, so the test is skipped up
    front; everything below the raise is unreachable until the data
    is restored.
    """
    raise skip.SkipTest("Test data not in place")
    script_path = os.path.join(
        test_dir_path, "..", "scripts", "COG_phylum_table.py")
    medium_dir = os.path.join(test_data_path, "medium")
    blast_output = os.path.join(medium_dir, "test_contigs.out")
    clustering_file = os.path.join(medium_dir, "test_clustering_gt1000.csv")
    phylum_scg_file = os.path.join(medium_dir, "test_phyla_scg.tsv")
    cdd_to_cog_file = os.path.join(
        test_dir_path, "..", "data", "cdd_to_cog.tsv")
    output_file = os.path.join(tmp_dir_path, "test_script_output_path.csv")
    cmd = " ".join([
        "python", script_path,
        "-b", blast_output,
        "-c", clustering_file,
        "-m", phylum_scg_file,
        "--cdd_cog_file", cdd_to_cog_file,
        "--output_file", output_file,
    ])
    output = subprocess.check_output(cmd, shell=True)
    result_matrix = pd.DataFrame.from_csv(output_file, index_col=0)
def setUp(self):
    """Build the event fixtures shared by the interval tests.

    Skips the tests when running against a mim connection without
    spidermonkey, which mim needs to implement the map-reduce functions.
    """
    super(TestGetEventInterval, self).setUp()
    # NOTE(dhellmann): mim requires spidermonkey to implement the
    # map-reduce functions, so if we can't import it then just
    # skip these tests unless we aren't using mim.
    try:
        import spidermonkey  # noqa: F401 -- probing for availability only
    except ImportError:
        # Was a bare ``except:``, which also swallowed SystemExit,
        # KeyboardInterrupt, etc.; only a failed import matters here.
        if isinstance(self.conn.conn, mim.Connection):
            raise skip.SkipTest('requires spidermonkey')
    # Create events relative to the range and pretend
    # that the intervening events exist.
    self.start = datetime.datetime(2012, 8, 28, 0, 0)
    self.end = datetime.datetime(2012, 8, 29, 0, 0)
    self.early1 = self.start - datetime.timedelta(minutes=20)
    self.early2 = self.start - datetime.timedelta(minutes=10)
    self.middle1 = self.start + datetime.timedelta(minutes=10)
    self.middle2 = self.end - datetime.timedelta(minutes=10)
    self.late1 = self.end + datetime.timedelta(minutes=10)
    self.late2 = self.end + datetime.timedelta(minutes=20)
    self._filter = storage.EventFilter(
        resource='resource-id',
        meter='instance',
        start=self.start,
        end=self.end,
    )
def test_deploy_local(self):
    """Deploy to a freshly-initialized local Git repository."""
    if utils.is_appengine():
        raise skip.SkipTest('Skipping Git destination test on GAE.')
    # Imported lazily: GitPython is unavailable on App Engine.
    import git
    repo_path = tempfile.mkdtemp()
    git.Repo.init(repo_path)
    self._test_deploy(repo_path)
def test_ticking_datetime_monotonic():
    """With a ticking freeze, time.monotonic() advances on its own."""
    if sys.version_info[0] != 3:
        raise skip.SkipTest("test target is Python3")
    with freeze_time("Jan 14th, 2012", ticking_speed=1.0):
        before = time.monotonic()
        # Sleep a hair so coarse clock resolution cannot yield equal readings.
        time.sleep(0.001)
        assert time.monotonic() > before
def skip(reason):
    """Skip the current test using whichever skip mechanism is available.

    Preference order: nose's ``skip.SkipTest``, then ``unittest.SkipTest``
    (Python 2.7+/3.1+), and finally a bare module-level ``SkipTest`` name
    -- presumably imported elsewhere in this file; TODO confirm.
    """
    try:
        # NOTE: this local import shadows the function's own name inside
        # the try block; the raised SkipTest propagates out (it is not an
        # ImportError, so the except clause below does not catch it).
        from nose.plugins import skip
        raise skip.SkipTest(reason)
    except ImportError:
        if hasattr(unittest, 'SkipTest'):
            raise unittest.SkipTest(reason)
        else:
            raise SkipTest(reason)
def test_maya_datetimes():
    """freeze_time accepts a maya.MayaDT as its frozen instant."""
    if not maya:
        raise skip.SkipTest("maya is optional since it's not supported for "
                            "enough python versions")
    with freeze_time(maya.when("October 2nd, 1997")):
        expected = datetime.datetime(year=1997, month=10, day=2)
        assert datetime.datetime.now() == expected
def __enter__(self):
    """Try each candidate locale in turn; skip the test if none can be set."""
    # Remember the current locale so __exit__ can restore it later.
    self.old = locale.setlocale(locale.LC_ALL)
    for candidate in self.targets:
        try:
            locale.setlocale(locale.LC_ALL, candidate)
        except locale.Error:
            continue
        return
    raise skip.SkipTest(
        'could not set locale to any of: %s' % ', '.join(self.targets))
def setUp(self):
    """Prepare the sum tests, skipping when mim lacks spidermonkey."""
    super(SumTest, self).setUp()
    # NOTE(dhellmann): mim requires spidermonkey to implement the
    # map-reduce functions, so if we can't import it then just
    # skip these tests unless we aren't using mim.
    try:
        import spidermonkey  # noqa: F401 -- probing for availability only
    except ImportError:
        # Was a bare ``except:``, which also swallowed SystemExit,
        # KeyboardInterrupt, etc.; only a failed import matters here.
        if isinstance(self.conn.conn, mim.Connection):
            raise skip.SkipTest('requires spidermonkey')
def test_upload_translations(self):
    """Upload the 'de' catalog through the Google Sheets translator."""
    # Real network test: bail out when no OAuth credentials are cached.
    credentials, _ = oauth.get_credentials_and_storage(
        scope=google_sheets.OAUTH_SCOPE,
        storage_key=google_drive.STORAGE_KEY)
    if not credentials:
        raise skip.SkipTest(
            'Skipping Google Sheets Translator test'
            ' because we don\'t have auth keys. Run'
            ' `grow upload_translations` or `grow download_translations`'
            ' to acquire auth keys and re-run the test.')
    translator = self.pod.get_translator('google_sheets')
    translator.upload(locales=['de'])
def test_nested_monotonic():
    """Nested freezes keep independent monotonic clocks; inner tick()s
    do not leak into the outer freeze."""
    if sys.version_info[0] != 3:
        raise skip.SkipTest("test target is Python3")
    with freeze_time('2012-01-14') as outer:
        outer_expected = time.monotonic()
        with freeze_time('2012-12-25') as inner:
            inner_expected = time.monotonic()
            inner.tick()
            inner_expected += 1
            assert time.monotonic() == inner_expected
        # Back in the outer freeze the inner tick must be invisible.
        assert time.monotonic() == outer_expected
        outer.tick()
        outer_expected += 1
        assert time.monotonic() == outer_expected
def test_upload_and_download_gtt_translations(self):
    """Round-trip upload/download through Google Translator Toolkit."""
    # 'gtt' is not a registered translator alias; the full name is required.
    self.assertRaises(ValueError, self.pod.get_translator, 'gtt')
    translator = self.pod.get_translator('google_translator_toolkit')
    # Real network test: bail out when no OAuth credentials are cached.
    credentials, _ = oauth.get_credentials_and_storage(
        scope=google_translator_toolkit.OAUTH_SCOPE,
        storage_key=google_translator_toolkit.STORAGE_KEY)
    if not credentials:
        raise skip.SkipTest(
            'Skipping Google Translator Toolkit test'
            ' because we don\'t have auth keys. Run'
            ' `grow upload_translations` or `grow download_translations`'
            ' to acquire auth keys and re-run the test.')
    translator.upload(locales=['de'])
    time.sleep(2)  # Wait for the document to be ready in GTT.
    translator.download(locales=['de'])
    translator.update_acl()
    translator.update_acl(locales=['de'])
def test_manual_increment_monotonic():
    """tick() advances time.monotonic() by exactly the requested amount."""
    if sys.version_info[0] != 3:
        raise skip.SkipTest("test target is Python3")
    start = datetime.datetime(year=1, month=7, day=12,
                              hour=15, minute=6, second=3)
    with freeze_time(start) as frozen:
        expected = time.monotonic()
        frozen.tick()  # default step is one second
        expected += 1
        assert time.monotonic() == expected
        frozen.tick(delta=datetime.timedelta(seconds=10))
        expected += 10
        assert time.monotonic() == expected
import os import sys import mock MOCK_RADOS = mock.Mock(name='rados') MOCK_RADOS.__name__ = 'rados' sys.modules['rados'] = MOCK_RADOS import cStringIO import unittest try: from test.unit.proxy import test_server from test.unit.proxy.test_server import teardown except ImportError: import nose.plugins.skip as skip raise skip.SkipTest("Swift test environ not installed") from swift_ceph_backend import rados_server class ObjectNotFound(Exception): pass MOCK_RADOS.ObjectNotFound = ObjectNotFound class MockIoctx(object): def __init__(self): self._objs = {} def get_xattr(self, key, attr_name): if self._objs.get(key) is None:
def _skipper(*args, **kw):
    """Wrapped skipper function.

    Unconditionally skips the decorated test: the original callable is
    never invoked. ``self.message`` is closed over from the enclosing
    decorator object.
    """
    raise skip.SkipTest(self.message)
def test_deploy_online(self):
    """Deploy to the Git remote named by $GROW_TEST_REPO_URL, if set."""
    online_url = os.getenv('GROW_TEST_REPO_URL')
    if online_url:
        self._test_deploy(online_url)
    else:
        raise skip.SkipTest(
            'Set $GROW_TEST_REPO_URL to test online Git deployment.')
def test_class_decorator_wraps_callable_object_py3(self):
    """On Python 3 the decorator exposes the Callable via __wrapped__."""
    if sys.version_info[0] != 3:
        raise skip.SkipTest("test target is Python3")
    wrapped = self.a_mock.__wrapped__
    assert wrapped.__class__ == Callable
def test_class_decorator_skips_callable_object_py2(self):
    """On Python 2 the Callable is left unwrapped by the decorator."""
    if sys.version_info[0] != 2:
        raise skip.SkipTest("test target is Python2")
    mock_class = self.a_mock.__class__
    assert mock_class == Callable
def _skipper(*args, **kw):
    """Run the wrapped test only when the skip condition is not met.

    ``self`` (the decorator object) and ``func`` (the wrapped callable)
    are closed over from the enclosing scope.
    """
    if self.condition:
        func(*args, **kw)
    else:
        raise skip.SkipTest(self.message)
def wrapper(*args):
    """Invoke the wrapped function, skipping when not running on CPython."""
    if _is_cpython:
        return func(*args)
    raise skip.SkipTest("Requires CPython")
def test_gff_file(self):
    """Test for read_gff_file method."""
    # Placeholder: always skipped until the test body is implemented.
    raise skip.SkipTest("Test not written yet")
import imposm.app import imposm.db.config import imposm.mapping from nose.tools import eq_ from nose.plugins import skip temp_dir = None old_cwd = None try: from imposm_test_conf import db_conf db_conf = imposm.mapping.Options(db_conf) except ImportError: raise skip.SkipTest('no imposm_test_conf.py with db_conf found') def setup_module(): global old_cwd, temp_dir old_cwd = os.getcwd() temp_dir = tempfile.mkdtemp() os.chdir(temp_dir) test_osm_file = os.path.join(os.path.dirname(__file__), 'test.out.osm') with capture_out(): print db_conf.password imposm.app.main([ '--read', test_osm_file, '--write', '--proj', db_conf.proj, '--table-prefix', db_conf.prefix, '--connect', 'postgis://%(user)s:%(password)s@%(host)s:%(port)s/%(db)s' % db_conf