def test_nginx_x_accel_redirect_global_settings(self):
    """Global settings for Nginx middleware are deprecated.

    Overriding each legacy ``NGINX_DOWNLOAD_MIDDLEWARE_*`` setting and
    reloading the settings module must emit a DeprecationWarning that
    names the setting; otherwise the test fails listing the misses.
    """
    settings_overrides = {
        'NGINX_DOWNLOAD_MIDDLEWARE_WITH_BUFFERING': True,
        'NGINX_DOWNLOAD_MIDDLEWARE_LIMIT_RATE': 32,
        'NGINX_DOWNLOAD_MIDDLEWARE_EXPIRES': 3600,
        'NGINX_DOWNLOAD_MIDDLEWARE_MEDIA_ROOT': '/',
        'NGINX_DOWNLOAD_MIDDLEWARE_SOURCE_DIR': '/',
        'NGINX_DOWNLOAD_MIDDLEWARE_SOURCE_URL': '/',
        'NGINX_DOWNLOAD_MIDDLEWARE_MEDIA_URL': '/',
        'NGINX_DOWNLOAD_MIDDLEWARE_DESTINATION_URL': '/',
    }
    import django_downloadview.nginx.settings
    missed_warnings = []
    for setting_name, setting_value in settings_overrides.items():
        with warnings.catch_warnings(record=True) as warning_list:
            # Configure filters *inside* catch_warnings so the global
            # warning state is restored on exit.  The original called
            # resetwarnings()/simplefilter() before entering the context,
            # permanently clobbering filters for later tests.
            warnings.resetwarnings()
            warnings.simplefilter("always")
            with override_settings(**{setting_name: setting_value}):
                reload(django_downloadview.nginx.settings)
        caught = any(
            warning_item.category == DeprecationWarning
            and 'deprecated' in str(warning_item.message)
            and setting_name in str(warning_item.message)
            for warning_item in warning_list)
        if not caught:
            missed_warnings.append(setting_name)
    if missed_warnings:
        self.fail(
            'No DeprecationWarning raised about following settings: '
            '{settings}.'.format(settings=', '.join(missed_warnings)))
def test_fetch_hosts_from_rc():
    """Hosts are read from the platoonrc file pointed to by PLATOONRC."""
    # Make sure the environment variable does not shadow the rc file.
    if os.getenv("PLATOON_HOSTS"):
        os.environ.pop("PLATOON_HOSTS")
    os.environ["PLATOONRC"] = "../../../platoonrc.conf"
    reload(cfgp)
    result = cfgp.fetch_hosts()
    expected = ["lisa0", "lisa1", "lisa3"]
    assert result == expected, (result)
def test_try_again(monkeypatch):
    """|mach try again| replays the previously pushed selection verbatim."""
    push.push_to_try(
        "fuzzy",
        "Fuzzy message",
        try_task_config=push.generate_try_task_config(
            "fuzzy",
            ["foo", "bar"],
            {"use-artifact-builds": True},
        ),
    )

    # The initial push must be recorded as exactly one history entry.
    assert os.path.isfile(push.history_path)
    with open(push.history_path, "r") as fh:
        assert len(fh.readlines()) == 1

    def fake_push_to_try(*args, **kwargs):
        return args, kwargs

    monkeypatch.setattr(push, "push_to_try", fake_push_to_try)
    reload(again)

    args, kwargs = again.run()
    try_task_config = kwargs.pop("try_task_config")

    assert args[0] == "again"
    assert args[1] == "Fuzzy message"
    assert sorted(try_task_config.get("tasks")) == sorted(["foo", "bar"])
    assert try_task_config.get("env") == {"TRY_SELECTOR": "fuzzy"}
    assert try_task_config.get("use-artifact-builds")

    # Replaying must not append a new history entry.
    with open(push.history_path, "r") as fh:
        assert len(fh.readlines()) == 1
def test_fetch_devices_from_envs():
    """Devices listed in PLATOON_DEVICES are returned for any host name."""
    if os.getenv("PLATOON_DEVICES"):
        os.environ.pop("PLATOON_DEVICES")
    os.environ["PLATOON_DEVICES"] = "cuda0,opencl0:1"
    reload(cfgp)
    found = cfgp.fetch_devices_for_host("asfasfa")
    assert found == ["cuda0", "opencl0:1"], (found)
def test_standard_artifact_registry():
    """Built-in artifact schemes plus a mocked entrypoint are registered."""
    mock_entrypoint = mock.Mock()
    mock_entrypoint.name = "mock-scheme"

    with mock.patch(
        "entrypoints.get_group_all", return_value=[mock_entrypoint]
    ):
        # Entrypoints are registered at import time, so reload the module
        # to pick up the entrypoint served by the mocked get_group_all.
        reload(artifact_repository_registry)

        expected_schemes = {
            '', 's3', 'gs', 'wasbs', 'ftp', 'sftp', 'dbfs', 'mock-scheme',
        }
        registered = (
            artifact_repository_registry
            ._artifact_repository_registry._registry.keys())
        assert expected_schemes.issubset(registered)
def test_standard_store_registry_with_mocked_entrypoint():
    """Built-in store schemes plus a mocked entrypoint are registered."""
    mock_entrypoint = mock.Mock()
    mock_entrypoint.name = "mock-scheme"

    with mock.patch(
        "entrypoints.get_group_all", return_value=[mock_entrypoint]
    ):
        # Entrypoints are registered at import time, so reload the module
        # to pick up the entrypoint served by the mocked get_group_all.
        reload(mlflow.tracking.utils)

        expected_schemes = {
            '', 'file', 'http', 'https', 'postgresql', 'mysql',
            'sqlite', 'mssql', 'databricks', 'mock-scheme',
        }
        registered = mlflow.tracking.utils._tracking_store_registry._registry
        assert expected_schemes.issubset(registered.keys())
def test_nginx_x_accel_redirect_middleware(self):
    "XAccelRedirectMiddleware in settings triggers ImproperlyConfigured."
    middleware = ['django_downloadview.nginx.XAccelRedirectMiddleware']
    with override_settings(MIDDLEWARE_CLASSES=middleware):
        with self.assertRaises(ImproperlyConfigured):
            import django_downloadview.nginx.settings
            reload(django_downloadview.nginx.settings)
def test_fetch_hosts_from_envs():
    """Hosts listed in the PLATOON_HOSTS variable take precedence."""
    if os.getenv("PLATOON_HOSTS"):
        os.environ.pop("PLATOON_HOSTS")
    expected = ["test0", "tes1", "te2"]
    os.environ["PLATOON_HOSTS"] = "test0,tes1,te2"
    reload(cfgp)
    result = cfgp.fetch_hosts()
    assert result == expected, (result)
def test_dtype_to_mpi_dtype_fail(self):
    """Unknown or unsupported dtype names must raise TypeError."""
    reload(util)
    # TODO Find how to convert from half type to MPI dtype
    # and use in collectives
    for bad_name in ('sadfa', '', 'float16'):
        with self.assertRaises(TypeError):
            util.dtype_to_mpi(bad_name)
def __init__(self, *args, **kwargs):
    """Snapshot pyrax's connection factories so tests can restore them."""
    reload(pyrax)
    # Keep references to the real factories before tests replace them.
    self.orig_connect_to_cloudservers = pyrax.connect_to_cloudservers
    self.orig_connect_to_cloudfiles = pyrax.connect_to_cloudfiles
    self.orig_connect_to_cloud_loadbalancers = (
        pyrax.connect_to_cloud_loadbalancers)
    self.orig_connect_to_cloud_databases = pyrax.connect_to_cloud_databases
    self.orig_get_service_endpoint = pyrax._get_service_endpoint
    super(PyraxInitTest, self).__init__(*args, **kwargs)
    # Dummy credentials used by the individual tests.
    self.username = "******"
    self.password = "******"
    self.tenant_id = "faketenantid"
def import_path(fullpath):
    """
    Import a file with full path specification. Allows one
    to import from anywhere, something __import__ does not do.
    """
    # http://zephyrfalcon.org/weblog/arch_d7_2002_08_31.html
    directory, basename = os.path.split(fullpath)
    modname = os.path.splitext(basename)[0]
    # Temporarily extend the search path so __import__ can find the file.
    sys.path.append(directory)
    mod = __import__(modname)
    reload(mod)  # Might be out of date during tests
    del sys.path[-1]
    return mod
def use(arg, warn=True, force=False):
    """
    Set the matplotlib backend to one of the known backends.

    The argument is case-insensitive. *warn* specifies whether a
    warning should be issued if a backend has already been set up.
    *force* is an **experimental** flag that tells matplotlib to
    attempt to initialize a new backend by reloading the backend
    module.

    .. note::

        This function must be called *before* importing pyplot for
        the first time; or, if you are not using pyplot, it must be called
        before importing matplotlib.backends.  If warn is True, a warning
        is issued if you try and call this after pylab or pyplot have been
        loaded.  In certain black magic use cases, e.g.
        :func:`pyplot.switch_backend`, we are doing the reloading necessary
        to make the backend switch work (in some cases, e.g., pure image
        backends) so one can set warn=False to suppress the warnings.

    To find out which backend is currently set, see
    :func:`matplotlib.get_backend`.

    """
    # Resolve the backend name first: module paths are taken verbatim,
    # everything else is lowercased and validated.
    if arg.startswith('module://'):
        name = arg
    else:
        # Lowercase only non-module backend names (modules are case-sensitive)
        arg = arg.lower()
        name = validate_backend(arg)

    # A backend is already set up iff matplotlib.backends was imported.
    need_reload = 'matplotlib.backends' in sys.modules
    if need_reload:
        # Warn only when the caller asks for a *different* backend.
        if warn and rcParams['backend'] != name:
            warnings.warn(_use_error_msg)
        # Unless we've been told to force it, just return.
        if not force:
            return

    # Store the backend name
    rcParams['backend'] = name

    # If needed we reload here because a lot of setup code is triggered on
    # module import. See backends/__init__.py for more detail.
    if need_reload:
        reload(sys.modules['matplotlib.backends'])
def test_op_to_mpi(self):
    """Every textual alias maps onto the matching MPI reduction op."""
    reload(util)
    cases = [
        ('+', MPI.SUM), ("sum", MPI.SUM), ("add", MPI.SUM),
        ('*', MPI.PROD), ("prod", MPI.PROD), ("product", MPI.PROD),
        ("mul", MPI.PROD),
        ("max", MPI.MAX), ("maximum", MPI.MAX),
        ("min", MPI.MIN), ("minimum", MPI.MIN),
    ]
    for alias, expected in cases:
        assert util.op_to_mpi(alias) == expected
def test_run_context_provider_registry_with_installed_plugin(tmp_wkdir):
    """This test requires the package in tests/resources/mlflow-test-plugin
    to be installed"""
    reload(mlflow.tracking.context)
    from mlflow_test_plugin import PluginRunContextProvider

    registered = _currently_registered_run_context_provider_classes()
    assert PluginRunContextProvider in registered

    # The test plugin's context provider always returns False from
    # in_context to avoid polluting tags in developers' environments.
    # The following mock overrides this to perform the integration test.
    with mock.patch.object(PluginRunContextProvider, "in_context",
                           return_value=True):
        assert resolve_tags()["test"] == "tag"
def test_import_fail(self):
    """Reloading pyos propagates an ImportError from the identity module.

    The builtin ``__import__`` is patched to fail for "identity"; a
    try/finally guarantees the real import machinery is restored even
    if the assertion fails (the original left the fake import installed
    on failure, breaking every later import in the process).
    """
    import __builtin__
    sav_import = __builtin__.__import__

    def fake_import(nm, *args):
        if nm == "identity":
            raise ImportError
        else:
            return sav_import(nm, *args)

    __builtin__.__import__ = fake_import
    try:
        self.assertRaises(ImportError, reload, pyos)
    finally:
        # Always restore the real __import__ and a sane pyos module.
        __builtin__.__import__ = sav_import
        reload(pyos)
def test_import_fail(self):
    """Reloading pyrax propagates an ImportError from the identity module.

    The builtin ``__import__`` is patched to fail for "identity"; a
    try/finally guarantees the real import machinery is restored even
    if the assertion fails (the original left the fake import installed
    on failure, breaking every later import in the process).
    """
    _builtin = six.moves.builtins
    sav_import = _builtin.__import__

    def fake_import(nm, *args):
        if nm == "identity":
            raise ImportError
        else:
            return sav_import(nm, *args)

    _builtin.__import__ = fake_import
    try:
        self.assertRaises(ImportError, reload, pyrax)
    finally:
        # Always restore the real __import__ and a sane pyrax module.
        _builtin.__import__ = sav_import
        reload(pyrax)
def reload_objects(self):
    ''' Reload objects which depends on the patch module.
        This allows to see the changes in code without restarting
        the python interpreter.
    '''
    modules_to_purge = (
        'objects.movies', 'objects.musicvideos', 'objects.tvshows',
        'objects.music', 'objects.obj', 'objects.actions',
        'objects.kodi.kodi', 'objects.kodi.movies',
        'objects.kodi.musicvideos', 'objects.kodi.tvshows',
        'objects.kodi.music', 'objects.kodi.artwork',
        'objects.kodi.queries', 'objects.kodi.queries_music',
        'objects.kodi.queries_texture',
    )
    # Drop the cached submodules so the reloads below re-import them.
    for name in modules_to_purge:
        del sys.modules[name]

    reload(objects.kodi)
    reload(objects)
    reload(library)
    reload(monitor)

    # Rebuild the object mapping from the freshly loaded code.
    objects.obj.Objects().mapping()
    LOG.info("---[ objects reloaded ]")
def test_version_reader():
    """Test that get the expected idd_version when reading an IDF/IDD."""
    # We need to reload modeleditor since the IDF class may have had an
    # IDD which causes problems.
    # https://stackoverflow.com/questions/437589/how-do-i-unload-reload-a-python-module
    reload(modeleditor)
    iddfile = os.path.join(IDD_FILES, TEST_IDD)
    fname1 = os.path.join(IDF_FILES, TEST_IDF)
    modeleditor.IDF.setiddname(iddfile, testing=True)
    idf = modeleditor.IDF(fname1, TEST_EPW)
    expected = versiontuple(VERSION)
    # Version reported by the parsed IDF instance.
    assert idf.idd_version == expected
    # Version reported by the IDF class itself.
    assert modeleditor.IDF.idd_version == expected
def test_standard_store_registry_with_installed_plugin(tmp_wkdir):
    """This test requires the package in tests/resources/mlflow-test-plugin
    to be installed"""
    reload(mlflow.tracking.utils)
    registry = mlflow.tracking.utils._tracking_store_registry._registry
    assert "file-plugin" in registry.keys()

    from mlflow_test_plugin import PluginFileStore

    env = {_TRACKING_URI_ENV_VAR: "file-plugin:test-path"}
    with mock.patch.dict(os.environ, env):
        store = mlflow.tracking.utils._get_store()
        assert isinstance(store, PluginFileStore)
        assert store.is_plugin
def test_dtype_to_mpi(self):
    """Each numpy dtype maps onto the matching MPI datatype constant."""
    reload(util)
    pairs = [
        ('bool', MPI.C_BOOL),
        ('int8', MPI.INT8_T), ('uint8', MPI.UINT8_T),
        ('int16', MPI.INT16_T), ('uint16', MPI.UINT16_T),
        ('int32', MPI.INT32_T), ('uint32', MPI.UINT32_T),
        ('int64', MPI.INT64_T), ('uint64', MPI.UINT64_T),
        ('float32', MPI.FLOAT), ('float64', MPI.DOUBLE),
        ('complex64', MPI.C_FLOAT_COMPLEX),
        ('complex128', MPI.C_DOUBLE_COMPLEX),
    ]
    for name, mpi_dtype in pairs:
        assert util.dtype_to_mpi(np.dtype(name)) == mpi_dtype
def test_plugin_registration_via_installed_package():
    """This test requires the package in tests/resources/mlflow-test-plugin
    to be installed"""
    reload(artifact_repository_registry)
    registry = artifact_repository_registry._artifact_repository_registry
    assert "file-plugin" in registry._registry

    from mlflow_test_plugin.local_artifact import PluginLocalArtifactRepository

    repo = artifact_repository_registry.get_artifact_repository(
        "file-plugin:test-path")
    assert isinstance(repo, PluginLocalArtifactRepository)
    assert repo.is_plugin
def test_registry_instance_loads_entrypoints():
    """Providers advertised via entrypoints end up in the registry."""

    class MockRunContext(object):
        pass

    mock_entrypoint = mock.Mock()
    mock_entrypoint.load.return_value = MockRunContext

    with mock.patch("entrypoints.get_group_all",
                    return_value=[mock_entrypoint]) as mock_get_group_all:
        # Entrypoints are registered at import time, so reload the module
        # to register the entrypoint given by the mocked get_group_all.
        reload(mlflow.tracking.context.registry)
        registered = _currently_registered_run_context_provider_classes()
        assert MockRunContext in registered
        mock_get_group_all.assert_called_once_with("mlflow.run_context_provider")
def test_dtype_to_mpi(self):
    """Numpy dtypes convert to their MPI datatype counterparts."""
    reload(util)
    expected = {
        'bool': MPI.C_BOOL,
        'int8': MPI.INT8_T, 'uint8': MPI.UINT8_T,
        'int16': MPI.INT16_T, 'uint16': MPI.UINT16_T,
        'int32': MPI.INT32_T, 'uint32': MPI.UINT32_T,
        'int64': MPI.INT64_T, 'uint64': MPI.UINT64_T,
        'float32': MPI.FLOAT, 'float64': MPI.DOUBLE,
        'complex64': MPI.C_FLOAT_COMPLEX,
        'complex128': MPI.C_DOUBLE_COMPLEX,
    }
    for name, mpi_dtype in expected.items():
        assert util.dtype_to_mpi(np.dtype(name)) == mpi_dtype
def test_easyopen():
    """py.test for easyopen"""
    ver = latestidd()
    txt = " Version,{};".format(ver)
    result = '{}'.format(ver)
    reload(modeleditor)
    reload(easyopen)

    idf = easyopen.easyopen(StringIO(txt))
    version = idf.idfobjects['version'.upper()][0]
    assert result == version.Version_Identifier

    # test with epw=weatherfile
    epwname = 'weatherfile.epw'
    idf = easyopen.easyopen(StringIO(txt), epw=epwname)
    assert idf.epw == epwname
def import_path(fullpath):
    """
    Import a file with full path specification. Allows one
    to import from anywhere, something __import__ does not do.
    """
    if PY33:
        modname = os.path.splitext(os.path.basename(fullpath))[0]
        loader = machinery.SourceFileLoader(modname, fullpath)
        return loader.load_module(modname)
    # http://zephyrfalcon.org/weblog/arch_d7_2002_08_31.html
    directory, basename = os.path.split(fullpath)
    modname = os.path.splitext(basename)[0]
    sys.path.append(directory)
    try:
        module = __import__(modname)
        reload(module)  # Might be out of date during tests
        return module
    finally:
        # Restore the search path even if the import fails.
        del sys.path[-1]
def test_fetch_devices_from_rc():
    """Devices come from the platoonrc file; unknown hosts raise KeyError."""
    if os.getenv("PLATOON_DEVICES"):
        os.environ.pop("PLATOON_DEVICES")
    os.environ["PLATOON_DEVICES"] = ""
    os.environ["PLATOONRC"] = "../../../platoonrc.conf"
    reload(cfgp)
    devs = cfgp.fetch_devices_for_host("lisa0")
    assert devs == ["cuda0", "cuda1"], (devs)
    devs = cfgp.fetch_devices_for_host("lisa1")
    assert devs == ["cuda3", "cuda0"], (devs)
    devs = cfgp.fetch_devices_for_host("lisa3")
    assert devs == ["cuda"], (devs)
    keyerror = False
    try:
        devs = cfgp.fetch_devices_for_host("asfasfa")
    except KeyError:
        keyerror = True
    except Exception:
        # Narrowed from a bare "except:", which also swallowed
        # SystemExit/KeyboardInterrupt.  Any other exception type leaves
        # keyerror False so the assertion below reports the failure.
        pass
    assert keyerror
def test_easyopen_idfopen():
    """py.test for easyopen"""
    ver = idd_helpers.latestidd()
    txt = " Version,{};".format(ver)
    result = "{}".format(ver)
    reload(eppy)
    reload(modeleditor)
    reload(easyopen)

    # Each opener consumes its own StringIO handle.
    idfs = [easyopen.easyopen(StringIO(txt)), eppy.openidf(StringIO(txt))]
    for idf in idfs:
        version = idf.idfobjects["version"][0]
        assert result == version.Version_Identifier

    # test with epw=weatherfile
    epwname = "weatherfile.epw"
    idfs = [
        easyopen.easyopen(StringIO(txt), epw=epwname),
        eppy.openidf(StringIO(txt), epw=epwname),
    ]
    for idf in idfs:
        assert idf.epw == epwname
def test_easyopen_withidd():
    """py.test for easyopen"""
    ver = idd_helpers.latestidd()
    iddfile = easyopen.getiddfile(ver)
    txt = " Version,{};".format(ver)
    result = '{}'.format(ver)
    reload(eppy)
    reload(modeleditor)
    reload(easyopen)

    # Each opener consumes its own StringIO handle.
    idfs = [
        easyopen.easyopen(StringIO(txt), idd=iddfile),
        eppy.openidf(StringIO(txt), idd=iddfile),
    ]
    for idf in idfs:
        version = idf.idfobjects['version'.upper()][0]
        assert result == version.Version_Identifier

    # test with epw=weatherfile
    epwname = 'weatherfile.epw'
    idfs = [
        easyopen.easyopen(StringIO(txt), idd=iddfile, epw=epwname),
        eppy.openidf(StringIO(txt), idd=iddfile, epw=epwname),
    ]
    for idf in idfs:
        assert idf.epw == epwname
def test_easyopen_idfopen():
    """py.test for easyopen"""
    ver = idd_helpers.latestidd()
    txt = " Version,{};".format(ver)
    expected = '{}'.format(ver)
    reload(eppy)
    reload(modeleditor)
    reload(easyopen)

    # Both entry points must parse the same version text identically.
    for opener in (easyopen.easyopen, eppy.openidf):
        idf = opener(StringIO(txt))
        version = idf.idfobjects['version'.upper()][0]
        assert expected == version.Version_Identifier

    # test with epw=weatherfile
    epwname = 'weatherfile.epw'
    for opener in (easyopen.easyopen, eppy.openidf):
        idf = opener(StringIO(txt), epw=epwname)
        assert idf.epw == epwname
"""
Python client for Taskcluster
"""

from __future__ import absolute_import, division, print_function, unicode_literals

import logging
import os

import taskcluster.client
import taskcluster.utils
from six.moves import reload_module as reload

# Reload the submodules so any changes made since their first import
# (e.g. environment-driven configuration) are picked up here.
reload(taskcluster.client)
reload(taskcluster.utils)

log = logging.getLogger(__name__)
# DEBUG_TASKCLUSTER_CLIENT turns on debug logging to stderr; the
# StreamHandler is only added when no handler is configured yet.
if os.environ.get('DEBUG_TASKCLUSTER_CLIENT'):
    log.setLevel(logging.DEBUG)
    if len(log.handlers) == 0:
        log.addHandler(logging.StreamHandler())
# Avoid "No handlers could be found" warnings when the app configures none.
log.addHandler(logging.NullHandler())

# Re-export the public API of the submodules at package level.
from taskcluster.client import *  # NOQA
from taskcluster.utils import *  # NOQA
def try_reload():
    """Reload lazylinker_ext with the compiledir first on sys.path.

    sys.path is restored in a finally block so a failing reload does not
    permanently leave the compiledir at the front of the search path
    (the original skipped the cleanup when reload raised).
    """
    sys.path[0:0] = [config.compiledir]
    try:
        reload(lazylinker_ext)
    finally:
        del sys.path[0]
def test_op_to_mpi_op_fail(self):
    """Unrecognized operation names must raise ValueError."""
    reload(util)
    for bad_op in ('asdfasfda', '-'):
        with self.assertRaises(ValueError):
            util.op_to_mpi(bad_op)
def try_reload():
    """Reload scan_perform with the compiledir first on sys.path.

    sys.path is restored in a finally block so a failing reload does not
    permanently leave the compiledir at the front of the search path
    (the original skipped the cleanup when reload raised).
    """
    sys.path[0:0] = [config.compiledir]
    try:
        reload(scan_perform)
    finally:
        del sys.path[0]
def reload_context_module():
    """Reload the context module to clear caches."""
    # Re-importing drops any state the module built at import time
    # (e.g. registered context providers), giving tests a clean slate.
    reload(mlflow.tracking.context)
def reload_context_registry():
    """Reload the context registry module to clear caches."""
    # Re-importing rebuilds the registry populated at import time,
    # giving tests a clean slate.
    reload(mlflow.tracking.context.registry)