Example 1
    def test_all_public_methods_are_traced(self):
        profiler_opts.set_defaults(conf.CONF)
        self.config(enabled=True,
                    group='profiler')

        classes = [
            'magnum.conductor.api.API',
            'magnum.conductor.api.ListenerAPI',
            'magnum.conductor.handlers.ca_conductor.Handler',
            'magnum.conductor.handlers.cluster_conductor.Handler',
            'magnum.conductor.handlers.conductor_listener.Handler',
            'magnum.conductor.handlers.indirection_api.Handler',
            'magnum.service.periodic.MagnumPeriodicTasks',
        ]
        for clsname in classes:
            # give the metaclass and trace_cls() decorator a chance to patch
            # methods of the classes above
            six.moves.reload_module(
                importutils.import_module(clsname.rsplit('.', 1)[0]))
            cls = importutils.import_class(clsname)

            for attr, obj in cls.__dict__.items():
                # only public methods are traced
                if attr.startswith('_'):
                    continue
                # only checks callables
                if not (inspect.ismethod(obj) or inspect.isfunction(obj)):
                    continue
                # osprofiler skips static methods
                if isinstance(obj, staticmethod):
                    continue

                self.assertTrue(getattr(obj, '__traced__', False), obj)
Example 2
    def test_case_action_note_created_on_successful_action(self):
        """
        If the action is successful, a case action note should be created.
        """
        with self.settings(PODS=[{'label': 'success_pod'}]):
            from casepro.pods import registry
            reload_module(registry)

        response = self.url_post_json(
            'unicef', reverse('perform_pod_action', args=('0',)), {
                'case_id': self.case.id,
                'action': {
                    'type': 'foo',
                    'payload': {'foo': 'bar'},
                },
            })

        self.assertEqual(response.status_code, 200)

        message = 'Type foo Params {"foo": "bar"}'
        self.assertEqual(response.json, {
            'success': True,
            'payload': {
                'message': message
            }
        })

        [caseaction] = CaseAction.objects.all()
        self.assertEqual(
            caseaction.note,
            "%s %s" % (self.admin.username, message))
Example 3
def _set_bedtools_path(path=""):
    old_path = settings._bedtools_path
    settings._bedtools_path = path
    if old_path != path:
        reload_module(bedtool)
        reload_module(pybedtools)
        return True
Example 4
 def testHalfDuplexCallWedged(self):
   import protoc_plugin_test_pb2 as test_pb2  # pylint: disable=g-import-not-at-top
   moves.reload_module(test_pb2)
   condition = threading.Condition()
   wait_cell = [False]
   @contextlib.contextmanager
   def wait():  # pylint: disable=invalid-name
     # Where's Python 3's 'nonlocal' statement when you need it?
     with condition:
       wait_cell[0] = True
     yield
     with condition:
       wait_cell[0] = False
       condition.notify_all()
   def half_duplex_request_iterator():
     request = test_pb2.StreamingOutputCallRequest()
     request.response_parameters.add(size=1, interval_us=0)
     yield request
     with condition:
       while wait_cell[0]:
         condition.wait()
   with _CreateService(test_pb2) as (methods, stub):
     with wait():
       responses = stub.HalfDuplexCall(
           half_duplex_request_iterator(), test_constants.SHORT_TIMEOUT)
       # half-duplex waits for the client to send all info
       with self.assertRaises(face.ExpirationError):
         next(responses)
Example 5
def configure_locale():
    global SYS_ENCODING

    try:
        locale.setlocale(locale.LC_ALL, '')
        SYS_ENCODING = locale.getpreferredencoding()
    except (locale.Error, IOError):
        pass

    # For OSes that are poorly configured I'll just randomly force UTF-8
    if not SYS_ENCODING or SYS_ENCODING in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        SYS_ENCODING = 'UTF-8'

    if six.PY2:
        if not hasattr(sys, 'setdefaultencoding'):
            reload_module(sys)

        try:
            # pylint: disable=E1101
            # On non-unicode builds this will raise an AttributeError; if the encoding type is not valid it throws a LookupError
            sys.setdefaultencoding(SYS_ENCODING)
        except Exception:
            print('Sorry, you MUST add the nzbToMedia folder to the PYTHONPATH environment variable'
                  '\nor find another way to force Python to use {codec} for string encoding.'.format
                  (codec=SYS_ENCODING))
            if 'NZBOP_SCRIPTDIR' in os.environ:
                sys.exit(NZBGET_POSTPROCESS_ERROR)
            else:
                sys.exit(1)
Example 6
    def _test_enforce_network_driver_types_create(
            self,
            network_driver_type,
            network_driver_config_dict,
            coe='kubernetes',
            assert_raised=False):

        @v.enforce_network_driver_types_create()
        def test(self, baymodel):
            pass

        for key, val in network_driver_config_dict.items():
            cfg.CONF.set_override(key, val, 'baymodel')
        baymodel = mock.MagicMock()
        baymodel.name = 'test_baymodel'
        baymodel.network_driver = network_driver_type
        baymodel.coe = coe

        # Reload the validator module so that baymodel configs are
        # re-evaluated.
        reload_module(v)
        validator = v.K8sValidator
        validator.supported_drivers = ['flannel', 'type1', 'type2']

        if assert_raised:
            self.assertRaises(exception.InvalidParameterValue,
                              test, self, baymodel)
        else:
            test(self, baymodel)
        return baymodel
Example 7
 def setUp(self):
     reload_module(local)
     self.tmpdir = tempfile.mkdtemp()
     self.key_file = os.path.join(self.tmpdir, 'key')
     self.patcher = mock.patch('secretcrypt.local._key_dir')
     mock_key_dir = self.patcher.start()
     mock_key_dir.return_value = self.tmpdir
Example 8
    def tearDown(self):
        super(RbacHorizonTests, self).tearDown()
        # Restore our settings
        settings.HORIZON_CONFIG['default_dashboard'] = self.old_default_dash
        settings.HORIZON_CONFIG['dashboards'] = self.old_dashboards
        # Destroy our singleton and re-create it.
        base.HorizonSite._instance = None
        del base.Horizon
        base.Horizon = base.HorizonSite()
        # Reload the convenience references to Horizon stored in __init__
        moves.reload_module(import_module("horizon"))

        # Reset Cats and Dogs default_panel to default values
        Cats.default_panel = 'kittens'
        Dogs.default_panel = 'puppies'

        # Re-register our original dashboards and panels.
        # This is necessary because autodiscovery only works on the first
        # import, and calling reload introduces innumerable additional
        # problems. Manual re-registration is the only good way for testing.
        self._discovered_dashboards.remove(Cats)
        self._discovered_dashboards.remove(Dogs)
        for dash in self._discovered_dashboards:
            base.Horizon.register(dash)
            for panel in self._discovered_panels[dash]:
                dash.register(panel)
Example 9
    def run(self):
        ''' run command '''
        # find the files that call generate
        generate_files = find_files('roles',
                                    ['inventory',
                                     'test',
                                     'playbooks'],
                                    None,
                                    'generate.py$')

        if len(generate_files) < 1:
            print('Did not find any code generation.  Please verify module code generation.')  # noqa: E501
            raise SystemExit(1)

        errors = False
        for gen in generate_files:
            print('Checking generated module code: {0}'.format(gen))
            try:
                sys.path.insert(0, os.path.dirname(gen))
                # we are importing dynamically.  This isn't in
                # the python path.
                # pylint: disable=import-error
                import generate
                reload_module(generate)
                generate.verify()
            except generate.GenerateAnsibleException as gae:
                print(gae.args)
                errors = True

        if errors:
            print('Found errors while generating module code.')
            raise SystemExit(1)

        print('\nAll generate scripts passed.\n')
Example 10
def test_channel(sol):
    config.update(
        {
            'Re': 8000.,
            'nu': 1./8000.,              # Viscosity
            'dt': 0.001,                 # Time step
            'T': 0.01,                   # End time
            'L': [2, 2*pi, 4*pi/3.],
            'M': [7, 5, 2],
            'eps': 1e-7
        }, "channel"
    )

    solver = get_solver(regression_test=regression_test,
                        mesh="channel",
                        parse_args=[sol])
    context = solver.get_context()
    initialize(solver, context)
    set_Source(**context)
    solve(solver, context)

    config.params.dealias = '3/2-rule'
    config.params.optimization = 'cython'
    reload_module(solver) # Need to reload to enable optimization
    initialize(solver, context)
    solve(solver, context)

    config.params.dealias_cheb = True
    config.params.checkpoint = 5
    config.params.write_result = 2
    initialize(solver, context)
    solve(solver, context)
Example 11
def verify(request_type, success_url=None, failure_url=None, **kwargs):
    """
    Call the verify view function. All kwargs not specified above will be passed
    as GET or POST arguments.
    """
    if request_type == 'get':
        request = factory.get('/browserid/verify', kwargs)
    else:
        request = factory.post('/browserid/verify', kwargs)

    # Patch settings prior to importing verify
    patches = {'BROWSERID_CREATE_USER': True, 'SITE_URL': 'http://testserver'}
    if success_url is not None:
        patches['LOGIN_REDIRECT_URL'] = success_url
    if failure_url is not None:
        patches['LOGIN_REDIRECT_URL_FAILURE'] = failure_url

    # We need to reload verify for the setting changes to take effect.
    with patch_settings(**patches):
        reload_module(views)
        verify_view = views.Verify.as_view()
        with patch.object(auth, 'login'):
            response = verify_view(request)

    return response
Example 12
    def _test_enforce_volume_driver_types_update(
            self,
            mock_get_resource,
            mock_pecan_request,
            volume_driver_type,
            op,
            assert_raised=False):

        @v.enforce_volume_driver_types_update()
        def test(self, baymodel_ident, patch):
            pass

        baymodel_ident = 'test_uuid_or_name'
        patch = [{'path': '/volume_driver', 'value': volume_driver_type,
                  'op': op}]
        context = mock_pecan_request.context
        baymodel = obj_utils.get_test_baymodel(context,
                                               uuid=baymodel_ident,
                                               coe='kubernetes')
        mock_get_resource.return_value = baymodel

        # Reload the validator module so that baymodel configs are
        # re-evaluated.
        reload_module(v)
        validator = v.K8sValidator
        validator.supported_volume_driver = ['cinder']

        if assert_raised:
            self.assertRaises(exception.InvalidParameterValue,
                              test, self, baymodel_ident, patch)
        else:
            test(self, baymodel_ident, patch)
            mock_get_resource.assert_called_once_with(
                'BayModel', baymodel_ident)
Example 13
    def setUp(self):
        super(DynamoDBTests, self).setUp()
        for key, value in iteritems(self.default_item_data):
            setattr(self, key, value)

        from boto.dynamodb import layer1
        reload_module(layer1)
        self.boto_layer1 = layer1
        # Mock out layer1 completely. This is where all the network interaction occurs.
        MockLayer1 = mock.Mock(spec=layer1.Layer1)
        MockLayer1.return_value = MockLayer1

        layer1_patcher = self.layer1_patcher = mock.patch('boto.dynamodb.layer1.Layer1')
        self.mocked_layer1 = layer1_patcher.start()

        # Spy on layer2, making sure that it gets a mock layer1 object.
        from boto.dynamodb import layer2
        reload_module(layer2)
        self.boto_layer2 = layer2
        MockLayer2 = self.MockLayer2 = mock.Mock(
            spec = layer2.Layer2,
            wraps = layer2.Layer2())
        MockLayer2.layer1 = MockLayer1

        # Mock out the connection to return our special MockLayer2.
        connect_patcher = self.connect_dynamodb_patcher = mock.patch('boto.connect_dynamodb')
        self.mocked_connect_dynamodb = connect_patcher.start()
        self.mocked_connect_dynamodb.return_value = MockLayer2

        # Wire up
        self.mock_boto_tables()
Example 14
def test_schema_local():
    def __test(ns_key):

        # Get the schema
        schema = jams.schema.namespace(ns_key)

        # Make sure it has the correct properties
        valid_keys = set(['time', 'duration', 'value', 'confidence'])
        for key in schema['properties']:
            assert key in valid_keys

        for key in ['time', 'duration']:
            assert key in schema['properties']

    os.environ['JAMS_SCHEMA_DIR'] = resource_filename(jams.__name__, 
                                                      os.path.join('tests',
                                                                   'fixtures',
                                                                   'schema'))

    # Namespace should not exist yet
    test_ns = 'testing_tag_upper'
    yield raises(NamespaceError)(__test), test_ns

    reload_module(jams)

    # Now it should
    yield __test, test_ns
    
    del os.environ['JAMS_SCHEMA_DIR']
Example 15
def generate_hierarchy_from_module(module):
    from .case import is_testcase_subclass

    if isinstance(module, str):
        module = importlib.import_module(module)
    logger.debug("reload %s", module)
    reload_module(module)

    children = []

    for attr_name in dir(module):
        obj = getattr(module, attr_name)
        if is_testcase_subclass(obj) and not inspect.isabstract(obj):
            case_hierarchy = generate_hierarchy_from_testcase_class(obj)
            if case_hierarchy["children"]:
                children.append(case_hierarchy)

    imp_loader = pkgutil.get_loader(module)
    if imp_loader.is_package(module.__name__):
        for module_loader, sub_module_name, is_pkg in pkgutil.iter_modules(path=module.__path__):
            sub_suite_module = importlib.import_module(module.__name__ + "." + sub_module_name)
            sub_suite_hierarchy = generate_hierarchy_from_module(sub_suite_module)
            if sub_suite_hierarchy["children"]:
                children.append(sub_suite_hierarchy)

    return {"id": module.__name__, "name": module.__name__.rpartition(".")[2], "children": children}
Example 16
    def setUp(self):
        super(OAuth2RequiredDecoratorTest, self).setUp()
        self.save_settings = copy.deepcopy(django.conf.settings)

        reload_module(oauth2client.contrib.django_util)
        self.user = User.objects.create_user(
            username='******', email='*****@*****.**', password='******')
Example 17
 def tearDown(self):
     if hasattr(conf.settings, 'DDF_DEFAULT_DATA_FIXTURE'): del conf.settings.DDF_DEFAULT_DATA_FIXTURE
     if hasattr(conf.settings, 'DDF_FILL_NULLABLE_FIELDS'): del conf.settings.DDF_FILL_NULLABLE_FIELDS
     if hasattr(conf.settings, 'DDF_IGNORE_FIELDS'): del conf.settings.DDF_IGNORE_FIELDS
     if hasattr(conf.settings, 'DDF_NUMBER_OF_LAPS'): del conf.settings.DDF_NUMBER_OF_LAPS
     if hasattr(conf.settings, 'DDF_VALIDATE_MODELS'): del conf.settings.DDF_VALIDATE_MODELS
     reload_module(conf)
Example 18
def test_NS2D(args):
    config.update(
        {
            'nu': 0.01,
            'dt': 0.05,
            'T': 10
        }, 'doublyperiodic')
    solver = get_solver(regression_test=regression_test,
                        mesh='doublyperiodic',
                        parse_args=args)
    context = solver.get_context()
    initialize(solver, **context)
    solve(solver, context)

    config.params.dealias = '3/2-rule'
    initialize(solver, **context)
    solve(solver, context)

    config.params.dealias = '2/3-rule'
    config.params.optimization = 'cython'
    reload_module(solver)
    initialize(solver, **context)
    solve(solver, context)

    config.params.write_result = 1
    config.params.checkpoint = 1
    config.params.dt = 0.01
    config.params.t = 0.0
    config.params.tstep = 0
    config.params.T = 0.04
    solver.regression_test = lambda c: None
    initialize(solver, **context)
    solve(solver, context)
Example 19
def setup_sdk_imports():
    """Sets up appengine SDK third-party imports."""
    if six.PY3:
        return

    if 'GAE_SDK_PATH' not in os.environ:
        return

    sys.path.insert(0, os.environ['GAE_SDK_PATH'])

    if 'google' in sys.modules:
        # Some packages, such as protobuf, clobber the google
        # namespace package. This prevents that.
        try:
            reload_module(sys.modules['google'])
        except ImportError:
            pass

    # This sets up google-provided libraries.
    import dev_appserver
    dev_appserver.fix_sys_path()

    # Fixes timezone and other os-level items.
    import google.appengine.tools.os_compat
    (google.appengine.tools.os_compat)
Example 20
    def tearDown(self):
        """Clean ups"""
        if self.logger_patcher:
            self.logger_patcher.stop()

        self.module_patcher.stop()
        reload_module(actionlogger)
Example 21
 def fin():
     monkeypatch.undo()
     reload_module(sherpa)
     reload_module(plot)
     reload_module(astro_plot)
     reload_module(sherpa.all)
     reload_module(sherpa.astro.all)  # These are required because otherwise Python will not match imported classes.
Example 22
    def test_create_new_task(self):
        """Test a basic export."""
        self.import_course_tarball(self.repo)
        resources = LearningResource.objects.filter(
            course__repository__id=self.repo.id).all()
        for resource in resources:
            self.create_learning_resource_export(self.repo.slug, {
                "id": resource.id
            })

        # Skip first one to test that it's excluded from export.
        task_id = self.create_learning_resource_export_task(
            self.repo.slug,
            {"ids": [r.id for r in resources[1:]]}
        )['id']

        result = self.get_learning_resource_export_tasks(
            self.repo.slug)['results'][0]
        self.assertEqual(task_id, result['id'])
        self.assertEqual("success", result['status'])
        self.assertTrue(result['url'].startswith(
            "/media/resource_exports/test_exports.tar"))

        with self.settings(
            DEFAULT_FILE_STORAGE='storages.backends.s3boto.S3BotoStorage'
        ):
            # change the default file storage to S3
            reload_module(ui.urls)
            # the view is not available any more
            resp = self.client.get(result['url'])
            self.assertEqual(resp.status_code, HTTP_404_NOT_FOUND)

        # Update for change in file storage.
        reload_module(ui.urls)

        resp = self.client.get(result['url'])
        self.assertEqual(HTTP_200_OK, resp.status_code)

        tempdir = mkdtemp()

        def make_path(resource):
            """Create a path that should exist for a resource."""
            type_name = resource.learning_resource_type.name
            return os.path.join(
                tempdir, type_name, "{id}_{url_name}.xml".format(
                    id=resource.id,
                    url_name=slugify(resource.url_name)[:200],
                )
            )
        try:
            fakefile = BytesIO(b"".join(resp.streaming_content))
            with tarfile.open(fileobj=fakefile, mode="r:gz") as tar:
                tar.extractall(path=tempdir)

            self.assertFalse(os.path.isfile(make_path(resources[0])))
            assert_resource_directory(self, resources[1:], tempdir)

        finally:
            rmtree(tempdir)
Example 23
def ensure_current_migrations_module_is_loaded():
    # every time we add migrations within the same Python process, we need to
    # reload the migrations module, since it could be cached from a previous test
    if sys.version_info >= (3, 3):
        from importlib import invalidate_caches
        invalidate_caches()
    m = import_module('migrations')
    reload_module(m)
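
A minimal, self-contained sketch of why invalidate_caches() matters here, assuming the working directory is on sys.path; generated_mod.py is a hypothetical module created at runtime that pre-existing finder caches would otherwise miss:

import importlib
import sys

from six.moves import reload_module

sys.dont_write_bytecode = True  # keep the sketch free of stale .pyc files

# Create a brand-new module file after the interpreter has started.
with open('generated_mod.py', 'w') as f:
    f.write('VALUE = 1\n')

if sys.version_info >= (3, 3):
    importlib.invalidate_caches()  # directory caches may predate the file
mod = importlib.import_module('generated_mod')

# Edit the file and reload: reload_module re-executes the source in place.
with open('generated_mod.py', 'w') as f:
    f.write('VALUE = 2\n')
reload_module(mod)
assert mod.VALUE == 2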
Example 24
 def tearDown(self):
     self.validateMockedRequestsConsumed()
     super(MockHttpTest, self).tearDown()
     # TODO: this nuke from orbit clean up seems to be encouraging
     # un-hygienic mocking on the swiftclient.client module; which may lead
     # to some unfortunate test order dependency bugs by way of the broken
     # window theory if any other modules are similarly patched
     reload_module(c)
Example 25
def ui(mock_chips):
    """
    Load and return the sherpa.astro.ui module
    """
    from sherpa.astro import ui

    reload_module(ui)
    return ui
Example 26
    def test_should_return_empty_url_on_django_driver(self):
        with override_settings(TEST_DRIVER='django'):
            reload_module(smarttest.testcases)

            smarttest.testcases.SplinterTestCase.runTest = Mock()
            obj = smarttest.testcases.SplinterTestCase()
            result = obj.get_host()
            self.assertEqual(result, '')
Example 27
 def setUp(self):
     super(LocalStoreTestCase, self).setUp()
     # NOTE(mrodden): we need to make sure that local store
     # gets imported in the current python context we are
     # testing in (eventlet vs normal python threading) so
     # we test the correct type of local store for the current
     # threading model
     moves.reload_module(local)
Example 28
 def testImportAttributes(self):
   # check that we can access the generated module and its members.
   import protoc_plugin_test_pb2 as test_pb2  # pylint: disable=g-import-not-at-top
   moves.reload_module(test_pb2)
   self.assertIsNotNone(getattr(test_pb2, SERVICER_IDENTIFIER, None))
   self.assertIsNotNone(getattr(test_pb2, STUB_IDENTIFIER, None))
   self.assertIsNotNone(getattr(test_pb2, SERVER_FACTORY_IDENTIFIER, None))
   self.assertIsNotNone(getattr(test_pb2, STUB_FACTORY_IDENTIFIER, None))
Example 29
 def testUnaryCall(self):
   import protoc_plugin_test_pb2 as test_pb2  # pylint: disable=g-import-not-at-top
   moves.reload_module(test_pb2)
   with _CreateService(test_pb2) as (methods, stub):
     request = test_pb2.SimpleRequest(response_size=13)
     response = stub.UnaryCall(request, test_constants.LONG_TIMEOUT)
   expected_response = methods.UnaryCall(request, 'not a real context!')
   self.assertEqual(expected_response, response)
Example 30
 def test_pod_types_registered_on_import(self):
     """
     On import, the pod types specified in the settings file should be registered.
     """
     from casepro.pods import registry, PodPlugin
     reload_module(registry)
     [pod_type] = registry.pod_types
     self.assertTrue(isinstance(pod_type, PodPlugin))
Example 31
def test_force_no_numba():
    """
    Force execution of code for non-existence of Numba
    """
    global Magic_calc6

    # Mock the numba module
    from mock import Mock
    mck = Mock()
    hasattr(mck, 'jit')
    del mck.jit
    import taxcalc
    nmba = sys.modules.get('numba', None)
    sys.modules.update([('numba', mck)])
    # Reload the decorators with faked out numba
    reload_module(taxcalc.decorators)
    # Get access to iterate_jit and force to jit
    ij = taxcalc.decorators.iterate_jit
    taxcalc.decorators.DO_JIT = True
    # Now use iterate_jit on a dummy function
    Magic_calc6 = ij(parameters=['w'], nopython=True)(Magic_calc6)
    # Do work and verify function works as expected
    pm = Foo()
    pf = Foo()
    pm.a = np.ones((5, ))
    pm.b = np.ones((5, ))
    pm.w = np.ones((5, ))
    pf.x = np.ones((5, ))
    pf.y = np.ones((5, ))
    pf.z = np.ones((5, ))
    ans = Magic_calc6(pm, pf)
    exp = DataFrame(data=[[2.0, 4.0]] * 5, columns=["a", "b"])
    assert_frame_equal(ans, exp)
    # Restore numba module
    if nmba:
        sys.modules['numba'] = nmba
Example 32
def test_python_no_c_extension_with_environment_variable():
    from six.moves import reload_module
    import pyrsistent._pvector
    import pyrsistent
    import os

    os.environ['PYRSISTENT_NO_C_EXTENSION'] = 'TRUE'

    reload_module(pyrsistent._pvector)
    reload_module(pyrsistent)

    assert type(pyrsistent.pvector()) is pyrsistent._pvector.PythonPVector

    del os.environ['PYRSISTENT_NO_C_EXTENSION']

    reload_module(pyrsistent._pvector)
    reload_module(pyrsistent)
Example 33
def test_patch_webbrowser(*_):

    # Make sure that webbrowser re-generates the browser list using the
    # mocked environment
    import webbrowser
    webbrowser = reload_module(webbrowser)

    # By default, we expect that BROWSER will be loaded as a generic browser
    # This is because "safari" is not a valid script in the system PATH
    assert isinstance(webbrowser.get(), webbrowser.GenericBrowser)

    # After patching, the default webbrowser should now be interpreted as an
    # OSAScript browser
    patch_webbrowser()
    assert isinstance(webbrowser.get(), webbrowser.MacOSXOSAScript)
    assert webbrowser._tryorder[0] == 'safari'
Example 34
def __force_reload_rec(module_obj, indent=0):
    if not isinstance(module_obj, types.ModuleType):
        return

    global __reloaded_modules
    if module_obj in __reloaded_modules:
        return False

    if not __is_reload_target(module_obj):
        return

    print('{}reload {}, {}'.format('  ' * indent, module_obj.__name__,
                                   module_obj.__file__))

    for submodule in __find_submodules(module_obj):
        if not __is_reload_target(submodule.module_obj):
            continue

        __force_reload_rec(submodule.module_obj, indent + 1)

        module_obj = reload_module(module_obj)
        __reloaded_modules.add(module_obj)

        if submodule.from_import:
            for symbol in submodule.symbols:
                name = symbol.name

                as_name = symbol.as_name
                if as_name is None:
                    as_name = name

                # if as_name[0] != 'Q':
                #     if as_name == name:
                #         print('{} - ({}) {} {} {} -> {}'.format(
                #             '  ' * (indent + 1),
                #             module_obj.__name__,
                #             name, module_obj.__dict__[as_name],
                #             id(module_obj.__dict__[as_name]), id(submodule.module_obj.__dict__[name])))
                #     else:
                #         print('{} - ({}) {} as {} {} {} -> {}'.format(
                #             '  ' * (indent + 1),
                #             module_obj.__name__,
                #             name, as_name, module_obj.__dict__[as_name],
                #             id(module_obj.__dict__[as_name]), id(submodule.module_obj.__dict__[name])))

                module_obj.__dict__[as_name] = submodule.module_obj.__dict__[
                    name]
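
The per-symbol rebinding above is the crux: a from-import copies the object into the importing module's namespace, and reloading the defining module does not update that copy. A self-contained sketch, using a hypothetical throwaway module mymod.py:

import importlib
import sys

from six.moves import reload_module

sys.dont_write_bytecode = True  # avoid stale bytecode between the two writes

with open('mymod.py', 'w') as fh:
    fh.write('def f():\n    return 1\n')
if sys.version_info >= (3, 3):
    importlib.invalidate_caches()

import mymod
from mymod import f

with open('mymod.py', 'w') as fh:
    fh.write('def f():\n    return 2\n')
reload_module(mymod)

assert mymod.f() == 2  # the module attribute is fresh
assert f() == 1        # the copied name still holds the old object
f = mymod.f            # manual rebinding, as the loop above performs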
Example 35
    def test_may_be_an_internal_data_fixture_nick_name(self):
        conf.settings.DDF_DEFAULT_DATA_FIXTURE = 'sequential'
        reload_module(global_settings)
        assert SequentialDataFixture == type(global_settings.DDF_DEFAULT_DATA_FIXTURE)

        conf.settings.DDF_DEFAULT_DATA_FIXTURE = 'random'
        reload_module(global_settings)
        assert RandomDataFixture == type(global_settings.DDF_DEFAULT_DATA_FIXTURE)

        conf.settings.DDF_DEFAULT_DATA_FIXTURE = 'static_sequential'
        reload_module(global_settings)
        assert StaticSequentialDataFixture == type(global_settings.DDF_DEFAULT_DATA_FIXTURE)
Example 36
    def init_cugrape(self, plen, dt):
        from .cugrape.configure_cugrape import configure
        Hs = [self.H0] + list(self.Hcs)
        Hs = [dt*H for H in Hs]
        nstate = len(self.inits[0])
        configure(self.mode_dims, Hs, plen, nstate, self.taylor_order, self.use_double)
        from .cugrape import cugrape
        from six.moves import reload_module
        self.cugrape = reload_module(cugrape)

        cugrape.init_gpu_memory()
        for i, (mds, psi0, psif) in enumerate(zip(self.mode_dims, self.inits, self.finals)):
            print(mds, np.product(mds), psi0.shape, psif.shape)
            dim = psi0.shape[1]
            assert psi0.shape == (nstate, dim)
            assert psif.shape == (nstate, dim)
            assert np.product(mds) == dim
            cugrape.load_states(i, psi0, psif)
Example 37
 def testImportErrors(self):
     for key in ['openslide', 'PIL', 'libtiff']:
         sys.modules[key] = None
     try:
         reload_module(girder.plugins.large_image.tilesource.test)
         self.assertTrue(False)
     except ImportError as exc:
         self.assertIn('No module named PIL', exc.args[0])
     try:
         reload_module(girder.plugins.large_image.tilesource.tiff_reader)
         self.assertTrue(False)
     except ImportError as exc:
         self.assertIn('No module named libtiff', exc.args[0])
     try:
         reload_module(girder.plugins.large_image.tilesource.svs)
         self.assertTrue(False)
     except ImportError as exc:
         self.assertIn('No module named openslide', exc.args[0])
     sys.modules['girder.plugins.large_image.tilesource.test'] = None
     reload_module(girder.plugins.large_image.tilesource)
Example 38
    def testRunsInitCodeOnImportWithFailure(self):
        _altair._register_hook()

        altair = importlib.import_module('altair')

        self.assertNotIn('COLAB_ALTAIR_IMPORT_HOOK_EXCEPTION', os.environ)
        self.assertIn('altair', sys.modules)
        self.assertEqual('colab', altair.renderers.active)

        # Reload of the module should not re-execute code.
        # Modify the active renderer and ensure that a reload doesn't reset it to
        # colab.
        altair.renderers.enable('default')
        self.assertEqual('default', altair.renderers.active)

        altair = reload_module(altair)
        self.assertNotIn('COLAB_ALTAIR_IMPORT_HOOK_EXCEPTION', os.environ)
        self.assertIn('altair', sys.modules)
        self.assertEqual('default', altair.renderers.active)
Example 39
    def child():
        OLD_STYLE = False
        try:
            original_import = getattr(__builtins__, '__import__')
            OLD_STYLE = True
        except AttributeError:
            original_import = __builtins__['__import__']

        tty_modules = ('termios', 'fcntl', 'tty')

        def __import__(name, *args, **kwargs):
            if name in tty_modules:
                raise ImportError
            return original_import(name, *args, **kwargs)

        for module in tty_modules:
            sys.modules.pop(module, None)

        warnings.filterwarnings("error", category=UserWarning)
        try:
            if OLD_STYLE:
                __builtins__.__import__ = __import__
            else:
                __builtins__['__import__'] = __import__
            try:
                import blessed.terminal
                reload_module(blessed.terminal)
            except UserWarning:
                err = sys.exc_info()[1]
                assert err.args[0] == blessed.terminal._MSG_NOSUPPORT

            warnings.filterwarnings("ignore", category=UserWarning)
            import blessed.terminal
            reload_module(blessed.terminal)
            assert not blessed.terminal.HAS_TTY
            term = blessed.terminal.Terminal('ansi')
            # https://en.wikipedia.org/wiki/VGA-compatible_text_mode
            # see section '#PC_common_text_modes'
            assert term.height == 25
            assert term.width == 80

        finally:
            if OLD_STYLE:
                setattr(__builtins__, '__import__', original_import)
            else:
                __builtins__['__import__'] = original_import
            warnings.resetwarnings()
            import blessed.terminal
            reload_module(blessed.terminal)
Example 40
    def _import(klass):
        '''1) Get a reference to the module
           2) Check the file that module's imported from
           3) If that file's been updated, force a reload of that module
                return it'''
        mod = __import__(klass.rpartition('.')[0])
        for segment in klass.split('.')[1:-1]:
            mod = getattr(mod, segment)

        # Alright, now check the file associated with it. Note that classes
        # defined in __main__ don't have a __file__ attribute
        if klass not in BaseJob._loaded:
            BaseJob._loaded[klass] = time.time()
        if hasattr(mod, '__file__'):
            try:
                mtime = os.stat(mod.__file__).st_mtime
                if BaseJob._loaded[klass] < mtime:
                    mod = reload_module(mod)
            except OSError:
                logger.warn('Could not check modification time of %s',
                    mod.__file__)

        return getattr(mod, klass.rpartition('.')[2])
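
A hypothetical usage note for the helper above (the dotted path and class name are illustrative, not from the source):

# Resolve the class, transparently re-importing its defining module if
# the source file's mtime shows it changed since it was last loaded.
klass = BaseJob._import('myjobs.tasks.SendEmailJob')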
Example 41
def test_missing_ordereddict_uses_module(monkeypatch):
    """ordereddict module is imported when without collections.OrderedDict."""
    import blessed.keyboard

    if hasattr(collections, 'OrderedDict'):
        monkeypatch.delattr('collections.OrderedDict')

    try:
        reload_module(blessed.keyboard)
    except ImportError as err:
        assert err.args[0] in ("No module named ordereddict",  # py2
                               "No module named 'ordereddict'")  # py3
        sys.modules['ordereddict'] = mock.Mock()
        sys.modules['ordereddict'].OrderedDict = -1
        reload_module(blessed.keyboard)
        assert blessed.keyboard.OrderedDict == -1
        del sys.modules['ordereddict']
        monkeypatch.undo()
        reload_module(blessed.keyboard)
    else:
        assert platform.python_version_tuple() < ('2', '7')  # reached by py2.6
Example 42
 def setUp(self):
     reload_module(cli)
Example 43
def write_to_excel(xls_out, list_vars):
    """
    Function that reads all the variables (in list_vars) and inserts them one by one to excel

    :param xls_out: The path of the folder where the excel files are to be written
    :param list_vars: List containing the dispaset variables
    :returns: Binary variable (True)
    """


    reload_module(sys)
    try: # Hack needed in 2.7
        sys.setdefaultencoding("utf-8")
    except:
        pass

    if not os.path.exists(xls_out):
        os.mkdir(xls_out)

    # Printing all sets in one sheet:
    writer = pd.ExcelWriter(os.path.join(xls_out, 'InputDispa-SET - Sets.xlsx'), engine='xlsxwriter')

    [sets, parameters] = list_vars

    try:
        config = parameters['Config']['val']
        first_day = pd.datetime(config[0, 0], config[0, 1], config[0, 2], 0)
        last_day = pd.datetime(config[1, 0], config[1, 1], config[1, 2], 23)
        dates = pd.date_range(start=first_day, end=last_day, freq='1h')
    except:
        dates = []

    i = 0
    for s in sets:
        df = pd.DataFrame(sets[s], columns=[s])
        df.to_excel(writer, sheet_name='Sets', startrow=1, startcol=i, header=True, index=False)
        i += 1
    writer.save()
    logging.info('All sets successfully written to excel')

    # Printing each parameter in a separate sheet and workbook:
    for p in parameters:
        var = parameters[p]
        dim = len(var['sets'])
        if var['sets'][-1] == 'h' and isinstance(dates, pd.DatetimeIndex) and dim > 1:
            if len(dates) != var['val'].shape[-1]:
                logging.critical('The date range in the Config variable (' + str(
                    len(dates)) + ' time steps) does not match the length of the time index (' + str(
                    var['val'].shape[-1]) + ') for variable ' + p)
                sys.exit(1)
            var['firstrow'] = 5
        else:
            var['firstrow'] = 1
        writer = pd.ExcelWriter(os.path.join(xls_out, 'InputDispa-SET - ' + p + '.xlsx'), engine='xlsxwriter')
        if dim == 1:
            df = pd.DataFrame(var['val'], columns=[p], index=sets[var['sets'][0]])
            df.to_excel(writer, sheet_name=p, startrow=var['firstrow'], startcol=0, header=True, index=True)
            worksheet = writer.sheets[p]
            worksheet.write_string(0, 0, p + '(' + var['sets'][0] + ')')
            worksheet.set_column(0, 0, 30)
        elif dim == 2:
            list_sets = [sets[var['sets'][0]], sets[var['sets'][1]]]
            values = var['val']
            df = pd.DataFrame(values, columns=list_sets[1], index=list_sets[0])
            df.to_excel(writer, sheet_name=p, startrow=var['firstrow'], startcol=0, header=True, index=True)
            worksheet = writer.sheets[p]
            if var['firstrow'] == 5:
                worksheet.write_row(1, 1, dates.year)
                worksheet.write_row(2, 1, dates.month)
                worksheet.write_row(3, 1, dates.day)
                worksheet.write_row(4, 1, dates.hour + 1)
            worksheet.write_string(0, 0, p + '(' + var['sets'][0] + ',' + var['sets'][1] + ')')
            worksheet.freeze_panes(var['firstrow'] + 1, 1)
            worksheet.set_column(0, 0, 30)
        elif dim == 3:
            list_sets = [sets[var['sets'][0]], sets[var['sets'][1]], sets[var['sets'][2]]]
            values = var['val']
            for i in range(len(list_sets[0])):
                key = list_sets[0][i]
                Nrows = len(list_sets[1])
                df = pd.DataFrame(values[i, :, :], columns=list_sets[2], index=list_sets[1])
                df.to_excel(writer, sheet_name=p, startrow=var['firstrow'] + 1 + i * Nrows, startcol=1, header=False,
                            index=True)
                df2 = pd.DataFrame(np.array([key]).repeat(Nrows))
                df2.to_excel(writer, sheet_name=p, startrow=var['firstrow'] + 1 + i * Nrows, startcol=0, header=False,
                             index=False)
            worksheet = writer.sheets[p]
            if var['firstrow'] == 5:
                worksheet.write_row(1, 2, dates.year)
                worksheet.write_row(2, 2, dates.month)
                worksheet.write_row(3, 2, dates.day)
                worksheet.write_row(4, 2, dates.hour + 1)
            worksheet.write_string(0, 0, p + '(' + var['sets'][0] + ',' + var['sets'][1] + ',' + var['sets'][2] + ')')
            worksheet.write_string(var['firstrow'] - 1, 0, var['sets'][0])
            worksheet.write_string(var['firstrow'] - 1, 1, var['sets'][1])
            worksheet.freeze_panes(var['firstrow'], 2)
            worksheet.set_column(0, 1, 30)
            df = pd.DataFrame(columns=list_sets[2])
            df.to_excel(writer, sheet_name=p, startrow=var['firstrow'], startcol=2, header=True, index=False)
        else:
            logging.error('Only three dimensions currently supported. Parameter ' + p + ' has ' + str(dim) + ' dimensions.')
        writer.save()
        logging.info('Parameter ' + p + ' successfully written to excel')


    # Writing a gams file to process the excel sheets:
    gmsfile = open(os.path.join(xls_out, 'make_gdx.gms'), 'w')
    i = 0

    for s in sets:
        gmsfile.write('\n')
        gmsfile.write('$CALL GDXXRW "InputDispa-SET - Sets.xlsx" Set=' + s + ' rng=' + chr(
            i + ord('A')) + '3 Rdim=1  O=' + s + '.gdx \n')
        gmsfile.write('$GDXIN ' + s + '.gdx \n')
        gmsfile.write('Set ' + s + '; \n')
        gmsfile.write('$LOAD ' + s + '\n')
        gmsfile.write('$GDXIN \n')
        i = i + 1

    for p in parameters:
        var = parameters[p]
        dim = len(var['sets'])
        gmsfile.write('\n')
        if dim == 1:
            gmsfile.write('$CALL GDXXRW "InputDispa-SET - ' + p + '.xlsx" par=' + p + ' rng=A' + str(
                var['firstrow'] + 1) + ' Rdim=1 \n')
        elif dim == 2:
            gmsfile.write('$CALL GDXXRW "InputDispa-SET - ' + p + '.xlsx" par=' + p + ' rng=A' + str(
                var['firstrow'] + 1) + ' Rdim=1 Cdim=1 \n')
        elif dim == 3:
            gmsfile.write('$CALL GDXXRW "InputDispa-SET - ' + p + '.xlsx" par=' + p + ' rng=A' + str(
                var['firstrow'] + 1) + ' Rdim=2 Cdim=1 \n')
        gmsfile.write('$GDXIN "InputDispa-SET - ' + p + '.gdx" \n')
        gmsfile.write('Parameter ' + p + '; \n')
        gmsfile.write('$LOAD ' + p + '\n')
        gmsfile.write('$GDXIN \n')

    gmsfile.write('\n')
    gmsfile.write('Execute_Unload "Inputs.gdx"')
    gmsfile.close()

    logging.info('Data Successfully written to the ' + xls_out + ' directory.')
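
A hypothetical call, assuming sets and parameters were built by the Dispa-SET pre-processing step (the folder name is illustrative):

write_to_excel('Outputs', [sets, parameters])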
Example 44
 def test_get_unknown_env(self):
     self.assertEqual(env.get('FOO'), 'foo')
     reload_module(env)
Example 45
def switch_branch(branch,
                  apps=None,
                  bench_path='.',
                  upgrade=False,
                  check_upgrade=True):
    from bench.utils import update_requirements, update_node_packages, backup_all_sites, patch_sites, build_assets, post_upgrade
    apps_dir = os.path.join(bench_path, 'apps')
    version_upgrade = (False, )
    switched_apps = []

    if not apps:
        apps = [
            name for name in os.listdir(apps_dir)
            if os.path.isdir(os.path.join(apps_dir, name))
        ]
        if branch == "v4.x.x":
            apps.append('shopping_cart')

    for app in apps:
        app_dir = os.path.join(apps_dir, app)

        if not os.path.exists(app_dir):
            bench.utils.log("{} does not exist!".format(app), level=2)
            continue

        repo = git.Repo(app_dir)
        unshallow_flag = os.path.exists(
            os.path.join(app_dir, ".git", "shallow"))
        bench.utils.log("Fetching upstream {0}for {1}".format(
            "unshallow " if unshallow_flag else "", app))

        bench.utils.exec_cmd("git remote set-branches upstream  '*'",
                             cwd=app_dir)
        bench.utils.exec_cmd("git fetch --all{0} --quiet".format(
            " --unshallow" if unshallow_flag else ""),
                             cwd=app_dir)

        if check_upgrade:
            version_upgrade = is_version_upgrade(app=app,
                                                 bench_path=bench_path,
                                                 branch=branch)
            if version_upgrade[0] and not upgrade:
                bench.utils.log(
                    "Switching to {0} will cause upgrade from {1} to {2}. Pass --upgrade to confirm"
                    .format(branch, version_upgrade[1], version_upgrade[2]),
                    level=2)
                sys.exit(1)

        print("Switching for " + app)
        bench.utils.exec_cmd("git checkout -f {0}".format(branch), cwd=app_dir)

        if str(repo.active_branch) == branch:
            switched_apps.append(app)
        else:
            bench.utils.log("Switching branches failed for: {}".format(app),
                            level=2)

    if switched_apps:
        bench.utils.log("Successfully switched branches for: " +
                        ", ".join(switched_apps),
                        level=1)
        print(
            'Please run `bench update --patch` to be safe from any differences in database schema'
        )

    if version_upgrade[0] and upgrade:
        update_requirements()
        update_node_packages()
        reload_module(bench.utils)
        backup_all_sites()
        patch_sites()
        build_assets()
        post_upgrade(version_upgrade[1], version_upgrade[2])
Example 46
 def test_index_authors_on_author_id(self):
     # reload module first to avoid memoize remembering data from other test scenarios
     reload_module(data)
     expected_row_count = 10
     article_author_index = data.index_authors_on_author_id()
     self.assertEqual(len(article_author_index), expected_row_count)
Example 47
 def test_get_impl_linux(self):
     self.addCleanup(self._reload_original_os_module)
     with mock.patch('os.name', 'posix'):
         moves.reload_module(api)
         from os_vif.internal.ip.linux import impl_pyroute2
         self.assertIsInstance(api.ip, impl_pyroute2.PyRoute2)
Example 48
 def _reload_original_os_module():
     moves.reload_module(api)
Example 49
 def setUp(self):
     super(LockDirUntilExit, self).setUp()
     # reset global state from other tests
     import certbot.util
     reload_module(certbot.util)
Example 50
 def setUp(self):
     super(OAuth2AuthorizeTest, self).setUp()
     self.save_settings = copy.deepcopy(django.conf.settings)
     reload_module(googleoauth2django)
     self.user = django_models.User.objects.create_user(
         username='******', email='*****@*****.**', password='******')
Example 51
def pytest_runtest_setup(item):
    """.. todo:: stop reloading configs module in 1.x."""
    from six.moves import reload_module
    from exhale import configs
    reload_module(configs)
Example 52
    def start(self):  # pylint: disable=too-many-branches,too-many-statements
        """
        Start SickChill
        """
        # do some preliminary stuff
        sickbeard.MY_FULLNAME = ek(os.path.normpath, ek(os.path.abspath, __file__))
        sickbeard.MY_NAME = ek(os.path.basename, sickbeard.MY_FULLNAME)
        sickbeard.PROG_DIR = ek(os.path.dirname, sickbeard.MY_FULLNAME)
        sickbeard.LOCALE_DIR = ek(os.path.join, sickbeard.PROG_DIR, 'locale')
        sickbeard.DATA_DIR = sickbeard.PROG_DIR
        sickbeard.MY_ARGS = sys.argv[1:]

        try:
            locale.setlocale(locale.LC_ALL, '')
            sickbeard.SYS_ENCODING = locale.getpreferredencoding()
        except (locale.Error, IOError):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # pylint: disable=no-member
        if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING.lower() in ('ansi_x3.4-1968', 'us-ascii', 'ascii', 'charmap') or \
                (sys.platform.startswith('win') and sys.getwindowsversion()[0] >= 6 and str(getattr(sys.stdout, 'device', sys.stdout).encoding).lower() in ('cp65001', 'charmap')):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # TODO: Continue working on making this unnecessary, this hack creates all sorts of hellish problems
        if not hasattr(sys, 'setdefaultencoding'):
            reload_module(sys)

        try:
            # On non-unicode builds this will raise an AttributeError; if the encoding type is not valid it throws a LookupError
            sys.setdefaultencoding(sickbeard.SYS_ENCODING)  # pylint: disable=no-member
        except (AttributeError, LookupError):
            sys.exit('Sorry, you MUST add the SickChill folder to the PYTHONPATH environment variable\n'
                     'or find another way to force Python to use {} for string encoding.'.format(sickbeard.SYS_ENCODING))

        # Rename the main thread
        threading.currentThread().name = 'MAIN'

        args = SickChillArgumentParser(sickbeard.PROG_DIR).parse_args()

        if args.force_update:
            result = self.force_update()
            sys.exit(int(not result))  # Ok -> 0 , Error -> 1

        # Need console logging for SickBeard.py and SickBeard-console.exe
        sickbeard.NO_RESIZE = args.noresize
        self.console_logging = (not hasattr(sys, 'frozen')) or (sickbeard.MY_NAME.lower().find('-console') > 0) and not args.quiet
        self.no_launch = args.nolaunch
        self.forced_port = args.port
        if args.daemon:
            self.run_as_daemon = platform.system() != 'Windows'
            self.console_logging = False
            self.no_launch = True

        self.create_pid = bool(args.pidfile)
        self.pid_file = args.pidfile
        if self.pid_file and ek(os.path.exists, self.pid_file):
            # If the pid file already exists, SickChill may still be running, so exit
            raise SystemExit('PID file: {0} already exists. Exiting.'.format(self.pid_file))

        sickbeard.DATA_DIR = ek(os.path.abspath, args.datadir) if args.datadir else sickbeard.DATA_DIR
        sickbeard.CONFIG_FILE = ek(os.path.abspath, args.config) if args.config else ek(os.path.join, sickbeard.DATA_DIR, 'config.ini')

        # The pid file is only useful in daemon mode, make sure we can write the file properly
        if self.create_pid:
            if self.run_as_daemon:
                pid_dir = ek(os.path.dirname, self.pid_file)
                if not ek(os.access, pid_dir, os.F_OK):
                    sys.exit('PID dir: {0} doesn\'t exist. Exiting.'.format(pid_dir))
                if not ek(os.access, pid_dir, os.W_OK):
                    raise SystemExit('PID dir: {0} must be writable (write permissions). Exiting.'.format(pid_dir))
            else:
                if self.console_logging:
                    sys.stdout.write('Not running in daemon mode. PID file creation disabled.\n')
                self.create_pid = False

        # Make sure that we can create the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.F_OK):
            try:
                ek(os.makedirs, sickbeard.DATA_DIR, 0o744)
            except os.error:
                raise SystemExit('Unable to create data directory: {0}'.format(sickbeard.DATA_DIR))

        # Make sure we can write to the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.W_OK):
            raise SystemExit('Data directory must be writeable: {0}'.format(sickbeard.DATA_DIR))

        # Make sure we can write to the config file
        if not ek(os.access, sickbeard.CONFIG_FILE, os.W_OK):
            if ek(os.path.isfile, sickbeard.CONFIG_FILE):
                raise SystemExit('Config file must be writeable: {0}'.format(sickbeard.CONFIG_FILE))
            elif not ek(os.access, ek(os.path.dirname, sickbeard.CONFIG_FILE), os.W_OK):
                raise SystemExit('Config file root dir must be writeable: {0}'.format(ek(os.path.dirname, sickbeard.CONFIG_FILE)))

        ek(os.chdir, sickbeard.DATA_DIR)

        # Check if we need to perform a restore first
        restore_dir = ek(os.path.join, sickbeard.DATA_DIR, 'restore')
        if ek(os.path.exists, restore_dir):
            success = self.restore_db(restore_dir, sickbeard.DATA_DIR)
            if self.console_logging:
                sys.stdout.write('Restore: restoring DB and config.ini {0}!\n'.format(('FAILED', 'SUCCESSFUL')[success]))

        # Load the config and publish it to the sickbeard package
        if self.console_logging and not ek(os.path.isfile, sickbeard.CONFIG_FILE):
            sys.stdout.write('Unable to find {0}, all settings will be default!\n'.format(sickbeard.CONFIG_FILE))

        sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE, encoding='UTF-8')

        # Initialize the config and our threads
        sickbeard.initialize(consoleLogging=self.console_logging)

        if self.run_as_daemon:
            self.daemonize()

        # Get PID
        sickbeard.PID = os.getpid()

        # Build from the DB to start with
        self.load_shows_from_db()

        logger.log('Starting SickChill [{branch}] using \'{config}\''.format
                   (branch=sickbeard.BRANCH, config=sickbeard.CONFIG_FILE))

        self.clear_cache()

        if self.forced_port:
            logger.log('Forcing web server to port {port}'.format(port=self.forced_port))
            self.start_port = self.forced_port
        else:
            self.start_port = sickbeard.WEB_PORT

        if sickbeard.WEB_LOG:
            self.log_dir = sickbeard.LOG_DIR
        else:
            self.log_dir = None

        # sickbeard.WEB_HOST is available as a configuration value in various
        # places but is not configurable. It is supported here for historic reasons.
        if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
            self.web_host = sickbeard.WEB_HOST
        else:
            self.web_host = '' if sickbeard.WEB_IPV6 else '0.0.0.0'

        # web server options
        self.web_options = {
            'port': int(self.start_port),
            'host': self.web_host,
            'data_root': ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME),
            'web_root': sickbeard.WEB_ROOT,
            'log_dir': self.log_dir,
            'username': sickbeard.WEB_USERNAME,
            'password': sickbeard.WEB_PASSWORD,
            'enable_https': sickbeard.ENABLE_HTTPS,
            'handle_reverse_proxy': sickbeard.HANDLE_REVERSE_PROXY,
            'https_cert': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_CERT),
            'https_key': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_KEY),
        }

        # start web server
        self.web_server = SRWebServer(self.web_options)
        self.web_server.start()

        # Fire up all our threads
        sickbeard.start()

        # Build internal name cache
        name_cache.buildNameCache()

        # Pre-populate network timezones, it isn't thread safe
        network_timezones.update_network_dict()

        # sure, why not?
        if sickbeard.USE_FAILED_DOWNLOADS:
            failed_history.trimHistory()

        # Check for metadata indexer updates for shows (sets the next aired ep!)
        # sickbeard.showUpdateScheduler.forceRun()

        # Launch browser
        if sickbeard.LAUNCH_BROWSER and not (self.no_launch or self.run_as_daemon):
            sickbeard.launchBrowser('https' if sickbeard.ENABLE_HTTPS else 'http', self.start_port, sickbeard.WEB_ROOT)

        # main loop
        while True:
            time.sleep(1)
Example 53
 def test_snappy_import_fails(self):
     import sys
     with patch.dict(sys.modules, values={'snappy': None}):
         reload_module(afkak.codec)
         self.assertFalse(afkak.codec.has_snappy())
     reload_module(afkak.codec)
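
The patch works because of an import-system detail: a None entry in sys.modules makes a later import of that name raise ImportError, so reloading afkak.codec re-runs its try/except import guard. A minimal sketch of just that mechanism, assuming snappy is not already imported:

import sys
from unittest import mock  # assumption: Python 3; on 2.x use the mock package

with mock.patch.dict(sys.modules, {'snappy': None}):
    try:
        import snappy  # noqa: F401 -- the None sentinel forces ImportError
        has_snappy = True
    except ImportError:
        has_snappy = False

assert has_snappy is False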
Example 54
def _reload_modules(items):
    # type: (List[_ModuleItem]) -> NoReturn
    for item in items:
        print('reload: {}'.format(item.module.__name__))
        reload_module(item.module)
Example 55
 def setUp(self):
     super(ThemePreviewTests, self).setUp()
     urlresolvers.clear_url_caches()
     moves.reload_module(import_module(settings.ROOT_URLCONF))
     base.Horizon.register(Developer)
     base.Horizon._urls()
Example 56
import pytest
from six.moves import reload_module

# HACK: if the plugin is imported before the coverage plugin then all
# the top-level code will be omitted from coverage, so force it to be
# reloaded within this unit test under coverage
import pytest_fixture_config
reload_module(pytest_fixture_config)

from pytest_fixture_config import Config, requires_config, yield_requires_config


class DummyConfig(Config):
    __slots__ = ('foo', 'bar')


def test_config_update():
    cfg = DummyConfig(foo=1, bar=2)
    cfg.update({"foo": 10, "bar": 20})
    assert cfg.foo == 10
    assert cfg.bar == 20
    with pytest.raises(ValueError):
        cfg.update({"baz": 30})


CONFIG1 = DummyConfig(foo=None, bar=1)


@requires_config(CONFIG1, ('foo', 'bar'))
@pytest.fixture
def a_fixture(request):
Example 57
 def test_get_impl_windows(self):
     self.addCleanup(self._reload_original_os_module)
     with mock.patch('os.name', 'nt'):
         moves.reload_module(api)
         from os_vif.internal.ip.windows import impl_netifaces
         self.assertIsInstance(api.ip, impl_netifaces.Netifaces)
Example 58
 def tearDown(self):
     # Need to reload the module to ensure
     # getting back to normal
     import certbot.plugins.util
     sys.modules["psutil"] = self.psutil
     reload_module(certbot.plugins.util)
Example 59
 def setUp(self):
     """Establish a clean test environment."""
     super(TestRegistryV1ClientApi, self).setUp()
     self.context = context.RequestContext()
     reload_module(rapi)
Example 60
def test_data_utils(in_tmpdir):
    """Tests get_file from a url, plus extraction and validation.
    """
    dirname = 'data_utils'

    with open('test.txt', 'w') as text_file:
        text_file.write('Float like a butterfly, sting like a bee.')

    with tarfile.open('test.tar.gz', 'w:gz') as tar_file:
        tar_file.add('test.txt')

    with zipfile.ZipFile('test.zip', 'w') as zip_file:
        zip_file.write('test.txt')

    origin = urljoin('file://', pathname2url(os.path.abspath('test.tar.gz')))

    path = get_file(dirname, origin, untar=True)
    filepath = path + '.tar.gz'
    data_keras_home = os.path.dirname(
        os.path.dirname(os.path.abspath(filepath)))
    assert data_keras_home == os.path.dirname(load_backend._config_path)
    os.remove(filepath)

    _keras_home = os.path.join(os.path.abspath('.'), '.keras')
    if not os.path.exists(_keras_home):
        os.makedirs(_keras_home)
    os.environ['KERAS_HOME'] = _keras_home
    reload_module(load_backend)
    path = get_file(dirname, origin, untar=True)
    filepath = path + '.tar.gz'
    data_keras_home = os.path.dirname(
        os.path.dirname(os.path.abspath(filepath)))
    assert data_keras_home == os.path.dirname(load_backend._config_path)
    os.environ.pop('KERAS_HOME')
    shutil.rmtree(_keras_home)
    reload_module(load_backend)

    path = get_file(dirname, origin, untar=True)
    filepath = path + '.tar.gz'
    hashval_sha256 = _hash_file(filepath)
    hashval_md5 = _hash_file(filepath, algorithm='md5')
    path = get_file(dirname, origin, md5_hash=hashval_md5, untar=True)
    path = get_file(filepath, origin, file_hash=hashval_sha256, extract=True)
    assert os.path.exists(filepath)
    assert validate_file(filepath, hashval_sha256)
    assert validate_file(filepath, hashval_md5)
    os.remove(filepath)
    os.remove('test.tar.gz')

    origin = urljoin('file://', pathname2url(os.path.abspath('test.zip')))

    hashval_sha256 = _hash_file('test.zip')
    hashval_md5 = _hash_file('test.zip', algorithm='md5')
    path = get_file(dirname, origin, md5_hash=hashval_md5, extract=True)
    path = get_file(dirname, origin, file_hash=hashval_sha256, extract=True)
    assert os.path.exists(path)
    assert validate_file(path, hashval_sha256)
    assert validate_file(path, hashval_md5)

    os.remove(path)
    os.remove(os.path.join(os.path.dirname(path), 'test.txt'))
    os.remove('test.txt')
    os.remove('test.zip')